code
stringlengths
13
6.09M
order_type
stringclasses
2 values
original_example
dict
step_ids
listlengths
1
5
import datetime # weightloss script currentWeight = 73 goalWeight = 67 avgKgPerWeek = 0.45 startDate = datetime.date.today() endDate = startDate while currentWeight > goalWeight: # adding 7 days to simulate a week passing endDate += datetime.timedelta(days=7) currentWeight -= avgKgPerWeek print(endDate, round(currentWeight, 2)) print(f"Start date: {startDate.month.no}, end date: {endDate} ") print(f"Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days")
normal
{ "blob_id": "7fb568880c40895870a0c541d9a88a8070a79e5b", "index": 5762, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile currentWeight > goalWeight:\n endDate += datetime.timedelta(days=7)\n currentWeight -= avgKgPerWeek\n print(endDate, round(currentWeight, 2))\nprint(f'Start date: {startDate.month.no}, end date: {endDate} ')\nprint(\n f'Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days'\n )\n", "step-3": "<mask token>\ncurrentWeight = 73\ngoalWeight = 67\navgKgPerWeek = 0.45\nstartDate = datetime.date.today()\nendDate = startDate\nwhile currentWeight > goalWeight:\n endDate += datetime.timedelta(days=7)\n currentWeight -= avgKgPerWeek\n print(endDate, round(currentWeight, 2))\nprint(f'Start date: {startDate.month.no}, end date: {endDate} ')\nprint(\n f'Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days'\n )\n", "step-4": "import datetime\ncurrentWeight = 73\ngoalWeight = 67\navgKgPerWeek = 0.45\nstartDate = datetime.date.today()\nendDate = startDate\nwhile currentWeight > goalWeight:\n endDate += datetime.timedelta(days=7)\n currentWeight -= avgKgPerWeek\n print(endDate, round(currentWeight, 2))\nprint(f'Start date: {startDate.month.no}, end date: {endDate} ')\nprint(\n f'Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days'\n )\n", "step-5": "import datetime\n\n# weightloss script\ncurrentWeight = 73\ngoalWeight = 67\navgKgPerWeek = 0.45\n\nstartDate = datetime.date.today()\nendDate = startDate\n\nwhile currentWeight > goalWeight:\n\n # adding 7 days to simulate a week passing\n endDate += datetime.timedelta(days=7)\n currentWeight -= avgKgPerWeek\n \n print(endDate, round(currentWeight, 2))\n\n\nprint(f\"Start date: {startDate.month.no}, end date: {endDate} \")\nprint(f\"Weeks to achieve weight goal: {(endDate - startDate).days // 7}, {(endDate - startDate).days} days\")", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
#!/usr/bin/env python3 # -*- coding: UTF-8 -*- import hashlib import re from datetime import datetime import gevent import requests import scrapy from gevent.pool import Pool from lxml import etree from scrapy.http import HtmlResponse from sqlalchemy import create_engine, func from sqlalchemy.orm import sessionmaker from ..items import Album, AlbumImageRelationItem, AlbumItem, AlbumImageItem from ..utils.const import const from ..utils.redis_util import get_redis_conn_from_pool class Beautyleg7Spider(scrapy.Spider): name = 'Beautyleg7Spider' category_list = ['siwameitui', 'xingganmeinv', 'weimeixiezhen', 'ribenmeinv'] start_urls = [('http://www.beautyleg7.com/' + category) for category in category_list] const.REPEATED_THRESHOLD = 10 def __init__(self, name=None, **kwargs): super().__init__(name=None, **kwargs) self.db_session = None self.gevent_pool = Pool(32) self.redis_cmd = get_redis_conn_from_pool() self.ALBUM_URL_REDIS_KEY_PREFIX = "album_url" self.REDIS_LIMITER = ":" self.album_last_item_redis_unique_key = "" self.album_item = None self.album_image_item_list = [] self.album_image_relation_item = AlbumImageRelationItem() def start_requests(self): mysql_host = self.crawler.settings.get("MYSQL_HOST") mysql_port = self.crawler.settings.get("MYSQL_PORT") mysql_user = self.crawler.settings.get("MYSQL_USER") mysql_password = self.crawler.settings.get("MYSQL_PASSWORD") mysql_db_name = self.crawler.settings.get("MYSQL_DB_NAME") engine = create_engine('mysql+mysqlconnector://{}:{}@{}:{}/{}'.format(mysql_user, mysql_password, mysql_host, mysql_port, mysql_db_name), pool_recycle=180, echo=False) session_maker = sessionmaker(bind=engine) self.db_session = session_maker() for url in self.start_urls: yield scrapy.Request(url) def parse(self, response): if self.db_session is None: self.logger.error("db_session is None") return None repeated_count = 0 if response is None: self.logger.warn("响应为空,不做处理!") else: album_nodes = response.css('.pic .item') category = 
response.css('.sitepath a')[1].css('a::text').extract_first().strip() # 判断最后一页的最后主题是否被持久化 is_persisted_last_item = self.redis_cmd.get(self.album_last_item_redis_unique_key) is_last_item_finished = False if is_persisted_last_item is not None and int(is_persisted_last_item): is_last_item_finished = True self.logger.info("已持久化最后一页的最后主题:%s" % self.album_last_item_redis_unique_key) # 如果是最后一页则设置Redis存储key:“最后一页页码:最后一条主题url”,value:is_persisted(取值为0或1,默认为0) album_last_page_url = response.meta.get("album_last_page_url") if album_last_page_url is not None: album_last_page_url_last_item_redis_suffix = album_nodes[-1].css('.p a::attr(href)').extract_first() self.album_last_item_redis_unique_key = self.ALBUM_URL_REDIS_KEY_PREFIX + self.REDIS_LIMITER + \ self.sub_url_scheme(album_last_page_url, "") + self.REDIS_LIMITER + \ self.sub_url_scheme(album_last_page_url_last_item_redis_suffix, "") self.redis_cmd.setnx(self.album_last_item_redis_unique_key, 0) for album_node in album_nodes: album_url = album_node.css('.p a::attr(href)').extract_first().strip() # 判断当前主题url是否已持久化 is_persisted = self.redis_cmd.get(album_url) if is_persisted is not None and int(is_persisted): self.logger.info("Redis中该url album_url:%s已持久化" % album_url) continue album_url_object_id = self.get_md5(album_url) # 只有name不存在时,当前set操作才执行 self.redis_cmd.setnx(album_url, 0) count = 0 try: count = self.db_session.query(func.count()).filter( Album.album_url_object_id == album_url_object_id).first() if count: count = count[0] except Exception as e: self.logger.error("查询数据库异常,原因:{}".format(e)) finally: self.db_session.rollback() if count: self.logger.info("数据库已有该数据album_url_object_id:%s" % album_url_object_id) repeated_count += 1 # 只有name存在时,当前set操作才执行 self.redis_cmd.set(album_url, 1, xx=True) continue else: album_item = self.parse_album_item(album_node, album_url, album_url_object_id, category) yield response.follow(url=album_url, meta={"AlbumItem": album_item}, callback=self.parse_detail) # 提取下一页并交给scrapy下载 selector_list 
= response.css('.page li a::attr(href)') # 如果最后一页的最后一个主题url未被持久化则继续爬取 if not is_last_item_finished: if selector_list: last_page_url = None current_url_page = response.xpath('//li[@class="thisclass"]//text()').extract_first() # 如果当前页是第一页则获取最后一页url if current_url_page and int(current_url_page) == 1: last_page_url = selector_list[-1].extract() next_url = selector_list[-2].extract() if next_url == last_page_url: album_last_page_url = response.urljoin(last_page_url) self.logger.info("Last page:%s" % album_last_page_url) else: self.logger.info("Next page:%s" % response.urljoin(next_url)) yield response.follow(url=next_url, meta={"album_last_page_url": album_last_page_url}, callback=self.parse) else: self.logger.info("selector_list is None") self.logger.info("重复次数:%s" % repeated_count) else: self.logger.info("Stop crawler. None Next page!") def parse_album_item(self, album_node, album_url, album_url_object_id, category): album_title = album_node.css('.p a img::attr(alt)').extract_first().strip() cover_url = album_node.css('.p a img::attr(src)').extract_first().strip() regex = "\d+\.\d+.\d+\s+No\.\d+|\d+\-\d+-\d+\s+No\.\d+" number_group = re.findall(regex, album_title) if len(number_group) > 0: number = number_group[0] else: number = "No.unknown" create_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S') album_item = AlbumItem() album_item['category'] = category album_item['album_url'] = album_url album_item['album_url_object_id'] = album_url_object_id album_item['album_title'] = album_title album_item['cover_url'] = cover_url album_item['number'] = number album_item['create_date'] = create_date return album_item def parse_detail(self, response): self.album_item = response.meta.get("AlbumItem") self.album_image_relation_item['album_item'] = self.album_item self.parse_album_image_item(response) # 详情页分页链接,循环生成所有子页面的请求 relative_next_page_list = response.css('.page li a::attr(href)').extract() # 使用gevent协程池提升网络IO处理效率 next_page_threads = [ 
self.gevent_pool.spawn(self.get_album_image_item_list, response.urljoin(relative_next_page)) for relative_next_page in relative_next_page_list[2:-1] ] gevent.joinall(next_page_threads) self.album_image_relation_item['album_image_item_list'] = self.album_image_item_list # 重新初始化 self.album_image_item_list = [] yield self.album_image_relation_item def get_album_image_item_list(self, abs_next_page): """ 使用下页绝对路径同步请求 :param abs_next_page: :return: """ resp = requests.get(abs_next_page) if resp.status_code == 200: encoding = requests.utils.get_encodings_from_content(resp.text) resp.encoding = encoding[0] self.parse_album_image_item(etree.HTML(resp.text)) else: self.logger.warn("下载此页{}失败,返回的状态码为{}".format(abs_next_page, resp.status_code)) def parse_album_image_item(self, response): """ 解析item并返回给pipelines :param response: 如果response类型是继承自scrapy的TextResponse类则使用scrapy的Selector来解析,否则使用lxml来解析 :return: """ if isinstance(response, HtmlResponse): item_title = response.xpath('//div[@class="content"]/h1/text()').extract_first().strip() publish_date = response.xpath('//div[@class="tit"]/span/text()').extract_first().split(":")[1] image_link_list = response.css('.contents a img::attr(src)').extract() else: item_title = response.xpath('//div[@class="content"]/h1/text()')[0].strip() publish_date = response.xpath('//div[@class="tit"]/span/text()')[0].split(":")[1] image_link_list = response.xpath('//div[@class="contents"]/a/img') image_link_list = [image_link.attrib['src'] for image_link in image_link_list] regex = "\s?\w+[^\w]?" 
regex_group = re.findall(regex, item_title) stage_name = "unknown" if len(regex_group) > 0: str = regex_group[-1] if "[" in str: stage_name = str.split("[")[0].strip() elif "(" in str: stage_name = str.split("(")[0].strip() elif re.match('[^\d*]', str): stage_name = re.match('[^\d*]', str).group() # 详情页多个图片链接 for image_url in image_link_list: album_image_item = AlbumImageItem() album_image_item['item_url'] = image_url album_image_item['item_url_object_id'] = self.get_md5(image_url) item_url_list_json = "{}" album_image_item['item_url_list_json'] = item_url_list_json album_image_item['item_title'] = item_title album_image_item['stage_name'] = stage_name album_image_item['publish_date'] = publish_date self.album_image_item_list.append(album_image_item) return self.album_image_item_list @staticmethod def get_md5(param): if isinstance(param, str): param = param.encode() m = hashlib.md5() m.update(param) return m.hexdigest() @staticmethod def sub_url_scheme(website, replace_str): scheme_regex = "^(http://|https://)" return re.sub(scheme_regex, replace_str, website)
normal
{ "blob_id": "eb853e430b996a81dc2ef20c320979a3e04d956a", "index": 237, "step-1": "<mask token>\n\n\nclass Beautyleg7Spider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def parse(self, response):\n if self.db_session is None:\n self.logger.error('db_session is None')\n return None\n repeated_count = 0\n if response is None:\n self.logger.warn('响应为空,不做处理!')\n else:\n album_nodes = response.css('.pic .item')\n category = response.css('.sitepath a')[1].css('a::text'\n ).extract_first().strip()\n is_persisted_last_item = self.redis_cmd.get(self.\n album_last_item_redis_unique_key)\n is_last_item_finished = False\n if is_persisted_last_item is not None and int(\n is_persisted_last_item):\n is_last_item_finished = True\n self.logger.info('已持久化最后一页的最后主题:%s' % self.\n album_last_item_redis_unique_key)\n album_last_page_url = response.meta.get('album_last_page_url')\n if album_last_page_url is not None:\n album_last_page_url_last_item_redis_suffix = album_nodes[-1\n ].css('.p a::attr(href)').extract_first()\n self.album_last_item_redis_unique_key = (self.\n ALBUM_URL_REDIS_KEY_PREFIX + self.REDIS_LIMITER + self.\n sub_url_scheme(album_last_page_url, '') + self.\n REDIS_LIMITER + self.sub_url_scheme(\n album_last_page_url_last_item_redis_suffix, ''))\n self.redis_cmd.setnx(self.album_last_item_redis_unique_key, 0)\n for album_node in album_nodes:\n album_url = album_node.css('.p a::attr(href)').extract_first(\n ).strip()\n is_persisted = self.redis_cmd.get(album_url)\n if is_persisted is not None and int(is_persisted):\n self.logger.info('Redis中该url album_url:%s已持久化' % album_url)\n continue\n album_url_object_id = self.get_md5(album_url)\n self.redis_cmd.setnx(album_url, 0)\n count = 0\n try:\n count = self.db_session.query(func.count()).filter(\n Album.album_url_object_id == album_url_object_id\n ).first()\n if count:\n count = count[0]\n except Exception as e:\n self.logger.error('查询数据库异常,原因:{}'.format(e))\n 
finally:\n self.db_session.rollback()\n if count:\n self.logger.info('数据库已有该数据album_url_object_id:%s' %\n album_url_object_id)\n repeated_count += 1\n self.redis_cmd.set(album_url, 1, xx=True)\n continue\n else:\n album_item = self.parse_album_item(album_node,\n album_url, album_url_object_id, category)\n yield response.follow(url=album_url, meta={'AlbumItem':\n album_item}, callback=self.parse_detail)\n selector_list = response.css('.page li a::attr(href)')\n if not is_last_item_finished:\n if selector_list:\n last_page_url = None\n current_url_page = response.xpath(\n '//li[@class=\"thisclass\"]//text()').extract_first()\n if current_url_page and int(current_url_page) == 1:\n last_page_url = selector_list[-1].extract()\n next_url = selector_list[-2].extract()\n if next_url == last_page_url:\n album_last_page_url = response.urljoin(last_page_url)\n self.logger.info('Last page:%s' % album_last_page_url)\n else:\n self.logger.info('Next page:%s' % response.urljoin(\n next_url))\n yield response.follow(url=next_url, meta={\n 'album_last_page_url': album_last_page_url},\n callback=self.parse)\n else:\n self.logger.info('selector_list is None')\n self.logger.info('重复次数:%s' % repeated_count)\n else:\n self.logger.info('Stop crawler. 
None Next page!')\n <mask token>\n <mask token>\n\n def get_album_image_item_list(self, abs_next_page):\n \"\"\"\n 使用下页绝对路径同步请求\n :param abs_next_page:\n :return:\n \"\"\"\n resp = requests.get(abs_next_page)\n if resp.status_code == 200:\n encoding = requests.utils.get_encodings_from_content(resp.text)\n resp.encoding = encoding[0]\n self.parse_album_image_item(etree.HTML(resp.text))\n else:\n self.logger.warn('下载此页{}失败,返回的状态码为{}'.format(abs_next_page,\n resp.status_code))\n <mask token>\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass Beautyleg7Spider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def parse(self, response):\n if self.db_session is None:\n self.logger.error('db_session is None')\n return None\n repeated_count = 0\n if response is None:\n self.logger.warn('响应为空,不做处理!')\n else:\n album_nodes = response.css('.pic .item')\n category = response.css('.sitepath a')[1].css('a::text'\n ).extract_first().strip()\n is_persisted_last_item = self.redis_cmd.get(self.\n album_last_item_redis_unique_key)\n is_last_item_finished = False\n if is_persisted_last_item is not None and int(\n is_persisted_last_item):\n is_last_item_finished = True\n self.logger.info('已持久化最后一页的最后主题:%s' % self.\n album_last_item_redis_unique_key)\n album_last_page_url = response.meta.get('album_last_page_url')\n if album_last_page_url is not None:\n album_last_page_url_last_item_redis_suffix = album_nodes[-1\n ].css('.p a::attr(href)').extract_first()\n self.album_last_item_redis_unique_key = (self.\n ALBUM_URL_REDIS_KEY_PREFIX + self.REDIS_LIMITER + self.\n sub_url_scheme(album_last_page_url, '') + self.\n REDIS_LIMITER + self.sub_url_scheme(\n album_last_page_url_last_item_redis_suffix, ''))\n self.redis_cmd.setnx(self.album_last_item_redis_unique_key, 0)\n for album_node in album_nodes:\n album_url = album_node.css('.p a::attr(href)').extract_first(\n ).strip()\n is_persisted = 
self.redis_cmd.get(album_url)\n if is_persisted is not None and int(is_persisted):\n self.logger.info('Redis中该url album_url:%s已持久化' % album_url)\n continue\n album_url_object_id = self.get_md5(album_url)\n self.redis_cmd.setnx(album_url, 0)\n count = 0\n try:\n count = self.db_session.query(func.count()).filter(\n Album.album_url_object_id == album_url_object_id\n ).first()\n if count:\n count = count[0]\n except Exception as e:\n self.logger.error('查询数据库异常,原因:{}'.format(e))\n finally:\n self.db_session.rollback()\n if count:\n self.logger.info('数据库已有该数据album_url_object_id:%s' %\n album_url_object_id)\n repeated_count += 1\n self.redis_cmd.set(album_url, 1, xx=True)\n continue\n else:\n album_item = self.parse_album_item(album_node,\n album_url, album_url_object_id, category)\n yield response.follow(url=album_url, meta={'AlbumItem':\n album_item}, callback=self.parse_detail)\n selector_list = response.css('.page li a::attr(href)')\n if not is_last_item_finished:\n if selector_list:\n last_page_url = None\n current_url_page = response.xpath(\n '//li[@class=\"thisclass\"]//text()').extract_first()\n if current_url_page and int(current_url_page) == 1:\n last_page_url = selector_list[-1].extract()\n next_url = selector_list[-2].extract()\n if next_url == last_page_url:\n album_last_page_url = response.urljoin(last_page_url)\n self.logger.info('Last page:%s' % album_last_page_url)\n else:\n self.logger.info('Next page:%s' % response.urljoin(\n next_url))\n yield response.follow(url=next_url, meta={\n 'album_last_page_url': album_last_page_url},\n callback=self.parse)\n else:\n self.logger.info('selector_list is None')\n self.logger.info('重复次数:%s' % repeated_count)\n else:\n self.logger.info('Stop crawler. 
None Next page!')\n\n def parse_album_item(self, album_node, album_url, album_url_object_id,\n category):\n album_title = album_node.css('.p a img::attr(alt)').extract_first(\n ).strip()\n cover_url = album_node.css('.p a img::attr(src)').extract_first(\n ).strip()\n regex = '\\\\d+\\\\.\\\\d+.\\\\d+\\\\s+No\\\\.\\\\d+|\\\\d+\\\\-\\\\d+-\\\\d+\\\\s+No\\\\.\\\\d+'\n number_group = re.findall(regex, album_title)\n if len(number_group) > 0:\n number = number_group[0]\n else:\n number = 'No.unknown'\n create_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n album_item = AlbumItem()\n album_item['category'] = category\n album_item['album_url'] = album_url\n album_item['album_url_object_id'] = album_url_object_id\n album_item['album_title'] = album_title\n album_item['cover_url'] = cover_url\n album_item['number'] = number\n album_item['create_date'] = create_date\n return album_item\n\n def parse_detail(self, response):\n self.album_item = response.meta.get('AlbumItem')\n self.album_image_relation_item['album_item'] = self.album_item\n self.parse_album_image_item(response)\n relative_next_page_list = response.css('.page li a::attr(href)'\n ).extract()\n next_page_threads = [self.gevent_pool.spawn(self.\n get_album_image_item_list, response.urljoin(relative_next_page)\n ) for relative_next_page in relative_next_page_list[2:-1]]\n gevent.joinall(next_page_threads)\n self.album_image_relation_item['album_image_item_list'\n ] = self.album_image_item_list\n self.album_image_item_list = []\n yield self.album_image_relation_item\n\n def get_album_image_item_list(self, abs_next_page):\n \"\"\"\n 使用下页绝对路径同步请求\n :param abs_next_page:\n :return:\n \"\"\"\n resp = requests.get(abs_next_page)\n if resp.status_code == 200:\n encoding = requests.utils.get_encodings_from_content(resp.text)\n resp.encoding = encoding[0]\n self.parse_album_image_item(etree.HTML(resp.text))\n else:\n self.logger.warn('下载此页{}失败,返回的状态码为{}'.format(abs_next_page,\n resp.status_code))\n\n def 
parse_album_image_item(self, response):\n \"\"\"\n 解析item并返回给pipelines\n :param response: 如果response类型是继承自scrapy的TextResponse类则使用scrapy的Selector来解析,否则使用lxml来解析\n :return:\n \"\"\"\n if isinstance(response, HtmlResponse):\n item_title = response.xpath('//div[@class=\"content\"]/h1/text()'\n ).extract_first().strip()\n publish_date = response.xpath('//div[@class=\"tit\"]/span/text()'\n ).extract_first().split(':')[1]\n image_link_list = response.css('.contents a img::attr(src)'\n ).extract()\n else:\n item_title = response.xpath('//div[@class=\"content\"]/h1/text()')[0\n ].strip()\n publish_date = response.xpath('//div[@class=\"tit\"]/span/text()')[0\n ].split(':')[1]\n image_link_list = response.xpath('//div[@class=\"contents\"]/a/img')\n image_link_list = [image_link.attrib['src'] for image_link in\n image_link_list]\n regex = '\\\\s?\\\\w+[^\\\\w]?'\n regex_group = re.findall(regex, item_title)\n stage_name = 'unknown'\n if len(regex_group) > 0:\n str = regex_group[-1]\n if '[' in str:\n stage_name = str.split('[')[0].strip()\n elif '(' in str:\n stage_name = str.split('(')[0].strip()\n elif re.match('[^\\\\d*]', str):\n stage_name = re.match('[^\\\\d*]', str).group()\n for image_url in image_link_list:\n album_image_item = AlbumImageItem()\n album_image_item['item_url'] = image_url\n album_image_item['item_url_object_id'] = self.get_md5(image_url)\n item_url_list_json = '{}'\n album_image_item['item_url_list_json'] = item_url_list_json\n album_image_item['item_title'] = item_title\n album_image_item['stage_name'] = stage_name\n album_image_item['publish_date'] = publish_date\n self.album_image_item_list.append(album_image_item)\n return self.album_image_item_list\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Beautyleg7Spider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def start_requests(self):\n mysql_host = self.crawler.settings.get('MYSQL_HOST')\n mysql_port = 
self.crawler.settings.get('MYSQL_PORT')\n mysql_user = self.crawler.settings.get('MYSQL_USER')\n mysql_password = self.crawler.settings.get('MYSQL_PASSWORD')\n mysql_db_name = self.crawler.settings.get('MYSQL_DB_NAME')\n engine = create_engine('mysql+mysqlconnector://{}:{}@{}:{}/{}'.\n format(mysql_user, mysql_password, mysql_host, mysql_port,\n mysql_db_name), pool_recycle=180, echo=False)\n session_maker = sessionmaker(bind=engine)\n self.db_session = session_maker()\n for url in self.start_urls:\n yield scrapy.Request(url)\n\n def parse(self, response):\n if self.db_session is None:\n self.logger.error('db_session is None')\n return None\n repeated_count = 0\n if response is None:\n self.logger.warn('响应为空,不做处理!')\n else:\n album_nodes = response.css('.pic .item')\n category = response.css('.sitepath a')[1].css('a::text'\n ).extract_first().strip()\n is_persisted_last_item = self.redis_cmd.get(self.\n album_last_item_redis_unique_key)\n is_last_item_finished = False\n if is_persisted_last_item is not None and int(\n is_persisted_last_item):\n is_last_item_finished = True\n self.logger.info('已持久化最后一页的最后主题:%s' % self.\n album_last_item_redis_unique_key)\n album_last_page_url = response.meta.get('album_last_page_url')\n if album_last_page_url is not None:\n album_last_page_url_last_item_redis_suffix = album_nodes[-1\n ].css('.p a::attr(href)').extract_first()\n self.album_last_item_redis_unique_key = (self.\n ALBUM_URL_REDIS_KEY_PREFIX + self.REDIS_LIMITER + self.\n sub_url_scheme(album_last_page_url, '') + self.\n REDIS_LIMITER + self.sub_url_scheme(\n album_last_page_url_last_item_redis_suffix, ''))\n self.redis_cmd.setnx(self.album_last_item_redis_unique_key, 0)\n for album_node in album_nodes:\n album_url = album_node.css('.p a::attr(href)').extract_first(\n ).strip()\n is_persisted = self.redis_cmd.get(album_url)\n if is_persisted is not None and int(is_persisted):\n self.logger.info('Redis中该url album_url:%s已持久化' % album_url)\n continue\n album_url_object_id = 
self.get_md5(album_url)\n self.redis_cmd.setnx(album_url, 0)\n count = 0\n try:\n count = self.db_session.query(func.count()).filter(\n Album.album_url_object_id == album_url_object_id\n ).first()\n if count:\n count = count[0]\n except Exception as e:\n self.logger.error('查询数据库异常,原因:{}'.format(e))\n finally:\n self.db_session.rollback()\n if count:\n self.logger.info('数据库已有该数据album_url_object_id:%s' %\n album_url_object_id)\n repeated_count += 1\n self.redis_cmd.set(album_url, 1, xx=True)\n continue\n else:\n album_item = self.parse_album_item(album_node,\n album_url, album_url_object_id, category)\n yield response.follow(url=album_url, meta={'AlbumItem':\n album_item}, callback=self.parse_detail)\n selector_list = response.css('.page li a::attr(href)')\n if not is_last_item_finished:\n if selector_list:\n last_page_url = None\n current_url_page = response.xpath(\n '//li[@class=\"thisclass\"]//text()').extract_first()\n if current_url_page and int(current_url_page) == 1:\n last_page_url = selector_list[-1].extract()\n next_url = selector_list[-2].extract()\n if next_url == last_page_url:\n album_last_page_url = response.urljoin(last_page_url)\n self.logger.info('Last page:%s' % album_last_page_url)\n else:\n self.logger.info('Next page:%s' % response.urljoin(\n next_url))\n yield response.follow(url=next_url, meta={\n 'album_last_page_url': album_last_page_url},\n callback=self.parse)\n else:\n self.logger.info('selector_list is None')\n self.logger.info('重复次数:%s' % repeated_count)\n else:\n self.logger.info('Stop crawler. 
None Next page!')\n\n def parse_album_item(self, album_node, album_url, album_url_object_id,\n category):\n album_title = album_node.css('.p a img::attr(alt)').extract_first(\n ).strip()\n cover_url = album_node.css('.p a img::attr(src)').extract_first(\n ).strip()\n regex = '\\\\d+\\\\.\\\\d+.\\\\d+\\\\s+No\\\\.\\\\d+|\\\\d+\\\\-\\\\d+-\\\\d+\\\\s+No\\\\.\\\\d+'\n number_group = re.findall(regex, album_title)\n if len(number_group) > 0:\n number = number_group[0]\n else:\n number = 'No.unknown'\n create_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n album_item = AlbumItem()\n album_item['category'] = category\n album_item['album_url'] = album_url\n album_item['album_url_object_id'] = album_url_object_id\n album_item['album_title'] = album_title\n album_item['cover_url'] = cover_url\n album_item['number'] = number\n album_item['create_date'] = create_date\n return album_item\n\n def parse_detail(self, response):\n self.album_item = response.meta.get('AlbumItem')\n self.album_image_relation_item['album_item'] = self.album_item\n self.parse_album_image_item(response)\n relative_next_page_list = response.css('.page li a::attr(href)'\n ).extract()\n next_page_threads = [self.gevent_pool.spawn(self.\n get_album_image_item_list, response.urljoin(relative_next_page)\n ) for relative_next_page in relative_next_page_list[2:-1]]\n gevent.joinall(next_page_threads)\n self.album_image_relation_item['album_image_item_list'\n ] = self.album_image_item_list\n self.album_image_item_list = []\n yield self.album_image_relation_item\n\n def get_album_image_item_list(self, abs_next_page):\n \"\"\"\n 使用下页绝对路径同步请求\n :param abs_next_page:\n :return:\n \"\"\"\n resp = requests.get(abs_next_page)\n if resp.status_code == 200:\n encoding = requests.utils.get_encodings_from_content(resp.text)\n resp.encoding = encoding[0]\n self.parse_album_image_item(etree.HTML(resp.text))\n else:\n self.logger.warn('下载此页{}失败,返回的状态码为{}'.format(abs_next_page,\n resp.status_code))\n\n def 
parse_album_image_item(self, response):\n \"\"\"\n 解析item并返回给pipelines\n :param response: 如果response类型是继承自scrapy的TextResponse类则使用scrapy的Selector来解析,否则使用lxml来解析\n :return:\n \"\"\"\n if isinstance(response, HtmlResponse):\n item_title = response.xpath('//div[@class=\"content\"]/h1/text()'\n ).extract_first().strip()\n publish_date = response.xpath('//div[@class=\"tit\"]/span/text()'\n ).extract_first().split(':')[1]\n image_link_list = response.css('.contents a img::attr(src)'\n ).extract()\n else:\n item_title = response.xpath('//div[@class=\"content\"]/h1/text()')[0\n ].strip()\n publish_date = response.xpath('//div[@class=\"tit\"]/span/text()')[0\n ].split(':')[1]\n image_link_list = response.xpath('//div[@class=\"contents\"]/a/img')\n image_link_list = [image_link.attrib['src'] for image_link in\n image_link_list]\n regex = '\\\\s?\\\\w+[^\\\\w]?'\n regex_group = re.findall(regex, item_title)\n stage_name = 'unknown'\n if len(regex_group) > 0:\n str = regex_group[-1]\n if '[' in str:\n stage_name = str.split('[')[0].strip()\n elif '(' in str:\n stage_name = str.split('(')[0].strip()\n elif re.match('[^\\\\d*]', str):\n stage_name = re.match('[^\\\\d*]', str).group()\n for image_url in image_link_list:\n album_image_item = AlbumImageItem()\n album_image_item['item_url'] = image_url\n album_image_item['item_url_object_id'] = self.get_md5(image_url)\n item_url_list_json = '{}'\n album_image_item['item_url_list_json'] = item_url_list_json\n album_image_item['item_title'] = item_title\n album_image_item['stage_name'] = stage_name\n album_image_item['publish_date'] = publish_date\n self.album_image_item_list.append(album_image_item)\n return self.album_image_item_list\n <mask token>\n\n @staticmethod\n def sub_url_scheme(website, replace_str):\n scheme_regex = '^(http://|https://)'\n return re.sub(scheme_regex, replace_str, website)\n", "step-4": "<mask token>\n\n\nclass Beautyleg7Spider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n 
<mask token>\n\n def start_requests(self):\n mysql_host = self.crawler.settings.get('MYSQL_HOST')\n mysql_port = self.crawler.settings.get('MYSQL_PORT')\n mysql_user = self.crawler.settings.get('MYSQL_USER')\n mysql_password = self.crawler.settings.get('MYSQL_PASSWORD')\n mysql_db_name = self.crawler.settings.get('MYSQL_DB_NAME')\n engine = create_engine('mysql+mysqlconnector://{}:{}@{}:{}/{}'.\n format(mysql_user, mysql_password, mysql_host, mysql_port,\n mysql_db_name), pool_recycle=180, echo=False)\n session_maker = sessionmaker(bind=engine)\n self.db_session = session_maker()\n for url in self.start_urls:\n yield scrapy.Request(url)\n\n def parse(self, response):\n if self.db_session is None:\n self.logger.error('db_session is None')\n return None\n repeated_count = 0\n if response is None:\n self.logger.warn('响应为空,不做处理!')\n else:\n album_nodes = response.css('.pic .item')\n category = response.css('.sitepath a')[1].css('a::text'\n ).extract_first().strip()\n is_persisted_last_item = self.redis_cmd.get(self.\n album_last_item_redis_unique_key)\n is_last_item_finished = False\n if is_persisted_last_item is not None and int(\n is_persisted_last_item):\n is_last_item_finished = True\n self.logger.info('已持久化最后一页的最后主题:%s' % self.\n album_last_item_redis_unique_key)\n album_last_page_url = response.meta.get('album_last_page_url')\n if album_last_page_url is not None:\n album_last_page_url_last_item_redis_suffix = album_nodes[-1\n ].css('.p a::attr(href)').extract_first()\n self.album_last_item_redis_unique_key = (self.\n ALBUM_URL_REDIS_KEY_PREFIX + self.REDIS_LIMITER + self.\n sub_url_scheme(album_last_page_url, '') + self.\n REDIS_LIMITER + self.sub_url_scheme(\n album_last_page_url_last_item_redis_suffix, ''))\n self.redis_cmd.setnx(self.album_last_item_redis_unique_key, 0)\n for album_node in album_nodes:\n album_url = album_node.css('.p a::attr(href)').extract_first(\n ).strip()\n is_persisted = self.redis_cmd.get(album_url)\n if is_persisted is not None and 
int(is_persisted):\n self.logger.info('Redis中该url album_url:%s已持久化' % album_url)\n continue\n album_url_object_id = self.get_md5(album_url)\n self.redis_cmd.setnx(album_url, 0)\n count = 0\n try:\n count = self.db_session.query(func.count()).filter(\n Album.album_url_object_id == album_url_object_id\n ).first()\n if count:\n count = count[0]\n except Exception as e:\n self.logger.error('查询数据库异常,原因:{}'.format(e))\n finally:\n self.db_session.rollback()\n if count:\n self.logger.info('数据库已有该数据album_url_object_id:%s' %\n album_url_object_id)\n repeated_count += 1\n self.redis_cmd.set(album_url, 1, xx=True)\n continue\n else:\n album_item = self.parse_album_item(album_node,\n album_url, album_url_object_id, category)\n yield response.follow(url=album_url, meta={'AlbumItem':\n album_item}, callback=self.parse_detail)\n selector_list = response.css('.page li a::attr(href)')\n if not is_last_item_finished:\n if selector_list:\n last_page_url = None\n current_url_page = response.xpath(\n '//li[@class=\"thisclass\"]//text()').extract_first()\n if current_url_page and int(current_url_page) == 1:\n last_page_url = selector_list[-1].extract()\n next_url = selector_list[-2].extract()\n if next_url == last_page_url:\n album_last_page_url = response.urljoin(last_page_url)\n self.logger.info('Last page:%s' % album_last_page_url)\n else:\n self.logger.info('Next page:%s' % response.urljoin(\n next_url))\n yield response.follow(url=next_url, meta={\n 'album_last_page_url': album_last_page_url},\n callback=self.parse)\n else:\n self.logger.info('selector_list is None')\n self.logger.info('重复次数:%s' % repeated_count)\n else:\n self.logger.info('Stop crawler. 
None Next page!')\n\n def parse_album_item(self, album_node, album_url, album_url_object_id,\n category):\n album_title = album_node.css('.p a img::attr(alt)').extract_first(\n ).strip()\n cover_url = album_node.css('.p a img::attr(src)').extract_first(\n ).strip()\n regex = '\\\\d+\\\\.\\\\d+.\\\\d+\\\\s+No\\\\.\\\\d+|\\\\d+\\\\-\\\\d+-\\\\d+\\\\s+No\\\\.\\\\d+'\n number_group = re.findall(regex, album_title)\n if len(number_group) > 0:\n number = number_group[0]\n else:\n number = 'No.unknown'\n create_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n album_item = AlbumItem()\n album_item['category'] = category\n album_item['album_url'] = album_url\n album_item['album_url_object_id'] = album_url_object_id\n album_item['album_title'] = album_title\n album_item['cover_url'] = cover_url\n album_item['number'] = number\n album_item['create_date'] = create_date\n return album_item\n\n def parse_detail(self, response):\n self.album_item = response.meta.get('AlbumItem')\n self.album_image_relation_item['album_item'] = self.album_item\n self.parse_album_image_item(response)\n relative_next_page_list = response.css('.page li a::attr(href)'\n ).extract()\n next_page_threads = [self.gevent_pool.spawn(self.\n get_album_image_item_list, response.urljoin(relative_next_page)\n ) for relative_next_page in relative_next_page_list[2:-1]]\n gevent.joinall(next_page_threads)\n self.album_image_relation_item['album_image_item_list'\n ] = self.album_image_item_list\n self.album_image_item_list = []\n yield self.album_image_relation_item\n\n def get_album_image_item_list(self, abs_next_page):\n \"\"\"\n 使用下页绝对路径同步请求\n :param abs_next_page:\n :return:\n \"\"\"\n resp = requests.get(abs_next_page)\n if resp.status_code == 200:\n encoding = requests.utils.get_encodings_from_content(resp.text)\n resp.encoding = encoding[0]\n self.parse_album_image_item(etree.HTML(resp.text))\n else:\n self.logger.warn('下载此页{}失败,返回的状态码为{}'.format(abs_next_page,\n resp.status_code))\n\n def 
parse_album_image_item(self, response):\n \"\"\"\n 解析item并返回给pipelines\n :param response: 如果response类型是继承自scrapy的TextResponse类则使用scrapy的Selector来解析,否则使用lxml来解析\n :return:\n \"\"\"\n if isinstance(response, HtmlResponse):\n item_title = response.xpath('//div[@class=\"content\"]/h1/text()'\n ).extract_first().strip()\n publish_date = response.xpath('//div[@class=\"tit\"]/span/text()'\n ).extract_first().split(':')[1]\n image_link_list = response.css('.contents a img::attr(src)'\n ).extract()\n else:\n item_title = response.xpath('//div[@class=\"content\"]/h1/text()')[0\n ].strip()\n publish_date = response.xpath('//div[@class=\"tit\"]/span/text()')[0\n ].split(':')[1]\n image_link_list = response.xpath('//div[@class=\"contents\"]/a/img')\n image_link_list = [image_link.attrib['src'] for image_link in\n image_link_list]\n regex = '\\\\s?\\\\w+[^\\\\w]?'\n regex_group = re.findall(regex, item_title)\n stage_name = 'unknown'\n if len(regex_group) > 0:\n str = regex_group[-1]\n if '[' in str:\n stage_name = str.split('[')[0].strip()\n elif '(' in str:\n stage_name = str.split('(')[0].strip()\n elif re.match('[^\\\\d*]', str):\n stage_name = re.match('[^\\\\d*]', str).group()\n for image_url in image_link_list:\n album_image_item = AlbumImageItem()\n album_image_item['item_url'] = image_url\n album_image_item['item_url_object_id'] = self.get_md5(image_url)\n item_url_list_json = '{}'\n album_image_item['item_url_list_json'] = item_url_list_json\n album_image_item['item_title'] = item_title\n album_image_item['stage_name'] = stage_name\n album_image_item['publish_date'] = publish_date\n self.album_image_item_list.append(album_image_item)\n return self.album_image_item_list\n\n @staticmethod\n def get_md5(param):\n if isinstance(param, str):\n param = param.encode()\n m = hashlib.md5()\n m.update(param)\n return m.hexdigest()\n\n @staticmethod\n def sub_url_scheme(website, replace_str):\n scheme_regex = '^(http://|https://)'\n return re.sub(scheme_regex, replace_str, 
website)\n", "step-5": "#!/usr/bin/env python3\n# -*- coding: UTF-8 -*-\nimport hashlib\nimport re\nfrom datetime import datetime\n\nimport gevent\nimport requests\nimport scrapy\nfrom gevent.pool import Pool\nfrom lxml import etree\nfrom scrapy.http import HtmlResponse\nfrom sqlalchemy import create_engine, func\nfrom sqlalchemy.orm import sessionmaker\n\nfrom ..items import Album, AlbumImageRelationItem, AlbumItem, AlbumImageItem\nfrom ..utils.const import const\nfrom ..utils.redis_util import get_redis_conn_from_pool\n\n\nclass Beautyleg7Spider(scrapy.Spider):\n name = 'Beautyleg7Spider'\n category_list = ['siwameitui', 'xingganmeinv', 'weimeixiezhen', 'ribenmeinv']\n start_urls = [('http://www.beautyleg7.com/' + category) for category in category_list]\n\n const.REPEATED_THRESHOLD = 10\n\n def __init__(self, name=None, **kwargs):\n super().__init__(name=None, **kwargs)\n\n self.db_session = None\n\n self.gevent_pool = Pool(32)\n\n self.redis_cmd = get_redis_conn_from_pool()\n\n self.ALBUM_URL_REDIS_KEY_PREFIX = \"album_url\"\n self.REDIS_LIMITER = \":\"\n self.album_last_item_redis_unique_key = \"\"\n self.album_item = None\n self.album_image_item_list = []\n self.album_image_relation_item = AlbumImageRelationItem()\n\n def start_requests(self):\n mysql_host = self.crawler.settings.get(\"MYSQL_HOST\")\n mysql_port = self.crawler.settings.get(\"MYSQL_PORT\")\n mysql_user = self.crawler.settings.get(\"MYSQL_USER\")\n mysql_password = self.crawler.settings.get(\"MYSQL_PASSWORD\")\n mysql_db_name = self.crawler.settings.get(\"MYSQL_DB_NAME\")\n engine = create_engine('mysql+mysqlconnector://{}:{}@{}:{}/{}'.format(mysql_user, mysql_password,\n mysql_host, mysql_port,\n mysql_db_name),\n pool_recycle=180, echo=False)\n session_maker = sessionmaker(bind=engine)\n self.db_session = session_maker()\n\n for url in self.start_urls:\n yield scrapy.Request(url)\n\n def parse(self, response):\n if self.db_session is None:\n self.logger.error(\"db_session is None\")\n return 
None\n repeated_count = 0\n if response is None:\n self.logger.warn(\"响应为空,不做处理!\")\n else:\n album_nodes = response.css('.pic .item')\n category = response.css('.sitepath a')[1].css('a::text').extract_first().strip()\n\n # 判断最后一页的最后主题是否被持久化\n is_persisted_last_item = self.redis_cmd.get(self.album_last_item_redis_unique_key)\n is_last_item_finished = False\n if is_persisted_last_item is not None and int(is_persisted_last_item):\n is_last_item_finished = True\n self.logger.info(\"已持久化最后一页的最后主题:%s\" % self.album_last_item_redis_unique_key)\n\n # 如果是最后一页则设置Redis存储key:“最后一页页码:最后一条主题url”,value:is_persisted(取值为0或1,默认为0)\n album_last_page_url = response.meta.get(\"album_last_page_url\")\n if album_last_page_url is not None:\n album_last_page_url_last_item_redis_suffix = album_nodes[-1].css('.p a::attr(href)').extract_first()\n self.album_last_item_redis_unique_key = self.ALBUM_URL_REDIS_KEY_PREFIX + self.REDIS_LIMITER + \\\n self.sub_url_scheme(album_last_page_url,\n \"\") + self.REDIS_LIMITER + \\\n self.sub_url_scheme(album_last_page_url_last_item_redis_suffix,\n \"\")\n\n self.redis_cmd.setnx(self.album_last_item_redis_unique_key, 0)\n\n for album_node in album_nodes:\n album_url = album_node.css('.p a::attr(href)').extract_first().strip()\n # 判断当前主题url是否已持久化\n is_persisted = self.redis_cmd.get(album_url)\n if is_persisted is not None and int(is_persisted):\n self.logger.info(\"Redis中该url album_url:%s已持久化\" % album_url)\n continue\n\n album_url_object_id = self.get_md5(album_url)\n # 只有name不存在时,当前set操作才执行\n self.redis_cmd.setnx(album_url, 0)\n count = 0\n try:\n count = self.db_session.query(func.count()).filter(\n Album.album_url_object_id == album_url_object_id).first()\n if count:\n count = count[0]\n except Exception as e:\n self.logger.error(\"查询数据库异常,原因:{}\".format(e))\n finally:\n self.db_session.rollback()\n\n if count:\n self.logger.info(\"数据库已有该数据album_url_object_id:%s\" % album_url_object_id)\n repeated_count += 1\n # 只有name存在时,当前set操作才执行\n 
self.redis_cmd.set(album_url, 1, xx=True)\n continue\n else:\n album_item = self.parse_album_item(album_node, album_url, album_url_object_id, category)\n yield response.follow(url=album_url,\n meta={\"AlbumItem\": album_item},\n callback=self.parse_detail)\n\n # 提取下一页并交给scrapy下载\n selector_list = response.css('.page li a::attr(href)')\n # 如果最后一页的最后一个主题url未被持久化则继续爬取\n if not is_last_item_finished:\n if selector_list:\n last_page_url = None\n current_url_page = response.xpath('//li[@class=\"thisclass\"]//text()').extract_first()\n # 如果当前页是第一页则获取最后一页url\n if current_url_page and int(current_url_page) == 1:\n last_page_url = selector_list[-1].extract()\n\n next_url = selector_list[-2].extract()\n if next_url == last_page_url:\n album_last_page_url = response.urljoin(last_page_url)\n self.logger.info(\"Last page:%s\" % album_last_page_url)\n else:\n self.logger.info(\"Next page:%s\" % response.urljoin(next_url))\n yield response.follow(url=next_url,\n meta={\"album_last_page_url\": album_last_page_url},\n callback=self.parse)\n else:\n self.logger.info(\"selector_list is None\")\n self.logger.info(\"重复次数:%s\" % repeated_count)\n else:\n self.logger.info(\"Stop crawler. 
None Next page!\")\n\n def parse_album_item(self, album_node, album_url, album_url_object_id, category):\n album_title = album_node.css('.p a img::attr(alt)').extract_first().strip()\n cover_url = album_node.css('.p a img::attr(src)').extract_first().strip()\n regex = \"\\d+\\.\\d+.\\d+\\s+No\\.\\d+|\\d+\\-\\d+-\\d+\\s+No\\.\\d+\"\n number_group = re.findall(regex, album_title)\n if len(number_group) > 0:\n number = number_group[0]\n else:\n number = \"No.unknown\"\n create_date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')\n album_item = AlbumItem()\n album_item['category'] = category\n album_item['album_url'] = album_url\n album_item['album_url_object_id'] = album_url_object_id\n album_item['album_title'] = album_title\n album_item['cover_url'] = cover_url\n album_item['number'] = number\n album_item['create_date'] = create_date\n return album_item\n\n def parse_detail(self, response):\n self.album_item = response.meta.get(\"AlbumItem\")\n self.album_image_relation_item['album_item'] = self.album_item\n self.parse_album_image_item(response)\n # 详情页分页链接,循环生成所有子页面的请求\n relative_next_page_list = response.css('.page li a::attr(href)').extract()\n # 使用gevent协程池提升网络IO处理效率\n next_page_threads = [\n self.gevent_pool.spawn(self.get_album_image_item_list, response.urljoin(relative_next_page))\n for relative_next_page in relative_next_page_list[2:-1]\n ]\n gevent.joinall(next_page_threads)\n self.album_image_relation_item['album_image_item_list'] = self.album_image_item_list\n # 重新初始化\n self.album_image_item_list = []\n yield self.album_image_relation_item\n\n def get_album_image_item_list(self, abs_next_page):\n \"\"\"\n 使用下页绝对路径同步请求\n :param abs_next_page:\n :return:\n \"\"\"\n resp = requests.get(abs_next_page)\n if resp.status_code == 200:\n encoding = requests.utils.get_encodings_from_content(resp.text)\n resp.encoding = encoding[0]\n self.parse_album_image_item(etree.HTML(resp.text))\n else:\n self.logger.warn(\"下载此页{}失败,返回的状态码为{}\".format(abs_next_page, 
resp.status_code))\n\n def parse_album_image_item(self, response):\n \"\"\"\n 解析item并返回给pipelines\n :param response: 如果response类型是继承自scrapy的TextResponse类则使用scrapy的Selector来解析,否则使用lxml来解析\n :return:\n \"\"\"\n if isinstance(response, HtmlResponse):\n item_title = response.xpath('//div[@class=\"content\"]/h1/text()').extract_first().strip()\n publish_date = response.xpath('//div[@class=\"tit\"]/span/text()').extract_first().split(\":\")[1]\n image_link_list = response.css('.contents a img::attr(src)').extract()\n else:\n item_title = response.xpath('//div[@class=\"content\"]/h1/text()')[0].strip()\n publish_date = response.xpath('//div[@class=\"tit\"]/span/text()')[0].split(\":\")[1]\n image_link_list = response.xpath('//div[@class=\"contents\"]/a/img')\n image_link_list = [image_link.attrib['src'] for image_link in image_link_list]\n\n regex = \"\\s?\\w+[^\\w]?\"\n regex_group = re.findall(regex, item_title)\n stage_name = \"unknown\"\n if len(regex_group) > 0:\n str = regex_group[-1]\n if \"[\" in str:\n stage_name = str.split(\"[\")[0].strip()\n elif \"(\" in str:\n stage_name = str.split(\"(\")[0].strip()\n elif re.match('[^\\d*]', str):\n stage_name = re.match('[^\\d*]', str).group()\n\n # 详情页多个图片链接\n for image_url in image_link_list:\n album_image_item = AlbumImageItem()\n album_image_item['item_url'] = image_url\n album_image_item['item_url_object_id'] = self.get_md5(image_url)\n item_url_list_json = \"{}\"\n album_image_item['item_url_list_json'] = item_url_list_json\n album_image_item['item_title'] = item_title\n album_image_item['stage_name'] = stage_name\n album_image_item['publish_date'] = publish_date\n self.album_image_item_list.append(album_image_item)\n return self.album_image_item_list\n\n @staticmethod\n def get_md5(param):\n if isinstance(param, str):\n param = param.encode()\n m = hashlib.md5()\n m.update(param)\n return m.hexdigest()\n\n @staticmethod\n def sub_url_scheme(website, replace_str):\n scheme_regex = \"^(http://|https://)\"\n return 
re.sub(scheme_regex, replace_str, website)\n", "step-ids": [ 3, 6, 8, 9, 13 ] }
[ 3, 6, 8, 9, 13 ]
Album,artist,year,songs="More Mayhem","Imelda May",2001,((1,"pulling the rug"),(2,"psycho"),(3,"mayhem"),(4,"kentisch town waltz")) for song in songs: track,title=song print(" track number {}\t, title {}".format(track,title))
normal
{ "blob_id": "30f02b956af68960804f0cb57695bdbf8510bc43", "index": 7290, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor song in songs:\n track, title = song\n print(' track number {}\\t, title {}'.format(track, title))\n", "step-3": "Album, artist, year, songs = 'More Mayhem', 'Imelda May', 2001, ((1,\n 'pulling the rug'), (2, 'psycho'), (3, 'mayhem'), (4,\n 'kentisch town waltz'))\nfor song in songs:\n track, title = song\n print(' track number {}\\t, title {}'.format(track, title))\n", "step-4": "Album,artist,year,songs=\"More Mayhem\",\"Imelda May\",2001,((1,\"pulling the rug\"),(2,\"psycho\"),(3,\"mayhem\"),(4,\"kentisch town waltz\"))\nfor song in songs:\n track,title=song\n print(\" track number {}\\t, title {}\".format(track,title))", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from base_plugin import * from plugin_utils import * from datetime import datetime import time class LogPlugin(Plugin): def initialize(self): self.add_trigger(on_message) self.add_command("!chatsearch", self.search) self.add_command("!chatreplay", self.replay) def run(self, message): append_to_file(str(datetime.now()) + " : " + message.From + " : " + message.Body + '\n', "chatlog.log") def search(self, message, query, *additional_queries): chat_history = read_lines_from_file("chatlog.log") chat_history.reverse() found_line = None for line in chat_history: if query in line: found_line = line for additional_query in additional_queries: if additional_query not in line: found_line = None break if found_line: break if found_line: self.send_message(message.From, line) return def replay(self, message, startTime, endTime = None): start_time = None end_time = None try: start_time = datetime.strptime(startTime, "%Y-%m-%d,%H:%M") if endTime: end_time = datetime.strptime(endTime, "%Y-%m-%d,%H:%M") except Exception as e: self.send_message(message.From, "Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; " + str(e)) return chat_history = read_lines_from_file("chatlog.log") for line in chat_history: line_tokens = line.split(" : ") line_time = None try: line_time = datetime.strptime(line_tokens[0], "%Y-%m-%d %H:%M:%S.%f") except: continue #2.6 compatibility. delta = (line_time - start_time) delta_seconds = (delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10**6) / 10**6 if ((line_time > start_time ) \ and ( end_time and line_time < end_time )) \ or (not end_time and abs(delta_seconds) < 10): self.send_message(message.From, line) time.sleep(1) self.send_message(message.From, "Done replay.")
normal
{ "blob_id": "d932ab84848c9a8ca8bb23a57424b8f6190b6260", "index": 2563, "step-1": "<mask token>\n\n\nclass LogPlugin(Plugin):\n <mask token>\n <mask token>\n\n def search(self, message, query, *additional_queries):\n chat_history = read_lines_from_file('chatlog.log')\n chat_history.reverse()\n found_line = None\n for line in chat_history:\n if query in line:\n found_line = line\n for additional_query in additional_queries:\n if additional_query not in line:\n found_line = None\n break\n if found_line:\n break\n if found_line:\n self.send_message(message.From, line)\n return\n\n def replay(self, message, startTime, endTime=None):\n start_time = None\n end_time = None\n try:\n start_time = datetime.strptime(startTime, '%Y-%m-%d,%H:%M')\n if endTime:\n end_time = datetime.strptime(endTime, '%Y-%m-%d,%H:%M')\n except Exception as e:\n self.send_message(message.From, \n 'Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; '\n + str(e))\n return\n chat_history = read_lines_from_file('chatlog.log')\n for line in chat_history:\n line_tokens = line.split(' : ')\n line_time = None\n try:\n line_time = datetime.strptime(line_tokens[0],\n '%Y-%m-%d %H:%M:%S.%f')\n except:\n continue\n delta = line_time - start_time\n delta_seconds = (delta.microseconds + (delta.seconds + delta.\n days * 24 * 3600) * 10 ** 6) / 10 ** 6\n if line_time > start_time and (end_time and line_time < end_time\n ) or not end_time and abs(delta_seconds) < 10:\n self.send_message(message.From, line)\n time.sleep(1)\n self.send_message(message.From, 'Done replay.')\n", "step-2": "<mask token>\n\n\nclass LogPlugin(Plugin):\n\n def initialize(self):\n self.add_trigger(on_message)\n self.add_command('!chatsearch', self.search)\n self.add_command('!chatreplay', self.replay)\n <mask token>\n\n def search(self, message, query, *additional_queries):\n chat_history = read_lines_from_file('chatlog.log')\n chat_history.reverse()\n found_line = None\n for line in chat_history:\n if 
query in line:\n found_line = line\n for additional_query in additional_queries:\n if additional_query not in line:\n found_line = None\n break\n if found_line:\n break\n if found_line:\n self.send_message(message.From, line)\n return\n\n def replay(self, message, startTime, endTime=None):\n start_time = None\n end_time = None\n try:\n start_time = datetime.strptime(startTime, '%Y-%m-%d,%H:%M')\n if endTime:\n end_time = datetime.strptime(endTime, '%Y-%m-%d,%H:%M')\n except Exception as e:\n self.send_message(message.From, \n 'Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; '\n + str(e))\n return\n chat_history = read_lines_from_file('chatlog.log')\n for line in chat_history:\n line_tokens = line.split(' : ')\n line_time = None\n try:\n line_time = datetime.strptime(line_tokens[0],\n '%Y-%m-%d %H:%M:%S.%f')\n except:\n continue\n delta = line_time - start_time\n delta_seconds = (delta.microseconds + (delta.seconds + delta.\n days * 24 * 3600) * 10 ** 6) / 10 ** 6\n if line_time > start_time and (end_time and line_time < end_time\n ) or not end_time and abs(delta_seconds) < 10:\n self.send_message(message.From, line)\n time.sleep(1)\n self.send_message(message.From, 'Done replay.')\n", "step-3": "<mask token>\n\n\nclass LogPlugin(Plugin):\n\n def initialize(self):\n self.add_trigger(on_message)\n self.add_command('!chatsearch', self.search)\n self.add_command('!chatreplay', self.replay)\n\n def run(self, message):\n append_to_file(str(datetime.now()) + ' : ' + message.From + ' : ' +\n message.Body + '\\n', 'chatlog.log')\n\n def search(self, message, query, *additional_queries):\n chat_history = read_lines_from_file('chatlog.log')\n chat_history.reverse()\n found_line = None\n for line in chat_history:\n if query in line:\n found_line = line\n for additional_query in additional_queries:\n if additional_query not in line:\n found_line = None\n break\n if found_line:\n break\n if found_line:\n self.send_message(message.From, 
line)\n return\n\n def replay(self, message, startTime, endTime=None):\n start_time = None\n end_time = None\n try:\n start_time = datetime.strptime(startTime, '%Y-%m-%d,%H:%M')\n if endTime:\n end_time = datetime.strptime(endTime, '%Y-%m-%d,%H:%M')\n except Exception as e:\n self.send_message(message.From, \n 'Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; '\n + str(e))\n return\n chat_history = read_lines_from_file('chatlog.log')\n for line in chat_history:\n line_tokens = line.split(' : ')\n line_time = None\n try:\n line_time = datetime.strptime(line_tokens[0],\n '%Y-%m-%d %H:%M:%S.%f')\n except:\n continue\n delta = line_time - start_time\n delta_seconds = (delta.microseconds + (delta.seconds + delta.\n days * 24 * 3600) * 10 ** 6) / 10 ** 6\n if line_time > start_time and (end_time and line_time < end_time\n ) or not end_time and abs(delta_seconds) < 10:\n self.send_message(message.From, line)\n time.sleep(1)\n self.send_message(message.From, 'Done replay.')\n", "step-4": "from base_plugin import *\nfrom plugin_utils import *\nfrom datetime import datetime\nimport time\n\n\nclass LogPlugin(Plugin):\n\n def initialize(self):\n self.add_trigger(on_message)\n self.add_command('!chatsearch', self.search)\n self.add_command('!chatreplay', self.replay)\n\n def run(self, message):\n append_to_file(str(datetime.now()) + ' : ' + message.From + ' : ' +\n message.Body + '\\n', 'chatlog.log')\n\n def search(self, message, query, *additional_queries):\n chat_history = read_lines_from_file('chatlog.log')\n chat_history.reverse()\n found_line = None\n for line in chat_history:\n if query in line:\n found_line = line\n for additional_query in additional_queries:\n if additional_query not in line:\n found_line = None\n break\n if found_line:\n break\n if found_line:\n self.send_message(message.From, line)\n return\n\n def replay(self, message, startTime, endTime=None):\n start_time = None\n end_time = None\n try:\n start_time = 
datetime.strptime(startTime, '%Y-%m-%d,%H:%M')\n if endTime:\n end_time = datetime.strptime(endTime, '%Y-%m-%d,%H:%M')\n except Exception as e:\n self.send_message(message.From, \n 'Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; '\n + str(e))\n return\n chat_history = read_lines_from_file('chatlog.log')\n for line in chat_history:\n line_tokens = line.split(' : ')\n line_time = None\n try:\n line_time = datetime.strptime(line_tokens[0],\n '%Y-%m-%d %H:%M:%S.%f')\n except:\n continue\n delta = line_time - start_time\n delta_seconds = (delta.microseconds + (delta.seconds + delta.\n days * 24 * 3600) * 10 ** 6) / 10 ** 6\n if line_time > start_time and (end_time and line_time < end_time\n ) or not end_time and abs(delta_seconds) < 10:\n self.send_message(message.From, line)\n time.sleep(1)\n self.send_message(message.From, 'Done replay.')\n", "step-5": "from base_plugin import *\nfrom plugin_utils import *\n\nfrom datetime import datetime\nimport time\n\n\nclass LogPlugin(Plugin):\n\tdef initialize(self):\n\t\tself.add_trigger(on_message)\n\n\t\tself.add_command(\"!chatsearch\", self.search)\n\t\tself.add_command(\"!chatreplay\", self.replay)\n\n\n\tdef run(self, message):\n\t\tappend_to_file(str(datetime.now()) + \" : \" + message.From + \" : \" + message.Body + '\\n', \"chatlog.log\")\n\n\n\tdef search(self, message, query, *additional_queries):\n\t\tchat_history = read_lines_from_file(\"chatlog.log\")\n\t\tchat_history.reverse()\n\n\t\tfound_line = None\n\t\tfor line in chat_history:\n\t\t\tif query in line:\n\t\t\t\tfound_line = line\n\t\t\t\tfor additional_query in additional_queries:\n\t\t\t\t\tif additional_query not in line:\n\t\t\t\t\t\tfound_line = None\n\t\t\t\t\t\tbreak\n\n\t\t\t\tif found_line:\n\t\t\t\t\tbreak\n\n\t\tif found_line:\n\t\t\tself.send_message(message.From, line)\n\n\t\treturn\n\n\tdef replay(self, message, startTime, endTime = None):\n\t\tstart_time = None\n\t\tend_time = 
None\n\t\ttry:\n\t\t\tstart_time = datetime.strptime(startTime, \"%Y-%m-%d,%H:%M\")\n\t\t\tif endTime:\n\t\t\t\tend_time = datetime.strptime(endTime, \"%Y-%m-%d,%H:%M\")\n\t\texcept Exception as e:\n\t\t\tself.send_message(message.From, \"Expects inputs in the format: !chatreplay <yyyy-mm-dd,hh:mm> [<yyyyy-mm-dd,hh:mm>] ; \" + str(e))\n\t\t\treturn\n\n\n\t\tchat_history = read_lines_from_file(\"chatlog.log\")\n\n\t\tfor line in chat_history:\n\t\t\tline_tokens = line.split(\" : \")\n\n\t\t\tline_time = None\n\t\t\ttry:\n\t\t\t\tline_time = datetime.strptime(line_tokens[0], \"%Y-%m-%d %H:%M:%S.%f\")\n\t\t\texcept:\n\t\t\t\tcontinue\n\t\t\t\n\t\t\t#2.6 compatibility.\n\t\t\tdelta = (line_time - start_time)\n\t\t\tdelta_seconds = (delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10**6) / 10**6\n\n\t\t\tif ((line_time > start_time ) \\\n\t\t\t\t\tand ( end_time and line_time < end_time )) \\\n\t\t\t\tor (not end_time and abs(delta_seconds) < 10):\n\t\t\t\t\tself.send_message(message.From, line)\n\t\t\t\t\ttime.sleep(1)\n\n\t\tself.send_message(message.From, \"Done replay.\")\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
""" 100 4 200 1 3 2 100 4 200 1 3 2 6:35 """ class Solution: def longestConsecutive(self, nums: List[int]) -> int: numset = set(nums) ans = 0 # visited = set(nums) maxnum = float('-inf') if not nums: return 0 for n in numset: # saven = n if n+1 not in numset: ans = 1 saven = n while saven-1 in numset: ans +=1 saven = saven-1 # visited.add(n) maxnum = max(ans, maxnum) return maxnum # cnt = Counter(nums) # print(cnt) # maxnum = float('-inf') # minnum = float('inf') # ans = [minnum, maxnum] # visited = set() # def checknumber(checknum, cnt, ans): # minnum = ans[0] # maxnum = ans[1] # print('checknum', checknum, minnum, maxnum, visited) # if checknum in cnt and n not in visited: # minnum = min(checknum, minnum) # maxnum = max(checknum, maxnum) # visited.add(n) # if checknum-1 in cnt: # checknumber(checknum-1, cnt,[minnum, maxnum]) # if checknum+1 in cnt: # checknumber(checknum+1, cnt, [minnum, maxnum]) # for n in nums: # checknumber(n, cnt, [minnum, maxnum]) # return (ans[1]-ans[0])+1
normal
{ "blob_id": "50c7ce95f17cbd40a753d16d9f9fab349ad4f4ce", "index": 3801, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Solution:\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Solution:\n\n def longestConsecutive(self, nums: List[int]) ->int:\n numset = set(nums)\n ans = 0\n maxnum = float('-inf')\n if not nums:\n return 0\n for n in numset:\n if n + 1 not in numset:\n ans = 1\n saven = n\n while saven - 1 in numset:\n ans += 1\n saven = saven - 1\n maxnum = max(ans, maxnum)\n return maxnum\n", "step-4": "\"\"\"\n 100 4 200 1 3 2\n100 \n4\n200\n1\n3\n2\n\n6:35\n\"\"\"\n\nclass Solution:\n def longestConsecutive(self, nums: List[int]) -> int:\n numset = set(nums)\n ans = 0\n # visited = set(nums)\n maxnum = float('-inf')\n \n if not nums: \n return 0\n \n for n in numset:\n # saven = n\n \n if n+1 not in numset:\n ans = 1\n saven = n\n\n while saven-1 in numset:\n ans +=1\n saven = saven-1\n # visited.add(n)\n\n maxnum = max(ans, maxnum)\n \n return maxnum\n \n \n \n \n \n # cnt = Counter(nums)\n# print(cnt)\n# maxnum = float('-inf')\n# minnum = float('inf')\n# ans = [minnum, maxnum]\n# visited = set()\n \n# def checknumber(checknum, cnt, ans):\n# minnum = ans[0]\n# maxnum = ans[1]\n# print('checknum', checknum, minnum, maxnum, visited)\n# if checknum in cnt and n not in visited:\n# minnum = min(checknum, minnum) \n# maxnum = max(checknum, maxnum)\n# visited.add(n)\n\n# if checknum-1 in cnt:\n# checknumber(checknum-1, cnt,[minnum, maxnum])\n# if checknum+1 in cnt:\n# checknumber(checknum+1, cnt, [minnum, maxnum])\n \n# for n in nums:\n# checknumber(n, cnt, [minnum, maxnum])\n \n# return (ans[1]-ans[0])+1", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def create_app(config_name): app = Flask(__name__) app.config.from_object(Config) app.config.from_object(config_options[config_name]) app.config['SECRET_KEY'] = 'd686414d5eeb7d38df7e8c385b2c2c47' bootstrap.init_app(app) csrf.init_app(app) db.init_app(app) from .main import main as main_blueprint app.register_blueprint(main_blueprint) from .auth import auth as auth_blueprint app.register_blueprint(auth_blueprint, url_prefix='/authenticate') return app <|reserved_special_token_1|> <|reserved_special_token_0|> login_manager = LoginManager() login_manager.session_protection = 'strong' login_manager.loginview = 'auth.login' bootstrap = Bootstrap() csrf = CSRFProtect() db = SQLAlchemy() def create_app(config_name): app = Flask(__name__) app.config.from_object(Config) app.config.from_object(config_options[config_name]) app.config['SECRET_KEY'] = 'd686414d5eeb7d38df7e8c385b2c2c47' bootstrap.init_app(app) csrf.init_app(app) db.init_app(app) from .main import main as main_blueprint app.register_blueprint(main_blueprint) from .auth import auth as auth_blueprint app.register_blueprint(auth_blueprint, url_prefix='/authenticate') return app <|reserved_special_token_1|> from flask import Flask, render_template from config import Config from flask_bootstrap import Bootstrap from config import config_options from flask_login import LoginManager from flask_wtf.csrf import CSRFProtect from flask_sqlalchemy import SQLAlchemy login_manager = LoginManager() login_manager.session_protection = 'strong' login_manager.loginview = 'auth.login' bootstrap = Bootstrap() csrf = CSRFProtect() db = SQLAlchemy() def create_app(config_name): app = Flask(__name__) app.config.from_object(Config) app.config.from_object(config_options[config_name]) app.config['SECRET_KEY'] = 'd686414d5eeb7d38df7e8c385b2c2c47' bootstrap.init_app(app) csrf.init_app(app) db.init_app(app) from .main import main as main_blueprint 
app.register_blueprint(main_blueprint) from .auth import auth as auth_blueprint app.register_blueprint(auth_blueprint, url_prefix='/authenticate') return app <|reserved_special_token_1|> from flask import Flask, render_template from config import Config from flask_bootstrap import Bootstrap from config import config_options from flask_login import LoginManager from flask_wtf.csrf import CSRFProtect from flask_sqlalchemy import SQLAlchemy login_manager = LoginManager() login_manager.session_protection = 'strong' login_manager.loginview = 'auth.login' bootstrap = Bootstrap() csrf=CSRFProtect() db = SQLAlchemy() def create_app(config_name): app= Flask(__name__) #create app configs app.config.from_object(Config) app.config.from_object(config_options[config_name]) app.config['SECRET_KEY']='d686414d5eeb7d38df7e8c385b2c2c47' #initializing bootstrap.init_app(app) csrf.init_app(app) db.init_app(app) #registering from .main import main as main_blueprint app.register_blueprint(main_blueprint) from .auth import auth as auth_blueprint app.register_blueprint(auth_blueprint, url_prefix = '/authenticate') return app
flexible
{ "blob_id": "2eecc852a6438db19e0ed55ba6cc6610d76c6ed0", "index": 2207, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef create_app(config_name):\n app = Flask(__name__)\n app.config.from_object(Config)\n app.config.from_object(config_options[config_name])\n app.config['SECRET_KEY'] = 'd686414d5eeb7d38df7e8c385b2c2c47'\n bootstrap.init_app(app)\n csrf.init_app(app)\n db.init_app(app)\n from .main import main as main_blueprint\n app.register_blueprint(main_blueprint)\n from .auth import auth as auth_blueprint\n app.register_blueprint(auth_blueprint, url_prefix='/authenticate')\n return app\n", "step-3": "<mask token>\nlogin_manager = LoginManager()\nlogin_manager.session_protection = 'strong'\nlogin_manager.loginview = 'auth.login'\nbootstrap = Bootstrap()\ncsrf = CSRFProtect()\ndb = SQLAlchemy()\n\n\ndef create_app(config_name):\n app = Flask(__name__)\n app.config.from_object(Config)\n app.config.from_object(config_options[config_name])\n app.config['SECRET_KEY'] = 'd686414d5eeb7d38df7e8c385b2c2c47'\n bootstrap.init_app(app)\n csrf.init_app(app)\n db.init_app(app)\n from .main import main as main_blueprint\n app.register_blueprint(main_blueprint)\n from .auth import auth as auth_blueprint\n app.register_blueprint(auth_blueprint, url_prefix='/authenticate')\n return app\n", "step-4": "from flask import Flask, render_template\nfrom config import Config\nfrom flask_bootstrap import Bootstrap\nfrom config import config_options\nfrom flask_login import LoginManager\nfrom flask_wtf.csrf import CSRFProtect\nfrom flask_sqlalchemy import SQLAlchemy\nlogin_manager = LoginManager()\nlogin_manager.session_protection = 'strong'\nlogin_manager.loginview = 'auth.login'\nbootstrap = Bootstrap()\ncsrf = CSRFProtect()\ndb = SQLAlchemy()\n\n\ndef create_app(config_name):\n app = Flask(__name__)\n app.config.from_object(Config)\n app.config.from_object(config_options[config_name])\n app.config['SECRET_KEY'] = 'd686414d5eeb7d38df7e8c385b2c2c47'\n bootstrap.init_app(app)\n 
csrf.init_app(app)\n db.init_app(app)\n from .main import main as main_blueprint\n app.register_blueprint(main_blueprint)\n from .auth import auth as auth_blueprint\n app.register_blueprint(auth_blueprint, url_prefix='/authenticate')\n return app\n", "step-5": "from flask import Flask, render_template\nfrom config import Config\nfrom flask_bootstrap import Bootstrap\nfrom config import config_options\nfrom flask_login import LoginManager\nfrom flask_wtf.csrf import CSRFProtect\nfrom flask_sqlalchemy import SQLAlchemy\n\nlogin_manager = LoginManager()\nlogin_manager.session_protection = 'strong'\nlogin_manager.loginview = 'auth.login'\n\nbootstrap = Bootstrap()\ncsrf=CSRFProtect()\ndb = SQLAlchemy()\n\ndef create_app(config_name):\n \n app= Flask(__name__)\n\n #create app configs\n app.config.from_object(Config)\n app.config.from_object(config_options[config_name])\n app.config['SECRET_KEY']='d686414d5eeb7d38df7e8c385b2c2c47'\n \n #initializing\n bootstrap.init_app(app)\n csrf.init_app(app)\n db.init_app(app)\n \n #registering\n from .main import main as main_blueprint\n app.register_blueprint(main_blueprint)\n \n from .auth import auth as auth_blueprint\n app.register_blueprint(auth_blueprint, url_prefix = '/authenticate')\n\n \n return app", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def parse_page(page): if 'redirect' in page.keys(): return page_text = page['revision']['text']['#text'] page_text = remove_xml_comments(page_text) title = page['title'] categories = extract_categories(page_text) try: sections = extract_sections(page_text) except: return title, 'Can not parse', None, None return title, sections, categories <|reserved_special_token_1|> from .parse_categories import extract_categories from .parse_sections import extract_sections from .utils import remove_xml_comments def parse_page(page): if 'redirect' in page.keys(): return page_text = page['revision']['text']['#text'] page_text = remove_xml_comments(page_text) title = page['title'] categories = extract_categories(page_text) try: sections = extract_sections(page_text) except: return title, 'Can not parse', None, None return title, sections, categories
flexible
{ "blob_id": "0ad2e6d7e3fd61943fc1dfe6662110a6f48c1bd5", "index": 5347, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef parse_page(page):\n if 'redirect' in page.keys():\n return\n page_text = page['revision']['text']['#text']\n page_text = remove_xml_comments(page_text)\n title = page['title']\n categories = extract_categories(page_text)\n try:\n sections = extract_sections(page_text)\n except:\n return title, 'Can not parse', None, None\n return title, sections, categories\n", "step-3": "from .parse_categories import extract_categories\nfrom .parse_sections import extract_sections\nfrom .utils import remove_xml_comments\n\n\ndef parse_page(page):\n if 'redirect' in page.keys():\n return\n page_text = page['revision']['text']['#text']\n page_text = remove_xml_comments(page_text)\n title = page['title']\n categories = extract_categories(page_text)\n try:\n sections = extract_sections(page_text)\n except:\n return title, 'Can not parse', None, None\n return title, sections, categories\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> def select_sort(input): print('\nSelect Sort') input_len = len(input) for i in range(0, input_len): min_index = i for j in range(i + 1, input_len): if input[j] < input[min_index]: min_index = j tmp = input[i] input[i] = input[min_index] input[min_index] = tmp return input <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def bubble_sort(input): print('\nBubble Sort') input_len = len(input) print('length of input: %d' % input_len) for i in range(0, input_len): for j in range(0, input_len - 1 - i): if input[j] > input[j + 1]: tmp = input[j + 1] input[j + 1] = input[j] input[j] = tmp return input <|reserved_special_token_0|> print(test_arr_bubble_sorted) def select_sort(input): print('\nSelect Sort') input_len = len(input) for i in range(0, input_len): min_index = i for j in range(i + 1, input_len): if input[j] < input[min_index]: min_index = j tmp = input[i] input[i] = input[min_index] input[min_index] = tmp return input <|reserved_special_token_0|> print(test_arr_select_sorted) def merge_sort(input): input_len = len(input) if input_len <= 1: return input mid = math.floor(input_len / 2) left = merge_sort(input[:mid]) right = merge_sort(input[mid:]) return merge(left, right) def merge(sorted_arr1, sorted_arr2): result = [] i = j = 0 while i < len(sorted_arr1) and j < len(sorted_arr2): if sorted_arr1[i] < sorted_arr2[j]: result.append(sorted_arr1[i]) i = i + 1 else: result.append(sorted_arr2[j]) j = j + 1 if i == len(sorted_arr1): for item in sorted_arr2[j:]: result.append(item) else: for item in sorted_arr1[i:]: result.append(item) return result <|reserved_special_token_0|> print('\nMerge Sort') <|reserved_special_token_0|> print(test_arr_merge_sorted) def quick_sort(li, start, end): if start >= end: return left = start right = end mid = li[left] while left < right: while left < right and li[right] >= mid: right -= 1 li[left] = li[right] while left < right and li[left] < mid: left += 1 li[right] = 
li[left] li[left] = mid quick_sort(li, start, left - 1) quick_sort(li, left + 1, end) <|reserved_special_token_0|> print('\nQuick Sort') quick_sort(test_arr, 0, len(test_arr) - 1) print(test_arr) <|reserved_special_token_1|> <|reserved_special_token_0|> def bubble_sort(input): print('\nBubble Sort') input_len = len(input) print('length of input: %d' % input_len) for i in range(0, input_len): for j in range(0, input_len - 1 - i): if input[j] > input[j + 1]: tmp = input[j + 1] input[j + 1] = input[j] input[j] = tmp return input test_arr = [3, 4, 1, 6, 30, 5] test_arr_bubble_sorted = bubble_sort(test_arr) print(test_arr_bubble_sorted) def select_sort(input): print('\nSelect Sort') input_len = len(input) for i in range(0, input_len): min_index = i for j in range(i + 1, input_len): if input[j] < input[min_index]: min_index = j tmp = input[i] input[i] = input[min_index] input[min_index] = tmp return input test_arr = [3, 4, 1, 6, 30, 5] test_arr_select_sorted = select_sort(test_arr) print(test_arr_select_sorted) def merge_sort(input): input_len = len(input) if input_len <= 1: return input mid = math.floor(input_len / 2) left = merge_sort(input[:mid]) right = merge_sort(input[mid:]) return merge(left, right) def merge(sorted_arr1, sorted_arr2): result = [] i = j = 0 while i < len(sorted_arr1) and j < len(sorted_arr2): if sorted_arr1[i] < sorted_arr2[j]: result.append(sorted_arr1[i]) i = i + 1 else: result.append(sorted_arr2[j]) j = j + 1 if i == len(sorted_arr1): for item in sorted_arr2[j:]: result.append(item) else: for item in sorted_arr1[i:]: result.append(item) return result test_arr = [3, 4, 1, 6, 30, 5] print('\nMerge Sort') test_arr_merge_sorted = merge_sort(test_arr) print(test_arr_merge_sorted) def quick_sort(li, start, end): if start >= end: return left = start right = end mid = li[left] while left < right: while left < right and li[right] >= mid: right -= 1 li[left] = li[right] while left < right and li[left] < mid: left += 1 li[right] = li[left] li[left] = mid 
quick_sort(li, start, left - 1) quick_sort(li, left + 1, end) test_arr = [3, 4, 1, 6, 30, 5] print('\nQuick Sort') quick_sort(test_arr, 0, len(test_arr) - 1) print(test_arr) <|reserved_special_token_1|> import math def bubble_sort(input): print('\nBubble Sort') input_len = len(input) print('length of input: %d' % input_len) for i in range(0, input_len): for j in range(0, input_len - 1 - i): if input[j] > input[j + 1]: tmp = input[j + 1] input[j + 1] = input[j] input[j] = tmp return input test_arr = [3, 4, 1, 6, 30, 5] test_arr_bubble_sorted = bubble_sort(test_arr) print(test_arr_bubble_sorted) def select_sort(input): print('\nSelect Sort') input_len = len(input) for i in range(0, input_len): min_index = i for j in range(i + 1, input_len): if input[j] < input[min_index]: min_index = j tmp = input[i] input[i] = input[min_index] input[min_index] = tmp return input test_arr = [3, 4, 1, 6, 30, 5] test_arr_select_sorted = select_sort(test_arr) print(test_arr_select_sorted) def merge_sort(input): input_len = len(input) if input_len <= 1: return input mid = math.floor(input_len / 2) left = merge_sort(input[:mid]) right = merge_sort(input[mid:]) return merge(left, right) def merge(sorted_arr1, sorted_arr2): result = [] i = j = 0 while i < len(sorted_arr1) and j < len(sorted_arr2): if sorted_arr1[i] < sorted_arr2[j]: result.append(sorted_arr1[i]) i = i + 1 else: result.append(sorted_arr2[j]) j = j + 1 if i == len(sorted_arr1): for item in sorted_arr2[j:]: result.append(item) else: for item in sorted_arr1[i:]: result.append(item) return result test_arr = [3, 4, 1, 6, 30, 5] print('\nMerge Sort') test_arr_merge_sorted = merge_sort(test_arr) print(test_arr_merge_sorted) def quick_sort(li, start, end): if start >= end: return left = start right = end mid = li[left] while left < right: while left < right and li[right] >= mid: right -= 1 li[left] = li[right] while left < right and li[left] < mid: left += 1 li[right] = li[left] li[left] = mid quick_sort(li, start, left - 1) 
quick_sort(li, left + 1, end) test_arr = [3, 4, 1, 6, 30, 5] print('\nQuick Sort') quick_sort(test_arr, 0, len(test_arr) - 1) print(test_arr) <|reserved_special_token_1|> # -*- coding: utf-8 -*- import math # 冒泡排序(Bubble Sort) # 比较相邻的元素。如果第一个比第二个大,就交换它们两个; # 对每一对相邻元素作同样的工作,从开始第一对到结尾的最后一对,这样在最后的元素应该会是最大的数; # 针对所有的元素重复以上的步骤,除了最后一个; # 重复步骤1~3,直到排序完成。 # 冒泡排序总的平均时间复杂度为:O(n^2) def bubble_sort(input): print("\nBubble Sort") input_len = len(input) print("length of input: %d" % input_len) for i in range(0, input_len): for j in range(0, input_len - 1 - i): if input[j] > input[j + 1]: tmp = input[j + 1] input[j + 1] = input[j] input[j] = tmp return input test_arr = [3, 4, 1, 6, 30, 5] test_arr_bubble_sorted = bubble_sort(test_arr) print(test_arr_bubble_sorted) # 选择排序(Selection-sort) # 选择排序(Selection-sort)是一种简单直观的排序算法。它的工作原理:首先在未排序序列中找到最小(大)元素,存放到排序序列的起始位置, # 然后,再从剩余未排序元素中继续寻找最小(大)元素,然后放到已排序序列的末尾。以此类推,直到所有元素均排序完毕。 # 选择排序总的平均时间复杂度为:O(n^2) def select_sort(input): print("\nSelect Sort") input_len = len(input) for i in range(0, input_len): min_index = i for j in range(i + 1, input_len): if input[j] < input[min_index]: min_index = j tmp = input[i] input[i] = input[min_index] input[min_index] = tmp return input test_arr = [3, 4, 1, 6, 30, 5] test_arr_select_sorted = select_sort(test_arr) print(test_arr_select_sorted) # 插入排序(Insertion Sort) # 插入排序(Insertion-Sort)的算法描述是一种简单直观的排序算法。它的工作原理是通过构建有序序列,对于未排序数据, # 在已排序序列中从后向前扫描,找到相应位置并插入。 # 归并排序(Merge Sort) # 首先归并排序使用了二分法,归根到底的思想还是分而治之。拿到一个长数组,将其不停的分为左边和右边两份,然后以此递归分下去。 # 然后再将她们按照两个有序数组的样子合并起来。 # 归并排序时间复杂度是o(nlogn) def merge_sort(input): input_len = len(input) if input_len <= 1: return input mid = math.floor(input_len / 2) left = merge_sort(input[:mid]) right = merge_sort(input[mid:]) return merge(left, right) def merge(sorted_arr1, sorted_arr2): result = [] i = j = 0 while i < len(sorted_arr1) and j < len(sorted_arr2): if sorted_arr1[i] < sorted_arr2[j]: result.append(sorted_arr1[i]) i = i + 1 else: result.append(sorted_arr2[j]) j = j + 1 if 
i == len(sorted_arr1): for item in sorted_arr2[j:]: result.append(item) else: for item in sorted_arr1[i:]: result.append(item) return result test_arr = [3, 4, 1, 6, 30, 5] print("\nMerge Sort") test_arr_merge_sorted = merge_sort(test_arr) print(test_arr_merge_sorted) # 快速排序(Quick Sort) # 快速排序使用分治法来把一个串(list)分为两个子串(sub-lists)。具体算法描述如下: # # 从数列中挑出一个元素,称为 “基准”(pivot); # 重新排序数列,所有元素比基准值小的摆放在基准前面,所有元素比基准值大的摆在基准的后面(相同的数可以到任一边)。 # 在这个分区退出之后,该基准就处于数列的中间位置。这个称为分区(partition)操作; # 递归地(recursive)把小于基准值元素的子数列和大于基准值元素的子数列排序。 # 快速排序时间复杂度是o(nlogn) def quick_sort(li, start, end): # 分治 一分为二 # start=end ,证明要处理的数据只有一个 # start>end ,证明右边没有数据 if start >= end: return # 定义两个游标,分别指向0和末尾位置 left = start right = end # 把0位置的数据,认为是中间值 mid = li[left] while left < right: # 让右边游标往左移动,目的是找到小于mid的值,放到left游标位置 while left < right and li[right] >= mid: right -= 1 li[left] = li[right] # 让左边游标往右移动,目的是找到大于mid的值,放到right游标位置 while left < right and li[left] < mid: left += 1 li[right] = li[left] # while结束后,把mid放到中间位置,left=right li[left] = mid # 递归处理左边的数据 quick_sort(li, start, left-1) # 递归处理右边的数据 quick_sort(li, left+1, end) test_arr = [3, 4, 1, 6, 30, 5] print("\nQuick Sort") quick_sort(test_arr, 0, len(test_arr)-1) print(test_arr)
flexible
{ "blob_id": "c967aa647a97b17c9a7493559b9a1577dd95263a", "index": 7806, "step-1": "<mask token>\n\n\ndef select_sort(input):\n print('\\nSelect Sort')\n input_len = len(input)\n for i in range(0, input_len):\n min_index = i\n for j in range(i + 1, input_len):\n if input[j] < input[min_index]:\n min_index = j\n tmp = input[i]\n input[i] = input[min_index]\n input[min_index] = tmp\n return input\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef bubble_sort(input):\n print('\\nBubble Sort')\n input_len = len(input)\n print('length of input: %d' % input_len)\n for i in range(0, input_len):\n for j in range(0, input_len - 1 - i):\n if input[j] > input[j + 1]:\n tmp = input[j + 1]\n input[j + 1] = input[j]\n input[j] = tmp\n return input\n\n\n<mask token>\nprint(test_arr_bubble_sorted)\n\n\ndef select_sort(input):\n print('\\nSelect Sort')\n input_len = len(input)\n for i in range(0, input_len):\n min_index = i\n for j in range(i + 1, input_len):\n if input[j] < input[min_index]:\n min_index = j\n tmp = input[i]\n input[i] = input[min_index]\n input[min_index] = tmp\n return input\n\n\n<mask token>\nprint(test_arr_select_sorted)\n\n\ndef merge_sort(input):\n input_len = len(input)\n if input_len <= 1:\n return input\n mid = math.floor(input_len / 2)\n left = merge_sort(input[:mid])\n right = merge_sort(input[mid:])\n return merge(left, right)\n\n\ndef merge(sorted_arr1, sorted_arr2):\n result = []\n i = j = 0\n while i < len(sorted_arr1) and j < len(sorted_arr2):\n if sorted_arr1[i] < sorted_arr2[j]:\n result.append(sorted_arr1[i])\n i = i + 1\n else:\n result.append(sorted_arr2[j])\n j = j + 1\n if i == len(sorted_arr1):\n for item in sorted_arr2[j:]:\n result.append(item)\n else:\n for item in sorted_arr1[i:]:\n result.append(item)\n return result\n\n\n<mask token>\nprint('\\nMerge Sort')\n<mask token>\nprint(test_arr_merge_sorted)\n\n\ndef quick_sort(li, start, end):\n if start >= end:\n return\n left = start\n right = end\n mid = li[left]\n while left < right:\n 
while left < right and li[right] >= mid:\n right -= 1\n li[left] = li[right]\n while left < right and li[left] < mid:\n left += 1\n li[right] = li[left]\n li[left] = mid\n quick_sort(li, start, left - 1)\n quick_sort(li, left + 1, end)\n\n\n<mask token>\nprint('\\nQuick Sort')\nquick_sort(test_arr, 0, len(test_arr) - 1)\nprint(test_arr)\n", "step-3": "<mask token>\n\n\ndef bubble_sort(input):\n print('\\nBubble Sort')\n input_len = len(input)\n print('length of input: %d' % input_len)\n for i in range(0, input_len):\n for j in range(0, input_len - 1 - i):\n if input[j] > input[j + 1]:\n tmp = input[j + 1]\n input[j + 1] = input[j]\n input[j] = tmp\n return input\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\ntest_arr_bubble_sorted = bubble_sort(test_arr)\nprint(test_arr_bubble_sorted)\n\n\ndef select_sort(input):\n print('\\nSelect Sort')\n input_len = len(input)\n for i in range(0, input_len):\n min_index = i\n for j in range(i + 1, input_len):\n if input[j] < input[min_index]:\n min_index = j\n tmp = input[i]\n input[i] = input[min_index]\n input[min_index] = tmp\n return input\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\ntest_arr_select_sorted = select_sort(test_arr)\nprint(test_arr_select_sorted)\n\n\ndef merge_sort(input):\n input_len = len(input)\n if input_len <= 1:\n return input\n mid = math.floor(input_len / 2)\n left = merge_sort(input[:mid])\n right = merge_sort(input[mid:])\n return merge(left, right)\n\n\ndef merge(sorted_arr1, sorted_arr2):\n result = []\n i = j = 0\n while i < len(sorted_arr1) and j < len(sorted_arr2):\n if sorted_arr1[i] < sorted_arr2[j]:\n result.append(sorted_arr1[i])\n i = i + 1\n else:\n result.append(sorted_arr2[j])\n j = j + 1\n if i == len(sorted_arr1):\n for item in sorted_arr2[j:]:\n result.append(item)\n else:\n for item in sorted_arr1[i:]:\n result.append(item)\n return result\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\nprint('\\nMerge Sort')\ntest_arr_merge_sorted = merge_sort(test_arr)\nprint(test_arr_merge_sorted)\n\n\ndef quick_sort(li, 
start, end):\n if start >= end:\n return\n left = start\n right = end\n mid = li[left]\n while left < right:\n while left < right and li[right] >= mid:\n right -= 1\n li[left] = li[right]\n while left < right and li[left] < mid:\n left += 1\n li[right] = li[left]\n li[left] = mid\n quick_sort(li, start, left - 1)\n quick_sort(li, left + 1, end)\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\nprint('\\nQuick Sort')\nquick_sort(test_arr, 0, len(test_arr) - 1)\nprint(test_arr)\n", "step-4": "import math\n\n\ndef bubble_sort(input):\n print('\\nBubble Sort')\n input_len = len(input)\n print('length of input: %d' % input_len)\n for i in range(0, input_len):\n for j in range(0, input_len - 1 - i):\n if input[j] > input[j + 1]:\n tmp = input[j + 1]\n input[j + 1] = input[j]\n input[j] = tmp\n return input\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\ntest_arr_bubble_sorted = bubble_sort(test_arr)\nprint(test_arr_bubble_sorted)\n\n\ndef select_sort(input):\n print('\\nSelect Sort')\n input_len = len(input)\n for i in range(0, input_len):\n min_index = i\n for j in range(i + 1, input_len):\n if input[j] < input[min_index]:\n min_index = j\n tmp = input[i]\n input[i] = input[min_index]\n input[min_index] = tmp\n return input\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\ntest_arr_select_sorted = select_sort(test_arr)\nprint(test_arr_select_sorted)\n\n\ndef merge_sort(input):\n input_len = len(input)\n if input_len <= 1:\n return input\n mid = math.floor(input_len / 2)\n left = merge_sort(input[:mid])\n right = merge_sort(input[mid:])\n return merge(left, right)\n\n\ndef merge(sorted_arr1, sorted_arr2):\n result = []\n i = j = 0\n while i < len(sorted_arr1) and j < len(sorted_arr2):\n if sorted_arr1[i] < sorted_arr2[j]:\n result.append(sorted_arr1[i])\n i = i + 1\n else:\n result.append(sorted_arr2[j])\n j = j + 1\n if i == len(sorted_arr1):\n for item in sorted_arr2[j:]:\n result.append(item)\n else:\n for item in sorted_arr1[i:]:\n result.append(item)\n return result\n\n\ntest_arr = [3, 4, 1, 6, 30, 
5]\nprint('\\nMerge Sort')\ntest_arr_merge_sorted = merge_sort(test_arr)\nprint(test_arr_merge_sorted)\n\n\ndef quick_sort(li, start, end):\n if start >= end:\n return\n left = start\n right = end\n mid = li[left]\n while left < right:\n while left < right and li[right] >= mid:\n right -= 1\n li[left] = li[right]\n while left < right and li[left] < mid:\n left += 1\n li[right] = li[left]\n li[left] = mid\n quick_sort(li, start, left - 1)\n quick_sort(li, left + 1, end)\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\nprint('\\nQuick Sort')\nquick_sort(test_arr, 0, len(test_arr) - 1)\nprint(test_arr)\n", "step-5": "# -*- coding: utf-8 -*-\nimport math\n\n\n# 冒泡排序(Bubble Sort)\n# 比较相邻的元素。如果第一个比第二个大,就交换它们两个;\n# 对每一对相邻元素作同样的工作,从开始第一对到结尾的最后一对,这样在最后的元素应该会是最大的数;\n# 针对所有的元素重复以上的步骤,除了最后一个;\n# 重复步骤1~3,直到排序完成。\n# 冒泡排序总的平均时间复杂度为:O(n^2)\n\n\ndef bubble_sort(input):\n print(\"\\nBubble Sort\")\n input_len = len(input)\n print(\"length of input: %d\" % input_len)\n for i in range(0, input_len):\n for j in range(0, input_len - 1 - i):\n if input[j] > input[j + 1]:\n tmp = input[j + 1]\n input[j + 1] = input[j]\n input[j] = tmp\n return input\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\ntest_arr_bubble_sorted = bubble_sort(test_arr)\nprint(test_arr_bubble_sorted)\n\n\n# 选择排序(Selection-sort)\n# 选择排序(Selection-sort)是一种简单直观的排序算法。它的工作原理:首先在未排序序列中找到最小(大)元素,存放到排序序列的起始位置,\n# 然后,再从剩余未排序元素中继续寻找最小(大)元素,然后放到已排序序列的末尾。以此类推,直到所有元素均排序完毕。\n# 选择排序总的平均时间复杂度为:O(n^2)\n\n\ndef select_sort(input):\n print(\"\\nSelect Sort\")\n input_len = len(input)\n for i in range(0, input_len):\n min_index = i\n for j in range(i + 1, input_len):\n if input[j] < input[min_index]:\n min_index = j\n\n tmp = input[i]\n input[i] = input[min_index]\n input[min_index] = tmp\n return input\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\ntest_arr_select_sorted = select_sort(test_arr)\nprint(test_arr_select_sorted)\n\n\n# 插入排序(Insertion Sort)\n# 插入排序(Insertion-Sort)的算法描述是一种简单直观的排序算法。它的工作原理是通过构建有序序列,对于未排序数据,\n# 在已排序序列中从后向前扫描,找到相应位置并插入。\n\n# 归并排序(Merge 
Sort)\n# 首先归并排序使用了二分法,归根到底的思想还是分而治之。拿到一个长数组,将其不停的分为左边和右边两份,然后以此递归分下去。\n# 然后再将她们按照两个有序数组的样子合并起来。\n# 归并排序时间复杂度是o(nlogn)\n\n\ndef merge_sort(input):\n input_len = len(input)\n if input_len <= 1:\n return input\n mid = math.floor(input_len / 2)\n left = merge_sort(input[:mid])\n right = merge_sort(input[mid:])\n return merge(left, right)\n\n\ndef merge(sorted_arr1, sorted_arr2):\n result = []\n i = j = 0\n while i < len(sorted_arr1) and j < len(sorted_arr2):\n if sorted_arr1[i] < sorted_arr2[j]:\n result.append(sorted_arr1[i])\n i = i + 1\n else:\n result.append(sorted_arr2[j])\n j = j + 1\n\n if i == len(sorted_arr1):\n for item in sorted_arr2[j:]:\n result.append(item)\n else:\n for item in sorted_arr1[i:]:\n result.append(item)\n return result\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\nprint(\"\\nMerge Sort\")\ntest_arr_merge_sorted = merge_sort(test_arr)\nprint(test_arr_merge_sorted)\n\n\n# 快速排序(Quick Sort)\n# 快速排序使用分治法来把一个串(list)分为两个子串(sub-lists)。具体算法描述如下:\n#\n# 从数列中挑出一个元素,称为 “基准”(pivot);\n# 重新排序数列,所有元素比基准值小的摆放在基准前面,所有元素比基准值大的摆在基准的后面(相同的数可以到任一边)。\n# 在这个分区退出之后,该基准就处于数列的中间位置。这个称为分区(partition)操作;\n# 递归地(recursive)把小于基准值元素的子数列和大于基准值元素的子数列排序。\n# 快速排序时间复杂度是o(nlogn)\n\n\ndef quick_sort(li, start, end):\n # 分治 一分为二\n # start=end ,证明要处理的数据只有一个\n # start>end ,证明右边没有数据\n if start >= end:\n return\n # 定义两个游标,分别指向0和末尾位置\n left = start\n right = end\n # 把0位置的数据,认为是中间值\n mid = li[left]\n while left < right:\n # 让右边游标往左移动,目的是找到小于mid的值,放到left游标位置\n while left < right and li[right] >= mid:\n right -= 1\n li[left] = li[right]\n # 让左边游标往右移动,目的是找到大于mid的值,放到right游标位置\n while left < right and li[left] < mid:\n left += 1\n li[right] = li[left]\n # while结束后,把mid放到中间位置,left=right\n li[left] = mid\n # 递归处理左边的数据\n quick_sort(li, start, left-1)\n # 递归处理右边的数据\n quick_sort(li, left+1, end)\n\n\ntest_arr = [3, 4, 1, 6, 30, 5]\nprint(\"\\nQuick Sort\")\nquick_sort(test_arr, 0, len(test_arr)-1)\nprint(test_arr)\n", "step-ids": [ 1, 6, 7, 8, 9 ] }
[ 1, 6, 7, 8, 9 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def eval_loop(): line = input('Please enter a sting') while True: if line == 'done': break else: output = eval(line) print(output) line = input('Please enter a sting') <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def eval_loop(): line = input('Please enter a sting') while True: if line == 'done': break else: output = eval(line) print(output) line = input('Please enter a sting') eval_loop() <|reserved_special_token_1|> from math import * def eval_loop(): line = input('Please enter a sting') while True: if line == 'done': break else: output = eval(line) print(output) line = input('Please enter a sting') eval_loop()
flexible
{ "blob_id": "b0062dde448c450131f578a2afe130ca663f0902", "index": 2041, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef eval_loop():\n line = input('Please enter a sting')\n while True:\n if line == 'done':\n break\n else:\n output = eval(line)\n print(output)\n line = input('Please enter a sting')\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef eval_loop():\n line = input('Please enter a sting')\n while True:\n if line == 'done':\n break\n else:\n output = eval(line)\n print(output)\n line = input('Please enter a sting')\n\n\neval_loop()\n", "step-4": "from math import *\n\n\ndef eval_loop():\n line = input('Please enter a sting')\n while True:\n if line == 'done':\n break\n else:\n output = eval(line)\n print(output)\n line = input('Please enter a sting')\n\n\neval_loop()\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2015-2016 Applatix, Inc. All rights reserved. # ''' cAdvisor CLI. Used by axstats temporarily before moving to Heapster ''' import requests import logging import time logger = logging.getLogger(__name__) CHECK_LIVELINESS_INTERVAL = 5 CONNECTION_TIMEOUT = 5 class AXCadvisorClient(object): def __init__(self, ip): self._wait_interval = 60 # Using Kubernetes default cadvisor port self._url_prefix = "http://{ip}:{port}/api/v2.0/".format(ip=ip, port=4194) self.wait_for_cadvisor_up() def wait_for_cadvisor_up(self): """ Poll cadvisor endpoint till there is a response. Note it was calling /api/v2.0/version before, but this api in Kubernetes returns empty string :param url: :return: """ ping = None while ping is None: ping = requests.get(self._url_prefix, timeout=CONNECTION_TIMEOUT) if ping is None: logger.debug("Unable to connect to cadvisor %s. Will sleep for %s sec", self._url_prefix, CHECK_LIVELINESS_INTERVAL) time.sleep(CHECK_LIVELINESS_INTERVAL) logger.info("cAdvisor client is up for endpoint %s", self._url_prefix) def get_machine_info(self): url = self._url_prefix + "machine" return self._get_response(url) def get_spec_info(self): url = self._url_prefix + "spec" data = { "recursive": "true" } return self._get_response(url, data) def get_events(self, event_start): url = self._url_prefix + "events" data = { "all_events": "true", "subcontainers": "true", "start_time": event_start } return self._get_response(url, data) def get_docker_stats(self): url = self._url_prefix + "stats" data = { "recursive": "true", "count": str(self._wait_interval) } return self._get_response(url, data) @staticmethod def _get_response(url, params=None): out = None try: response = requests.get(url=url, params=params, timeout=CONNECTION_TIMEOUT) if response.status_code == requests.codes.ok: out = response.json() except requests.exceptions.RequestException as e: logger.error('Unexpected exception occurred during request: %s', e) return out
normal
{ "blob_id": "87f672919f6019e549508b239c798301d5f549bd", "index": 7667, "step-1": "<mask token>\n\n\nclass AXCadvisorClient(object):\n\n def __init__(self, ip):\n self._wait_interval = 60\n self._url_prefix = 'http://{ip}:{port}/api/v2.0/'.format(ip=ip,\n port=4194)\n self.wait_for_cadvisor_up()\n\n def wait_for_cadvisor_up(self):\n \"\"\"\n Poll cadvisor endpoint till there is a response.\n Note it was calling /api/v2.0/version before, but this api in Kubernetes returns empty string\n :param url:\n :return:\n \"\"\"\n ping = None\n while ping is None:\n ping = requests.get(self._url_prefix, timeout=CONNECTION_TIMEOUT)\n if ping is None:\n logger.debug(\n 'Unable to connect to cadvisor %s. Will sleep for %s sec',\n self._url_prefix, CHECK_LIVELINESS_INTERVAL)\n time.sleep(CHECK_LIVELINESS_INTERVAL)\n logger.info('cAdvisor client is up for endpoint %s', self._url_prefix)\n <mask token>\n\n def get_spec_info(self):\n url = self._url_prefix + 'spec'\n data = {'recursive': 'true'}\n return self._get_response(url, data)\n\n def get_events(self, event_start):\n url = self._url_prefix + 'events'\n data = {'all_events': 'true', 'subcontainers': 'true', 'start_time':\n event_start}\n return self._get_response(url, data)\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass AXCadvisorClient(object):\n\n def __init__(self, ip):\n self._wait_interval = 60\n self._url_prefix = 'http://{ip}:{port}/api/v2.0/'.format(ip=ip,\n port=4194)\n self.wait_for_cadvisor_up()\n\n def wait_for_cadvisor_up(self):\n \"\"\"\n Poll cadvisor endpoint till there is a response.\n Note it was calling /api/v2.0/version before, but this api in Kubernetes returns empty string\n :param url:\n :return:\n \"\"\"\n ping = None\n while ping is None:\n ping = requests.get(self._url_prefix, timeout=CONNECTION_TIMEOUT)\n if ping is None:\n logger.debug(\n 'Unable to connect to cadvisor %s. 
Will sleep for %s sec',\n self._url_prefix, CHECK_LIVELINESS_INTERVAL)\n time.sleep(CHECK_LIVELINESS_INTERVAL)\n logger.info('cAdvisor client is up for endpoint %s', self._url_prefix)\n\n def get_machine_info(self):\n url = self._url_prefix + 'machine'\n return self._get_response(url)\n\n def get_spec_info(self):\n url = self._url_prefix + 'spec'\n data = {'recursive': 'true'}\n return self._get_response(url, data)\n\n def get_events(self, event_start):\n url = self._url_prefix + 'events'\n data = {'all_events': 'true', 'subcontainers': 'true', 'start_time':\n event_start}\n return self._get_response(url, data)\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass AXCadvisorClient(object):\n\n def __init__(self, ip):\n self._wait_interval = 60\n self._url_prefix = 'http://{ip}:{port}/api/v2.0/'.format(ip=ip,\n port=4194)\n self.wait_for_cadvisor_up()\n\n def wait_for_cadvisor_up(self):\n \"\"\"\n Poll cadvisor endpoint till there is a response.\n Note it was calling /api/v2.0/version before, but this api in Kubernetes returns empty string\n :param url:\n :return:\n \"\"\"\n ping = None\n while ping is None:\n ping = requests.get(self._url_prefix, timeout=CONNECTION_TIMEOUT)\n if ping is None:\n logger.debug(\n 'Unable to connect to cadvisor %s. 
Will sleep for %s sec',\n self._url_prefix, CHECK_LIVELINESS_INTERVAL)\n time.sleep(CHECK_LIVELINESS_INTERVAL)\n logger.info('cAdvisor client is up for endpoint %s', self._url_prefix)\n\n def get_machine_info(self):\n url = self._url_prefix + 'machine'\n return self._get_response(url)\n\n def get_spec_info(self):\n url = self._url_prefix + 'spec'\n data = {'recursive': 'true'}\n return self._get_response(url, data)\n\n def get_events(self, event_start):\n url = self._url_prefix + 'events'\n data = {'all_events': 'true', 'subcontainers': 'true', 'start_time':\n event_start}\n return self._get_response(url, data)\n\n def get_docker_stats(self):\n url = self._url_prefix + 'stats'\n data = {'recursive': 'true', 'count': str(self._wait_interval)}\n return self._get_response(url, data)\n <mask token>\n", "step-4": "<mask token>\n\n\nclass AXCadvisorClient(object):\n\n def __init__(self, ip):\n self._wait_interval = 60\n self._url_prefix = 'http://{ip}:{port}/api/v2.0/'.format(ip=ip,\n port=4194)\n self.wait_for_cadvisor_up()\n\n def wait_for_cadvisor_up(self):\n \"\"\"\n Poll cadvisor endpoint till there is a response.\n Note it was calling /api/v2.0/version before, but this api in Kubernetes returns empty string\n :param url:\n :return:\n \"\"\"\n ping = None\n while ping is None:\n ping = requests.get(self._url_prefix, timeout=CONNECTION_TIMEOUT)\n if ping is None:\n logger.debug(\n 'Unable to connect to cadvisor %s. 
Will sleep for %s sec',\n self._url_prefix, CHECK_LIVELINESS_INTERVAL)\n time.sleep(CHECK_LIVELINESS_INTERVAL)\n logger.info('cAdvisor client is up for endpoint %s', self._url_prefix)\n\n def get_machine_info(self):\n url = self._url_prefix + 'machine'\n return self._get_response(url)\n\n def get_spec_info(self):\n url = self._url_prefix + 'spec'\n data = {'recursive': 'true'}\n return self._get_response(url, data)\n\n def get_events(self, event_start):\n url = self._url_prefix + 'events'\n data = {'all_events': 'true', 'subcontainers': 'true', 'start_time':\n event_start}\n return self._get_response(url, data)\n\n def get_docker_stats(self):\n url = self._url_prefix + 'stats'\n data = {'recursive': 'true', 'count': str(self._wait_interval)}\n return self._get_response(url, data)\n\n @staticmethod\n def _get_response(url, params=None):\n out = None\n try:\n response = requests.get(url=url, params=params, timeout=\n CONNECTION_TIMEOUT)\n if response.status_code == requests.codes.ok:\n out = response.json()\n except requests.exceptions.RequestException as e:\n logger.error('Unexpected exception occurred during request: %s', e)\n return out\n", "step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n#\n# Copyright 2015-2016 Applatix, Inc. All rights reserved.\n#\n\n'''\ncAdvisor CLI. 
Used by axstats temporarily before moving to Heapster\n'''\n\nimport requests\nimport logging\nimport time\n\nlogger = logging.getLogger(__name__)\nCHECK_LIVELINESS_INTERVAL = 5\nCONNECTION_TIMEOUT = 5\n\n\nclass AXCadvisorClient(object):\n def __init__(self, ip):\n self._wait_interval = 60\n\n # Using Kubernetes default cadvisor port\n self._url_prefix = \"http://{ip}:{port}/api/v2.0/\".format(ip=ip, port=4194)\n self.wait_for_cadvisor_up()\n\n def wait_for_cadvisor_up(self):\n \"\"\"\n Poll cadvisor endpoint till there is a response.\n Note it was calling /api/v2.0/version before, but this api in Kubernetes returns empty string\n :param url:\n :return:\n \"\"\"\n ping = None\n while ping is None:\n ping = requests.get(self._url_prefix, timeout=CONNECTION_TIMEOUT)\n if ping is None:\n logger.debug(\"Unable to connect to cadvisor %s. Will sleep for %s sec\",\n self._url_prefix, CHECK_LIVELINESS_INTERVAL)\n time.sleep(CHECK_LIVELINESS_INTERVAL)\n logger.info(\"cAdvisor client is up for endpoint %s\", self._url_prefix)\n\n def get_machine_info(self):\n url = self._url_prefix + \"machine\"\n return self._get_response(url)\n\n def get_spec_info(self):\n url = self._url_prefix + \"spec\"\n data = {\n \"recursive\": \"true\"\n }\n return self._get_response(url, data)\n\n def get_events(self, event_start):\n url = self._url_prefix + \"events\"\n data = {\n \"all_events\": \"true\",\n \"subcontainers\": \"true\",\n \"start_time\": event_start\n }\n return self._get_response(url, data)\n\n def get_docker_stats(self):\n url = self._url_prefix + \"stats\"\n data = {\n \"recursive\": \"true\",\n \"count\": str(self._wait_interval)\n }\n return self._get_response(url, data)\n\n @staticmethod\n def _get_response(url, params=None):\n out = None\n try:\n response = requests.get(url=url, params=params, timeout=CONNECTION_TIMEOUT)\n if response.status_code == requests.codes.ok:\n out = response.json()\n except requests.exceptions.RequestException as e:\n logger.error('Unexpected 
exception occurred during request: %s', e)\n return out\n", "step-ids": [ 5, 6, 7, 8, 11 ] }
[ 5, 6, 7, 8, 11 ]
<|reserved_special_token_0|> class SeriesListSerializer(serializers.ModelSerializer): class Meta: model = Serie fields = 'name', class CatalogCoinListSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = ('id', 'face_value', 'currency', 'country', 'year', 'theme', 'mint', 'serie', 'collection', 'exchange', 'wishlist') serie = serializers.SlugRelatedField(slug_field='name', read_only=True) collection = serializers.IntegerField(read_only=True) exchange = serializers.IntegerField(read_only=True) wishlist = serializers.IntegerField(read_only=True) class CatalogCoinSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = '__all__' class CoinListSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = 'id', 'catalog_coin', 'owner', 'status' catalog_coin = CatalogCoinListSerializer() class CoinSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = '__all__' <|reserved_special_token_1|> <|reserved_special_token_0|> class CountriesListSerializer(serializers.ModelSerializer): class Meta: model = Country fields = 'name', 'flag' class SeriesListSerializer(serializers.ModelSerializer): class Meta: model = Serie fields = 'name', class CatalogCoinListSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = ('id', 'face_value', 'currency', 'country', 'year', 'theme', 'mint', 'serie', 'collection', 'exchange', 'wishlist') serie = serializers.SlugRelatedField(slug_field='name', read_only=True) collection = serializers.IntegerField(read_only=True) exchange = serializers.IntegerField(read_only=True) wishlist = serializers.IntegerField(read_only=True) class CatalogCoinSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = '__all__' class CoinListSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = 'id', 'catalog_coin', 'owner', 'status' catalog_coin = CatalogCoinListSerializer() class 
CoinSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = '__all__' <|reserved_special_token_1|> <|reserved_special_token_0|> __all__ = ('CatalogCoinListSerializer', 'CatalogCoinSerializer', 'SeriesListSerializer', 'CoinListSerializer', 'CoinSerializer', 'CountriesListSerializer') class CountriesListSerializer(serializers.ModelSerializer): class Meta: model = Country fields = 'name', 'flag' class SeriesListSerializer(serializers.ModelSerializer): class Meta: model = Serie fields = 'name', class CatalogCoinListSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = ('id', 'face_value', 'currency', 'country', 'year', 'theme', 'mint', 'serie', 'collection', 'exchange', 'wishlist') serie = serializers.SlugRelatedField(slug_field='name', read_only=True) collection = serializers.IntegerField(read_only=True) exchange = serializers.IntegerField(read_only=True) wishlist = serializers.IntegerField(read_only=True) class CatalogCoinSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = '__all__' class CoinListSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = 'id', 'catalog_coin', 'owner', 'status' catalog_coin = CatalogCoinListSerializer() class CoinSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = '__all__' <|reserved_special_token_1|> from rest_framework import serializers from .models import * __all__ = ('CatalogCoinListSerializer', 'CatalogCoinSerializer', 'SeriesListSerializer', 'CoinListSerializer', 'CoinSerializer', 'CountriesListSerializer') class CountriesListSerializer(serializers.ModelSerializer): class Meta: model = Country fields = 'name', 'flag' class SeriesListSerializer(serializers.ModelSerializer): class Meta: model = Serie fields = 'name', class CatalogCoinListSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = ('id', 'face_value', 'currency', 'country', 'year', 'theme', 'mint', 'serie', 'collection', 
'exchange', 'wishlist') serie = serializers.SlugRelatedField(slug_field='name', read_only=True) collection = serializers.IntegerField(read_only=True) exchange = serializers.IntegerField(read_only=True) wishlist = serializers.IntegerField(read_only=True) class CatalogCoinSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = '__all__' class CoinListSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = 'id', 'catalog_coin', 'owner', 'status' catalog_coin = CatalogCoinListSerializer() class CoinSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = '__all__' <|reserved_special_token_1|> from rest_framework import serializers from .models import * __all__ = ( 'CatalogCoinListSerializer', 'CatalogCoinSerializer', 'SeriesListSerializer', 'CoinListSerializer', 'CoinSerializer', 'CountriesListSerializer', ) class CountriesListSerializer(serializers.ModelSerializer): class Meta: model = Country fields = ('name', 'flag',) class SeriesListSerializer(serializers.ModelSerializer): class Meta: model = Serie fields = ('name',) class CatalogCoinListSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = ( 'id', 'face_value', 'currency', 'country', 'year', 'theme', 'mint', 'serie', 'collection', 'exchange', 'wishlist', ) serie = serializers.SlugRelatedField(slug_field='name', read_only=True) collection = serializers.IntegerField(read_only=True) exchange = serializers.IntegerField(read_only=True) wishlist = serializers.IntegerField(read_only=True) class CatalogCoinSerializer(serializers.ModelSerializer): class Meta: model = CatalogCoin fields = '__all__' class CoinListSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = ('id', 'catalog_coin', 'owner', 'status',) catalog_coin = CatalogCoinListSerializer() class CoinSerializer(serializers.ModelSerializer): class Meta: model = Coin fields = '__all__'
flexible
{ "blob_id": "b77da75b01e96ff89f873f4c5764a62cf68cd576", "index": 217, "step-1": "<mask token>\n\n\nclass SeriesListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Serie\n fields = 'name',\n\n\nclass CatalogCoinListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = CatalogCoin\n fields = ('id', 'face_value', 'currency', 'country', 'year',\n 'theme', 'mint', 'serie', 'collection', 'exchange', 'wishlist')\n serie = serializers.SlugRelatedField(slug_field='name', read_only=True)\n collection = serializers.IntegerField(read_only=True)\n exchange = serializers.IntegerField(read_only=True)\n wishlist = serializers.IntegerField(read_only=True)\n\n\nclass CatalogCoinSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = CatalogCoin\n fields = '__all__'\n\n\nclass CoinListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Coin\n fields = 'id', 'catalog_coin', 'owner', 'status'\n catalog_coin = CatalogCoinListSerializer()\n\n\nclass CoinSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Coin\n fields = '__all__'\n", "step-2": "<mask token>\n\n\nclass CountriesListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Country\n fields = 'name', 'flag'\n\n\nclass SeriesListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Serie\n fields = 'name',\n\n\nclass CatalogCoinListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = CatalogCoin\n fields = ('id', 'face_value', 'currency', 'country', 'year',\n 'theme', 'mint', 'serie', 'collection', 'exchange', 'wishlist')\n serie = serializers.SlugRelatedField(slug_field='name', read_only=True)\n collection = serializers.IntegerField(read_only=True)\n exchange = serializers.IntegerField(read_only=True)\n wishlist = serializers.IntegerField(read_only=True)\n\n\nclass CatalogCoinSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = CatalogCoin\n fields = '__all__'\n\n\nclass 
CoinListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Coin\n fields = 'id', 'catalog_coin', 'owner', 'status'\n catalog_coin = CatalogCoinListSerializer()\n\n\nclass CoinSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Coin\n fields = '__all__'\n", "step-3": "<mask token>\n__all__ = ('CatalogCoinListSerializer', 'CatalogCoinSerializer',\n 'SeriesListSerializer', 'CoinListSerializer', 'CoinSerializer',\n 'CountriesListSerializer')\n\n\nclass CountriesListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Country\n fields = 'name', 'flag'\n\n\nclass SeriesListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Serie\n fields = 'name',\n\n\nclass CatalogCoinListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = CatalogCoin\n fields = ('id', 'face_value', 'currency', 'country', 'year',\n 'theme', 'mint', 'serie', 'collection', 'exchange', 'wishlist')\n serie = serializers.SlugRelatedField(slug_field='name', read_only=True)\n collection = serializers.IntegerField(read_only=True)\n exchange = serializers.IntegerField(read_only=True)\n wishlist = serializers.IntegerField(read_only=True)\n\n\nclass CatalogCoinSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = CatalogCoin\n fields = '__all__'\n\n\nclass CoinListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Coin\n fields = 'id', 'catalog_coin', 'owner', 'status'\n catalog_coin = CatalogCoinListSerializer()\n\n\nclass CoinSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Coin\n fields = '__all__'\n", "step-4": "from rest_framework import serializers\nfrom .models import *\n__all__ = ('CatalogCoinListSerializer', 'CatalogCoinSerializer',\n 'SeriesListSerializer', 'CoinListSerializer', 'CoinSerializer',\n 'CountriesListSerializer')\n\n\nclass CountriesListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Country\n fields = 'name', 'flag'\n\n\nclass 
SeriesListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Serie\n fields = 'name',\n\n\nclass CatalogCoinListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = CatalogCoin\n fields = ('id', 'face_value', 'currency', 'country', 'year',\n 'theme', 'mint', 'serie', 'collection', 'exchange', 'wishlist')\n serie = serializers.SlugRelatedField(slug_field='name', read_only=True)\n collection = serializers.IntegerField(read_only=True)\n exchange = serializers.IntegerField(read_only=True)\n wishlist = serializers.IntegerField(read_only=True)\n\n\nclass CatalogCoinSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = CatalogCoin\n fields = '__all__'\n\n\nclass CoinListSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Coin\n fields = 'id', 'catalog_coin', 'owner', 'status'\n catalog_coin = CatalogCoinListSerializer()\n\n\nclass CoinSerializer(serializers.ModelSerializer):\n\n\n class Meta:\n model = Coin\n fields = '__all__'\n", "step-5": "from rest_framework import serializers\n\nfrom .models import *\n\n__all__ = (\n 'CatalogCoinListSerializer', 'CatalogCoinSerializer', 'SeriesListSerializer', 'CoinListSerializer',\n 'CoinSerializer', 'CountriesListSerializer',\n)\n\n\nclass CountriesListSerializer(serializers.ModelSerializer):\n class Meta:\n model = Country\n fields = ('name', 'flag',)\n\n\nclass SeriesListSerializer(serializers.ModelSerializer):\n class Meta:\n model = Serie\n fields = ('name',)\n\n\nclass CatalogCoinListSerializer(serializers.ModelSerializer):\n class Meta:\n model = CatalogCoin\n fields = (\n 'id', 'face_value', 'currency', 'country', 'year', 'theme', 'mint', 'serie', 'collection', 'exchange',\n 'wishlist',\n )\n\n serie = serializers.SlugRelatedField(slug_field='name', read_only=True)\n collection = serializers.IntegerField(read_only=True)\n exchange = serializers.IntegerField(read_only=True)\n wishlist = serializers.IntegerField(read_only=True)\n\n\nclass 
CatalogCoinSerializer(serializers.ModelSerializer):\n class Meta:\n model = CatalogCoin\n fields = '__all__'\n\n\nclass CoinListSerializer(serializers.ModelSerializer):\n class Meta:\n model = Coin\n fields = ('id', 'catalog_coin', 'owner', 'status',)\n\n catalog_coin = CatalogCoinListSerializer()\n\n\nclass CoinSerializer(serializers.ModelSerializer):\n class Meta:\n model = Coin\n fields = '__all__'\n", "step-ids": [ 7, 8, 9, 10, 11 ] }
[ 7, 8, 9, 10, 11 ]
#Program to convert temp in degree Celsius to temp in degree Fahrenheit celsius=input("Enter temperature in Celsius") celsius=int(celsius) fah=(celsius*9/5)+32 print("Temp in ",celsius,"celsius=",fah," Fahrenheit")
normal
{ "blob_id": "e1172cadeb8b2ce036d8431cef78cfe19bda0cb8", "index": 2161, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('Temp in ', celsius, 'celsius=', fah, ' Fahrenheit')\n", "step-3": "celsius = input('Enter temperature in Celsius')\ncelsius = int(celsius)\nfah = celsius * 9 / 5 + 32\nprint('Temp in ', celsius, 'celsius=', fah, ' Fahrenheit')\n", "step-4": "#Program to convert temp in degree Celsius to temp in degree Fahrenheit\ncelsius=input(\"Enter temperature in Celsius\")\ncelsius=int(celsius)\nfah=(celsius*9/5)+32\nprint(\"Temp in \",celsius,\"celsius=\",fah,\" Fahrenheit\")\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
ulang = 'y' while True : a = int(input ("masukkan nilai = ")) if a > 60 : status = "LULUS" elif a <= 60 : status = "TIDAK LULUS" print(status) ulang = input("apakah anda ingin mengulang? y/n = ")
normal
{ "blob_id": "759b440bf436afbfb081cf55eeb4a0f075ed3e6d", "index": 9577, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile True:\n a = int(input('masukkan nilai = '))\n if a > 60:\n status = 'LULUS'\n elif a <= 60:\n status = 'TIDAK LULUS'\n print(status)\n ulang = input('apakah anda ingin mengulang? y/n = ')\n", "step-3": "ulang = 'y'\nwhile True:\n a = int(input('masukkan nilai = '))\n if a > 60:\n status = 'LULUS'\n elif a <= 60:\n status = 'TIDAK LULUS'\n print(status)\n ulang = input('apakah anda ingin mengulang? y/n = ')\n", "step-4": "ulang = 'y'\r\nwhile True :\r\n\ta = int(input (\"masukkan nilai = \"))\r\n\r\n\tif a > 60 :\r\n\t\tstatus = \"LULUS\"\r\n\telif a <= 60 :\r\n\t\tstatus = \"TIDAK LULUS\"\r\n\tprint(status)\r\n\r\n\tulang = input(\"apakah anda ingin mengulang? y/n = \")", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> def create_events_calendar(): """ Create an events calendar if none already exists. This function mostly exists for creating calendars for dev environments, not used in prod. """ service = get_calendar_service() if not service: return calendar = {'summary': 'Ting som skjer i Telemarkgruppa', 'timeZone': 'Europe/Oslo'} cal_insert_response = service.calendars().insert(body=calendar).execute() public_acl = {'role': 'reader', 'scope': {'type': 'default'}} acl_insert_response = service.acl().insert(calendarId= cal_insert_response['id'], body=public_acl).execute() return acl_insert_response def get_calendar_service(): name = 'calendar' version = 'v3' scope = 'https://www.googleapis.com/auth/calendar' if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'): _logger.info( 'Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY in settings.' ) return credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL, settings.GOOGLE_API_PRIVATE_KEY, scope) http = credentials.authorize(http=httplib2.Http()) service = discovery.build(name, version, http=http) return service @shared_task @log_errors def update_google_calendar_event(event_id): from .models import Event event = Event.objects.get(pk=event_id) if not event.google_calendar_id: _logger.info('Adding missing event to google calendar: %s', event.name) add_google_calender_event(event.id) return service = get_calendar_service() if not service: return payload = get_google_calendar_payload_for_event(event) results = service.events().update(calendarId=settings. 
GOOGLE_CALENDAR_ID, eventId=event.google_calendar_id, body=payload ).execute() _logger.info('Google calendar event for %s updated: %s', event.name, results) <|reserved_special_token_0|> def get_google_calendar_payload_for_event(event): return {'summary': event.name, 'location': event.location, 'description': event.summary, 'start': {'dateTime': event.startdate .isoformat(), 'timeZone': 'Europe/Oslo'}, 'end': {'dateTime': event .enddate.isoformat(), 'timeZone': 'Europe/Oslo'}} <|reserved_special_token_1|> <|reserved_special_token_0|> def create_events_calendar(): """ Create an events calendar if none already exists. This function mostly exists for creating calendars for dev environments, not used in prod. """ service = get_calendar_service() if not service: return calendar = {'summary': 'Ting som skjer i Telemarkgruppa', 'timeZone': 'Europe/Oslo'} cal_insert_response = service.calendars().insert(body=calendar).execute() public_acl = {'role': 'reader', 'scope': {'type': 'default'}} acl_insert_response = service.acl().insert(calendarId= cal_insert_response['id'], body=public_acl).execute() return acl_insert_response def get_calendar_service(): name = 'calendar' version = 'v3' scope = 'https://www.googleapis.com/auth/calendar' if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'): _logger.info( 'Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY in settings.' 
) return credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL, settings.GOOGLE_API_PRIVATE_KEY, scope) http = credentials.authorize(http=httplib2.Http()) service = discovery.build(name, version, http=http) return service @shared_task @log_errors def update_google_calendar_event(event_id): from .models import Event event = Event.objects.get(pk=event_id) if not event.google_calendar_id: _logger.info('Adding missing event to google calendar: %s', event.name) add_google_calender_event(event.id) return service = get_calendar_service() if not service: return payload = get_google_calendar_payload_for_event(event) results = service.events().update(calendarId=settings. GOOGLE_CALENDAR_ID, eventId=event.google_calendar_id, body=payload ).execute() _logger.info('Google calendar event for %s updated: %s', event.name, results) @shared_task @log_errors def add_google_calender_event(event_id): from .models import Event event = Event.objects.get(pk=event_id) if not event: _logger.warning('Could not find event to add to Google Calendar: %d', event_id) return google_payload = get_google_calendar_payload_for_event(event) service = get_calendar_service() if not service: return results = service.events().insert(calendarId=settings. 
GOOGLE_CALENDAR_ID, body=google_payload).execute() if results.get('id'): event.google_calendar_id = results['id'] event.save() _logger.info( "Google Calendar event for event '%s' created successfully", event.name) else: _logger.error( 'New Google Calendar event did not have id in response, was: %s', results) @shared_task @log_errors def delete_google_calendar_event(google_calendar_event_id): service = get_calendar_service() if not service: return result = service.events().delete(calendarId=settings.GOOGLE_CALENDAR_ID, eventId=google_calendar_event_id).execute() _logger.info('Google calendar event %s deleted: %s', google_calendar_event_id, result) def get_google_calendar_payload_for_event(event): return {'summary': event.name, 'location': event.location, 'description': event.summary, 'start': {'dateTime': event.startdate .isoformat(), 'timeZone': 'Europe/Oslo'}, 'end': {'dateTime': event .enddate.isoformat(), 'timeZone': 'Europe/Oslo'}} <|reserved_special_token_1|> <|reserved_special_token_0|> _logger = getLogger(__name__) def create_events_calendar(): """ Create an events calendar if none already exists. This function mostly exists for creating calendars for dev environments, not used in prod. """ service = get_calendar_service() if not service: return calendar = {'summary': 'Ting som skjer i Telemarkgruppa', 'timeZone': 'Europe/Oslo'} cal_insert_response = service.calendars().insert(body=calendar).execute() public_acl = {'role': 'reader', 'scope': {'type': 'default'}} acl_insert_response = service.acl().insert(calendarId= cal_insert_response['id'], body=public_acl).execute() return acl_insert_response def get_calendar_service(): name = 'calendar' version = 'v3' scope = 'https://www.googleapis.com/auth/calendar' if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'): _logger.info( 'Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY in settings.' 
) return credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL, settings.GOOGLE_API_PRIVATE_KEY, scope) http = credentials.authorize(http=httplib2.Http()) service = discovery.build(name, version, http=http) return service @shared_task @log_errors def update_google_calendar_event(event_id): from .models import Event event = Event.objects.get(pk=event_id) if not event.google_calendar_id: _logger.info('Adding missing event to google calendar: %s', event.name) add_google_calender_event(event.id) return service = get_calendar_service() if not service: return payload = get_google_calendar_payload_for_event(event) results = service.events().update(calendarId=settings. GOOGLE_CALENDAR_ID, eventId=event.google_calendar_id, body=payload ).execute() _logger.info('Google calendar event for %s updated: %s', event.name, results) @shared_task @log_errors def add_google_calender_event(event_id): from .models import Event event = Event.objects.get(pk=event_id) if not event: _logger.warning('Could not find event to add to Google Calendar: %d', event_id) return google_payload = get_google_calendar_payload_for_event(event) service = get_calendar_service() if not service: return results = service.events().insert(calendarId=settings. 
GOOGLE_CALENDAR_ID, body=google_payload).execute() if results.get('id'): event.google_calendar_id = results['id'] event.save() _logger.info( "Google Calendar event for event '%s' created successfully", event.name) else: _logger.error( 'New Google Calendar event did not have id in response, was: %s', results) @shared_task @log_errors def delete_google_calendar_event(google_calendar_event_id): service = get_calendar_service() if not service: return result = service.events().delete(calendarId=settings.GOOGLE_CALENDAR_ID, eventId=google_calendar_event_id).execute() _logger.info('Google calendar event %s deleted: %s', google_calendar_event_id, result) def get_google_calendar_payload_for_event(event): return {'summary': event.name, 'location': event.location, 'description': event.summary, 'start': {'dateTime': event.startdate .isoformat(), 'timeZone': 'Europe/Oslo'}, 'end': {'dateTime': event .enddate.isoformat(), 'timeZone': 'Europe/Oslo'}} <|reserved_special_token_1|> from ..general.utils import log_errors from googleapiclient import discovery from oauth2client.client import SignedJwtAssertionCredentials from django.conf import settings from celery import shared_task from logging import getLogger import httplib2 _logger = getLogger(__name__) def create_events_calendar(): """ Create an events calendar if none already exists. This function mostly exists for creating calendars for dev environments, not used in prod. 
""" service = get_calendar_service() if not service: return calendar = {'summary': 'Ting som skjer i Telemarkgruppa', 'timeZone': 'Europe/Oslo'} cal_insert_response = service.calendars().insert(body=calendar).execute() public_acl = {'role': 'reader', 'scope': {'type': 'default'}} acl_insert_response = service.acl().insert(calendarId= cal_insert_response['id'], body=public_acl).execute() return acl_insert_response def get_calendar_service(): name = 'calendar' version = 'v3' scope = 'https://www.googleapis.com/auth/calendar' if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'): _logger.info( 'Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY in settings.' ) return credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL, settings.GOOGLE_API_PRIVATE_KEY, scope) http = credentials.authorize(http=httplib2.Http()) service = discovery.build(name, version, http=http) return service @shared_task @log_errors def update_google_calendar_event(event_id): from .models import Event event = Event.objects.get(pk=event_id) if not event.google_calendar_id: _logger.info('Adding missing event to google calendar: %s', event.name) add_google_calender_event(event.id) return service = get_calendar_service() if not service: return payload = get_google_calendar_payload_for_event(event) results = service.events().update(calendarId=settings. GOOGLE_CALENDAR_ID, eventId=event.google_calendar_id, body=payload ).execute() _logger.info('Google calendar event for %s updated: %s', event.name, results) @shared_task @log_errors def add_google_calender_event(event_id): from .models import Event event = Event.objects.get(pk=event_id) if not event: _logger.warning('Could not find event to add to Google Calendar: %d', event_id) return google_payload = get_google_calendar_payload_for_event(event) service = get_calendar_service() if not service: return results = service.events().insert(calendarId=settings. 
GOOGLE_CALENDAR_ID, body=google_payload).execute() if results.get('id'): event.google_calendar_id = results['id'] event.save() _logger.info( "Google Calendar event for event '%s' created successfully", event.name) else: _logger.error( 'New Google Calendar event did not have id in response, was: %s', results) @shared_task @log_errors def delete_google_calendar_event(google_calendar_event_id): service = get_calendar_service() if not service: return result = service.events().delete(calendarId=settings.GOOGLE_CALENDAR_ID, eventId=google_calendar_event_id).execute() _logger.info('Google calendar event %s deleted: %s', google_calendar_event_id, result) def get_google_calendar_payload_for_event(event): return {'summary': event.name, 'location': event.location, 'description': event.summary, 'start': {'dateTime': event.startdate .isoformat(), 'timeZone': 'Europe/Oslo'}, 'end': {'dateTime': event .enddate.isoformat(), 'timeZone': 'Europe/Oslo'}} <|reserved_special_token_1|> # -*- coding: utf-8 -*- from ..general.utils import log_errors from googleapiclient import discovery from oauth2client.client import SignedJwtAssertionCredentials from django.conf import settings from celery import shared_task from logging import getLogger import httplib2 _logger = getLogger(__name__) def create_events_calendar(): """ Create an events calendar if none already exists. This function mostly exists for creating calendars for dev environments, not used in prod. 
""" service = get_calendar_service() if not service: return calendar = { 'summary': 'Ting som skjer i Telemarkgruppa', 'timeZone': 'Europe/Oslo', } cal_insert_response = service.calendars().insert(body=calendar).execute() public_acl = { 'role': 'reader', 'scope': { 'type': 'default' } } acl_insert_response = service.acl().insert(calendarId=cal_insert_response['id'], body=public_acl).execute() return acl_insert_response def get_calendar_service(): name = 'calendar' version = 'v3' scope = 'https://www.googleapis.com/auth/calendar' # Provide a mock fallback for test environments where real interaction with # Google calendar is not needed if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'): _logger.info('Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY ' 'in settings.') return # Prepare credentials, and authorize HTTP object with them. credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL, settings.GOOGLE_API_PRIVATE_KEY, scope) http = credentials.authorize(http=httplib2.Http()) # Construct a service object via the discovery service. service = discovery.build(name, version, http=http) return service @shared_task @log_errors def update_google_calendar_event(event_id): from .models import Event event = Event.objects.get(pk=event_id) # If the event doesn't already exist on google calendar, create it if not event.google_calendar_id: _logger.info('Adding missing event to google calendar: %s', event.name) add_google_calender_event(event.id) return # Authenticate and construct service. 
service = get_calendar_service() if not service: return payload = get_google_calendar_payload_for_event(event) results = service.events().update(calendarId=settings.GOOGLE_CALENDAR_ID, eventId=event.google_calendar_id, body=payload).execute() _logger.info('Google calendar event for %s updated: %s', event.name, results) @shared_task @log_errors def add_google_calender_event(event_id): from .models import Event event = Event.objects.get(pk=event_id) if not event: _logger.warning('Could not find event to add to Google Calendar: %d', event_id) return google_payload = get_google_calendar_payload_for_event(event) service = get_calendar_service() if not service: return results = service.events().insert(calendarId=settings.GOOGLE_CALENDAR_ID, body=google_payload).execute() if results.get('id'): event.google_calendar_id = results['id'] event.save() _logger.info("Google Calendar event for event '%s' created successfully", event.name) else: _logger.error("New Google Calendar event did not have id in response, was: %s", results) @shared_task @log_errors def delete_google_calendar_event(google_calendar_event_id): service = get_calendar_service() if not service: return result = service.events().delete(calendarId=settings.GOOGLE_CALENDAR_ID, eventId=google_calendar_event_id).execute() _logger.info('Google calendar event %s deleted: %s', google_calendar_event_id, result) def get_google_calendar_payload_for_event(event): return { 'summary': event.name, 'location': event.location, 'description': event.summary, 'start': { 'dateTime': event.startdate.isoformat(), 'timeZone': 'Europe/Oslo', }, 'end': { 'dateTime': event.enddate.isoformat(), 'timeZone': 'Europe/Oslo', } }
flexible
{ "blob_id": "36fb0d936be5c5d305c4076fd1c497664c9b770a", "index": 8374, "step-1": "<mask token>\n\n\ndef create_events_calendar():\n \"\"\" Create an events calendar if none already exists. This function mostly exists for\n creating calendars for dev environments, not used in prod.\n \"\"\"\n service = get_calendar_service()\n if not service:\n return\n calendar = {'summary': 'Ting som skjer i Telemarkgruppa', 'timeZone':\n 'Europe/Oslo'}\n cal_insert_response = service.calendars().insert(body=calendar).execute()\n public_acl = {'role': 'reader', 'scope': {'type': 'default'}}\n acl_insert_response = service.acl().insert(calendarId=\n cal_insert_response['id'], body=public_acl).execute()\n return acl_insert_response\n\n\ndef get_calendar_service():\n name = 'calendar'\n version = 'v3'\n scope = 'https://www.googleapis.com/auth/calendar'\n if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'):\n _logger.info(\n 'Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY in settings.'\n )\n return\n credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL,\n settings.GOOGLE_API_PRIVATE_KEY, scope)\n http = credentials.authorize(http=httplib2.Http())\n service = discovery.build(name, version, http=http)\n return service\n\n\n@shared_task\n@log_errors\ndef update_google_calendar_event(event_id):\n from .models import Event\n event = Event.objects.get(pk=event_id)\n if not event.google_calendar_id:\n _logger.info('Adding missing event to google calendar: %s', event.name)\n add_google_calender_event(event.id)\n return\n service = get_calendar_service()\n if not service:\n return\n payload = get_google_calendar_payload_for_event(event)\n results = service.events().update(calendarId=settings.\n GOOGLE_CALENDAR_ID, eventId=event.google_calendar_id, body=payload\n ).execute()\n _logger.info('Google calendar event for %s updated: %s', event.name,\n results)\n\n\n<mask token>\n\n\ndef get_google_calendar_payload_for_event(event):\n return {'summary': 
event.name, 'location': event.location,\n 'description': event.summary, 'start': {'dateTime': event.startdate\n .isoformat(), 'timeZone': 'Europe/Oslo'}, 'end': {'dateTime': event\n .enddate.isoformat(), 'timeZone': 'Europe/Oslo'}}\n", "step-2": "<mask token>\n\n\ndef create_events_calendar():\n \"\"\" Create an events calendar if none already exists. This function mostly exists for\n creating calendars for dev environments, not used in prod.\n \"\"\"\n service = get_calendar_service()\n if not service:\n return\n calendar = {'summary': 'Ting som skjer i Telemarkgruppa', 'timeZone':\n 'Europe/Oslo'}\n cal_insert_response = service.calendars().insert(body=calendar).execute()\n public_acl = {'role': 'reader', 'scope': {'type': 'default'}}\n acl_insert_response = service.acl().insert(calendarId=\n cal_insert_response['id'], body=public_acl).execute()\n return acl_insert_response\n\n\ndef get_calendar_service():\n name = 'calendar'\n version = 'v3'\n scope = 'https://www.googleapis.com/auth/calendar'\n if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'):\n _logger.info(\n 'Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY in settings.'\n )\n return\n credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL,\n settings.GOOGLE_API_PRIVATE_KEY, scope)\n http = credentials.authorize(http=httplib2.Http())\n service = discovery.build(name, version, http=http)\n return service\n\n\n@shared_task\n@log_errors\ndef update_google_calendar_event(event_id):\n from .models import Event\n event = Event.objects.get(pk=event_id)\n if not event.google_calendar_id:\n _logger.info('Adding missing event to google calendar: %s', event.name)\n add_google_calender_event(event.id)\n return\n service = get_calendar_service()\n if not service:\n return\n payload = get_google_calendar_payload_for_event(event)\n results = service.events().update(calendarId=settings.\n GOOGLE_CALENDAR_ID, eventId=event.google_calendar_id, body=payload\n ).execute()\n 
_logger.info('Google calendar event for %s updated: %s', event.name,\n results)\n\n\n@shared_task\n@log_errors\ndef add_google_calender_event(event_id):\n from .models import Event\n event = Event.objects.get(pk=event_id)\n if not event:\n _logger.warning('Could not find event to add to Google Calendar: %d',\n event_id)\n return\n google_payload = get_google_calendar_payload_for_event(event)\n service = get_calendar_service()\n if not service:\n return\n results = service.events().insert(calendarId=settings.\n GOOGLE_CALENDAR_ID, body=google_payload).execute()\n if results.get('id'):\n event.google_calendar_id = results['id']\n event.save()\n _logger.info(\n \"Google Calendar event for event '%s' created successfully\",\n event.name)\n else:\n _logger.error(\n 'New Google Calendar event did not have id in response, was: %s',\n results)\n\n\n@shared_task\n@log_errors\ndef delete_google_calendar_event(google_calendar_event_id):\n service = get_calendar_service()\n if not service:\n return\n result = service.events().delete(calendarId=settings.GOOGLE_CALENDAR_ID,\n eventId=google_calendar_event_id).execute()\n _logger.info('Google calendar event %s deleted: %s',\n google_calendar_event_id, result)\n\n\ndef get_google_calendar_payload_for_event(event):\n return {'summary': event.name, 'location': event.location,\n 'description': event.summary, 'start': {'dateTime': event.startdate\n .isoformat(), 'timeZone': 'Europe/Oslo'}, 'end': {'dateTime': event\n .enddate.isoformat(), 'timeZone': 'Europe/Oslo'}}\n", "step-3": "<mask token>\n_logger = getLogger(__name__)\n\n\ndef create_events_calendar():\n \"\"\" Create an events calendar if none already exists. 
This function mostly exists for\n creating calendars for dev environments, not used in prod.\n \"\"\"\n service = get_calendar_service()\n if not service:\n return\n calendar = {'summary': 'Ting som skjer i Telemarkgruppa', 'timeZone':\n 'Europe/Oslo'}\n cal_insert_response = service.calendars().insert(body=calendar).execute()\n public_acl = {'role': 'reader', 'scope': {'type': 'default'}}\n acl_insert_response = service.acl().insert(calendarId=\n cal_insert_response['id'], body=public_acl).execute()\n return acl_insert_response\n\n\ndef get_calendar_service():\n name = 'calendar'\n version = 'v3'\n scope = 'https://www.googleapis.com/auth/calendar'\n if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'):\n _logger.info(\n 'Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY in settings.'\n )\n return\n credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL,\n settings.GOOGLE_API_PRIVATE_KEY, scope)\n http = credentials.authorize(http=httplib2.Http())\n service = discovery.build(name, version, http=http)\n return service\n\n\n@shared_task\n@log_errors\ndef update_google_calendar_event(event_id):\n from .models import Event\n event = Event.objects.get(pk=event_id)\n if not event.google_calendar_id:\n _logger.info('Adding missing event to google calendar: %s', event.name)\n add_google_calender_event(event.id)\n return\n service = get_calendar_service()\n if not service:\n return\n payload = get_google_calendar_payload_for_event(event)\n results = service.events().update(calendarId=settings.\n GOOGLE_CALENDAR_ID, eventId=event.google_calendar_id, body=payload\n ).execute()\n _logger.info('Google calendar event for %s updated: %s', event.name,\n results)\n\n\n@shared_task\n@log_errors\ndef add_google_calender_event(event_id):\n from .models import Event\n event = Event.objects.get(pk=event_id)\n if not event:\n _logger.warning('Could not find event to add to Google Calendar: %d',\n event_id)\n return\n google_payload = 
get_google_calendar_payload_for_event(event)\n service = get_calendar_service()\n if not service:\n return\n results = service.events().insert(calendarId=settings.\n GOOGLE_CALENDAR_ID, body=google_payload).execute()\n if results.get('id'):\n event.google_calendar_id = results['id']\n event.save()\n _logger.info(\n \"Google Calendar event for event '%s' created successfully\",\n event.name)\n else:\n _logger.error(\n 'New Google Calendar event did not have id in response, was: %s',\n results)\n\n\n@shared_task\n@log_errors\ndef delete_google_calendar_event(google_calendar_event_id):\n service = get_calendar_service()\n if not service:\n return\n result = service.events().delete(calendarId=settings.GOOGLE_CALENDAR_ID,\n eventId=google_calendar_event_id).execute()\n _logger.info('Google calendar event %s deleted: %s',\n google_calendar_event_id, result)\n\n\ndef get_google_calendar_payload_for_event(event):\n return {'summary': event.name, 'location': event.location,\n 'description': event.summary, 'start': {'dateTime': event.startdate\n .isoformat(), 'timeZone': 'Europe/Oslo'}, 'end': {'dateTime': event\n .enddate.isoformat(), 'timeZone': 'Europe/Oslo'}}\n", "step-4": "from ..general.utils import log_errors\nfrom googleapiclient import discovery\nfrom oauth2client.client import SignedJwtAssertionCredentials\nfrom django.conf import settings\nfrom celery import shared_task\nfrom logging import getLogger\nimport httplib2\n_logger = getLogger(__name__)\n\n\ndef create_events_calendar():\n \"\"\" Create an events calendar if none already exists. 
This function mostly exists for\n creating calendars for dev environments, not used in prod.\n \"\"\"\n service = get_calendar_service()\n if not service:\n return\n calendar = {'summary': 'Ting som skjer i Telemarkgruppa', 'timeZone':\n 'Europe/Oslo'}\n cal_insert_response = service.calendars().insert(body=calendar).execute()\n public_acl = {'role': 'reader', 'scope': {'type': 'default'}}\n acl_insert_response = service.acl().insert(calendarId=\n cal_insert_response['id'], body=public_acl).execute()\n return acl_insert_response\n\n\ndef get_calendar_service():\n name = 'calendar'\n version = 'v3'\n scope = 'https://www.googleapis.com/auth/calendar'\n if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'):\n _logger.info(\n 'Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY in settings.'\n )\n return\n credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL,\n settings.GOOGLE_API_PRIVATE_KEY, scope)\n http = credentials.authorize(http=httplib2.Http())\n service = discovery.build(name, version, http=http)\n return service\n\n\n@shared_task\n@log_errors\ndef update_google_calendar_event(event_id):\n from .models import Event\n event = Event.objects.get(pk=event_id)\n if not event.google_calendar_id:\n _logger.info('Adding missing event to google calendar: %s', event.name)\n add_google_calender_event(event.id)\n return\n service = get_calendar_service()\n if not service:\n return\n payload = get_google_calendar_payload_for_event(event)\n results = service.events().update(calendarId=settings.\n GOOGLE_CALENDAR_ID, eventId=event.google_calendar_id, body=payload\n ).execute()\n _logger.info('Google calendar event for %s updated: %s', event.name,\n results)\n\n\n@shared_task\n@log_errors\ndef add_google_calender_event(event_id):\n from .models import Event\n event = Event.objects.get(pk=event_id)\n if not event:\n _logger.warning('Could not find event to add to Google Calendar: %d',\n event_id)\n return\n google_payload = 
get_google_calendar_payload_for_event(event)\n service = get_calendar_service()\n if not service:\n return\n results = service.events().insert(calendarId=settings.\n GOOGLE_CALENDAR_ID, body=google_payload).execute()\n if results.get('id'):\n event.google_calendar_id = results['id']\n event.save()\n _logger.info(\n \"Google Calendar event for event '%s' created successfully\",\n event.name)\n else:\n _logger.error(\n 'New Google Calendar event did not have id in response, was: %s',\n results)\n\n\n@shared_task\n@log_errors\ndef delete_google_calendar_event(google_calendar_event_id):\n service = get_calendar_service()\n if not service:\n return\n result = service.events().delete(calendarId=settings.GOOGLE_CALENDAR_ID,\n eventId=google_calendar_event_id).execute()\n _logger.info('Google calendar event %s deleted: %s',\n google_calendar_event_id, result)\n\n\ndef get_google_calendar_payload_for_event(event):\n return {'summary': event.name, 'location': event.location,\n 'description': event.summary, 'start': {'dateTime': event.startdate\n .isoformat(), 'timeZone': 'Europe/Oslo'}, 'end': {'dateTime': event\n .enddate.isoformat(), 'timeZone': 'Europe/Oslo'}}\n", "step-5": "# -*- coding: utf-8 -*-\n\nfrom ..general.utils import log_errors\n\nfrom googleapiclient import discovery\nfrom oauth2client.client import SignedJwtAssertionCredentials\nfrom django.conf import settings\nfrom celery import shared_task\nfrom logging import getLogger\nimport httplib2\n\n_logger = getLogger(__name__)\n\ndef create_events_calendar():\n \"\"\" Create an events calendar if none already exists. 
This function mostly exists for\n creating calendars for dev environments, not used in prod.\n \"\"\"\n service = get_calendar_service()\n if not service:\n return\n calendar = {\n 'summary': 'Ting som skjer i Telemarkgruppa',\n 'timeZone': 'Europe/Oslo',\n }\n cal_insert_response = service.calendars().insert(body=calendar).execute()\n public_acl = {\n 'role': 'reader',\n 'scope': {\n 'type': 'default'\n }\n }\n acl_insert_response = service.acl().insert(calendarId=cal_insert_response['id'], body=public_acl).execute()\n return acl_insert_response\n\n\ndef get_calendar_service():\n name = 'calendar'\n version = 'v3'\n scope = 'https://www.googleapis.com/auth/calendar'\n\n # Provide a mock fallback for test environments where real interaction with\n # Google calendar is not needed\n if not hasattr(settings, 'GOOGLE_API_PRIVATE_KEY'):\n _logger.info('Skipping Google calendar integration due to missing GOOGLE_API_PRIVATE_KEY '\n 'in settings.')\n return\n\n # Prepare credentials, and authorize HTTP object with them.\n credentials = SignedJwtAssertionCredentials(settings.GOOGLE_API_EMAIL,\n settings.GOOGLE_API_PRIVATE_KEY, scope)\n http = credentials.authorize(http=httplib2.Http())\n\n # Construct a service object via the discovery service.\n service = discovery.build(name, version, http=http)\n return service\n\n\n@shared_task\n@log_errors\ndef update_google_calendar_event(event_id):\n from .models import Event\n event = Event.objects.get(pk=event_id)\n\n # If the event doesn't already exist on google calendar, create it\n if not event.google_calendar_id:\n _logger.info('Adding missing event to google calendar: %s', event.name)\n add_google_calender_event(event.id)\n return\n\n # Authenticate and construct service.\n service = get_calendar_service()\n\n if not service:\n return\n\n payload = get_google_calendar_payload_for_event(event)\n results = service.events().update(calendarId=settings.GOOGLE_CALENDAR_ID,\n eventId=event.google_calendar_id, 
body=payload).execute()\n _logger.info('Google calendar event for %s updated: %s', event.name, results)\n\n\n@shared_task\n@log_errors\ndef add_google_calender_event(event_id):\n from .models import Event\n event = Event.objects.get(pk=event_id)\n\n if not event:\n _logger.warning('Could not find event to add to Google Calendar: %d', event_id)\n return\n\n google_payload = get_google_calendar_payload_for_event(event)\n service = get_calendar_service()\n if not service:\n return\n\n results = service.events().insert(calendarId=settings.GOOGLE_CALENDAR_ID,\n body=google_payload).execute()\n if results.get('id'):\n event.google_calendar_id = results['id']\n event.save()\n _logger.info(\"Google Calendar event for event '%s' created successfully\", event.name)\n else:\n _logger.error(\"New Google Calendar event did not have id in response, was: %s\", results)\n\n\n@shared_task\n@log_errors\ndef delete_google_calendar_event(google_calendar_event_id):\n service = get_calendar_service()\n if not service:\n return\n\n result = service.events().delete(calendarId=settings.GOOGLE_CALENDAR_ID,\n eventId=google_calendar_event_id).execute()\n _logger.info('Google calendar event %s deleted: %s', google_calendar_event_id, result)\n\n\ndef get_google_calendar_payload_for_event(event):\n return {\n 'summary': event.name,\n 'location': event.location,\n 'description': event.summary,\n 'start': {\n 'dateTime': event.startdate.isoformat(),\n 'timeZone': 'Europe/Oslo',\n },\n 'end': {\n 'dateTime': event.enddate.isoformat(),\n 'timeZone': 'Europe/Oslo',\n }\n }\n", "step-ids": [ 4, 6, 7, 8, 9 ] }
[ 4, 6, 7, 8, 9 ]
# -*- coding: utf-8 -*- import os import re import datetime import sys import codecs import logging import logging.handlers import fnmatch import time import argparse from antlr4 import * from antlr4.tree.Trees import Trees from lxml import etree from AknJudgementClass import AknJudgementXML from AknLegalReferencesClass import AknLegalReferences from functions import validateXML, findDatesOfInterest from functions import setupLogger, fixStringXML, CheckXMLvalidity from variables import * from grammars.gen.SupremeCourtLexer import SupremeCourtLexer from grammars.gen.SupremeCourtParser import SupremeCourtParser from grammars.gen.SupremeCourtListener import SupremeCourtListener from grammars.gen.Legal_refLexer import Legal_refLexer from grammars.gen.Legal_refParser import Legal_refParser from grammars.gen.Legal_refListener import Legal_refListener from grammars.gen.Legal_refVisitor import Legal_refVisitor program_description = 'A Command Line Interface to transform judgments ' program_description += 'published by the Supreme Civil and Criminal court ' program_description += '(Areios Pagos) into XML using Akoma Ntoso ' program_description += 'prototype. ' parser = argparse.ArgumentParser( description = program_description ) year_help = 'choose a specific year for judgment(s) to be processed ' parser.add_argument( '-year', help = year_help ) fn_help = 'choose a specific file to be transformed to Akoma Ntoso ' fn_help += '(if argument is present -year parameter must be declared)' parser.add_argument( '-fn', metavar = 'FILENAME', help = fn_help ) # create a namespace object args = parser.parse_args() if __name__ == '__main__': #print args # This is used for statistics purposes (time calculation, validation etc.) 
#general_LOG_file = 'statistics_AreiosPagos.txt' # Create regex object for publicHearingDate publicHearingDateObj = re.compile(publicHearingDatePattern) # Create regex objext for decisionPublicationDate decisionPublicationDateObj = re.compile(decisionPublicationDatePattern) # Create regex objext for courtConferenceDate courtConferenceDateObj = re.compile(courtConferenceDatePattern) # Create regex objext for fix XML string paragraphPatternObj = re.compile(paragraphPattern) if args.fn is not None: if args.year is None: parser.error( 'You must provide -year parameter ' + 'in order to process a specific file' ) else: file_pattern = '*' + args.fn else: file_pattern = '*' + TXT_EXT source_path = os.path.join( os.getcwd(), os.path.join( LEGAL_TEXTS, AREIOS_PAGOS ) ) if args.year is not None: source_path = os.path.join( source_path, args.year ) #print source_path for root, dirs, files in os.walk(source_path): #print root logs_path = root.replace( os.path.join( os.getcwd(), LEGAL_TEXTS ), os.path.join( os.getcwd(), LOGS ) ) #print "logs: " + logs_path xml_path = root.replace( os.path.join( os.getcwd(), LEGAL_TEXTS ), os.path.join( os.getcwd(), XML ) ) #print "xml: " + xml_path #xml_no_ner_path = root.replace( # os.path.join( # os.getcwd(), # LEGAL_TEXTS # ), # os.path.join( # os.getcwd(), # XML_NO_NER # ) # ) #print "xmlnoner: " +xml_no_ner_path ner_path = root.replace( os.path.join( os.getcwd(), LEGAL_TEXTS ), os.path.join( os.getcwd(), NER ) ) #print "ner: " + ner_path #sys.exit() # Create LOG folder if it does not exist if not os.path.exists(logs_path): #print "Creating Logs folder..." os.makedirs(logs_path) # Create XML folder if it does not exist if not os.path.exists(xml_path): #print "Creating XML folder..." os.makedirs(xml_path) # Create XML without NER folder if it does not exist #if not os.path.exists(xml_no_ner_path): #print "Creating XML without NER folder..." 
#os.makedirs(xml_no_ner_path) for name in files: if fnmatch.fnmatch(name, file_pattern): print "judgment decision: " + name global is_valid is_valid = False try: # just for statistics purposes start_time = time.clock() # Foreach judgment file create a corresponding log, # XML and text filename year = name.split('.')[0].split('_')[-1] log_file = os.path.join( logs_path, name ) xml_file = os.path.join( xml_path, name.split('.')[0] + XML_EXT ) #xml_file_NO_NER = os.path.join( # xml_no_ner_path, # name.split('.')[0] + XML_EXT # ) text_file = os.path.join( xml_path, name.split('.')[0] + TXT_EXT ) #text_file_NO_NER = os.path.join( # xml_no_ner_path, # name.split('.')[0] + TXT_EXT # ) # Declare Gate XML file where named entities are stored gate_xml_file = os.path.join( ner_path, name + XML_EXT ) #print "log_file: " + log_file #print "xml_file: " + xml_file #print "text_fle: " + text_file #print "gate_xml: " + gate_xml_file #sys.exit() # Setup a logger Akn_LOGGER = setupLogger('Akn_LOGGER', log_file) Akn_LOGGER.info('Converting %s', name) ######################## METADATA ######################### # Dictionary of metadata # Usually metadata comes from external files or # could be extracted from legal text later meta = {} meta['textType'] = "judgment" meta['author'] = "#SCCC" meta['foreas'] = "SCCC" # In Areios Pagos we can extract decision number and # year from file name datePattern = re.search( r'Ar?\s+(?P<decisionNumber>\d+)[_](?P<issueYear>\d+)', name, re.DOTALL ) if datePattern: #print datePattern.group('decisionNumber') #print datePattern.group('issueYear') meta['issueYear'] = datePattern.group('issueYear') meta['decisionNumber'] = datePattern.group('decisionNumber') # Create AknJudgementXML object judgmentObj = AknJudgementXML( textType = meta['textType'], author = meta['author'], foreas = meta['foreas'], issueYear = meta['issueYear'], decisionNumber = meta['decisionNumber'] ) # Create "meta" node metaElem = judgmentObj.createMeta() #print(etree.tostring( # 
metaElem, # pretty_print=True, # encoding="UTF-8", # xml_declaration =True # )) # Populate reference node with Named Entities if os.path.isfile(gate_xml_file): #print "gate_xml_file exists" referencesNode = metaElem.find('references') if referencesNode is not None: referencesNodeIndex = metaElem.getchildren().index(referencesNode) #print referencesNodeIndex newReferencesNode = judgmentObj.modifyReferencesFromGateXml( gate_xml_file, referencesNode ) metaElem.remove(referencesNode) metaElem.insert( referencesNodeIndex, newReferencesNode ) #sys.exit() ######################## END METADATA ##################### ########################### LEGAL REFERENCES ################# #print 'Parsing legal references...' finput = FileStream(os.path.join(root, name), encoding='utf-8') lexer = Legal_refLexer(finput) stream = CommonTokenStream(lexer) parser = Legal_refParser(stream) tree = parser.legal_text() answer = AknLegalReferences().visit(tree) #print(answer) ########################### END LEGAL REFERENCES ############## ############################# STRUCTURE ####################### #print 'Creating judgment structure...' 
Akn_LOGGER.info('Creating judgment structure...') finput = InputStream(answer) lexer = SupremeCourtLexer(finput) stream = CommonTokenStream(lexer) parser = SupremeCourtParser(stream) tree = parser.judgment() walker = ParseTreeWalker() walker.walk(judgmentObj, tree) #print judgmentObj.text ############################## END STRUCTURE #################### ############################ Named Entities in text ############# if os.path.isfile(gate_xml_file): judgmentObj.text = judgmentObj.createNamedEntitiesInText( gate_xml_file, judgmentObj.text ) ################################################################## # Create AkomaNtoso Root element akomaNtosoElem = judgmentObj.createAkomaNtosoRoot() # This is due to cases where a ref tag does not close # before the end tag of a paragraph (<p><ref></p></ref>) judgmentObj.text = fixStringXML( judgmentObj.text, paragraphPatternObj ) try: # Create judgment element based on parser and append to root Akn_LOGGER.info('Transforming to XML element...') # etree.fromstring is being used it will change range # ids character '>' to &gt; judgmentElem = judgmentObj.XML() #print etree.tostring( # judgmentElem, # pretty_print=True, # encoding="UTF-8", # xml_declaration =True # ) akomaNtosoElem.insert(0, judgmentElem) # Find judgment node and insert metaElement judgmentNode = akomaNtosoElem.find("judgment") judgmentNode.insert(0, metaElem) #print( # etree.tostring( # akomaNtosoElem, # pretty_print=True, # encoding="UTF-8", # xml_declaration =True # ) # ) # Specific nodes that will be used after headerNode = akomaNtosoElem.xpath("/akomaNtoso/judgment/header") conclusionsNode = akomaNtosoElem.xpath("/akomaNtoso/judgment/conclusions") workflow = akomaNtosoElem.xpath("/akomaNtoso/judgment/meta/workflow") references = metaElem.xpath("/akomaNtoso/judgment/meta/references") # Get FRBRdate date attribute of FRBRWork and FRBRExpression elements FRBRdateWorkNode = akomaNtosoElem.xpath( "/akomaNtoso/judgment/meta/identification/FRBRWork/FRBRdate" ) 
FRBRdateExpressionNode = akomaNtosoElem.xpath( "/akomaNtoso/judgment/meta/identification/FRBRExpression/FRBRdate" ) # Dates of interest can be found in specific elements # in a judgment decision - find nodes Akn_LOGGER.info('Searching for dates of interest...') ###################### publicHearingDate ######################### # PublicHearingDate can be found on header element # of AkomaNtoso structure if headerNode: newHeaderNode = findDatesOfInterest( headerNode[0], publicHearingDateObj, 'publicHearingDate', meta['author'] ) if newHeaderNode is not None: publicHearDate = newHeaderNode[1].get('date') if workflow is not None: workflow[0].insert(0, newHeaderNode[1]) if references is not None: references[0].append(newHeaderNode[2]) # Set "date" attribute to FRBRdate node of # FRBRWork and FRBRExpression if FRBRdateWorkNode: FRBRdateWorkNode[0].set('date', publicHearDate) FRBRdateWorkNode[0].set('name', 'publicHearingDate') if FRBRdateExpressionNode: FRBRdateExpressionNode[0].set('date', publicHearDate) FRBRdateExpressionNode[0].set('name', 'publicHearingDate') #################################################################### ########################## courtConferenceDate #################### # CourtConferenceDate can also be found in conclusions node if conclusionsNode: newConclusionsNode = findDatesOfInterest( conclusionsNode[0], courtConferenceDateObj, 'courtConferenceDate', meta['author'] ) if newConclusionsNode is not None: courtConfDate = newConclusionsNode[1].get('date') # Set step element to workflow node if workflow is not None: workflow[0].insert(0, newConclusionsNode[1]) # Set TLCEvent element to workflow node if references is not None: references[0].append(newConclusionsNode[2]) # If for some reason DecisionPublicationDate does not exist # try fill FRBR date with # court conference date #if hasDecisionPublicationDate == False: if FRBRdateWorkNode: FRBRdateWorkNode[0].set('date', courtConfDate) FRBRdateWorkNode[0].set('name', 'courtConferenceDate') if 
FRBRdateExpressionNode: FRBRdateExpressionNode[0].set('date', courtConfDate) FRBRdateExpressionNode[0].set('name', 'courtConferenceDate') ###################################################################### ########################## decisionPublicationDate ################# # DecisionPublicationDate can be found on conclusions element # of AkomaNtoso structure #hasDecisionPublicationDate = True if conclusionsNode: newConclusionsNode = findDatesOfInterest( conclusionsNode[0], decisionPublicationDateObj, 'decisionPublicationDate', meta['author'] ) #print newConclusionsNode if newConclusionsNode is not None: publicationDate = newConclusionsNode[1].get('date') # Set step element to workflow node if workflow is not None: workflow[0].insert(0, newConclusionsNode[1]) # Set TLCEvent element to workflow node if references is not None: references[0].append(newConclusionsNode[2]) # Set "date" attribute to FRBRdate node of # FRBRWork and FRBRExpression if FRBRdateWorkNode: FRBRdateWorkNode[0].set('date', publicationDate) FRBRdateWorkNode[0].set('name', 'decisionPublicationDate') if FRBRdateExpressionNode: FRBRdateExpressionNode[0].set('date', publicationDate) FRBRdateExpressionNode[0].set('name', 'decisionPublicationDate') #else: # hasDecisionPublicationDate = False #################################################################### Akn_LOGGER.info('Stop searching for dates of interest...') # Create the corresponding ElementTree object XmlTree = etree.ElementTree(akomaNtosoElem) #print etree.tostring( # XmlTree, # pretty_print = True, # encoding="UTF-8", # xml_declaration = True # ) # Open the XML file and append elementTree to it Akn_LOGGER.info('Creating XML file...') # Problem with href range_id cannot retain '>' character, # so write string tree representation to file with codecs.open(xml_file, "w") as fin: fin.write( etree.tostring( XmlTree, pretty_print=True, encoding="UTF-8", xml_declaration =True ).replace('&gt;', '>') ) ########## copy XML tree and save it without 
including NER ############## """ rootNode = XmlTree.getroot() for child in rootNode.xpath("./judgment/meta/references"): for child_lv2 in child: if child_lv2.tag == 'TLCOrganization' or child_lv2.tag == 'TLCPerson' or child_lv2.tag == 'TLCLocation': #print child_lv2 child_lv2.getparent().remove(child_lv2) XmlTreeStr_NO_NER = etree.tostring( XmlTree, pretty_print=True, encoding="UTF-8", xml_declaration =True ) XmlTreeStr_NO_NER = re.sub( r'[<]/?organization.*?[>]', '', XmlTreeStr_NO_NER, flags = re.DOTALL ) XmlTreeStr_NO_NER = re.sub( r'[<]/?person.*?[>]', '', XmlTreeStr_NO_NER, flags = re.DOTALL ) XmlTreeStr_NO_NER = re.sub( r'[<]/?location.*?[>]', '', XmlTreeStr_NO_NER, flags = re.DOTALL ) #print XmlTreeStr_NO_NER # etree.fromstring is being used it will change # range ids character '>' to &gt; XmlElement_NO_NER = etree.fromstring(XmlTreeStr_NO_NER) #print XmlElement_NO_NER XmlTree_NO_NER = etree.ElementTree(XmlElement_NO_NER) #print XmlElement_NO_NER with codecs.open(xml_file_NO_NER, "w") as fin: fin.write( etree.tostring( XmlTree_NO_NER, pretty_print=True, encoding="UTF-8", xml_declaration =True ).replace('&gt;', '>') ) """ ######################################################################## # Validation validateXML('akomantoso30.xsd', xml_file, log_file) #is_valid = CheckXMLvalidity('akomantoso30.xsd', xml_file) #print is_valid except etree.XMLSyntaxError: # Something went wrong write the corresponding # XML string to a .txt file Akn_LOGGER.info('Could not create XML element from string! Check validity!') with open(text_file, "w") as fin: fin.write(judgmentObj.text) #with open(text_file_NO_NER, "w") as fin: # fin.write(judgmentObj.text) except KeyboardInterrupt: raise except Exception as e: print(e) Akn_LOGGER.info('Something went wrong! 
Error raised and passed...') with open(text_file, "w") as fin: fin.write('') #with open(text_file_NO_NER, "w") as fin: # fin.write('') #pass end_time = time.clock() file_process_time = round(end_time - start_time, 2) #print is_valid Akn_LOGGER.info('file process time: %s', file_process_time) #with open (general_LOG_file, "a") as file_log: # file_log.write( # os.path.join(root, name) + # ';' + # str(file_process_time) + # ';' + # str(is_valid) + # '\n' # ) logging.shutdown()
normal
{ "blob_id": "190f0bcbac946c410d964860fd5be8718011caa8", "index": 2862, "step-1": "# -*- coding: utf-8 -*-\r\nimport os\r\nimport re\r\nimport datetime\r\nimport sys\r\nimport codecs\r\nimport logging\r\nimport logging.handlers\r\nimport fnmatch\r\nimport time\r\nimport argparse\r\nfrom antlr4 import *\r\nfrom antlr4.tree.Trees import Trees\r\nfrom lxml import etree\r\nfrom AknJudgementClass import AknJudgementXML\r\nfrom AknLegalReferencesClass import AknLegalReferences\r\nfrom functions import validateXML, findDatesOfInterest\r\nfrom functions import setupLogger, fixStringXML, CheckXMLvalidity\r\nfrom variables import *\r\nfrom grammars.gen.SupremeCourtLexer import SupremeCourtLexer\r\nfrom grammars.gen.SupremeCourtParser import SupremeCourtParser\r\nfrom grammars.gen.SupremeCourtListener import SupremeCourtListener\r\nfrom grammars.gen.Legal_refLexer import Legal_refLexer\r\nfrom grammars.gen.Legal_refParser import Legal_refParser\r\nfrom grammars.gen.Legal_refListener import Legal_refListener\r\nfrom grammars.gen.Legal_refVisitor import Legal_refVisitor\r\n\r\nprogram_description = 'A Command Line Interface to transform judgments '\r\nprogram_description += 'published by the Supreme Civil and Criminal court '\r\nprogram_description += '(Areios Pagos) into XML using Akoma Ntoso '\r\nprogram_description += 'prototype. 
'\r\n\r\nparser = argparse.ArgumentParser(\r\n description = program_description\r\n )\r\n\r\nyear_help = 'choose a specific year for judgment(s) to be processed '\r\nparser.add_argument(\r\n '-year',\r\n help = year_help\r\n )\r\n\r\nfn_help = 'choose a specific file to be transformed to Akoma Ntoso '\r\nfn_help += '(if argument is present -year parameter must be declared)'\r\nparser.add_argument(\r\n '-fn',\r\n metavar = 'FILENAME',\r\n help = fn_help\r\n )\r\n\r\n# create a namespace object\r\nargs = parser.parse_args()\r\n\r\nif __name__ == '__main__':\r\n #print args\r\n\r\n # This is used for statistics purposes (time calculation, validation etc.)\r\n #general_LOG_file = 'statistics_AreiosPagos.txt'\r\n\r\n # Create regex object for publicHearingDate\r\n publicHearingDateObj = re.compile(publicHearingDatePattern)\r\n # Create regex objext for decisionPublicationDate\r\n decisionPublicationDateObj = re.compile(decisionPublicationDatePattern) \r\n # Create regex objext for courtConferenceDate\r\n courtConferenceDateObj = re.compile(courtConferenceDatePattern)\r\n # Create regex objext for fix XML string\r\n paragraphPatternObj = re.compile(paragraphPattern)\r\n\r\n if args.fn is not None:\r\n if args.year is None:\r\n parser.error(\r\n 'You must provide -year parameter ' +\r\n 'in order to process a specific file'\r\n )\r\n else:\r\n file_pattern = '*' + args.fn\r\n else:\r\n file_pattern = '*' + TXT_EXT\r\n\r\n source_path = os.path.join(\r\n os.getcwd(),\r\n os.path.join(\r\n LEGAL_TEXTS,\r\n AREIOS_PAGOS\r\n )\r\n )\r\n \r\n if args.year is not None:\r\n source_path = os.path.join(\r\n source_path,\r\n args.year\r\n )\r\n #print source_path\r\n\r\n for root, dirs, files in os.walk(source_path):\r\n #print root\r\n logs_path = root.replace(\r\n os.path.join(\r\n os.getcwd(),\r\n LEGAL_TEXTS\r\n ),\r\n os.path.join(\r\n os.getcwd(),\r\n LOGS\r\n )\r\n )\r\n #print \"logs: \" + logs_path\r\n\r\n xml_path = root.replace(\r\n os.path.join(\r\n os.getcwd(),\r\n 
LEGAL_TEXTS\r\n ),\r\n os.path.join(\r\n os.getcwd(),\r\n XML\r\n )\r\n )\r\n #print \"xml: \" + xml_path\r\n\r\n #xml_no_ner_path = root.replace(\r\n # os.path.join(\r\n # os.getcwd(),\r\n # LEGAL_TEXTS\r\n # ),\r\n # os.path.join(\r\n # os.getcwd(),\r\n # XML_NO_NER\r\n # )\r\n # )\r\n #print \"xmlnoner: \" +xml_no_ner_path\r\n\r\n ner_path = root.replace(\r\n os.path.join(\r\n os.getcwd(),\r\n LEGAL_TEXTS\r\n ),\r\n os.path.join(\r\n os.getcwd(),\r\n NER\r\n )\r\n )\r\n #print \"ner: \" + ner_path\r\n #sys.exit()\r\n \r\n # Create LOG folder if it does not exist\r\n if not os.path.exists(logs_path):\r\n #print \"Creating Logs folder...\"\r\n os.makedirs(logs_path)\r\n\r\n # Create XML folder if it does not exist\r\n if not os.path.exists(xml_path):\r\n #print \"Creating XML folder...\"\r\n os.makedirs(xml_path)\r\n\r\n # Create XML without NER folder if it does not exist\r\n #if not os.path.exists(xml_no_ner_path):\r\n #print \"Creating XML without NER folder...\"\r\n #os.makedirs(xml_no_ner_path)\r\n \r\n for name in files:\r\n if fnmatch.fnmatch(name, file_pattern):\r\n print \"judgment decision: \" + name\r\n global is_valid\r\n is_valid = False\r\n try:\r\n # just for statistics purposes\r\n start_time = time.clock()\r\n \r\n # Foreach judgment file create a corresponding log,\r\n # XML and text filename\r\n year = name.split('.')[0].split('_')[-1]\r\n log_file = os.path.join(\r\n logs_path,\r\n name\r\n )\r\n xml_file = os.path.join(\r\n xml_path,\r\n name.split('.')[0] + XML_EXT\r\n )\r\n #xml_file_NO_NER = os.path.join(\r\n # xml_no_ner_path,\r\n # name.split('.')[0] + XML_EXT\r\n # )\r\n text_file = os.path.join(\r\n xml_path,\r\n name.split('.')[0] + TXT_EXT\r\n )\r\n #text_file_NO_NER = os.path.join(\r\n # xml_no_ner_path,\r\n # name.split('.')[0] + TXT_EXT\r\n # )\r\n \r\n # Declare Gate XML file where named entities are stored\r\n gate_xml_file = os.path.join(\r\n ner_path,\r\n name + XML_EXT\r\n )\r\n #print \"log_file: \" + log_file\r\n #print 
\"xml_file: \" + xml_file\r\n #print \"text_fle: \" + text_file\r\n #print \"gate_xml: \" + gate_xml_file\r\n #sys.exit()\r\n\r\n # Setup a logger\r\n Akn_LOGGER = setupLogger('Akn_LOGGER', log_file)\r\n Akn_LOGGER.info('Converting %s', name)\r\n\r\n ######################## METADATA #########################\r\n # Dictionary of metadata\r\n # Usually metadata comes from external files or\r\n # could be extracted from legal text later\r\n meta = {}\r\n meta['textType'] = \"judgment\"\r\n meta['author'] = \"#SCCC\"\r\n meta['foreas'] = \"SCCC\"\r\n\r\n # In Areios Pagos we can extract decision number and\r\n # year from file name\r\n datePattern = re.search(\r\n r'Ar?\\s+(?P<decisionNumber>\\d+)[_](?P<issueYear>\\d+)',\r\n name,\r\n re.DOTALL\r\n )\r\n if datePattern:\r\n #print datePattern.group('decisionNumber')\r\n #print datePattern.group('issueYear')\r\n meta['issueYear'] = datePattern.group('issueYear')\r\n meta['decisionNumber'] = datePattern.group('decisionNumber')\r\n\r\n # Create AknJudgementXML object\r\n judgmentObj = AknJudgementXML(\r\n textType = meta['textType'],\r\n author = meta['author'],\r\n foreas = meta['foreas'],\r\n issueYear = meta['issueYear'],\r\n decisionNumber = meta['decisionNumber']\r\n )\r\n\r\n # Create \"meta\" node\r\n metaElem = judgmentObj.createMeta()\r\n #print(etree.tostring(\r\n # metaElem,\r\n # pretty_print=True,\r\n # encoding=\"UTF-8\",\r\n # xml_declaration =True\r\n # ))\r\n \r\n # Populate reference node with Named Entities\r\n if os.path.isfile(gate_xml_file):\r\n #print \"gate_xml_file exists\"\r\n referencesNode = metaElem.find('references')\r\n if referencesNode is not None:\r\n referencesNodeIndex = metaElem.getchildren().index(referencesNode)\r\n #print referencesNodeIndex\r\n newReferencesNode = judgmentObj.modifyReferencesFromGateXml(\r\n gate_xml_file,\r\n referencesNode\r\n )\r\n metaElem.remove(referencesNode)\r\n metaElem.insert(\r\n referencesNodeIndex,\r\n newReferencesNode\r\n )\r\n #sys.exit()\r\n 
######################## END METADATA #####################\r\n \r\n ########################### LEGAL REFERENCES #################\r\n #print 'Parsing legal references...'\r\n finput = FileStream(os.path.join(root, name), encoding='utf-8')\r\n lexer = Legal_refLexer(finput)\r\n stream = CommonTokenStream(lexer)\r\n parser = Legal_refParser(stream)\r\n tree = parser.legal_text()\r\n answer = AknLegalReferences().visit(tree)\r\n #print(answer)\r\n ########################### END LEGAL REFERENCES ##############\r\n \r\n ############################# STRUCTURE #######################\r\n #print 'Creating judgment structure...'\r\n Akn_LOGGER.info('Creating judgment structure...')\r\n finput = InputStream(answer)\r\n lexer = SupremeCourtLexer(finput)\r\n stream = CommonTokenStream(lexer)\r\n parser = SupremeCourtParser(stream)\r\n tree = parser.judgment()\r\n walker = ParseTreeWalker()\r\n walker.walk(judgmentObj, tree)\r\n #print judgmentObj.text\r\n ############################## END STRUCTURE #################### \r\n\r\n ############################ Named Entities in text #############\r\n if os.path.isfile(gate_xml_file):\r\n judgmentObj.text = judgmentObj.createNamedEntitiesInText(\r\n gate_xml_file,\r\n judgmentObj.text\r\n )\r\n ##################################################################\r\n\r\n # Create AkomaNtoso Root element\r\n akomaNtosoElem = judgmentObj.createAkomaNtosoRoot()\r\n \r\n # This is due to cases where a ref tag does not close\r\n # before the end tag of a paragraph (<p><ref></p></ref>)\r\n judgmentObj.text = fixStringXML(\r\n judgmentObj.text,\r\n paragraphPatternObj\r\n )\r\n\r\n try:\r\n # Create judgment element based on parser and append to root\r\n Akn_LOGGER.info('Transforming to XML element...')\r\n\r\n # etree.fromstring is being used it will change range\r\n # ids character '>' to &gt; \r\n judgmentElem = judgmentObj.XML()\r\n\r\n #print etree.tostring(\r\n # judgmentElem,\r\n # pretty_print=True,\r\n # encoding=\"UTF-8\",\r\n 
# xml_declaration =True\r\n # )\r\n akomaNtosoElem.insert(0, judgmentElem)\r\n \r\n # Find judgment node and insert metaElement\r\n judgmentNode = akomaNtosoElem.find(\"judgment\")\r\n judgmentNode.insert(0, metaElem)\r\n #print(\r\n # etree.tostring(\r\n # akomaNtosoElem,\r\n # pretty_print=True,\r\n # encoding=\"UTF-8\",\r\n # xml_declaration =True\r\n # )\r\n # )\r\n\r\n # Specific nodes that will be used after\r\n headerNode = akomaNtosoElem.xpath(\"/akomaNtoso/judgment/header\")\r\n conclusionsNode = akomaNtosoElem.xpath(\"/akomaNtoso/judgment/conclusions\")\r\n workflow = akomaNtosoElem.xpath(\"/akomaNtoso/judgment/meta/workflow\")\r\n references = metaElem.xpath(\"/akomaNtoso/judgment/meta/references\")\r\n\r\n # Get FRBRdate date attribute of FRBRWork and FRBRExpression elements\r\n FRBRdateWorkNode = akomaNtosoElem.xpath(\r\n \"/akomaNtoso/judgment/meta/identification/FRBRWork/FRBRdate\"\r\n )\r\n FRBRdateExpressionNode = akomaNtosoElem.xpath(\r\n \"/akomaNtoso/judgment/meta/identification/FRBRExpression/FRBRdate\"\r\n )\r\n \r\n # Dates of interest can be found in specific elements\r\n # in a judgment decision - find nodes\r\n Akn_LOGGER.info('Searching for dates of interest...')\r\n \r\n ###################### publicHearingDate #########################\r\n # PublicHearingDate can be found on header element\r\n # of AkomaNtoso structure\r\n if headerNode:\r\n newHeaderNode = findDatesOfInterest(\r\n headerNode[0],\r\n publicHearingDateObj,\r\n 'publicHearingDate',\r\n meta['author']\r\n )\r\n \r\n if newHeaderNode is not None:\r\n publicHearDate = newHeaderNode[1].get('date')\r\n \r\n if workflow is not None:\r\n workflow[0].insert(0, newHeaderNode[1])\r\n\r\n if references is not None:\r\n references[0].append(newHeaderNode[2])\r\n\r\n # Set \"date\" attribute to FRBRdate node of\r\n # FRBRWork and FRBRExpression\r\n if FRBRdateWorkNode:\r\n FRBRdateWorkNode[0].set('date', publicHearDate)\r\n FRBRdateWorkNode[0].set('name', 'publicHearingDate')\r\n\r\n 
if FRBRdateExpressionNode:\r\n FRBRdateExpressionNode[0].set('date', publicHearDate)\r\n FRBRdateExpressionNode[0].set('name', 'publicHearingDate')\r\n ####################################################################\r\n\r\n ########################## courtConferenceDate ####################\r\n # CourtConferenceDate can also be found in conclusions node\r\n if conclusionsNode:\r\n newConclusionsNode = findDatesOfInterest(\r\n conclusionsNode[0],\r\n courtConferenceDateObj,\r\n 'courtConferenceDate',\r\n meta['author']\r\n )\r\n\r\n if newConclusionsNode is not None:\r\n courtConfDate = newConclusionsNode[1].get('date')\r\n\r\n # Set step element to workflow node\r\n if workflow is not None:\r\n workflow[0].insert(0, newConclusionsNode[1])\r\n\r\n # Set TLCEvent element to workflow node\r\n if references is not None:\r\n references[0].append(newConclusionsNode[2])\r\n\r\n # If for some reason DecisionPublicationDate does not exist\r\n # try fill FRBR date with\r\n # court conference date\r\n #if hasDecisionPublicationDate == False:\r\n if FRBRdateWorkNode:\r\n FRBRdateWorkNode[0].set('date', courtConfDate)\r\n FRBRdateWorkNode[0].set('name', 'courtConferenceDate')\r\n \r\n if FRBRdateExpressionNode:\r\n FRBRdateExpressionNode[0].set('date', courtConfDate)\r\n FRBRdateExpressionNode[0].set('name', 'courtConferenceDate')\r\n ######################################################################\r\n \r\n ########################## decisionPublicationDate #################\r\n # DecisionPublicationDate can be found on conclusions element\r\n # of AkomaNtoso structure\r\n #hasDecisionPublicationDate = True\r\n if conclusionsNode:\r\n newConclusionsNode = findDatesOfInterest(\r\n conclusionsNode[0],\r\n decisionPublicationDateObj,\r\n 'decisionPublicationDate',\r\n meta['author']\r\n )\r\n #print newConclusionsNode\r\n\r\n if newConclusionsNode is not None:\r\n publicationDate = newConclusionsNode[1].get('date')\r\n\r\n # Set step element to workflow node\r\n if 
workflow is not None:\r\n workflow[0].insert(0, newConclusionsNode[1])\r\n\r\n # Set TLCEvent element to workflow node\r\n if references is not None:\r\n references[0].append(newConclusionsNode[2])\r\n\r\n # Set \"date\" attribute to FRBRdate node of\r\n # FRBRWork and FRBRExpression\r\n if FRBRdateWorkNode:\r\n FRBRdateWorkNode[0].set('date', publicationDate)\r\n FRBRdateWorkNode[0].set('name', 'decisionPublicationDate')\r\n\r\n if FRBRdateExpressionNode:\r\n FRBRdateExpressionNode[0].set('date', publicationDate)\r\n FRBRdateExpressionNode[0].set('name', 'decisionPublicationDate')\r\n #else:\r\n # hasDecisionPublicationDate = False\r\n ####################################################################\r\n\r\n Akn_LOGGER.info('Stop searching for dates of interest...')\r\n \r\n # Create the corresponding ElementTree object\r\n XmlTree = etree.ElementTree(akomaNtosoElem)\r\n #print etree.tostring(\r\n # XmlTree,\r\n # pretty_print = True,\r\n # encoding=\"UTF-8\",\r\n # xml_declaration = True\r\n # )\r\n \r\n # Open the XML file and append elementTree to it\r\n Akn_LOGGER.info('Creating XML file...')\r\n # Problem with href range_id cannot retain '>' character,\r\n # so write string tree representation to file\r\n with codecs.open(xml_file, \"w\") as fin:\r\n fin.write(\r\n etree.tostring(\r\n XmlTree,\r\n pretty_print=True,\r\n encoding=\"UTF-8\",\r\n xml_declaration =True\r\n ).replace('&gt;', '>')\r\n )\r\n\r\n ########## copy XML tree and save it without including NER ##############\r\n \"\"\"\r\n rootNode = XmlTree.getroot()\r\n for child in rootNode.xpath(\"./judgment/meta/references\"):\r\n for child_lv2 in child:\r\n if child_lv2.tag == 'TLCOrganization' or child_lv2.tag == 'TLCPerson' or child_lv2.tag == 'TLCLocation':\r\n #print child_lv2\r\n child_lv2.getparent().remove(child_lv2)\r\n\r\n XmlTreeStr_NO_NER = etree.tostring(\r\n XmlTree,\r\n pretty_print=True,\r\n encoding=\"UTF-8\",\r\n xml_declaration =True\r\n )\r\n XmlTreeStr_NO_NER = re.sub(\r\n 
r'[<]/?organization.*?[>]',\r\n '',\r\n XmlTreeStr_NO_NER,\r\n flags = re.DOTALL\r\n )\r\n XmlTreeStr_NO_NER = re.sub(\r\n r'[<]/?person.*?[>]',\r\n '',\r\n XmlTreeStr_NO_NER,\r\n flags = re.DOTALL\r\n )\r\n XmlTreeStr_NO_NER = re.sub(\r\n r'[<]/?location.*?[>]',\r\n '',\r\n XmlTreeStr_NO_NER,\r\n flags = re.DOTALL\r\n )\r\n #print XmlTreeStr_NO_NER\r\n # etree.fromstring is being used it will change\r\n # range ids character '>' to &gt; \r\n XmlElement_NO_NER = etree.fromstring(XmlTreeStr_NO_NER)\r\n #print XmlElement_NO_NER\r\n XmlTree_NO_NER = etree.ElementTree(XmlElement_NO_NER)\r\n #print XmlElement_NO_NER\r\n with codecs.open(xml_file_NO_NER, \"w\") as fin:\r\n fin.write(\r\n etree.tostring(\r\n XmlTree_NO_NER,\r\n pretty_print=True,\r\n encoding=\"UTF-8\",\r\n xml_declaration =True\r\n ).replace('&gt;', '>')\r\n )\r\n \"\"\"\r\n ########################################################################\r\n\r\n # Validation\r\n validateXML('akomantoso30.xsd', xml_file, log_file)\r\n #is_valid = CheckXMLvalidity('akomantoso30.xsd', xml_file)\r\n #print is_valid\r\n\r\n except etree.XMLSyntaxError:\r\n # Something went wrong write the corresponding\r\n # XML string to a .txt file\r\n Akn_LOGGER.info('Could not create XML element from string! Check validity!')\r\n with open(text_file, \"w\") as fin:\r\n fin.write(judgmentObj.text)\r\n\r\n #with open(text_file_NO_NER, \"w\") as fin:\r\n # fin.write(judgmentObj.text)\r\n\r\n except KeyboardInterrupt:\r\n raise\r\n\r\n except Exception as e:\r\n print(e)\r\n Akn_LOGGER.info('Something went wrong! 
Error raised and passed...')\r\n with open(text_file, \"w\") as fin:\r\n fin.write('')\r\n\r\n #with open(text_file_NO_NER, \"w\") as fin:\r\n # fin.write('')\r\n #pass\r\n\r\n end_time = time.clock()\r\n file_process_time = round(end_time - start_time, 2)\r\n #print is_valid\r\n Akn_LOGGER.info('file process time: %s', file_process_time)\r\n #with open (general_LOG_file, \"a\") as file_log:\r\n # file_log.write(\r\n # os.path.join(root, name) +\r\n # ';' +\r\n # str(file_process_time) +\r\n # ';' +\r\n # str(is_valid) +\r\n # '\\n'\r\n # )\r\n \r\n logging.shutdown()\r\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
import mosquitto import json import time device_id = "868850013067326" # The callback for when the client receives a CONNACK response from the server. def on_connect(mosq, userdata, rc): print("Connected with result code "+str(rc)) # Subscribing in on_connect() means that if we lose the connection and # reconnect then subscriptions will be renewed. client.subscribe('vneigbor/%s' % device_id) # The callback for when a PUBLISH message is received from the server. def on_message(mosq, userdata, message): print message.topic, message.payload client = mosquitto.Mosquitto("mosq-rec") client.on_connect = on_connect client.on_message = on_message client.connect("localhost", "18833") # Blocking call that processes network traffic, dispatches callbacks and # handles reconnecting. # Other loop*() functions are available that give a threaded interface and a # manual interface. client.loop_forever()
normal
{ "blob_id": "673d6bb02ec666dbdbecb5fd7fd5041da1941cf8", "index": 2251, "step-1": "import mosquitto\nimport json\nimport time\ndevice_id = \"868850013067326\"\n\n# The callback for when the client receives a CONNACK response from the server.\ndef on_connect(mosq, userdata, rc):\n print(\"Connected with result code \"+str(rc))\n\n # Subscribing in on_connect() means that if we lose the connection and\n # reconnect then subscriptions will be renewed.\n client.subscribe('vneigbor/%s' % device_id)\n\n# The callback for when a PUBLISH message is received from the server.\ndef on_message(mosq, userdata, message):\n print message.topic, message.payload\n\nclient = mosquitto.Mosquitto(\"mosq-rec\")\nclient.on_connect = on_connect\nclient.on_message = on_message\n\nclient.connect(\"localhost\", \"18833\")\n\n# Blocking call that processes network traffic, dispatches callbacks and\n# handles reconnecting.\n# Other loop*() functions are available that give a threaded interface and a\n# manual interface.\nclient.loop_forever()\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> include('RecExRecoTest/RecExRecoTest_RTT_common.py') <|reserved_special_token_0|> include('RecExCommon/rdotoesdnotrigger.py') include('RecExRecoTest/RecExRecoTest_RTT_common_postOptions.py') <|reserved_special_token_1|> include('RecExRecoTest/RecExRecoTest_RTT_common.py') <|reserved_special_token_0|> BTaggingFlags.Active = False include('RecExCommon/rdotoesdnotrigger.py') include('RecExRecoTest/RecExRecoTest_RTT_common_postOptions.py') <|reserved_special_token_1|> include('RecExRecoTest/RecExRecoTest_RTT_common.py') from BTagging.BTaggingFlags import BTaggingFlags BTaggingFlags.Active = False include('RecExCommon/rdotoesdnotrigger.py') include('RecExRecoTest/RecExRecoTest_RTT_common_postOptions.py') <|reserved_special_token_1|> include ("RecExRecoTest/RecExRecoTest_RTT_common.py") from BTagging.BTaggingFlags import BTaggingFlags BTaggingFlags.Active=False # main jobOption include ("RecExCommon/rdotoesdnotrigger.py") include ("RecExRecoTest/RecExRecoTest_RTT_common_postOptions.py")
flexible
{ "blob_id": "34c91d273648ae72731fba7f5519a4920d77c0c3", "index": 7192, "step-1": "<mask token>\n", "step-2": "include('RecExRecoTest/RecExRecoTest_RTT_common.py')\n<mask token>\ninclude('RecExCommon/rdotoesdnotrigger.py')\ninclude('RecExRecoTest/RecExRecoTest_RTT_common_postOptions.py')\n", "step-3": "include('RecExRecoTest/RecExRecoTest_RTT_common.py')\n<mask token>\nBTaggingFlags.Active = False\ninclude('RecExCommon/rdotoesdnotrigger.py')\ninclude('RecExRecoTest/RecExRecoTest_RTT_common_postOptions.py')\n", "step-4": "include('RecExRecoTest/RecExRecoTest_RTT_common.py')\nfrom BTagging.BTaggingFlags import BTaggingFlags\nBTaggingFlags.Active = False\ninclude('RecExCommon/rdotoesdnotrigger.py')\ninclude('RecExRecoTest/RecExRecoTest_RTT_common_postOptions.py')\n", "step-5": "include (\"RecExRecoTest/RecExRecoTest_RTT_common.py\")\n\n\nfrom BTagging.BTaggingFlags import BTaggingFlags\nBTaggingFlags.Active=False\n\n# main jobOption\ninclude (\"RecExCommon/rdotoesdnotrigger.py\")\n\n\ninclude (\"RecExRecoTest/RecExRecoTest_RTT_common_postOptions.py\")\n\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> urlpatterns = patterns('', url(regex='new_subscriber/$', view= add_new_subscriber, name='support.new_subscriber'), url(regex= 'update_subscriber/(?P<pk>\\d+)/$', view=update_existing_subscriber, name='support.update_subscriber'), url(regex='edit_subscriber/$', view= EditSubscriberView.as_view(), name='support.edit_subscriber'), url( regex='deactivate_subscriber/$', view=DeActivateSubscriberView.as_view( ), name='support.deactivate_subscriber'), url(regex= 'reactivate_subscriber/$', view=ReActivateSubscriberView.as_view(), name='support.reactivate_subscriber'), url(regex='reports/$', view= SupportSubscriberReportView.as_view(), name='support.subscriber_report' ), url(regex='dashboard/$', view=DashboardView.as_view(), name= 'support.dashboard')) <|reserved_special_token_1|> from django.conf.urls import patterns, url from customer_support.views import update_existing_subscriber, add_new_subscriber from .views import EditSubscriberView, DeActivateSubscriberView, ReActivateSubscriberView, SupportSubscriberReportView, DashboardView urlpatterns = patterns('', url(regex='new_subscriber/$', view= add_new_subscriber, name='support.new_subscriber'), url(regex= 'update_subscriber/(?P<pk>\\d+)/$', view=update_existing_subscriber, name='support.update_subscriber'), url(regex='edit_subscriber/$', view= EditSubscriberView.as_view(), name='support.edit_subscriber'), url( regex='deactivate_subscriber/$', view=DeActivateSubscriberView.as_view( ), name='support.deactivate_subscriber'), url(regex= 'reactivate_subscriber/$', view=ReActivateSubscriberView.as_view(), name='support.reactivate_subscriber'), url(regex='reports/$', view= SupportSubscriberReportView.as_view(), name='support.subscriber_report' ), url(regex='dashboard/$', view=DashboardView.as_view(), name= 'support.dashboard')) <|reserved_special_token_1|> # -*- coding: utf-8 -*- from django.conf.urls import patterns, url from 
customer_support.views import update_existing_subscriber, \ add_new_subscriber from .views import (EditSubscriberView, DeActivateSubscriberView, ReActivateSubscriberView, SupportSubscriberReportView, DashboardView) urlpatterns = patterns('', url( regex=r'new_subscriber/$', view=add_new_subscriber, name="support.new_subscriber" ), url( regex=r'update_subscriber/(?P<pk>\d+)/$', view=update_existing_subscriber, name="support.update_subscriber" ), url( regex=r'edit_subscriber/$', view=EditSubscriberView.as_view(), name="support.edit_subscriber" ), url( regex=r'deactivate_subscriber/$', view=DeActivateSubscriberView.as_view(), name="support.deactivate_subscriber" ), url( regex=r'reactivate_subscriber/$', view=ReActivateSubscriberView.as_view(), name="support.reactivate_subscriber" ), url( regex=r'reports/$', view=SupportSubscriberReportView.as_view(), name="support.subscriber_report" ), url( regex=r'dashboard/$', view=DashboardView.as_view(), name="support.dashboard" ), )
flexible
{ "blob_id": "fb4818e742ed3c7d131c426811f839dbe70f03de", "index": 2650, "step-1": "<mask token>\n", "step-2": "<mask token>\nurlpatterns = patterns('', url(regex='new_subscriber/$', view=\n add_new_subscriber, name='support.new_subscriber'), url(regex=\n 'update_subscriber/(?P<pk>\\\\d+)/$', view=update_existing_subscriber,\n name='support.update_subscriber'), url(regex='edit_subscriber/$', view=\n EditSubscriberView.as_view(), name='support.edit_subscriber'), url(\n regex='deactivate_subscriber/$', view=DeActivateSubscriberView.as_view(\n ), name='support.deactivate_subscriber'), url(regex=\n 'reactivate_subscriber/$', view=ReActivateSubscriberView.as_view(),\n name='support.reactivate_subscriber'), url(regex='reports/$', view=\n SupportSubscriberReportView.as_view(), name='support.subscriber_report'\n ), url(regex='dashboard/$', view=DashboardView.as_view(), name=\n 'support.dashboard'))\n", "step-3": "from django.conf.urls import patterns, url\nfrom customer_support.views import update_existing_subscriber, add_new_subscriber\nfrom .views import EditSubscriberView, DeActivateSubscriberView, ReActivateSubscriberView, SupportSubscriberReportView, DashboardView\nurlpatterns = patterns('', url(regex='new_subscriber/$', view=\n add_new_subscriber, name='support.new_subscriber'), url(regex=\n 'update_subscriber/(?P<pk>\\\\d+)/$', view=update_existing_subscriber,\n name='support.update_subscriber'), url(regex='edit_subscriber/$', view=\n EditSubscriberView.as_view(), name='support.edit_subscriber'), url(\n regex='deactivate_subscriber/$', view=DeActivateSubscriberView.as_view(\n ), name='support.deactivate_subscriber'), url(regex=\n 'reactivate_subscriber/$', view=ReActivateSubscriberView.as_view(),\n name='support.reactivate_subscriber'), url(regex='reports/$', view=\n SupportSubscriberReportView.as_view(), name='support.subscriber_report'\n ), url(regex='dashboard/$', view=DashboardView.as_view(), name=\n 'support.dashboard'))\n", "step-4": "# -*- coding: utf-8 
-*-\n\nfrom django.conf.urls import patterns, url\n\nfrom customer_support.views import update_existing_subscriber, \\\n add_new_subscriber\n\nfrom .views import (EditSubscriberView,\n DeActivateSubscriberView,\n ReActivateSubscriberView,\n SupportSubscriberReportView,\n DashboardView)\n\n\nurlpatterns = patterns('',\n url(\n regex=r'new_subscriber/$',\n view=add_new_subscriber,\n name=\"support.new_subscriber\"\n ),\n url(\n regex=r'update_subscriber/(?P<pk>\\d+)/$',\n view=update_existing_subscriber,\n name=\"support.update_subscriber\"\n ),\n url(\n regex=r'edit_subscriber/$',\n view=EditSubscriberView.as_view(),\n name=\"support.edit_subscriber\"\n ),\n url(\n regex=r'deactivate_subscriber/$',\n view=DeActivateSubscriberView.as_view(),\n name=\"support.deactivate_subscriber\"\n ),\n url(\n regex=r'reactivate_subscriber/$',\n view=ReActivateSubscriberView.as_view(),\n name=\"support.reactivate_subscriber\"\n ),\n url(\n regex=r'reports/$',\n view=SupportSubscriberReportView.as_view(),\n name=\"support.subscriber_report\"\n ),\n url(\n regex=r'dashboard/$',\n view=DashboardView.as_view(),\n name=\"support.dashboard\"\n ),\n)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from end import Client c = Client()
normal
{ "blob_id": "1be510e6715d21e814c48fe05496704e9a65d554", "index": 308, "step-1": "<mask token>\n", "step-2": "<mask token>\nc = Client()\n", "step-3": "from end import Client\nc = Client()\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
import pandas as pd df = pd.DataFrame({'col1':[1,2,3,4],'col2':[444,555,666,444],'col3':['abc','def','ghi','xyz']}) print(df.head()) #print(df['col2'].unique()) #print(df['col1'] > 2) newdf = df[(df['col1']>0) & (df['col2'] == 444)] print("========================") print(newdf) def times2(x): return x*2 print("========================") print(df['col1'].apply(times2)) print("========================") print(df.sort_values(by='col2')) print("========================") print(df)
normal
{ "blob_id": "422a4945ebf453d3e09e9e7e76dd32b30488680e", "index": 3011, "step-1": "<mask token>\n\n\ndef times2(x):\n return x * 2\n\n\n<mask token>\n", "step-2": "<mask token>\nprint(df.head())\n<mask token>\nprint('========================')\nprint(newdf)\n\n\ndef times2(x):\n return x * 2\n\n\nprint('========================')\nprint(df['col1'].apply(times2))\nprint('========================')\nprint(df.sort_values(by='col2'))\nprint('========================')\nprint(df)\n", "step-3": "<mask token>\ndf = pd.DataFrame({'col1': [1, 2, 3, 4], 'col2': [444, 555, 666, 444],\n 'col3': ['abc', 'def', 'ghi', 'xyz']})\nprint(df.head())\nnewdf = df[(df['col1'] > 0) & (df['col2'] == 444)]\nprint('========================')\nprint(newdf)\n\n\ndef times2(x):\n return x * 2\n\n\nprint('========================')\nprint(df['col1'].apply(times2))\nprint('========================')\nprint(df.sort_values(by='col2'))\nprint('========================')\nprint(df)\n", "step-4": "import pandas as pd\ndf = pd.DataFrame({'col1': [1, 2, 3, 4], 'col2': [444, 555, 666, 444],\n 'col3': ['abc', 'def', 'ghi', 'xyz']})\nprint(df.head())\nnewdf = df[(df['col1'] > 0) & (df['col2'] == 444)]\nprint('========================')\nprint(newdf)\n\n\ndef times2(x):\n return x * 2\n\n\nprint('========================')\nprint(df['col1'].apply(times2))\nprint('========================')\nprint(df.sort_values(by='col2'))\nprint('========================')\nprint(df)\n", "step-5": "import pandas as pd\ndf = pd.DataFrame({'col1':[1,2,3,4],'col2':[444,555,666,444],'col3':['abc','def','ghi','xyz']})\nprint(df.head())\n#print(df['col2'].unique())\n#print(df['col1'] > 2)\nnewdf = df[(df['col1']>0) & (df['col2'] == 444)]\nprint(\"========================\")\nprint(newdf)\n\ndef times2(x):\n return x*2\n\nprint(\"========================\")\nprint(df['col1'].apply(times2))\n\nprint(\"========================\")\nprint(df.sort_values(by='col2'))\nprint(\"========================\")\nprint(df)", "step-ids": [ 1, 
2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> def solve(): valid_passes = 0 with open('.\\day4.txt') as fp: for line in fp.read().strip().splitlines(): list_of_words = set() add = 1 for word in line.split(): modified_word = ''.join(sorted(word)) if modified_word in list_of_words: add = 0 break else: list_of_words.add(modified_word) valid_passes += add return valid_passes <|reserved_special_token_0|> <|reserved_special_token_1|> def solve(): valid_passes = 0 with open('.\\day4.txt') as fp: for line in fp.read().strip().splitlines(): list_of_words = set() add = 1 for word in line.split(): modified_word = ''.join(sorted(word)) if modified_word in list_of_words: add = 0 break else: list_of_words.add(modified_word) valid_passes += add return valid_passes print(solve()) <|reserved_special_token_1|> def solve(): valid_passes = 0 with open('.\day4.txt') as fp: for line in fp.read().strip().splitlines(): list_of_words = set() add = 1 for word in line.split(): modified_word = ''.join(sorted(word)) if modified_word in list_of_words: add = 0 break else: list_of_words.add(modified_word) valid_passes += add return valid_passes print(solve())
flexible
{ "blob_id": "870d260b58c10e0379d66c3b44bc45594ff7d666", "index": 4396, "step-1": "<mask token>\n", "step-2": "def solve():\n valid_passes = 0\n with open('.\\\\day4.txt') as fp:\n for line in fp.read().strip().splitlines():\n list_of_words = set()\n add = 1\n for word in line.split():\n modified_word = ''.join(sorted(word))\n if modified_word in list_of_words:\n add = 0\n break\n else:\n list_of_words.add(modified_word)\n valid_passes += add\n return valid_passes\n\n\n<mask token>\n", "step-3": "def solve():\n valid_passes = 0\n with open('.\\\\day4.txt') as fp:\n for line in fp.read().strip().splitlines():\n list_of_words = set()\n add = 1\n for word in line.split():\n modified_word = ''.join(sorted(word))\n if modified_word in list_of_words:\n add = 0\n break\n else:\n list_of_words.add(modified_word)\n valid_passes += add\n return valid_passes\n\n\nprint(solve())\n", "step-4": "\ndef solve():\n\n valid_passes = 0\n with open('.\\day4.txt') as fp:\n for line in fp.read().strip().splitlines():\n list_of_words = set()\n add = 1\n for word in line.split():\n modified_word = ''.join(sorted(word))\n if modified_word in list_of_words:\n add = 0\n break\n else:\n list_of_words.add(modified_word)\n \n valid_passes += add\n\n return valid_passes\n\nprint(solve())\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> class TypeFrame(wx.Frame): <|reserved_special_token_0|> def createCogButtons(self, row): cogButtons = self.domButtons if row == 0 else self.auxButtons labels = ['N', 'S', 'T', 'F'] for i in range(4): cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i', size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else self.row_2_y))) cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e', size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else self.row_2_y))) for i in range(8): self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i]) if row == 1: for button in self.auxButtons: button.Disable() def onclick_cogFunction(self, event): btnLabel = event.GetEventObject().GetLabel() if self.rowCount == 0: self.rowCount = 1 self.entityList.append(self.labelToFunction(btnLabel)) for button in self.domButtons: button.Disable() for button in self.auxButtons: if button.Label[1] == self.entityList[0].opposite( ).sublabel and button.Label[0] != self.entityList[0 ].opposite_orientation().label and button.Label[0 ] != self.entityList[0].label: button.Enable() else: self.entityList.append(self.labelToFunction(btnLabel)) for button in self.auxButtons: button.Disable() if len(self.entityList) == 2: e = Entity(self.entityList) print(Translator.translate_orientation(e) + Translator. translate_observing(e) + Translator. translate_decision_making(e) + Translator. 
translate_perception(e)) def labelToFunction(self, btnLabel): if btnLabel == 'Ni': return Ni elif btnLabel == 'Ne': return Ne elif btnLabel == 'Si': return Si elif btnLabel == 'Se': return Se elif btnLabel == 'Ti': return Ti elif btnLabel == 'Te': return Te elif btnLabel == 'Fi': return Fi elif btnLabel == 'Fe': return Fe <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class TypeFrame(wx.Frame): def __init__(self, parent, title): wx.Frame.__init__(self, parent, title=title, size=(530, 480), style =wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER) self.panel = wx.Panel(self) self.entityList = [] self.domButtons = [] self.auxButtons = [] self.rowCount = 0 self.row_1_y = 30 self.row_2_y = 90 self.row_3_y = 150 wx.StaticText(self.panel, label='Dominant Function:', pos=(30, self .row_1_y - 20)) self.createCogButtons(0) wx.StaticText(self.panel, label='Auxiliary Function:', pos=(30, self.row_2_y - 20)) self.createCogButtons(1) def createCogButtons(self, row): cogButtons = self.domButtons if row == 0 else self.auxButtons labels = ['N', 'S', 'T', 'F'] for i in range(4): cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i', size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else self.row_2_y))) cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e', size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else self.row_2_y))) for i in range(8): self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i]) if row == 1: for button in self.auxButtons: button.Disable() def onclick_cogFunction(self, event): btnLabel = event.GetEventObject().GetLabel() if self.rowCount == 0: self.rowCount = 1 self.entityList.append(self.labelToFunction(btnLabel)) for button in self.domButtons: button.Disable() for button in self.auxButtons: if button.Label[1] == self.entityList[0].opposite( ).sublabel and button.Label[0] != self.entityList[0 ].opposite_orientation().label and button.Label[0 ] != self.entityList[0].label: 
button.Enable() else: self.entityList.append(self.labelToFunction(btnLabel)) for button in self.auxButtons: button.Disable() if len(self.entityList) == 2: e = Entity(self.entityList) print(Translator.translate_orientation(e) + Translator. translate_observing(e) + Translator. translate_decision_making(e) + Translator. translate_perception(e)) def labelToFunction(self, btnLabel): if btnLabel == 'Ni': return Ni elif btnLabel == 'Ne': return Ne elif btnLabel == 'Si': return Si elif btnLabel == 'Se': return Se elif btnLabel == 'Ti': return Ti elif btnLabel == 'Te': return Te elif btnLabel == 'Fi': return Fi elif btnLabel == 'Fe': return Fe <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class TypeFrame(wx.Frame): def __init__(self, parent, title): wx.Frame.__init__(self, parent, title=title, size=(530, 480), style =wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER) self.panel = wx.Panel(self) self.entityList = [] self.domButtons = [] self.auxButtons = [] self.rowCount = 0 self.row_1_y = 30 self.row_2_y = 90 self.row_3_y = 150 wx.StaticText(self.panel, label='Dominant Function:', pos=(30, self .row_1_y - 20)) self.createCogButtons(0) wx.StaticText(self.panel, label='Auxiliary Function:', pos=(30, self.row_2_y - 20)) self.createCogButtons(1) def createCogButtons(self, row): cogButtons = self.domButtons if row == 0 else self.auxButtons labels = ['N', 'S', 'T', 'F'] for i in range(4): cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i', size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else self.row_2_y))) cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e', size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else self.row_2_y))) for i in range(8): self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i]) if row == 1: for button in self.auxButtons: button.Disable() def onclick_cogFunction(self, event): btnLabel = event.GetEventObject().GetLabel() if self.rowCount == 0: self.rowCount = 1 
self.entityList.append(self.labelToFunction(btnLabel)) for button in self.domButtons: button.Disable() for button in self.auxButtons: if button.Label[1] == self.entityList[0].opposite( ).sublabel and button.Label[0] != self.entityList[0 ].opposite_orientation().label and button.Label[0 ] != self.entityList[0].label: button.Enable() else: self.entityList.append(self.labelToFunction(btnLabel)) for button in self.auxButtons: button.Disable() if len(self.entityList) == 2: e = Entity(self.entityList) print(Translator.translate_orientation(e) + Translator. translate_observing(e) + Translator. translate_decision_making(e) + Translator. translate_perception(e)) def labelToFunction(self, btnLabel): if btnLabel == 'Ni': return Ni elif btnLabel == 'Ne': return Ne elif btnLabel == 'Si': return Si elif btnLabel == 'Se': return Se elif btnLabel == 'Ti': return Ti elif btnLabel == 'Te': return Te elif btnLabel == 'Fi': return Fi elif btnLabel == 'Fe': return Fe if __name__ == '__main__': app = wx.App() frame = TypeFrame(None, title='Socionics Engine') frame.Show() app.MainLoop() <|reserved_special_token_1|> import wx from cognitive_function import * from entity import Entity from function_to_type import Translator from function_analysis import * class TypeFrame(wx.Frame): def __init__(self, parent, title): wx.Frame.__init__(self, parent, title=title, size=(530, 480), style =wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER) self.panel = wx.Panel(self) self.entityList = [] self.domButtons = [] self.auxButtons = [] self.rowCount = 0 self.row_1_y = 30 self.row_2_y = 90 self.row_3_y = 150 wx.StaticText(self.panel, label='Dominant Function:', pos=(30, self .row_1_y - 20)) self.createCogButtons(0) wx.StaticText(self.panel, label='Auxiliary Function:', pos=(30, self.row_2_y - 20)) self.createCogButtons(1) def createCogButtons(self, row): cogButtons = self.domButtons if row == 0 else self.auxButtons labels = ['N', 'S', 'T', 'F'] for i in range(4): cogButtons.append(wx.Button(self.panel, 
label=labels[i] + 'i', size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else self.row_2_y))) cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e', size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else self.row_2_y))) for i in range(8): self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i]) if row == 1: for button in self.auxButtons: button.Disable() def onclick_cogFunction(self, event): btnLabel = event.GetEventObject().GetLabel() if self.rowCount == 0: self.rowCount = 1 self.entityList.append(self.labelToFunction(btnLabel)) for button in self.domButtons: button.Disable() for button in self.auxButtons: if button.Label[1] == self.entityList[0].opposite( ).sublabel and button.Label[0] != self.entityList[0 ].opposite_orientation().label and button.Label[0 ] != self.entityList[0].label: button.Enable() else: self.entityList.append(self.labelToFunction(btnLabel)) for button in self.auxButtons: button.Disable() if len(self.entityList) == 2: e = Entity(self.entityList) print(Translator.translate_orientation(e) + Translator. translate_observing(e) + Translator. translate_decision_making(e) + Translator. 
translate_perception(e)) def labelToFunction(self, btnLabel): if btnLabel == 'Ni': return Ni elif btnLabel == 'Ne': return Ne elif btnLabel == 'Si': return Si elif btnLabel == 'Se': return Se elif btnLabel == 'Ti': return Ti elif btnLabel == 'Te': return Te elif btnLabel == 'Fi': return Fi elif btnLabel == 'Fe': return Fe if __name__ == '__main__': app = wx.App() frame = TypeFrame(None, title='Socionics Engine') frame.Show() app.MainLoop() <|reserved_special_token_1|> #!/usr/bin/env python3 # # main.py - By Steven Chen Hao Nyeo # Graphical interface for Socionics Engine # Created: August 8, 2019 import wx from cognitive_function import * from entity import Entity from function_to_type import Translator from function_analysis import * class TypeFrame(wx.Frame): def __init__(self, parent, title): # Create Frame wx.Frame.__init__(self, parent, title = title, size = (530, 480), style = wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER) self.panel = wx.Panel(self) # The current list of cognitive functions entered into the system self.entityList = [] # Arrays containing the rows of buttons for dominant and auxiliary functions self.domButtons = [] self.auxButtons = [] # Keep track of the current row of buttons to enable self.rowCount = 0 # Setup for program interface self.row_1_y = 30 self.row_2_y = 90 self.row_3_y = 150 wx.StaticText(self.panel, label = "Dominant Function:", pos = (30, self.row_1_y - 20)) self.createCogButtons(0) wx.StaticText(self.panel, label = "Auxiliary Function:", pos = (30, self.row_2_y - 20)) self.createCogButtons(1) # The function that creates the buttons for the eight cognitive functions def createCogButtons(self, row): # Keeps track of creation of dominant or auxiliary buttons cogButtons = self.domButtons if row == 0 else self.auxButtons # Create and bind the buttons to the event labels = ["N", "S", "T", "F"] for i in range(4): cogButtons.append(wx.Button(self.panel, label = labels[i] + "i", size = (50, 30) , pos = (30 + 120 * i, self.row_1_y if row == 
0 else self.row_2_y))) cogButtons.append(wx.Button(self.panel, label = labels[i] + "e", size = (50, 30) , pos = (90 + 120 * i, self.row_1_y if row == 0 else self.row_2_y))) for i in range(8): self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i]) # The auxiliary buttons are disabled before the dominant function is entered if (row == 1): for button in self.auxButtons: button.Disable() # The event handler for clicking on the buttons def onclick_cogFunction(self, event): btnLabel = event.GetEventObject().GetLabel() # First row - dominant function if (self.rowCount == 0): # Disable the dominant function buttons self.rowCount = 1 self.entityList.append(self.labelToFunction(btnLabel)) for button in self.domButtons: button.Disable() # Re-enable the appropriate auxiliary function buttons for button in self.auxButtons: if (button.Label[1] == self.entityList[0].opposite().sublabel and button.Label[0] != self.entityList[0].opposite_orientation().label and button.Label[0] != self.entityList[0].label): button.Enable() # Second row - auxiliary function else: self.entityList.append(self.labelToFunction(btnLabel)) for button in self.auxButtons: button.Disable() if (len(self.entityList) == 2): e = Entity(self.entityList) print(Translator.translate_orientation(e) + Translator.translate_observing(e) + Translator.translate_decision_making(e) + Translator.translate_perception(e)) # The helper functin that returns the corresponding function object according to the entered string def labelToFunction(self, btnLabel): if (btnLabel == "Ni"): return Ni elif (btnLabel == "Ne"): return Ne elif (btnLabel == "Si"): return Si elif (btnLabel == "Se"): return Se elif (btnLabel == "Ti"): return Ti elif (btnLabel == "Te"): return Te elif (btnLabel == "Fi"): return Fi elif (btnLabel == "Fe"): return Fe if __name__ == "__main__": app = wx.App() frame = TypeFrame(None, title = "Socionics Engine") frame.Show() app.MainLoop()
flexible
{ "blob_id": "519dbe97ce9de30e616d660ef168e686c52b01b5", "index": 5452, "step-1": "<mask token>\n\n\nclass TypeFrame(wx.Frame):\n <mask token>\n\n def createCogButtons(self, row):\n cogButtons = self.domButtons if row == 0 else self.auxButtons\n labels = ['N', 'S', 'T', 'F']\n for i in range(4):\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i',\n size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e',\n size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n if row == 1:\n for button in self.auxButtons:\n button.Disable()\n\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n if self.rowCount == 0:\n self.rowCount = 1\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n for button in self.auxButtons:\n if button.Label[1] == self.entityList[0].opposite(\n ).sublabel and button.Label[0] != self.entityList[0\n ].opposite_orientation().label and button.Label[0\n ] != self.entityList[0].label:\n button.Enable()\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n if len(self.entityList) == 2:\n e = Entity(self.entityList)\n print(Translator.translate_orientation(e) + Translator.\n translate_observing(e) + Translator.\n translate_decision_making(e) + Translator.\n translate_perception(e))\n\n def labelToFunction(self, btnLabel):\n if btnLabel == 'Ni':\n return Ni\n elif btnLabel == 'Ne':\n return Ne\n elif btnLabel == 'Si':\n return Si\n elif btnLabel == 'Se':\n return Se\n elif btnLabel == 'Ti':\n return Ti\n elif btnLabel == 'Te':\n return Te\n elif btnLabel == 'Fi':\n return Fi\n elif btnLabel == 'Fe':\n return Fe\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass 
TypeFrame(wx.Frame):\n\n def __init__(self, parent, title):\n wx.Frame.__init__(self, parent, title=title, size=(530, 480), style\n =wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)\n self.panel = wx.Panel(self)\n self.entityList = []\n self.domButtons = []\n self.auxButtons = []\n self.rowCount = 0\n self.row_1_y = 30\n self.row_2_y = 90\n self.row_3_y = 150\n wx.StaticText(self.panel, label='Dominant Function:', pos=(30, self\n .row_1_y - 20))\n self.createCogButtons(0)\n wx.StaticText(self.panel, label='Auxiliary Function:', pos=(30, \n self.row_2_y - 20))\n self.createCogButtons(1)\n\n def createCogButtons(self, row):\n cogButtons = self.domButtons if row == 0 else self.auxButtons\n labels = ['N', 'S', 'T', 'F']\n for i in range(4):\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i',\n size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e',\n size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n if row == 1:\n for button in self.auxButtons:\n button.Disable()\n\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n if self.rowCount == 0:\n self.rowCount = 1\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n for button in self.auxButtons:\n if button.Label[1] == self.entityList[0].opposite(\n ).sublabel and button.Label[0] != self.entityList[0\n ].opposite_orientation().label and button.Label[0\n ] != self.entityList[0].label:\n button.Enable()\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n if len(self.entityList) == 2:\n e = Entity(self.entityList)\n print(Translator.translate_orientation(e) + Translator.\n translate_observing(e) + Translator.\n translate_decision_making(e) + 
Translator.\n translate_perception(e))\n\n def labelToFunction(self, btnLabel):\n if btnLabel == 'Ni':\n return Ni\n elif btnLabel == 'Ne':\n return Ne\n elif btnLabel == 'Si':\n return Si\n elif btnLabel == 'Se':\n return Se\n elif btnLabel == 'Ti':\n return Ti\n elif btnLabel == 'Te':\n return Te\n elif btnLabel == 'Fi':\n return Fi\n elif btnLabel == 'Fe':\n return Fe\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass TypeFrame(wx.Frame):\n\n def __init__(self, parent, title):\n wx.Frame.__init__(self, parent, title=title, size=(530, 480), style\n =wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)\n self.panel = wx.Panel(self)\n self.entityList = []\n self.domButtons = []\n self.auxButtons = []\n self.rowCount = 0\n self.row_1_y = 30\n self.row_2_y = 90\n self.row_3_y = 150\n wx.StaticText(self.panel, label='Dominant Function:', pos=(30, self\n .row_1_y - 20))\n self.createCogButtons(0)\n wx.StaticText(self.panel, label='Auxiliary Function:', pos=(30, \n self.row_2_y - 20))\n self.createCogButtons(1)\n\n def createCogButtons(self, row):\n cogButtons = self.domButtons if row == 0 else self.auxButtons\n labels = ['N', 'S', 'T', 'F']\n for i in range(4):\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i',\n size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e',\n size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n if row == 1:\n for button in self.auxButtons:\n button.Disable()\n\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n if self.rowCount == 0:\n self.rowCount = 1\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n for button in self.auxButtons:\n if button.Label[1] == self.entityList[0].opposite(\n ).sublabel and button.Label[0] != 
self.entityList[0\n ].opposite_orientation().label and button.Label[0\n ] != self.entityList[0].label:\n button.Enable()\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n if len(self.entityList) == 2:\n e = Entity(self.entityList)\n print(Translator.translate_orientation(e) + Translator.\n translate_observing(e) + Translator.\n translate_decision_making(e) + Translator.\n translate_perception(e))\n\n def labelToFunction(self, btnLabel):\n if btnLabel == 'Ni':\n return Ni\n elif btnLabel == 'Ne':\n return Ne\n elif btnLabel == 'Si':\n return Si\n elif btnLabel == 'Se':\n return Se\n elif btnLabel == 'Ti':\n return Ti\n elif btnLabel == 'Te':\n return Te\n elif btnLabel == 'Fi':\n return Fi\n elif btnLabel == 'Fe':\n return Fe\n\n\nif __name__ == '__main__':\n app = wx.App()\n frame = TypeFrame(None, title='Socionics Engine')\n frame.Show()\n app.MainLoop()\n", "step-4": "import wx\nfrom cognitive_function import *\nfrom entity import Entity\nfrom function_to_type import Translator\nfrom function_analysis import *\n\n\nclass TypeFrame(wx.Frame):\n\n def __init__(self, parent, title):\n wx.Frame.__init__(self, parent, title=title, size=(530, 480), style\n =wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)\n self.panel = wx.Panel(self)\n self.entityList = []\n self.domButtons = []\n self.auxButtons = []\n self.rowCount = 0\n self.row_1_y = 30\n self.row_2_y = 90\n self.row_3_y = 150\n wx.StaticText(self.panel, label='Dominant Function:', pos=(30, self\n .row_1_y - 20))\n self.createCogButtons(0)\n wx.StaticText(self.panel, label='Auxiliary Function:', pos=(30, \n self.row_2_y - 20))\n self.createCogButtons(1)\n\n def createCogButtons(self, row):\n cogButtons = self.domButtons if row == 0 else self.auxButtons\n labels = ['N', 'S', 'T', 'F']\n for i in range(4):\n cogButtons.append(wx.Button(self.panel, label=labels[i] + 'i',\n size=(50, 30), pos=(30 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n 
cogButtons.append(wx.Button(self.panel, label=labels[i] + 'e',\n size=(50, 30), pos=(90 + 120 * i, self.row_1_y if row == 0 else\n self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n if row == 1:\n for button in self.auxButtons:\n button.Disable()\n\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n if self.rowCount == 0:\n self.rowCount = 1\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n for button in self.auxButtons:\n if button.Label[1] == self.entityList[0].opposite(\n ).sublabel and button.Label[0] != self.entityList[0\n ].opposite_orientation().label and button.Label[0\n ] != self.entityList[0].label:\n button.Enable()\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n if len(self.entityList) == 2:\n e = Entity(self.entityList)\n print(Translator.translate_orientation(e) + Translator.\n translate_observing(e) + Translator.\n translate_decision_making(e) + Translator.\n translate_perception(e))\n\n def labelToFunction(self, btnLabel):\n if btnLabel == 'Ni':\n return Ni\n elif btnLabel == 'Ne':\n return Ne\n elif btnLabel == 'Si':\n return Si\n elif btnLabel == 'Se':\n return Se\n elif btnLabel == 'Ti':\n return Ti\n elif btnLabel == 'Te':\n return Te\n elif btnLabel == 'Fi':\n return Fi\n elif btnLabel == 'Fe':\n return Fe\n\n\nif __name__ == '__main__':\n app = wx.App()\n frame = TypeFrame(None, title='Socionics Engine')\n frame.Show()\n app.MainLoop()\n", "step-5": "#!/usr/bin/env python3\n#\n# main.py - By Steven Chen Hao Nyeo \n# Graphical interface for Socionics Engine \n# Created: August 8, 2019\n\nimport wx\nfrom cognitive_function import *\nfrom entity import Entity\nfrom function_to_type import Translator\nfrom function_analysis import *\n\nclass TypeFrame(wx.Frame):\n def __init__(self, parent, title):\n \n # Create Frame\n 
wx.Frame.__init__(self, parent, title = title, size = (530, 480), style = wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)\n self.panel = wx.Panel(self)\n \n # The current list of cognitive functions entered into the system \n self.entityList = []\n\n # Arrays containing the rows of buttons for dominant and auxiliary functions\n self.domButtons = []\n self.auxButtons = []\n\n # Keep track of the current row of buttons to enable\n self.rowCount = 0\n\n # Setup for program interface\n self.row_1_y = 30\n self.row_2_y = 90\n self.row_3_y = 150\n wx.StaticText(self.panel, label = \"Dominant Function:\", pos = (30, self.row_1_y - 20))\n self.createCogButtons(0)\n wx.StaticText(self.panel, label = \"Auxiliary Function:\", pos = (30, self.row_2_y - 20))\n self.createCogButtons(1)\n\n # The function that creates the buttons for the eight cognitive functions\n def createCogButtons(self, row):\n\n # Keeps track of creation of dominant or auxiliary buttons\n cogButtons = self.domButtons if row == 0 else self.auxButtons \n \n # Create and bind the buttons to the event\n labels = [\"N\", \"S\", \"T\", \"F\"]\n for i in range(4): \n cogButtons.append(wx.Button(self.panel, label = labels[i] + \"i\", size = (50, 30) , pos = (30 + 120 * i, self.row_1_y if row == 0 else self.row_2_y)))\n cogButtons.append(wx.Button(self.panel, label = labels[i] + \"e\", size = (50, 30) , pos = (90 + 120 * i, self.row_1_y if row == 0 else self.row_2_y)))\n for i in range(8):\n self.Bind(wx.EVT_BUTTON, self.onclick_cogFunction, cogButtons[i])\n\n # The auxiliary buttons are disabled before the dominant function is entered\n if (row == 1): \n for button in self.auxButtons:\n button.Disable()\n\n # The event handler for clicking on the buttons\n def onclick_cogFunction(self, event):\n btnLabel = event.GetEventObject().GetLabel()\n\n # First row - dominant function\n if (self.rowCount == 0):\n\n # Disable the dominant function buttons\n self.rowCount = 1\n 
self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.domButtons:\n button.Disable()\n\n # Re-enable the appropriate auxiliary function buttons\n for button in self.auxButtons:\n if (button.Label[1] == self.entityList[0].opposite().sublabel \n and button.Label[0] != self.entityList[0].opposite_orientation().label\n and button.Label[0] != self.entityList[0].label):\n button.Enable()\n\n # Second row - auxiliary function\n else:\n self.entityList.append(self.labelToFunction(btnLabel))\n for button in self.auxButtons:\n button.Disable()\n\n if (len(self.entityList) == 2):\n e = Entity(self.entityList)\n\n print(Translator.translate_orientation(e) +\n Translator.translate_observing(e) +\n Translator.translate_decision_making(e) +\n Translator.translate_perception(e))\n\n # The helper functin that returns the corresponding function object according to the entered string\n def labelToFunction(self, btnLabel):\n if (btnLabel == \"Ni\"): \n return Ni\n elif (btnLabel == \"Ne\"): \n return Ne\n elif (btnLabel == \"Si\"): \n return Si\n elif (btnLabel == \"Se\"): \n return Se\n elif (btnLabel == \"Ti\"): \n return Ti\n elif (btnLabel == \"Te\"): \n return Te\n elif (btnLabel == \"Fi\"): \n return Fi\n elif (btnLabel == \"Fe\"): \n return Fe\n\nif __name__ == \"__main__\":\n app = wx.App()\n frame = TypeFrame(None, title = \"Socionics Engine\")\n frame.Show()\n app.MainLoop()\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
#This is a module which implements Naive Set Theory in Python. #It will be useful for Unions, Intersections, Mutual Exclusion, and more. #ideas: print(sum([[[1],[2]], [[3],[4]], [[5],[6]]], [])) Monoid - abstraction on + trial = [1, 2, 3] trial2 = [3, 4, 5] def recursiveUnioniser(set): if isinstance(set[0], int): return set res = [] for i in range(len(set)): for j in range(len(set[i])): res.append(set[i][j]) if isinstance(res[0], list): return recursiveUnioniser(res) else: return res print(recursiveUnioniser(trial)) def mutualexclusion(set_a, set_b): res = [i for i in set_a if i not in set_b] res2 = [i for i in set_b if i not in set_a] res += res2 return res print(mutualexclusion(trial, trial2)) def intersection(set_a, set_b): res = [i for i in set_a if i in set_b] return res print(intersection(trial, trial2)) def repetitionAudit(set): pass #this will audit a list to see if an element occurs more than once #If it does, it will remove this element and return the list
normal
{ "blob_id": "c632c50028fee2f19fb65458f0b55ec228b8006f", "index": 2137, "step-1": "<mask token>\n\n\ndef intersection(set_a, set_b):\n res = [i for i in set_a if i in set_b]\n return res\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef recursiveUnioniser(set):\n if isinstance(set[0], int):\n return set\n res = []\n for i in range(len(set)):\n for j in range(len(set[i])):\n res.append(set[i][j])\n if isinstance(res[0], list):\n return recursiveUnioniser(res)\n else:\n return res\n\n\n<mask token>\n\n\ndef mutualexclusion(set_a, set_b):\n res = [i for i in set_a if i not in set_b]\n res2 = [i for i in set_b if i not in set_a]\n res += res2\n return res\n\n\n<mask token>\n\n\ndef intersection(set_a, set_b):\n res = [i for i in set_a if i in set_b]\n return res\n\n\n<mask token>\n\n\ndef repetitionAudit(set):\n pass\n", "step-3": "<mask token>\n\n\ndef recursiveUnioniser(set):\n if isinstance(set[0], int):\n return set\n res = []\n for i in range(len(set)):\n for j in range(len(set[i])):\n res.append(set[i][j])\n if isinstance(res[0], list):\n return recursiveUnioniser(res)\n else:\n return res\n\n\nprint(recursiveUnioniser(trial))\n\n\ndef mutualexclusion(set_a, set_b):\n res = [i for i in set_a if i not in set_b]\n res2 = [i for i in set_b if i not in set_a]\n res += res2\n return res\n\n\nprint(mutualexclusion(trial, trial2))\n\n\ndef intersection(set_a, set_b):\n res = [i for i in set_a if i in set_b]\n return res\n\n\nprint(intersection(trial, trial2))\n\n\ndef repetitionAudit(set):\n pass\n", "step-4": "trial = [1, 2, 3]\ntrial2 = [3, 4, 5]\n\n\ndef recursiveUnioniser(set):\n if isinstance(set[0], int):\n return set\n res = []\n for i in range(len(set)):\n for j in range(len(set[i])):\n res.append(set[i][j])\n if isinstance(res[0], list):\n return recursiveUnioniser(res)\n else:\n return res\n\n\nprint(recursiveUnioniser(trial))\n\n\ndef mutualexclusion(set_a, set_b):\n res = [i for i in set_a if i not in set_b]\n res2 = [i for i in set_b if i not in 
set_a]\n res += res2\n return res\n\n\nprint(mutualexclusion(trial, trial2))\n\n\ndef intersection(set_a, set_b):\n res = [i for i in set_a if i in set_b]\n return res\n\n\nprint(intersection(trial, trial2))\n\n\ndef repetitionAudit(set):\n pass\n", "step-5": "#This is a module which implements Naive Set Theory in Python.\n#It will be useful for Unions, Intersections, Mutual Exclusion, and more.\n#ideas: print(sum([[[1],[2]], [[3],[4]], [[5],[6]]], [])) Monoid - abstraction on +\n\n\ntrial = [1, 2, 3]\ntrial2 = [3, 4, 5]\n\ndef recursiveUnioniser(set):\n if isinstance(set[0], int): return set\n\n res = []\n\n for i in range(len(set)):\n for j in range(len(set[i])):\n res.append(set[i][j])\n\n if isinstance(res[0], list):\n return recursiveUnioniser(res)\n else: return res\n\nprint(recursiveUnioniser(trial))\n\ndef mutualexclusion(set_a, set_b):\n res = [i for i in set_a if i not in set_b]\n res2 = [i for i in set_b if i not in set_a]\n res += res2\n\n return res\n\nprint(mutualexclusion(trial, trial2))\n\ndef intersection(set_a, set_b):\n res = [i for i in set_a if i in set_b]\n\n return res\n\nprint(intersection(trial, trial2))\n\ndef repetitionAudit(set):\n pass #this will audit a list to see if an element occurs more than once\n #If it does, it will remove this element and return the list\n", "step-ids": [ 1, 4, 5, 6, 7 ] }
[ 1, 4, 5, 6, 7 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> from ccapi.interfaces.bitfinex import Bitfinex from ccapi.interfaces.bittrex import Bittrex from ccapi.interfaces.poloniex import Poloniex from ccapi.interfaces.bithumb import Bithumb from ccapi.interfaces.coinone import Coinone from ccapi.interfaces.korbit import Korbit <|reserved_special_token_1|> from ccapi.interfaces.bitfinex import Bitfinex from ccapi.interfaces.bittrex import Bittrex from ccapi.interfaces.poloniex import Poloniex from ccapi.interfaces.bithumb import Bithumb from ccapi.interfaces.coinone import Coinone from ccapi.interfaces.korbit import Korbit # from ccapis.interfaces.coinbase import Coinbase
flexible
{ "blob_id": "098c91f4aa367cb389e542c0199b633e7ecd4003", "index": 4369, "step-1": "<mask token>\n", "step-2": "from ccapi.interfaces.bitfinex import Bitfinex\nfrom ccapi.interfaces.bittrex import Bittrex\nfrom ccapi.interfaces.poloniex import Poloniex\nfrom ccapi.interfaces.bithumb import Bithumb\nfrom ccapi.interfaces.coinone import Coinone\nfrom ccapi.interfaces.korbit import Korbit\n", "step-3": "from ccapi.interfaces.bitfinex import Bitfinex\nfrom ccapi.interfaces.bittrex import Bittrex\nfrom ccapi.interfaces.poloniex import Poloniex\nfrom ccapi.interfaces.bithumb import Bithumb\nfrom ccapi.interfaces.coinone import Coinone\nfrom ccapi.interfaces.korbit import Korbit\n# from ccapis.interfaces.coinbase import Coinbase\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
import logging from logging import INFO from typing import Dict, List from .constants import Relations, POS from .evaluator import * from .general import DPHelper from .general import * from .utils import * # ========================================= DRIVER ================================================= def generate(root: Dict): # {"relation": <>, "subjs": [<>], "objs": [<>]} relations: List[Dict] = [] # Is this applicable only to root? subj = DPHelper.get_subject(root) obj = DPHelper.get_object(root) if subj is not None and DPHelper.is_proper_noun(subj) and \ obj is not None and DPHelper.is_proper_noun(obj): if DPHelper.is_proper_noun(subj) and DPHelper.is_proper_noun(obj): logging.log(INFO, "============ Rooted NNP SUBJECT and NNP OBJECT =============") subjs = get_all_nouns(subj, proper_noun=True) objs = [get_noun_phrase(obj, proper_noun=True)] aux_relations = sub_obj_vbroot(root) # Relations between subject and object relations = relations + create_relations(subjs, aux_relations, objs) # Relations within clausal complements open_comp: List[Dict] = DPHelper.get_child_type(root, Relations.OPEN_CLAUSAL_COMPLEMENT) comp: List[Dict] = DPHelper.get_child_type(root, Relations.CLAUSAL_COMPLEMENT) if open_comp: # Assume for now open_comps all relate to object subjs = [get_noun_phrase(obj, proper_noun=True)] objs, xcomp_relations = x_comp(open_comp[0]) # TODO Can there be multiple xcomps? 
relations = relations + create_relations(subjs, xcomp_relations, objs) elif subj is not None and DPHelper.is_proper_noun(subj): subjs = get_all_nouns(subj, proper_noun=True) appos_rels, appos_objs = [], [] # Find direct appositional relations within NSUBJ block appos_rel_objs = [] for appos in DPHelper.get_child_type(subj, Relations.APPOSITION): a_objs, a_relations = direct_appositional_relations(appos) relations += create_nested_relations(subjs, a_relations, a_objs) # TODO Check for clausal complement for Subj (INDEPENDENT) if DPHelper.get_child_type(root, Relations.CLAUSAL_COMPLEMENT): pass # Passive subject, look into preposition for predicate object with possessive if DPHelper.is_proper_noun(subj) and subj["link"] == Relations.PASSIVE_NOM_SUBJECT: logging.log(INFO, "============= NNP PASSIVE SUBJECT ===============") objs, aux_relations, appos = subjpass(root) for appos_instance in appos: relations = relations + create_relations(subjs, appos_instance["relation"], appos_instance["obj"]) relations = relations + create_relations(subjs, aux_relations, objs) # Possible case where root is noun and hence subject is not labeled passive but relation still exists elif DPHelper.is_noun(root): logging.log(INFO, "============= NNP SUBJECT with NOUN ROOT ===============") objs, aux_relations = nnroot_subj(root) relations = relations + create_relations(subjs, aux_relations, objs) # Usually the case that the direct obj being non-NNP represents relation elif DPHelper.is_verb(root) and obj is not None: logging.log(INFO, "============= NNP SUBJECT with VERB ROOT (NON-NNP DOBJ present) ===============") objs, aux_relations = vbroot_subj_xobj(root) relations = relations + create_relations(subjs, aux_relations, objs) # Root verb without concrete noun form but valid relation (E.g. lives, resides) TODO Do we require `in/from etc.` for preposition? 
elif DPHelper.is_verb(root): logging.log(INFO, "============= NNP SUBJECT with VERB ROOT ===============") objs, aux_relations = vbroot_subj(root) relations = relations + create_nested_relations(subjs, aux_relations, objs) elif DPHelper.is_adjective(root): logging.log(INFO, "============= NNP SUBJECT with ADJ ROOT ===============") objs, aux_relations = vbroot_subj(root) # FIXME We assume this is similar to verb root for now relations = relations + create_nested_relations(subjs, aux_relations, objs) else: logging.log(INFO, "============= NNP SUBJECT with UNKNOWN STRUCTURE ===============") else: logging.log(INFO, "============== NOUN ROOT - No Direct SUBJ and OBJ ================") if subj is not None: # Mostly likely noun with possessive or nested if (subj["link"] == Relations.PASSIVE_NOM_SUBJECT): # Necessarily assume this since noun subj is possessive, else should Corefer logging.log(INFO, "============= NESTED POSSESSIVE OF PASSIVE SUBJECT ===============") subjs = subjpass_poss(subj) if DPHelper.has_rc_modifier(root): # NNP still might be present in rc modifier logging.log(INFO, "============= RELATIVE CLAUSE MODIFIER PRESENT ===============") if DPHelper.is_proper_noun(root): subj, relations, objs = nnproot(root) all_rel_tuples = [] for relation in relations: rel_tuples = [(sub, relation['relation'], obj) for sub in relation['subjs'] for obj in relation['objs']] all_rel_tuples += rel_tuples return all_rel_tuples
normal
{ "blob_id": "5923a12378225fb6389e7e0275af6d4aa476fe87", "index": 1635, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef generate(root: Dict):\n relations: List[Dict] = []\n subj = DPHelper.get_subject(root)\n obj = DPHelper.get_object(root)\n if subj is not None and DPHelper.is_proper_noun(subj\n ) and obj is not None and DPHelper.is_proper_noun(obj):\n if DPHelper.is_proper_noun(subj) and DPHelper.is_proper_noun(obj):\n logging.log(INFO,\n '============ Rooted NNP SUBJECT and NNP OBJECT =============')\n subjs = get_all_nouns(subj, proper_noun=True)\n objs = [get_noun_phrase(obj, proper_noun=True)]\n aux_relations = sub_obj_vbroot(root)\n relations = relations + create_relations(subjs, aux_relations, objs\n )\n open_comp: List[Dict] = DPHelper.get_child_type(root, Relations\n .OPEN_CLAUSAL_COMPLEMENT)\n comp: List[Dict] = DPHelper.get_child_type(root, Relations.\n CLAUSAL_COMPLEMENT)\n if open_comp:\n subjs = [get_noun_phrase(obj, proper_noun=True)]\n objs, xcomp_relations = x_comp(open_comp[0])\n relations = relations + create_relations(subjs,\n xcomp_relations, objs)\n elif subj is not None and DPHelper.is_proper_noun(subj):\n subjs = get_all_nouns(subj, proper_noun=True)\n appos_rels, appos_objs = [], []\n appos_rel_objs = []\n for appos in DPHelper.get_child_type(subj, Relations.APPOSITION):\n a_objs, a_relations = direct_appositional_relations(appos)\n relations += create_nested_relations(subjs, a_relations, a_objs)\n if DPHelper.get_child_type(root, Relations.CLAUSAL_COMPLEMENT):\n pass\n if DPHelper.is_proper_noun(subj) and subj['link'\n ] == Relations.PASSIVE_NOM_SUBJECT:\n logging.log(INFO,\n '============= NNP PASSIVE SUBJECT ===============')\n objs, aux_relations, appos = subjpass(root)\n for appos_instance in appos:\n relations = relations + create_relations(subjs,\n appos_instance['relation'], appos_instance['obj'])\n relations = relations + create_relations(subjs, aux_relations, objs\n )\n elif DPHelper.is_noun(root):\n 
logging.log(INFO,\n '============= NNP SUBJECT with NOUN ROOT ===============')\n objs, aux_relations = nnroot_subj(root)\n relations = relations + create_relations(subjs, aux_relations, objs\n )\n elif DPHelper.is_verb(root) and obj is not None:\n logging.log(INFO,\n '============= NNP SUBJECT with VERB ROOT (NON-NNP DOBJ present) ==============='\n )\n objs, aux_relations = vbroot_subj_xobj(root)\n relations = relations + create_relations(subjs, aux_relations, objs\n )\n elif DPHelper.is_verb(root):\n logging.log(INFO,\n '============= NNP SUBJECT with VERB ROOT ===============')\n objs, aux_relations = vbroot_subj(root)\n relations = relations + create_nested_relations(subjs,\n aux_relations, objs)\n elif DPHelper.is_adjective(root):\n logging.log(INFO,\n '============= NNP SUBJECT with ADJ ROOT ===============')\n objs, aux_relations = vbroot_subj(root)\n relations = relations + create_nested_relations(subjs,\n aux_relations, objs)\n else:\n logging.log(INFO,\n '============= NNP SUBJECT with UNKNOWN STRUCTURE ==============='\n )\n else:\n logging.log(INFO,\n '============== NOUN ROOT - No Direct SUBJ and OBJ ================'\n )\n if subj is not None:\n if subj['link'] == Relations.PASSIVE_NOM_SUBJECT:\n logging.log(INFO,\n '============= NESTED POSSESSIVE OF PASSIVE SUBJECT ==============='\n )\n subjs = subjpass_poss(subj)\n if DPHelper.has_rc_modifier(root):\n logging.log(INFO,\n '============= RELATIVE CLAUSE MODIFIER PRESENT ==============='\n )\n if DPHelper.is_proper_noun(root):\n subj, relations, objs = nnproot(root)\n all_rel_tuples = []\n for relation in relations:\n rel_tuples = [(sub, relation['relation'], obj) for sub in relation[\n 'subjs'] for obj in relation['objs']]\n all_rel_tuples += rel_tuples\n return all_rel_tuples\n", "step-3": "import logging\nfrom logging import INFO\nfrom typing import Dict, List\nfrom .constants import Relations, POS\nfrom .evaluator import *\nfrom .general import DPHelper\nfrom .general import *\nfrom .utils 
import *\n\n\ndef generate(root: Dict):\n relations: List[Dict] = []\n subj = DPHelper.get_subject(root)\n obj = DPHelper.get_object(root)\n if subj is not None and DPHelper.is_proper_noun(subj\n ) and obj is not None and DPHelper.is_proper_noun(obj):\n if DPHelper.is_proper_noun(subj) and DPHelper.is_proper_noun(obj):\n logging.log(INFO,\n '============ Rooted NNP SUBJECT and NNP OBJECT =============')\n subjs = get_all_nouns(subj, proper_noun=True)\n objs = [get_noun_phrase(obj, proper_noun=True)]\n aux_relations = sub_obj_vbroot(root)\n relations = relations + create_relations(subjs, aux_relations, objs\n )\n open_comp: List[Dict] = DPHelper.get_child_type(root, Relations\n .OPEN_CLAUSAL_COMPLEMENT)\n comp: List[Dict] = DPHelper.get_child_type(root, Relations.\n CLAUSAL_COMPLEMENT)\n if open_comp:\n subjs = [get_noun_phrase(obj, proper_noun=True)]\n objs, xcomp_relations = x_comp(open_comp[0])\n relations = relations + create_relations(subjs,\n xcomp_relations, objs)\n elif subj is not None and DPHelper.is_proper_noun(subj):\n subjs = get_all_nouns(subj, proper_noun=True)\n appos_rels, appos_objs = [], []\n appos_rel_objs = []\n for appos in DPHelper.get_child_type(subj, Relations.APPOSITION):\n a_objs, a_relations = direct_appositional_relations(appos)\n relations += create_nested_relations(subjs, a_relations, a_objs)\n if DPHelper.get_child_type(root, Relations.CLAUSAL_COMPLEMENT):\n pass\n if DPHelper.is_proper_noun(subj) and subj['link'\n ] == Relations.PASSIVE_NOM_SUBJECT:\n logging.log(INFO,\n '============= NNP PASSIVE SUBJECT ===============')\n objs, aux_relations, appos = subjpass(root)\n for appos_instance in appos:\n relations = relations + create_relations(subjs,\n appos_instance['relation'], appos_instance['obj'])\n relations = relations + create_relations(subjs, aux_relations, objs\n )\n elif DPHelper.is_noun(root):\n logging.log(INFO,\n '============= NNP SUBJECT with NOUN ROOT ===============')\n objs, aux_relations = nnroot_subj(root)\n 
relations = relations + create_relations(subjs, aux_relations, objs\n )\n elif DPHelper.is_verb(root) and obj is not None:\n logging.log(INFO,\n '============= NNP SUBJECT with VERB ROOT (NON-NNP DOBJ present) ==============='\n )\n objs, aux_relations = vbroot_subj_xobj(root)\n relations = relations + create_relations(subjs, aux_relations, objs\n )\n elif DPHelper.is_verb(root):\n logging.log(INFO,\n '============= NNP SUBJECT with VERB ROOT ===============')\n objs, aux_relations = vbroot_subj(root)\n relations = relations + create_nested_relations(subjs,\n aux_relations, objs)\n elif DPHelper.is_adjective(root):\n logging.log(INFO,\n '============= NNP SUBJECT with ADJ ROOT ===============')\n objs, aux_relations = vbroot_subj(root)\n relations = relations + create_nested_relations(subjs,\n aux_relations, objs)\n else:\n logging.log(INFO,\n '============= NNP SUBJECT with UNKNOWN STRUCTURE ==============='\n )\n else:\n logging.log(INFO,\n '============== NOUN ROOT - No Direct SUBJ and OBJ ================'\n )\n if subj is not None:\n if subj['link'] == Relations.PASSIVE_NOM_SUBJECT:\n logging.log(INFO,\n '============= NESTED POSSESSIVE OF PASSIVE SUBJECT ==============='\n )\n subjs = subjpass_poss(subj)\n if DPHelper.has_rc_modifier(root):\n logging.log(INFO,\n '============= RELATIVE CLAUSE MODIFIER PRESENT ==============='\n )\n if DPHelper.is_proper_noun(root):\n subj, relations, objs = nnproot(root)\n all_rel_tuples = []\n for relation in relations:\n rel_tuples = [(sub, relation['relation'], obj) for sub in relation[\n 'subjs'] for obj in relation['objs']]\n all_rel_tuples += rel_tuples\n return all_rel_tuples\n", "step-4": "import logging\nfrom logging import INFO\nfrom typing import Dict, List\nfrom .constants import Relations, POS\nfrom .evaluator import *\nfrom .general import DPHelper\nfrom .general import *\nfrom .utils import *\n\n# ========================================= DRIVER =================================================\n\ndef 
generate(root: Dict):\n\n # {\"relation\": <>, \"subjs\": [<>], \"objs\": [<>]}\n relations: List[Dict] = []\n\n # Is this applicable only to root?\n subj = DPHelper.get_subject(root)\n obj = DPHelper.get_object(root)\n\n\n if subj is not None and DPHelper.is_proper_noun(subj) and \\\n obj is not None and DPHelper.is_proper_noun(obj):\n\n if DPHelper.is_proper_noun(subj) and DPHelper.is_proper_noun(obj):\n logging.log(INFO, \"============ Rooted NNP SUBJECT and NNP OBJECT =============\")\n subjs = get_all_nouns(subj, proper_noun=True)\n objs = [get_noun_phrase(obj, proper_noun=True)]\n aux_relations = sub_obj_vbroot(root) # Relations between subject and object\n relations = relations + create_relations(subjs, aux_relations, objs)\n\n # Relations within clausal complements\n open_comp: List[Dict] = DPHelper.get_child_type(root, Relations.OPEN_CLAUSAL_COMPLEMENT)\n comp: List[Dict] = DPHelper.get_child_type(root, Relations.CLAUSAL_COMPLEMENT)\n if open_comp: # Assume for now open_comps all relate to object\n subjs = [get_noun_phrase(obj, proper_noun=True)]\n objs, xcomp_relations = x_comp(open_comp[0]) # TODO Can there be multiple xcomps?\n relations = relations + create_relations(subjs, xcomp_relations, objs)\n\n elif subj is not None and DPHelper.is_proper_noun(subj):\n subjs = get_all_nouns(subj, proper_noun=True)\n\n appos_rels, appos_objs = [], []\n # Find direct appositional relations within NSUBJ block\n appos_rel_objs = []\n for appos in DPHelper.get_child_type(subj, Relations.APPOSITION):\n a_objs, a_relations = direct_appositional_relations(appos)\n relations += create_nested_relations(subjs, a_relations, a_objs)\n\n # TODO Check for clausal complement for Subj (INDEPENDENT)\n if DPHelper.get_child_type(root, Relations.CLAUSAL_COMPLEMENT):\n pass\n\n # Passive subject, look into preposition for predicate object with possessive\n if DPHelper.is_proper_noun(subj) and subj[\"link\"] == Relations.PASSIVE_NOM_SUBJECT:\n logging.log(INFO, \"============= NNP 
PASSIVE SUBJECT ===============\")\n objs, aux_relations, appos = subjpass(root)\n for appos_instance in appos:\n relations = relations + create_relations(subjs, appos_instance[\"relation\"], appos_instance[\"obj\"])\n relations = relations + create_relations(subjs, aux_relations, objs)\n\n # Possible case where root is noun and hence subject is not labeled passive but relation still exists\n elif DPHelper.is_noun(root):\n logging.log(INFO, \"============= NNP SUBJECT with NOUN ROOT ===============\")\n objs, aux_relations = nnroot_subj(root)\n relations = relations + create_relations(subjs, aux_relations, objs)\n\n # Usually the case that the direct obj being non-NNP represents relation\n elif DPHelper.is_verb(root) and obj is not None:\n logging.log(INFO, \"============= NNP SUBJECT with VERB ROOT (NON-NNP DOBJ present) ===============\")\n objs, aux_relations = vbroot_subj_xobj(root)\n relations = relations + create_relations(subjs, aux_relations, objs)\n\n # Root verb without concrete noun form but valid relation (E.g. 
lives, resides) TODO Do we require `in/from etc.` for preposition?\n elif DPHelper.is_verb(root):\n logging.log(INFO, \"============= NNP SUBJECT with VERB ROOT ===============\")\n objs, aux_relations = vbroot_subj(root)\n relations = relations + create_nested_relations(subjs, aux_relations, objs)\n\n elif DPHelper.is_adjective(root):\n logging.log(INFO, \"============= NNP SUBJECT with ADJ ROOT ===============\")\n objs, aux_relations = vbroot_subj(root) # FIXME We assume this is similar to verb root for now\n relations = relations + create_nested_relations(subjs, aux_relations, objs)\n else:\n logging.log(INFO, \"============= NNP SUBJECT with UNKNOWN STRUCTURE ===============\")\n\n\n else:\n logging.log(INFO, \"============== NOUN ROOT - No Direct SUBJ and OBJ ================\")\n\n if subj is not None: # Mostly likely noun with possessive or nested\n if (subj[\"link\"] == Relations.PASSIVE_NOM_SUBJECT): # Necessarily assume this since noun subj is possessive, else should Corefer\n logging.log(INFO, \"============= NESTED POSSESSIVE OF PASSIVE SUBJECT ===============\")\n subjs = subjpass_poss(subj)\n if DPHelper.has_rc_modifier(root): # NNP still might be present in rc modifier\n logging.log(INFO, \"============= RELATIVE CLAUSE MODIFIER PRESENT ===============\")\n\n if DPHelper.is_proper_noun(root):\n subj, relations, objs = nnproot(root)\n\n all_rel_tuples = []\n for relation in relations:\n rel_tuples = [(sub, relation['relation'], obj) for sub in relation['subjs'] for obj in relation['objs']]\n all_rel_tuples += rel_tuples\n return all_rel_tuples\n\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> class LoginViewWebApp(FlaskView): <|reserved_special_token_0|> def __init__(self): self.user_controller = UserController() @route('/register', methods=['GET', 'POST']) def register_user(self): if request.method == 'GET': return render_template('register.html') elif request.method == 'POST': app.logger.info('Got post') app.logger.info(request.form) username, password, email = request.form['username'], request.form[ 'password'], request.form['email'] ok, error = self.user_controller.create_user(username, password, email) if ok: return '', 200 else: return 'User already registered', 432 <|reserved_special_token_0|> @route('/logout', methods=['GET']) def logout(self): logout_user() return '', 200 <|reserved_special_token_1|> <|reserved_special_token_0|> class LoginViewWebApp(FlaskView): <|reserved_special_token_0|> def __init__(self): self.user_controller = UserController() @route('/register', methods=['GET', 'POST']) def register_user(self): if request.method == 'GET': return render_template('register.html') elif request.method == 'POST': app.logger.info('Got post') app.logger.info(request.form) username, password, email = request.form['username'], request.form[ 'password'], request.form['email'] ok, error = self.user_controller.create_user(username, password, email) if ok: return '', 200 else: return 'User already registered', 432 @route('/login', methods=['GET', 'POST']) def login(self): if request.method == 'GET': return render_template('login.html') elif request.method == 'POST': username = request.form['username'] password = request.form['password'] user = self.user_controller.get_user_w_password(username, password) if user is None: return 'Invalid credentials', 432 else: login_user(user) return '', 200 @route('/logout', methods=['GET']) def logout(self): logout_user() return '', 200 <|reserved_special_token_1|> <|reserved_special_token_0|> class LoginViewWebApp(FlaskView): route_base = '/' def __init__(self): self.user_controller = 
UserController() @route('/register', methods=['GET', 'POST']) def register_user(self): if request.method == 'GET': return render_template('register.html') elif request.method == 'POST': app.logger.info('Got post') app.logger.info(request.form) username, password, email = request.form['username'], request.form[ 'password'], request.form['email'] ok, error = self.user_controller.create_user(username, password, email) if ok: return '', 200 else: return 'User already registered', 432 @route('/login', methods=['GET', 'POST']) def login(self): if request.method == 'GET': return render_template('login.html') elif request.method == 'POST': username = request.form['username'] password = request.form['password'] user = self.user_controller.get_user_w_password(username, password) if user is None: return 'Invalid credentials', 432 else: login_user(user) return '', 200 @route('/logout', methods=['GET']) def logout(self): logout_user() return '', 200 <|reserved_special_token_1|> import flask from flask.ext.classy import FlaskView, route, request from annotator_supreme.controllers.user_controller import UserController from annotator_supreme.views import view_tools from annotator_supreme.views import error_views from flask import render_template, flash, redirect, url_for from annotator_supreme import app from flask.ext.login import login_user, logout_user import json class LoginViewWebApp(FlaskView): route_base = '/' def __init__(self): self.user_controller = UserController() @route('/register', methods=['GET', 'POST']) def register_user(self): if request.method == 'GET': return render_template('register.html') elif request.method == 'POST': app.logger.info('Got post') app.logger.info(request.form) username, password, email = request.form['username'], request.form[ 'password'], request.form['email'] ok, error = self.user_controller.create_user(username, password, email) if ok: return '', 200 else: return 'User already registered', 432 @route('/login', methods=['GET', 'POST']) def 
login(self): if request.method == 'GET': return render_template('login.html') elif request.method == 'POST': username = request.form['username'] password = request.form['password'] user = self.user_controller.get_user_w_password(username, password) if user is None: return 'Invalid credentials', 432 else: login_user(user) return '', 200 @route('/logout', methods=['GET']) def logout(self): logout_user() return '', 200 <|reserved_special_token_1|> import flask from flask.ext.classy import FlaskView, route, request from annotator_supreme.controllers.user_controller import UserController from annotator_supreme.views import view_tools from annotator_supreme.views import error_views from flask import render_template, flash, redirect, url_for from annotator_supreme import app from flask.ext.login import login_user, logout_user import json class LoginViewWebApp(FlaskView): route_base = '/' def __init__(self): self.user_controller = UserController() @route('/register' , methods=['GET','POST']) def register_user(self): if request.method == 'GET': return render_template('register.html') elif request.method == 'POST': app.logger.info("Got post") app.logger.info(request.form) username, password, email = request.form['username'] , request.form['password'], request.form['email'] ok, error = self.user_controller.create_user(username, password, email) if ok: return "", 200 else: return "User already registered", 432 @route('/login',methods=['GET','POST']) def login(self): if request.method == 'GET': return render_template('login.html') elif request.method == 'POST': username = request.form['username'] password = request.form['password'] user = self.user_controller.get_user_w_password(username, password) if user is None: return "Invalid credentials", 432 else: login_user(user) return "", 200 @route('/logout', methods=['GET']) def logout(self): logout_user() return "", 200
flexible
{ "blob_id": "a2e77298059104b403555af95430d7995f8a697b", "index": 1379, "step-1": "<mask token>\n\n\nclass LoginViewWebApp(FlaskView):\n <mask token>\n\n def __init__(self):\n self.user_controller = UserController()\n\n @route('/register', methods=['GET', 'POST'])\n def register_user(self):\n if request.method == 'GET':\n return render_template('register.html')\n elif request.method == 'POST':\n app.logger.info('Got post')\n app.logger.info(request.form)\n username, password, email = request.form['username'], request.form[\n 'password'], request.form['email']\n ok, error = self.user_controller.create_user(username, password,\n email)\n if ok:\n return '', 200\n else:\n return 'User already registered', 432\n <mask token>\n\n @route('/logout', methods=['GET'])\n def logout(self):\n logout_user()\n return '', 200\n", "step-2": "<mask token>\n\n\nclass LoginViewWebApp(FlaskView):\n <mask token>\n\n def __init__(self):\n self.user_controller = UserController()\n\n @route('/register', methods=['GET', 'POST'])\n def register_user(self):\n if request.method == 'GET':\n return render_template('register.html')\n elif request.method == 'POST':\n app.logger.info('Got post')\n app.logger.info(request.form)\n username, password, email = request.form['username'], request.form[\n 'password'], request.form['email']\n ok, error = self.user_controller.create_user(username, password,\n email)\n if ok:\n return '', 200\n else:\n return 'User already registered', 432\n\n @route('/login', methods=['GET', 'POST'])\n def login(self):\n if request.method == 'GET':\n return render_template('login.html')\n elif request.method == 'POST':\n username = request.form['username']\n password = request.form['password']\n user = self.user_controller.get_user_w_password(username, password)\n if user is None:\n return 'Invalid credentials', 432\n else:\n login_user(user)\n return '', 200\n\n @route('/logout', methods=['GET'])\n def logout(self):\n logout_user()\n return '', 200\n", "step-3": "<mask 
token>\n\n\nclass LoginViewWebApp(FlaskView):\n route_base = '/'\n\n def __init__(self):\n self.user_controller = UserController()\n\n @route('/register', methods=['GET', 'POST'])\n def register_user(self):\n if request.method == 'GET':\n return render_template('register.html')\n elif request.method == 'POST':\n app.logger.info('Got post')\n app.logger.info(request.form)\n username, password, email = request.form['username'], request.form[\n 'password'], request.form['email']\n ok, error = self.user_controller.create_user(username, password,\n email)\n if ok:\n return '', 200\n else:\n return 'User already registered', 432\n\n @route('/login', methods=['GET', 'POST'])\n def login(self):\n if request.method == 'GET':\n return render_template('login.html')\n elif request.method == 'POST':\n username = request.form['username']\n password = request.form['password']\n user = self.user_controller.get_user_w_password(username, password)\n if user is None:\n return 'Invalid credentials', 432\n else:\n login_user(user)\n return '', 200\n\n @route('/logout', methods=['GET'])\n def logout(self):\n logout_user()\n return '', 200\n", "step-4": "import flask\nfrom flask.ext.classy import FlaskView, route, request\nfrom annotator_supreme.controllers.user_controller import UserController\nfrom annotator_supreme.views import view_tools\nfrom annotator_supreme.views import error_views\nfrom flask import render_template, flash, redirect, url_for\nfrom annotator_supreme import app\nfrom flask.ext.login import login_user, logout_user\nimport json\n\n\nclass LoginViewWebApp(FlaskView):\n route_base = '/'\n\n def __init__(self):\n self.user_controller = UserController()\n\n @route('/register', methods=['GET', 'POST'])\n def register_user(self):\n if request.method == 'GET':\n return render_template('register.html')\n elif request.method == 'POST':\n app.logger.info('Got post')\n app.logger.info(request.form)\n username, password, email = request.form['username'], request.form[\n 
'password'], request.form['email']\n ok, error = self.user_controller.create_user(username, password,\n email)\n if ok:\n return '', 200\n else:\n return 'User already registered', 432\n\n @route('/login', methods=['GET', 'POST'])\n def login(self):\n if request.method == 'GET':\n return render_template('login.html')\n elif request.method == 'POST':\n username = request.form['username']\n password = request.form['password']\n user = self.user_controller.get_user_w_password(username, password)\n if user is None:\n return 'Invalid credentials', 432\n else:\n login_user(user)\n return '', 200\n\n @route('/logout', methods=['GET'])\n def logout(self):\n logout_user()\n return '', 200\n", "step-5": "import flask\nfrom flask.ext.classy import FlaskView, route, request\nfrom annotator_supreme.controllers.user_controller import UserController\nfrom annotator_supreme.views import view_tools\nfrom annotator_supreme.views import error_views\nfrom flask import render_template, flash, redirect, url_for\nfrom annotator_supreme import app\nfrom flask.ext.login import login_user, logout_user\nimport json\n\nclass LoginViewWebApp(FlaskView):\n route_base = '/'\n\n def __init__(self):\n self.user_controller = UserController()\n\n @route('/register' , methods=['GET','POST'])\n def register_user(self):\n if request.method == 'GET':\n return render_template('register.html')\n elif request.method == 'POST':\n app.logger.info(\"Got post\")\n app.logger.info(request.form)\n\n username, password, email = request.form['username'] , request.form['password'], request.form['email']\n ok, error = self.user_controller.create_user(username, password, email)\n if ok:\n return \"\", 200\n else:\n return \"User already registered\", 432\n \n @route('/login',methods=['GET','POST'])\n def login(self):\n if request.method == 'GET':\n return render_template('login.html')\n elif request.method == 'POST': \n username = request.form['username']\n password = request.form['password']\n user = 
self.user_controller.get_user_w_password(username, password)\n if user is None:\n return \"Invalid credentials\", 432\n else:\n login_user(user)\n return \"\", 200\n\n @route('/logout', methods=['GET'])\n def logout(self):\n logout_user()\n return \"\", 200\n \n\n", "step-ids": [ 4, 5, 6, 7, 8 ] }
[ 4, 5, 6, 7, 8 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Migration(migrations.Migration): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Migration(migrations.Migration): dependencies = [('usuarios', '0001_initial'), ('plataforma', '0005_auto_20210219_2343')] operations = [migrations.AlterField(model_name='plataforma', name= 'usuario', field=models.ForeignKey(on_delete=django.db.models. deletion.CASCADE, to='usuarios.usuario'))] <|reserved_special_token_1|> from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [('usuarios', '0001_initial'), ('plataforma', '0005_auto_20210219_2343')] operations = [migrations.AlterField(model_name='plataforma', name= 'usuario', field=models.ForeignKey(on_delete=django.db.models. deletion.CASCADE, to='usuarios.usuario'))] <|reserved_special_token_1|> # Generated by Django 3.1.7 on 2021-02-20 02:52 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('usuarios', '0001_initial'), ('plataforma', '0005_auto_20210219_2343'), ] operations = [ migrations.AlterField( model_name='plataforma', name='usuario', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='usuarios.usuario'), ), ]
flexible
{ "blob_id": "3f9be81c86852a758440c6a144b8caba736b3868", "index": 972, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('usuarios', '0001_initial'), ('plataforma',\n '0005_auto_20210219_2343')]\n operations = [migrations.AlterField(model_name='plataforma', name=\n 'usuario', field=models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, to='usuarios.usuario'))]\n", "step-4": "from django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n dependencies = [('usuarios', '0001_initial'), ('plataforma',\n '0005_auto_20210219_2343')]\n operations = [migrations.AlterField(model_name='plataforma', name=\n 'usuario', field=models.ForeignKey(on_delete=django.db.models.\n deletion.CASCADE, to='usuarios.usuario'))]\n", "step-5": "# Generated by Django 3.1.7 on 2021-02-20 02:52\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('usuarios', '0001_initial'),\n ('plataforma', '0005_auto_20210219_2343'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='plataforma',\n name='usuario',\n field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='usuarios.usuario'),\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# -*- coding: utf-8 -*- """ Created on Mon Jan 7 15:26:08 2019 @author: Qlala """ import numpy as np; import random as rand; import os; #os.system("del test_frame2.txt") #frame=open("test_frame2.txt","w"); #ba=bytearray(rand.getrandbits(8) for _ in range(400000)) #frame.write("0"*1000000) #frame.close() #ba.decode('ASCII'); #os.mkdir("test") os.chdir("test"); for i in range(1000): t_frame=open("test_f"+str(i),"w") t_frame.write("0"*1000000) t_frame.close() os.chdir("..")
normal
{ "blob_id": "281f2f47f9d7f0d87a354d37f9ff2c14a5598068", "index": 2893, "step-1": "<mask token>\n", "step-2": "<mask token>\nos.chdir('test')\nfor i in range(1000):\n t_frame = open('test_f' + str(i), 'w')\n t_frame.write('0' * 1000000)\n t_frame.close()\nos.chdir('..')\n", "step-3": "<mask token>\nimport numpy as np\nimport random as rand\nimport os\nos.chdir('test')\nfor i in range(1000):\n t_frame = open('test_f' + str(i), 'w')\n t_frame.write('0' * 1000000)\n t_frame.close()\nos.chdir('..')\n", "step-4": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Jan 7 15:26:08 2019\n\n@author: Qlala\n\"\"\"\nimport numpy as np;\nimport random as rand;\nimport os;\n#os.system(\"del test_frame2.txt\")\n#frame=open(\"test_frame2.txt\",\"w\");\n\n#ba=bytearray(rand.getrandbits(8) for _ in range(400000))\n#frame.write(\"0\"*1000000)\n#frame.close()\n#ba.decode('ASCII');\n#os.mkdir(\"test\")\nos.chdir(\"test\");\nfor i in range(1000):\n t_frame=open(\"test_f\"+str(i),\"w\")\n t_frame.write(\"0\"*1000000)\n t_frame.close()\nos.chdir(\"..\")", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
# Generated by Django 3.1.1 on 2020-12-02 19:50 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('element', '0011_suggestion_suggestion_type'), ('bot', '0001_initial'), ] operations = [ migrations.AddField( model_name='discorduser', name='has_elements', field=models.ManyToManyField(to='element.Element'), ), ]
normal
{ "blob_id": "43ae01ffe35c6c4491f3f7e480dd6f5c1be86eb2", "index": 2475, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('element', '0011_suggestion_suggestion_type'), ('bot',\n '0001_initial')]\n operations = [migrations.AddField(model_name='discorduser', name=\n 'has_elements', field=models.ManyToManyField(to='element.Element'))]\n", "step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('element', '0011_suggestion_suggestion_type'), ('bot',\n '0001_initial')]\n operations = [migrations.AddField(model_name='discorduser', name=\n 'has_elements', field=models.ManyToManyField(to='element.Element'))]\n", "step-5": "# Generated by Django 3.1.1 on 2020-12-02 19:50\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('element', '0011_suggestion_suggestion_type'),\n ('bot', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='discorduser',\n name='has_elements',\n field=models.ManyToManyField(to='element.Element'),\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import tensorflow as tf import os os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' import tensorflow as tf from tensorflow import keras from tensorflow.keras import layers, Sequential, optimizers import numpy as np from tensorflow.compat.v1.keras.backend import set_session config = tf.compat.v1.ConfigProto() config.gpu_options.allow_growth = True # dynamically grow the memory used on the GPU config.log_device_placement = True # to log device placement (on which device the operation ran) sess = tf.compat.v1.Session(config=config) set_session(sess) print('\nTensorflow GPU installed: ' + str(tf.test.is_built_with_cuda())) print('Is Tensorflow using GPU: \n' + str(tf.test.is_gpu_available())) class BasicBlock(layers.Layer): # 残差模块 def __init__(self, filter_num, kernel_size, strides=1): super(BasicBlock, self).__init__() # 第一个卷积单元 self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides, padding='same') self.bn1 = layers.BatchNormalization() self.relu1 = layers.Activation('relu') # 第二个卷积单元 self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1, padding='same') self.bn2 = layers.BatchNormalization() self.relu2 = layers.Activation('relu') if strides != 1: self.downsample = Sequential() self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides)) else: self.downsample = lambda x: x def call(self, inputs, training=None): out = self.conv1(inputs) out = self.bn1(out) out = self.relu1(out) # 通过第二个卷积单元 out = self.conv2(out) out = self.bn2(out) out = self.relu2(out) # 通过identity模块 identity = self.downsample(inputs) # 2条路径输出直接相加 output = layers.add([out, identity]) output = tf.nn.relu(output) # 激活函数 return output class ResNet(keras.Model): def __init__(self, layer_dims, num_classes=4): # layer_dims:list[2,2,2,2,2,2] super(ResNet, self).__init__() self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1), layers.BatchNormalization(), layers.Activation('relu') ]) self.layer1 = self.build_resblock(16, layer_dims[0]) # 512 self.layer2 = 
self.build_resblock(32, layer_dims[1], kernel_size=5, strides=4) # 128 self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5, strides=4) # 32 self.layer4 = self.build_resblock(128, layer_dims[3], strides=2) # 16 self.layer5 = self.build_resblock(256, layer_dims[4], strides=2) # 8 self.layer6 = self.build_resblock(512, layer_dims[5], strides=2) # 4 self.avgpool = layers.GlobalAveragePooling1D() # 512大小的向量: 512*1 self.fc = layers.Dense(num_classes) def call(self, inputs, training=None): x = self.stem(inputs) x = self.layer1(x) x = self.layer2(x) x = self.layer3(x) x = self.layer4(x) x = self.layer5(x) x = self.layer6(x) x = self.avgpool(x) x = self.fc(x) return x def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1): # 辅助函数,堆叠filter_num个BasicBlock res_blocks = Sequential() # 只有第一个BasicBlock的步长可能不为1,实现下采样 res_blocks.add(BasicBlock(filter_num, kernel_size, strides)) for _ in range(1, blocks): # 其他BasicBlock步长都为1 res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1)) return res_blocks x_train = np.loadtxt(r'/content/drive/My Drive/Data/x_train').reshape(-1, 512, 1).astype(np.float32) y_train = np.loadtxt(r'/content/drive/My Drive/Data/y_train').astype(np.int32) x_test = np.loadtxt(r'/content/drive/My Drive/Data/x_test').reshape(-1, 512, 1).astype(np.float32) y_test = np.loadtxt(r'/content/drive/My Drive/Data/y_test').astype(np.int32) train_db = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(512) test_db = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(512) # sample = next(iter(train_db)) # print(sample) model = ResNet([2,2,2,2,2,2]) model.build(input_shape=(512,512,1)) # conv_net.summary() # fc_net.summary() optimizer = optimizers.Adam(lr=1e-3) train_loss = [] test_acc = [] acc_max = 0 for epoch in range(500): for step, (x, y) in enumerate(train_db): with tf.GradientTape() as tape: # [b,512,1]=>[b,4] logits = model(x, training=True) y_onehot = tf.one_hot(y, depth=4) loss = 
tf.losses.categorical_crossentropy(y_onehot, logits, from_logits=True) loss = tf.reduce_mean(loss) grads = tape.gradient(loss, model.trainable_variables) optimizer.apply_gradients(zip(grads, model.trainable_variables)) if step % 100 == 0: print(epoch, step, 'loss:', float(loss)) train_loss.append(loss) total_num = 0 total_correct = 0 for x, y in test_db: logits = model(x) prob = tf.nn.softmax(logits, axis=1) pred = tf.argmax(prob, axis=1) pred = tf.cast(pred, dtype=tf.int32) correct = tf.cast(tf.equal(pred, y), dtype=tf.int32) correct = tf.reduce_sum(correct) total_num += x.shape[0] total_correct += int(correct) acc = total_correct / total_num test_acc.append(acc) print(epoch, 'acc:', acc) if acc > acc_max: acc_max = acc model.save_weights(r'ResNet/weights.ckpt')
normal
{ "blob_id": "e626a7f3f9241db8684c3b8c1bd79ea49e03490d", "index": 8141, "step-1": "<mask token>\n\n\nclass BasicBlock(layers.Layer):\n <mask token>\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n identity = self.downsample(inputs)\n output = layers.add([out, identity])\n output = tf.nn.relu(output)\n return output\n\n\nclass ResNet(keras.Model):\n\n def __init__(self, layer_dims, num_classes=4):\n super(ResNet, self).__init__()\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(), layers.Activation('relu')])\n self.layer1 = self.build_resblock(16, layer_dims[0])\n self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,\n strides=4)\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,\n strides=4)\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)\n self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)\n self.avgpool = layers.GlobalAveragePooling1D()\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):\n res_blocks = Sequential()\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n for _ in range(1, blocks):\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n return res_blocks\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass BasicBlock(layers.Layer):\n\n def __init__(self, filter_num, kernel_size, strides=1):\n super(BasicBlock, self).__init__()\n self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides,\n 
padding='same')\n self.bn1 = layers.BatchNormalization()\n self.relu1 = layers.Activation('relu')\n self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1,\n padding='same')\n self.bn2 = layers.BatchNormalization()\n self.relu2 = layers.Activation('relu')\n if strides != 1:\n self.downsample = Sequential()\n self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))\n else:\n self.downsample = lambda x: x\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n identity = self.downsample(inputs)\n output = layers.add([out, identity])\n output = tf.nn.relu(output)\n return output\n\n\nclass ResNet(keras.Model):\n\n def __init__(self, layer_dims, num_classes=4):\n super(ResNet, self).__init__()\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(), layers.Activation('relu')])\n self.layer1 = self.build_resblock(16, layer_dims[0])\n self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,\n strides=4)\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,\n strides=4)\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)\n self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)\n self.avgpool = layers.GlobalAveragePooling1D()\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):\n res_blocks = Sequential()\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n for _ in range(1, blocks):\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n return 
res_blocks\n\n\n<mask token>\n", "step-3": "<mask token>\nset_session(sess)\nprint(\"\"\"\nTensorflow GPU installed: \"\"\" + str(tf.test.is_built_with_cuda()))\nprint('Is Tensorflow using GPU: \\n' + str(tf.test.is_gpu_available()))\n\n\nclass BasicBlock(layers.Layer):\n\n def __init__(self, filter_num, kernel_size, strides=1):\n super(BasicBlock, self).__init__()\n self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides,\n padding='same')\n self.bn1 = layers.BatchNormalization()\n self.relu1 = layers.Activation('relu')\n self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1,\n padding='same')\n self.bn2 = layers.BatchNormalization()\n self.relu2 = layers.Activation('relu')\n if strides != 1:\n self.downsample = Sequential()\n self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))\n else:\n self.downsample = lambda x: x\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n identity = self.downsample(inputs)\n output = layers.add([out, identity])\n output = tf.nn.relu(output)\n return output\n\n\nclass ResNet(keras.Model):\n\n def __init__(self, layer_dims, num_classes=4):\n super(ResNet, self).__init__()\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(), layers.Activation('relu')])\n self.layer1 = self.build_resblock(16, layer_dims[0])\n self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,\n strides=4)\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,\n strides=4)\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)\n self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)\n self.avgpool = layers.GlobalAveragePooling1D()\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x 
= self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):\n res_blocks = Sequential()\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n for _ in range(1, blocks):\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n return res_blocks\n\n\n<mask token>\nmodel.build(input_shape=(512, 512, 1))\n<mask token>\nfor epoch in range(500):\n for step, (x, y) in enumerate(train_db):\n with tf.GradientTape() as tape:\n logits = model(x, training=True)\n y_onehot = tf.one_hot(y, depth=4)\n loss = tf.losses.categorical_crossentropy(y_onehot, logits,\n from_logits=True)\n loss = tf.reduce_mean(loss)\n grads = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(grads, model.trainable_variables))\n if step % 100 == 0:\n print(epoch, step, 'loss:', float(loss))\n train_loss.append(loss)\n total_num = 0\n total_correct = 0\n for x, y in test_db:\n logits = model(x)\n prob = tf.nn.softmax(logits, axis=1)\n pred = tf.argmax(prob, axis=1)\n pred = tf.cast(pred, dtype=tf.int32)\n correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)\n correct = tf.reduce_sum(correct)\n total_num += x.shape[0]\n total_correct += int(correct)\n acc = total_correct / total_num\n test_acc.append(acc)\n print(epoch, 'acc:', acc)\n if acc > acc_max:\n acc_max = acc\n model.save_weights('ResNet/weights.ckpt')\n", "step-4": "<mask token>\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'\n<mask token>\nconfig = tf.compat.v1.ConfigProto()\nconfig.gpu_options.allow_growth = True\nconfig.log_device_placement = True\nsess = tf.compat.v1.Session(config=config)\nset_session(sess)\nprint(\"\"\"\nTensorflow GPU installed: \"\"\" + str(tf.test.is_built_with_cuda()))\nprint('Is Tensorflow using GPU: \\n' + str(tf.test.is_gpu_available()))\n\n\nclass BasicBlock(layers.Layer):\n\n def 
__init__(self, filter_num, kernel_size, strides=1):\n super(BasicBlock, self).__init__()\n self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides,\n padding='same')\n self.bn1 = layers.BatchNormalization()\n self.relu1 = layers.Activation('relu')\n self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1,\n padding='same')\n self.bn2 = layers.BatchNormalization()\n self.relu2 = layers.Activation('relu')\n if strides != 1:\n self.downsample = Sequential()\n self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))\n else:\n self.downsample = lambda x: x\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n identity = self.downsample(inputs)\n output = layers.add([out, identity])\n output = tf.nn.relu(output)\n return output\n\n\nclass ResNet(keras.Model):\n\n def __init__(self, layer_dims, num_classes=4):\n super(ResNet, self).__init__()\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(), layers.Activation('relu')])\n self.layer1 = self.build_resblock(16, layer_dims[0])\n self.layer2 = self.build_resblock(32, layer_dims[1], kernel_size=5,\n strides=4)\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5,\n strides=4)\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2)\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2)\n self.layer6 = self.build_resblock(512, layer_dims[5], strides=2)\n self.avgpool = layers.GlobalAveragePooling1D()\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):\n res_blocks = Sequential()\n 
res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n for _ in range(1, blocks):\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n return res_blocks\n\n\nx_train = np.loadtxt('/content/drive/My Drive/Data/x_train').reshape(-1, 512, 1\n ).astype(np.float32)\ny_train = np.loadtxt('/content/drive/My Drive/Data/y_train').astype(np.int32)\nx_test = np.loadtxt('/content/drive/My Drive/Data/x_test').reshape(-1, 512, 1\n ).astype(np.float32)\ny_test = np.loadtxt('/content/drive/My Drive/Data/y_test').astype(np.int32)\ntrain_db = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(512)\ntest_db = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(512)\nmodel = ResNet([2, 2, 2, 2, 2, 2])\nmodel.build(input_shape=(512, 512, 1))\noptimizer = optimizers.Adam(lr=0.001)\ntrain_loss = []\ntest_acc = []\nacc_max = 0\nfor epoch in range(500):\n for step, (x, y) in enumerate(train_db):\n with tf.GradientTape() as tape:\n logits = model(x, training=True)\n y_onehot = tf.one_hot(y, depth=4)\n loss = tf.losses.categorical_crossentropy(y_onehot, logits,\n from_logits=True)\n loss = tf.reduce_mean(loss)\n grads = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(grads, model.trainable_variables))\n if step % 100 == 0:\n print(epoch, step, 'loss:', float(loss))\n train_loss.append(loss)\n total_num = 0\n total_correct = 0\n for x, y in test_db:\n logits = model(x)\n prob = tf.nn.softmax(logits, axis=1)\n pred = tf.argmax(prob, axis=1)\n pred = tf.cast(pred, dtype=tf.int32)\n correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)\n correct = tf.reduce_sum(correct)\n total_num += x.shape[0]\n total_correct += int(correct)\n acc = total_correct / total_num\n test_acc.append(acc)\n print(epoch, 'acc:', acc)\n if acc > acc_max:\n acc_max = acc\n model.save_weights('ResNet/weights.ckpt')\n", "step-5": "import tensorflow as tf\nimport os\n\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'\nimport tensorflow as tf\nfrom tensorflow 
import keras\nfrom tensorflow.keras import layers, Sequential, optimizers\nimport numpy as np\nfrom tensorflow.compat.v1.keras.backend import set_session\n\nconfig = tf.compat.v1.ConfigProto()\nconfig.gpu_options.allow_growth = True # dynamically grow the memory used on the GPU\nconfig.log_device_placement = True # to log device placement (on which device the operation ran)\nsess = tf.compat.v1.Session(config=config)\nset_session(sess)\nprint('\\nTensorflow GPU installed: ' + str(tf.test.is_built_with_cuda()))\nprint('Is Tensorflow using GPU: \\n' + str(tf.test.is_gpu_available()))\n\n\nclass BasicBlock(layers.Layer):\n # 残差模块\n def __init__(self, filter_num, kernel_size, strides=1):\n super(BasicBlock, self).__init__()\n # 第一个卷积单元\n self.conv1 = layers.Conv1D(filter_num, kernel_size, strides=strides, padding='same')\n self.bn1 = layers.BatchNormalization()\n self.relu1 = layers.Activation('relu')\n # 第二个卷积单元\n self.conv2 = layers.Conv1D(filter_num, kernel_size, strides=1, padding='same')\n self.bn2 = layers.BatchNormalization()\n self.relu2 = layers.Activation('relu')\n\n if strides != 1:\n self.downsample = Sequential()\n self.downsample.add(layers.Conv1D(filter_num, 1, strides=strides))\n else:\n self.downsample = lambda x: x\n\n def call(self, inputs, training=None):\n out = self.conv1(inputs)\n out = self.bn1(out)\n out = self.relu1(out)\n # 通过第二个卷积单元\n out = self.conv2(out)\n out = self.bn2(out)\n out = self.relu2(out)\n # 通过identity模块\n identity = self.downsample(inputs)\n # 2条路径输出直接相加\n output = layers.add([out, identity])\n output = tf.nn.relu(output) # 激活函数\n return output\n\n\nclass ResNet(keras.Model):\n def __init__(self, layer_dims, num_classes=4):\n # layer_dims:list[2,2,2,2,2,2]\n super(ResNet, self).__init__()\n\n self.stem = Sequential([layers.Conv1D(16, kernel_size=3, strides=1),\n layers.BatchNormalization(),\n layers.Activation('relu')\n ])\n self.layer1 = self.build_resblock(16, layer_dims[0]) # 512\n self.layer2 = self.build_resblock(32, 
layer_dims[1], kernel_size=5, strides=4) # 128\n self.layer3 = self.build_resblock(64, layer_dims[2], kernel_size=5, strides=4) # 32\n self.layer4 = self.build_resblock(128, layer_dims[3], strides=2) # 16\n self.layer5 = self.build_resblock(256, layer_dims[4], strides=2) # 8\n self.layer6 = self.build_resblock(512, layer_dims[5], strides=2) # 4\n\n self.avgpool = layers.GlobalAveragePooling1D() # 512大小的向量: 512*1\n self.fc = layers.Dense(num_classes)\n\n def call(self, inputs, training=None):\n x = self.stem(inputs)\n x = self.layer1(x)\n x = self.layer2(x)\n x = self.layer3(x)\n x = self.layer4(x)\n x = self.layer5(x)\n x = self.layer6(x)\n x = self.avgpool(x)\n x = self.fc(x)\n return x\n\n def build_resblock(self, filter_num, blocks, kernel_size=3, strides=1):\n # 辅助函数,堆叠filter_num个BasicBlock\n res_blocks = Sequential()\n # 只有第一个BasicBlock的步长可能不为1,实现下采样\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides))\n\n for _ in range(1, blocks): # 其他BasicBlock步长都为1\n res_blocks.add(BasicBlock(filter_num, kernel_size, strides=1))\n\n return res_blocks\n \nx_train = np.loadtxt(r'/content/drive/My Drive/Data/x_train').reshape(-1, 512, 1).astype(np.float32)\ny_train = np.loadtxt(r'/content/drive/My Drive/Data/y_train').astype(np.int32)\nx_test = np.loadtxt(r'/content/drive/My Drive/Data/x_test').reshape(-1, 512, 1).astype(np.float32)\ny_test = np.loadtxt(r'/content/drive/My Drive/Data/y_test').astype(np.int32)\ntrain_db = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(512)\ntest_db = tf.data.Dataset.from_tensor_slices((x_test, y_test)).batch(512)\n# sample = next(iter(train_db))\n# print(sample)\n\nmodel = ResNet([2,2,2,2,2,2])\nmodel.build(input_shape=(512,512,1))\n# conv_net.summary()\n# fc_net.summary()\noptimizer = optimizers.Adam(lr=1e-3)\n\ntrain_loss = []\ntest_acc = []\nacc_max = 0\nfor epoch in range(500):\n for step, (x, y) in enumerate(train_db):\n with tf.GradientTape() as tape:\n # [b,512,1]=>[b,4]\n logits = model(x, training=True)\n\n 
y_onehot = tf.one_hot(y, depth=4)\n loss = tf.losses.categorical_crossentropy(y_onehot, logits, from_logits=True)\n loss = tf.reduce_mean(loss)\n grads = tape.gradient(loss, model.trainable_variables)\n optimizer.apply_gradients(zip(grads, model.trainable_variables))\n if step % 100 == 0:\n print(epoch, step, 'loss:', float(loss))\n train_loss.append(loss)\n total_num = 0\n total_correct = 0\n for x, y in test_db:\n logits = model(x)\n prob = tf.nn.softmax(logits, axis=1)\n pred = tf.argmax(prob, axis=1)\n pred = tf.cast(pred, dtype=tf.int32)\n\n correct = tf.cast(tf.equal(pred, y), dtype=tf.int32)\n correct = tf.reduce_sum(correct)\n\n total_num += x.shape[0]\n total_correct += int(correct)\n\n acc = total_correct / total_num\n test_acc.append(acc)\n print(epoch, 'acc:', acc)\n if acc > acc_max:\n acc_max = acc\n model.save_weights(r'ResNet/weights.ckpt')\n", "step-ids": [ 6, 7, 8, 9, 11 ] }
[ 6, 7, 8, 9, 11 ]
<|reserved_special_token_0|> class Eye11(Page): form_model = models.Player form_fields = ['option_11'] timeout_seconds = 10 class Eye12(Page): form_model = models.Player form_fields = ['option_12'] timeout_seconds = 10 class Eye13(Page): form_model = models.Player form_fields = ['option_13'] timeout_seconds = 10 class Eye14(Page): form_model = models.Player form_fields = ['option_14'] timeout_seconds = 10 class Eye15(Page): form_model = models.Player form_fields = ['option_15'] timeout_seconds = 10 class Eye16(Page): form_model = models.Player form_fields = ['option_16'] timeout_seconds = 10 class Eye17(Page): form_model = models.Player form_fields = ['option_17'] timeout_seconds = 10 class Eye18(Page): form_model = models.Player form_fields = ['option_18'] timeout_seconds = 10 class Eye19(Page): form_model = models.Player form_fields = ['option_19'] timeout_seconds = 10 class Eye20(Page): form_model = models.Player form_fields = ['option_20'] timeout_seconds = 10 class Eye21(Page): form_model = models.Player form_fields = ['option_21'] timeout_seconds = 10 class Eye22(Page): form_model = models.Player form_fields = ['option_22'] timeout_seconds = 10 class Eye23(Page): form_model = models.Player form_fields = ['option_23'] timeout_seconds = 10 class Eye24(Page): form_model = models.Player form_fields = ['option_24'] timeout_seconds = 10 class Eye25(Page): form_model = models.Player form_fields = ['option_25'] timeout_seconds = 10 class Eye26(Page): form_model = models.Player form_fields = ['option_26'] timeout_seconds = 10 class Eye27(Page): form_model = models.Player form_fields = ['option_27'] timeout_seconds = 10 class Eye28(Page): form_model = models.Player form_fields = ['option_28'] timeout_seconds = 10 class Eye29(Page): form_model = models.Player form_fields = ['option_29'] timeout_seconds = 10 class Eye30(Page): form_model = models.Player form_fields = ['option_30'] timeout_seconds = 10 class Eye31(Page): form_model = models.Player form_fields = 
['option_31'] timeout_seconds = 10 class Eye32(Page): form_model = models.Player form_fields = ['option_32'] timeout_seconds = 10 class Eye33(Page): form_model = models.Player form_fields = ['option_33'] timeout_seconds = 10 class Eye34(Page): form_model = models.Player form_fields = ['option_34'] timeout_seconds = 10 class Eye35(Page): form_model = models.Player form_fields = ['option_35'] timeout_seconds = 10 class Eye36(Page): form_model = models.Player form_fields = ['option_36'] timeout_seconds = 10 class ResultsWaitPage(WaitPage): def after_all_players_arrive(self): self.group.set_payoffs() def is_displayed(self): return self.player.treatment != 4 class MyWaitPage(WaitPage): group_by_arrival_time = True players_per_group = 2 def after_all_players_arrive(self): self.group.get_treatment() class Player1(Page): form_model = models.Player form_fields = ['Message_12'] def is_displayed(self): return self.player.id_in_group == 1 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'Message_12': 'Message 1'} class Player2(Page): form_model = models.Player form_fields = ['option_AB'] def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'option_AB': 'Option A'} class treatment_4(Page): form_model = models.Player form_fields = ['option4_1', 'option4_2'] def before_next_page(self): self.player.payoff = 0.1 self.player.total = 0.3 def is_displayed(self): return self.player.treatment == 4 class Result_123(Page): def vars_for_template(self): return {'task2': self.player.payoff - 0.2} class Demographic(Page): form_model = models.Player form_fields = ['gender', 'age', 'religion', 'service'] class WaitforP1(WaitPage): def is_displayed(self): return self.player.treatment != 4 class Task3(Page): def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class 
Eye8(Page): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class Eye9(Page): form_model = models.Player form_fields = ['option_9'] timeout_seconds = 10 class Eye10(Page): form_model = models.Player form_fields = ['option_10'] timeout_seconds = 10 class Eye11(Page): form_model = models.Player form_fields = ['option_11'] timeout_seconds = 10 class Eye12(Page): form_model = models.Player form_fields = ['option_12'] timeout_seconds = 10 class Eye13(Page): form_model = models.Player form_fields = ['option_13'] timeout_seconds = 10 class Eye14(Page): form_model = models.Player form_fields = ['option_14'] timeout_seconds = 10 class Eye15(Page): form_model = models.Player form_fields = ['option_15'] timeout_seconds = 10 class Eye16(Page): form_model = models.Player form_fields = ['option_16'] timeout_seconds = 10 class Eye17(Page): form_model = models.Player form_fields = ['option_17'] timeout_seconds = 10 class Eye18(Page): form_model = models.Player form_fields = ['option_18'] timeout_seconds = 10 class Eye19(Page): form_model = models.Player form_fields = ['option_19'] timeout_seconds = 10 class Eye20(Page): form_model = models.Player form_fields = ['option_20'] timeout_seconds = 10 class Eye21(Page): form_model = models.Player form_fields = ['option_21'] timeout_seconds = 10 class Eye22(Page): form_model = models.Player form_fields = ['option_22'] timeout_seconds = 10 class Eye23(Page): form_model = models.Player form_fields = ['option_23'] timeout_seconds = 10 class Eye24(Page): form_model = models.Player form_fields = ['option_24'] timeout_seconds = 10 class Eye25(Page): form_model = models.Player form_fields = ['option_25'] timeout_seconds = 10 class Eye26(Page): form_model = models.Player form_fields = ['option_26'] timeout_seconds = 10 class Eye27(Page): form_model = models.Player form_fields = ['option_27'] timeout_seconds = 10 class Eye28(Page): form_model = models.Player form_fields = ['option_28'] timeout_seconds = 10 
class Eye29(Page): form_model = models.Player form_fields = ['option_29'] timeout_seconds = 10 class Eye30(Page): form_model = models.Player form_fields = ['option_30'] timeout_seconds = 10 class Eye31(Page): form_model = models.Player form_fields = ['option_31'] timeout_seconds = 10 class Eye32(Page): form_model = models.Player form_fields = ['option_32'] timeout_seconds = 10 class Eye33(Page): form_model = models.Player form_fields = ['option_33'] timeout_seconds = 10 class Eye34(Page): form_model = models.Player form_fields = ['option_34'] timeout_seconds = 10 class Eye35(Page): form_model = models.Player form_fields = ['option_35'] timeout_seconds = 10 class Eye36(Page): form_model = models.Player form_fields = ['option_36'] timeout_seconds = 10 class ResultsWaitPage(WaitPage): def after_all_players_arrive(self): self.group.set_payoffs() def is_displayed(self): return self.player.treatment != 4 class MyWaitPage(WaitPage): group_by_arrival_time = True players_per_group = 2 def after_all_players_arrive(self): self.group.get_treatment() class Player1(Page): form_model = models.Player form_fields = ['Message_12'] def is_displayed(self): return self.player.id_in_group == 1 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'Message_12': 'Message 1'} class Player2(Page): form_model = models.Player form_fields = ['option_AB'] def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'option_AB': 'Option A'} class treatment_4(Page): form_model = models.Player form_fields = ['option4_1', 'option4_2'] def before_next_page(self): self.player.payoff = 0.1 self.player.total = 0.3 def is_displayed(self): return self.player.treatment == 4 class Result_123(Page): def vars_for_template(self): return {'task2': self.player.payoff - 0.2} class Demographic(Page): form_model = models.Player form_fields = ['gender', 'age', 'religion', 'service'] class WaitforP1(WaitPage): def 
is_displayed(self): return self.player.treatment != 4 class Task3(Page): def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Eye6(Page): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class Eye7(Page): form_model = models.Player form_fields = ['option_7'] timeout_seconds = 10 class Eye8(Page): form_model = models.Player form_fields = ['option_8'] timeout_seconds = 10 class Eye9(Page): form_model = models.Player form_fields = ['option_9'] timeout_seconds = 10 class Eye10(Page): form_model = models.Player form_fields = ['option_10'] timeout_seconds = 10 class Eye11(Page): form_model = models.Player form_fields = ['option_11'] timeout_seconds = 10 class Eye12(Page): form_model = models.Player form_fields = ['option_12'] timeout_seconds = 10 class Eye13(Page): form_model = models.Player form_fields = ['option_13'] timeout_seconds = 10 class Eye14(Page): form_model = models.Player form_fields = ['option_14'] timeout_seconds = 10 class Eye15(Page): form_model = models.Player form_fields = ['option_15'] timeout_seconds = 10 class Eye16(Page): form_model = models.Player form_fields = ['option_16'] timeout_seconds = 10 class Eye17(Page): form_model = models.Player form_fields = ['option_17'] timeout_seconds = 10 class Eye18(Page): form_model = models.Player form_fields = ['option_18'] timeout_seconds = 10 class Eye19(Page): form_model = models.Player form_fields = ['option_19'] timeout_seconds = 10 class Eye20(Page): form_model = models.Player form_fields = ['option_20'] timeout_seconds = 10 class Eye21(Page): form_model = models.Player form_fields = ['option_21'] timeout_seconds = 10 class Eye22(Page): form_model = models.Player form_fields = ['option_22'] timeout_seconds = 10 class Eye23(Page): form_model = models.Player form_fields = ['option_23'] timeout_seconds = 10 class Eye24(Page): form_model = 
models.Player form_fields = ['option_24'] timeout_seconds = 10 class Eye25(Page): form_model = models.Player form_fields = ['option_25'] timeout_seconds = 10 class Eye26(Page): form_model = models.Player form_fields = ['option_26'] timeout_seconds = 10 class Eye27(Page): form_model = models.Player form_fields = ['option_27'] timeout_seconds = 10 class Eye28(Page): form_model = models.Player form_fields = ['option_28'] timeout_seconds = 10 class Eye29(Page): form_model = models.Player form_fields = ['option_29'] timeout_seconds = 10 class Eye30(Page): form_model = models.Player form_fields = ['option_30'] timeout_seconds = 10 class Eye31(Page): form_model = models.Player form_fields = ['option_31'] timeout_seconds = 10 class Eye32(Page): form_model = models.Player form_fields = ['option_32'] timeout_seconds = 10 class Eye33(Page): form_model = models.Player form_fields = ['option_33'] timeout_seconds = 10 class Eye34(Page): form_model = models.Player form_fields = ['option_34'] timeout_seconds = 10 class Eye35(Page): form_model = models.Player form_fields = ['option_35'] timeout_seconds = 10 class Eye36(Page): form_model = models.Player form_fields = ['option_36'] timeout_seconds = 10 class ResultsWaitPage(WaitPage): def after_all_players_arrive(self): self.group.set_payoffs() def is_displayed(self): return self.player.treatment != 4 class MyWaitPage(WaitPage): group_by_arrival_time = True players_per_group = 2 def after_all_players_arrive(self): self.group.get_treatment() class Player1(Page): form_model = models.Player form_fields = ['Message_12'] def is_displayed(self): return self.player.id_in_group == 1 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'Message_12': 'Message 1'} class Player2(Page): form_model = models.Player form_fields = ['option_AB'] def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'option_AB': 'Option A'} class treatment_4(Page): 
form_model = models.Player form_fields = ['option4_1', 'option4_2'] def before_next_page(self): self.player.payoff = 0.1 self.player.total = 0.3 def is_displayed(self): return self.player.treatment == 4 class Result_123(Page): def vars_for_template(self): return {'task2': self.player.payoff - 0.2} class Demographic(Page): form_model = models.Player form_fields = ['gender', 'age', 'religion', 'service'] class WaitforP1(WaitPage): def is_displayed(self): return self.player.treatment != 4 class Task3(Page): def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Welcome(Page): timeout_seconds = 60 class Priming(Page): form_model = models.Player form_fields = ['text'] class Eye1(Page): form_model = models.Player form_fields = ['option_1'] timeout_seconds = 10 class Eye2(Page): form_model = models.Player form_fields = ['option_2'] timeout_seconds = 10 class Eye3(Page): form_model = models.Player form_fields = ['option_3'] timeout_seconds = 10 class Eye4(Page): form_model = models.Player form_fields = ['option_4'] timeout_seconds = 10 class Eye5(Page): form_model = models.Player form_fields = ['option_5'] timeout_seconds = 10 class Eye6(Page): form_model = models.Player form_fields = ['option_6'] timeout_seconds = 10 class Eye7(Page): form_model = models.Player form_fields = ['option_7'] timeout_seconds = 10 class Eye8(Page): form_model = models.Player form_fields = ['option_8'] timeout_seconds = 10 class Eye9(Page): form_model = models.Player form_fields = ['option_9'] timeout_seconds = 10 class Eye10(Page): form_model = models.Player form_fields = ['option_10'] timeout_seconds = 10 class Eye11(Page): form_model = models.Player form_fields = ['option_11'] timeout_seconds = 10 class Eye12(Page): form_model = models.Player form_fields = ['option_12'] timeout_seconds = 10 class Eye13(Page): form_model = models.Player form_fields = ['option_13'] 
timeout_seconds = 10 class Eye14(Page): form_model = models.Player form_fields = ['option_14'] timeout_seconds = 10 class Eye15(Page): form_model = models.Player form_fields = ['option_15'] timeout_seconds = 10 class Eye16(Page): form_model = models.Player form_fields = ['option_16'] timeout_seconds = 10 class Eye17(Page): form_model = models.Player form_fields = ['option_17'] timeout_seconds = 10 class Eye18(Page): form_model = models.Player form_fields = ['option_18'] timeout_seconds = 10 class Eye19(Page): form_model = models.Player form_fields = ['option_19'] timeout_seconds = 10 class Eye20(Page): form_model = models.Player form_fields = ['option_20'] timeout_seconds = 10 class Eye21(Page): form_model = models.Player form_fields = ['option_21'] timeout_seconds = 10 class Eye22(Page): form_model = models.Player form_fields = ['option_22'] timeout_seconds = 10 class Eye23(Page): form_model = models.Player form_fields = ['option_23'] timeout_seconds = 10 class Eye24(Page): form_model = models.Player form_fields = ['option_24'] timeout_seconds = 10 class Eye25(Page): form_model = models.Player form_fields = ['option_25'] timeout_seconds = 10 class Eye26(Page): form_model = models.Player form_fields = ['option_26'] timeout_seconds = 10 class Eye27(Page): form_model = models.Player form_fields = ['option_27'] timeout_seconds = 10 class Eye28(Page): form_model = models.Player form_fields = ['option_28'] timeout_seconds = 10 class Eye29(Page): form_model = models.Player form_fields = ['option_29'] timeout_seconds = 10 class Eye30(Page): form_model = models.Player form_fields = ['option_30'] timeout_seconds = 10 class Eye31(Page): form_model = models.Player form_fields = ['option_31'] timeout_seconds = 10 class Eye32(Page): form_model = models.Player form_fields = ['option_32'] timeout_seconds = 10 class Eye33(Page): form_model = models.Player form_fields = ['option_33'] timeout_seconds = 10 class Eye34(Page): form_model = models.Player form_fields = ['option_34'] 
timeout_seconds = 10 class Eye35(Page): form_model = models.Player form_fields = ['option_35'] timeout_seconds = 10 class Eye36(Page): form_model = models.Player form_fields = ['option_36'] timeout_seconds = 10 class ResultsWaitPage(WaitPage): def after_all_players_arrive(self): self.group.set_payoffs() def is_displayed(self): return self.player.treatment != 4 class MyWaitPage(WaitPage): group_by_arrival_time = True players_per_group = 2 def after_all_players_arrive(self): self.group.get_treatment() class Player1(Page): form_model = models.Player form_fields = ['Message_12'] def is_displayed(self): return self.player.id_in_group == 1 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'Message_12': 'Message 1'} class Player2(Page): form_model = models.Player form_fields = ['option_AB'] def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'option_AB': 'Option A'} class treatment_4(Page): form_model = models.Player form_fields = ['option4_1', 'option4_2'] def before_next_page(self): self.player.payoff = 0.1 self.player.total = 0.3 def is_displayed(self): return self.player.treatment == 4 class Result_123(Page): def vars_for_template(self): return {'task2': self.player.payoff - 0.2} class Demographic(Page): form_model = models.Player form_fields = ['gender', 'age', 'religion', 'service'] class WaitforP1(WaitPage): def is_displayed(self): return self.player.treatment != 4 class Task3(Page): def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 <|reserved_special_token_0|> <|reserved_special_token_1|> from otree.api import Currency as c, currency_range from . 
import models from ._builtin import Page, WaitPage from .models import Constants class Introduction(Page): timeout_seconds = 60 class Welcome(Page): timeout_seconds = 60 class Priming(Page): form_model = models.Player form_fields = ['text'] class Eye1(Page): form_model = models.Player form_fields = ['option_1'] timeout_seconds = 10 class Eye2(Page): form_model = models.Player form_fields = ['option_2'] timeout_seconds = 10 class Eye3(Page): form_model = models.Player form_fields = ['option_3'] timeout_seconds = 10 class Eye4(Page): form_model = models.Player form_fields = ['option_4'] timeout_seconds = 10 class Eye5(Page): form_model = models.Player form_fields = ['option_5'] timeout_seconds = 10 class Eye6(Page): form_model = models.Player form_fields = ['option_6'] timeout_seconds = 10 class Eye7(Page): form_model = models.Player form_fields = ['option_7'] timeout_seconds = 10 class Eye8(Page): form_model = models.Player form_fields = ['option_8'] timeout_seconds = 10 class Eye9(Page): form_model = models.Player form_fields = ['option_9'] timeout_seconds = 10 class Eye10(Page): form_model = models.Player form_fields = ['option_10'] timeout_seconds = 10 class Eye11(Page): form_model = models.Player form_fields = ['option_11'] timeout_seconds = 10 class Eye12(Page): form_model = models.Player form_fields = ['option_12'] timeout_seconds = 10 class Eye13(Page): form_model = models.Player form_fields = ['option_13'] timeout_seconds = 10 class Eye14(Page): form_model = models.Player form_fields = ['option_14'] timeout_seconds = 10 class Eye15(Page): form_model = models.Player form_fields = ['option_15'] timeout_seconds = 10 class Eye16(Page): form_model = models.Player form_fields = ['option_16'] timeout_seconds = 10 class Eye17(Page): form_model = models.Player form_fields = ['option_17'] timeout_seconds = 10 class Eye18(Page): form_model = models.Player form_fields = ['option_18'] timeout_seconds = 10 class Eye19(Page): form_model = models.Player form_fields = 
['option_19'] timeout_seconds = 10 class Eye20(Page): form_model = models.Player form_fields = ['option_20'] timeout_seconds = 10 class Eye21(Page): form_model = models.Player form_fields = ['option_21'] timeout_seconds = 10 class Eye22(Page): form_model = models.Player form_fields = ['option_22'] timeout_seconds = 10 class Eye23(Page): form_model = models.Player form_fields = ['option_23'] timeout_seconds = 10 class Eye24(Page): form_model = models.Player form_fields = ['option_24'] timeout_seconds = 10 class Eye25(Page): form_model = models.Player form_fields = ['option_25'] timeout_seconds = 10 class Eye26(Page): form_model = models.Player form_fields = ['option_26'] timeout_seconds = 10 class Eye27(Page): form_model = models.Player form_fields = ['option_27'] timeout_seconds = 10 class Eye28(Page): form_model = models.Player form_fields = ['option_28'] timeout_seconds = 10 class Eye29(Page): form_model = models.Player form_fields = ['option_29'] timeout_seconds = 10 class Eye30(Page): form_model = models.Player form_fields = ['option_30'] timeout_seconds = 10 class Eye31(Page): form_model = models.Player form_fields = ['option_31'] timeout_seconds = 10 class Eye32(Page): form_model = models.Player form_fields = ['option_32'] timeout_seconds = 10 class Eye33(Page): form_model = models.Player form_fields = ['option_33'] timeout_seconds = 10 class Eye34(Page): form_model = models.Player form_fields = ['option_34'] timeout_seconds = 10 class Eye35(Page): form_model = models.Player form_fields = ['option_35'] timeout_seconds = 10 class Eye36(Page): form_model = models.Player form_fields = ['option_36'] timeout_seconds = 10 class ResultsWaitPage(WaitPage): def after_all_players_arrive(self): self.group.set_payoffs() def is_displayed(self): return self.player.treatment != 4 class MyWaitPage(WaitPage): group_by_arrival_time = True players_per_group = 2 def after_all_players_arrive(self): self.group.get_treatment() class Player1(Page): form_model = models.Player 
form_fields = ['Message_12'] def is_displayed(self): return self.player.id_in_group == 1 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'Message_12': 'Message 1'} class Player2(Page): form_model = models.Player form_fields = ['option_AB'] def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 timeout_seconds = 120 timeout_submission = {'option_AB': 'Option A'} class treatment_4(Page): form_model = models.Player form_fields = ['option4_1', 'option4_2'] def before_next_page(self): self.player.payoff = 0.10 self.player.total = 0.30 def is_displayed(self): return self.player.treatment == 4 class Result_123(Page): def vars_for_template(self): return {'task2': self.player.payoff - 0.20} class Demographic(Page): form_model = models.Player form_fields = ['gender', 'age', 'religion', 'service'] #'getcode_1', 'getcode_2'] class WaitforP1(WaitPage): def is_displayed(self): return self.player.treatment != 4 class Task3(Page): def is_displayed(self): return self.player.id_in_group == 2 and self.player.treatment != 4 page_sequence = [ MyWaitPage, Welcome, Priming, Introduction, Eye1, Eye2, Eye3, Eye4, Eye5, Eye6, Eye7, Eye8, Eye9, Eye10, Eye11, Eye12, Eye13, Eye14, Eye15, Eye16, Eye17, Eye18, Eye19, Eye20, Eye21, Eye22, Eye23, Eye24, Eye25, Eye26, Eye27, Eye28, Eye29, Eye30, Eye31, Eye32, Eye33, Eye34, Eye35, Eye36, Player1, Task3, WaitforP1, Player2, treatment_4, Demographic, ResultsWaitPage, Result_123 ]
flexible
{ "blob_id": "8fecfdf4b3772e5304f0b146317f94cdbd7fbd53", "index": 5791, "step-1": "<mask token>\n\n\nclass Eye11(Page):\n form_model = models.Player\n form_fields = ['option_11']\n timeout_seconds = 10\n\n\nclass Eye12(Page):\n form_model = models.Player\n form_fields = ['option_12']\n timeout_seconds = 10\n\n\nclass Eye13(Page):\n form_model = models.Player\n form_fields = ['option_13']\n timeout_seconds = 10\n\n\nclass Eye14(Page):\n form_model = models.Player\n form_fields = ['option_14']\n timeout_seconds = 10\n\n\nclass Eye15(Page):\n form_model = models.Player\n form_fields = ['option_15']\n timeout_seconds = 10\n\n\nclass Eye16(Page):\n form_model = models.Player\n form_fields = ['option_16']\n timeout_seconds = 10\n\n\nclass Eye17(Page):\n form_model = models.Player\n form_fields = ['option_17']\n timeout_seconds = 10\n\n\nclass Eye18(Page):\n form_model = models.Player\n form_fields = ['option_18']\n timeout_seconds = 10\n\n\nclass Eye19(Page):\n form_model = models.Player\n form_fields = ['option_19']\n timeout_seconds = 10\n\n\nclass Eye20(Page):\n form_model = models.Player\n form_fields = ['option_20']\n timeout_seconds = 10\n\n\nclass Eye21(Page):\n form_model = models.Player\n form_fields = ['option_21']\n timeout_seconds = 10\n\n\nclass Eye22(Page):\n form_model = models.Player\n form_fields = ['option_22']\n timeout_seconds = 10\n\n\nclass Eye23(Page):\n form_model = models.Player\n form_fields = ['option_23']\n timeout_seconds = 10\n\n\nclass Eye24(Page):\n form_model = models.Player\n form_fields = ['option_24']\n timeout_seconds = 10\n\n\nclass Eye25(Page):\n form_model = models.Player\n form_fields = ['option_25']\n timeout_seconds = 10\n\n\nclass Eye26(Page):\n form_model = models.Player\n form_fields = ['option_26']\n timeout_seconds = 10\n\n\nclass Eye27(Page):\n form_model = models.Player\n form_fields = ['option_27']\n timeout_seconds = 10\n\n\nclass Eye28(Page):\n form_model = models.Player\n form_fields = ['option_28']\n timeout_seconds 
= 10\n\n\nclass Eye29(Page):\n form_model = models.Player\n form_fields = ['option_29']\n timeout_seconds = 10\n\n\nclass Eye30(Page):\n form_model = models.Player\n form_fields = ['option_30']\n timeout_seconds = 10\n\n\nclass Eye31(Page):\n form_model = models.Player\n form_fields = ['option_31']\n timeout_seconds = 10\n\n\nclass Eye32(Page):\n form_model = models.Player\n form_fields = ['option_32']\n timeout_seconds = 10\n\n\nclass Eye33(Page):\n form_model = models.Player\n form_fields = ['option_33']\n timeout_seconds = 10\n\n\nclass Eye34(Page):\n form_model = models.Player\n form_fields = ['option_34']\n timeout_seconds = 10\n\n\nclass Eye35(Page):\n form_model = models.Player\n form_fields = ['option_35']\n timeout_seconds = 10\n\n\nclass Eye36(Page):\n form_model = models.Player\n form_fields = ['option_36']\n timeout_seconds = 10\n\n\nclass ResultsWaitPage(WaitPage):\n\n def after_all_players_arrive(self):\n self.group.set_payoffs()\n\n def is_displayed(self):\n return self.player.treatment != 4\n\n\nclass MyWaitPage(WaitPage):\n group_by_arrival_time = True\n players_per_group = 2\n\n def after_all_players_arrive(self):\n self.group.get_treatment()\n\n\nclass Player1(Page):\n form_model = models.Player\n form_fields = ['Message_12']\n\n def is_displayed(self):\n return self.player.id_in_group == 1 and self.player.treatment != 4\n timeout_seconds = 120\n timeout_submission = {'Message_12': 'Message 1'}\n\n\nclass Player2(Page):\n form_model = models.Player\n form_fields = ['option_AB']\n\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n timeout_seconds = 120\n timeout_submission = {'option_AB': 'Option A'}\n\n\nclass treatment_4(Page):\n form_model = models.Player\n form_fields = ['option4_1', 'option4_2']\n\n def before_next_page(self):\n self.player.payoff = 0.1\n self.player.total = 0.3\n\n def is_displayed(self):\n return self.player.treatment == 4\n\n\nclass Result_123(Page):\n\n def 
vars_for_template(self):\n return {'task2': self.player.payoff - 0.2}\n\n\nclass Demographic(Page):\n form_model = models.Player\n form_fields = ['gender', 'age', 'religion', 'service']\n\n\nclass WaitforP1(WaitPage):\n\n def is_displayed(self):\n return self.player.treatment != 4\n\n\nclass Task3(Page):\n\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Eye8(Page):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Eye9(Page):\n form_model = models.Player\n form_fields = ['option_9']\n timeout_seconds = 10\n\n\nclass Eye10(Page):\n form_model = models.Player\n form_fields = ['option_10']\n timeout_seconds = 10\n\n\nclass Eye11(Page):\n form_model = models.Player\n form_fields = ['option_11']\n timeout_seconds = 10\n\n\nclass Eye12(Page):\n form_model = models.Player\n form_fields = ['option_12']\n timeout_seconds = 10\n\n\nclass Eye13(Page):\n form_model = models.Player\n form_fields = ['option_13']\n timeout_seconds = 10\n\n\nclass Eye14(Page):\n form_model = models.Player\n form_fields = ['option_14']\n timeout_seconds = 10\n\n\nclass Eye15(Page):\n form_model = models.Player\n form_fields = ['option_15']\n timeout_seconds = 10\n\n\nclass Eye16(Page):\n form_model = models.Player\n form_fields = ['option_16']\n timeout_seconds = 10\n\n\nclass Eye17(Page):\n form_model = models.Player\n form_fields = ['option_17']\n timeout_seconds = 10\n\n\nclass Eye18(Page):\n form_model = models.Player\n form_fields = ['option_18']\n timeout_seconds = 10\n\n\nclass Eye19(Page):\n form_model = models.Player\n form_fields = ['option_19']\n timeout_seconds = 10\n\n\nclass Eye20(Page):\n form_model = models.Player\n form_fields = ['option_20']\n timeout_seconds = 10\n\n\nclass Eye21(Page):\n form_model = models.Player\n form_fields = ['option_21']\n timeout_seconds = 10\n\n\nclass Eye22(Page):\n form_model = models.Player\n form_fields = ['option_22']\n timeout_seconds = 
10\n\n\nclass Eye23(Page):\n form_model = models.Player\n form_fields = ['option_23']\n timeout_seconds = 10\n\n\nclass Eye24(Page):\n form_model = models.Player\n form_fields = ['option_24']\n timeout_seconds = 10\n\n\nclass Eye25(Page):\n form_model = models.Player\n form_fields = ['option_25']\n timeout_seconds = 10\n\n\nclass Eye26(Page):\n form_model = models.Player\n form_fields = ['option_26']\n timeout_seconds = 10\n\n\nclass Eye27(Page):\n form_model = models.Player\n form_fields = ['option_27']\n timeout_seconds = 10\n\n\nclass Eye28(Page):\n form_model = models.Player\n form_fields = ['option_28']\n timeout_seconds = 10\n\n\nclass Eye29(Page):\n form_model = models.Player\n form_fields = ['option_29']\n timeout_seconds = 10\n\n\nclass Eye30(Page):\n form_model = models.Player\n form_fields = ['option_30']\n timeout_seconds = 10\n\n\nclass Eye31(Page):\n form_model = models.Player\n form_fields = ['option_31']\n timeout_seconds = 10\n\n\nclass Eye32(Page):\n form_model = models.Player\n form_fields = ['option_32']\n timeout_seconds = 10\n\n\nclass Eye33(Page):\n form_model = models.Player\n form_fields = ['option_33']\n timeout_seconds = 10\n\n\nclass Eye34(Page):\n form_model = models.Player\n form_fields = ['option_34']\n timeout_seconds = 10\n\n\nclass Eye35(Page):\n form_model = models.Player\n form_fields = ['option_35']\n timeout_seconds = 10\n\n\nclass Eye36(Page):\n form_model = models.Player\n form_fields = ['option_36']\n timeout_seconds = 10\n\n\nclass ResultsWaitPage(WaitPage):\n\n def after_all_players_arrive(self):\n self.group.set_payoffs()\n\n def is_displayed(self):\n return self.player.treatment != 4\n\n\nclass MyWaitPage(WaitPage):\n group_by_arrival_time = True\n players_per_group = 2\n\n def after_all_players_arrive(self):\n self.group.get_treatment()\n\n\nclass Player1(Page):\n form_model = models.Player\n form_fields = ['Message_12']\n\n def is_displayed(self):\n return self.player.id_in_group == 1 and self.player.treatment != 4\n 
timeout_seconds = 120\n timeout_submission = {'Message_12': 'Message 1'}\n\n\nclass Player2(Page):\n form_model = models.Player\n form_fields = ['option_AB']\n\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n timeout_seconds = 120\n timeout_submission = {'option_AB': 'Option A'}\n\n\nclass treatment_4(Page):\n form_model = models.Player\n form_fields = ['option4_1', 'option4_2']\n\n def before_next_page(self):\n self.player.payoff = 0.1\n self.player.total = 0.3\n\n def is_displayed(self):\n return self.player.treatment == 4\n\n\nclass Result_123(Page):\n\n def vars_for_template(self):\n return {'task2': self.player.payoff - 0.2}\n\n\nclass Demographic(Page):\n form_model = models.Player\n form_fields = ['gender', 'age', 'religion', 'service']\n\n\nclass WaitforP1(WaitPage):\n\n def is_displayed(self):\n return self.player.treatment != 4\n\n\nclass Task3(Page):\n\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Eye6(Page):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass Eye7(Page):\n form_model = models.Player\n form_fields = ['option_7']\n timeout_seconds = 10\n\n\nclass Eye8(Page):\n form_model = models.Player\n form_fields = ['option_8']\n timeout_seconds = 10\n\n\nclass Eye9(Page):\n form_model = models.Player\n form_fields = ['option_9']\n timeout_seconds = 10\n\n\nclass Eye10(Page):\n form_model = models.Player\n form_fields = ['option_10']\n timeout_seconds = 10\n\n\nclass Eye11(Page):\n form_model = models.Player\n form_fields = ['option_11']\n timeout_seconds = 10\n\n\nclass Eye12(Page):\n form_model = models.Player\n form_fields = ['option_12']\n timeout_seconds = 10\n\n\nclass Eye13(Page):\n form_model = models.Player\n form_fields = ['option_13']\n timeout_seconds = 10\n\n\nclass Eye14(Page):\n form_model = models.Player\n form_fields = ['option_14']\n timeout_seconds = 10\n\n\nclass 
Eye15(Page):\n form_model = models.Player\n form_fields = ['option_15']\n timeout_seconds = 10\n\n\nclass Eye16(Page):\n form_model = models.Player\n form_fields = ['option_16']\n timeout_seconds = 10\n\n\nclass Eye17(Page):\n form_model = models.Player\n form_fields = ['option_17']\n timeout_seconds = 10\n\n\nclass Eye18(Page):\n form_model = models.Player\n form_fields = ['option_18']\n timeout_seconds = 10\n\n\nclass Eye19(Page):\n form_model = models.Player\n form_fields = ['option_19']\n timeout_seconds = 10\n\n\nclass Eye20(Page):\n form_model = models.Player\n form_fields = ['option_20']\n timeout_seconds = 10\n\n\nclass Eye21(Page):\n form_model = models.Player\n form_fields = ['option_21']\n timeout_seconds = 10\n\n\nclass Eye22(Page):\n form_model = models.Player\n form_fields = ['option_22']\n timeout_seconds = 10\n\n\nclass Eye23(Page):\n form_model = models.Player\n form_fields = ['option_23']\n timeout_seconds = 10\n\n\nclass Eye24(Page):\n form_model = models.Player\n form_fields = ['option_24']\n timeout_seconds = 10\n\n\nclass Eye25(Page):\n form_model = models.Player\n form_fields = ['option_25']\n timeout_seconds = 10\n\n\nclass Eye26(Page):\n form_model = models.Player\n form_fields = ['option_26']\n timeout_seconds = 10\n\n\nclass Eye27(Page):\n form_model = models.Player\n form_fields = ['option_27']\n timeout_seconds = 10\n\n\nclass Eye28(Page):\n form_model = models.Player\n form_fields = ['option_28']\n timeout_seconds = 10\n\n\nclass Eye29(Page):\n form_model = models.Player\n form_fields = ['option_29']\n timeout_seconds = 10\n\n\nclass Eye30(Page):\n form_model = models.Player\n form_fields = ['option_30']\n timeout_seconds = 10\n\n\nclass Eye31(Page):\n form_model = models.Player\n form_fields = ['option_31']\n timeout_seconds = 10\n\n\nclass Eye32(Page):\n form_model = models.Player\n form_fields = ['option_32']\n timeout_seconds = 10\n\n\nclass Eye33(Page):\n form_model = models.Player\n form_fields = ['option_33']\n timeout_seconds = 
10\n\n\nclass Eye34(Page):\n form_model = models.Player\n form_fields = ['option_34']\n timeout_seconds = 10\n\n\nclass Eye35(Page):\n form_model = models.Player\n form_fields = ['option_35']\n timeout_seconds = 10\n\n\nclass Eye36(Page):\n form_model = models.Player\n form_fields = ['option_36']\n timeout_seconds = 10\n\n\nclass ResultsWaitPage(WaitPage):\n\n def after_all_players_arrive(self):\n self.group.set_payoffs()\n\n def is_displayed(self):\n return self.player.treatment != 4\n\n\nclass MyWaitPage(WaitPage):\n group_by_arrival_time = True\n players_per_group = 2\n\n def after_all_players_arrive(self):\n self.group.get_treatment()\n\n\nclass Player1(Page):\n form_model = models.Player\n form_fields = ['Message_12']\n\n def is_displayed(self):\n return self.player.id_in_group == 1 and self.player.treatment != 4\n timeout_seconds = 120\n timeout_submission = {'Message_12': 'Message 1'}\n\n\nclass Player2(Page):\n form_model = models.Player\n form_fields = ['option_AB']\n\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n timeout_seconds = 120\n timeout_submission = {'option_AB': 'Option A'}\n\n\nclass treatment_4(Page):\n form_model = models.Player\n form_fields = ['option4_1', 'option4_2']\n\n def before_next_page(self):\n self.player.payoff = 0.1\n self.player.total = 0.3\n\n def is_displayed(self):\n return self.player.treatment == 4\n\n\nclass Result_123(Page):\n\n def vars_for_template(self):\n return {'task2': self.player.payoff - 0.2}\n\n\nclass Demographic(Page):\n form_model = models.Player\n form_fields = ['gender', 'age', 'religion', 'service']\n\n\nclass WaitforP1(WaitPage):\n\n def is_displayed(self):\n return self.player.treatment != 4\n\n\nclass Task3(Page):\n\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n\n\n<mask token>\n", "step-4": "<mask token>\n\n\nclass Welcome(Page):\n timeout_seconds = 60\n\n\nclass Priming(Page):\n form_model = 
models.Player\n form_fields = ['text']\n\n\nclass Eye1(Page):\n form_model = models.Player\n form_fields = ['option_1']\n timeout_seconds = 10\n\n\nclass Eye2(Page):\n form_model = models.Player\n form_fields = ['option_2']\n timeout_seconds = 10\n\n\nclass Eye3(Page):\n form_model = models.Player\n form_fields = ['option_3']\n timeout_seconds = 10\n\n\nclass Eye4(Page):\n form_model = models.Player\n form_fields = ['option_4']\n timeout_seconds = 10\n\n\nclass Eye5(Page):\n form_model = models.Player\n form_fields = ['option_5']\n timeout_seconds = 10\n\n\nclass Eye6(Page):\n form_model = models.Player\n form_fields = ['option_6']\n timeout_seconds = 10\n\n\nclass Eye7(Page):\n form_model = models.Player\n form_fields = ['option_7']\n timeout_seconds = 10\n\n\nclass Eye8(Page):\n form_model = models.Player\n form_fields = ['option_8']\n timeout_seconds = 10\n\n\nclass Eye9(Page):\n form_model = models.Player\n form_fields = ['option_9']\n timeout_seconds = 10\n\n\nclass Eye10(Page):\n form_model = models.Player\n form_fields = ['option_10']\n timeout_seconds = 10\n\n\nclass Eye11(Page):\n form_model = models.Player\n form_fields = ['option_11']\n timeout_seconds = 10\n\n\nclass Eye12(Page):\n form_model = models.Player\n form_fields = ['option_12']\n timeout_seconds = 10\n\n\nclass Eye13(Page):\n form_model = models.Player\n form_fields = ['option_13']\n timeout_seconds = 10\n\n\nclass Eye14(Page):\n form_model = models.Player\n form_fields = ['option_14']\n timeout_seconds = 10\n\n\nclass Eye15(Page):\n form_model = models.Player\n form_fields = ['option_15']\n timeout_seconds = 10\n\n\nclass Eye16(Page):\n form_model = models.Player\n form_fields = ['option_16']\n timeout_seconds = 10\n\n\nclass Eye17(Page):\n form_model = models.Player\n form_fields = ['option_17']\n timeout_seconds = 10\n\n\nclass Eye18(Page):\n form_model = models.Player\n form_fields = ['option_18']\n timeout_seconds = 10\n\n\nclass Eye19(Page):\n form_model = models.Player\n form_fields = 
['option_19']\n timeout_seconds = 10\n\n\nclass Eye20(Page):\n form_model = models.Player\n form_fields = ['option_20']\n timeout_seconds = 10\n\n\nclass Eye21(Page):\n form_model = models.Player\n form_fields = ['option_21']\n timeout_seconds = 10\n\n\nclass Eye22(Page):\n form_model = models.Player\n form_fields = ['option_22']\n timeout_seconds = 10\n\n\nclass Eye23(Page):\n form_model = models.Player\n form_fields = ['option_23']\n timeout_seconds = 10\n\n\nclass Eye24(Page):\n form_model = models.Player\n form_fields = ['option_24']\n timeout_seconds = 10\n\n\nclass Eye25(Page):\n form_model = models.Player\n form_fields = ['option_25']\n timeout_seconds = 10\n\n\nclass Eye26(Page):\n form_model = models.Player\n form_fields = ['option_26']\n timeout_seconds = 10\n\n\nclass Eye27(Page):\n form_model = models.Player\n form_fields = ['option_27']\n timeout_seconds = 10\n\n\nclass Eye28(Page):\n form_model = models.Player\n form_fields = ['option_28']\n timeout_seconds = 10\n\n\nclass Eye29(Page):\n form_model = models.Player\n form_fields = ['option_29']\n timeout_seconds = 10\n\n\nclass Eye30(Page):\n form_model = models.Player\n form_fields = ['option_30']\n timeout_seconds = 10\n\n\nclass Eye31(Page):\n form_model = models.Player\n form_fields = ['option_31']\n timeout_seconds = 10\n\n\nclass Eye32(Page):\n form_model = models.Player\n form_fields = ['option_32']\n timeout_seconds = 10\n\n\nclass Eye33(Page):\n form_model = models.Player\n form_fields = ['option_33']\n timeout_seconds = 10\n\n\nclass Eye34(Page):\n form_model = models.Player\n form_fields = ['option_34']\n timeout_seconds = 10\n\n\nclass Eye35(Page):\n form_model = models.Player\n form_fields = ['option_35']\n timeout_seconds = 10\n\n\nclass Eye36(Page):\n form_model = models.Player\n form_fields = ['option_36']\n timeout_seconds = 10\n\n\nclass ResultsWaitPage(WaitPage):\n\n def after_all_players_arrive(self):\n self.group.set_payoffs()\n\n def is_displayed(self):\n return 
self.player.treatment != 4\n\n\nclass MyWaitPage(WaitPage):\n group_by_arrival_time = True\n players_per_group = 2\n\n def after_all_players_arrive(self):\n self.group.get_treatment()\n\n\nclass Player1(Page):\n form_model = models.Player\n form_fields = ['Message_12']\n\n def is_displayed(self):\n return self.player.id_in_group == 1 and self.player.treatment != 4\n timeout_seconds = 120\n timeout_submission = {'Message_12': 'Message 1'}\n\n\nclass Player2(Page):\n form_model = models.Player\n form_fields = ['option_AB']\n\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n timeout_seconds = 120\n timeout_submission = {'option_AB': 'Option A'}\n\n\nclass treatment_4(Page):\n form_model = models.Player\n form_fields = ['option4_1', 'option4_2']\n\n def before_next_page(self):\n self.player.payoff = 0.1\n self.player.total = 0.3\n\n def is_displayed(self):\n return self.player.treatment == 4\n\n\nclass Result_123(Page):\n\n def vars_for_template(self):\n return {'task2': self.player.payoff - 0.2}\n\n\nclass Demographic(Page):\n form_model = models.Player\n form_fields = ['gender', 'age', 'religion', 'service']\n\n\nclass WaitforP1(WaitPage):\n\n def is_displayed(self):\n return self.player.treatment != 4\n\n\nclass Task3(Page):\n\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n\n\n<mask token>\n", "step-5": "from otree.api import Currency as c, currency_range\nfrom . 
import models\nfrom ._builtin import Page, WaitPage\nfrom .models import Constants\n\n\nclass Introduction(Page):\n timeout_seconds = 60\n\n\nclass Welcome(Page):\n timeout_seconds = 60\n\n\nclass Priming(Page):\n form_model = models.Player\n form_fields = ['text']\n\n\nclass Eye1(Page):\n form_model = models.Player\n form_fields = ['option_1']\n\n timeout_seconds = 10\n\n\nclass Eye2(Page):\n form_model = models.Player\n form_fields = ['option_2']\n\n timeout_seconds = 10\n\n\nclass Eye3(Page):\n form_model = models.Player\n form_fields = ['option_3']\n\n timeout_seconds = 10\n\n\nclass Eye4(Page):\n form_model = models.Player\n form_fields = ['option_4']\n\n timeout_seconds = 10\n\n\nclass Eye5(Page):\n form_model = models.Player\n form_fields = ['option_5']\n\n timeout_seconds = 10\n\n\nclass Eye6(Page):\n form_model = models.Player\n form_fields = ['option_6']\n\n timeout_seconds = 10\n\n\nclass Eye7(Page):\n form_model = models.Player\n form_fields = ['option_7']\n\n timeout_seconds = 10\n\n\nclass Eye8(Page):\n form_model = models.Player\n form_fields = ['option_8']\n\n timeout_seconds = 10\n\n\nclass Eye9(Page):\n form_model = models.Player\n form_fields = ['option_9']\n\n timeout_seconds = 10\n\n\nclass Eye10(Page):\n form_model = models.Player\n form_fields = ['option_10']\n\n timeout_seconds = 10\n\n\nclass Eye11(Page):\n form_model = models.Player\n form_fields = ['option_11']\n\n timeout_seconds = 10\n\n\nclass Eye12(Page):\n form_model = models.Player\n form_fields = ['option_12']\n\n timeout_seconds = 10\n\n\nclass Eye13(Page):\n form_model = models.Player\n form_fields = ['option_13']\n\n timeout_seconds = 10\n\n\nclass Eye14(Page):\n form_model = models.Player\n form_fields = ['option_14']\n\n timeout_seconds = 10\n\n\nclass Eye15(Page):\n form_model = models.Player\n form_fields = ['option_15']\n\n timeout_seconds = 10\n\n\nclass Eye16(Page):\n form_model = models.Player\n form_fields = ['option_16']\n\n timeout_seconds = 10\n\n\nclass 
Eye17(Page):\n form_model = models.Player\n form_fields = ['option_17']\n\n timeout_seconds = 10\n\n\nclass Eye18(Page):\n form_model = models.Player\n form_fields = ['option_18']\n\n timeout_seconds = 10\n\n\nclass Eye19(Page):\n form_model = models.Player\n form_fields = ['option_19']\n\n timeout_seconds = 10\n\n\nclass Eye20(Page):\n form_model = models.Player\n form_fields = ['option_20']\n\n timeout_seconds = 10\n\n\nclass Eye21(Page):\n form_model = models.Player\n form_fields = ['option_21']\n\n timeout_seconds = 10\n\n\nclass Eye22(Page):\n form_model = models.Player\n form_fields = ['option_22']\n\n timeout_seconds = 10\n\n\nclass Eye23(Page):\n form_model = models.Player\n form_fields = ['option_23']\n\n timeout_seconds = 10\n\n\nclass Eye24(Page):\n form_model = models.Player\n form_fields = ['option_24']\n\n timeout_seconds = 10\n\n\nclass Eye25(Page):\n form_model = models.Player\n form_fields = ['option_25']\n\n timeout_seconds = 10\n\n\nclass Eye26(Page):\n form_model = models.Player\n form_fields = ['option_26']\n\n timeout_seconds = 10\n\n\nclass Eye27(Page):\n form_model = models.Player\n form_fields = ['option_27']\n\n timeout_seconds = 10\n\n\nclass Eye28(Page):\n form_model = models.Player\n form_fields = ['option_28']\n\n timeout_seconds = 10\n\n\nclass Eye29(Page):\n form_model = models.Player\n form_fields = ['option_29']\n\n timeout_seconds = 10\n\n\nclass Eye30(Page):\n form_model = models.Player\n form_fields = ['option_30']\n\n timeout_seconds = 10\n\n\nclass Eye31(Page):\n form_model = models.Player\n form_fields = ['option_31']\n\n timeout_seconds = 10\n\n\nclass Eye32(Page):\n form_model = models.Player\n form_fields = ['option_32']\n\n timeout_seconds = 10\n\n\nclass Eye33(Page):\n form_model = models.Player\n form_fields = ['option_33']\n\n timeout_seconds = 10\n\n\nclass Eye34(Page):\n form_model = models.Player\n form_fields = ['option_34']\n\n timeout_seconds = 10\n\n\nclass Eye35(Page):\n form_model = models.Player\n form_fields 
= ['option_35']\n\n timeout_seconds = 10\n\n\nclass Eye36(Page):\n form_model = models.Player\n form_fields = ['option_36']\n\n timeout_seconds = 10\n\n\nclass ResultsWaitPage(WaitPage):\n\n def after_all_players_arrive(self):\n self.group.set_payoffs()\n\n def is_displayed(self):\n return self.player.treatment != 4\n\n\nclass MyWaitPage(WaitPage):\n group_by_arrival_time = True\n players_per_group = 2\n\n def after_all_players_arrive(self):\n self.group.get_treatment()\n\n\nclass Player1(Page):\n form_model = models.Player\n form_fields = ['Message_12']\n\n def is_displayed(self):\n return self.player.id_in_group == 1 and self.player.treatment != 4\n\n timeout_seconds = 120\n timeout_submission = {'Message_12': 'Message 1'}\n\n\nclass Player2(Page):\n form_model = models.Player\n form_fields = ['option_AB']\n\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n\n timeout_seconds = 120\n timeout_submission = {'option_AB': 'Option A'}\n\n\nclass treatment_4(Page):\n form_model = models.Player\n form_fields = ['option4_1', 'option4_2']\n\n def before_next_page(self):\n self.player.payoff = 0.10\n self.player.total = 0.30\n\n def is_displayed(self):\n return self.player.treatment == 4\n\n\nclass Result_123(Page):\n\n def vars_for_template(self):\n return {'task2': self.player.payoff - 0.20}\n\n\nclass Demographic(Page):\n form_model = models.Player\n form_fields = ['gender', 'age', 'religion', 'service'] #'getcode_1', 'getcode_2']\n\n\nclass WaitforP1(WaitPage):\n def is_displayed(self):\n return self.player.treatment != 4\n\n\nclass Task3(Page):\n def is_displayed(self):\n return self.player.id_in_group == 2 and self.player.treatment != 4\n\n\npage_sequence = [\n MyWaitPage,\n Welcome,\n Priming,\n Introduction,\n Eye1,\n Eye2,\n Eye3,\n Eye4,\n Eye5,\n Eye6,\n Eye7,\n Eye8,\n Eye9,\n Eye10,\n Eye11,\n Eye12,\n Eye13,\n Eye14,\n Eye15,\n Eye16,\n Eye17,\n Eye18,\n Eye19,\n Eye20,\n Eye21,\n Eye22,\n Eye23,\n Eye24,\n 
Eye25,\n Eye26,\n Eye27,\n Eye28,\n Eye29,\n Eye30,\n Eye31,\n Eye32,\n Eye33,\n Eye34,\n Eye35,\n Eye36,\n Player1,\n Task3,\n WaitforP1,\n Player2,\n treatment_4,\n Demographic,\n ResultsWaitPage,\n Result_123\n]\n", "step-ids": [ 76, 81, 85, 100, 105 ] }
[ 76, 81, 85, 100, 105 ]
import os import factorStatFileCreator dirName = 'NoPerms/' dirName2 = 'AllPerms/' freqAgentDic = dict() lenAgentDic = dict() contAgentDic = dict() def freqModAvgFunc(dirName): fullList = factorStatFileCreator.directoryFreq(dirName) UA = dirName.split("/")[1] avgList = [] sum = 0 i = 0 while i <= len(fullList) - 2: diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i+1]) if diff == None: i+=1 else: avgList.append(int(diff)) i+=1 for item in avgList: sum += item if len(avgList) != 0: if UA not in freqAgentDic.keys(): freqAgentDic[UA] = [sum/len(avgList)] else: agentList = freqAgentDic[UA] agentList.append(sum/len(avgList)) freqAgentDic[UA] = agentList def finalFreqFunc(dirName): for filename in os.listdir(dirName): file = dirName + filename freqModAvgFunc(file) def printFreqDiff(): finalFreqFunc(dirName) finalFreqFunc(dirName2) #print (freqAgentDic) for keys, vals in freqAgentDic.items(): if len(vals) > 1 and vals[1] > 0: score = vals[0] / vals[1] print ("{:<15}: {:.2f}".format(keys,score)) else: score = "N/A" print ("{:<15}: {}".format(keys,score)) freqAgentDic[keys] = score return (freqAgentDic) def avgModFunc(directory): sum = 0 UA = directory.split("/")[1] byteList = factorStatFileCreator.directoryLen(directory) for item in byteList: sum += item if len(byteList) != 0: if UA not in lenAgentDic.keys(): lenAgentDic[UA] = [sum/len(byteList)] else: agentList = lenAgentDic[UA] agentList.append(sum/len(byteList)) lenAgentDic[UA] = agentList def finalLenFunc(dirName): for filename in os.listdir(dirName): file = dirName + filename avgModFunc(file) def printLenDiff(): finalLenFunc(dirName) finalLenFunc(dirName2) for keys, vals in lenAgentDic.items(): if len(vals) > 1 and vals[1] > 0: score = vals[1] / vals[0] print ("{:<15}: {:.2f}".format(keys,score)) else: score = "N/A" print ("{:<15}: {}".format(keys,score)) lenAgentDic[keys] = score return lenAgentDic def directoryModCont(directory): contentSet = set() newSet = set() listHolder = [] numofReq = 0 UA = 
directory.split("/")[1] for filename in os.listdir(directory): file = directory + '/' + filename listHolder = factorStatFileCreator.contentCommand(file) #print(newSet) newSet = listHolder[0] numofReq += len(listHolder[1]) contentSet = contentSet|newSet newSet = set() if UA not in contAgentDic.keys(): contAgentDic[UA] = [numofReq] else: agentList = contAgentDic[UA] agentList.append(numofReq) contAgentDic[UA] = agentList return contentSet, numofReq def finalContFunc(dirName): for filename in os.listdir(dirName): file = dirName + filename directoryModCont(file) def printContDiff(): finalContFunc(dirName) finalContFunc(dirName2) for keys, vals in contAgentDic.items(): if len(vals) > 1 and vals[1] > 0: score = vals[0] / vals[1] print ("{:<15}: {:.2f}".format(keys,score)) else: score = "N/A" print ("{:<15}: {}".format(keys,score)) contAgentDic[keys] = score return contAgentDic
normal
{ "blob_id": "8ac84aa29e9e4f3b85f1b3c27819feb5f41e8d8e", "index": 598, "step-1": "<mask token>\n\n\ndef freqModAvgFunc(dirName):\n fullList = factorStatFileCreator.directoryFreq(dirName)\n UA = dirName.split('/')[1]\n avgList = []\n sum = 0\n i = 0\n while i <= len(fullList) - 2:\n diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i + 1])\n if diff == None:\n i += 1\n else:\n avgList.append(int(diff))\n i += 1\n for item in avgList:\n sum += item\n if len(avgList) != 0:\n if UA not in freqAgentDic.keys():\n freqAgentDic[UA] = [sum / len(avgList)]\n else:\n agentList = freqAgentDic[UA]\n agentList.append(sum / len(avgList))\n freqAgentDic[UA] = agentList\n\n\n<mask token>\n\n\ndef printFreqDiff():\n finalFreqFunc(dirName)\n finalFreqFunc(dirName2)\n for keys, vals in freqAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[0] / vals[1]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n freqAgentDic[keys] = score\n return freqAgentDic\n\n\n<mask token>\n\n\ndef finalLenFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n avgModFunc(file)\n\n\ndef printLenDiff():\n finalLenFunc(dirName)\n finalLenFunc(dirName2)\n for keys, vals in lenAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[1] / vals[0]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n lenAgentDic[keys] = score\n return lenAgentDic\n\n\ndef directoryModCont(directory):\n contentSet = set()\n newSet = set()\n listHolder = []\n numofReq = 0\n UA = directory.split('/')[1]\n for filename in os.listdir(directory):\n file = directory + '/' + filename\n listHolder = factorStatFileCreator.contentCommand(file)\n newSet = listHolder[0]\n numofReq += len(listHolder[1])\n contentSet = contentSet | newSet\n newSet = set()\n if UA not in contAgentDic.keys():\n contAgentDic[UA] = [numofReq]\n else:\n agentList = 
contAgentDic[UA]\n agentList.append(numofReq)\n contAgentDic[UA] = agentList\n return contentSet, numofReq\n\n\ndef finalContFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n directoryModCont(file)\n\n\ndef printContDiff():\n finalContFunc(dirName)\n finalContFunc(dirName2)\n for keys, vals in contAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[0] / vals[1]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n contAgentDic[keys] = score\n return contAgentDic\n", "step-2": "<mask token>\n\n\ndef freqModAvgFunc(dirName):\n fullList = factorStatFileCreator.directoryFreq(dirName)\n UA = dirName.split('/')[1]\n avgList = []\n sum = 0\n i = 0\n while i <= len(fullList) - 2:\n diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i + 1])\n if diff == None:\n i += 1\n else:\n avgList.append(int(diff))\n i += 1\n for item in avgList:\n sum += item\n if len(avgList) != 0:\n if UA not in freqAgentDic.keys():\n freqAgentDic[UA] = [sum / len(avgList)]\n else:\n agentList = freqAgentDic[UA]\n agentList.append(sum / len(avgList))\n freqAgentDic[UA] = agentList\n\n\ndef finalFreqFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n freqModAvgFunc(file)\n\n\ndef printFreqDiff():\n finalFreqFunc(dirName)\n finalFreqFunc(dirName2)\n for keys, vals in freqAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[0] / vals[1]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n freqAgentDic[keys] = score\n return freqAgentDic\n\n\n<mask token>\n\n\ndef finalLenFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n avgModFunc(file)\n\n\ndef printLenDiff():\n finalLenFunc(dirName)\n finalLenFunc(dirName2)\n for keys, vals in lenAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[1] / vals[0]\n print('{:<15}: 
{:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n lenAgentDic[keys] = score\n return lenAgentDic\n\n\ndef directoryModCont(directory):\n contentSet = set()\n newSet = set()\n listHolder = []\n numofReq = 0\n UA = directory.split('/')[1]\n for filename in os.listdir(directory):\n file = directory + '/' + filename\n listHolder = factorStatFileCreator.contentCommand(file)\n newSet = listHolder[0]\n numofReq += len(listHolder[1])\n contentSet = contentSet | newSet\n newSet = set()\n if UA not in contAgentDic.keys():\n contAgentDic[UA] = [numofReq]\n else:\n agentList = contAgentDic[UA]\n agentList.append(numofReq)\n contAgentDic[UA] = agentList\n return contentSet, numofReq\n\n\ndef finalContFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n directoryModCont(file)\n\n\ndef printContDiff():\n finalContFunc(dirName)\n finalContFunc(dirName2)\n for keys, vals in contAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[0] / vals[1]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n contAgentDic[keys] = score\n return contAgentDic\n", "step-3": "<mask token>\n\n\ndef freqModAvgFunc(dirName):\n fullList = factorStatFileCreator.directoryFreq(dirName)\n UA = dirName.split('/')[1]\n avgList = []\n sum = 0\n i = 0\n while i <= len(fullList) - 2:\n diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i + 1])\n if diff == None:\n i += 1\n else:\n avgList.append(int(diff))\n i += 1\n for item in avgList:\n sum += item\n if len(avgList) != 0:\n if UA not in freqAgentDic.keys():\n freqAgentDic[UA] = [sum / len(avgList)]\n else:\n agentList = freqAgentDic[UA]\n agentList.append(sum / len(avgList))\n freqAgentDic[UA] = agentList\n\n\ndef finalFreqFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n freqModAvgFunc(file)\n\n\ndef printFreqDiff():\n finalFreqFunc(dirName)\n 
finalFreqFunc(dirName2)\n for keys, vals in freqAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[0] / vals[1]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n freqAgentDic[keys] = score\n return freqAgentDic\n\n\ndef avgModFunc(directory):\n sum = 0\n UA = directory.split('/')[1]\n byteList = factorStatFileCreator.directoryLen(directory)\n for item in byteList:\n sum += item\n if len(byteList) != 0:\n if UA not in lenAgentDic.keys():\n lenAgentDic[UA] = [sum / len(byteList)]\n else:\n agentList = lenAgentDic[UA]\n agentList.append(sum / len(byteList))\n lenAgentDic[UA] = agentList\n\n\ndef finalLenFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n avgModFunc(file)\n\n\ndef printLenDiff():\n finalLenFunc(dirName)\n finalLenFunc(dirName2)\n for keys, vals in lenAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[1] / vals[0]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n lenAgentDic[keys] = score\n return lenAgentDic\n\n\ndef directoryModCont(directory):\n contentSet = set()\n newSet = set()\n listHolder = []\n numofReq = 0\n UA = directory.split('/')[1]\n for filename in os.listdir(directory):\n file = directory + '/' + filename\n listHolder = factorStatFileCreator.contentCommand(file)\n newSet = listHolder[0]\n numofReq += len(listHolder[1])\n contentSet = contentSet | newSet\n newSet = set()\n if UA not in contAgentDic.keys():\n contAgentDic[UA] = [numofReq]\n else:\n agentList = contAgentDic[UA]\n agentList.append(numofReq)\n contAgentDic[UA] = agentList\n return contentSet, numofReq\n\n\ndef finalContFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n directoryModCont(file)\n\n\ndef printContDiff():\n finalContFunc(dirName)\n finalContFunc(dirName2)\n for keys, vals in contAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n 
score = vals[0] / vals[1]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n contAgentDic[keys] = score\n return contAgentDic\n", "step-4": "<mask token>\ndirName = 'NoPerms/'\ndirName2 = 'AllPerms/'\nfreqAgentDic = dict()\nlenAgentDic = dict()\ncontAgentDic = dict()\n\n\ndef freqModAvgFunc(dirName):\n fullList = factorStatFileCreator.directoryFreq(dirName)\n UA = dirName.split('/')[1]\n avgList = []\n sum = 0\n i = 0\n while i <= len(fullList) - 2:\n diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i + 1])\n if diff == None:\n i += 1\n else:\n avgList.append(int(diff))\n i += 1\n for item in avgList:\n sum += item\n if len(avgList) != 0:\n if UA not in freqAgentDic.keys():\n freqAgentDic[UA] = [sum / len(avgList)]\n else:\n agentList = freqAgentDic[UA]\n agentList.append(sum / len(avgList))\n freqAgentDic[UA] = agentList\n\n\ndef finalFreqFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n freqModAvgFunc(file)\n\n\ndef printFreqDiff():\n finalFreqFunc(dirName)\n finalFreqFunc(dirName2)\n for keys, vals in freqAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[0] / vals[1]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n freqAgentDic[keys] = score\n return freqAgentDic\n\n\ndef avgModFunc(directory):\n sum = 0\n UA = directory.split('/')[1]\n byteList = factorStatFileCreator.directoryLen(directory)\n for item in byteList:\n sum += item\n if len(byteList) != 0:\n if UA not in lenAgentDic.keys():\n lenAgentDic[UA] = [sum / len(byteList)]\n else:\n agentList = lenAgentDic[UA]\n agentList.append(sum / len(byteList))\n lenAgentDic[UA] = agentList\n\n\ndef finalLenFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n avgModFunc(file)\n\n\ndef printLenDiff():\n finalLenFunc(dirName)\n finalLenFunc(dirName2)\n for keys, vals in lenAgentDic.items():\n if 
len(vals) > 1 and vals[1] > 0:\n score = vals[1] / vals[0]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n lenAgentDic[keys] = score\n return lenAgentDic\n\n\ndef directoryModCont(directory):\n contentSet = set()\n newSet = set()\n listHolder = []\n numofReq = 0\n UA = directory.split('/')[1]\n for filename in os.listdir(directory):\n file = directory + '/' + filename\n listHolder = factorStatFileCreator.contentCommand(file)\n newSet = listHolder[0]\n numofReq += len(listHolder[1])\n contentSet = contentSet | newSet\n newSet = set()\n if UA not in contAgentDic.keys():\n contAgentDic[UA] = [numofReq]\n else:\n agentList = contAgentDic[UA]\n agentList.append(numofReq)\n contAgentDic[UA] = agentList\n return contentSet, numofReq\n\n\ndef finalContFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n directoryModCont(file)\n\n\ndef printContDiff():\n finalContFunc(dirName)\n finalContFunc(dirName2)\n for keys, vals in contAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[0] / vals[1]\n print('{:<15}: {:.2f}'.format(keys, score))\n else:\n score = 'N/A'\n print('{:<15}: {}'.format(keys, score))\n contAgentDic[keys] = score\n return contAgentDic\n", "step-5": "import os\nimport factorStatFileCreator\n\ndirName = 'NoPerms/'\ndirName2 = 'AllPerms/'\n\nfreqAgentDic = dict()\nlenAgentDic = dict()\ncontAgentDic = dict()\n\ndef freqModAvgFunc(dirName):\n fullList = factorStatFileCreator.directoryFreq(dirName)\n UA = dirName.split(\"/\")[1]\n avgList = []\n sum = 0\n i = 0\n while i <= len(fullList) - 2:\n diff = factorStatFileCreator.diffFunc(fullList[i], fullList[i+1])\n if diff == None:\n i+=1\n else:\n avgList.append(int(diff))\n i+=1\n for item in avgList:\n sum += item\n if len(avgList) != 0:\n if UA not in freqAgentDic.keys():\n freqAgentDic[UA] = [sum/len(avgList)]\n else:\n agentList = freqAgentDic[UA]\n agentList.append(sum/len(avgList))\n 
freqAgentDic[UA] = agentList\n\ndef finalFreqFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n freqModAvgFunc(file)\n\ndef printFreqDiff():\n finalFreqFunc(dirName)\n finalFreqFunc(dirName2)\n #print (freqAgentDic)\n for keys, vals in freqAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[0] / vals[1]\n print (\"{:<15}: {:.2f}\".format(keys,score))\n else:\n score = \"N/A\"\n print (\"{:<15}: {}\".format(keys,score))\n freqAgentDic[keys] = score\n return (freqAgentDic)\n\ndef avgModFunc(directory):\n sum = 0\n UA = directory.split(\"/\")[1]\n byteList = factorStatFileCreator.directoryLen(directory)\n for item in byteList:\n sum += item\n if len(byteList) != 0:\n if UA not in lenAgentDic.keys():\n lenAgentDic[UA] = [sum/len(byteList)]\n else:\n agentList = lenAgentDic[UA]\n agentList.append(sum/len(byteList))\n lenAgentDic[UA] = agentList\n\ndef finalLenFunc(dirName):\n for filename in os.listdir(dirName):\n file = dirName + filename\n avgModFunc(file)\n\ndef printLenDiff():\n finalLenFunc(dirName)\n finalLenFunc(dirName2)\n for keys, vals in lenAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[1] / vals[0]\n print (\"{:<15}: {:.2f}\".format(keys,score))\n else:\n score = \"N/A\"\n print (\"{:<15}: {}\".format(keys,score))\n lenAgentDic[keys] = score\n return lenAgentDic\n\ndef directoryModCont(directory):\n contentSet = set()\n newSet = set()\n listHolder = []\n numofReq = 0\n UA = directory.split(\"/\")[1]\n for filename in os.listdir(directory):\n file = directory + '/' + filename\n listHolder = factorStatFileCreator.contentCommand(file)\n #print(newSet)\n newSet = listHolder[0]\n numofReq += len(listHolder[1])\n contentSet = contentSet|newSet\n newSet = set()\n if UA not in contAgentDic.keys():\n contAgentDic[UA] = [numofReq]\n else:\n agentList = contAgentDic[UA]\n agentList.append(numofReq)\n contAgentDic[UA] = agentList\n return contentSet, numofReq\n\ndef finalContFunc(dirName):\n for 
filename in os.listdir(dirName):\n file = dirName + filename\n directoryModCont(file)\n\ndef printContDiff():\n finalContFunc(dirName)\n finalContFunc(dirName2)\n for keys, vals in contAgentDic.items():\n if len(vals) > 1 and vals[1] > 0:\n score = vals[0] / vals[1]\n print (\"{:<15}: {:.2f}\".format(keys,score))\n else:\n score = \"N/A\"\n print (\"{:<15}: {}\".format(keys,score))\n contAgentDic[keys] = score\n return contAgentDic\n", "step-ids": [ 7, 8, 9, 10, 12 ] }
[ 7, 8, 9, 10, 12 ]
#! /usr/bin/python # -*- coding: utf-8 -*- __author__ = 'raek' web = '910d59f0-30bd-495b-a54c-bf5addc81a8a' app = '21ec74fb-e941-43be-8772-a2f8dc6ccc4f'
normal
{ "blob_id": "cce645073ba117b9e297dfccf5a39710b0c6cd14", "index": 8479, "step-1": "<mask token>\n", "step-2": "__author__ = 'raek'\nweb = '910d59f0-30bd-495b-a54c-bf5addc81a8a'\napp = '21ec74fb-e941-43be-8772-a2f8dc6ccc4f'\n", "step-3": "#! /usr/bin/python\n# -*- coding: utf-8 -*-\n__author__ = 'raek'\n\nweb = '910d59f0-30bd-495b-a54c-bf5addc81a8a'\napp = '21ec74fb-e941-43be-8772-a2f8dc6ccc4f'", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> warnings.filterwarnings(action='ignore', module='scipy', message= '^internal gelsd') <|reserved_special_token_0|> model.intercept_ print(model.intercept_) model.coef_ print(model.coef_) <|reserved_special_token_1|> <|reserved_special_token_0|> warnings.filterwarnings(action='ignore', module='scipy', message= '^internal gelsd') iris = load_iris() X = iris.data y = iris.target regr = LinearRegression() model = regr.fit(X, y) model.intercept_ print(model.intercept_) model.coef_ print(model.coef_) <|reserved_special_token_1|> from sklearn.linear_model import LinearRegression from sklearn.datasets import load_iris import warnings import numpy as np import pandas as pd warnings.filterwarnings(action='ignore', module='scipy', message= '^internal gelsd') iris = load_iris() X = iris.data y = iris.target regr = LinearRegression() model = regr.fit(X, y) model.intercept_ print(model.intercept_) model.coef_ print(model.coef_) <|reserved_special_token_1|> #Tom Healy #Adapted from Chris Albon https://chrisalbon.com/machine_learning/linear_regression/linear_regression_using_scikit-learn/ #Load the libraries we will need #This is just to play round with Linear regression more that anything else from sklearn.linear_model import LinearRegression from sklearn.datasets import load_iris import warnings import numpy as np import pandas as pd #HE (Chris Albon) recommends to suppress the warnings warnings.filterwarnings(action="ignore", module="scipy", message="^internal gelsd") #Load the data and assign the X and y to the data and target respectively iris = load_iris() X = iris.data y = iris.target #Create a linear regression regr = LinearRegression() #Fit the model model = regr.fit(X, y) #View the intercept model.intercept_ #Print the intercept, so this where the data hits the y axis (I wish I paid more attention in Algebra......) 
print(model.intercept_) #View the coefficients model.coef_ print(model.coef_)
flexible
{ "blob_id": "0f257d199ad0285d8619647434451841144af66d", "index": 9379, "step-1": "<mask token>\n", "step-2": "<mask token>\nwarnings.filterwarnings(action='ignore', module='scipy', message=\n '^internal gelsd')\n<mask token>\nmodel.intercept_\nprint(model.intercept_)\nmodel.coef_\nprint(model.coef_)\n", "step-3": "<mask token>\nwarnings.filterwarnings(action='ignore', module='scipy', message=\n '^internal gelsd')\niris = load_iris()\nX = iris.data\ny = iris.target\nregr = LinearRegression()\nmodel = regr.fit(X, y)\nmodel.intercept_\nprint(model.intercept_)\nmodel.coef_\nprint(model.coef_)\n", "step-4": "from sklearn.linear_model import LinearRegression\nfrom sklearn.datasets import load_iris\nimport warnings\nimport numpy as np\nimport pandas as pd\nwarnings.filterwarnings(action='ignore', module='scipy', message=\n '^internal gelsd')\niris = load_iris()\nX = iris.data\ny = iris.target\nregr = LinearRegression()\nmodel = regr.fit(X, y)\nmodel.intercept_\nprint(model.intercept_)\nmodel.coef_\nprint(model.coef_)\n", "step-5": "#Tom Healy\n#Adapted from Chris Albon https://chrisalbon.com/machine_learning/linear_regression/linear_regression_using_scikit-learn/\n#Load the libraries we will need\n#This is just to play round with Linear regression more that anything else\nfrom sklearn.linear_model import LinearRegression\nfrom sklearn.datasets import load_iris\nimport warnings\nimport numpy as np \nimport pandas as pd \n\n#HE (Chris Albon) recommends to suppress the warnings \nwarnings.filterwarnings(action=\"ignore\", module=\"scipy\", message=\"^internal gelsd\")\n\n#Load the data and assign the X and y to the data and target respectively \niris = load_iris()\nX = iris.data\ny = iris.target\n\n\n\n#Create a linear regression\nregr = LinearRegression()\n#Fit the model\nmodel = regr.fit(X, y)\n#View the intercept \nmodel.intercept_\n#Print the intercept, so this where the data hits the y axis (I wish I paid more attention in 
Algebra......)\nprint(model.intercept_)\n\n#View the coefficients\nmodel.coef_\n\nprint(model.coef_)\n\n\n\n\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# Generated by Django 2.2.4 on 2019-09-09 11:00 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('core', '0002_ordered'), ] operations = [ migrations.AlterField( model_name='generalinfo', name='amount_available', field=models.IntegerField(blank=True, default=False, null=True, verbose_name='У наявності'), ), migrations.AlterField( model_name='generalinfo', name='image', field=models.URLField(blank=True), ), ]
normal
{ "blob_id": "8af9cc32b445402fa790b29382a802bd8afc1100", "index": 5655, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('core', '0002_ordered')]\n operations = [migrations.AlterField(model_name='generalinfo', name=\n 'amount_available', field=models.IntegerField(blank=True, default=\n False, null=True, verbose_name='У наявності')), migrations.\n AlterField(model_name='generalinfo', name='image', field=models.\n URLField(blank=True))]\n", "step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('core', '0002_ordered')]\n operations = [migrations.AlterField(model_name='generalinfo', name=\n 'amount_available', field=models.IntegerField(blank=True, default=\n False, null=True, verbose_name='У наявності')), migrations.\n AlterField(model_name='generalinfo', name='image', field=models.\n URLField(blank=True))]\n", "step-5": "# Generated by Django 2.2.4 on 2019-09-09 11:00\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('core', '0002_ordered'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='generalinfo',\n name='amount_available',\n field=models.IntegerField(blank=True, default=False, null=True, verbose_name='У наявності'),\n ),\n migrations.AlterField(\n model_name='generalinfo',\n name='image',\n field=models.URLField(blank=True),\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import pandas as pd import matplotlib.pyplot as plt import math import seaborn as sns import numpy as np suv_data=pd.read_csv("F:/Development/Machine Learning/suv-data/suv_data.csv") print(suv_data.head(10)) print("the no of passengers in the list is"+str(len(suv_data.index))) sns.countplot(x="Purchased",data=suv_data) sns.countplot(x="Purchased",hue="Gender",data=suv_data) suv_data['Age'].plot.hist() suv_data.info() suv_data['EstimatedSalary'].plot.hist(bins=50,figsize=(10,5)) print(suv_data.isnull()) print(suv_data.isnull().sum()) sns.heatmap(suv_data.isnull(),yticklabels=False,cmap="viridis") plt.show() sns.boxplot(x="Gender",y="Age",data=suv_data) plt.show() suv_data.drop("User ID",axis=1,inplace=True) suv_data.columns suv_data.head(10) Gen=pd.get_dummies(suv_data['Gender'],drop_first=True) print(Gen.head(5)) suv_data=pd.concat([suv_data,Gen],axis=1) print(suv_data.head(5)) suv_data.drop("Gender",axis=1,inplace=True) print(suv_data.head(10)) X=suv_data.iloc[:,[0,1,3]].values y=suv_data.iloc[:,2].values from sklearn.model_selection import train_test_split X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0) from sklearn.preprocessing import StandardScaler sc=StandardScaler() X_train=sc.fit_transform(X_train) X_test=sc.transform(X_test) from sklearn.linear_model import LogisticRegression logmodel=LogisticRegression() logmodel.fit(X_train, y_train) predictions=logmodel.predict(X_test) print(predictions) from sklearn.metrics import classification_report print(classification_report(y_test,predictions)) from sklearn.metrics import confusion_matrix print(confusion_matrix(y_test,predictions)) from sklearn.metrics import accuracy_score print(accuracy_score(y_test,predictions)*100)
normal
{ "blob_id": "c955057d7f8d5289898ecb96a290f5a7d241b787", "index": 6440, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(suv_data.head(10))\nprint('the no of passengers in the list is' + str(len(suv_data.index)))\nsns.countplot(x='Purchased', data=suv_data)\nsns.countplot(x='Purchased', hue='Gender', data=suv_data)\nsuv_data['Age'].plot.hist()\nsuv_data.info()\nsuv_data['EstimatedSalary'].plot.hist(bins=50, figsize=(10, 5))\nprint(suv_data.isnull())\nprint(suv_data.isnull().sum())\nsns.heatmap(suv_data.isnull(), yticklabels=False, cmap='viridis')\nplt.show()\nsns.boxplot(x='Gender', y='Age', data=suv_data)\nplt.show()\nsuv_data.drop('User ID', axis=1, inplace=True)\nsuv_data.columns\nsuv_data.head(10)\n<mask token>\nprint(Gen.head(5))\n<mask token>\nprint(suv_data.head(5))\nsuv_data.drop('Gender', axis=1, inplace=True)\nprint(suv_data.head(10))\n<mask token>\nlogmodel.fit(X_train, y_train)\n<mask token>\nprint(predictions)\n<mask token>\nprint(classification_report(y_test, predictions))\n<mask token>\nprint(confusion_matrix(y_test, predictions))\n<mask token>\nprint(accuracy_score(y_test, predictions) * 100)\n", "step-3": "<mask token>\nsuv_data = pd.read_csv('F:/Development/Machine Learning/suv-data/suv_data.csv')\nprint(suv_data.head(10))\nprint('the no of passengers in the list is' + str(len(suv_data.index)))\nsns.countplot(x='Purchased', data=suv_data)\nsns.countplot(x='Purchased', hue='Gender', data=suv_data)\nsuv_data['Age'].plot.hist()\nsuv_data.info()\nsuv_data['EstimatedSalary'].plot.hist(bins=50, figsize=(10, 5))\nprint(suv_data.isnull())\nprint(suv_data.isnull().sum())\nsns.heatmap(suv_data.isnull(), yticklabels=False, cmap='viridis')\nplt.show()\nsns.boxplot(x='Gender', y='Age', data=suv_data)\nplt.show()\nsuv_data.drop('User ID', axis=1, inplace=True)\nsuv_data.columns\nsuv_data.head(10)\nGen = pd.get_dummies(suv_data['Gender'], drop_first=True)\nprint(Gen.head(5))\nsuv_data = pd.concat([suv_data, Gen], 
axis=1)\nprint(suv_data.head(5))\nsuv_data.drop('Gender', axis=1, inplace=True)\nprint(suv_data.head(10))\nX = suv_data.iloc[:, [0, 1, 3]].values\ny = suv_data.iloc[:, 2].values\n<mask token>\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25,\n random_state=0)\n<mask token>\nsc = StandardScaler()\nX_train = sc.fit_transform(X_train)\nX_test = sc.transform(X_test)\n<mask token>\nlogmodel = LogisticRegression()\nlogmodel.fit(X_train, y_train)\npredictions = logmodel.predict(X_test)\nprint(predictions)\n<mask token>\nprint(classification_report(y_test, predictions))\n<mask token>\nprint(confusion_matrix(y_test, predictions))\n<mask token>\nprint(accuracy_score(y_test, predictions) * 100)\n", "step-4": "import pandas as pd\nimport matplotlib.pyplot as plt\nimport math\nimport seaborn as sns\nimport numpy as np\nsuv_data = pd.read_csv('F:/Development/Machine Learning/suv-data/suv_data.csv')\nprint(suv_data.head(10))\nprint('the no of passengers in the list is' + str(len(suv_data.index)))\nsns.countplot(x='Purchased', data=suv_data)\nsns.countplot(x='Purchased', hue='Gender', data=suv_data)\nsuv_data['Age'].plot.hist()\nsuv_data.info()\nsuv_data['EstimatedSalary'].plot.hist(bins=50, figsize=(10, 5))\nprint(suv_data.isnull())\nprint(suv_data.isnull().sum())\nsns.heatmap(suv_data.isnull(), yticklabels=False, cmap='viridis')\nplt.show()\nsns.boxplot(x='Gender', y='Age', data=suv_data)\nplt.show()\nsuv_data.drop('User ID', axis=1, inplace=True)\nsuv_data.columns\nsuv_data.head(10)\nGen = pd.get_dummies(suv_data['Gender'], drop_first=True)\nprint(Gen.head(5))\nsuv_data = pd.concat([suv_data, Gen], axis=1)\nprint(suv_data.head(5))\nsuv_data.drop('Gender', axis=1, inplace=True)\nprint(suv_data.head(10))\nX = suv_data.iloc[:, [0, 1, 3]].values\ny = suv_data.iloc[:, 2].values\nfrom sklearn.model_selection import train_test_split\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25,\n random_state=0)\nfrom sklearn.preprocessing import 
StandardScaler\nsc = StandardScaler()\nX_train = sc.fit_transform(X_train)\nX_test = sc.transform(X_test)\nfrom sklearn.linear_model import LogisticRegression\nlogmodel = LogisticRegression()\nlogmodel.fit(X_train, y_train)\npredictions = logmodel.predict(X_test)\nprint(predictions)\nfrom sklearn.metrics import classification_report\nprint(classification_report(y_test, predictions))\nfrom sklearn.metrics import confusion_matrix\nprint(confusion_matrix(y_test, predictions))\nfrom sklearn.metrics import accuracy_score\nprint(accuracy_score(y_test, predictions) * 100)\n", "step-5": "import pandas as pd\nimport matplotlib.pyplot as plt \nimport math\nimport seaborn as sns\nimport numpy as np\nsuv_data=pd.read_csv(\"F:/Development/Machine Learning/suv-data/suv_data.csv\")\nprint(suv_data.head(10))\nprint(\"the no of passengers in the list is\"+str(len(suv_data.index)))\nsns.countplot(x=\"Purchased\",data=suv_data)\nsns.countplot(x=\"Purchased\",hue=\"Gender\",data=suv_data)\nsuv_data['Age'].plot.hist()\nsuv_data.info()\nsuv_data['EstimatedSalary'].plot.hist(bins=50,figsize=(10,5))\nprint(suv_data.isnull())\nprint(suv_data.isnull().sum())\nsns.heatmap(suv_data.isnull(),yticklabels=False,cmap=\"viridis\")\nplt.show()\nsns.boxplot(x=\"Gender\",y=\"Age\",data=suv_data)\nplt.show()\nsuv_data.drop(\"User ID\",axis=1,inplace=True)\nsuv_data.columns\nsuv_data.head(10)\nGen=pd.get_dummies(suv_data['Gender'],drop_first=True)\nprint(Gen.head(5))\nsuv_data=pd.concat([suv_data,Gen],axis=1)\nprint(suv_data.head(5))\nsuv_data.drop(\"Gender\",axis=1,inplace=True)\nprint(suv_data.head(10))\nX=suv_data.iloc[:,[0,1,3]].values\ny=suv_data.iloc[:,2].values\nfrom sklearn.model_selection import train_test_split\nX_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=0)\nfrom sklearn.preprocessing import StandardScaler\nsc=StandardScaler()\nX_train=sc.fit_transform(X_train)\nX_test=sc.transform(X_test)\nfrom sklearn.linear_model import 
LogisticRegression\nlogmodel=LogisticRegression()\nlogmodel.fit(X_train, y_train)\npredictions=logmodel.predict(X_test)\nprint(predictions)\nfrom sklearn.metrics import classification_report\nprint(classification_report(y_test,predictions))\nfrom sklearn.metrics import confusion_matrix\nprint(confusion_matrix(y_test,predictions))\nfrom sklearn.metrics import accuracy_score\nprint(accuracy_score(y_test,predictions)*100)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def main(): errors = [] for arg in sys.argv[1:]: try: validate_fragment('envoy.config.bootstrap.v3.Bootstrap', yaml. safe_load(pathlib.Path(arg).read_text())) except (ParseError, KeyError) as e: errors.append(arg) print(f'\nERROR (validation failed): {arg}\n{e}\n\n') if errors: raise SystemExit( f'ERROR: some configuration files ({len(errors)}) failed to validate' ) if __name__ == '__main__': main() <|reserved_special_token_1|> <|reserved_special_token_0|> sys.path = [p for p in sys.path if not p.endswith('bazel_tools')] <|reserved_special_token_0|> def main(): errors = [] for arg in sys.argv[1:]: try: validate_fragment('envoy.config.bootstrap.v3.Bootstrap', yaml. safe_load(pathlib.Path(arg).read_text())) except (ParseError, KeyError) as e: errors.append(arg) print(f'\nERROR (validation failed): {arg}\n{e}\n\n') if errors: raise SystemExit( f'ERROR: some configuration files ({len(errors)}) failed to validate' ) if __name__ == '__main__': main() <|reserved_special_token_1|> import pathlib import sys import yaml from google.protobuf.json_format import ParseError sys.path = [p for p in sys.path if not p.endswith('bazel_tools')] from tools.config_validation.validate_fragment import validate_fragment def main(): errors = [] for arg in sys.argv[1:]: try: validate_fragment('envoy.config.bootstrap.v3.Bootstrap', yaml. 
safe_load(pathlib.Path(arg).read_text())) except (ParseError, KeyError) as e: errors.append(arg) print(f'\nERROR (validation failed): {arg}\n{e}\n\n') if errors: raise SystemExit( f'ERROR: some configuration files ({len(errors)}) failed to validate' ) if __name__ == '__main__': main() <|reserved_special_token_1|> import pathlib import sys import yaml from google.protobuf.json_format import ParseError sys.path = [p for p in sys.path if not p.endswith('bazel_tools')] from tools.config_validation.validate_fragment import validate_fragment def main(): errors = [] for arg in sys.argv[1:]: try: validate_fragment( "envoy.config.bootstrap.v3.Bootstrap", yaml.safe_load(pathlib.Path(arg).read_text())) except (ParseError, KeyError) as e: errors.append(arg) print(f"\nERROR (validation failed): {arg}\n{e}\n\n") if errors: raise SystemExit(f"ERROR: some configuration files ({len(errors)}) failed to validate") if __name__ == "__main__": main()
flexible
{ "blob_id": "04097e63de5cd94ca8921be5cb6c2155c1e7bc20", "index": 7534, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef main():\n errors = []\n for arg in sys.argv[1:]:\n try:\n validate_fragment('envoy.config.bootstrap.v3.Bootstrap', yaml.\n safe_load(pathlib.Path(arg).read_text()))\n except (ParseError, KeyError) as e:\n errors.append(arg)\n print(f'\\nERROR (validation failed): {arg}\\n{e}\\n\\n')\n if errors:\n raise SystemExit(\n f'ERROR: some configuration files ({len(errors)}) failed to validate'\n )\n\n\nif __name__ == '__main__':\n main()\n", "step-3": "<mask token>\nsys.path = [p for p in sys.path if not p.endswith('bazel_tools')]\n<mask token>\n\n\ndef main():\n errors = []\n for arg in sys.argv[1:]:\n try:\n validate_fragment('envoy.config.bootstrap.v3.Bootstrap', yaml.\n safe_load(pathlib.Path(arg).read_text()))\n except (ParseError, KeyError) as e:\n errors.append(arg)\n print(f'\\nERROR (validation failed): {arg}\\n{e}\\n\\n')\n if errors:\n raise SystemExit(\n f'ERROR: some configuration files ({len(errors)}) failed to validate'\n )\n\n\nif __name__ == '__main__':\n main()\n", "step-4": "import pathlib\nimport sys\nimport yaml\nfrom google.protobuf.json_format import ParseError\nsys.path = [p for p in sys.path if not p.endswith('bazel_tools')]\nfrom tools.config_validation.validate_fragment import validate_fragment\n\n\ndef main():\n errors = []\n for arg in sys.argv[1:]:\n try:\n validate_fragment('envoy.config.bootstrap.v3.Bootstrap', yaml.\n safe_load(pathlib.Path(arg).read_text()))\n except (ParseError, KeyError) as e:\n errors.append(arg)\n print(f'\\nERROR (validation failed): {arg}\\n{e}\\n\\n')\n if errors:\n raise SystemExit(\n f'ERROR: some configuration files ({len(errors)}) failed to validate'\n )\n\n\nif __name__ == '__main__':\n main()\n", "step-5": "import pathlib\nimport sys\n\nimport yaml\n\nfrom google.protobuf.json_format import ParseError\n\nsys.path = [p for p in sys.path if not p.endswith('bazel_tools')]\n\nfrom 
tools.config_validation.validate_fragment import validate_fragment\n\n\ndef main():\n errors = []\n for arg in sys.argv[1:]:\n try:\n validate_fragment(\n \"envoy.config.bootstrap.v3.Bootstrap\",\n yaml.safe_load(pathlib.Path(arg).read_text()))\n except (ParseError, KeyError) as e:\n errors.append(arg)\n print(f\"\\nERROR (validation failed): {arg}\\n{e}\\n\\n\")\n\n if errors:\n raise SystemExit(f\"ERROR: some configuration files ({len(errors)}) failed to validate\")\n\n\nif __name__ == \"__main__\":\n main()\n", "step-ids": [ 0, 2, 3, 4, 5 ] }
[ 0, 2, 3, 4, 5 ]
<|reserved_special_token_0|> def loop(run_state): error = 1 simulations = 1 while run: error_margin = str(error / simulations * 100) + '%' prediction = get_prediction() print('Prediction: %s' % prediction) print('Error Margin: %s' % error_margin) print('Flip the coin and insert your result:\nh = head\nt = tail') answer = input() comparator = '' if answer is 'h' or answer is 't': if answer == 't': write_data(False) comparator = 'tail' elif answer == 'h': write_data(True) comparator = 'head' simulations += 1 if comparator != prediction: error += 1 else: print('Invalid answer\n') def get_prediction(): file = read_file() data = file['coin-result'] true = 0 for i in data: if i is True: true += 1 head = true / len(data) tail = 1 - head if head + tail == 1: rand = random.uniform(0.0, 1.0) if head == 1: return 'head' elif tail == 1: return 'tail' elif head > tail: if rand > head: return 'head' else: return 'tail' elif head < tail: if rand > tail: return 'tail' else: return 'head' elif head == tail: rand = random.randint(0, 1) if rand == 0: return 'tail' else: return 'head' def read_file(): file = open(url, 'r') data = json.loads(file.read()) file.close() return data def write_data(value): data = read_file() file = open(url, 'w') data['coin-result'].append(value) json.dump(data, file) file.close() def get_answer(answer): if answer == 'c': return 'head' elif answer == 't': return 'tail' else: print('Invalid answer') <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def loop(run_state): error = 1 simulations = 1 while run: error_margin = str(error / simulations * 100) + '%' prediction = get_prediction() print('Prediction: %s' % prediction) print('Error Margin: %s' % error_margin) print('Flip the coin and insert your result:\nh = head\nt = tail') answer = input() comparator = '' if answer is 'h' or answer is 't': if answer == 't': write_data(False) comparator = 'tail' elif answer == 'h': write_data(True) comparator = 'head' simulations += 1 
if comparator != prediction: error += 1 else: print('Invalid answer\n') def get_prediction(): file = read_file() data = file['coin-result'] true = 0 for i in data: if i is True: true += 1 head = true / len(data) tail = 1 - head if head + tail == 1: rand = random.uniform(0.0, 1.0) if head == 1: return 'head' elif tail == 1: return 'tail' elif head > tail: if rand > head: return 'head' else: return 'tail' elif head < tail: if rand > tail: return 'tail' else: return 'head' elif head == tail: rand = random.randint(0, 1) if rand == 0: return 'tail' else: return 'head' def read_file(): file = open(url, 'r') data = json.loads(file.read()) file.close() return data def write_data(value): data = read_file() file = open(url, 'w') data['coin-result'].append(value) json.dump(data, file) file.close() def get_answer(answer): if answer == 'c': return 'head' elif answer == 't': return 'tail' else: print('Invalid answer') <|reserved_special_token_0|> print('Welcome to CoinPredictor\n') loop(run) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> url = 'data/data.json' def loop(run_state): error = 1 simulations = 1 while run: error_margin = str(error / simulations * 100) + '%' prediction = get_prediction() print('Prediction: %s' % prediction) print('Error Margin: %s' % error_margin) print('Flip the coin and insert your result:\nh = head\nt = tail') answer = input() comparator = '' if answer is 'h' or answer is 't': if answer == 't': write_data(False) comparator = 'tail' elif answer == 'h': write_data(True) comparator = 'head' simulations += 1 if comparator != prediction: error += 1 else: print('Invalid answer\n') def get_prediction(): file = read_file() data = file['coin-result'] true = 0 for i in data: if i is True: true += 1 head = true / len(data) tail = 1 - head if head + tail == 1: rand = random.uniform(0.0, 1.0) if head == 1: return 'head' elif tail == 1: return 'tail' elif head > tail: if rand > head: return 'head' else: return 'tail' elif 
head < tail: if rand > tail: return 'tail' else: return 'head' elif head == tail: rand = random.randint(0, 1) if rand == 0: return 'tail' else: return 'head' def read_file(): file = open(url, 'r') data = json.loads(file.read()) file.close() return data def write_data(value): data = read_file() file = open(url, 'w') data['coin-result'].append(value) json.dump(data, file) file.close() def get_answer(answer): if answer == 'c': return 'head' elif answer == 't': return 'tail' else: print('Invalid answer') run = True print('Welcome to CoinPredictor\n') loop(run) <|reserved_special_token_0|> <|reserved_special_token_1|> import json import random from time import sleep url = 'data/data.json' def loop(run_state): error = 1 simulations = 1 while run: error_margin = str(error / simulations * 100) + '%' prediction = get_prediction() print('Prediction: %s' % prediction) print('Error Margin: %s' % error_margin) print('Flip the coin and insert your result:\nh = head\nt = tail') answer = input() comparator = '' if answer is 'h' or answer is 't': if answer == 't': write_data(False) comparator = 'tail' elif answer == 'h': write_data(True) comparator = 'head' simulations += 1 if comparator != prediction: error += 1 else: print('Invalid answer\n') def get_prediction(): file = read_file() data = file['coin-result'] true = 0 for i in data: if i is True: true += 1 head = true / len(data) tail = 1 - head if head + tail == 1: rand = random.uniform(0.0, 1.0) if head == 1: return 'head' elif tail == 1: return 'tail' elif head > tail: if rand > head: return 'head' else: return 'tail' elif head < tail: if rand > tail: return 'tail' else: return 'head' elif head == tail: rand = random.randint(0, 1) if rand == 0: return 'tail' else: return 'head' def read_file(): file = open(url, 'r') data = json.loads(file.read()) file.close() return data def write_data(value): data = read_file() file = open(url, 'w') data['coin-result'].append(value) json.dump(data, file) file.close() def get_answer(answer): 
if answer == 'c': return 'head' elif answer == 't': return 'tail' else: print('Invalid answer') run = True print('Welcome to CoinPredictor\n') loop(run) <|reserved_special_token_0|> <|reserved_special_token_1|> import json import random from time import sleep url = "data/data.json" def loop(run_state): error = 1 simulations = 1 while run: error_margin = str((error/simulations) * 100) + "%" prediction = get_prediction() print("Prediction: %s" % prediction) print("Error Margin: %s" % error_margin) print("Flip the coin and insert your result:\nh = head\nt = tail") answer = input() comparator = "" if answer is "h" or answer is "t": if answer == "t": write_data(False) comparator = "tail" elif answer == "h": write_data(True) comparator = "head" simulations += 1 if comparator != prediction: error += 1 else: print("Invalid answer\n") def get_prediction(): file = read_file() data = file["coin-result"] true = 0 for i in data: if i is True: true += 1 head = true/len(data) tail = 1-head if head + tail == 1: rand = random.uniform(0.0, 1.0) if head == 1: return "head" elif tail == 1: return "tail" elif head > tail: if rand > head: return "head" else: return "tail" elif head < tail: if rand > tail: return "tail" else: return "head" elif head == tail: rand = random.randint(0, 1) if rand == 0: return "tail" else: return "head" def read_file(): file = open(url, "r") data = json.loads(file.read()) file.close() return data def write_data(value): data = read_file() file = open(url, "w") data["coin-result"].append(value) json.dump(data, file) file.close() def get_answer(answer): if answer == "c": return "head" elif answer == "t": return "tail" else: print("Invalid answer") # OnRun run = True print("Welcome to CoinPredictor\n") loop(run) ''' file = open("data/data.json", "w") data['coin-result'].append(False) data = json.dump(data, file) print(data) file.close()'''
flexible
{ "blob_id": "25ff54a969651d365de33f2420c662518dd63738", "index": 864, "step-1": "<mask token>\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, 'w')\n data['coin-result'].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == 'c':\n return 'head'\n elif answer == 't':\n return 'tail'\n else:\n print('Invalid answer')\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = 
head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, 'w')\n data['coin-result'].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == 'c':\n return 'head'\n elif answer == 't':\n return 'tail'\n else:\n print('Invalid answer')\n\n\n<mask token>\nprint('Welcome to CoinPredictor\\n')\nloop(run)\n<mask token>\n", "step-3": "<mask token>\nurl = 'data/data.json'\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n 
print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, 'w')\n data['coin-result'].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == 'c':\n return 'head'\n elif answer == 't':\n return 'tail'\n else:\n print('Invalid answer')\n\n\nrun = True\nprint('Welcome to CoinPredictor\\n')\nloop(run)\n<mask token>\n", "step-4": "import json\nimport random\nfrom time import sleep\nurl = 'data/data.json'\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n while run:\n error_margin = str(error / simulations * 100) + '%'\n prediction = get_prediction()\n print('Prediction: %s' % prediction)\n print('Error Margin: %s' % error_margin)\n print('Flip the coin and insert your result:\\nh = head\\nt = tail')\n answer = input()\n comparator = ''\n if answer is 'h' or answer is 't':\n if answer == 't':\n write_data(False)\n comparator = 'tail'\n elif answer == 'h':\n write_data(True)\n comparator = 'head'\n simulations += 1\n if comparator != prediction:\n error += 1\n else:\n print('Invalid answer\\n')\n\n\ndef get_prediction():\n file = read_file()\n data = file['coin-result']\n true = 0\n for i in data:\n if i is True:\n true += 1\n head = true / len(data)\n tail = 1 - head\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n if 
head == 1:\n return 'head'\n elif tail == 1:\n return 'tail'\n elif head > tail:\n if rand > head:\n return 'head'\n else:\n return 'tail'\n elif head < tail:\n if rand > tail:\n return 'tail'\n else:\n return 'head'\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return 'tail'\n else:\n return 'head'\n\n\ndef read_file():\n file = open(url, 'r')\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, 'w')\n data['coin-result'].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == 'c':\n return 'head'\n elif answer == 't':\n return 'tail'\n else:\n print('Invalid answer')\n\n\nrun = True\nprint('Welcome to CoinPredictor\\n')\nloop(run)\n<mask token>\n", "step-5": "import json\nimport random\nfrom time import sleep\n\nurl = \"data/data.json\"\n\n\ndef loop(run_state):\n error = 1\n simulations = 1\n\n while run:\n\n error_margin = str((error/simulations) * 100) + \"%\"\n prediction = get_prediction()\n print(\"Prediction: %s\" % prediction)\n print(\"Error Margin: %s\" % error_margin)\n print(\"Flip the coin and insert your result:\\nh = head\\nt = tail\")\n answer = input()\n comparator = \"\"\n\n if answer is \"h\" or answer is \"t\":\n if answer == \"t\":\n write_data(False)\n comparator = \"tail\"\n\n elif answer == \"h\":\n write_data(True)\n comparator = \"head\"\n\n simulations += 1\n\n if comparator != prediction:\n error += 1\n\n else:\n print(\"Invalid answer\\n\")\n\n\ndef get_prediction():\n file = read_file()\n data = file[\"coin-result\"]\n true = 0\n\n for i in data:\n if i is True:\n true += 1\n\n head = true/len(data)\n tail = 1-head\n\n if head + tail == 1:\n rand = random.uniform(0.0, 1.0)\n\n if head == 1:\n return \"head\"\n\n elif tail == 1:\n return \"tail\"\n\n elif head > tail:\n if rand > head:\n return \"head\"\n else:\n return \"tail\"\n\n elif head < tail:\n if rand > tail:\n return \"tail\"\n else:\n 
return \"head\"\n\n elif head == tail:\n rand = random.randint(0, 1)\n if rand == 0:\n return \"tail\"\n else:\n return \"head\"\n\n\ndef read_file():\n file = open(url, \"r\")\n data = json.loads(file.read())\n file.close()\n return data\n\n\ndef write_data(value):\n data = read_file()\n file = open(url, \"w\")\n data[\"coin-result\"].append(value)\n json.dump(data, file)\n file.close()\n\n\ndef get_answer(answer):\n if answer == \"c\":\n return \"head\"\n elif answer == \"t\":\n return \"tail\"\n else:\n print(\"Invalid answer\")\n\n\n# OnRun\nrun = True\nprint(\"Welcome to CoinPredictor\\n\")\nloop(run)\n\n\n'''\n\nfile = open(\"data/data.json\", \"w\")\ndata['coin-result'].append(False)\ndata = json.dump(data, file)\nprint(data)\nfile.close()'''\n", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
a=range(1,11) #1~10숫자를 에이에 저장 b=1 for i in a: #a에있는 원소를 b에 곱하고 비에 저장 b*=i print(b)
normal
{ "blob_id": "8cb7290792f9390dd350e0c79711e0dd72d6063b", "index": 9508, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor i in a:\n b *= i\nprint(b)\n", "step-3": "a = range(1, 11)\nb = 1\nfor i in a:\n b *= i\nprint(b)\n", "step-4": "a=range(1,11) #1~10숫자를 에이에 저장\nb=1\nfor i in a: #a에있는 원소를 b에 곱하고 비에 저장\n b*=i\nprint(b)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
import sys import numpy as np sys.setrecursionlimit(10 ** 7) read = sys.stdin.buffer.read readline = sys.stdin.buffer.readline readlines = sys.stdin.buffer.readlines N, M = map(int, input().split()) def cumprod(arr, MOD): L = len(arr) Lsq = int(L ** 0.5 + 1) arr = np.resize(arr, Lsq ** 2).reshape(Lsq, Lsq) for n in range(1, Lsq): arr[:, n] *= arr[:, n - 1] arr[:, n] %= MOD for n in range(1, Lsq): arr[n] *= arr[n - 1, -1] arr[n] %= MOD return arr.ravel()[:L] def make_fact(U, MOD): x = np.arange(U, dtype=np.int64) x[0] = 1 fact = cumprod(x, MOD) x = np.arange(U, 0, -1, dtype=np.int64) x[0] = pow(int(fact[-1]), MOD - 2, MOD) fact_inv = cumprod(x, MOD)[::-1] return fact, fact_inv def mod_comb_k(n, k, mod): return fact[n] * fact_inv[k] % mod * fact_inv[n - k] % mod MOD = 10 ** 9 + 7 U = (10 ** 5) * 5 + 10 fact, fact_inv = make_fact(U, MOD) prev_acc_cnt = 1 c = M - N + 1 factr = 1 rest = 1 for i in range(1, N): # i個違う t = mod_comb_k(N, i, MOD) # 入れ替える factr *= c factr %= MOD # 引く p = factr - prev_acc_cnt p %= MOD # 何通り t *= p t %= MOD # 更新 c += 1 prev_acc_cnt += t prev_acc_cnt %= MOD rest += t rest %= MOD # print(i, t, prev_acc_cnt, factr) total = fact[M] * fact_inv[M - N] total %= MOD ans = total * (total - rest) ans %= MOD print(ans)
normal
{ "blob_id": "43d5bf79f16e8530797cdd13cdfcc91f0d3aef5e", "index": 8208, "step-1": "<mask token>\n\n\ndef cumprod(arr, MOD):\n L = len(arr)\n Lsq = int(L ** 0.5 + 1)\n arr = np.resize(arr, Lsq ** 2).reshape(Lsq, Lsq)\n for n in range(1, Lsq):\n arr[:, n] *= arr[:, n - 1]\n arr[:, n] %= MOD\n for n in range(1, Lsq):\n arr[n] *= arr[n - 1, -1]\n arr[n] %= MOD\n return arr.ravel()[:L]\n\n\ndef make_fact(U, MOD):\n x = np.arange(U, dtype=np.int64)\n x[0] = 1\n fact = cumprod(x, MOD)\n x = np.arange(U, 0, -1, dtype=np.int64)\n x[0] = pow(int(fact[-1]), MOD - 2, MOD)\n fact_inv = cumprod(x, MOD)[::-1]\n return fact, fact_inv\n\n\ndef mod_comb_k(n, k, mod):\n return fact[n] * fact_inv[k] % mod * fact_inv[n - k] % mod\n\n\n<mask token>\n", "step-2": "<mask token>\nsys.setrecursionlimit(10 ** 7)\n<mask token>\n\n\ndef cumprod(arr, MOD):\n L = len(arr)\n Lsq = int(L ** 0.5 + 1)\n arr = np.resize(arr, Lsq ** 2).reshape(Lsq, Lsq)\n for n in range(1, Lsq):\n arr[:, n] *= arr[:, n - 1]\n arr[:, n] %= MOD\n for n in range(1, Lsq):\n arr[n] *= arr[n - 1, -1]\n arr[n] %= MOD\n return arr.ravel()[:L]\n\n\ndef make_fact(U, MOD):\n x = np.arange(U, dtype=np.int64)\n x[0] = 1\n fact = cumprod(x, MOD)\n x = np.arange(U, 0, -1, dtype=np.int64)\n x[0] = pow(int(fact[-1]), MOD - 2, MOD)\n fact_inv = cumprod(x, MOD)[::-1]\n return fact, fact_inv\n\n\ndef mod_comb_k(n, k, mod):\n return fact[n] * fact_inv[k] % mod * fact_inv[n - k] % mod\n\n\n<mask token>\nfor i in range(1, N):\n t = mod_comb_k(N, i, MOD)\n factr *= c\n factr %= MOD\n p = factr - prev_acc_cnt\n p %= MOD\n t *= p\n t %= MOD\n c += 1\n prev_acc_cnt += t\n prev_acc_cnt %= MOD\n rest += t\n rest %= MOD\n<mask token>\ntotal %= MOD\n<mask token>\nans %= MOD\nprint(ans)\n", "step-3": "<mask token>\nsys.setrecursionlimit(10 ** 7)\nread = sys.stdin.buffer.read\nreadline = sys.stdin.buffer.readline\nreadlines = sys.stdin.buffer.readlines\nN, M = map(int, input().split())\n\n\ndef cumprod(arr, MOD):\n L = len(arr)\n Lsq = int(L ** 0.5 + 
1)\n arr = np.resize(arr, Lsq ** 2).reshape(Lsq, Lsq)\n for n in range(1, Lsq):\n arr[:, n] *= arr[:, n - 1]\n arr[:, n] %= MOD\n for n in range(1, Lsq):\n arr[n] *= arr[n - 1, -1]\n arr[n] %= MOD\n return arr.ravel()[:L]\n\n\ndef make_fact(U, MOD):\n x = np.arange(U, dtype=np.int64)\n x[0] = 1\n fact = cumprod(x, MOD)\n x = np.arange(U, 0, -1, dtype=np.int64)\n x[0] = pow(int(fact[-1]), MOD - 2, MOD)\n fact_inv = cumprod(x, MOD)[::-1]\n return fact, fact_inv\n\n\ndef mod_comb_k(n, k, mod):\n return fact[n] * fact_inv[k] % mod * fact_inv[n - k] % mod\n\n\nMOD = 10 ** 9 + 7\nU = 10 ** 5 * 5 + 10\nfact, fact_inv = make_fact(U, MOD)\nprev_acc_cnt = 1\nc = M - N + 1\nfactr = 1\nrest = 1\nfor i in range(1, N):\n t = mod_comb_k(N, i, MOD)\n factr *= c\n factr %= MOD\n p = factr - prev_acc_cnt\n p %= MOD\n t *= p\n t %= MOD\n c += 1\n prev_acc_cnt += t\n prev_acc_cnt %= MOD\n rest += t\n rest %= MOD\ntotal = fact[M] * fact_inv[M - N]\ntotal %= MOD\nans = total * (total - rest)\nans %= MOD\nprint(ans)\n", "step-4": "import sys\nimport numpy as np\nsys.setrecursionlimit(10 ** 7)\nread = sys.stdin.buffer.read\nreadline = sys.stdin.buffer.readline\nreadlines = sys.stdin.buffer.readlines\nN, M = map(int, input().split())\n\n\ndef cumprod(arr, MOD):\n L = len(arr)\n Lsq = int(L ** 0.5 + 1)\n arr = np.resize(arr, Lsq ** 2).reshape(Lsq, Lsq)\n for n in range(1, Lsq):\n arr[:, n] *= arr[:, n - 1]\n arr[:, n] %= MOD\n for n in range(1, Lsq):\n arr[n] *= arr[n - 1, -1]\n arr[n] %= MOD\n return arr.ravel()[:L]\n\n\ndef make_fact(U, MOD):\n x = np.arange(U, dtype=np.int64)\n x[0] = 1\n fact = cumprod(x, MOD)\n x = np.arange(U, 0, -1, dtype=np.int64)\n x[0] = pow(int(fact[-1]), MOD - 2, MOD)\n fact_inv = cumprod(x, MOD)[::-1]\n return fact, fact_inv\n\n\ndef mod_comb_k(n, k, mod):\n return fact[n] * fact_inv[k] % mod * fact_inv[n - k] % mod\n\n\nMOD = 10 ** 9 + 7\nU = 10 ** 5 * 5 + 10\nfact, fact_inv = make_fact(U, MOD)\nprev_acc_cnt = 1\nc = M - N + 1\nfactr = 1\nrest = 1\nfor i in 
range(1, N):\n t = mod_comb_k(N, i, MOD)\n factr *= c\n factr %= MOD\n p = factr - prev_acc_cnt\n p %= MOD\n t *= p\n t %= MOD\n c += 1\n prev_acc_cnt += t\n prev_acc_cnt %= MOD\n rest += t\n rest %= MOD\ntotal = fact[M] * fact_inv[M - N]\ntotal %= MOD\nans = total * (total - rest)\nans %= MOD\nprint(ans)\n", "step-5": "import sys\nimport numpy as np\n\nsys.setrecursionlimit(10 ** 7)\n\nread = sys.stdin.buffer.read\nreadline = sys.stdin.buffer.readline\nreadlines = sys.stdin.buffer.readlines\n\nN, M = map(int, input().split())\n\n\ndef cumprod(arr, MOD):\n L = len(arr)\n Lsq = int(L ** 0.5 + 1)\n arr = np.resize(arr, Lsq ** 2).reshape(Lsq, Lsq)\n for n in range(1, Lsq):\n arr[:, n] *= arr[:, n - 1]\n arr[:, n] %= MOD\n for n in range(1, Lsq):\n arr[n] *= arr[n - 1, -1]\n arr[n] %= MOD\n return arr.ravel()[:L]\n\n\ndef make_fact(U, MOD):\n x = np.arange(U, dtype=np.int64)\n x[0] = 1\n fact = cumprod(x, MOD)\n x = np.arange(U, 0, -1, dtype=np.int64)\n x[0] = pow(int(fact[-1]), MOD - 2, MOD)\n fact_inv = cumprod(x, MOD)[::-1]\n return fact, fact_inv\n\n\ndef mod_comb_k(n, k, mod):\n return fact[n] * fact_inv[k] % mod * fact_inv[n - k] % mod\n\n\nMOD = 10 ** 9 + 7\nU = (10 ** 5) * 5 + 10\nfact, fact_inv = make_fact(U, MOD)\n\nprev_acc_cnt = 1\nc = M - N + 1\nfactr = 1\nrest = 1\nfor i in range(1, N):\n # i個違う\n t = mod_comb_k(N, i, MOD)\n\n # 入れ替える\n factr *= c\n factr %= MOD\n\n # 引く\n p = factr - prev_acc_cnt\n p %= MOD\n\n # 何通り\n t *= p\n t %= MOD\n\n # 更新\n c += 1\n prev_acc_cnt += t\n prev_acc_cnt %= MOD\n\n rest += t\n rest %= MOD\n\n # print(i, t, prev_acc_cnt, factr)\n\ntotal = fact[M] * fact_inv[M - N]\ntotal %= MOD\n\nans = total * (total - rest)\nans %= MOD\n\nprint(ans)\n\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
from wrapper import SeleniumWrapper from selenium.webdriver.common.by import By class PageDetector: def __init__(self, driver): self.selenium = SeleniumWrapper(driver) def detect(self): if self.selenium.wait_for_presence(locator=(By.ID, "teams-app-bar"), timeout=30): if self.selenium.wait_for_presence(locator=(By.ID, "download-desktop-page"), timeout=3): return "promo-page" return "main-app-page" elif self.selenium.wait_for_title_contains(title_substring="Sign in", timeout=30): return "sign-in-page" else: return "unknown"
normal
{ "blob_id": "603d7df0639def2b620cca2299077674e35a74b2", "index": 5980, "step-1": "<mask token>\n\n\nclass PageDetector:\n <mask token>\n <mask token>\n", "step-2": "<mask token>\n\n\nclass PageDetector:\n\n def __init__(self, driver):\n self.selenium = SeleniumWrapper(driver)\n <mask token>\n", "step-3": "<mask token>\n\n\nclass PageDetector:\n\n def __init__(self, driver):\n self.selenium = SeleniumWrapper(driver)\n\n def detect(self):\n if self.selenium.wait_for_presence(locator=(By.ID, 'teams-app-bar'),\n timeout=30):\n if self.selenium.wait_for_presence(locator=(By.ID,\n 'download-desktop-page'), timeout=3):\n return 'promo-page'\n return 'main-app-page'\n elif self.selenium.wait_for_title_contains(title_substring=\n 'Sign in', timeout=30):\n return 'sign-in-page'\n else:\n return 'unknown'\n", "step-4": "from wrapper import SeleniumWrapper\nfrom selenium.webdriver.common.by import By\n\n\nclass PageDetector:\n\n def __init__(self, driver):\n self.selenium = SeleniumWrapper(driver)\n\n def detect(self):\n if self.selenium.wait_for_presence(locator=(By.ID, 'teams-app-bar'),\n timeout=30):\n if self.selenium.wait_for_presence(locator=(By.ID,\n 'download-desktop-page'), timeout=3):\n return 'promo-page'\n return 'main-app-page'\n elif self.selenium.wait_for_title_contains(title_substring=\n 'Sign in', timeout=30):\n return 'sign-in-page'\n else:\n return 'unknown'\n", "step-5": "from wrapper import SeleniumWrapper\nfrom selenium.webdriver.common.by import By\n\n\n\nclass PageDetector:\n\n def __init__(self, driver):\n self.selenium = SeleniumWrapper(driver)\n \n\n def detect(self):\n if self.selenium.wait_for_presence(locator=(By.ID, \"teams-app-bar\"), timeout=30):\n if self.selenium.wait_for_presence(locator=(By.ID, \"download-desktop-page\"), timeout=3):\n return \"promo-page\"\n return \"main-app-page\"\n elif self.selenium.wait_for_title_contains(title_substring=\"Sign in\", timeout=30):\n return \"sign-in-page\"\n else:\n return \"unknown\"", "step-ids": [ 
1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> def postfix(expression): operators, stack = '+-*/', [] for item in expression.split(): if item not in operators: stack.append(item) else: operand_1, operand_2 = stack.pop(), stack.pop() stack.append(str(eval(operand_2 + item + operand_1))) return int(float(stack.pop()))
flexible
{ "blob_id": "3ae0149af78216d6cc85313ebaa6f7cd99185c05", "index": 531, "step-1": "<mask token>\n", "step-2": "def postfix(expression):\n operators, stack = '+-*/', []\n for item in expression.split():\n if item not in operators:\n stack.append(item)\n else:\n operand_1, operand_2 = stack.pop(), stack.pop()\n stack.append(str(eval(operand_2 + item + operand_1)))\n return int(float(stack.pop()))\n", "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0, 1 ] }
[ 0, 1 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> sns.set() get_ipython().magic('matplotlib inline') <|reserved_special_token_0|> if header_included: header = 0 <|reserved_special_token_0|> for item in combinations: index = ax[i] x_vis = X[:, [features[item[0]], features[item[1]]]] axes[index[0], index[1]].scatter(x_vis[Y == 0, 0], x_vis[Y == 0, 1], label='Class #0', alpha=0.5, edgecolor=almost_black, facecolor= palette1[4], linewidth=0.15) axes[index[0], index[1]].scatter(x_vis[Y == 1, 0], x_vis[Y == 1, 1], label='Class #1', alpha=0.1, edgecolor=almost_black, facecolor= palette2[0], linewidth=0.15) axes[index[0], index[1]].set_title(item[1].capitalize(), 'v.s.', item[0 ].capitalize(), fontsize=fs) i += 1 plt.legend() plt.show() <|reserved_special_token_1|> <|reserved_special_token_0|> almost_black = '#262626' <|reserved_special_token_0|> sns.set() get_ipython().magic('matplotlib inline') filepath = 'data/full_data_genre.csv' header = None header_included = True if header_included: header = 0 df = pd.read_csv(filepath, header=header) df['billboard'] = df['weeks'].map(lambda x: x != 0) df = df.drop('artist', 1) df = df.drop('title', 1) genres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative'] accoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness', 'acousticness', 'instrumentalness', 'danceability', 'time_signature', 'loudness', 'duration', 'mode'] artist = ['artist_familiarity', 'artist_hottness'] var = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy', 'liveness', 'danceability', 'speechiness', 'instrumentalness'] X = np.array(df[var]) fnames = df[var].columns features = dict(zip(fnames, range(len(fnames)))) palette1 = sns.color_palette('Paired') flatui = ['#9b59b6', '#3498db', '#95a5a6', '#e74c3c', '#34495e', '#2ecc71'] palette2 = sns.color_palette(flatui) fs = 20 fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(15, 15)) ax = list(itertools.product(range(2), range(2))) combinations = 
[('artist_familiarity', 'artist_hottness'), ('tempo', 'energy'), ('liveness', 'danceability'), ('instrumentalness', 'speechiness')] i = 0 for item in combinations: index = ax[i] x_vis = X[:, [features[item[0]], features[item[1]]]] axes[index[0], index[1]].scatter(x_vis[Y == 0, 0], x_vis[Y == 0, 1], label='Class #0', alpha=0.5, edgecolor=almost_black, facecolor= palette1[4], linewidth=0.15) axes[index[0], index[1]].scatter(x_vis[Y == 1, 0], x_vis[Y == 1, 1], label='Class #1', alpha=0.1, edgecolor=almost_black, facecolor= palette2[0], linewidth=0.15) axes[index[0], index[1]].set_title(item[1].capitalize(), 'v.s.', item[0 ].capitalize(), fontsize=fs) i += 1 plt.legend() plt.show() <|reserved_special_token_1|> import pandas as pd import numpy as np import itertools almost_black = '#262626' import matplotlib import seaborn as sns import matplotlib.pyplot as plt from matplotlib.colors import ListedColormap sns.set() get_ipython().magic('matplotlib inline') filepath = 'data/full_data_genre.csv' header = None header_included = True if header_included: header = 0 df = pd.read_csv(filepath, header=header) df['billboard'] = df['weeks'].map(lambda x: x != 0) df = df.drop('artist', 1) df = df.drop('title', 1) genres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative'] accoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness', 'acousticness', 'instrumentalness', 'danceability', 'time_signature', 'loudness', 'duration', 'mode'] artist = ['artist_familiarity', 'artist_hottness'] var = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy', 'liveness', 'danceability', 'speechiness', 'instrumentalness'] X = np.array(df[var]) fnames = df[var].columns features = dict(zip(fnames, range(len(fnames)))) palette1 = sns.color_palette('Paired') flatui = ['#9b59b6', '#3498db', '#95a5a6', '#e74c3c', '#34495e', '#2ecc71'] palette2 = sns.color_palette(flatui) fs = 20 fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(15, 15)) ax = list(itertools.product(range(2), 
range(2))) combinations = [('artist_familiarity', 'artist_hottness'), ('tempo', 'energy'), ('liveness', 'danceability'), ('instrumentalness', 'speechiness')] i = 0 for item in combinations: index = ax[i] x_vis = X[:, [features[item[0]], features[item[1]]]] axes[index[0], index[1]].scatter(x_vis[Y == 0, 0], x_vis[Y == 0, 1], label='Class #0', alpha=0.5, edgecolor=almost_black, facecolor= palette1[4], linewidth=0.15) axes[index[0], index[1]].scatter(x_vis[Y == 1, 0], x_vis[Y == 1, 1], label='Class #1', alpha=0.1, edgecolor=almost_black, facecolor= palette2[0], linewidth=0.15) axes[index[0], index[1]].set_title(item[1].capitalize(), 'v.s.', item[0 ].capitalize(), fontsize=fs) i += 1 plt.legend() plt.show() <|reserved_special_token_1|> # coding: utf-8 # In[1]: import pandas as pd import numpy as np import itertools # Save a nice dark grey as a variable almost_black = '#262626' import matplotlib import seaborn as sns import matplotlib.pyplot as plt from matplotlib.colors import ListedColormap sns.set() get_ipython().magic('matplotlib inline') # In[2]: filepath = 'data/full_data_genre.csv' header = None header_included = True if header_included: header = 0 df = pd.read_csv(filepath, header = header) df['billboard'] = df['weeks'].map(lambda x: x != 0) df = df.drop('artist', 1) df = df.drop('title', 1) genres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative'] accoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness', 'acousticness', 'instrumentalness', 'danceability', 'time_signature', 'loudness', 'duration', 'mode'] artist = ['artist_familiarity', 'artist_hottness'] var = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy', 'liveness', 'danceability','speechiness', 'instrumentalness'] X = np.array(df[var]) # In[3]: fnames = df[var].columns features = dict(zip(fnames, range(len(fnames)))) # In[4]: palette1 = sns.color_palette("Paired") flatui = ["#9b59b6", "#3498db", "#95a5a6", "#e74c3c", "#34495e", "#2ecc71"] palette2 = 
sns.color_palette(flatui) fs = 20 # fontsize fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(15,15)) ax = list(itertools.product(range(2), range(2))) combinations = [('artist_familiarity', 'artist_hottness'), ('tempo', 'energy'), ('liveness', 'danceability'), ('instrumentalness', 'speechiness')] i = 0 for item in combinations: index = ax[i] x_vis = X[:, [features[item[0]], features[item[1]]]] axes[index[0], index[1]].scatter(x_vis[Y==0, 0], x_vis[Y==0, 1], label="Class #0", alpha=0.5, edgecolor=almost_black, facecolor=palette1[4], linewidth=0.15) axes[index[0], index[1]].scatter(x_vis[Y==1, 0], x_vis[Y==1, 1], label="Class #1", alpha=0.1, edgecolor=almost_black, facecolor=palette2[0], linewidth=0.15) axes[index[0], index[1]].set_title(item[1].capitalize(),'v.s.', item[0].capitalize(), fontsize=fs) i+=1 plt.legend() plt.show()
flexible
{ "blob_id": "f2786e445bdf66cf6bb66f4cde4c7b2bf819d8aa", "index": 3299, "step-1": "<mask token>\n", "step-2": "<mask token>\nsns.set()\nget_ipython().magic('matplotlib inline')\n<mask token>\nif header_included:\n header = 0\n<mask token>\nfor item in combinations:\n index = ax[i]\n x_vis = X[:, [features[item[0]], features[item[1]]]]\n axes[index[0], index[1]].scatter(x_vis[Y == 0, 0], x_vis[Y == 0, 1],\n label='Class #0', alpha=0.5, edgecolor=almost_black, facecolor=\n palette1[4], linewidth=0.15)\n axes[index[0], index[1]].scatter(x_vis[Y == 1, 0], x_vis[Y == 1, 1],\n label='Class #1', alpha=0.1, edgecolor=almost_black, facecolor=\n palette2[0], linewidth=0.15)\n axes[index[0], index[1]].set_title(item[1].capitalize(), 'v.s.', item[0\n ].capitalize(), fontsize=fs)\n i += 1\nplt.legend()\nplt.show()\n", "step-3": "<mask token>\nalmost_black = '#262626'\n<mask token>\nsns.set()\nget_ipython().magic('matplotlib inline')\nfilepath = 'data/full_data_genre.csv'\nheader = None\nheader_included = True\nif header_included:\n header = 0\ndf = pd.read_csv(filepath, header=header)\ndf['billboard'] = df['weeks'].map(lambda x: x != 0)\ndf = df.drop('artist', 1)\ndf = df.drop('title', 1)\ngenres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative']\naccoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness',\n 'acousticness', 'instrumentalness', 'danceability', 'time_signature',\n 'loudness', 'duration', 'mode']\nartist = ['artist_familiarity', 'artist_hottness']\nvar = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy',\n 'liveness', 'danceability', 'speechiness', 'instrumentalness']\nX = np.array(df[var])\nfnames = df[var].columns\nfeatures = dict(zip(fnames, range(len(fnames))))\npalette1 = sns.color_palette('Paired')\nflatui = ['#9b59b6', '#3498db', '#95a5a6', '#e74c3c', '#34495e', '#2ecc71']\npalette2 = sns.color_palette(flatui)\nfs = 20\nfig, axes = plt.subplots(nrows=2, ncols=2, figsize=(15, 15))\nax = list(itertools.product(range(2), 
range(2)))\ncombinations = [('artist_familiarity', 'artist_hottness'), ('tempo',\n 'energy'), ('liveness', 'danceability'), ('instrumentalness',\n 'speechiness')]\ni = 0\nfor item in combinations:\n index = ax[i]\n x_vis = X[:, [features[item[0]], features[item[1]]]]\n axes[index[0], index[1]].scatter(x_vis[Y == 0, 0], x_vis[Y == 0, 1],\n label='Class #0', alpha=0.5, edgecolor=almost_black, facecolor=\n palette1[4], linewidth=0.15)\n axes[index[0], index[1]].scatter(x_vis[Y == 1, 0], x_vis[Y == 1, 1],\n label='Class #1', alpha=0.1, edgecolor=almost_black, facecolor=\n palette2[0], linewidth=0.15)\n axes[index[0], index[1]].set_title(item[1].capitalize(), 'v.s.', item[0\n ].capitalize(), fontsize=fs)\n i += 1\nplt.legend()\nplt.show()\n", "step-4": "import pandas as pd\nimport numpy as np\nimport itertools\nalmost_black = '#262626'\nimport matplotlib\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nsns.set()\nget_ipython().magic('matplotlib inline')\nfilepath = 'data/full_data_genre.csv'\nheader = None\nheader_included = True\nif header_included:\n header = 0\ndf = pd.read_csv(filepath, header=header)\ndf['billboard'] = df['weeks'].map(lambda x: x != 0)\ndf = df.drop('artist', 1)\ndf = df.drop('title', 1)\ngenres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative']\naccoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness',\n 'acousticness', 'instrumentalness', 'danceability', 'time_signature',\n 'loudness', 'duration', 'mode']\nartist = ['artist_familiarity', 'artist_hottness']\nvar = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy',\n 'liveness', 'danceability', 'speechiness', 'instrumentalness']\nX = np.array(df[var])\nfnames = df[var].columns\nfeatures = dict(zip(fnames, range(len(fnames))))\npalette1 = sns.color_palette('Paired')\nflatui = ['#9b59b6', '#3498db', '#95a5a6', '#e74c3c', '#34495e', '#2ecc71']\npalette2 = sns.color_palette(flatui)\nfs = 20\nfig, axes = 
plt.subplots(nrows=2, ncols=2, figsize=(15, 15))\nax = list(itertools.product(range(2), range(2)))\ncombinations = [('artist_familiarity', 'artist_hottness'), ('tempo',\n 'energy'), ('liveness', 'danceability'), ('instrumentalness',\n 'speechiness')]\ni = 0\nfor item in combinations:\n index = ax[i]\n x_vis = X[:, [features[item[0]], features[item[1]]]]\n axes[index[0], index[1]].scatter(x_vis[Y == 0, 0], x_vis[Y == 0, 1],\n label='Class #0', alpha=0.5, edgecolor=almost_black, facecolor=\n palette1[4], linewidth=0.15)\n axes[index[0], index[1]].scatter(x_vis[Y == 1, 0], x_vis[Y == 1, 1],\n label='Class #1', alpha=0.1, edgecolor=almost_black, facecolor=\n palette2[0], linewidth=0.15)\n axes[index[0], index[1]].set_title(item[1].capitalize(), 'v.s.', item[0\n ].capitalize(), fontsize=fs)\n i += 1\nplt.legend()\nplt.show()\n", "step-5": "\n# coding: utf-8\n\n# In[1]:\n\nimport pandas as pd\nimport numpy as np\nimport itertools\n# Save a nice dark grey as a variable\nalmost_black = '#262626'\nimport matplotlib\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nfrom matplotlib.colors import ListedColormap\nsns.set()\nget_ipython().magic('matplotlib inline')\n\n\n# In[2]:\n\nfilepath = 'data/full_data_genre.csv'\n\nheader = None\nheader_included = True\nif header_included:\n header = 0\n \ndf = pd.read_csv(filepath, header = header)\ndf['billboard'] = df['weeks'].map(lambda x: x != 0)\ndf = df.drop('artist', 1)\ndf = df.drop('title', 1)\n\ngenres = ['country', 'dance', 'hip_hop', 'pop', 'r&b', 'rock', 'alternative']\naccoustic = ['key', 'energy', 'liveness', 'tempo', 'speechiness',\n 'acousticness', 'instrumentalness', 'danceability', \n 'time_signature', 'loudness', 'duration', 'mode']\nartist = ['artist_familiarity', 'artist_hottness']\n\nvar = ['artist_familiarity', 'artist_hottness', 'tempo', 'energy', \n 'liveness', 'danceability','speechiness', 'instrumentalness']\n\nX = np.array(df[var])\n\n\n# In[3]:\n\nfnames = df[var].columns\nfeatures = dict(zip(fnames, 
range(len(fnames))))\n\n\n# In[4]:\n\npalette1 = sns.color_palette(\"Paired\")\nflatui = [\"#9b59b6\", \"#3498db\", \"#95a5a6\", \"#e74c3c\", \"#34495e\", \"#2ecc71\"]\npalette2 = sns.color_palette(flatui)\nfs = 20 # fontsize\nfig, axes = plt.subplots(nrows=2, ncols=2, figsize=(15,15))\n\nax = list(itertools.product(range(2), range(2)))\n\ncombinations = [('artist_familiarity', 'artist_hottness'),\n ('tempo', 'energy'),\n ('liveness', 'danceability'),\n ('instrumentalness', 'speechiness')]\n\ni = 0\nfor item in combinations:\n index = ax[i]\n x_vis = X[:, [features[item[0]], features[item[1]]]]\n axes[index[0], index[1]].scatter(x_vis[Y==0, 0], x_vis[Y==0, 1], label=\"Class #0\",\n alpha=0.5, edgecolor=almost_black, \n facecolor=palette1[4], linewidth=0.15)\n axes[index[0], index[1]].scatter(x_vis[Y==1, 0], x_vis[Y==1, 1], label=\"Class #1\", \n alpha=0.1, edgecolor=almost_black, \n facecolor=palette2[0], linewidth=0.15)\n axes[index[0], index[1]].set_title(item[1].capitalize(),'v.s.',\n item[0].capitalize(), \n fontsize=fs)\n\n i+=1\n \nplt.legend()\nplt.show()\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> print(datetime.strptime('2013-1-25', '%Y-%m-%d').strftime('%Y-%m-%d 00:00:00')) <|reserved_special_token_1|> from datetime import datetime, time, timedelta print(datetime.strptime('2013-1-25', '%Y-%m-%d').strftime('%Y-%m-%d 00:00:00')) <|reserved_special_token_1|> # from datetime import datetime from datetime import datetime, time, timedelta # today = datetime.now() # previous_day = today - timedelta(days=1) # previous_day = previous_day.strftime("%Y%m%d") # print(today) # print(previous_day) print(datetime.strptime("2013-1-25", '%Y-%m-%d').strftime('%Y-%m-%d 00:00:00')) # def is_midnight(): # current_time = datetime.now().time # match_date = datetime.now() # # current_time = datetime.now() # if current_time >= time(0,0) and current_time <= time(4, 30): # print(match_date) # return match_date.strftime("%Y%m%d") # else: # match_date = current_time - timedelta(days=1) # return match_date # print(is_midnight()) # def is_midnight(): # current_time = datetime.now().time() # # today = datetime.now() # previous_day = current_time - timedelta(days=1) # previous_day = previous_day.strftime("%Y%m%d") # if current_time >= time(0,0) and current_time <= time(4, 30): # print('yesy') # if time(0,0) < time(4, 30): # return current_time >= time(0,0) and current_time <= time(4, 30) # else: # crosses midnight # return current_time >= time(0,0) or current_time <= time(4, 30) # print(is_midnight()) # match_date = datetime.now() # current_time = datetime.now().time() # if current_time >= time(0,0) and current_time <= time(4, 30): # previous_day = match_date - timedelta(days=1) # match_date = previous_day.strftime("%Y%m%d") # print(match_date) # match_date = datetime.now().strftime("%Y-%m-%d") # # datetime.timedelta(days=1) # hour = datetime.now().strftime("%H") # if int(hour) < 4: # print('Previous day: %s' % (datetime.timedelta(days=1))) # else: # print(match_date) # 
print(datetime.now().strftime("%Y-%m-%d %H:%M:%S")) # import cx_Oracle # connection = None # try: # connection = cx_Oracle.connect( # 'JW', # '901203', # 'HOME-PC/XE', # encoding='UTF-8') # # show the version of the Oracle Database # print(connection.version) # c = connection.cursor() # c.execute('SELECT MATCH_ID,MATCH_DATETIME,LEAGUE,HOME_TEAM,AWAY_TEAM,HOME_FT_GOAL,AWAY_FT_GOAL,CASE WHEN HOME_FT_GOAL > AWAY_FT_GOAL THEN \'H\' WHEN HOME_FT_GOAL = AWAY_FT_GOAL THEN \'D\' ELSE \'A\' END HDA_RESULT,DRAW_MEAN,DRAW_MEDIAN,O_MACAU_D,O_BET365_D,O_HKJC_D,AWAY_MEAN,AWAY_MEDIAN,O_MACAU_A,O_BET365_A,O_HKJC_A,HKJC_ASIAN_HANDICAP,HKJC_ASIAN_AWAY FROM HDA_MEAN_VIEW WHERE MATCH_HANDICAP=\'上盤\' AND HOME_IND=0 AND DRAW_IND=1 AND AWAY_IND=1 ORDER BY MATCH_DATETIME DESC') # for row in c: # print(row[0],',',row[1]) # except cx_Oracle.Error as error: # print(error) # finally: # # release the connection # if connection: # connection.close()
flexible
{ "blob_id": "4dac8e7e695c473cb73ceaf3887373bcc0a08aff", "index": 5940, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint(datetime.strptime('2013-1-25', '%Y-%m-%d').strftime('%Y-%m-%d 00:00:00'))\n", "step-3": "from datetime import datetime, time, timedelta\nprint(datetime.strptime('2013-1-25', '%Y-%m-%d').strftime('%Y-%m-%d 00:00:00'))\n", "step-4": "# from datetime import datetime\nfrom datetime import datetime, time, timedelta\n# today = datetime.now()\n# previous_day = today - timedelta(days=1)\n# previous_day = previous_day.strftime(\"%Y%m%d\")\n# print(today)\n# print(previous_day)\n\nprint(datetime.strptime(\"2013-1-25\", '%Y-%m-%d').strftime('%Y-%m-%d 00:00:00'))\n\n# def is_midnight():\n# current_time = datetime.now().time\n# match_date = datetime.now()\n# # current_time = datetime.now()\n# if current_time >= time(0,0) and current_time <= time(4, 30):\n# print(match_date)\n# return match_date.strftime(\"%Y%m%d\")\n# else:\n# match_date = current_time - timedelta(days=1)\n# return match_date\n \n# print(is_midnight())\n\n# def is_midnight():\n# current_time = datetime.now().time()\n# # today = datetime.now()\n# previous_day = current_time - timedelta(days=1)\n# previous_day = previous_day.strftime(\"%Y%m%d\")\n# if current_time >= time(0,0) and current_time <= time(4, 30):\n# print('yesy')\n # if time(0,0) < time(4, 30):\n # return current_time >= time(0,0) and current_time <= time(4, 30)\n # else: # crosses midnight\n # return current_time >= time(0,0) or current_time <= time(4, 30)\n\n# print(is_midnight())\n\n\n# match_date = datetime.now()\n# current_time = datetime.now().time()\n# if current_time >= time(0,0) and current_time <= time(4, 30):\n# previous_day = match_date - timedelta(days=1)\n# match_date = previous_day.strftime(\"%Y%m%d\")\n \n# print(match_date)\n\n# match_date = datetime.now().strftime(\"%Y-%m-%d\")\n# # datetime.timedelta(days=1)\n# hour = datetime.now().strftime(\"%H\")\n# if int(hour) < 4:\n# print('Previous day: %s' % 
(datetime.timedelta(days=1)))\n# else:\n# print(match_date)\n \n# print(datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\"))\n\n\n# import cx_Oracle\n\n# connection = None\n# try:\n# connection = cx_Oracle.connect(\n# 'JW',\n# '901203',\n# 'HOME-PC/XE',\n# encoding='UTF-8')\n\n# # show the version of the Oracle Database\n# print(connection.version)\n \n# c = connection.cursor()\n# c.execute('SELECT MATCH_ID,MATCH_DATETIME,LEAGUE,HOME_TEAM,AWAY_TEAM,HOME_FT_GOAL,AWAY_FT_GOAL,CASE WHEN HOME_FT_GOAL > AWAY_FT_GOAL THEN \\'H\\' WHEN HOME_FT_GOAL = AWAY_FT_GOAL THEN \\'D\\' ELSE \\'A\\' END HDA_RESULT,DRAW_MEAN,DRAW_MEDIAN,O_MACAU_D,O_BET365_D,O_HKJC_D,AWAY_MEAN,AWAY_MEDIAN,O_MACAU_A,O_BET365_A,O_HKJC_A,HKJC_ASIAN_HANDICAP,HKJC_ASIAN_AWAY FROM HDA_MEAN_VIEW WHERE MATCH_HANDICAP=\\'上盤\\' AND HOME_IND=0 AND DRAW_IND=1 AND AWAY_IND=1 ORDER BY MATCH_DATETIME DESC')\n# for row in c:\n# print(row[0],',',row[1])\n# except cx_Oracle.Error as error:\n# print(error)\n# finally:\n# # release the connection\n# if connection:\n# connection.close()\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> def new_w(w, d): """ Multi-period commitments in the next epoch. Args: d: Defender's actions m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period) s = [q, r, w]: Current State tau: An array denoting the length of each multi-period commitment. Returns: next_w: Number of decision epochs remaining in the next epoch. """ if w.sum() > 0: next_w = w.copy() next_w[next_w > 0] -= 1 return next_w elif d[0] == 1: return np.array([51, 0, 0]) elif d[1] == 1: return np.array([0, 51, 0]) else: return np.array([0, 0, 51]) def attraction_h(next_r, a): """ Attraction function of resource (h in the paper). Args: next_r: Probable resource array in the next epoch. next_w: Multi-period commitments in the next epoch. d: Defender's actions a: Attacker's actions s = [q, r, w]: Current State rho_da: A map mapping from (d_i, a_j) to response quality rho_dq: A map mapping from (d_i, q) to response quality h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. thres: Threshold for a good response. Returns: Attraction value. """ if a == 0: if next_r == 9: return 0.8 elif next_r == 14: return 0.1 else: return 0.1 elif a == 1: if next_r == 9: return 0.1 elif next_r == 14: return 0.1 else: return 0.8 elif a == 2: if next_r == 9: return 0.1 elif next_r == 14: return 0.3 else: return 0.6 elif a == 3: if next_r == 9: return 0.1 elif next_r == 14: return 0.2 else: return 0.7 elif next_r == 9: return 0.1 elif next_r == 14: return 0.4 else: return 0.5 <|reserved_special_token_0|> def trans_prob(next_s, q, d): """ Probability of decision d from state s to state next_s Args: next_s = [next_q, next_r, next_w]: Next State d: Defender's actions s = [q, r, w]: Current State m: Number of non multi-period commitments. (i.e. 
The first m defender's actions are not multi-period) tau: An array denoting the length of each multi-period commitment. c (nr * nd): cost of defender's each action h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold g_above: attraction value when response quality is above threshold g_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. order: Order of ARA. Currently only 0 and 1 are available. Returns: prob: Probability. """ next_q, next_r, next_w = next_s A_actions = [0, 1, 2, 3, 4] prob = 0 for a in A_actions: prob_r = attraction_h(next_r[0], a) q1 = attraction_g(next_q[0], q, d, a) q2 = attraction_g(1 - next_q[0], q, d, a) prob_q = q1 / (q1 + q2) prob += a_given_s(a, q) * prob_r * prob_q return prob <|reserved_special_token_1|> <|reserved_special_token_0|> def new_w(w, d): """ Multi-period commitments in the next epoch. Args: d: Defender's actions m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period) s = [q, r, w]: Current State tau: An array denoting the length of each multi-period commitment. Returns: next_w: Number of decision epochs remaining in the next epoch. """ if w.sum() > 0: next_w = w.copy() next_w[next_w > 0] -= 1 return next_w elif d[0] == 1: return np.array([51, 0, 0]) elif d[1] == 1: return np.array([0, 51, 0]) else: return np.array([0, 0, 51]) def attraction_h(next_r, a): """ Attraction function of resource (h in the paper). Args: next_r: Probable resource array in the next epoch. next_w: Multi-period commitments in the next epoch. 
d: Defender's actions a: Attacker's actions s = [q, r, w]: Current State rho_da: A map mapping from (d_i, a_j) to response quality rho_dq: A map mapping from (d_i, q) to response quality h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. thres: Threshold for a good response. Returns: Attraction value. """ if a == 0: if next_r == 9: return 0.8 elif next_r == 14: return 0.1 else: return 0.1 elif a == 1: if next_r == 9: return 0.1 elif next_r == 14: return 0.1 else: return 0.8 elif a == 2: if next_r == 9: return 0.1 elif next_r == 14: return 0.3 else: return 0.6 elif a == 3: if next_r == 9: return 0.1 elif next_r == 14: return 0.2 else: return 0.7 elif next_r == 9: return 0.1 elif next_r == 14: return 0.4 else: return 0.5 def attraction_g(next_q, q, d, a): """ Attraction function of operational conditions (g in the paper). Args: next_q: Operational conditions in the next epoch. next_r: Probable resource array in the next epoch. next_w: Multi-period commitments in the next epoch. d: Defender's actions a: Attacker's actions s = [q, r, w]: Current State rho_da: A map mapping from (d_i, a_j) to response quality rho_dq: A map mapping from (d_i, q) to response quality g_above: attraction value when response quality is above threshold g_below: attraction value when response quality is below threshold thres: Threshold for a good response. Returns: Attraction value. 
""" if a == 0: if next_q == 0: xi_D = 8 else: xi_D = 1 elif a == 1: xi_D = 1 elif a == 2: if next_q == 0: xi_D = 1 else: xi_D = 3 elif a == 3: if next_q == 0: xi_D = 1 else: xi_D = 2 elif next_q == 0: xi_D = 1 else: xi_D = 4 dqq = 0 if next_q == 1 and q == 0: if d[3] == 1: dqq = 1 elif np.sum(d[6:]) == 3: dqq = 1 elif next_q == 0 and q == 1: if d[5] == 1: dqq = 1 elif np.sum(d[6:]) == 0: dqq = 1 return xi_D + dqq def trans_prob(next_s, q, d): """ Probability of decision d from state s to state next_s Args: next_s = [next_q, next_r, next_w]: Next State d: Defender's actions s = [q, r, w]: Current State m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period) tau: An array denoting the length of each multi-period commitment. c (nr * nd): cost of defender's each action h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold g_above: attraction value when response quality is above threshold g_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. order: Order of ARA. Currently only 0 and 1 are available. Returns: prob: Probability. """ next_q, next_r, next_w = next_s A_actions = [0, 1, 2, 3, 4] prob = 0 for a in A_actions: prob_r = attraction_h(next_r[0], a) q1 = attraction_g(next_q[0], q, d, a) q2 = attraction_g(1 - next_q[0], q, d, a) prob_q = q1 / (q1 + q2) prob += a_given_s(a, q) * prob_r * prob_q return prob <|reserved_special_token_1|> <|reserved_special_token_0|> def theta_given_s(theta, q): """ Probability of an random event theta given current state s. Args: theta: Random event s = [q, r, w]: State Returns: Unnormalized probability of the random event. """ if q == 0: return 0.3333 elif theta == 0: return 0.25 elif theta == 1: return 0.25 else: return 0.5 def new_w(w, d): """ Multi-period commitments in the next epoch. 
Args: d: Defender's actions m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period) s = [q, r, w]: Current State tau: An array denoting the length of each multi-period commitment. Returns: next_w: Number of decision epochs remaining in the next epoch. """ if w.sum() > 0: next_w = w.copy() next_w[next_w > 0] -= 1 return next_w elif d[0] == 1: return np.array([51, 0, 0]) elif d[1] == 1: return np.array([0, 51, 0]) else: return np.array([0, 0, 51]) def attraction_h(next_r, a): """ Attraction function of resource (h in the paper). Args: next_r: Probable resource array in the next epoch. next_w: Multi-period commitments in the next epoch. d: Defender's actions a: Attacker's actions s = [q, r, w]: Current State rho_da: A map mapping from (d_i, a_j) to response quality rho_dq: A map mapping from (d_i, q) to response quality h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. thres: Threshold for a good response. Returns: Attraction value. """ if a == 0: if next_r == 9: return 0.8 elif next_r == 14: return 0.1 else: return 0.1 elif a == 1: if next_r == 9: return 0.1 elif next_r == 14: return 0.1 else: return 0.8 elif a == 2: if next_r == 9: return 0.1 elif next_r == 14: return 0.3 else: return 0.6 elif a == 3: if next_r == 9: return 0.1 elif next_r == 14: return 0.2 else: return 0.7 elif next_r == 9: return 0.1 elif next_r == 14: return 0.4 else: return 0.5 def attraction_g(next_q, q, d, a): """ Attraction function of operational conditions (g in the paper). Args: next_q: Operational conditions in the next epoch. next_r: Probable resource array in the next epoch. next_w: Multi-period commitments in the next epoch. 
d: Defender's actions a: Attacker's actions s = [q, r, w]: Current State rho_da: A map mapping from (d_i, a_j) to response quality rho_dq: A map mapping from (d_i, q) to response quality g_above: attraction value when response quality is above threshold g_below: attraction value when response quality is below threshold thres: Threshold for a good response. Returns: Attraction value. """ if a == 0: if next_q == 0: xi_D = 8 else: xi_D = 1 elif a == 1: xi_D = 1 elif a == 2: if next_q == 0: xi_D = 1 else: xi_D = 3 elif a == 3: if next_q == 0: xi_D = 1 else: xi_D = 2 elif next_q == 0: xi_D = 1 else: xi_D = 4 dqq = 0 if next_q == 1 and q == 0: if d[3] == 1: dqq = 1 elif np.sum(d[6:]) == 3: dqq = 1 elif next_q == 0 and q == 1: if d[5] == 1: dqq = 1 elif np.sum(d[6:]) == 0: dqq = 1 return xi_D + dqq def trans_prob(next_s, q, d): """ Probability of decision d from state s to state next_s Args: next_s = [next_q, next_r, next_w]: Next State d: Defender's actions s = [q, r, w]: Current State m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period) tau: An array denoting the length of each multi-period commitment. c (nr * nd): cost of defender's each action h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold g_above: attraction value when response quality is above threshold g_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. order: Order of ARA. Currently only 0 and 1 are available. Returns: prob: Probability. 
""" next_q, next_r, next_w = next_s A_actions = [0, 1, 2, 3, 4] prob = 0 for a in A_actions: prob_r = attraction_h(next_r[0], a) q1 = attraction_g(next_q[0], q, d, a) q2 = attraction_g(1 - next_q[0], q, d, a) prob_q = q1 / (q1 + q2) prob += a_given_s(a, q) * prob_r * prob_q return prob <|reserved_special_token_1|> import numpy as np from ARA import * from State import * def theta_given_s(theta, q): """ Probability of an random event theta given current state s. Args: theta: Random event s = [q, r, w]: State Returns: Unnormalized probability of the random event. """ if q == 0: return 0.3333 elif theta == 0: return 0.25 elif theta == 1: return 0.25 else: return 0.5 def new_w(w, d): """ Multi-period commitments in the next epoch. Args: d: Defender's actions m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period) s = [q, r, w]: Current State tau: An array denoting the length of each multi-period commitment. Returns: next_w: Number of decision epochs remaining in the next epoch. """ if w.sum() > 0: next_w = w.copy() next_w[next_w > 0] -= 1 return next_w elif d[0] == 1: return np.array([51, 0, 0]) elif d[1] == 1: return np.array([0, 51, 0]) else: return np.array([0, 0, 51]) def attraction_h(next_r, a): """ Attraction function of resource (h in the paper). Args: next_r: Probable resource array in the next epoch. next_w: Multi-period commitments in the next epoch. d: Defender's actions a: Attacker's actions s = [q, r, w]: Current State rho_da: A map mapping from (d_i, a_j) to response quality rho_dq: A map mapping from (d_i, q) to response quality h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. thres: Threshold for a good response. Returns: Attraction value. 
""" if a == 0: if next_r == 9: return 0.8 elif next_r == 14: return 0.1 else: return 0.1 elif a == 1: if next_r == 9: return 0.1 elif next_r == 14: return 0.1 else: return 0.8 elif a == 2: if next_r == 9: return 0.1 elif next_r == 14: return 0.3 else: return 0.6 elif a == 3: if next_r == 9: return 0.1 elif next_r == 14: return 0.2 else: return 0.7 elif next_r == 9: return 0.1 elif next_r == 14: return 0.4 else: return 0.5 def attraction_g(next_q, q, d, a): """ Attraction function of operational conditions (g in the paper). Args: next_q: Operational conditions in the next epoch. next_r: Probable resource array in the next epoch. next_w: Multi-period commitments in the next epoch. d: Defender's actions a: Attacker's actions s = [q, r, w]: Current State rho_da: A map mapping from (d_i, a_j) to response quality rho_dq: A map mapping from (d_i, q) to response quality g_above: attraction value when response quality is above threshold g_below: attraction value when response quality is below threshold thres: Threshold for a good response. Returns: Attraction value. """ if a == 0: if next_q == 0: xi_D = 8 else: xi_D = 1 elif a == 1: xi_D = 1 elif a == 2: if next_q == 0: xi_D = 1 else: xi_D = 3 elif a == 3: if next_q == 0: xi_D = 1 else: xi_D = 2 elif next_q == 0: xi_D = 1 else: xi_D = 4 dqq = 0 if next_q == 1 and q == 0: if d[3] == 1: dqq = 1 elif np.sum(d[6:]) == 3: dqq = 1 elif next_q == 0 and q == 1: if d[5] == 1: dqq = 1 elif np.sum(d[6:]) == 0: dqq = 1 return xi_D + dqq def trans_prob(next_s, q, d): """ Probability of decision d from state s to state next_s Args: next_s = [next_q, next_r, next_w]: Next State d: Defender's actions s = [q, r, w]: Current State m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period) tau: An array denoting the length of each multi-period commitment. 
c (nr * nd): cost of defender's each action h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold g_above: attraction value when response quality is above threshold g_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. order: Order of ARA. Currently only 0 and 1 are available. Returns: prob: Probability. """ next_q, next_r, next_w = next_s A_actions = [0, 1, 2, 3, 4] prob = 0 for a in A_actions: prob_r = attraction_h(next_r[0], a) q1 = attraction_g(next_q[0], q, d, a) q2 = attraction_g(1 - next_q[0], q, d, a) prob_q = q1 / (q1 + q2) prob += a_given_s(a, q) * prob_r * prob_q return prob <|reserved_special_token_1|> import numpy as np from ARA import * from State import * def theta_given_s(theta, q): """ Probability of an random event theta given current state s. Args: theta: Random event s = [q, r, w]: State Returns: Unnormalized probability of the random event. """ if q == 0: return .3333 else: if theta == 0: return 0.25 elif theta == 1: return 0.25 else: return 0.5 def new_w(w, d): """ Multi-period commitments in the next epoch. Args: d: Defender's actions m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period) s = [q, r, w]: Current State tau: An array denoting the length of each multi-period commitment. Returns: next_w: Number of decision epochs remaining in the next epoch. """ if w.sum() > 0: next_w = w.copy() next_w[next_w > 0] -= 1 return next_w else: if d[0] == 1: return np.array([51,0,0]) elif d[1] == 1: return np.array([0,51,0]) else: return np.array([0,0,51]) def attraction_h(next_r, a): """ Attraction function of resource (h in the paper). Args: next_r: Probable resource array in the next epoch. next_w: Multi-period commitments in the next epoch. 
d: Defender's actions a: Attacker's actions s = [q, r, w]: Current State rho_da: A map mapping from (d_i, a_j) to response quality rho_dq: A map mapping from (d_i, q) to response quality h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. thres: Threshold for a good response. Returns: Attraction value. """ if a == 0: if next_r == 9: return 0.8 elif next_r == 14: return 0.1 else: return 0.1 elif a == 1: if next_r == 9: return 0.1 elif next_r == 14: return 0.1 else: return 0.8 elif a == 2: if next_r == 9: return 0.1 elif next_r == 14: return 0.3 else: return 0.6 elif a == 3: if next_r == 9: return 0.1 elif next_r == 14: return 0.2 else: return 0.7 else: if next_r == 9: return 0.1 elif next_r == 14: return 0.4 else: return 0.5 def attraction_g(next_q, q, d, a): """ Attraction function of operational conditions (g in the paper). Args: next_q: Operational conditions in the next epoch. next_r: Probable resource array in the next epoch. next_w: Multi-period commitments in the next epoch. d: Defender's actions a: Attacker's actions s = [q, r, w]: Current State rho_da: A map mapping from (d_i, a_j) to response quality rho_dq: A map mapping from (d_i, q) to response quality g_above: attraction value when response quality is above threshold g_below: attraction value when response quality is below threshold thres: Threshold for a good response. Returns: Attraction value. 
""" if a == 0: if next_q == 0: xi_D = 8 else: xi_D = 1 elif a == 1: xi_D = 1 elif a == 2: if next_q == 0: xi_D = 1 else: xi_D = 3 elif a == 3: if next_q == 0: xi_D = 1 else: xi_D = 2 else: if next_q == 0: xi_D = 1 else: xi_D = 4 dqq = 0 if next_q == 1 and q == 0: if d[3] == 1: dqq = 1 elif np.sum(d[6:]) == 3: dqq = 1 elif next_q == 0 and q == 1: if d[5] == 1: dqq = 1 elif np.sum(d[6:]) == 0: dqq = 1 return xi_D + dqq def trans_prob(next_s, q, d): """ Probability of decision d from state s to state next_s Args: next_s = [next_q, next_r, next_w]: Next State d: Defender's actions s = [q, r, w]: Current State m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period) tau: An array denoting the length of each multi-period commitment. c (nr * nd): cost of defender's each action h_above: attraction value when response quality is above threshold h_below: attraction value when response quality is below threshold g_above: attraction value when response quality is above threshold g_below: attraction value when response quality is below threshold dict_r: map resource to corresponding level. order: Order of ARA. Currently only 0 and 1 are available. Returns: prob: Probability. """ next_q, next_r, next_w = next_s A_actions = [0, 1, 2, 3, 4] prob = 0 for a in A_actions: prob_r = attraction_h(next_r[0], a) q1 = attraction_g(next_q[0], q, d, a) q2 = attraction_g(1-next_q[0], q, d, a) prob_q = q1 / (q1 + q2) prob += a_given_s(a, q) * prob_r * prob_q return prob
flexible
{ "blob_id": "87f3885b4357d66a745932f3c79804e6c15a57fa", "index": 3162, "step-1": "<mask token>\n\n\ndef new_w(w, d):\n \"\"\"\n Multi-period commitments in the next epoch.\n Args:\n d: Defender's actions\n m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period)\n s = [q, r, w]: Current State\n tau: An array denoting the length of each multi-period commitment.\n Returns:\n next_w: Number of decision epochs remaining in the next epoch.\n \"\"\"\n if w.sum() > 0:\n next_w = w.copy()\n next_w[next_w > 0] -= 1\n return next_w\n elif d[0] == 1:\n return np.array([51, 0, 0])\n elif d[1] == 1:\n return np.array([0, 51, 0])\n else:\n return np.array([0, 0, 51])\n\n\ndef attraction_h(next_r, a):\n \"\"\"\n Attraction function of resource (h in the paper).\n Args:\n next_r: Probable resource array in the next epoch.\n next_w: Multi-period commitments in the next epoch.\n d: Defender's actions\n a: Attacker's actions\n s = [q, r, w]: Current State\n rho_da: A map mapping from (d_i, a_j) to response quality\n rho_dq: A map mapping from (d_i, q) to response quality\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n dict_r: map resource to corresponding level.\n thres: Threshold for a good response.\n Returns:\n Attraction value.\n \"\"\"\n if a == 0:\n if next_r == 9:\n return 0.8\n elif next_r == 14:\n return 0.1\n else:\n return 0.1\n elif a == 1:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.1\n else:\n return 0.8\n elif a == 2:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.3\n else:\n return 0.6\n elif a == 3:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.2\n else:\n return 0.7\n elif next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.4\n else:\n return 0.5\n\n\n<mask token>\n\n\ndef trans_prob(next_s, q, d):\n \"\"\"\n Probability of decision d from state s to state next_s\n Args:\n 
next_s = [next_q, next_r, next_w]: Next State\n d: Defender's actions\n s = [q, r, w]: Current State\n m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period)\n tau: An array denoting the length of each multi-period commitment.\n c (nr * nd): cost of defender's each action\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n g_above: attraction value when response quality is above threshold\n g_below: attraction value when response quality is below threshold\n dict_r: map resource to corresponding level.\n order: Order of ARA. Currently only 0 and 1 are available.\n Returns:\n prob: Probability.\n \"\"\"\n next_q, next_r, next_w = next_s\n A_actions = [0, 1, 2, 3, 4]\n prob = 0\n for a in A_actions:\n prob_r = attraction_h(next_r[0], a)\n q1 = attraction_g(next_q[0], q, d, a)\n q2 = attraction_g(1 - next_q[0], q, d, a)\n prob_q = q1 / (q1 + q2)\n prob += a_given_s(a, q) * prob_r * prob_q\n return prob\n", "step-2": "<mask token>\n\n\ndef new_w(w, d):\n \"\"\"\n Multi-period commitments in the next epoch.\n Args:\n d: Defender's actions\n m: Number of non multi-period commitments. (i.e. 
The first m defender's actions are not multi-period)\n s = [q, r, w]: Current State\n tau: An array denoting the length of each multi-period commitment.\n Returns:\n next_w: Number of decision epochs remaining in the next epoch.\n \"\"\"\n if w.sum() > 0:\n next_w = w.copy()\n next_w[next_w > 0] -= 1\n return next_w\n elif d[0] == 1:\n return np.array([51, 0, 0])\n elif d[1] == 1:\n return np.array([0, 51, 0])\n else:\n return np.array([0, 0, 51])\n\n\ndef attraction_h(next_r, a):\n \"\"\"\n Attraction function of resource (h in the paper).\n Args:\n next_r: Probable resource array in the next epoch.\n next_w: Multi-period commitments in the next epoch.\n d: Defender's actions\n a: Attacker's actions\n s = [q, r, w]: Current State\n rho_da: A map mapping from (d_i, a_j) to response quality\n rho_dq: A map mapping from (d_i, q) to response quality\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n dict_r: map resource to corresponding level.\n thres: Threshold for a good response.\n Returns:\n Attraction value.\n \"\"\"\n if a == 0:\n if next_r == 9:\n return 0.8\n elif next_r == 14:\n return 0.1\n else:\n return 0.1\n elif a == 1:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.1\n else:\n return 0.8\n elif a == 2:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.3\n else:\n return 0.6\n elif a == 3:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.2\n else:\n return 0.7\n elif next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.4\n else:\n return 0.5\n\n\ndef attraction_g(next_q, q, d, a):\n \"\"\"\n Attraction function of operational conditions (g in the paper).\n Args:\n next_q: Operational conditions in the next epoch.\n next_r: Probable resource array in the next epoch.\n next_w: Multi-period commitments in the next epoch.\n d: Defender's actions\n a: Attacker's actions\n s = [q, r, w]: Current State\n rho_da: A map mapping 
from (d_i, a_j) to response quality\n rho_dq: A map mapping from (d_i, q) to response quality\n g_above: attraction value when response quality is above threshold\n g_below: attraction value when response quality is below threshold\n thres: Threshold for a good response.\n Returns:\n Attraction value.\n \"\"\"\n if a == 0:\n if next_q == 0:\n xi_D = 8\n else:\n xi_D = 1\n elif a == 1:\n xi_D = 1\n elif a == 2:\n if next_q == 0:\n xi_D = 1\n else:\n xi_D = 3\n elif a == 3:\n if next_q == 0:\n xi_D = 1\n else:\n xi_D = 2\n elif next_q == 0:\n xi_D = 1\n else:\n xi_D = 4\n dqq = 0\n if next_q == 1 and q == 0:\n if d[3] == 1:\n dqq = 1\n elif np.sum(d[6:]) == 3:\n dqq = 1\n elif next_q == 0 and q == 1:\n if d[5] == 1:\n dqq = 1\n elif np.sum(d[6:]) == 0:\n dqq = 1\n return xi_D + dqq\n\n\ndef trans_prob(next_s, q, d):\n \"\"\"\n Probability of decision d from state s to state next_s\n Args:\n next_s = [next_q, next_r, next_w]: Next State\n d: Defender's actions\n s = [q, r, w]: Current State\n m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period)\n tau: An array denoting the length of each multi-period commitment.\n c (nr * nd): cost of defender's each action\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n g_above: attraction value when response quality is above threshold\n g_below: attraction value when response quality is below threshold\n dict_r: map resource to corresponding level.\n order: Order of ARA. 
Currently only 0 and 1 are available.\n Returns:\n prob: Probability.\n \"\"\"\n next_q, next_r, next_w = next_s\n A_actions = [0, 1, 2, 3, 4]\n prob = 0\n for a in A_actions:\n prob_r = attraction_h(next_r[0], a)\n q1 = attraction_g(next_q[0], q, d, a)\n q2 = attraction_g(1 - next_q[0], q, d, a)\n prob_q = q1 / (q1 + q2)\n prob += a_given_s(a, q) * prob_r * prob_q\n return prob\n", "step-3": "<mask token>\n\n\ndef theta_given_s(theta, q):\n \"\"\"\n Probability of an random event theta given current state s.\n Args:\n theta: Random event\n s = [q, r, w]: State\n Returns:\n Unnormalized probability of the random event.\n \"\"\"\n if q == 0:\n return 0.3333\n elif theta == 0:\n return 0.25\n elif theta == 1:\n return 0.25\n else:\n return 0.5\n\n\ndef new_w(w, d):\n \"\"\"\n Multi-period commitments in the next epoch.\n Args:\n d: Defender's actions\n m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period)\n s = [q, r, w]: Current State\n tau: An array denoting the length of each multi-period commitment.\n Returns:\n next_w: Number of decision epochs remaining in the next epoch.\n \"\"\"\n if w.sum() > 0:\n next_w = w.copy()\n next_w[next_w > 0] -= 1\n return next_w\n elif d[0] == 1:\n return np.array([51, 0, 0])\n elif d[1] == 1:\n return np.array([0, 51, 0])\n else:\n return np.array([0, 0, 51])\n\n\ndef attraction_h(next_r, a):\n \"\"\"\n Attraction function of resource (h in the paper).\n Args:\n next_r: Probable resource array in the next epoch.\n next_w: Multi-period commitments in the next epoch.\n d: Defender's actions\n a: Attacker's actions\n s = [q, r, w]: Current State\n rho_da: A map mapping from (d_i, a_j) to response quality\n rho_dq: A map mapping from (d_i, q) to response quality\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n dict_r: map resource to corresponding level.\n thres: Threshold for a good response.\n 
Returns:\n Attraction value.\n \"\"\"\n if a == 0:\n if next_r == 9:\n return 0.8\n elif next_r == 14:\n return 0.1\n else:\n return 0.1\n elif a == 1:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.1\n else:\n return 0.8\n elif a == 2:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.3\n else:\n return 0.6\n elif a == 3:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.2\n else:\n return 0.7\n elif next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.4\n else:\n return 0.5\n\n\ndef attraction_g(next_q, q, d, a):\n \"\"\"\n Attraction function of operational conditions (g in the paper).\n Args:\n next_q: Operational conditions in the next epoch.\n next_r: Probable resource array in the next epoch.\n next_w: Multi-period commitments in the next epoch.\n d: Defender's actions\n a: Attacker's actions\n s = [q, r, w]: Current State\n rho_da: A map mapping from (d_i, a_j) to response quality\n rho_dq: A map mapping from (d_i, q) to response quality\n g_above: attraction value when response quality is above threshold\n g_below: attraction value when response quality is below threshold\n thres: Threshold for a good response.\n Returns:\n Attraction value.\n \"\"\"\n if a == 0:\n if next_q == 0:\n xi_D = 8\n else:\n xi_D = 1\n elif a == 1:\n xi_D = 1\n elif a == 2:\n if next_q == 0:\n xi_D = 1\n else:\n xi_D = 3\n elif a == 3:\n if next_q == 0:\n xi_D = 1\n else:\n xi_D = 2\n elif next_q == 0:\n xi_D = 1\n else:\n xi_D = 4\n dqq = 0\n if next_q == 1 and q == 0:\n if d[3] == 1:\n dqq = 1\n elif np.sum(d[6:]) == 3:\n dqq = 1\n elif next_q == 0 and q == 1:\n if d[5] == 1:\n dqq = 1\n elif np.sum(d[6:]) == 0:\n dqq = 1\n return xi_D + dqq\n\n\ndef trans_prob(next_s, q, d):\n \"\"\"\n Probability of decision d from state s to state next_s\n Args:\n next_s = [next_q, next_r, next_w]: Next State\n d: Defender's actions\n s = [q, r, w]: Current State\n m: Number of non multi-period commitments. (i.e. 
The first m defender's actions are not multi-period)\n tau: An array denoting the length of each multi-period commitment.\n c (nr * nd): cost of defender's each action\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n g_above: attraction value when response quality is above threshold\n g_below: attraction value when response quality is below threshold\n dict_r: map resource to corresponding level.\n order: Order of ARA. Currently only 0 and 1 are available.\n Returns:\n prob: Probability.\n \"\"\"\n next_q, next_r, next_w = next_s\n A_actions = [0, 1, 2, 3, 4]\n prob = 0\n for a in A_actions:\n prob_r = attraction_h(next_r[0], a)\n q1 = attraction_g(next_q[0], q, d, a)\n q2 = attraction_g(1 - next_q[0], q, d, a)\n prob_q = q1 / (q1 + q2)\n prob += a_given_s(a, q) * prob_r * prob_q\n return prob\n", "step-4": "import numpy as np\nfrom ARA import *\nfrom State import *\n\n\ndef theta_given_s(theta, q):\n \"\"\"\n Probability of an random event theta given current state s.\n Args:\n theta: Random event\n s = [q, r, w]: State\n Returns:\n Unnormalized probability of the random event.\n \"\"\"\n if q == 0:\n return 0.3333\n elif theta == 0:\n return 0.25\n elif theta == 1:\n return 0.25\n else:\n return 0.5\n\n\ndef new_w(w, d):\n \"\"\"\n Multi-period commitments in the next epoch.\n Args:\n d: Defender's actions\n m: Number of non multi-period commitments. (i.e. 
The first m defender's actions are not multi-period)\n s = [q, r, w]: Current State\n tau: An array denoting the length of each multi-period commitment.\n Returns:\n next_w: Number of decision epochs remaining in the next epoch.\n \"\"\"\n if w.sum() > 0:\n next_w = w.copy()\n next_w[next_w > 0] -= 1\n return next_w\n elif d[0] == 1:\n return np.array([51, 0, 0])\n elif d[1] == 1:\n return np.array([0, 51, 0])\n else:\n return np.array([0, 0, 51])\n\n\ndef attraction_h(next_r, a):\n \"\"\"\n Attraction function of resource (h in the paper).\n Args:\n next_r: Probable resource array in the next epoch.\n next_w: Multi-period commitments in the next epoch.\n d: Defender's actions\n a: Attacker's actions\n s = [q, r, w]: Current State\n rho_da: A map mapping from (d_i, a_j) to response quality\n rho_dq: A map mapping from (d_i, q) to response quality\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n dict_r: map resource to corresponding level.\n thres: Threshold for a good response.\n Returns:\n Attraction value.\n \"\"\"\n if a == 0:\n if next_r == 9:\n return 0.8\n elif next_r == 14:\n return 0.1\n else:\n return 0.1\n elif a == 1:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.1\n else:\n return 0.8\n elif a == 2:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.3\n else:\n return 0.6\n elif a == 3:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.2\n else:\n return 0.7\n elif next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.4\n else:\n return 0.5\n\n\ndef attraction_g(next_q, q, d, a):\n \"\"\"\n Attraction function of operational conditions (g in the paper).\n Args:\n next_q: Operational conditions in the next epoch.\n next_r: Probable resource array in the next epoch.\n next_w: Multi-period commitments in the next epoch.\n d: Defender's actions\n a: Attacker's actions\n s = [q, r, w]: Current State\n rho_da: A map mapping 
from (d_i, a_j) to response quality\n rho_dq: A map mapping from (d_i, q) to response quality\n g_above: attraction value when response quality is above threshold\n g_below: attraction value when response quality is below threshold\n thres: Threshold for a good response.\n Returns:\n Attraction value.\n \"\"\"\n if a == 0:\n if next_q == 0:\n xi_D = 8\n else:\n xi_D = 1\n elif a == 1:\n xi_D = 1\n elif a == 2:\n if next_q == 0:\n xi_D = 1\n else:\n xi_D = 3\n elif a == 3:\n if next_q == 0:\n xi_D = 1\n else:\n xi_D = 2\n elif next_q == 0:\n xi_D = 1\n else:\n xi_D = 4\n dqq = 0\n if next_q == 1 and q == 0:\n if d[3] == 1:\n dqq = 1\n elif np.sum(d[6:]) == 3:\n dqq = 1\n elif next_q == 0 and q == 1:\n if d[5] == 1:\n dqq = 1\n elif np.sum(d[6:]) == 0:\n dqq = 1\n return xi_D + dqq\n\n\ndef trans_prob(next_s, q, d):\n \"\"\"\n Probability of decision d from state s to state next_s\n Args:\n next_s = [next_q, next_r, next_w]: Next State\n d: Defender's actions\n s = [q, r, w]: Current State\n m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period)\n tau: An array denoting the length of each multi-period commitment.\n c (nr * nd): cost of defender's each action\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n g_above: attraction value when response quality is above threshold\n g_below: attraction value when response quality is below threshold\n dict_r: map resource to corresponding level.\n order: Order of ARA. 
Currently only 0 and 1 are available.\n Returns:\n prob: Probability.\n \"\"\"\n next_q, next_r, next_w = next_s\n A_actions = [0, 1, 2, 3, 4]\n prob = 0\n for a in A_actions:\n prob_r = attraction_h(next_r[0], a)\n q1 = attraction_g(next_q[0], q, d, a)\n q2 = attraction_g(1 - next_q[0], q, d, a)\n prob_q = q1 / (q1 + q2)\n prob += a_given_s(a, q) * prob_r * prob_q\n return prob\n", "step-5": "import numpy as np\nfrom ARA import *\nfrom State import *\n\ndef theta_given_s(theta, q):\n \"\"\"\n Probability of an random event theta given current state s.\n Args:\n theta: Random event\n s = [q, r, w]: State\n Returns:\n Unnormalized probability of the random event.\n \"\"\"\n if q == 0:\n return .3333\n else:\n if theta == 0:\n return 0.25\n elif theta == 1:\n return 0.25\n else:\n return 0.5\n\n\ndef new_w(w, d):\n \"\"\"\n Multi-period commitments in the next epoch.\n Args:\n d: Defender's actions\n m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period)\n s = [q, r, w]: Current State\n tau: An array denoting the length of each multi-period commitment.\n Returns:\n next_w: Number of decision epochs remaining in the next epoch.\n \"\"\"\n\n if w.sum() > 0:\n next_w = w.copy()\n next_w[next_w > 0] -= 1\n return next_w\n else:\n if d[0] == 1:\n return np.array([51,0,0])\n elif d[1] == 1:\n return np.array([0,51,0])\n else:\n return np.array([0,0,51])\n\n\n\ndef attraction_h(next_r, a):\n \"\"\"\n Attraction function of resource (h in the paper).\n Args:\n next_r: Probable resource array in the next epoch.\n next_w: Multi-period commitments in the next epoch.\n d: Defender's actions\n a: Attacker's actions\n s = [q, r, w]: Current State\n rho_da: A map mapping from (d_i, a_j) to response quality\n rho_dq: A map mapping from (d_i, q) to response quality\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n dict_r: map resource to 
corresponding level.\n thres: Threshold for a good response.\n Returns:\n Attraction value.\n \"\"\"\n if a == 0:\n if next_r == 9:\n return 0.8\n elif next_r == 14:\n return 0.1\n else:\n return 0.1\n\n elif a == 1:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.1\n else:\n return 0.8\n\n elif a == 2:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.3\n else:\n return 0.6\n\n elif a == 3:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.2\n else:\n return 0.7\n\n else:\n if next_r == 9:\n return 0.1\n elif next_r == 14:\n return 0.4\n else:\n return 0.5\n\n\ndef attraction_g(next_q, q, d, a):\n \"\"\"\n Attraction function of operational conditions (g in the paper).\n Args:\n next_q: Operational conditions in the next epoch.\n next_r: Probable resource array in the next epoch.\n next_w: Multi-period commitments in the next epoch.\n d: Defender's actions\n a: Attacker's actions\n s = [q, r, w]: Current State\n rho_da: A map mapping from (d_i, a_j) to response quality\n rho_dq: A map mapping from (d_i, q) to response quality\n g_above: attraction value when response quality is above threshold\n g_below: attraction value when response quality is below threshold\n thres: Threshold for a good response.\n Returns:\n Attraction value.\n \"\"\"\n\n if a == 0:\n if next_q == 0:\n xi_D = 8\n else:\n xi_D = 1\n\n elif a == 1:\n xi_D = 1\n\n elif a == 2:\n if next_q == 0:\n xi_D = 1\n else:\n xi_D = 3\n\n elif a == 3:\n if next_q == 0:\n xi_D = 1\n else:\n xi_D = 2\n\n else:\n if next_q == 0:\n xi_D = 1\n else:\n xi_D = 4\n\n dqq = 0\n if next_q == 1 and q == 0:\n if d[3] == 1:\n dqq = 1\n elif np.sum(d[6:]) == 3:\n dqq = 1\n elif next_q == 0 and q == 1:\n if d[5] == 1:\n dqq = 1\n elif np.sum(d[6:]) == 0:\n dqq = 1\n\n return xi_D + dqq\n\n\n\ndef trans_prob(next_s, q, d):\n \"\"\"\n Probability of decision d from state s to state next_s\n Args:\n next_s = [next_q, next_r, next_w]: Next State\n d: Defender's actions\n s = [q, r, 
w]: Current State\n m: Number of non multi-period commitments. (i.e. The first m defender's actions are not multi-period)\n tau: An array denoting the length of each multi-period commitment.\n c (nr * nd): cost of defender's each action\n h_above: attraction value when response quality is above threshold\n h_below: attraction value when response quality is below threshold\n g_above: attraction value when response quality is above threshold\n g_below: attraction value when response quality is below threshold\n dict_r: map resource to corresponding level.\n order: Order of ARA. Currently only 0 and 1 are available.\n Returns:\n prob: Probability.\n \"\"\"\n\n next_q, next_r, next_w = next_s\n\n A_actions = [0, 1, 2, 3, 4]\n\n prob = 0\n\n for a in A_actions:\n\n prob_r = attraction_h(next_r[0], a)\n\n q1 = attraction_g(next_q[0], q, d, a)\n q2 = attraction_g(1-next_q[0], q, d, a)\n prob_q = q1 / (q1 + q2)\n\n prob += a_given_s(a, q) * prob_r * prob_q\n\n return prob\n\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
from PyQt5 import QtCore from PyQt5.QtWidgets import QTableWidgetItem, QDialog from QT_view.PassportAdd import PassportAddDialog from QT_view.PassportWin import Ui_Dialog from Repository.Rep_Passport import PassportRepository class PassportQt(QDialog): def __init__(self): super(PassportQt, self).__init__() self.passport_rep = PassportRepository() self.initUI() def initUI(self): self.ui = Ui_Dialog() self.ui.setupUi(self) self.ui.tableWidget.setColumnWidth(1, 259) self.ui.tableWidget.setSelectionBehavior(1) self.ui.tableWidget.setSelectionMode(1) self.ui.pushButton.clicked.connect(self.click_add) self.ui.pushButton_2.clicked.connect(self.click_edit) self.ui.pushButton_3.clicked.connect(self.click_del) self.ui.pushButton_4.clicked.connect(self.click_cancel) passport = self.passport_rep.get_passports() self.ui.tableWidget.setRowCount(len(passport)) row = 0 for i in passport: id_passport = QTableWidgetItem(str(i['id'])) id_passport.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled ) serial_passport = QTableWidgetItem(i['serial']) serial_passport.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled ) number_passport = QTableWidgetItem(i['number']) number_passport.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled ) self.ui.tableWidget.setItem(row, 0, id_passport) self.ui.tableWidget.setItem(row, 1, serial_passport) self.ui.tableWidget.setItem(row, 2, number_passport) row += 1 def click_add(self): p_dict = {'id': -1, 'serial': "", 'number': ""} self.passport_rep.set_dict(p_dict) passport_add = PassportAddDialog(self.passport_rep) if (passport_add.exec()): passport_d = self.passport_rep.get_dict() count_row = self.ui.tableWidget.rowCount() self.ui.tableWidget.setRowCount(count_row + 1) id_passport = QTableWidgetItem(str(passport_d['id'])) id_passport.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled ) serial = QTableWidgetItem(passport_d['serial']) serial.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled ) 
number = QTableWidgetItem(passport_d['number']) number.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled ) self.ui.tableWidget.setItem(count_row, 0, id_passport) self.ui.tableWidget.setItem(count_row, 1, serial) self.ui.tableWidget.setItem(count_row, 2, number) def click_edit(self): edit_list = self.ui.tableWidget.selectedItems() if (len(edit_list)): select_row = self.ui.tableWidget.currentRow() edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1].text(), 'number': edit_list[2].text()} self.passport_rep.set_dict(edit_d) passport_edit = PassportAddDialog(self.passport_rep) if (passport_edit.exec()): passport_d = self.passport_rep.get_dict() id_passport = QTableWidgetItem(str(passport_d['id'])) id_passport.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled ) serial = QTableWidgetItem(passport_d['serial']) serial.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled ) number = QTableWidgetItem(passport_d['number']) number.setFlags( QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled ) self.ui.tableWidget.setItem(select_row, 0, id_passport) self.ui.tableWidget.setItem(select_row, 1, serial) self.ui.tableWidget.setItem(select_row, 2, number) def click_del(self): del_list = self.ui.tableWidget.selectedItems() if (len(del_list)): del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].text(), 'number': del_list[2].text()} self.passport_rep.del_passport(del_p) self.ui.tableWidget.removeRow(del_list[0].row()) def click_cancel(self): self.accept()
normal
{ "blob_id": "3f1715763a066fb337b3ff3d03e3736d0fb36b3f", "index": 7325, "step-1": "<mask token>\n\n\nclass PassportQt(QDialog):\n\n def __init__(self):\n super(PassportQt, self).__init__()\n self.passport_rep = PassportRepository()\n self.initUI()\n <mask token>\n\n def click_add(self):\n p_dict = {'id': -1, 'serial': '', 'number': ''}\n self.passport_rep.set_dict(p_dict)\n passport_add = PassportAddDialog(self.passport_rep)\n if passport_add.exec():\n passport_d = self.passport_rep.get_dict()\n count_row = self.ui.tableWidget.rowCount()\n self.ui.tableWidget.setRowCount(count_row + 1)\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\n self.ui.tableWidget.setItem(count_row, 1, serial)\n self.ui.tableWidget.setItem(count_row, 2, number)\n <mask token>\n\n def click_del(self):\n del_list = self.ui.tableWidget.selectedItems()\n if len(del_list):\n del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].\n text(), 'number': del_list[2].text()}\n self.passport_rep.del_passport(del_p)\n self.ui.tableWidget.removeRow(del_list[0].row())\n <mask token>\n", "step-2": "<mask token>\n\n\nclass PassportQt(QDialog):\n\n def __init__(self):\n super(PassportQt, self).__init__()\n self.passport_rep = PassportRepository()\n self.initUI()\n\n def initUI(self):\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n self.ui.tableWidget.setColumnWidth(1, 259)\n self.ui.tableWidget.setSelectionBehavior(1)\n self.ui.tableWidget.setSelectionMode(1)\n self.ui.pushButton.clicked.connect(self.click_add)\n self.ui.pushButton_2.clicked.connect(self.click_edit)\n 
self.ui.pushButton_3.clicked.connect(self.click_del)\n self.ui.pushButton_4.clicked.connect(self.click_cancel)\n passport = self.passport_rep.get_passports()\n self.ui.tableWidget.setRowCount(len(passport))\n row = 0\n for i in passport:\n id_passport = QTableWidgetItem(str(i['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial_passport = QTableWidgetItem(i['serial'])\n serial_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n number_passport = QTableWidgetItem(i['number'])\n number_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n self.ui.tableWidget.setItem(row, 0, id_passport)\n self.ui.tableWidget.setItem(row, 1, serial_passport)\n self.ui.tableWidget.setItem(row, 2, number_passport)\n row += 1\n\n def click_add(self):\n p_dict = {'id': -1, 'serial': '', 'number': ''}\n self.passport_rep.set_dict(p_dict)\n passport_add = PassportAddDialog(self.passport_rep)\n if passport_add.exec():\n passport_d = self.passport_rep.get_dict()\n count_row = self.ui.tableWidget.rowCount()\n self.ui.tableWidget.setRowCount(count_row + 1)\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\n self.ui.tableWidget.setItem(count_row, 1, serial)\n self.ui.tableWidget.setItem(count_row, 2, number)\n\n def click_edit(self):\n edit_list = self.ui.tableWidget.selectedItems()\n if len(edit_list):\n select_row = self.ui.tableWidget.currentRow()\n edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1\n ].text(), 'number': edit_list[2].text()}\n self.passport_rep.set_dict(edit_d)\n passport_edit = 
PassportAddDialog(self.passport_rep)\n if passport_edit.exec():\n passport_d = self.passport_rep.get_dict()\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(select_row, 0, id_passport)\n self.ui.tableWidget.setItem(select_row, 1, serial)\n self.ui.tableWidget.setItem(select_row, 2, number)\n\n def click_del(self):\n del_list = self.ui.tableWidget.selectedItems()\n if len(del_list):\n del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].\n text(), 'number': del_list[2].text()}\n self.passport_rep.del_passport(del_p)\n self.ui.tableWidget.removeRow(del_list[0].row())\n <mask token>\n", "step-3": "<mask token>\n\n\nclass PassportQt(QDialog):\n\n def __init__(self):\n super(PassportQt, self).__init__()\n self.passport_rep = PassportRepository()\n self.initUI()\n\n def initUI(self):\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n self.ui.tableWidget.setColumnWidth(1, 259)\n self.ui.tableWidget.setSelectionBehavior(1)\n self.ui.tableWidget.setSelectionMode(1)\n self.ui.pushButton.clicked.connect(self.click_add)\n self.ui.pushButton_2.clicked.connect(self.click_edit)\n self.ui.pushButton_3.clicked.connect(self.click_del)\n self.ui.pushButton_4.clicked.connect(self.click_cancel)\n passport = self.passport_rep.get_passports()\n self.ui.tableWidget.setRowCount(len(passport))\n row = 0\n for i in passport:\n id_passport = QTableWidgetItem(str(i['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial_passport = QTableWidgetItem(i['serial'])\n serial_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n number_passport = 
QTableWidgetItem(i['number'])\n number_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n self.ui.tableWidget.setItem(row, 0, id_passport)\n self.ui.tableWidget.setItem(row, 1, serial_passport)\n self.ui.tableWidget.setItem(row, 2, number_passport)\n row += 1\n\n def click_add(self):\n p_dict = {'id': -1, 'serial': '', 'number': ''}\n self.passport_rep.set_dict(p_dict)\n passport_add = PassportAddDialog(self.passport_rep)\n if passport_add.exec():\n passport_d = self.passport_rep.get_dict()\n count_row = self.ui.tableWidget.rowCount()\n self.ui.tableWidget.setRowCount(count_row + 1)\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\n self.ui.tableWidget.setItem(count_row, 1, serial)\n self.ui.tableWidget.setItem(count_row, 2, number)\n\n def click_edit(self):\n edit_list = self.ui.tableWidget.selectedItems()\n if len(edit_list):\n select_row = self.ui.tableWidget.currentRow()\n edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1\n ].text(), 'number': edit_list[2].text()}\n self.passport_rep.set_dict(edit_d)\n passport_edit = PassportAddDialog(self.passport_rep)\n if passport_edit.exec():\n passport_d = self.passport_rep.get_dict()\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n 
self.ui.tableWidget.setItem(select_row, 0, id_passport)\n self.ui.tableWidget.setItem(select_row, 1, serial)\n self.ui.tableWidget.setItem(select_row, 2, number)\n\n def click_del(self):\n del_list = self.ui.tableWidget.selectedItems()\n if len(del_list):\n del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].\n text(), 'number': del_list[2].text()}\n self.passport_rep.del_passport(del_p)\n self.ui.tableWidget.removeRow(del_list[0].row())\n\n def click_cancel(self):\n self.accept()\n", "step-4": "from PyQt5 import QtCore\nfrom PyQt5.QtWidgets import QTableWidgetItem, QDialog\nfrom QT_view.PassportAdd import PassportAddDialog\nfrom QT_view.PassportWin import Ui_Dialog\nfrom Repository.Rep_Passport import PassportRepository\n\n\nclass PassportQt(QDialog):\n\n def __init__(self):\n super(PassportQt, self).__init__()\n self.passport_rep = PassportRepository()\n self.initUI()\n\n def initUI(self):\n self.ui = Ui_Dialog()\n self.ui.setupUi(self)\n self.ui.tableWidget.setColumnWidth(1, 259)\n self.ui.tableWidget.setSelectionBehavior(1)\n self.ui.tableWidget.setSelectionMode(1)\n self.ui.pushButton.clicked.connect(self.click_add)\n self.ui.pushButton_2.clicked.connect(self.click_edit)\n self.ui.pushButton_3.clicked.connect(self.click_del)\n self.ui.pushButton_4.clicked.connect(self.click_cancel)\n passport = self.passport_rep.get_passports()\n self.ui.tableWidget.setRowCount(len(passport))\n row = 0\n for i in passport:\n id_passport = QTableWidgetItem(str(i['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial_passport = QTableWidgetItem(i['serial'])\n serial_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n number_passport = QTableWidgetItem(i['number'])\n number_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n self.ui.tableWidget.setItem(row, 0, id_passport)\n self.ui.tableWidget.setItem(row, 1, serial_passport)\n self.ui.tableWidget.setItem(row, 2, 
number_passport)\n row += 1\n\n def click_add(self):\n p_dict = {'id': -1, 'serial': '', 'number': ''}\n self.passport_rep.set_dict(p_dict)\n passport_add = PassportAddDialog(self.passport_rep)\n if passport_add.exec():\n passport_d = self.passport_rep.get_dict()\n count_row = self.ui.tableWidget.rowCount()\n self.ui.tableWidget.setRowCount(count_row + 1)\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\n self.ui.tableWidget.setItem(count_row, 1, serial)\n self.ui.tableWidget.setItem(count_row, 2, number)\n\n def click_edit(self):\n edit_list = self.ui.tableWidget.selectedItems()\n if len(edit_list):\n select_row = self.ui.tableWidget.currentRow()\n edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1\n ].text(), 'number': edit_list[2].text()}\n self.passport_rep.set_dict(edit_d)\n passport_edit = PassportAddDialog(self.passport_rep)\n if passport_edit.exec():\n passport_d = self.passport_rep.get_dict()\n id_passport = QTableWidgetItem(str(passport_d['id']))\n id_passport.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt\n .ItemIsEnabled)\n serial = QTableWidgetItem(passport_d['serial'])\n serial.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n number = QTableWidgetItem(passport_d['number'])\n number.setFlags(QtCore.Qt.ItemIsSelectable | QtCore.Qt.\n ItemIsEnabled)\n self.ui.tableWidget.setItem(select_row, 0, id_passport)\n self.ui.tableWidget.setItem(select_row, 1, serial)\n self.ui.tableWidget.setItem(select_row, 2, number)\n\n def click_del(self):\n del_list = self.ui.tableWidget.selectedItems()\n if len(del_list):\n del_p = {'id': 
int(del_list[0].text()), 'serial': del_list[1].\n text(), 'number': del_list[2].text()}\n self.passport_rep.del_passport(del_p)\n self.ui.tableWidget.removeRow(del_list[0].row())\n\n def click_cancel(self):\n self.accept()\n", "step-5": "from PyQt5 import QtCore\r\nfrom PyQt5.QtWidgets import QTableWidgetItem, QDialog\r\n\r\nfrom QT_view.PassportAdd import PassportAddDialog\r\nfrom QT_view.PassportWin import Ui_Dialog\r\n\r\nfrom Repository.Rep_Passport import PassportRepository\r\n\r\nclass PassportQt(QDialog):\r\n def __init__(self):\r\n super(PassportQt, self).__init__()\r\n self.passport_rep = PassportRepository()\r\n self.initUI()\r\n\r\n def initUI(self):\r\n self.ui = Ui_Dialog()\r\n self.ui.setupUi(self)\r\n self.ui.tableWidget.setColumnWidth(1, 259)\r\n self.ui.tableWidget.setSelectionBehavior(1)\r\n self.ui.tableWidget.setSelectionMode(1)\r\n\r\n self.ui.pushButton.clicked.connect(self.click_add)\r\n self.ui.pushButton_2.clicked.connect(self.click_edit)\r\n self.ui.pushButton_3.clicked.connect(self.click_del)\r\n self.ui.pushButton_4.clicked.connect(self.click_cancel)\r\n\r\n passport = self.passport_rep.get_passports()\r\n self.ui.tableWidget.setRowCount(len(passport))\r\n row = 0\r\n for i in passport:\r\n id_passport = QTableWidgetItem(str(i['id']))\r\n id_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n serial_passport = QTableWidgetItem(i['serial'])\r\n serial_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n number_passport = QTableWidgetItem(i['number'])\r\n number_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n self.ui.tableWidget.setItem(row, 0, id_passport)\r\n self.ui.tableWidget.setItem(row, 1, serial_passport)\r\n self.ui.tableWidget.setItem(row, 2, number_passport)\r\n row += 1\r\n\r\n def click_add(self):\r\n p_dict = {'id': -1, 'serial': \"\", 'number': \"\"}\r\n self.passport_rep.set_dict(p_dict)\r\n passport_add = 
PassportAddDialog(self.passport_rep)\r\n if (passport_add.exec()):\r\n passport_d = self.passport_rep.get_dict()\r\n count_row = self.ui.tableWidget.rowCount()\r\n self.ui.tableWidget.setRowCount(count_row + 1)\r\n id_passport = QTableWidgetItem(str(passport_d['id']))\r\n id_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n serial = QTableWidgetItem(passport_d['serial'])\r\n serial.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n number = QTableWidgetItem(passport_d['number'])\r\n number.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n self.ui.tableWidget.setItem(count_row, 0, id_passport)\r\n self.ui.tableWidget.setItem(count_row, 1, serial)\r\n self.ui.tableWidget.setItem(count_row, 2, number)\r\n\r\n def click_edit(self):\r\n edit_list = self.ui.tableWidget.selectedItems()\r\n if (len(edit_list)):\r\n select_row = self.ui.tableWidget.currentRow()\r\n edit_d = {'id': int(edit_list[0].text()), 'serial': edit_list[1].text(), 'number': edit_list[2].text()}\r\n self.passport_rep.set_dict(edit_d)\r\n passport_edit = PassportAddDialog(self.passport_rep)\r\n if (passport_edit.exec()):\r\n passport_d = self.passport_rep.get_dict()\r\n id_passport = QTableWidgetItem(str(passport_d['id']))\r\n id_passport.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n serial = QTableWidgetItem(passport_d['serial'])\r\n serial.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n number = QTableWidgetItem(passport_d['number'])\r\n number.setFlags(\r\n QtCore.Qt.ItemIsSelectable | QtCore.Qt.ItemIsEnabled\r\n )\r\n self.ui.tableWidget.setItem(select_row, 0, id_passport)\r\n self.ui.tableWidget.setItem(select_row, 1, serial)\r\n self.ui.tableWidget.setItem(select_row, 2, number)\r\n def click_del(self):\r\n del_list = self.ui.tableWidget.selectedItems()\r\n if (len(del_list)):\r\n del_p = {'id': int(del_list[0].text()), 'serial': del_list[1].text(), 
'number': del_list[2].text()}\r\n self.passport_rep.del_passport(del_p)\r\n self.ui.tableWidget.removeRow(del_list[0].row())\r\n def click_cancel(self):\r\n self.accept()", "step-ids": [ 4, 6, 7, 8, 9 ] }
[ 4, 6, 7, 8, 9 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> list.sort(table) print(table[num - 1] - table[0]) <|reserved_special_token_1|> num = int(input()) str = input().split() table = [int(i) for i in str] list.sort(table) print(table[num - 1] - table[0]) <|reserved_special_token_1|> # coding: utf-8 num = int(input()) str = input().split() table = [int(i) for i in str] list.sort(table) print(table[num-1] - table[0])
flexible
{ "blob_id": "d853964d424e628d6331b27123ad045f8d945dc0", "index": 4026, "step-1": "<mask token>\n", "step-2": "<mask token>\nlist.sort(table)\nprint(table[num - 1] - table[0])\n", "step-3": "num = int(input())\nstr = input().split()\ntable = [int(i) for i in str]\nlist.sort(table)\nprint(table[num - 1] - table[0])\n", "step-4": "# coding: utf-8\n\nnum = int(input())\nstr = input().split()\ntable = [int(i) for i in str]\nlist.sort(table)\nprint(table[num-1] - table[0])", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
#app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db' #app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True; #db = SQLAlchemy(app) # MONGODB CREATION #Creating a pymongo client client = MongoClient('localhost', 27017) #Getting the database instance db = client['mydb'] print("Database created........") #Verification print("List of databases after creating new one") print(client.list_database_names()) # DB CREATION AND INSTANTIATION # #DB -- OPTION 1 engine = create_engine('sqlite:///test.db', echo = True) meta = MetaData() # Database Schema for Item and User # Items = Table( 'Items', meta, Column('id', Integer, primary_key = True), Column('product_name', String), Column('price', Float), Column('quantity', Integer) ) Users = Table( 'Users', meta, Column('firstname', String), Column('lastname', String), Column('email', String), Column('passwd', String), Column('phone', Integer) ) meta.create_all(engine) #class Item(db.Model): # id = db.Column(db.Integer, primary_key = True) # product = db.Column(db.String(200)) # price = db.Column(db.Integer)
normal
{ "blob_id": "5b7567129d447ae2b75f4a8f9c26127f8b7553ec", "index": 7818, "step-1": "<mask token>\n", "step-2": "<mask token>\nprint('Database created........')\nprint('List of databases after creating new one')\nprint(client.list_database_names())\n<mask token>\nmeta.create_all(engine)\n", "step-3": "client = MongoClient('localhost', 27017)\ndb = client['mydb']\nprint('Database created........')\nprint('List of databases after creating new one')\nprint(client.list_database_names())\nengine = create_engine('sqlite:///test.db', echo=True)\nmeta = MetaData()\nItems = Table('Items', meta, Column('id', Integer, primary_key=True),\n Column('product_name', String), Column('price', Float), Column(\n 'quantity', Integer))\nUsers = Table('Users', meta, Column('firstname', String), Column('lastname',\n String), Column('email', String), Column('passwd', String), Column(\n 'phone', Integer))\nmeta.create_all(engine)\n", "step-4": "#app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'\n#app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True;\n#db = SQLAlchemy(app)\n\n\n# MONGODB CREATION\n#Creating a pymongo client\nclient = MongoClient('localhost', 27017)\n\n#Getting the database instance\ndb = client['mydb']\nprint(\"Database created........\")\n\n#Verification\nprint(\"List of databases after creating new one\")\nprint(client.list_database_names())\n\n# DB CREATION AND INSTANTIATION #\n#DB -- OPTION 1\nengine = create_engine('sqlite:///test.db', echo = True)\nmeta = MetaData()\n\n# Database Schema for Item and User #\nItems = Table(\n 'Items', meta, \n Column('id', Integer, primary_key = True), \n Column('product_name', String), \n Column('price', Float), \n Column('quantity', Integer)\n)\nUsers = Table(\n 'Users', meta,\n Column('firstname', String),\n Column('lastname', String),\n Column('email', String),\n Column('passwd', String),\n Column('phone', Integer)\n)\nmeta.create_all(engine)\n\n\n#class Item(db.Model):\n# id = db.Column(db.Integer, primary_key = True)\n# 
product = db.Column(db.String(200))\n# price = db.Column(db.Integer)", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> def cv_diff_value(prevalue, postvalue): return postvalue - prevalue def cv_diff_rate(prevalue, postvalue): return (postvalue - prevalue) / prevalue * 100 def cv_maN_value(cv, N): str_replay = 'N의 값을 다시 입력해주세요' if 3 <= N <= 5: return cv.rolling(window=N).mean() else: return str_replay def cv_maN_rate(cv, N): str_replay = 'N의 값을 다시 입력해주세요' if 3 <= N <= 5: for i in range(cv.index[0], len(cv) + cv.index[0], 1): cv_list.append(cv[i]) for i in range(len(cv_list) - 1): if cv_list[i] != 0: cv_ma_rate.append((cv_list[i + 1] - cv_list[i]) / cv_list[i ] * 100) else: cv_ma_rate.append(0) for i in range(len(cv_ma_rate)): cv_ma_rate_round.append(round(cv_ma_rate[i], 2)) return cv_ma_rate_round else: return str_replay def ud_Nd(cvdv, N): cvdv_list = [] un_Nd_list = [] for i in range(cvdv.index[0], len(cvdv) + cvdv.index[0], 1): cvdv_list.append(cvdv[i]) for i in range(N - 2): un_Nd_list.append(0) for i in range(len(cvdv_list) - N + 1): increase_count = decrease_count = nothing_count = 0 for j in range(N - 1): if cvdv_list[i + j] < cvdv_list[i + j + 1]: increase_count += 1 elif cvdv_list[i + j] > cvdv_list[i + j + 1]: decrease_count += 1 else: nothing_count += 1 if increase_count == N - 1: un_Nd_list.append(1) elif decrease_count == N - 1: un_Nd_list.append(-1) else: un_Nd_list.append(0) un_Nd_list.append(0) return un_Nd_list <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def cv_diff_value(prevalue, postvalue): return postvalue - prevalue def cv_diff_rate(prevalue, postvalue): return (postvalue - prevalue) / prevalue * 100 def cv_maN_value(cv, N): str_replay = 'N의 값을 다시 입력해주세요' if 3 <= N <= 5: return cv.rolling(window=N).mean() else: return str_replay def cv_maN_rate(cv, N): str_replay = 'N의 값을 다시 입력해주세요' if 3 <= N <= 5: for i in range(cv.index[0], len(cv) + cv.index[0], 1): cv_list.append(cv[i]) for i in range(len(cv_list) - 1): if cv_list[i] != 0: cv_ma_rate.append((cv_list[i + 1] - cv_list[i]) / cv_list[i ] * 
100) else: cv_ma_rate.append(0) for i in range(len(cv_ma_rate)): cv_ma_rate_round.append(round(cv_ma_rate[i], 2)) return cv_ma_rate_round else: return str_replay def ud_Nd(cvdv, N): cvdv_list = [] un_Nd_list = [] for i in range(cvdv.index[0], len(cvdv) + cvdv.index[0], 1): cvdv_list.append(cvdv[i]) for i in range(N - 2): un_Nd_list.append(0) for i in range(len(cvdv_list) - N + 1): increase_count = decrease_count = nothing_count = 0 for j in range(N - 1): if cvdv_list[i + j] < cvdv_list[i + j + 1]: increase_count += 1 elif cvdv_list[i + j] > cvdv_list[i + j + 1]: decrease_count += 1 else: nothing_count += 1 if increase_count == N - 1: un_Nd_list.append(1) elif decrease_count == N - 1: un_Nd_list.append(-1) else: un_Nd_list.append(0) un_Nd_list.append(0) return un_Nd_list <|reserved_special_token_0|> while True: cv_amount = [0] cv_rate = [0] cv_ma_rate = [0] un_Nd_plus = un_Nd_minus = 0 result3 = [] result4 = [] cv_list = [] cv_ma_rate_round = [] unNd_list = [] stock_name = input('종목을 입력해주세요 : ') Number = int(input('N의 값을 입력해주세요 : ')) one_stock = stock_DataFrame.loc[stock_DataFrame['stockname'] == stock_name] print(one_stock) close_value = one_stock['close_value'] one_stock_copy = one_stock.copy() try: for i in range(close_value.index[0], len(close_value) + close_value .index[0] - 1, 1): result = cv_diff_value(close_value[i], close_value[i + 1]) cv_amount.append(result) except IndexError: print('존재하지 않는 항목') continue one_stock_copy['cv_diff_value'] = cv_amount for i in range(close_value.index[0], len(close_value) + close_value. 
index[0] - 1, 1): result2 = round(cv_diff_rate(close_value[i], close_value[i + 1]), 2) cv_rate.append(result2) one_stock_copy['cv_diff_rate'] = cv_rate res3 = cv_maN_value(close_value, Number) if isinstance(res3, str): print(res3) continue else: result3 = res3.fillna(0) one_stock_copy['cv_maN_value'] = result3 ma_value = one_stock_copy['cv_maN_value'] result4 = cv_maN_rate(ma_value, Number) if isinstance(result4, str): print(result4) continue else: one_stock_copy['cv_maN_rate'] = result4 result5 = ud_Nd(close_value, Number) one_stock_copy['ud_Nd'] = result5 un_Nd_value = one_stock_copy['ud_Nd'] for i in range(un_Nd_value.index[0], len(un_Nd_value) + un_Nd_value. index[0], 1): unNd_list.append(un_Nd_value[i]) for i in range(len(unNd_list)): if unNd_list[i] == 1: un_Nd_plus += 1 if unNd_list[i] == -1: un_Nd_minus += 1 print(un_Nd_plus) print(un_Nd_minus) if un_Nd_plus >= 20 and un_Nd_minus >= 20: break else: print('un_Nd의 1 or -1 발생횟수가 둘 다 20을 넘지 않았습니다') continue one_stock_copy.to_csv('stock_history_added.csv', encoding='ms949', index=False) print('Data가 성공적으로 추가됐습니다') csv_file_read.close() <|reserved_special_token_1|> <|reserved_special_token_0|> def cv_diff_value(prevalue, postvalue): return postvalue - prevalue def cv_diff_rate(prevalue, postvalue): return (postvalue - prevalue) / prevalue * 100 def cv_maN_value(cv, N): str_replay = 'N의 값을 다시 입력해주세요' if 3 <= N <= 5: return cv.rolling(window=N).mean() else: return str_replay def cv_maN_rate(cv, N): str_replay = 'N의 값을 다시 입력해주세요' if 3 <= N <= 5: for i in range(cv.index[0], len(cv) + cv.index[0], 1): cv_list.append(cv[i]) for i in range(len(cv_list) - 1): if cv_list[i] != 0: cv_ma_rate.append((cv_list[i + 1] - cv_list[i]) / cv_list[i ] * 100) else: cv_ma_rate.append(0) for i in range(len(cv_ma_rate)): cv_ma_rate_round.append(round(cv_ma_rate[i], 2)) return cv_ma_rate_round else: return str_replay def ud_Nd(cvdv, N): cvdv_list = [] un_Nd_list = [] for i in range(cvdv.index[0], len(cvdv) + cvdv.index[0], 1): 
cvdv_list.append(cvdv[i]) for i in range(N - 2): un_Nd_list.append(0) for i in range(len(cvdv_list) - N + 1): increase_count = decrease_count = nothing_count = 0 for j in range(N - 1): if cvdv_list[i + j] < cvdv_list[i + j + 1]: increase_count += 1 elif cvdv_list[i + j] > cvdv_list[i + j + 1]: decrease_count += 1 else: nothing_count += 1 if increase_count == N - 1: un_Nd_list.append(1) elif decrease_count == N - 1: un_Nd_list.append(-1) else: un_Nd_list.append(0) un_Nd_list.append(0) return un_Nd_list csv_file_read = open('stock_history.csv', 'r', encoding='euc-kr') stock_data = pd.read_csv(csv_file_read) df = pd.DataFrame(stock_data) stock_DataFrame = df.dropna(axis=1) while True: cv_amount = [0] cv_rate = [0] cv_ma_rate = [0] un_Nd_plus = un_Nd_minus = 0 result3 = [] result4 = [] cv_list = [] cv_ma_rate_round = [] unNd_list = [] stock_name = input('종목을 입력해주세요 : ') Number = int(input('N의 값을 입력해주세요 : ')) one_stock = stock_DataFrame.loc[stock_DataFrame['stockname'] == stock_name] print(one_stock) close_value = one_stock['close_value'] one_stock_copy = one_stock.copy() try: for i in range(close_value.index[0], len(close_value) + close_value .index[0] - 1, 1): result = cv_diff_value(close_value[i], close_value[i + 1]) cv_amount.append(result) except IndexError: print('존재하지 않는 항목') continue one_stock_copy['cv_diff_value'] = cv_amount for i in range(close_value.index[0], len(close_value) + close_value. 
index[0] - 1, 1): result2 = round(cv_diff_rate(close_value[i], close_value[i + 1]), 2) cv_rate.append(result2) one_stock_copy['cv_diff_rate'] = cv_rate res3 = cv_maN_value(close_value, Number) if isinstance(res3, str): print(res3) continue else: result3 = res3.fillna(0) one_stock_copy['cv_maN_value'] = result3 ma_value = one_stock_copy['cv_maN_value'] result4 = cv_maN_rate(ma_value, Number) if isinstance(result4, str): print(result4) continue else: one_stock_copy['cv_maN_rate'] = result4 result5 = ud_Nd(close_value, Number) one_stock_copy['ud_Nd'] = result5 un_Nd_value = one_stock_copy['ud_Nd'] for i in range(un_Nd_value.index[0], len(un_Nd_value) + un_Nd_value. index[0], 1): unNd_list.append(un_Nd_value[i]) for i in range(len(unNd_list)): if unNd_list[i] == 1: un_Nd_plus += 1 if unNd_list[i] == -1: un_Nd_minus += 1 print(un_Nd_plus) print(un_Nd_minus) if un_Nd_plus >= 20 and un_Nd_minus >= 20: break else: print('un_Nd의 1 or -1 발생횟수가 둘 다 20을 넘지 않았습니다') continue one_stock_copy.to_csv('stock_history_added.csv', encoding='ms949', index=False) print('Data가 성공적으로 추가됐습니다') csv_file_read.close() <|reserved_special_token_1|> import pandas as pd def cv_diff_value(prevalue, postvalue): return postvalue - prevalue def cv_diff_rate(prevalue, postvalue): return (postvalue - prevalue) / prevalue * 100 def cv_maN_value(cv, N): str_replay = 'N의 값을 다시 입력해주세요' if 3 <= N <= 5: return cv.rolling(window=N).mean() else: return str_replay def cv_maN_rate(cv, N): str_replay = 'N의 값을 다시 입력해주세요' if 3 <= N <= 5: for i in range(cv.index[0], len(cv) + cv.index[0], 1): cv_list.append(cv[i]) for i in range(len(cv_list) - 1): if cv_list[i] != 0: cv_ma_rate.append((cv_list[i + 1] - cv_list[i]) / cv_list[i ] * 100) else: cv_ma_rate.append(0) for i in range(len(cv_ma_rate)): cv_ma_rate_round.append(round(cv_ma_rate[i], 2)) return cv_ma_rate_round else: return str_replay def ud_Nd(cvdv, N): cvdv_list = [] un_Nd_list = [] for i in range(cvdv.index[0], len(cvdv) + cvdv.index[0], 1): 
cvdv_list.append(cvdv[i]) for i in range(N - 2): un_Nd_list.append(0) for i in range(len(cvdv_list) - N + 1): increase_count = decrease_count = nothing_count = 0 for j in range(N - 1): if cvdv_list[i + j] < cvdv_list[i + j + 1]: increase_count += 1 elif cvdv_list[i + j] > cvdv_list[i + j + 1]: decrease_count += 1 else: nothing_count += 1 if increase_count == N - 1: un_Nd_list.append(1) elif decrease_count == N - 1: un_Nd_list.append(-1) else: un_Nd_list.append(0) un_Nd_list.append(0) return un_Nd_list csv_file_read = open('stock_history.csv', 'r', encoding='euc-kr') stock_data = pd.read_csv(csv_file_read) df = pd.DataFrame(stock_data) stock_DataFrame = df.dropna(axis=1) while True: cv_amount = [0] cv_rate = [0] cv_ma_rate = [0] un_Nd_plus = un_Nd_minus = 0 result3 = [] result4 = [] cv_list = [] cv_ma_rate_round = [] unNd_list = [] stock_name = input('종목을 입력해주세요 : ') Number = int(input('N의 값을 입력해주세요 : ')) one_stock = stock_DataFrame.loc[stock_DataFrame['stockname'] == stock_name] print(one_stock) close_value = one_stock['close_value'] one_stock_copy = one_stock.copy() try: for i in range(close_value.index[0], len(close_value) + close_value .index[0] - 1, 1): result = cv_diff_value(close_value[i], close_value[i + 1]) cv_amount.append(result) except IndexError: print('존재하지 않는 항목') continue one_stock_copy['cv_diff_value'] = cv_amount for i in range(close_value.index[0], len(close_value) + close_value. 
index[0] - 1, 1): result2 = round(cv_diff_rate(close_value[i], close_value[i + 1]), 2) cv_rate.append(result2) one_stock_copy['cv_diff_rate'] = cv_rate res3 = cv_maN_value(close_value, Number) if isinstance(res3, str): print(res3) continue else: result3 = res3.fillna(0) one_stock_copy['cv_maN_value'] = result3 ma_value = one_stock_copy['cv_maN_value'] result4 = cv_maN_rate(ma_value, Number) if isinstance(result4, str): print(result4) continue else: one_stock_copy['cv_maN_rate'] = result4 result5 = ud_Nd(close_value, Number) one_stock_copy['ud_Nd'] = result5 un_Nd_value = one_stock_copy['ud_Nd'] for i in range(un_Nd_value.index[0], len(un_Nd_value) + un_Nd_value. index[0], 1): unNd_list.append(un_Nd_value[i]) for i in range(len(unNd_list)): if unNd_list[i] == 1: un_Nd_plus += 1 if unNd_list[i] == -1: un_Nd_minus += 1 print(un_Nd_plus) print(un_Nd_minus) if un_Nd_plus >= 20 and un_Nd_minus >= 20: break else: print('un_Nd의 1 or -1 발생횟수가 둘 다 20을 넘지 않았습니다') continue one_stock_copy.to_csv('stock_history_added.csv', encoding='ms949', index=False) print('Data가 성공적으로 추가됐습니다') csv_file_read.close() <|reserved_special_token_1|> import pandas as pd # 칼럼값으로 추가 - 함수 작성 # 1. cv_diff_value : 종가 일간 변화량 def cv_diff_value(prevalue, postvalue): return postvalue - prevalue # 2. cv_diff_rate : 종가 일간 변화율 def cv_diff_rate(prevalue, postvalue): return (postvalue - prevalue) / prevalue * 100 # 3. cv_maN_value : 종가의 N일 이동평균 def cv_maN_value(cv, N): # min_period 옵션을 이용하여 할 수도 있음 // 데이터가 최소 x 개라도 존재하면 이동평균을 구함 str_replay = "N의 값을 다시 입력해주세요" if 3 <= N <= 5: return cv.rolling(window=N).mean() else: return str_replay # 4. 
cv_maN_rate : 종가의 N일 이동평균의 일간 변화율 def cv_maN_rate(cv, N): str_replay = "N의 값을 다시 입력해주세요" if 3 <= N <= 5: # DataFrame 을 list 로 변환 for i in range(cv.index[0], (len(cv)+cv.index[0]), 1): cv_list.append(cv[i]) # 종가의 N일 이동평균의 일간 변화율을 list 에 담기 for i in range(len(cv_list)-1): if cv_list[i] != 0: cv_ma_rate.append((cv_list[i+1] - cv_list[i]) / cv_list[i] * 100) else: cv_ma_rate.append(0) # 종가의 N일 이동평균의 일간 변화율을 소수점 2째자리 까지 표현 for i in range(len(cv_ma_rate)): cv_ma_rate_round.append(round(cv_ma_rate[i], 2)) return cv_ma_rate_round else: return str_replay # 5. ud_Nd : (a) N일 연속 증가 1, (b) N일 연속 하락 -1, (c) 그렇지 않은 날 0 def ud_Nd(cvdv, N): cvdv_list = [] # list un_Nd_list = [] # list # print(cvdv) # 종가 # print(len(cvdv)) # 길이 : 230 # DataFrame 을 list 로 변환 for i in range(cvdv.index[0], (len(cvdv)+cvdv.index[0]), 1): cvdv_list.append(cvdv[i]) # 알 수 없는 정보는 '0'으로 두겠다 for i in range(N-2): un_Nd_list.append(0) # 상승, 하락, 그렇지 않은 날 계산 for i in range(len(cvdv_list)-N+1): # 0 ~ 225 increase_count = decrease_count = nothing_count = 0 for j in range(N-1): # 0 ~ 3 if cvdv_list[i + j] < cvdv_list[i + j + 1]: # 종가가 상승한 날 increase_count += 1 elif cvdv_list[i + j] > cvdv_list[i + j + 1]: # 종가가 하락한 날 decrease_count += 1 else: # 종가가 상승도 하락도 아닌날 nothing_count += 1 # N일 연속 종가가 상승, 하락, 그렇지 않은 날 판단하고 (N-1)날에 삽입 if increase_count == (N - 1): un_Nd_list.append(1) elif decrease_count == (N - 1): un_Nd_list.append(-1) else: un_Nd_list.append(0) un_Nd_list.append(0) # 마지막날은 판단할 수 없어서 '0' 으로 삽입 return un_Nd_list # csv 파일 읽어오기 // DataFrame 으로 변경 // NaN값 제거 csv_file_read = open('stock_history.csv', 'r', encoding='euc-kr') stock_data = pd.read_csv(csv_file_read) df = pd.DataFrame(stock_data) stock_DataFrame = df.dropna(axis=1) # 반복 시작 while True: # 초기값 cv_amount = [0] # 종가 일간 변화량을 저장할 list cv_rate = [0] # 종가 일간 변화율을 저장할 list cv_ma_rate = [0] # 종가의 N일 이동평균의 일간 변화율을 저장할 list un_Nd_plus = un_Nd_minus = 0 # 20회이상 판단할 count 변수 result3 = [] # 종가의 N일 이동평균을 저장할 list result4 = [] # 종가 N일 이동평균의 일간 변화율 cv_list = [] # 종가의 
N일 이동평균의 일간 변화율을 저장할 list cv_ma_rate_round = [] # 종가의 N일 이동평균의 일간 변화율을 소수점 2자리로 저장할 list unNd_list = [] # 종가의 N일 증감을 저장할 list # 종목을 선택하고 N의 값을 입력받는다 stock_name = input("종목을 입력해주세요 : ") Number = int(input("N의 값을 입력해주세요 : ")) one_stock = stock_DataFrame.loc[stock_DataFrame["stockname"] == stock_name] print(one_stock) close_value = one_stock["close_value"] # 종가만 가져오기 one_stock_copy = one_stock.copy() # DataFrame 에 열을 추가하기 위해 복사 # 종가 일간 변화량 try: for i in range(close_value.index[0], (len(close_value)+close_value.index[0])-1, 1): result = cv_diff_value(close_value[i], close_value[i+1]) cv_amount.append(result) except IndexError: print("존재하지 않는 항목") continue one_stock_copy["cv_diff_value"] = cv_amount # DataFrame 에 데이터 추가 # print(one_stock_copy) # 종가 일간 변화율 // 종가 일간 변화량과 마찬가지 // 소수점 2자리 표현 for i in range(close_value.index[0], (len(close_value)+close_value.index[0])-1, 1): result2 = round(cv_diff_rate(close_value[i], close_value[i+1]), 2) cv_rate.append(result2) one_stock_copy["cv_diff_rate"] = cv_rate # DataFrame 에 데이터 추가 # print(one_stock_copy) # 종가 N일 이동평균 res3 = cv_maN_value(close_value, Number) if isinstance(res3, str): print(res3) continue else: result3 = res3.fillna(0) # NaN값을 0으로 치환 one_stock_copy["cv_maN_value"] = result3 # print(one_stock_copy) # 종가 N일 이동평균의 일간 변화율 ma_value = one_stock_copy["cv_maN_value"] # 종가 N일 이동평균 가져오기 result4 = cv_maN_rate(ma_value, Number) if isinstance(result4, str): print(result4) continue else: one_stock_copy["cv_maN_rate"] = result4 # print(one_stock_copy) # N일 연속 상승, 하락, 그렇지 않은 날 파악 result5 = ud_Nd(close_value, Number) one_stock_copy["ud_Nd"] = result5 # un_Nd = 1, -1이 20회 이상 발생하도록 N을 3 ~ 5로 조정, 종목을 변경 un_Nd_value = one_stock_copy["ud_Nd"] # N일 연속되는 증감 column 가져오기 # DataFrame 을 list 로 변환 for i in range(un_Nd_value.index[0], (len(un_Nd_value)+un_Nd_value.index[0]), 1): unNd_list.append(un_Nd_value[i]) # 20회 이상 발생하는지 판단 for i in range(len(unNd_list)): if unNd_list[i] == 1: un_Nd_plus += 1 if unNd_list[i] == -1: un_Nd_minus += 1 
print(un_Nd_plus) print(un_Nd_minus) # 발생했다면 반복문을 종료하고 발생하지 않았다면 N을 조정하거나 종목을 변경한다 if un_Nd_plus >= 20 and un_Nd_minus >= 20: break else: print("un_Nd의 1 or -1 발생횟수가 둘 다 20을 넘지 않았습니다") continue # 반복문이 끝나고 20회이상 발생하는 조건을 만족하면 csv 파일(stock_history_added.csv)로 저장 one_stock_copy.to_csv('stock_history_added.csv', encoding='ms949', index=False) print("Data가 성공적으로 추가됐습니다") csv_file_read.close()
flexible
{ "blob_id": "a967b97f090a71f28e33c5ca54cb64db3967aea3", "index": 7002, "step-1": "<mask token>\n\n\ndef cv_diff_value(prevalue, postvalue):\n return postvalue - prevalue\n\n\ndef cv_diff_rate(prevalue, postvalue):\n return (postvalue - prevalue) / prevalue * 100\n\n\ndef cv_maN_value(cv, N):\n str_replay = 'N의 값을 다시 입력해주세요'\n if 3 <= N <= 5:\n return cv.rolling(window=N).mean()\n else:\n return str_replay\n\n\ndef cv_maN_rate(cv, N):\n str_replay = 'N의 값을 다시 입력해주세요'\n if 3 <= N <= 5:\n for i in range(cv.index[0], len(cv) + cv.index[0], 1):\n cv_list.append(cv[i])\n for i in range(len(cv_list) - 1):\n if cv_list[i] != 0:\n cv_ma_rate.append((cv_list[i + 1] - cv_list[i]) / cv_list[i\n ] * 100)\n else:\n cv_ma_rate.append(0)\n for i in range(len(cv_ma_rate)):\n cv_ma_rate_round.append(round(cv_ma_rate[i], 2))\n return cv_ma_rate_round\n else:\n return str_replay\n\n\ndef ud_Nd(cvdv, N):\n cvdv_list = []\n un_Nd_list = []\n for i in range(cvdv.index[0], len(cvdv) + cvdv.index[0], 1):\n cvdv_list.append(cvdv[i])\n for i in range(N - 2):\n un_Nd_list.append(0)\n for i in range(len(cvdv_list) - N + 1):\n increase_count = decrease_count = nothing_count = 0\n for j in range(N - 1):\n if cvdv_list[i + j] < cvdv_list[i + j + 1]:\n increase_count += 1\n elif cvdv_list[i + j] > cvdv_list[i + j + 1]:\n decrease_count += 1\n else:\n nothing_count += 1\n if increase_count == N - 1:\n un_Nd_list.append(1)\n elif decrease_count == N - 1:\n un_Nd_list.append(-1)\n else:\n un_Nd_list.append(0)\n un_Nd_list.append(0)\n return un_Nd_list\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef cv_diff_value(prevalue, postvalue):\n return postvalue - prevalue\n\n\ndef cv_diff_rate(prevalue, postvalue):\n return (postvalue - prevalue) / prevalue * 100\n\n\ndef cv_maN_value(cv, N):\n str_replay = 'N의 값을 다시 입력해주세요'\n if 3 <= N <= 5:\n return cv.rolling(window=N).mean()\n else:\n return str_replay\n\n\ndef cv_maN_rate(cv, N):\n str_replay = 'N의 값을 다시 입력해주세요'\n if 3 <= N <= 5:\n for i in 
range(cv.index[0], len(cv) + cv.index[0], 1):\n cv_list.append(cv[i])\n for i in range(len(cv_list) - 1):\n if cv_list[i] != 0:\n cv_ma_rate.append((cv_list[i + 1] - cv_list[i]) / cv_list[i\n ] * 100)\n else:\n cv_ma_rate.append(0)\n for i in range(len(cv_ma_rate)):\n cv_ma_rate_round.append(round(cv_ma_rate[i], 2))\n return cv_ma_rate_round\n else:\n return str_replay\n\n\ndef ud_Nd(cvdv, N):\n cvdv_list = []\n un_Nd_list = []\n for i in range(cvdv.index[0], len(cvdv) + cvdv.index[0], 1):\n cvdv_list.append(cvdv[i])\n for i in range(N - 2):\n un_Nd_list.append(0)\n for i in range(len(cvdv_list) - N + 1):\n increase_count = decrease_count = nothing_count = 0\n for j in range(N - 1):\n if cvdv_list[i + j] < cvdv_list[i + j + 1]:\n increase_count += 1\n elif cvdv_list[i + j] > cvdv_list[i + j + 1]:\n decrease_count += 1\n else:\n nothing_count += 1\n if increase_count == N - 1:\n un_Nd_list.append(1)\n elif decrease_count == N - 1:\n un_Nd_list.append(-1)\n else:\n un_Nd_list.append(0)\n un_Nd_list.append(0)\n return un_Nd_list\n\n\n<mask token>\nwhile True:\n cv_amount = [0]\n cv_rate = [0]\n cv_ma_rate = [0]\n un_Nd_plus = un_Nd_minus = 0\n result3 = []\n result4 = []\n cv_list = []\n cv_ma_rate_round = []\n unNd_list = []\n stock_name = input('종목을 입력해주세요 : ')\n Number = int(input('N의 값을 입력해주세요 : '))\n one_stock = stock_DataFrame.loc[stock_DataFrame['stockname'] == stock_name]\n print(one_stock)\n close_value = one_stock['close_value']\n one_stock_copy = one_stock.copy()\n try:\n for i in range(close_value.index[0], len(close_value) + close_value\n .index[0] - 1, 1):\n result = cv_diff_value(close_value[i], close_value[i + 1])\n cv_amount.append(result)\n except IndexError:\n print('존재하지 않는 항목')\n continue\n one_stock_copy['cv_diff_value'] = cv_amount\n for i in range(close_value.index[0], len(close_value) + close_value.\n index[0] - 1, 1):\n result2 = round(cv_diff_rate(close_value[i], close_value[i + 1]), 2)\n cv_rate.append(result2)\n 
one_stock_copy['cv_diff_rate'] = cv_rate\n res3 = cv_maN_value(close_value, Number)\n if isinstance(res3, str):\n print(res3)\n continue\n else:\n result3 = res3.fillna(0)\n one_stock_copy['cv_maN_value'] = result3\n ma_value = one_stock_copy['cv_maN_value']\n result4 = cv_maN_rate(ma_value, Number)\n if isinstance(result4, str):\n print(result4)\n continue\n else:\n one_stock_copy['cv_maN_rate'] = result4\n result5 = ud_Nd(close_value, Number)\n one_stock_copy['ud_Nd'] = result5\n un_Nd_value = one_stock_copy['ud_Nd']\n for i in range(un_Nd_value.index[0], len(un_Nd_value) + un_Nd_value.\n index[0], 1):\n unNd_list.append(un_Nd_value[i])\n for i in range(len(unNd_list)):\n if unNd_list[i] == 1:\n un_Nd_plus += 1\n if unNd_list[i] == -1:\n un_Nd_minus += 1\n print(un_Nd_plus)\n print(un_Nd_minus)\n if un_Nd_plus >= 20 and un_Nd_minus >= 20:\n break\n else:\n print('un_Nd의 1 or -1 발생횟수가 둘 다 20을 넘지 않았습니다')\n continue\none_stock_copy.to_csv('stock_history_added.csv', encoding='ms949', index=False)\nprint('Data가 성공적으로 추가됐습니다')\ncsv_file_read.close()\n", "step-3": "<mask token>\n\n\ndef cv_diff_value(prevalue, postvalue):\n return postvalue - prevalue\n\n\ndef cv_diff_rate(prevalue, postvalue):\n return (postvalue - prevalue) / prevalue * 100\n\n\ndef cv_maN_value(cv, N):\n str_replay = 'N의 값을 다시 입력해주세요'\n if 3 <= N <= 5:\n return cv.rolling(window=N).mean()\n else:\n return str_replay\n\n\ndef cv_maN_rate(cv, N):\n str_replay = 'N의 값을 다시 입력해주세요'\n if 3 <= N <= 5:\n for i in range(cv.index[0], len(cv) + cv.index[0], 1):\n cv_list.append(cv[i])\n for i in range(len(cv_list) - 1):\n if cv_list[i] != 0:\n cv_ma_rate.append((cv_list[i + 1] - cv_list[i]) / cv_list[i\n ] * 100)\n else:\n cv_ma_rate.append(0)\n for i in range(len(cv_ma_rate)):\n cv_ma_rate_round.append(round(cv_ma_rate[i], 2))\n return cv_ma_rate_round\n else:\n return str_replay\n\n\ndef ud_Nd(cvdv, N):\n cvdv_list = []\n un_Nd_list = []\n for i in range(cvdv.index[0], len(cvdv) + cvdv.index[0], 1):\n 
cvdv_list.append(cvdv[i])\n for i in range(N - 2):\n un_Nd_list.append(0)\n for i in range(len(cvdv_list) - N + 1):\n increase_count = decrease_count = nothing_count = 0\n for j in range(N - 1):\n if cvdv_list[i + j] < cvdv_list[i + j + 1]:\n increase_count += 1\n elif cvdv_list[i + j] > cvdv_list[i + j + 1]:\n decrease_count += 1\n else:\n nothing_count += 1\n if increase_count == N - 1:\n un_Nd_list.append(1)\n elif decrease_count == N - 1:\n un_Nd_list.append(-1)\n else:\n un_Nd_list.append(0)\n un_Nd_list.append(0)\n return un_Nd_list\n\n\ncsv_file_read = open('stock_history.csv', 'r', encoding='euc-kr')\nstock_data = pd.read_csv(csv_file_read)\ndf = pd.DataFrame(stock_data)\nstock_DataFrame = df.dropna(axis=1)\nwhile True:\n cv_amount = [0]\n cv_rate = [0]\n cv_ma_rate = [0]\n un_Nd_plus = un_Nd_minus = 0\n result3 = []\n result4 = []\n cv_list = []\n cv_ma_rate_round = []\n unNd_list = []\n stock_name = input('종목을 입력해주세요 : ')\n Number = int(input('N의 값을 입력해주세요 : '))\n one_stock = stock_DataFrame.loc[stock_DataFrame['stockname'] == stock_name]\n print(one_stock)\n close_value = one_stock['close_value']\n one_stock_copy = one_stock.copy()\n try:\n for i in range(close_value.index[0], len(close_value) + close_value\n .index[0] - 1, 1):\n result = cv_diff_value(close_value[i], close_value[i + 1])\n cv_amount.append(result)\n except IndexError:\n print('존재하지 않는 항목')\n continue\n one_stock_copy['cv_diff_value'] = cv_amount\n for i in range(close_value.index[0], len(close_value) + close_value.\n index[0] - 1, 1):\n result2 = round(cv_diff_rate(close_value[i], close_value[i + 1]), 2)\n cv_rate.append(result2)\n one_stock_copy['cv_diff_rate'] = cv_rate\n res3 = cv_maN_value(close_value, Number)\n if isinstance(res3, str):\n print(res3)\n continue\n else:\n result3 = res3.fillna(0)\n one_stock_copy['cv_maN_value'] = result3\n ma_value = one_stock_copy['cv_maN_value']\n result4 = cv_maN_rate(ma_value, Number)\n if isinstance(result4, str):\n print(result4)\n continue\n 
else:\n one_stock_copy['cv_maN_rate'] = result4\n result5 = ud_Nd(close_value, Number)\n one_stock_copy['ud_Nd'] = result5\n un_Nd_value = one_stock_copy['ud_Nd']\n for i in range(un_Nd_value.index[0], len(un_Nd_value) + un_Nd_value.\n index[0], 1):\n unNd_list.append(un_Nd_value[i])\n for i in range(len(unNd_list)):\n if unNd_list[i] == 1:\n un_Nd_plus += 1\n if unNd_list[i] == -1:\n un_Nd_minus += 1\n print(un_Nd_plus)\n print(un_Nd_minus)\n if un_Nd_plus >= 20 and un_Nd_minus >= 20:\n break\n else:\n print('un_Nd의 1 or -1 발생횟수가 둘 다 20을 넘지 않았습니다')\n continue\none_stock_copy.to_csv('stock_history_added.csv', encoding='ms949', index=False)\nprint('Data가 성공적으로 추가됐습니다')\ncsv_file_read.close()\n", "step-4": "import pandas as pd\n\n\ndef cv_diff_value(prevalue, postvalue):\n return postvalue - prevalue\n\n\ndef cv_diff_rate(prevalue, postvalue):\n return (postvalue - prevalue) / prevalue * 100\n\n\ndef cv_maN_value(cv, N):\n str_replay = 'N의 값을 다시 입력해주세요'\n if 3 <= N <= 5:\n return cv.rolling(window=N).mean()\n else:\n return str_replay\n\n\ndef cv_maN_rate(cv, N):\n str_replay = 'N의 값을 다시 입력해주세요'\n if 3 <= N <= 5:\n for i in range(cv.index[0], len(cv) + cv.index[0], 1):\n cv_list.append(cv[i])\n for i in range(len(cv_list) - 1):\n if cv_list[i] != 0:\n cv_ma_rate.append((cv_list[i + 1] - cv_list[i]) / cv_list[i\n ] * 100)\n else:\n cv_ma_rate.append(0)\n for i in range(len(cv_ma_rate)):\n cv_ma_rate_round.append(round(cv_ma_rate[i], 2))\n return cv_ma_rate_round\n else:\n return str_replay\n\n\ndef ud_Nd(cvdv, N):\n cvdv_list = []\n un_Nd_list = []\n for i in range(cvdv.index[0], len(cvdv) + cvdv.index[0], 1):\n cvdv_list.append(cvdv[i])\n for i in range(N - 2):\n un_Nd_list.append(0)\n for i in range(len(cvdv_list) - N + 1):\n increase_count = decrease_count = nothing_count = 0\n for j in range(N - 1):\n if cvdv_list[i + j] < cvdv_list[i + j + 1]:\n increase_count += 1\n elif cvdv_list[i + j] > cvdv_list[i + j + 1]:\n decrease_count += 1\n else:\n nothing_count += 
1\n if increase_count == N - 1:\n un_Nd_list.append(1)\n elif decrease_count == N - 1:\n un_Nd_list.append(-1)\n else:\n un_Nd_list.append(0)\n un_Nd_list.append(0)\n return un_Nd_list\n\n\ncsv_file_read = open('stock_history.csv', 'r', encoding='euc-kr')\nstock_data = pd.read_csv(csv_file_read)\ndf = pd.DataFrame(stock_data)\nstock_DataFrame = df.dropna(axis=1)\nwhile True:\n cv_amount = [0]\n cv_rate = [0]\n cv_ma_rate = [0]\n un_Nd_plus = un_Nd_minus = 0\n result3 = []\n result4 = []\n cv_list = []\n cv_ma_rate_round = []\n unNd_list = []\n stock_name = input('종목을 입력해주세요 : ')\n Number = int(input('N의 값을 입력해주세요 : '))\n one_stock = stock_DataFrame.loc[stock_DataFrame['stockname'] == stock_name]\n print(one_stock)\n close_value = one_stock['close_value']\n one_stock_copy = one_stock.copy()\n try:\n for i in range(close_value.index[0], len(close_value) + close_value\n .index[0] - 1, 1):\n result = cv_diff_value(close_value[i], close_value[i + 1])\n cv_amount.append(result)\n except IndexError:\n print('존재하지 않는 항목')\n continue\n one_stock_copy['cv_diff_value'] = cv_amount\n for i in range(close_value.index[0], len(close_value) + close_value.\n index[0] - 1, 1):\n result2 = round(cv_diff_rate(close_value[i], close_value[i + 1]), 2)\n cv_rate.append(result2)\n one_stock_copy['cv_diff_rate'] = cv_rate\n res3 = cv_maN_value(close_value, Number)\n if isinstance(res3, str):\n print(res3)\n continue\n else:\n result3 = res3.fillna(0)\n one_stock_copy['cv_maN_value'] = result3\n ma_value = one_stock_copy['cv_maN_value']\n result4 = cv_maN_rate(ma_value, Number)\n if isinstance(result4, str):\n print(result4)\n continue\n else:\n one_stock_copy['cv_maN_rate'] = result4\n result5 = ud_Nd(close_value, Number)\n one_stock_copy['ud_Nd'] = result5\n un_Nd_value = one_stock_copy['ud_Nd']\n for i in range(un_Nd_value.index[0], len(un_Nd_value) + un_Nd_value.\n index[0], 1):\n unNd_list.append(un_Nd_value[i])\n for i in range(len(unNd_list)):\n if unNd_list[i] == 1:\n un_Nd_plus += 
1\n if unNd_list[i] == -1:\n un_Nd_minus += 1\n print(un_Nd_plus)\n print(un_Nd_minus)\n if un_Nd_plus >= 20 and un_Nd_minus >= 20:\n break\n else:\n print('un_Nd의 1 or -1 발생횟수가 둘 다 20을 넘지 않았습니다')\n continue\none_stock_copy.to_csv('stock_history_added.csv', encoding='ms949', index=False)\nprint('Data가 성공적으로 추가됐습니다')\ncsv_file_read.close()\n", "step-5": "import pandas as pd\n\n# 칼럼값으로 추가 - 함수 작성\n# 1. cv_diff_value : 종가 일간 변화량\ndef cv_diff_value(prevalue, postvalue):\n return postvalue - prevalue\n\n\n# 2. cv_diff_rate : 종가 일간 변화율\ndef cv_diff_rate(prevalue, postvalue):\n return (postvalue - prevalue) / prevalue * 100\n\n\n# 3. cv_maN_value : 종가의 N일 이동평균\ndef cv_maN_value(cv, N):\n # min_period 옵션을 이용하여 할 수도 있음 // 데이터가 최소 x 개라도 존재하면 이동평균을 구함\n str_replay = \"N의 값을 다시 입력해주세요\"\n if 3 <= N <= 5:\n return cv.rolling(window=N).mean()\n else:\n return str_replay\n\n\n# 4. cv_maN_rate : 종가의 N일 이동평균의 일간 변화율\ndef cv_maN_rate(cv, N):\n str_replay = \"N의 값을 다시 입력해주세요\"\n if 3 <= N <= 5:\n # DataFrame 을 list 로 변환\n for i in range(cv.index[0], (len(cv)+cv.index[0]), 1):\n cv_list.append(cv[i])\n # 종가의 N일 이동평균의 일간 변화율을 list 에 담기\n for i in range(len(cv_list)-1):\n if cv_list[i] != 0:\n cv_ma_rate.append((cv_list[i+1] - cv_list[i]) / cv_list[i] * 100)\n else:\n cv_ma_rate.append(0)\n # 종가의 N일 이동평균의 일간 변화율을 소수점 2째자리 까지 표현\n for i in range(len(cv_ma_rate)):\n cv_ma_rate_round.append(round(cv_ma_rate[i], 2))\n return cv_ma_rate_round\n else:\n return str_replay\n\n\n# 5. 
ud_Nd : (a) N일 연속 증가 1, (b) N일 연속 하락 -1, (c) 그렇지 않은 날 0\ndef ud_Nd(cvdv, N):\n cvdv_list = [] # list\n un_Nd_list = [] # list\n # print(cvdv) # 종가\n # print(len(cvdv)) # 길이 : 230\n # DataFrame 을 list 로 변환\n for i in range(cvdv.index[0], (len(cvdv)+cvdv.index[0]), 1):\n cvdv_list.append(cvdv[i])\n # 알 수 없는 정보는 '0'으로 두겠다\n for i in range(N-2):\n un_Nd_list.append(0)\n # 상승, 하락, 그렇지 않은 날 계산\n for i in range(len(cvdv_list)-N+1): # 0 ~ 225\n increase_count = decrease_count = nothing_count = 0\n for j in range(N-1): # 0 ~ 3\n if cvdv_list[i + j] < cvdv_list[i + j + 1]: # 종가가 상승한 날\n increase_count += 1\n elif cvdv_list[i + j] > cvdv_list[i + j + 1]: # 종가가 하락한 날\n decrease_count += 1\n else: # 종가가 상승도 하락도 아닌날\n nothing_count += 1\n # N일 연속 종가가 상승, 하락, 그렇지 않은 날 판단하고 (N-1)날에 삽입\n if increase_count == (N - 1):\n un_Nd_list.append(1)\n elif decrease_count == (N - 1):\n un_Nd_list.append(-1)\n else:\n un_Nd_list.append(0)\n un_Nd_list.append(0) # 마지막날은 판단할 수 없어서 '0' 으로 삽입\n return un_Nd_list\n\n\n# csv 파일 읽어오기 // DataFrame 으로 변경 // NaN값 제거\ncsv_file_read = open('stock_history.csv', 'r', encoding='euc-kr')\nstock_data = pd.read_csv(csv_file_read)\ndf = pd.DataFrame(stock_data)\nstock_DataFrame = df.dropna(axis=1)\n\n# 반복 시작\nwhile True:\n # 초기값\n cv_amount = [0] # 종가 일간 변화량을 저장할 list\n cv_rate = [0] # 종가 일간 변화율을 저장할 list\n cv_ma_rate = [0] # 종가의 N일 이동평균의 일간 변화율을 저장할 list\n un_Nd_plus = un_Nd_minus = 0 # 20회이상 판단할 count 변수\n result3 = [] # 종가의 N일 이동평균을 저장할 list\n result4 = [] # 종가 N일 이동평균의 일간 변화율\n cv_list = [] # 종가의 N일 이동평균의 일간 변화율을 저장할 list\n cv_ma_rate_round = [] # 종가의 N일 이동평균의 일간 변화율을 소수점 2자리로 저장할 list\n unNd_list = [] # 종가의 N일 증감을 저장할 list\n\n # 종목을 선택하고 N의 값을 입력받는다\n stock_name = input(\"종목을 입력해주세요 : \")\n Number = int(input(\"N의 값을 입력해주세요 : \"))\n one_stock = stock_DataFrame.loc[stock_DataFrame[\"stockname\"] == stock_name]\n print(one_stock)\n\n close_value = one_stock[\"close_value\"] # 종가만 가져오기\n one_stock_copy = one_stock.copy() # DataFrame 에 열을 추가하기 위해 복사\n\n # 종가 일간 
변화량\n try:\n for i in range(close_value.index[0], (len(close_value)+close_value.index[0])-1, 1):\n result = cv_diff_value(close_value[i], close_value[i+1])\n cv_amount.append(result)\n except IndexError:\n print(\"존재하지 않는 항목\")\n continue\n one_stock_copy[\"cv_diff_value\"] = cv_amount # DataFrame 에 데이터 추가\n # print(one_stock_copy)\n\n # 종가 일간 변화율 // 종가 일간 변화량과 마찬가지 // 소수점 2자리 표현\n for i in range(close_value.index[0], (len(close_value)+close_value.index[0])-1, 1):\n result2 = round(cv_diff_rate(close_value[i], close_value[i+1]), 2)\n cv_rate.append(result2)\n one_stock_copy[\"cv_diff_rate\"] = cv_rate # DataFrame 에 데이터 추가\n # print(one_stock_copy)\n\n # 종가 N일 이동평균\n res3 = cv_maN_value(close_value, Number)\n if isinstance(res3, str):\n print(res3)\n continue\n else:\n result3 = res3.fillna(0) # NaN값을 0으로 치환\n one_stock_copy[\"cv_maN_value\"] = result3\n # print(one_stock_copy)\n\n # 종가 N일 이동평균의 일간 변화율\n ma_value = one_stock_copy[\"cv_maN_value\"] # 종가 N일 이동평균 가져오기\n result4 = cv_maN_rate(ma_value, Number)\n if isinstance(result4, str):\n print(result4)\n continue\n else:\n one_stock_copy[\"cv_maN_rate\"] = result4\n # print(one_stock_copy)\n\n # N일 연속 상승, 하락, 그렇지 않은 날 파악\n result5 = ud_Nd(close_value, Number)\n one_stock_copy[\"ud_Nd\"] = result5\n\n # un_Nd = 1, -1이 20회 이상 발생하도록 N을 3 ~ 5로 조정, 종목을 변경\n un_Nd_value = one_stock_copy[\"ud_Nd\"] # N일 연속되는 증감 column 가져오기\n # DataFrame 을 list 로 변환\n for i in range(un_Nd_value.index[0], (len(un_Nd_value)+un_Nd_value.index[0]), 1):\n unNd_list.append(un_Nd_value[i])\n # 20회 이상 발생하는지 판단\n for i in range(len(unNd_list)):\n if unNd_list[i] == 1:\n un_Nd_plus += 1\n if unNd_list[i] == -1:\n un_Nd_minus += 1\n\n print(un_Nd_plus)\n print(un_Nd_minus)\n\n# 발생했다면 반복문을 종료하고 발생하지 않았다면 N을 조정하거나 종목을 변경한다\n if un_Nd_plus >= 20 and un_Nd_minus >= 20:\n break\n else:\n print(\"un_Nd의 1 or -1 발생횟수가 둘 다 20을 넘지 않았습니다\")\n continue\n\n# 반복문이 끝나고 20회이상 발생하는 조건을 만족하면 csv 파일(stock_history_added.csv)로 
저장\none_stock_copy.to_csv('stock_history_added.csv', encoding='ms949', index=False)\nprint(\"Data가 성공적으로 추가됐습니다\")\ncsv_file_read.close()\n", "step-ids": [ 5, 6, 7, 8, 9 ] }
[ 5, 6, 7, 8, 9 ]
<|reserved_special_token_0|> class ClientCreator(object): <|reserved_special_token_0|> def __init__(self, loader, endpoint_creator): self._loader = loader self._endpoint_creator = endpoint_creator def create_client(self, service_name, region_name, is_secure=True, endpoint_url=None, verify=None): service_model = self._load_service_model(service_name) cls = self.create_client_class(service_name) client_args = self._get_client_args(service_model, region_name, is_secure, endpoint_url, verify) return cls(**client_args) <|reserved_special_token_0|> <|reserved_special_token_0|> def _load_service_model(self, service_name): json_model = self._loader.load_service_model('aws/%s' % service_name) service_model = ServiceModel(json_model) return service_model <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class BaseClient(object): def __init__(self, serializer, endpoint, response_parser): self._serializer = serializer self._endpoint = endpoint self._response_parser = response_parser self._cache = {} <|reserved_special_token_1|> <|reserved_special_token_0|> class ClientCreator(object): <|reserved_special_token_0|> def __init__(self, loader, endpoint_creator): self._loader = loader self._endpoint_creator = endpoint_creator def create_client(self, service_name, region_name, is_secure=True, endpoint_url=None, verify=None): service_model = self._load_service_model(service_name) cls = self.create_client_class(service_name) client_args = self._get_client_args(service_model, region_name, is_secure, endpoint_url, verify) return cls(**client_args) <|reserved_special_token_0|> <|reserved_special_token_0|> def _load_service_model(self, service_name): json_model = self._loader.load_service_model('aws/%s' % service_name) service_model = ServiceModel(json_model) return service_model def _get_client_args(self, service_model, region_name, is_secure, endpoint_url, verify): protocol = service_model.metadata['protocol'] serializer 
= botocore_eb.serialize.create_serializer(protocol, include_validation=True) endpoint = self._endpoint_creator.create_endpoint(service_model, region_name, is_secure=is_secure, endpoint_url=endpoint_url, verify=verify) response_parser = botocore_eb.parsers.create_parser(protocol) return {'serializer': serializer, 'endpoint': endpoint, 'response_parser': response_parser} def _create_methods(self, service_model): op_dict = {} for operation_name in service_model.operation_names: py_operation_name = xform_name(operation_name) op_dict[py_operation_name] = self._create_api_method( py_operation_name, operation_name, service_model) return op_dict <|reserved_special_token_0|> <|reserved_special_token_0|> class BaseClient(object): def __init__(self, serializer, endpoint, response_parser): self._serializer = serializer self._endpoint = endpoint self._response_parser = response_parser self._cache = {} <|reserved_special_token_1|> <|reserved_special_token_0|> class ClientError(Exception): <|reserved_special_token_0|> def __init__(self, error_response, operation_name): msg = self.MSG_TEMPLATE.format(error_code=error_response['Error'][ 'Code'], error_message=error_response['Error']['Message'], operation_name=operation_name) super(ClientError, self).__init__(msg) self.response = error_response class ClientCreator(object): """Creates client objects for a service.""" def __init__(self, loader, endpoint_creator): self._loader = loader self._endpoint_creator = endpoint_creator def create_client(self, service_name, region_name, is_secure=True, endpoint_url=None, verify=None): service_model = self._load_service_model(service_name) cls = self.create_client_class(service_name) client_args = self._get_client_args(service_model, region_name, is_secure, endpoint_url, verify) return cls(**client_args) def create_client_class(self, service_name): service_model = self._load_service_model(service_name) methods = self._create_methods(service_model) py_name_to_operation_name = 
self._create_name_mapping(service_model) self._add_pagination_methods(service_model, methods, py_name_to_operation_name) cls = type(service_name, (BaseClient,), methods) return cls def _add_pagination_methods(self, service_model, methods, name_mapping): loader = self._loader def get_paginator(self, operation_name): """Create a paginator for an operation. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is ``create_foo``, and you'd normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator("create_foo")``. :raise OperationNotPageableError: Raised if the operation is not pageable. You can use the ``client.can_paginate`` method to check if an operation is pageable. :rtype: L{botocore.paginate.Paginator} :return: A paginator object. """ if not self.can_paginate(operation_name): raise OperationNotPageableError(operation_name=operation_name) else: actual_operation_name = name_mapping[operation_name] paginator = Paginator(getattr(self, operation_name), self. _cache['page_config'][actual_operation_name]) return paginator def can_paginate(self, operation_name): """Check if an operation can be paginated. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is ``create_foo``, and you'd normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator("create_foo")``. :return: ``True`` if the operation can be paginated, ``False`` otherwise. """ if 'page_config' not in self._cache: try: page_config = loader.load_data('aws/%s/%s.paginators' % (service_model.endpoint_prefix, service_model. 
api_version))['pagination'] self._cache['page_config'] = page_config except DataNotFoundError: self._cache['page_config'] = {} actual_operation_name = name_mapping[operation_name] return actual_operation_name in self._cache['page_config'] methods['get_paginator'] = get_paginator methods['can_paginate'] = can_paginate def _load_service_model(self, service_name): json_model = self._loader.load_service_model('aws/%s' % service_name) service_model = ServiceModel(json_model) return service_model def _get_client_args(self, service_model, region_name, is_secure, endpoint_url, verify): protocol = service_model.metadata['protocol'] serializer = botocore_eb.serialize.create_serializer(protocol, include_validation=True) endpoint = self._endpoint_creator.create_endpoint(service_model, region_name, is_secure=is_secure, endpoint_url=endpoint_url, verify=verify) response_parser = botocore_eb.parsers.create_parser(protocol) return {'serializer': serializer, 'endpoint': endpoint, 'response_parser': response_parser} def _create_methods(self, service_model): op_dict = {} for operation_name in service_model.operation_names: py_operation_name = xform_name(operation_name) op_dict[py_operation_name] = self._create_api_method( py_operation_name, operation_name, service_model) return op_dict def _create_name_mapping(self, service_model): mapping = {} for operation_name in service_model.operation_names: py_operation_name = xform_name(operation_name) mapping[py_operation_name] = operation_name return mapping def _create_api_method(self, py_operation_name, operation_name, service_model): def _api_call(self, **kwargs): operation_model = service_model.operation_model(operation_name) request_dict = self._serializer.serialize_to_request(kwargs, operation_model) http, parsed_response = self._endpoint.make_request(operation_model , request_dict) if http.status_code >= 300: raise ClientError(parsed_response, operation_name) else: return parsed_response _api_call.__name__ = str(py_operation_name) 
return _api_call class BaseClient(object): def __init__(self, serializer, endpoint, response_parser): self._serializer = serializer self._endpoint = endpoint self._response_parser = response_parser self._cache = {} <|reserved_special_token_1|> <|reserved_special_token_0|> class ClientError(Exception): MSG_TEMPLATE = ( 'An error occurred ({error_code}) when calling the {operation_name} operation: {error_message}' ) def __init__(self, error_response, operation_name): msg = self.MSG_TEMPLATE.format(error_code=error_response['Error'][ 'Code'], error_message=error_response['Error']['Message'], operation_name=operation_name) super(ClientError, self).__init__(msg) self.response = error_response class ClientCreator(object): """Creates client objects for a service.""" def __init__(self, loader, endpoint_creator): self._loader = loader self._endpoint_creator = endpoint_creator def create_client(self, service_name, region_name, is_secure=True, endpoint_url=None, verify=None): service_model = self._load_service_model(service_name) cls = self.create_client_class(service_name) client_args = self._get_client_args(service_model, region_name, is_secure, endpoint_url, verify) return cls(**client_args) def create_client_class(self, service_name): service_model = self._load_service_model(service_name) methods = self._create_methods(service_model) py_name_to_operation_name = self._create_name_mapping(service_model) self._add_pagination_methods(service_model, methods, py_name_to_operation_name) cls = type(service_name, (BaseClient,), methods) return cls def _add_pagination_methods(self, service_model, methods, name_mapping): loader = self._loader def get_paginator(self, operation_name): """Create a paginator for an operation. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. 
For example, if the method name is ``create_foo``, and you'd normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator("create_foo")``. :raise OperationNotPageableError: Raised if the operation is not pageable. You can use the ``client.can_paginate`` method to check if an operation is pageable. :rtype: L{botocore.paginate.Paginator} :return: A paginator object. """ if not self.can_paginate(operation_name): raise OperationNotPageableError(operation_name=operation_name) else: actual_operation_name = name_mapping[operation_name] paginator = Paginator(getattr(self, operation_name), self. _cache['page_config'][actual_operation_name]) return paginator def can_paginate(self, operation_name): """Check if an operation can be paginated. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is ``create_foo``, and you'd normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator("create_foo")``. :return: ``True`` if the operation can be paginated, ``False`` otherwise. """ if 'page_config' not in self._cache: try: page_config = loader.load_data('aws/%s/%s.paginators' % (service_model.endpoint_prefix, service_model. 
api_version))['pagination'] self._cache['page_config'] = page_config except DataNotFoundError: self._cache['page_config'] = {} actual_operation_name = name_mapping[operation_name] return actual_operation_name in self._cache['page_config'] methods['get_paginator'] = get_paginator methods['can_paginate'] = can_paginate def _load_service_model(self, service_name): json_model = self._loader.load_service_model('aws/%s' % service_name) service_model = ServiceModel(json_model) return service_model def _get_client_args(self, service_model, region_name, is_secure, endpoint_url, verify): protocol = service_model.metadata['protocol'] serializer = botocore_eb.serialize.create_serializer(protocol, include_validation=True) endpoint = self._endpoint_creator.create_endpoint(service_model, region_name, is_secure=is_secure, endpoint_url=endpoint_url, verify=verify) response_parser = botocore_eb.parsers.create_parser(protocol) return {'serializer': serializer, 'endpoint': endpoint, 'response_parser': response_parser} def _create_methods(self, service_model): op_dict = {} for operation_name in service_model.operation_names: py_operation_name = xform_name(operation_name) op_dict[py_operation_name] = self._create_api_method( py_operation_name, operation_name, service_model) return op_dict def _create_name_mapping(self, service_model): mapping = {} for operation_name in service_model.operation_names: py_operation_name = xform_name(operation_name) mapping[py_operation_name] = operation_name return mapping def _create_api_method(self, py_operation_name, operation_name, service_model): def _api_call(self, **kwargs): operation_model = service_model.operation_model(operation_name) request_dict = self._serializer.serialize_to_request(kwargs, operation_model) http, parsed_response = self._endpoint.make_request(operation_model , request_dict) if http.status_code >= 300: raise ClientError(parsed_response, operation_name) else: return parsed_response _api_call.__name__ = str(py_operation_name) 
return _api_call class BaseClient(object): def __init__(self, serializer, endpoint, response_parser): self._serializer = serializer self._endpoint = endpoint self._response_parser = response_parser self._cache = {} <|reserved_special_token_1|> from botocore_eb.model import ServiceModel from botocore_eb.exceptions import ParamValidationError from botocore_eb.exceptions import DataNotFoundError from botocore_eb.exceptions import OperationNotPageableError from botocore_eb import xform_name from botocore_eb.paginate import Paginator import botocore_eb.validate import botocore_eb.serialize class ClientError(Exception): MSG_TEMPLATE = ( 'An error occurred ({error_code}) when calling the {operation_name} ' 'operation: {error_message}') def __init__(self, error_response, operation_name): msg = self.MSG_TEMPLATE.format( error_code=error_response['Error']['Code'], error_message=error_response['Error']['Message'], operation_name=operation_name) super(ClientError, self).__init__(msg) self.response = error_response class ClientCreator(object): """Creates client objects for a service.""" def __init__(self, loader, endpoint_creator): self._loader = loader self._endpoint_creator = endpoint_creator def create_client(self, service_name, region_name, is_secure=True, endpoint_url=None, verify=None): service_model = self._load_service_model(service_name) cls = self.create_client_class(service_name) client_args = self._get_client_args(service_model, region_name, is_secure, endpoint_url, verify) return cls(**client_args) def create_client_class(self, service_name): service_model = self._load_service_model(service_name) methods = self._create_methods(service_model) py_name_to_operation_name = self._create_name_mapping(service_model) self._add_pagination_methods(service_model, methods, py_name_to_operation_name) cls = type(service_name, (BaseClient,), methods) return cls def _add_pagination_methods(self, service_model, methods, name_mapping): loader = self._loader def get_paginator(self, 
operation_name): """Create a paginator for an operation. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is ``create_foo``, and you'd normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator("create_foo")``. :raise OperationNotPageableError: Raised if the operation is not pageable. You can use the ``client.can_paginate`` method to check if an operation is pageable. :rtype: L{botocore.paginate.Paginator} :return: A paginator object. """ # Note that the 'self' in this method refers to the self on # BaseClient, not on ClientCreator. if not self.can_paginate(operation_name): raise OperationNotPageableError(operation_name=operation_name) else: actual_operation_name = name_mapping[operation_name] paginator = Paginator( getattr(self, operation_name), self._cache['page_config'][actual_operation_name]) return paginator def can_paginate(self, operation_name): """Check if an operation can be paginated. :type operation_name: string :param operation_name: The operation name. This is the same name as the method name on the client. For example, if the method name is ``create_foo``, and you'd normally invoke the operation as ``client.create_foo(**kwargs)``, if the ``create_foo`` operation can be paginated, you can use the call ``client.get_paginator("create_foo")``. :return: ``True`` if the operation can be paginated, ``False`` otherwise. 
""" if 'page_config' not in self._cache: try: page_config = loader.load_data('aws/%s/%s.paginators' % ( service_model.endpoint_prefix, service_model.api_version))['pagination'] self._cache['page_config'] = page_config except DataNotFoundError: self._cache['page_config'] = {} actual_operation_name = name_mapping[operation_name] return actual_operation_name in self._cache['page_config'] methods['get_paginator'] = get_paginator methods['can_paginate'] = can_paginate def _load_service_model(self, service_name): json_model = self._loader.load_service_model('aws/%s' % service_name) service_model = ServiceModel(json_model) return service_model def _get_client_args(self, service_model, region_name, is_secure, endpoint_url, verify): # A client needs: # # * serializer # * endpoint # * response parser protocol = service_model.metadata['protocol'] serializer = botocore_eb.serialize.create_serializer( protocol, include_validation=True) endpoint = self._endpoint_creator.create_endpoint( service_model, region_name, is_secure=is_secure, endpoint_url=endpoint_url, verify=verify) response_parser = botocore_eb.parsers.create_parser(protocol) return { 'serializer': serializer, 'endpoint': endpoint, 'response_parser': response_parser } def _create_methods(self, service_model): op_dict = {} for operation_name in service_model.operation_names: py_operation_name = xform_name(operation_name) op_dict[py_operation_name] = self._create_api_method( py_operation_name, operation_name, service_model) return op_dict def _create_name_mapping(self, service_model): # py_name -> OperationName mapping = {} for operation_name in service_model.operation_names: py_operation_name = xform_name(operation_name) mapping[py_operation_name] = operation_name return mapping def _create_api_method(self, py_operation_name, operation_name, service_model): def _api_call(self, **kwargs): operation_model = service_model.operation_model(operation_name) request_dict = self._serializer.serialize_to_request( kwargs, 
operation_model) http, parsed_response = self._endpoint.make_request( operation_model, request_dict) if http.status_code >= 300: raise ClientError(parsed_response, operation_name) else: return parsed_response _api_call.__name__ = str(py_operation_name) # TODO: docstrings. return _api_call class BaseClient(object): def __init__(self, serializer, endpoint, response_parser): self._serializer = serializer self._endpoint = endpoint self._response_parser = response_parser self._cache = {}
flexible
{ "blob_id": "829c833866198307d7d19c4a0cbe40299ee14eb9", "index": 5288, "step-1": "<mask token>\n\n\nclass ClientCreator(object):\n <mask token>\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n <mask token>\n <mask token>\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n", "step-2": "<mask token>\n\n\nclass ClientCreator(object):\n <mask token>\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n <mask token>\n <mask token>\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n protocol = service_model.metadata['protocol']\n 
serializer = botocore_eb.serialize.create_serializer(protocol,\n include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(service_model,\n region_name, is_secure=is_secure, endpoint_url=endpoint_url,\n verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {'serializer': serializer, 'endpoint': endpoint,\n 'response_parser': response_parser}\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n <mask token>\n <mask token>\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n", "step-3": "<mask token>\n\n\nclass ClientError(Exception):\n <mask token>\n\n def __init__(self, error_response, operation_name):\n msg = self.MSG_TEMPLATE.format(error_code=error_response['Error'][\n 'Code'], error_message=error_response['Error']['Message'],\n operation_name=operation_name)\n super(ClientError, self).__init__(msg)\n self.response = error_response\n\n\nclass ClientCreator(object):\n \"\"\"Creates client objects for a service.\"\"\"\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n\n def create_client_class(self, service_name):\n service_model = self._load_service_model(service_name)\n methods = 
self._create_methods(service_model)\n py_name_to_operation_name = self._create_name_mapping(service_model)\n self._add_pagination_methods(service_model, methods,\n py_name_to_operation_name)\n cls = type(service_name, (BaseClient,), methods)\n return cls\n\n def _add_pagination_methods(self, service_model, methods, name_mapping):\n loader = self._loader\n\n def get_paginator(self, operation_name):\n \"\"\"Create a paginator for an operation.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :raise OperationNotPageableError: Raised if the operation is not\n pageable. You can use the ``client.can_paginate`` method to\n check if an operation is pageable.\n\n :rtype: L{botocore.paginate.Paginator}\n :return: A paginator object.\n\n \"\"\"\n if not self.can_paginate(operation_name):\n raise OperationNotPageableError(operation_name=operation_name)\n else:\n actual_operation_name = name_mapping[operation_name]\n paginator = Paginator(getattr(self, operation_name), self.\n _cache['page_config'][actual_operation_name])\n return paginator\n\n def can_paginate(self, operation_name):\n \"\"\"Check if an operation can be paginated.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. 
For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :return: ``True`` if the operation can be paginated,\n ``False`` otherwise.\n\n \"\"\"\n if 'page_config' not in self._cache:\n try:\n page_config = loader.load_data('aws/%s/%s.paginators' %\n (service_model.endpoint_prefix, service_model.\n api_version))['pagination']\n self._cache['page_config'] = page_config\n except DataNotFoundError:\n self._cache['page_config'] = {}\n actual_operation_name = name_mapping[operation_name]\n return actual_operation_name in self._cache['page_config']\n methods['get_paginator'] = get_paginator\n methods['can_paginate'] = can_paginate\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(protocol,\n include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(service_model,\n region_name, is_secure=is_secure, endpoint_url=endpoint_url,\n verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {'serializer': serializer, 'endpoint': endpoint,\n 'response_parser': response_parser}\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n\n def _create_name_mapping(self, service_model):\n mapping = {}\n for operation_name in service_model.operation_names:\n py_operation_name = 
xform_name(operation_name)\n mapping[py_operation_name] = operation_name\n return mapping\n\n def _create_api_method(self, py_operation_name, operation_name,\n service_model):\n\n def _api_call(self, **kwargs):\n operation_model = service_model.operation_model(operation_name)\n request_dict = self._serializer.serialize_to_request(kwargs,\n operation_model)\n http, parsed_response = self._endpoint.make_request(operation_model\n , request_dict)\n if http.status_code >= 300:\n raise ClientError(parsed_response, operation_name)\n else:\n return parsed_response\n _api_call.__name__ = str(py_operation_name)\n return _api_call\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n", "step-4": "<mask token>\n\n\nclass ClientError(Exception):\n MSG_TEMPLATE = (\n 'An error occurred ({error_code}) when calling the {operation_name} operation: {error_message}'\n )\n\n def __init__(self, error_response, operation_name):\n msg = self.MSG_TEMPLATE.format(error_code=error_response['Error'][\n 'Code'], error_message=error_response['Error']['Message'],\n operation_name=operation_name)\n super(ClientError, self).__init__(msg)\n self.response = error_response\n\n\nclass ClientCreator(object):\n \"\"\"Creates client objects for a service.\"\"\"\n\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name,\n is_secure, endpoint_url, verify)\n return cls(**client_args)\n\n def create_client_class(self, service_name):\n service_model = self._load_service_model(service_name)\n methods = 
self._create_methods(service_model)\n py_name_to_operation_name = self._create_name_mapping(service_model)\n self._add_pagination_methods(service_model, methods,\n py_name_to_operation_name)\n cls = type(service_name, (BaseClient,), methods)\n return cls\n\n def _add_pagination_methods(self, service_model, methods, name_mapping):\n loader = self._loader\n\n def get_paginator(self, operation_name):\n \"\"\"Create a paginator for an operation.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :raise OperationNotPageableError: Raised if the operation is not\n pageable. You can use the ``client.can_paginate`` method to\n check if an operation is pageable.\n\n :rtype: L{botocore.paginate.Paginator}\n :return: A paginator object.\n\n \"\"\"\n if not self.can_paginate(operation_name):\n raise OperationNotPageableError(operation_name=operation_name)\n else:\n actual_operation_name = name_mapping[operation_name]\n paginator = Paginator(getattr(self, operation_name), self.\n _cache['page_config'][actual_operation_name])\n return paginator\n\n def can_paginate(self, operation_name):\n \"\"\"Check if an operation can be paginated.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. 
For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :return: ``True`` if the operation can be paginated,\n ``False`` otherwise.\n\n \"\"\"\n if 'page_config' not in self._cache:\n try:\n page_config = loader.load_data('aws/%s/%s.paginators' %\n (service_model.endpoint_prefix, service_model.\n api_version))['pagination']\n self._cache['page_config'] = page_config\n except DataNotFoundError:\n self._cache['page_config'] = {}\n actual_operation_name = name_mapping[operation_name]\n return actual_operation_name in self._cache['page_config']\n methods['get_paginator'] = get_paginator\n methods['can_paginate'] = can_paginate\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(protocol,\n include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(service_model,\n region_name, is_secure=is_secure, endpoint_url=endpoint_url,\n verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {'serializer': serializer, 'endpoint': endpoint,\n 'response_parser': response_parser}\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n\n def _create_name_mapping(self, service_model):\n mapping = {}\n for operation_name in service_model.operation_names:\n py_operation_name = 
xform_name(operation_name)\n mapping[py_operation_name] = operation_name\n return mapping\n\n def _create_api_method(self, py_operation_name, operation_name,\n service_model):\n\n def _api_call(self, **kwargs):\n operation_model = service_model.operation_model(operation_name)\n request_dict = self._serializer.serialize_to_request(kwargs,\n operation_model)\n http, parsed_response = self._endpoint.make_request(operation_model\n , request_dict)\n if http.status_code >= 300:\n raise ClientError(parsed_response, operation_name)\n else:\n return parsed_response\n _api_call.__name__ = str(py_operation_name)\n return _api_call\n\n\nclass BaseClient(object):\n\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n", "step-5": "from botocore_eb.model import ServiceModel\nfrom botocore_eb.exceptions import ParamValidationError\nfrom botocore_eb.exceptions import DataNotFoundError\nfrom botocore_eb.exceptions import OperationNotPageableError\nfrom botocore_eb import xform_name\nfrom botocore_eb.paginate import Paginator\nimport botocore_eb.validate\nimport botocore_eb.serialize\n\n\nclass ClientError(Exception):\n MSG_TEMPLATE = (\n 'An error occurred ({error_code}) when calling the {operation_name} '\n 'operation: {error_message}')\n\n def __init__(self, error_response, operation_name):\n msg = self.MSG_TEMPLATE.format(\n error_code=error_response['Error']['Code'],\n error_message=error_response['Error']['Message'],\n operation_name=operation_name)\n super(ClientError, self).__init__(msg)\n self.response = error_response\n\n\nclass ClientCreator(object):\n \"\"\"Creates client objects for a service.\"\"\"\n def __init__(self, loader, endpoint_creator):\n self._loader = loader\n self._endpoint_creator = endpoint_creator\n\n def create_client(self, service_name, region_name, is_secure=True,\n endpoint_url=None, verify=None):\n service_model = 
self._load_service_model(service_name)\n cls = self.create_client_class(service_name)\n client_args = self._get_client_args(service_model, region_name, is_secure,\n endpoint_url, verify)\n return cls(**client_args)\n\n def create_client_class(self, service_name):\n service_model = self._load_service_model(service_name)\n methods = self._create_methods(service_model)\n py_name_to_operation_name = self._create_name_mapping(service_model)\n self._add_pagination_methods(service_model, methods,\n py_name_to_operation_name)\n cls = type(service_name, (BaseClient,), methods)\n return cls\n\n def _add_pagination_methods(self, service_model, methods, name_mapping):\n loader = self._loader\n\n def get_paginator(self, operation_name):\n \"\"\"Create a paginator for an operation.\n\n :type operation_name: string\n :param operation_name: The operation name. This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :raise OperationNotPageableError: Raised if the operation is not\n pageable. You can use the ``client.can_paginate`` method to\n check if an operation is pageable.\n\n :rtype: L{botocore.paginate.Paginator}\n :return: A paginator object.\n\n \"\"\"\n # Note that the 'self' in this method refers to the self on\n # BaseClient, not on ClientCreator.\n if not self.can_paginate(operation_name):\n raise OperationNotPageableError(operation_name=operation_name)\n else:\n actual_operation_name = name_mapping[operation_name]\n paginator = Paginator(\n getattr(self, operation_name),\n self._cache['page_config'][actual_operation_name])\n return paginator\n\n def can_paginate(self, operation_name):\n \"\"\"Check if an operation can be paginated.\n\n :type operation_name: string\n :param operation_name: The operation name. 
This is the same name\n as the method name on the client. For example, if the\n method name is ``create_foo``, and you'd normally invoke the\n operation as ``client.create_foo(**kwargs)``, if the\n ``create_foo`` operation can be paginated, you can use the\n call ``client.get_paginator(\"create_foo\")``.\n\n :return: ``True`` if the operation can be paginated,\n ``False`` otherwise.\n\n \"\"\"\n if 'page_config' not in self._cache:\n try:\n page_config = loader.load_data('aws/%s/%s.paginators' % (\n service_model.endpoint_prefix,\n service_model.api_version))['pagination']\n self._cache['page_config'] = page_config\n except DataNotFoundError:\n self._cache['page_config'] = {}\n actual_operation_name = name_mapping[operation_name]\n return actual_operation_name in self._cache['page_config']\n\n methods['get_paginator'] = get_paginator\n methods['can_paginate'] = can_paginate\n\n def _load_service_model(self, service_name):\n json_model = self._loader.load_service_model('aws/%s' % service_name)\n service_model = ServiceModel(json_model)\n return service_model\n\n def _get_client_args(self, service_model, region_name, is_secure,\n endpoint_url, verify):\n # A client needs:\n #\n # * serializer\n # * endpoint\n # * response parser\n protocol = service_model.metadata['protocol']\n serializer = botocore_eb.serialize.create_serializer(\n protocol, include_validation=True)\n endpoint = self._endpoint_creator.create_endpoint(\n service_model, region_name, is_secure=is_secure,\n endpoint_url=endpoint_url, verify=verify)\n response_parser = botocore_eb.parsers.create_parser(protocol)\n return {\n 'serializer': serializer,\n 'endpoint': endpoint,\n 'response_parser': response_parser\n }\n\n def _create_methods(self, service_model):\n op_dict = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n op_dict[py_operation_name] = self._create_api_method(\n py_operation_name, operation_name, service_model)\n return op_dict\n\n 
def _create_name_mapping(self, service_model):\n # py_name -> OperationName\n mapping = {}\n for operation_name in service_model.operation_names:\n py_operation_name = xform_name(operation_name)\n mapping[py_operation_name] = operation_name\n return mapping\n\n def _create_api_method(self, py_operation_name, operation_name,\n service_model):\n def _api_call(self, **kwargs):\n operation_model = service_model.operation_model(operation_name)\n request_dict = self._serializer.serialize_to_request(\n kwargs, operation_model)\n\n http, parsed_response = self._endpoint.make_request(\n operation_model, request_dict)\n if http.status_code >= 300:\n raise ClientError(parsed_response, operation_name)\n else:\n return parsed_response\n\n _api_call.__name__ = str(py_operation_name)\n # TODO: docstrings.\n return _api_call\n\n\nclass BaseClient(object):\n def __init__(self, serializer, endpoint, response_parser):\n self._serializer = serializer\n self._endpoint = endpoint\n self._response_parser = response_parser\n self._cache = {}\n", "step-ids": [ 6, 8, 15, 16, 18 ] }
[ 6, 8, 15, 16, 18 ]
from abc import ABC # This is base class class Vehicle(ABC): pass # GroundVehicle inherits from Vehicle class GroundVehicle(Vehicle): pass # Car inherits from GroundVehicle class Car(GroundVehicle): pass # Motorcycle inherits from GroundVehicle class Motorcycle(GroundVehicle): pass # FlightVehicle inherits from Vehicle class FlightVehicle(Vehicle): pass # Starship inherits from FlightVehicle class Starship(FlightVehicle): pass # Airplane inherits from FlightVehicle class Airplane(FlightVehicle): pass
normal
{ "blob_id": "d7db617131bf6e72c7aa808030f7286ddb609cc2", "index": 4579, "step-1": "<mask token>\n\n\nclass Motorcycle(GroundVehicle):\n pass\n\n\nclass FlightVehicle(Vehicle):\n pass\n\n\nclass Starship(FlightVehicle):\n pass\n\n\nclass Airplane(FlightVehicle):\n pass\n", "step-2": "<mask token>\n\n\nclass GroundVehicle(Vehicle):\n pass\n\n\nclass Car(GroundVehicle):\n pass\n\n\nclass Motorcycle(GroundVehicle):\n pass\n\n\nclass FlightVehicle(Vehicle):\n pass\n\n\nclass Starship(FlightVehicle):\n pass\n\n\nclass Airplane(FlightVehicle):\n pass\n", "step-3": "<mask token>\n\n\nclass Vehicle(ABC):\n pass\n\n\nclass GroundVehicle(Vehicle):\n pass\n\n\nclass Car(GroundVehicle):\n pass\n\n\nclass Motorcycle(GroundVehicle):\n pass\n\n\nclass FlightVehicle(Vehicle):\n pass\n\n\nclass Starship(FlightVehicle):\n pass\n\n\nclass Airplane(FlightVehicle):\n pass\n", "step-4": "from abc import ABC\n\n\nclass Vehicle(ABC):\n pass\n\n\nclass GroundVehicle(Vehicle):\n pass\n\n\nclass Car(GroundVehicle):\n pass\n\n\nclass Motorcycle(GroundVehicle):\n pass\n\n\nclass FlightVehicle(Vehicle):\n pass\n\n\nclass Starship(FlightVehicle):\n pass\n\n\nclass Airplane(FlightVehicle):\n pass\n", "step-5": "from abc import ABC\n\n# This is base class\nclass Vehicle(ABC):\n pass\n\n# GroundVehicle inherits from Vehicle\nclass GroundVehicle(Vehicle):\n pass\n\n# Car inherits from GroundVehicle\nclass Car(GroundVehicle):\n pass\n\n# Motorcycle inherits from GroundVehicle\nclass Motorcycle(GroundVehicle):\n pass\n\n# FlightVehicle inherits from Vehicle\nclass FlightVehicle(Vehicle):\n pass\n\n# Starship inherits from FlightVehicle\nclass Starship(FlightVehicle):\n pass\n\n# Airplane inherits from FlightVehicle\nclass Airplane(FlightVehicle):\n pass\n", "step-ids": [ 4, 6, 7, 8, 9 ] }
[ 4, 6, 7, 8, 9 ]
<|reserved_special_token_0|> class VigenereCipher(object): def __init__(self, key): print('Vigenere Cipher Encription') self.key = key def encode(self, text): key = self.key ans = '' for index, i in enumerate(text): if ord('!') <= ord(i) <= ord('~'): index %= len(key) if ord(i) + ord(key[index]) - ord('!') > ord('~'): ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord( '!')) % ord('~') - 1) else: ans += chr(ord(i) + ord(key[index]) - ord('!')) else: ans += i return ans def decode(self, text): key = self.key ans = '' for index, i in enumerate(text): if ord('!') <= ord(i) <= ord('~'): index %= len(key) if ord('!') + ord(i) - ord(key[index]) < ord('!'): ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1) else: ans += chr(ord('!') + ord(i) - ord(key[index])) else: ans += i return ans <|reserved_special_token_0|> def write_to_file(file_name, data): f = open(file_name, 'w') data = f.write(data) f.close() def encode_from_file(file_name, obj): data = read_from_file(file_name) for _ in range(args.strength): data = obj.encode(data) write_to_file(file_name, data) print('encode file -> ' + file_name) def decode_from_file(file_name, obj): data = read_from_file(file_name) for _ in range(args.strength): data = obj.decode(data) write_to_file(file_name, data) print('decode file -> ' + file_name) def encription_form_path(PATH, obj): try: for path in os.listdir(PATH): encription_form_path(PATH + '/' + path, obj) except OSError: if args.encode: encode_from_file(PATH, obj) elif args.decode: decode_from_file(PATH, obj) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class VigenereCipher(object): def __init__(self, key): print('Vigenere Cipher Encription') self.key = key def encode(self, text): key = self.key ans = '' for index, i in enumerate(text): if ord('!') <= ord(i) <= ord('~'): index %= len(key) if ord(i) + ord(key[index]) - ord('!') > ord('~'): ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord( '!')) % ord('~') - 1) else: ans += 
chr(ord(i) + ord(key[index]) - ord('!')) else: ans += i return ans def decode(self, text): key = self.key ans = '' for index, i in enumerate(text): if ord('!') <= ord(i) <= ord('~'): index %= len(key) if ord('!') + ord(i) - ord(key[index]) < ord('!'): ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1) else: ans += chr(ord('!') + ord(i) - ord(key[index])) else: ans += i return ans def read_from_file(file_name): f = open(file_name, 'r') data = f.read() f.close() return data def write_to_file(file_name, data): f = open(file_name, 'w') data = f.write(data) f.close() def encode_from_file(file_name, obj): data = read_from_file(file_name) for _ in range(args.strength): data = obj.encode(data) write_to_file(file_name, data) print('encode file -> ' + file_name) def decode_from_file(file_name, obj): data = read_from_file(file_name) for _ in range(args.strength): data = obj.decode(data) write_to_file(file_name, data) print('decode file -> ' + file_name) def encription_form_path(PATH, obj): try: for path in os.listdir(PATH): encription_form_path(PATH + '/' + path, obj) except OSError: if args.encode: encode_from_file(PATH, obj) elif args.decode: decode_from_file(PATH, obj) <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class VigenereCipher(object): def __init__(self, key): print('Vigenere Cipher Encription') self.key = key def encode(self, text): key = self.key ans = '' for index, i in enumerate(text): if ord('!') <= ord(i) <= ord('~'): index %= len(key) if ord(i) + ord(key[index]) - ord('!') > ord('~'): ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord( '!')) % ord('~') - 1) else: ans += chr(ord(i) + ord(key[index]) - ord('!')) else: ans += i return ans def decode(self, text): key = self.key ans = '' for index, i in enumerate(text): if ord('!') <= ord(i) <= ord('~'): index %= len(key) if ord('!') + ord(i) - ord(key[index]) < ord('!'): ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1) else: ans += chr(ord('!') + ord(i) - 
ord(key[index])) else: ans += i return ans def read_from_file(file_name): f = open(file_name, 'r') data = f.read() f.close() return data def write_to_file(file_name, data): f = open(file_name, 'w') data = f.write(data) f.close() def encode_from_file(file_name, obj): data = read_from_file(file_name) for _ in range(args.strength): data = obj.encode(data) write_to_file(file_name, data) print('encode file -> ' + file_name) def decode_from_file(file_name, obj): data = read_from_file(file_name) for _ in range(args.strength): data = obj.decode(data) write_to_file(file_name, data) print('decode file -> ' + file_name) def encription_form_path(PATH, obj): try: for path in os.listdir(PATH): encription_form_path(PATH + '/' + path, obj) except OSError: if args.encode: encode_from_file(PATH, obj) elif args.decode: decode_from_file(PATH, obj) <|reserved_special_token_0|> parser = argparse.ArgumentParser('Description of your program') parser.add_argument('-i', '--input_file', help='input file name', required= False) parser.add_argument('-e', '--encode', help='encode password', required=False) parser.add_argument('-d', '--decode', help='decode password', required=False) parser.add_argument('-f', '--folder', help='folder name', required=False) parser.add_argument('-s', '--strength', help='encription strength', type= int, default=1, required=False) args = parser.parse_args() if args.input_file: PATH = args.input_file elif args.folder: PATH = args.folder else: exit('Need --input_file or --folder\nUse -h for help') if args.encode: pswd = args.encode elif args.decode: pswd = args.decode else: exit('Need --encode or --decode\nUse -h for help') obj = VigenereCipher(pswd) encription_form_path(PATH, obj) <|reserved_special_token_1|> <|reserved_special_token_0|> import os import argparse class VigenereCipher(object): def __init__(self, key): print('Vigenere Cipher Encription') self.key = key def encode(self, text): key = self.key ans = '' for index, i in enumerate(text): if ord('!') <= 
ord(i) <= ord('~'): index %= len(key) if ord(i) + ord(key[index]) - ord('!') > ord('~'): ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord( '!')) % ord('~') - 1) else: ans += chr(ord(i) + ord(key[index]) - ord('!')) else: ans += i return ans def decode(self, text): key = self.key ans = '' for index, i in enumerate(text): if ord('!') <= ord(i) <= ord('~'): index %= len(key) if ord('!') + ord(i) - ord(key[index]) < ord('!'): ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1) else: ans += chr(ord('!') + ord(i) - ord(key[index])) else: ans += i return ans def read_from_file(file_name): f = open(file_name, 'r') data = f.read() f.close() return data def write_to_file(file_name, data): f = open(file_name, 'w') data = f.write(data) f.close() def encode_from_file(file_name, obj): data = read_from_file(file_name) for _ in range(args.strength): data = obj.encode(data) write_to_file(file_name, data) print('encode file -> ' + file_name) def decode_from_file(file_name, obj): data = read_from_file(file_name) for _ in range(args.strength): data = obj.decode(data) write_to_file(file_name, data) print('decode file -> ' + file_name) def encription_form_path(PATH, obj): try: for path in os.listdir(PATH): encription_form_path(PATH + '/' + path, obj) except OSError: if args.encode: encode_from_file(PATH, obj) elif args.decode: decode_from_file(PATH, obj) <|reserved_special_token_0|> parser = argparse.ArgumentParser('Description of your program') parser.add_argument('-i', '--input_file', help='input file name', required= False) parser.add_argument('-e', '--encode', help='encode password', required=False) parser.add_argument('-d', '--decode', help='decode password', required=False) parser.add_argument('-f', '--folder', help='folder name', required=False) parser.add_argument('-s', '--strength', help='encription strength', type= int, default=1, required=False) args = parser.parse_args() if args.input_file: PATH = args.input_file elif args.folder: PATH = args.folder else: exit('Need 
--input_file or --folder\nUse -h for help') if args.encode: pswd = args.encode elif args.decode: pswd = args.decode else: exit('Need --encode or --decode\nUse -h for help') obj = VigenereCipher(pswd) encription_form_path(PATH, obj) <|reserved_special_token_1|> #!/usr/bin/env python3 # coding: utf-8 """ Blaise de Vigenère (1523–1596) mathematician, developed encryption scheme, VigenereCipher algorithm is implemented based on his work, with a utility of relative strength index for encryption and decryption. VERSION : 1.0 LICENSE : GNU GPLv3 STYLE : PEP 8 AUTHOR : AKULA.S.S.S.R.Krishna Date : 05/11/2020 PURPOSE : To encrypt and decrypt text based files INPUT : python3 VingenerCipher -i sample_file.txt -e "sample password" OUTPUT : sample_file.txt will be replaced with encrypted data. """ import os import argparse class VigenereCipher(object): def __init__(self, key): print('Vigenere Cipher Encription') self.key = key def encode(self, text): # Based on password every character key = self.key # will be encrypted with different bias ans = '' for index, i in enumerate(text): if(ord('!') <= ord(i) <= ord('~')): index %= len(key) if(ord(i) + ord(key[index]) - ord('!') > ord('~')): ans += (chr(ord('!') + (ord(i) + ord(key[index]) - ord('!')) % ord('~') - 1)) else: ans += (chr(ord(i) + ord(key[index]) - ord('!'))) else: ans += i return ans def decode(self, text): # Based on password every character key = self.key # will be decrypted with different bias ans = '' for index, i in enumerate(text): if(ord('!') <= ord(i) <= ord('~')): index %= len(key) if((ord('!') + ord(i) - ord(key[index])) < ord('!')): ans += (chr(ord('~') + (ord(i) - ord(key[index])) + 1)) else: ans += (chr(ord('!') + ord(i) - ord(key[index]))) else: ans += i return ans def read_from_file(file_name): f = open(file_name, 'r') data = f.read() f.close() return data def write_to_file(file_name, data): f = open(file_name, 'w') data = f.write(data) f.close() def encode_from_file(file_name, obj): data = 
read_from_file(file_name) for _ in range(args.strength): data = obj.encode(data) write_to_file(file_name, data) # Replaces file with encrypted data print('encode file -> ' + file_name) def decode_from_file(file_name, obj): data = read_from_file(file_name) for _ in range(args.strength): data = obj.decode(data) write_to_file(file_name, data) # Replaces file with decrypted data print('decode file -> ' + file_name) def encription_form_path(PATH, obj): # Recursive function (MT-safe) try: for path in os.listdir(PATH): encription_form_path(PATH + '/' + path, obj) except(OSError): if(args.encode): encode_from_file(PATH, obj) elif(args.decode): decode_from_file(PATH, obj) """ input can be either -i file / -f folder, encode -e, decode -d for encryption and decryption respectively, strength -s indicates number of times to be encrypted / decrypted. """ parser = argparse.ArgumentParser('Description of your program') parser.add_argument('-i', '--input_file', help='input file name', required=False) parser.add_argument('-e', '--encode', help='encode password', required=False) parser.add_argument('-d', '--decode', help='decode password', required=False) parser.add_argument('-f', '--folder', help='folder name', required=False) parser.add_argument('-s', '--strength', help='encription strength', type=int, default=1, required=False) args = (parser.parse_args()) if(args.input_file): PATH = args.input_file elif(args.folder): PATH = args.folder else: exit('Need --input_file or --folder\nUse -h for help') if(args.encode): pswd = args.encode elif(args.decode): pswd = args.decode else: exit('Need --encode or --decode\nUse -h for help') obj = VigenereCipher(pswd) encription_form_path(PATH, obj)
flexible
{ "blob_id": "38906a31ab96e05a9e55a51260632538872ed463", "index": 6889, "step-1": "<mask token>\n\n\nclass VigenereCipher(object):\n\n def __init__(self, key):\n print('Vigenere Cipher Encription')\n self.key = key\n\n def encode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord(i) + ord(key[index]) - ord('!') > ord('~'):\n ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord(\n '!')) % ord('~') - 1)\n else:\n ans += chr(ord(i) + ord(key[index]) - ord('!'))\n else:\n ans += i\n return ans\n\n def decode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord('!') + ord(i) - ord(key[index]) < ord('!'):\n ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1)\n else:\n ans += chr(ord('!') + ord(i) - ord(key[index]))\n else:\n ans += i\n return ans\n\n\n<mask token>\n\n\ndef write_to_file(file_name, data):\n f = open(file_name, 'w')\n data = f.write(data)\n f.close()\n\n\ndef encode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.encode(data)\n write_to_file(file_name, data)\n print('encode file -> ' + file_name)\n\n\ndef decode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.decode(data)\n write_to_file(file_name, data)\n print('decode file -> ' + file_name)\n\n\ndef encription_form_path(PATH, obj):\n try:\n for path in os.listdir(PATH):\n encription_form_path(PATH + '/' + path, obj)\n except OSError:\n if args.encode:\n encode_from_file(PATH, obj)\n elif args.decode:\n decode_from_file(PATH, obj)\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass VigenereCipher(object):\n\n def __init__(self, key):\n print('Vigenere Cipher Encription')\n self.key = key\n\n def encode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= 
ord('~'):\n index %= len(key)\n if ord(i) + ord(key[index]) - ord('!') > ord('~'):\n ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord(\n '!')) % ord('~') - 1)\n else:\n ans += chr(ord(i) + ord(key[index]) - ord('!'))\n else:\n ans += i\n return ans\n\n def decode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord('!') + ord(i) - ord(key[index]) < ord('!'):\n ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1)\n else:\n ans += chr(ord('!') + ord(i) - ord(key[index]))\n else:\n ans += i\n return ans\n\n\ndef read_from_file(file_name):\n f = open(file_name, 'r')\n data = f.read()\n f.close()\n return data\n\n\ndef write_to_file(file_name, data):\n f = open(file_name, 'w')\n data = f.write(data)\n f.close()\n\n\ndef encode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.encode(data)\n write_to_file(file_name, data)\n print('encode file -> ' + file_name)\n\n\ndef decode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.decode(data)\n write_to_file(file_name, data)\n print('decode file -> ' + file_name)\n\n\ndef encription_form_path(PATH, obj):\n try:\n for path in os.listdir(PATH):\n encription_form_path(PATH + '/' + path, obj)\n except OSError:\n if args.encode:\n encode_from_file(PATH, obj)\n elif args.decode:\n decode_from_file(PATH, obj)\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass VigenereCipher(object):\n\n def __init__(self, key):\n print('Vigenere Cipher Encription')\n self.key = key\n\n def encode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord(i) + ord(key[index]) - ord('!') > ord('~'):\n ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord(\n '!')) % ord('~') - 1)\n else:\n ans += chr(ord(i) + ord(key[index]) - ord('!'))\n else:\n ans += i\n 
return ans\n\n def decode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord('!') + ord(i) - ord(key[index]) < ord('!'):\n ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1)\n else:\n ans += chr(ord('!') + ord(i) - ord(key[index]))\n else:\n ans += i\n return ans\n\n\ndef read_from_file(file_name):\n f = open(file_name, 'r')\n data = f.read()\n f.close()\n return data\n\n\ndef write_to_file(file_name, data):\n f = open(file_name, 'w')\n data = f.write(data)\n f.close()\n\n\ndef encode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.encode(data)\n write_to_file(file_name, data)\n print('encode file -> ' + file_name)\n\n\ndef decode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.decode(data)\n write_to_file(file_name, data)\n print('decode file -> ' + file_name)\n\n\ndef encription_form_path(PATH, obj):\n try:\n for path in os.listdir(PATH):\n encription_form_path(PATH + '/' + path, obj)\n except OSError:\n if args.encode:\n encode_from_file(PATH, obj)\n elif args.decode:\n decode_from_file(PATH, obj)\n\n\n<mask token>\nparser = argparse.ArgumentParser('Description of your program')\nparser.add_argument('-i', '--input_file', help='input file name', required=\n False)\nparser.add_argument('-e', '--encode', help='encode password', required=False)\nparser.add_argument('-d', '--decode', help='decode password', required=False)\nparser.add_argument('-f', '--folder', help='folder name', required=False)\nparser.add_argument('-s', '--strength', help='encription strength', type=\n int, default=1, required=False)\nargs = parser.parse_args()\nif args.input_file:\n PATH = args.input_file\nelif args.folder:\n PATH = args.folder\nelse:\n exit('Need --input_file or --folder\\nUse -h for help')\nif args.encode:\n pswd = args.encode\nelif args.decode:\n pswd = 
args.decode\nelse:\n exit('Need --encode or --decode\\nUse -h for help')\nobj = VigenereCipher(pswd)\nencription_form_path(PATH, obj)\n", "step-4": "<mask token>\nimport os\nimport argparse\n\n\nclass VigenereCipher(object):\n\n def __init__(self, key):\n print('Vigenere Cipher Encription')\n self.key = key\n\n def encode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord(i) + ord(key[index]) - ord('!') > ord('~'):\n ans += chr(ord('!') + (ord(i) + ord(key[index]) - ord(\n '!')) % ord('~') - 1)\n else:\n ans += chr(ord(i) + ord(key[index]) - ord('!'))\n else:\n ans += i\n return ans\n\n def decode(self, text):\n key = self.key\n ans = ''\n for index, i in enumerate(text):\n if ord('!') <= ord(i) <= ord('~'):\n index %= len(key)\n if ord('!') + ord(i) - ord(key[index]) < ord('!'):\n ans += chr(ord('~') + (ord(i) - ord(key[index])) + 1)\n else:\n ans += chr(ord('!') + ord(i) - ord(key[index]))\n else:\n ans += i\n return ans\n\n\ndef read_from_file(file_name):\n f = open(file_name, 'r')\n data = f.read()\n f.close()\n return data\n\n\ndef write_to_file(file_name, data):\n f = open(file_name, 'w')\n data = f.write(data)\n f.close()\n\n\ndef encode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.encode(data)\n write_to_file(file_name, data)\n print('encode file -> ' + file_name)\n\n\ndef decode_from_file(file_name, obj):\n data = read_from_file(file_name)\n for _ in range(args.strength):\n data = obj.decode(data)\n write_to_file(file_name, data)\n print('decode file -> ' + file_name)\n\n\ndef encription_form_path(PATH, obj):\n try:\n for path in os.listdir(PATH):\n encription_form_path(PATH + '/' + path, obj)\n except OSError:\n if args.encode:\n encode_from_file(PATH, obj)\n elif args.decode:\n decode_from_file(PATH, obj)\n\n\n<mask token>\nparser = argparse.ArgumentParser('Description of your 
program')\nparser.add_argument('-i', '--input_file', help='input file name', required=\n False)\nparser.add_argument('-e', '--encode', help='encode password', required=False)\nparser.add_argument('-d', '--decode', help='decode password', required=False)\nparser.add_argument('-f', '--folder', help='folder name', required=False)\nparser.add_argument('-s', '--strength', help='encription strength', type=\n int, default=1, required=False)\nargs = parser.parse_args()\nif args.input_file:\n PATH = args.input_file\nelif args.folder:\n PATH = args.folder\nelse:\n exit('Need --input_file or --folder\\nUse -h for help')\nif args.encode:\n pswd = args.encode\nelif args.decode:\n pswd = args.decode\nelse:\n exit('Need --encode or --decode\\nUse -h for help')\nobj = VigenereCipher(pswd)\nencription_form_path(PATH, obj)\n", "step-5": "#!/usr/bin/env python3\r\n# coding: utf-8\r\n\r\n\r\n\"\"\"\r\n Blaise de Vigenère (1523–1596) mathematician, developed encryption scheme,\r\n VigenereCipher algorithm is implemented based on his work, with a utility\r\n of relative strength index for encryption and decryption.\r\n\r\n VERSION : 1.0\r\n LICENSE : GNU GPLv3\r\n STYLE : PEP 8\r\n AUTHOR : AKULA.S.S.S.R.Krishna\r\n Date : 05/11/2020\r\n\r\n PURPOSE : To encrypt and decrypt text based files\r\n INPUT : python3 VingenerCipher -i sample_file.txt -e \"sample password\"\r\n OUTPUT : sample_file.txt will be replaced with encrypted data.\r\n\"\"\"\r\n\r\n\r\nimport os\r\nimport argparse\r\n\r\n\r\nclass VigenereCipher(object):\r\n def __init__(self, key):\r\n print('Vigenere Cipher Encription')\r\n self.key = key\r\n\r\n def encode(self, text): # Based on password every character\r\n key = self.key # will be encrypted with different bias\r\n ans = ''\r\n for index, i in enumerate(text):\r\n if(ord('!') <= ord(i) <= ord('~')):\r\n index %= len(key)\r\n if(ord(i) + ord(key[index]) - ord('!') > ord('~')):\r\n ans += (chr(ord('!') + (ord(i) + ord(key[index])\r\n - ord('!')) % ord('~') - 1))\r\n 
else:\r\n ans += (chr(ord(i) + ord(key[index]) - ord('!')))\r\n else:\r\n ans += i\r\n return ans\r\n\r\n def decode(self, text): # Based on password every character\r\n key = self.key # will be decrypted with different bias\r\n ans = ''\r\n for index, i in enumerate(text):\r\n if(ord('!') <= ord(i) <= ord('~')):\r\n index %= len(key)\r\n if((ord('!') + ord(i) - ord(key[index])) < ord('!')):\r\n ans += (chr(ord('~') + (ord(i) - ord(key[index])) + 1))\r\n else:\r\n ans += (chr(ord('!') + ord(i) - ord(key[index])))\r\n else:\r\n ans += i\r\n return ans\r\n\r\n\r\ndef read_from_file(file_name):\r\n f = open(file_name, 'r')\r\n data = f.read()\r\n f.close()\r\n return data\r\n\r\n\r\ndef write_to_file(file_name, data):\r\n f = open(file_name, 'w')\r\n data = f.write(data)\r\n f.close()\r\n\r\n\r\ndef encode_from_file(file_name, obj):\r\n data = read_from_file(file_name)\r\n for _ in range(args.strength):\r\n data = obj.encode(data)\r\n write_to_file(file_name, data) # Replaces file with encrypted data\r\n print('encode file -> ' + file_name)\r\n\r\n\r\ndef decode_from_file(file_name, obj):\r\n data = read_from_file(file_name)\r\n for _ in range(args.strength):\r\n data = obj.decode(data)\r\n write_to_file(file_name, data) # Replaces file with decrypted data\r\n print('decode file -> ' + file_name)\r\n\r\n\r\ndef encription_form_path(PATH, obj): # Recursive function (MT-safe)\r\n try:\r\n for path in os.listdir(PATH):\r\n encription_form_path(PATH + '/' + path, obj)\r\n except(OSError):\r\n if(args.encode):\r\n encode_from_file(PATH, obj)\r\n elif(args.decode):\r\n decode_from_file(PATH, obj)\r\n\r\n\r\n\"\"\"\r\n input can be either -i file / -f folder,\r\n encode -e, decode -d for encryption and decryption respectively,\r\n strength -s indicates number of times to be encrypted / decrypted.\r\n\r\n\"\"\"\r\n\r\n\r\nparser = argparse.ArgumentParser('Description of your program')\r\nparser.add_argument('-i', '--input_file',\r\n help='input file name', 
required=False)\r\nparser.add_argument('-e', '--encode',\r\n help='encode password', required=False)\r\nparser.add_argument('-d', '--decode',\r\n help='decode password', required=False)\r\nparser.add_argument('-f', '--folder',\r\n help='folder name', required=False)\r\nparser.add_argument('-s', '--strength',\r\n help='encription strength', type=int,\r\n default=1, required=False)\r\nargs = (parser.parse_args())\r\n\r\nif(args.input_file):\r\n PATH = args.input_file\r\nelif(args.folder):\r\n PATH = args.folder\r\nelse:\r\n exit('Need --input_file or --folder\\nUse -h for help')\r\n\r\nif(args.encode):\r\n pswd = args.encode\r\nelif(args.decode):\r\n pswd = args.decode\r\nelse:\r\n exit('Need --encode or --decode\\nUse -h for help')\r\n\r\n\r\nobj = VigenereCipher(pswd)\r\nencription_form_path(PATH, obj)\r\n", "step-ids": [ 8, 9, 11, 12, 13 ] }
[ 8, 9, 11, 12, 13 ]
# coding: utf-8 # In[1]: #coding:utf8 import matplotlib import os if 'DISPLAY' not in os.environ: matplotlib.use('Agg') else: pass import torch import torch.nn as nn from torch.autograd import Variable import torch.optim as optim from matplotlib import pyplot as plt import seaborn as sns from tqdm import tqdm import copy from utils import Predicate,Clause,KnowledgeBase, Propositional from utils import load_knowledge_base,load_propositional from models import LTN import pickle import numpy as np import seaborn as sns sns.set(style="white", context="talk") # In[2]: def get_accuracy(model,kb): results=[] for clause in kb.clauses: o1,o2=model.forward(clause) if o2.data.numpy()[0][0]>0.9: results.append(1.0) else: results.append(0.0) return sum(results)/len(kb.clauses) # In[3]: def test_model(model,kb1, kb2,filename): kb_train=kb1.union(kb2) optimizor=torch.optim.Adam(model.parameters(),lr=0.001) mone=torch.FloatTensor([-1]) one=torch.FloatTensor([1]) average_prob=[] averate_loss=[] best_accuracy1=0.0 best_accuracy2=0.0 for i in tqdm(range(1000)): optimizor.zero_grad() total_probability=0.0 total_loss=0.0 for clause in kb_train.clauses: loss,prob=model.forward(clause=clause) loss.backward(one) total_probability+=prob.data.numpy()[0] total_loss+=loss.data.numpy()[0] optimizor.step() average_prob.append(total_probability/len(kb_train.clauses)) averate_loss.append(total_loss/len(kb_train.clauses)) accuracy1=get_accuracy(model,kb1) accuracy2=get_accuracy(model,kb2) if accuracy1+accuracy2>best_accuracy1+best_accuracy2: best_accuracy1=accuracy1 best_accuracy2=accuracy2 pickle.dump((average_prob,averate_loss,best_accuracy1,best_accuracy2), open("./results/%s"%filename, "wb" )) # In[4]: kb1=load_knowledge_base('./facts1.txt') kb2=load_knowledge_base('./facts2.txt') propositionals=load_propositional('./knowledge.txt') gkbs1=[] for p in propositionals: gkbs1.append(p.generate_knowledge_base('abcdefgh',change_weight=False)) gkb1=gkbs1[0] for tkb in gkbs1[1:]: gkb1=gkb1.union(tkb) 
gkbs2=[] for p in propositionals: gkbs2.append(p.generate_knowledge_base('ijklmn',change_weight=False)) gkb2=gkbs2[0] for tkb in gkbs2[1:]: gkb2=gkb2.union(tkb) gkbs3=[] for p in propositionals: gkbs3.append(p.generate_knowledge_base('abcdefgh',change_weight=True)) gkb3=gkbs3[0] for tkb in gkbs3[1:]: gkb3=gkb3.union(tkb) gkbs4=[] for p in propositionals: gkbs4.append(p.generate_knowledge_base('ijklmn',change_weight=True)) gkb4=gkbs4[0] for tkb in gkbs4[1:]: gkb4=gkb4.union(tkb) # In[5]: emb_dim=50 # In[6]: emb_dim_range=list(range(10,20,5))+list(range(20,101,20)) emb_dim_range=list(range(160,161,20)) # In[ ]: for emb_dim in emb_dim_range: test_model( model=LTN(emb_dim,'abcdefghijklmn',[['S',1],['F',2],['C',1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4), filename='LTN_Learn_emb_dim=%d.pkl'%(emb_dim) ) # In[80]: accuracys1=[] accuracys2=[] for emb_dim in emb_dim_range: prob,loss,first,second=pickle.load(open('./results/LTN_Learn_emb_dim=%d.pkl'%(emb_dim),'rb')) accuracys1.append(first) accuracys2.append(second) plt.plot(emb_dim_range,accuracys1,label='Group1') plt.plot(emb_dim_range,accuracys2,label='Group2') plt.legend() plt.xlabel('Vector Length') plt.ylabel('Accuracy') plt.savefig('./Report/img/curve4.pdf') plt.show()
normal
{ "blob_id": "3022cade3bfa36925bcbda8023e5cd98ed33d093", "index": 9901, "step-1": "<mask token>\n\n\ndef get_accuracy(model, kb):\n results = []\n for clause in kb.clauses:\n o1, o2 = model.forward(clause)\n if o2.data.numpy()[0][0] > 0.9:\n results.append(1.0)\n else:\n results.append(0.0)\n return sum(results) / len(kb.clauses)\n\n\ndef test_model(model, kb1, kb2, filename):\n kb_train = kb1.union(kb2)\n optimizor = torch.optim.Adam(model.parameters(), lr=0.001)\n mone = torch.FloatTensor([-1])\n one = torch.FloatTensor([1])\n average_prob = []\n averate_loss = []\n best_accuracy1 = 0.0\n best_accuracy2 = 0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability = 0.0\n total_loss = 0.0\n for clause in kb_train.clauses:\n loss, prob = model.forward(clause=clause)\n loss.backward(one)\n total_probability += prob.data.numpy()[0]\n total_loss += loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability / len(kb_train.clauses))\n averate_loss.append(total_loss / len(kb_train.clauses))\n accuracy1 = get_accuracy(model, kb1)\n accuracy2 = get_accuracy(model, kb2)\n if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:\n best_accuracy1 = accuracy1\n best_accuracy2 = accuracy2\n pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2\n ), open('./results/%s' % filename, 'wb'))\n\n\n<mask token>\n", "step-2": "<mask token>\nif 'DISPLAY' not in os.environ:\n matplotlib.use('Agg')\nelse:\n pass\n<mask token>\nsns.set(style='white', context='talk')\n\n\ndef get_accuracy(model, kb):\n results = []\n for clause in kb.clauses:\n o1, o2 = model.forward(clause)\n if o2.data.numpy()[0][0] > 0.9:\n results.append(1.0)\n else:\n results.append(0.0)\n return sum(results) / len(kb.clauses)\n\n\ndef test_model(model, kb1, kb2, filename):\n kb_train = kb1.union(kb2)\n optimizor = torch.optim.Adam(model.parameters(), lr=0.001)\n mone = torch.FloatTensor([-1])\n one = torch.FloatTensor([1])\n average_prob = []\n averate_loss 
= []\n best_accuracy1 = 0.0\n best_accuracy2 = 0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability = 0.0\n total_loss = 0.0\n for clause in kb_train.clauses:\n loss, prob = model.forward(clause=clause)\n loss.backward(one)\n total_probability += prob.data.numpy()[0]\n total_loss += loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability / len(kb_train.clauses))\n averate_loss.append(total_loss / len(kb_train.clauses))\n accuracy1 = get_accuracy(model, kb1)\n accuracy2 = get_accuracy(model, kb2)\n if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:\n best_accuracy1 = accuracy1\n best_accuracy2 = accuracy2\n pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2\n ), open('./results/%s' % filename, 'wb'))\n\n\n<mask token>\nfor p in propositionals:\n gkbs1.append(p.generate_knowledge_base('abcdefgh', change_weight=False))\n<mask token>\nfor tkb in gkbs1[1:]:\n gkb1 = gkb1.union(tkb)\n<mask token>\nfor p in propositionals:\n gkbs2.append(p.generate_knowledge_base('ijklmn', change_weight=False))\n<mask token>\nfor tkb in gkbs2[1:]:\n gkb2 = gkb2.union(tkb)\n<mask token>\nfor p in propositionals:\n gkbs3.append(p.generate_knowledge_base('abcdefgh', change_weight=True))\n<mask token>\nfor tkb in gkbs3[1:]:\n gkb3 = gkb3.union(tkb)\n<mask token>\nfor p in propositionals:\n gkbs4.append(p.generate_knowledge_base('ijklmn', change_weight=True))\n<mask token>\nfor tkb in gkbs4[1:]:\n gkb4 = gkb4.union(tkb)\n<mask token>\nfor emb_dim in emb_dim_range:\n test_model(model=LTN(emb_dim, 'abcdefghijklmn', [['S', 1], ['F', 2], [\n 'C', 1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4),\n filename='LTN_Learn_emb_dim=%d.pkl' % emb_dim)\n<mask token>\nfor emb_dim in emb_dim_range:\n prob, loss, first, second = pickle.load(open(\n './results/LTN_Learn_emb_dim=%d.pkl' % emb_dim, 'rb'))\n accuracys1.append(first)\n accuracys2.append(second)\nplt.plot(emb_dim_range, accuracys1, 
label='Group1')\nplt.plot(emb_dim_range, accuracys2, label='Group2')\nplt.legend()\nplt.xlabel('Vector Length')\nplt.ylabel('Accuracy')\nplt.savefig('./Report/img/curve4.pdf')\nplt.show()\n", "step-3": "<mask token>\nif 'DISPLAY' not in os.environ:\n matplotlib.use('Agg')\nelse:\n pass\n<mask token>\nsns.set(style='white', context='talk')\n\n\ndef get_accuracy(model, kb):\n results = []\n for clause in kb.clauses:\n o1, o2 = model.forward(clause)\n if o2.data.numpy()[0][0] > 0.9:\n results.append(1.0)\n else:\n results.append(0.0)\n return sum(results) / len(kb.clauses)\n\n\ndef test_model(model, kb1, kb2, filename):\n kb_train = kb1.union(kb2)\n optimizor = torch.optim.Adam(model.parameters(), lr=0.001)\n mone = torch.FloatTensor([-1])\n one = torch.FloatTensor([1])\n average_prob = []\n averate_loss = []\n best_accuracy1 = 0.0\n best_accuracy2 = 0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability = 0.0\n total_loss = 0.0\n for clause in kb_train.clauses:\n loss, prob = model.forward(clause=clause)\n loss.backward(one)\n total_probability += prob.data.numpy()[0]\n total_loss += loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability / len(kb_train.clauses))\n averate_loss.append(total_loss / len(kb_train.clauses))\n accuracy1 = get_accuracy(model, kb1)\n accuracy2 = get_accuracy(model, kb2)\n if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:\n best_accuracy1 = accuracy1\n best_accuracy2 = accuracy2\n pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2\n ), open('./results/%s' % filename, 'wb'))\n\n\nkb1 = load_knowledge_base('./facts1.txt')\nkb2 = load_knowledge_base('./facts2.txt')\npropositionals = load_propositional('./knowledge.txt')\ngkbs1 = []\nfor p in propositionals:\n gkbs1.append(p.generate_knowledge_base('abcdefgh', change_weight=False))\ngkb1 = gkbs1[0]\nfor tkb in gkbs1[1:]:\n gkb1 = gkb1.union(tkb)\ngkbs2 = []\nfor p in propositionals:\n 
gkbs2.append(p.generate_knowledge_base('ijklmn', change_weight=False))\ngkb2 = gkbs2[0]\nfor tkb in gkbs2[1:]:\n gkb2 = gkb2.union(tkb)\ngkbs3 = []\nfor p in propositionals:\n gkbs3.append(p.generate_knowledge_base('abcdefgh', change_weight=True))\ngkb3 = gkbs3[0]\nfor tkb in gkbs3[1:]:\n gkb3 = gkb3.union(tkb)\ngkbs4 = []\nfor p in propositionals:\n gkbs4.append(p.generate_knowledge_base('ijklmn', change_weight=True))\ngkb4 = gkbs4[0]\nfor tkb in gkbs4[1:]:\n gkb4 = gkb4.union(tkb)\nemb_dim = 50\nemb_dim_range = list(range(10, 20, 5)) + list(range(20, 101, 20))\nemb_dim_range = list(range(160, 161, 20))\nfor emb_dim in emb_dim_range:\n test_model(model=LTN(emb_dim, 'abcdefghijklmn', [['S', 1], ['F', 2], [\n 'C', 1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4),\n filename='LTN_Learn_emb_dim=%d.pkl' % emb_dim)\naccuracys1 = []\naccuracys2 = []\nfor emb_dim in emb_dim_range:\n prob, loss, first, second = pickle.load(open(\n './results/LTN_Learn_emb_dim=%d.pkl' % emb_dim, 'rb'))\n accuracys1.append(first)\n accuracys2.append(second)\nplt.plot(emb_dim_range, accuracys1, label='Group1')\nplt.plot(emb_dim_range, accuracys2, label='Group2')\nplt.legend()\nplt.xlabel('Vector Length')\nplt.ylabel('Accuracy')\nplt.savefig('./Report/img/curve4.pdf')\nplt.show()\n", "step-4": "import matplotlib\nimport os\nif 'DISPLAY' not in os.environ:\n matplotlib.use('Agg')\nelse:\n pass\nimport torch\nimport torch.nn as nn\nfrom torch.autograd import Variable\nimport torch.optim as optim\nfrom matplotlib import pyplot as plt\nimport seaborn as sns\nfrom tqdm import tqdm\nimport copy\nfrom utils import Predicate, Clause, KnowledgeBase, Propositional\nfrom utils import load_knowledge_base, load_propositional\nfrom models import LTN\nimport pickle\nimport numpy as np\nimport seaborn as sns\nsns.set(style='white', context='talk')\n\n\ndef get_accuracy(model, kb):\n results = []\n for clause in kb.clauses:\n o1, o2 = model.forward(clause)\n if o2.data.numpy()[0][0] > 0.9:\n 
results.append(1.0)\n else:\n results.append(0.0)\n return sum(results) / len(kb.clauses)\n\n\ndef test_model(model, kb1, kb2, filename):\n kb_train = kb1.union(kb2)\n optimizor = torch.optim.Adam(model.parameters(), lr=0.001)\n mone = torch.FloatTensor([-1])\n one = torch.FloatTensor([1])\n average_prob = []\n averate_loss = []\n best_accuracy1 = 0.0\n best_accuracy2 = 0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability = 0.0\n total_loss = 0.0\n for clause in kb_train.clauses:\n loss, prob = model.forward(clause=clause)\n loss.backward(one)\n total_probability += prob.data.numpy()[0]\n total_loss += loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability / len(kb_train.clauses))\n averate_loss.append(total_loss / len(kb_train.clauses))\n accuracy1 = get_accuracy(model, kb1)\n accuracy2 = get_accuracy(model, kb2)\n if accuracy1 + accuracy2 > best_accuracy1 + best_accuracy2:\n best_accuracy1 = accuracy1\n best_accuracy2 = accuracy2\n pickle.dump((average_prob, averate_loss, best_accuracy1, best_accuracy2\n ), open('./results/%s' % filename, 'wb'))\n\n\nkb1 = load_knowledge_base('./facts1.txt')\nkb2 = load_knowledge_base('./facts2.txt')\npropositionals = load_propositional('./knowledge.txt')\ngkbs1 = []\nfor p in propositionals:\n gkbs1.append(p.generate_knowledge_base('abcdefgh', change_weight=False))\ngkb1 = gkbs1[0]\nfor tkb in gkbs1[1:]:\n gkb1 = gkb1.union(tkb)\ngkbs2 = []\nfor p in propositionals:\n gkbs2.append(p.generate_knowledge_base('ijklmn', change_weight=False))\ngkb2 = gkbs2[0]\nfor tkb in gkbs2[1:]:\n gkb2 = gkb2.union(tkb)\ngkbs3 = []\nfor p in propositionals:\n gkbs3.append(p.generate_knowledge_base('abcdefgh', change_weight=True))\ngkb3 = gkbs3[0]\nfor tkb in gkbs3[1:]:\n gkb3 = gkb3.union(tkb)\ngkbs4 = []\nfor p in propositionals:\n gkbs4.append(p.generate_knowledge_base('ijklmn', change_weight=True))\ngkb4 = gkbs4[0]\nfor tkb in gkbs4[1:]:\n gkb4 = gkb4.union(tkb)\nemb_dim = 50\nemb_dim_range = 
list(range(10, 20, 5)) + list(range(20, 101, 20))\nemb_dim_range = list(range(160, 161, 20))\nfor emb_dim in emb_dim_range:\n test_model(model=LTN(emb_dim, 'abcdefghijklmn', [['S', 1], ['F', 2], [\n 'C', 1]], CLTN=True), kb1=kb1.union(gkb3), kb2=kb2.union(gkb4),\n filename='LTN_Learn_emb_dim=%d.pkl' % emb_dim)\naccuracys1 = []\naccuracys2 = []\nfor emb_dim in emb_dim_range:\n prob, loss, first, second = pickle.load(open(\n './results/LTN_Learn_emb_dim=%d.pkl' % emb_dim, 'rb'))\n accuracys1.append(first)\n accuracys2.append(second)\nplt.plot(emb_dim_range, accuracys1, label='Group1')\nplt.plot(emb_dim_range, accuracys2, label='Group2')\nplt.legend()\nplt.xlabel('Vector Length')\nplt.ylabel('Accuracy')\nplt.savefig('./Report/img/curve4.pdf')\nplt.show()\n", "step-5": "\n# coding: utf-8\n\n# In[1]:\n\n\n#coding:utf8\nimport matplotlib\nimport os\nif 'DISPLAY' not in os.environ:\n matplotlib.use('Agg')\nelse:\n pass\nimport torch\nimport torch.nn as nn\nfrom torch.autograd import Variable\nimport torch.optim as optim\nfrom matplotlib import pyplot as plt\nimport seaborn as sns\nfrom tqdm import tqdm\nimport copy\nfrom utils import Predicate,Clause,KnowledgeBase, Propositional\nfrom utils import load_knowledge_base,load_propositional\nfrom models import LTN\nimport pickle\nimport numpy as np\nimport seaborn as sns\nsns.set(style=\"white\", context=\"talk\")\n\n\n# In[2]:\n\n\ndef get_accuracy(model,kb):\n results=[]\n for clause in kb.clauses:\n o1,o2=model.forward(clause)\n if o2.data.numpy()[0][0]>0.9:\n results.append(1.0)\n else:\n results.append(0.0)\n\n return sum(results)/len(kb.clauses)\n\n\n# In[3]:\n\n\ndef test_model(model,kb1, kb2,filename):\n kb_train=kb1.union(kb2)\n optimizor=torch.optim.Adam(model.parameters(),lr=0.001)\n mone=torch.FloatTensor([-1])\n one=torch.FloatTensor([1])\n average_prob=[]\n averate_loss=[]\n best_accuracy1=0.0\n best_accuracy2=0.0\n for i in tqdm(range(1000)):\n optimizor.zero_grad()\n total_probability=0.0\n total_loss=0.0\n for 
clause in kb_train.clauses:\n loss,prob=model.forward(clause=clause)\n loss.backward(one)\n total_probability+=prob.data.numpy()[0]\n total_loss+=loss.data.numpy()[0]\n optimizor.step()\n average_prob.append(total_probability/len(kb_train.clauses))\n averate_loss.append(total_loss/len(kb_train.clauses))\n accuracy1=get_accuracy(model,kb1)\n accuracy2=get_accuracy(model,kb2)\n if accuracy1+accuracy2>best_accuracy1+best_accuracy2:\n best_accuracy1=accuracy1\n best_accuracy2=accuracy2\n pickle.dump((average_prob,averate_loss,best_accuracy1,best_accuracy2), open(\"./results/%s\"%filename, \"wb\" ))\n\n\n# In[4]:\n\n\nkb1=load_knowledge_base('./facts1.txt')\nkb2=load_knowledge_base('./facts2.txt')\npropositionals=load_propositional('./knowledge.txt')\ngkbs1=[]\nfor p in propositionals:\n gkbs1.append(p.generate_knowledge_base('abcdefgh',change_weight=False))\ngkb1=gkbs1[0]\nfor tkb in gkbs1[1:]:\n gkb1=gkb1.union(tkb)\ngkbs2=[]\nfor p in propositionals:\n gkbs2.append(p.generate_knowledge_base('ijklmn',change_weight=False))\ngkb2=gkbs2[0]\nfor tkb in gkbs2[1:]:\n gkb2=gkb2.union(tkb)\n\ngkbs3=[]\nfor p in propositionals:\n gkbs3.append(p.generate_knowledge_base('abcdefgh',change_weight=True))\ngkb3=gkbs3[0]\nfor tkb in gkbs3[1:]:\n gkb3=gkb3.union(tkb)\ngkbs4=[]\nfor p in propositionals:\n gkbs4.append(p.generate_knowledge_base('ijklmn',change_weight=True))\ngkb4=gkbs4[0]\nfor tkb in gkbs4[1:]:\n gkb4=gkb4.union(tkb)\n\n\n# In[5]:\n\n\nemb_dim=50\n\n\n# In[6]:\n\n\nemb_dim_range=list(range(10,20,5))+list(range(20,101,20))\nemb_dim_range=list(range(160,161,20))\n\n\n# In[ ]:\n\n\nfor emb_dim in emb_dim_range:\n test_model(\n model=LTN(emb_dim,'abcdefghijklmn',[['S',1],['F',2],['C',1]], CLTN=True),\n kb1=kb1.union(gkb3),\n kb2=kb2.union(gkb4),\n filename='LTN_Learn_emb_dim=%d.pkl'%(emb_dim)\n )\n\n\n# In[80]:\n\n\naccuracys1=[]\naccuracys2=[]\nfor emb_dim in emb_dim_range:\n prob,loss,first,second=pickle.load(open('./results/LTN_Learn_emb_dim=%d.pkl'%(emb_dim),'rb'))\n 
accuracys1.append(first)\n accuracys2.append(second)\nplt.plot(emb_dim_range,accuracys1,label='Group1')\nplt.plot(emb_dim_range,accuracys2,label='Group2')\nplt.legend()\nplt.xlabel('Vector Length')\nplt.ylabel('Accuracy')\nplt.savefig('./Report/img/curve4.pdf')\nplt.show()\n\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
<|reserved_special_token_0|> class ghetto(GridLayout): <|reserved_special_token_0|> def biyoCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09' ) def edebCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09' ) def kimyaCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09' ) <|reserved_special_token_0|> <|reserved_special_token_0|> def bilisiCallback(self, a): webbrowser.open_new('https://us02web.zoom.us/j/3469922894') def muzCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09' ) def ingCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09' ) def felCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09' ) def __init__(self, **kwargs): super(ghetto, self).__init__(**kwargs) self.cols = 2 self.btn1 = Button(text='MATEMATİK') self.btn1.bind(on_press=self.matCallback) self.btn2 = Button(text='KİMYA') self.btn2.bind(on_press=self.kimyaCallback) self.btn3 = Button(text='BİYOLOJİ') self.btn3.bind(on_press=self.biyoCallback) self.btn4 = Button(text='FELSEFE') self.btn4.bind(on_press=self.felCallback) self.btn6 = Button(text='EDEBİYAT') self.btn6.bind(on_press=self.edebCallback) self.btn7 = Button(text='BİLİŞİM') self.btn7.bind(on_press=self.bilisiCallback) self.btn5 = Button(text='TARİH') self.btn5.bind(on_press=self.tarihCallback) self.btn8 = Button(text='MÜZİK') self.btn8.bind(on_press=self.muzCallback) self.btn9 = Button(text='İNGİLİZCE') self.btn9.bind(on_press=self.ingCallback) self.btn10 = Button(text='COĞRAFYA') self.btn10.bind(on_press=self.cogCallback) self.add_widget(self.btn10) self.add_widget(self.btn1) self.add_widget(self.btn2) self.add_widget(self.btn3) self.add_widget(self.btn4) 
self.add_widget(self.btn5) self.add_widget(self.btn6) self.add_widget(self.btn7) self.add_widget(self.btn8) self.add_widget(self.btn9) class main(App): def build(self): return ghetto() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class ghetto(GridLayout): def matCallback(self, a): webbrowser.open_new( 'https://us05web.zoom.us/j/2688374138?pwd=ekJpMnJsdWkyTWdGcE0zMEZzdjFydz09' ) def biyoCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09' ) def edebCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09' ) def kimyaCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09' ) <|reserved_special_token_0|> <|reserved_special_token_0|> def bilisiCallback(self, a): webbrowser.open_new('https://us02web.zoom.us/j/3469922894') def muzCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09' ) def ingCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09' ) def felCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09' ) def __init__(self, **kwargs): super(ghetto, self).__init__(**kwargs) self.cols = 2 self.btn1 = Button(text='MATEMATİK') self.btn1.bind(on_press=self.matCallback) self.btn2 = Button(text='KİMYA') self.btn2.bind(on_press=self.kimyaCallback) self.btn3 = Button(text='BİYOLOJİ') self.btn3.bind(on_press=self.biyoCallback) self.btn4 = Button(text='FELSEFE') self.btn4.bind(on_press=self.felCallback) self.btn6 = Button(text='EDEBİYAT') self.btn6.bind(on_press=self.edebCallback) self.btn7 = Button(text='BİLİŞİM') self.btn7.bind(on_press=self.bilisiCallback) self.btn5 = Button(text='TARİH') self.btn5.bind(on_press=self.tarihCallback) self.btn8 = Button(text='MÜZİK') 
self.btn8.bind(on_press=self.muzCallback) self.btn9 = Button(text='İNGİLİZCE') self.btn9.bind(on_press=self.ingCallback) self.btn10 = Button(text='COĞRAFYA') self.btn10.bind(on_press=self.cogCallback) self.add_widget(self.btn10) self.add_widget(self.btn1) self.add_widget(self.btn2) self.add_widget(self.btn3) self.add_widget(self.btn4) self.add_widget(self.btn5) self.add_widget(self.btn6) self.add_widget(self.btn7) self.add_widget(self.btn8) self.add_widget(self.btn9) class main(App): def build(self): return ghetto() <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class ghetto(GridLayout): def matCallback(self, a): webbrowser.open_new( 'https://us05web.zoom.us/j/2688374138?pwd=ekJpMnJsdWkyTWdGcE0zMEZzdjFydz09' ) def biyoCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09' ) def edebCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09' ) def kimyaCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09' ) def tarihCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/7045543550?pwd=yPBZGImZndgSF-Mj4JRTaFTq2Oh94Bs' ) def cogCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/6832847624?pwd=TzhNUzlFNHM2K3FpR09nVHhCaFZPQT09' ) def bilisiCallback(self, a): webbrowser.open_new('https://us02web.zoom.us/j/3469922894') def muzCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09' ) def ingCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09' ) def felCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09' ) def __init__(self, **kwargs): super(ghetto, self).__init__(**kwargs) self.cols = 2 self.btn1 = Button(text='MATEMATİK') 
self.btn1.bind(on_press=self.matCallback) self.btn2 = Button(text='KİMYA') self.btn2.bind(on_press=self.kimyaCallback) self.btn3 = Button(text='BİYOLOJİ') self.btn3.bind(on_press=self.biyoCallback) self.btn4 = Button(text='FELSEFE') self.btn4.bind(on_press=self.felCallback) self.btn6 = Button(text='EDEBİYAT') self.btn6.bind(on_press=self.edebCallback) self.btn7 = Button(text='BİLİŞİM') self.btn7.bind(on_press=self.bilisiCallback) self.btn5 = Button(text='TARİH') self.btn5.bind(on_press=self.tarihCallback) self.btn8 = Button(text='MÜZİK') self.btn8.bind(on_press=self.muzCallback) self.btn9 = Button(text='İNGİLİZCE') self.btn9.bind(on_press=self.ingCallback) self.btn10 = Button(text='COĞRAFYA') self.btn10.bind(on_press=self.cogCallback) self.add_widget(self.btn10) self.add_widget(self.btn1) self.add_widget(self.btn2) self.add_widget(self.btn3) self.add_widget(self.btn4) self.add_widget(self.btn5) self.add_widget(self.btn6) self.add_widget(self.btn7) self.add_widget(self.btn8) self.add_widget(self.btn9) class main(App): def build(self): return ghetto() if __name__ == '__main__': main().run() <|reserved_special_token_1|> from kivy.uix.button import Button from kivy.uix.gridlayout import GridLayout from kivy.uix.floatlayout import FloatLayout from kivy.uix.label import Label from kivy.app import App import webbrowser a = 0.0 b = '?' 
n = 0.0 k = '' g = '' class ghetto(GridLayout): def matCallback(self, a): webbrowser.open_new( 'https://us05web.zoom.us/j/2688374138?pwd=ekJpMnJsdWkyTWdGcE0zMEZzdjFydz09' ) def biyoCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09' ) def edebCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09' ) def kimyaCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09' ) def tarihCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/7045543550?pwd=yPBZGImZndgSF-Mj4JRTaFTq2Oh94Bs' ) def cogCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/6832847624?pwd=TzhNUzlFNHM2K3FpR09nVHhCaFZPQT09' ) def bilisiCallback(self, a): webbrowser.open_new('https://us02web.zoom.us/j/3469922894') def muzCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09' ) def ingCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09' ) def felCallback(self, a): webbrowser.open_new( 'https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09' ) def __init__(self, **kwargs): super(ghetto, self).__init__(**kwargs) self.cols = 2 self.btn1 = Button(text='MATEMATİK') self.btn1.bind(on_press=self.matCallback) self.btn2 = Button(text='KİMYA') self.btn2.bind(on_press=self.kimyaCallback) self.btn3 = Button(text='BİYOLOJİ') self.btn3.bind(on_press=self.biyoCallback) self.btn4 = Button(text='FELSEFE') self.btn4.bind(on_press=self.felCallback) self.btn6 = Button(text='EDEBİYAT') self.btn6.bind(on_press=self.edebCallback) self.btn7 = Button(text='BİLİŞİM') self.btn7.bind(on_press=self.bilisiCallback) self.btn5 = Button(text='TARİH') self.btn5.bind(on_press=self.tarihCallback) self.btn8 = Button(text='MÜZİK') self.btn8.bind(on_press=self.muzCallback) self.btn9 = 
Button(text='İNGİLİZCE') self.btn9.bind(on_press=self.ingCallback) self.btn10 = Button(text='COĞRAFYA') self.btn10.bind(on_press=self.cogCallback) self.add_widget(self.btn10) self.add_widget(self.btn1) self.add_widget(self.btn2) self.add_widget(self.btn3) self.add_widget(self.btn4) self.add_widget(self.btn5) self.add_widget(self.btn6) self.add_widget(self.btn7) self.add_widget(self.btn8) self.add_widget(self.btn9) class main(App): def build(self): return ghetto() if __name__ == '__main__': main().run() <|reserved_special_token_1|> from kivy.uix.button import Button from kivy.uix.gridlayout import GridLayout from kivy.uix.floatlayout import FloatLayout from kivy.uix.label import Label from kivy.app import App import webbrowser a=0.0 b="?" n=0.0 k="" g="" class ghetto(GridLayout): def matCallback(self,a): webbrowser.open_new("https://us05web.zoom.us/j/2688374138?pwd=ekJpMnJsdWkyTWdGcE0zMEZzdjFydz09") def biyoCallback(self,a): webbrowser.open_new("https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09") def edebCallback(self,a): webbrowser.open_new("https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09") def kimyaCallback(self,a): webbrowser.open_new("https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09") def tarihCallback(self,a): webbrowser.open_new("https://us04web.zoom.us/j/7045543550?pwd=yPBZGImZndgSF-Mj4JRTaFTq2Oh94Bs") def cogCallback(self,a): webbrowser.open_new("https://us04web.zoom.us/j/6832847624?pwd=TzhNUzlFNHM2K3FpR09nVHhCaFZPQT09") def bilisiCallback(self,a): webbrowser.open_new("https://us02web.zoom.us/j/3469922894") def muzCallback(self,a): webbrowser.open_new("https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09") def ingCallback(self,a): webbrowser.open_new("https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09") def felCallback(self,a): webbrowser.open_new("https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09") def 
__init__(self,**kwargs): super(ghetto, self).__init__(**kwargs) self.cols = 2 self.btn1 = Button(text='MATEMATİK') self.btn1.bind(on_press=self.matCallback) self.btn2 = Button(text='KİMYA') self.btn2.bind(on_press=self.kimyaCallback) self.btn3 = Button(text='BİYOLOJİ') self.btn3.bind(on_press=self.biyoCallback) self.btn4 = Button(text='FELSEFE') self.btn4.bind(on_press=self.felCallback) self.btn6 = Button(text='EDEBİYAT') self.btn6.bind(on_press=self.edebCallback) self.btn7 = Button(text='BİLİŞİM') self.btn7.bind(on_press=self.bilisiCallback) self.btn5 = Button(text='TARİH') self.btn5.bind(on_press=self.tarihCallback) self.btn8 = Button(text='MÜZİK') self.btn8.bind(on_press=self.muzCallback) self.btn9 = Button(text='İNGİLİZCE') self.btn9.bind(on_press=self.ingCallback) self.btn10 = Button(text='COĞRAFYA') self.btn10.bind(on_press=self.cogCallback) self.add_widget(self.btn10) self.add_widget(self.btn1) self.add_widget(self.btn2) self.add_widget(self.btn3) self.add_widget(self.btn4) self.add_widget(self.btn5) self.add_widget(self.btn6) self.add_widget(self.btn7) self.add_widget(self.btn8) self.add_widget(self.btn9) class main(App): def build(self): return ghetto() if __name__ == "__main__": main().run()
flexible
{ "blob_id": "39affe139eec4cf6877646188839d79ed575235c", "index": 8952, "step-1": "<mask token>\n\n\nclass ghetto(GridLayout):\n <mask token>\n\n def biyoCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09'\n )\n\n def edebCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09'\n )\n\n def kimyaCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09'\n )\n <mask token>\n <mask token>\n\n def bilisiCallback(self, a):\n webbrowser.open_new('https://us02web.zoom.us/j/3469922894')\n\n def muzCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09'\n )\n\n def ingCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09'\n )\n\n def felCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09'\n )\n\n def __init__(self, **kwargs):\n super(ghetto, self).__init__(**kwargs)\n self.cols = 2\n self.btn1 = Button(text='MATEMATİK')\n self.btn1.bind(on_press=self.matCallback)\n self.btn2 = Button(text='KİMYA')\n self.btn2.bind(on_press=self.kimyaCallback)\n self.btn3 = Button(text='BİYOLOJİ')\n self.btn3.bind(on_press=self.biyoCallback)\n self.btn4 = Button(text='FELSEFE')\n self.btn4.bind(on_press=self.felCallback)\n self.btn6 = Button(text='EDEBİYAT')\n self.btn6.bind(on_press=self.edebCallback)\n self.btn7 = Button(text='BİLİŞİM')\n self.btn7.bind(on_press=self.bilisiCallback)\n self.btn5 = Button(text='TARİH')\n self.btn5.bind(on_press=self.tarihCallback)\n self.btn8 = Button(text='MÜZİK')\n self.btn8.bind(on_press=self.muzCallback)\n self.btn9 = Button(text='İNGİLİZCE')\n self.btn9.bind(on_press=self.ingCallback)\n self.btn10 = Button(text='COĞRAFYA')\n self.btn10.bind(on_press=self.cogCallback)\n 
self.add_widget(self.btn10)\n self.add_widget(self.btn1)\n self.add_widget(self.btn2)\n self.add_widget(self.btn3)\n self.add_widget(self.btn4)\n self.add_widget(self.btn5)\n self.add_widget(self.btn6)\n self.add_widget(self.btn7)\n self.add_widget(self.btn8)\n self.add_widget(self.btn9)\n\n\nclass main(App):\n\n def build(self):\n return ghetto()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass ghetto(GridLayout):\n\n def matCallback(self, a):\n webbrowser.open_new(\n 'https://us05web.zoom.us/j/2688374138?pwd=ekJpMnJsdWkyTWdGcE0zMEZzdjFydz09'\n )\n\n def biyoCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09'\n )\n\n def edebCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09'\n )\n\n def kimyaCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09'\n )\n <mask token>\n <mask token>\n\n def bilisiCallback(self, a):\n webbrowser.open_new('https://us02web.zoom.us/j/3469922894')\n\n def muzCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09'\n )\n\n def ingCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09'\n )\n\n def felCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09'\n )\n\n def __init__(self, **kwargs):\n super(ghetto, self).__init__(**kwargs)\n self.cols = 2\n self.btn1 = Button(text='MATEMATİK')\n self.btn1.bind(on_press=self.matCallback)\n self.btn2 = Button(text='KİMYA')\n self.btn2.bind(on_press=self.kimyaCallback)\n self.btn3 = Button(text='BİYOLOJİ')\n self.btn3.bind(on_press=self.biyoCallback)\n self.btn4 = Button(text='FELSEFE')\n self.btn4.bind(on_press=self.felCallback)\n self.btn6 = Button(text='EDEBİYAT')\n 
self.btn6.bind(on_press=self.edebCallback)\n self.btn7 = Button(text='BİLİŞİM')\n self.btn7.bind(on_press=self.bilisiCallback)\n self.btn5 = Button(text='TARİH')\n self.btn5.bind(on_press=self.tarihCallback)\n self.btn8 = Button(text='MÜZİK')\n self.btn8.bind(on_press=self.muzCallback)\n self.btn9 = Button(text='İNGİLİZCE')\n self.btn9.bind(on_press=self.ingCallback)\n self.btn10 = Button(text='COĞRAFYA')\n self.btn10.bind(on_press=self.cogCallback)\n self.add_widget(self.btn10)\n self.add_widget(self.btn1)\n self.add_widget(self.btn2)\n self.add_widget(self.btn3)\n self.add_widget(self.btn4)\n self.add_widget(self.btn5)\n self.add_widget(self.btn6)\n self.add_widget(self.btn7)\n self.add_widget(self.btn8)\n self.add_widget(self.btn9)\n\n\nclass main(App):\n\n def build(self):\n return ghetto()\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass ghetto(GridLayout):\n\n def matCallback(self, a):\n webbrowser.open_new(\n 'https://us05web.zoom.us/j/2688374138?pwd=ekJpMnJsdWkyTWdGcE0zMEZzdjFydz09'\n )\n\n def biyoCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09'\n )\n\n def edebCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09'\n )\n\n def kimyaCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09'\n )\n\n def tarihCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/7045543550?pwd=yPBZGImZndgSF-Mj4JRTaFTq2Oh94Bs'\n )\n\n def cogCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/6832847624?pwd=TzhNUzlFNHM2K3FpR09nVHhCaFZPQT09'\n )\n\n def bilisiCallback(self, a):\n webbrowser.open_new('https://us02web.zoom.us/j/3469922894')\n\n def muzCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09'\n )\n\n def ingCallback(self, a):\n webbrowser.open_new(\n 
'https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09'\n )\n\n def felCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09'\n )\n\n def __init__(self, **kwargs):\n super(ghetto, self).__init__(**kwargs)\n self.cols = 2\n self.btn1 = Button(text='MATEMATİK')\n self.btn1.bind(on_press=self.matCallback)\n self.btn2 = Button(text='KİMYA')\n self.btn2.bind(on_press=self.kimyaCallback)\n self.btn3 = Button(text='BİYOLOJİ')\n self.btn3.bind(on_press=self.biyoCallback)\n self.btn4 = Button(text='FELSEFE')\n self.btn4.bind(on_press=self.felCallback)\n self.btn6 = Button(text='EDEBİYAT')\n self.btn6.bind(on_press=self.edebCallback)\n self.btn7 = Button(text='BİLİŞİM')\n self.btn7.bind(on_press=self.bilisiCallback)\n self.btn5 = Button(text='TARİH')\n self.btn5.bind(on_press=self.tarihCallback)\n self.btn8 = Button(text='MÜZİK')\n self.btn8.bind(on_press=self.muzCallback)\n self.btn9 = Button(text='İNGİLİZCE')\n self.btn9.bind(on_press=self.ingCallback)\n self.btn10 = Button(text='COĞRAFYA')\n self.btn10.bind(on_press=self.cogCallback)\n self.add_widget(self.btn10)\n self.add_widget(self.btn1)\n self.add_widget(self.btn2)\n self.add_widget(self.btn3)\n self.add_widget(self.btn4)\n self.add_widget(self.btn5)\n self.add_widget(self.btn6)\n self.add_widget(self.btn7)\n self.add_widget(self.btn8)\n self.add_widget(self.btn9)\n\n\nclass main(App):\n\n def build(self):\n return ghetto()\n\n\nif __name__ == '__main__':\n main().run()\n", "step-4": "from kivy.uix.button import Button\nfrom kivy.uix.gridlayout import GridLayout\nfrom kivy.uix.floatlayout import FloatLayout\nfrom kivy.uix.label import Label\nfrom kivy.app import App\nimport webbrowser\na = 0.0\nb = '?'\nn = 0.0\nk = ''\ng = ''\n\n\nclass ghetto(GridLayout):\n\n def matCallback(self, a):\n webbrowser.open_new(\n 'https://us05web.zoom.us/j/2688374138?pwd=ekJpMnJsdWkyTWdGcE0zMEZzdjFydz09'\n )\n\n def biyoCallback(self, a):\n 
webbrowser.open_new(\n 'https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09'\n )\n\n def edebCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09'\n )\n\n def kimyaCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09'\n )\n\n def tarihCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/7045543550?pwd=yPBZGImZndgSF-Mj4JRTaFTq2Oh94Bs'\n )\n\n def cogCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/6832847624?pwd=TzhNUzlFNHM2K3FpR09nVHhCaFZPQT09'\n )\n\n def bilisiCallback(self, a):\n webbrowser.open_new('https://us02web.zoom.us/j/3469922894')\n\n def muzCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09'\n )\n\n def ingCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09'\n )\n\n def felCallback(self, a):\n webbrowser.open_new(\n 'https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09'\n )\n\n def __init__(self, **kwargs):\n super(ghetto, self).__init__(**kwargs)\n self.cols = 2\n self.btn1 = Button(text='MATEMATİK')\n self.btn1.bind(on_press=self.matCallback)\n self.btn2 = Button(text='KİMYA')\n self.btn2.bind(on_press=self.kimyaCallback)\n self.btn3 = Button(text='BİYOLOJİ')\n self.btn3.bind(on_press=self.biyoCallback)\n self.btn4 = Button(text='FELSEFE')\n self.btn4.bind(on_press=self.felCallback)\n self.btn6 = Button(text='EDEBİYAT')\n self.btn6.bind(on_press=self.edebCallback)\n self.btn7 = Button(text='BİLİŞİM')\n self.btn7.bind(on_press=self.bilisiCallback)\n self.btn5 = Button(text='TARİH')\n self.btn5.bind(on_press=self.tarihCallback)\n self.btn8 = Button(text='MÜZİK')\n self.btn8.bind(on_press=self.muzCallback)\n self.btn9 = Button(text='İNGİLİZCE')\n self.btn9.bind(on_press=self.ingCallback)\n self.btn10 = 
Button(text='COĞRAFYA')\n self.btn10.bind(on_press=self.cogCallback)\n self.add_widget(self.btn10)\n self.add_widget(self.btn1)\n self.add_widget(self.btn2)\n self.add_widget(self.btn3)\n self.add_widget(self.btn4)\n self.add_widget(self.btn5)\n self.add_widget(self.btn6)\n self.add_widget(self.btn7)\n self.add_widget(self.btn8)\n self.add_widget(self.btn9)\n\n\nclass main(App):\n\n def build(self):\n return ghetto()\n\n\nif __name__ == '__main__':\n main().run()\n", "step-5": "from kivy.uix.button import Button\r\nfrom kivy.uix.gridlayout import GridLayout\r\nfrom kivy.uix.floatlayout import FloatLayout\r\nfrom kivy.uix.label import Label\r\nfrom kivy.app import App\r\nimport webbrowser\r\na=0.0\r\nb=\"?\"\r\nn=0.0\r\nk=\"\"\r\ng=\"\"\r\nclass ghetto(GridLayout):\r\n def matCallback(self,a):\r\n webbrowser.open_new(\"https://us05web.zoom.us/j/2688374138?pwd=ekJpMnJsdWkyTWdGcE0zMEZzdjFydz09\")\r\n def biyoCallback(self,a):\r\n webbrowser.open_new(\"https://us04web.zoom.us/j/8651192984?pwd=cFV0bUNPTXRUOGVPZWw4dEhDQm0vUT09\")\r\n def edebCallback(self,a):\r\n webbrowser.open_new(\"https://us04web.zoom.us/j/4724567240?pwd=MzIzam5jcE9MeEkxTkVnR1plVVZ6dz09\")\r\n def kimyaCallback(self,a):\r\n webbrowser.open_new(\"https://us04web.zoom.us/j/8080079163?pwd=UitJVWs4Y0dOU2ZjbHMvZUVBQVZXdz09\")\r\n def tarihCallback(self,a):\r\n webbrowser.open_new(\"https://us04web.zoom.us/j/7045543550?pwd=yPBZGImZndgSF-Mj4JRTaFTq2Oh94Bs\")\r\n def cogCallback(self,a):\r\n webbrowser.open_new(\"https://us04web.zoom.us/j/6832847624?pwd=TzhNUzlFNHM2K3FpR09nVHhCaFZPQT09\")\r\n def bilisiCallback(self,a):\r\n webbrowser.open_new(\"https://us02web.zoom.us/j/3469922894\")\r\n def muzCallback(self,a):\r\n webbrowser.open_new(\"https://us04web.zoom.us/j/7411417677?pwd=K1A5czBGWWlnRzdBOWs0VEJQaUloUT09\")\r\n def ingCallback(self,a):\r\n webbrowser.open_new(\"https://us04web.zoom.us/j/6712002142?pwd=azFMYjljb3lPOVBoTXdYT3FabmpIUT09\")\r\n def felCallback(self,a):\r\n 
webbrowser.open_new(\"https://us04web.zoom.us/j/8358223221?pwd=eTlXcm4vc3RVUnNOSzV0UmhqM1ZEZz09\")\r\n\r\n \r\n \r\n def __init__(self,**kwargs):\r\n super(ghetto, self).__init__(**kwargs)\r\n self.cols = 2\r\n self.btn1 = Button(text='MATEMATİK')\r\n self.btn1.bind(on_press=self.matCallback)\r\n self.btn2 = Button(text='KİMYA')\r\n self.btn2.bind(on_press=self.kimyaCallback)\r\n self.btn3 = Button(text='BİYOLOJİ')\r\n self.btn3.bind(on_press=self.biyoCallback)\r\n self.btn4 = Button(text='FELSEFE')\r\n self.btn4.bind(on_press=self.felCallback)\r\n self.btn6 = Button(text='EDEBİYAT')\r\n self.btn6.bind(on_press=self.edebCallback)\r\n self.btn7 = Button(text='BİLİŞİM')\r\n self.btn7.bind(on_press=self.bilisiCallback)\r\n self.btn5 = Button(text='TARİH')\r\n self.btn5.bind(on_press=self.tarihCallback)\r\n self.btn8 = Button(text='MÜZİK')\r\n self.btn8.bind(on_press=self.muzCallback)\r\n self.btn9 = Button(text='İNGİLİZCE')\r\n self.btn9.bind(on_press=self.ingCallback)\r\n self.btn10 = Button(text='COĞRAFYA')\r\n self.btn10.bind(on_press=self.cogCallback)\r\n self.add_widget(self.btn10)\r\n self.add_widget(self.btn1)\r\n self.add_widget(self.btn2)\r\n self.add_widget(self.btn3)\r\n self.add_widget(self.btn4)\r\n self.add_widget(self.btn5)\r\n self.add_widget(self.btn6)\r\n self.add_widget(self.btn7)\r\n self.add_widget(self.btn8)\r\n self.add_widget(self.btn9)\r\n \r\n \r\n\r\nclass main(App):\r\n def build(self):\r\n return ghetto()\r\n\r\nif __name__ == \"__main__\":\r\n main().run()\r\n", "step-ids": [ 11, 12, 15, 17, 18 ] }
[ 11, 12, 15, 17, 18 ]
#!/usr/bin/env python # coding: utf-8 import sys,pysrt import urllib2,urllib,json import re from urlparse import urlparse import os from mtranslate import translate from argparse import ArgumentParser reload(sys) sys.setdefaultencoding('utf8') #---------------------------------------------------------------------------------------------------------------------------------- def cleanhtml(raw_html): ''' TODO: refactor this to make it as generic as possible ''' cleanr = re.compile('<.*?>') cleantext = re.sub(cleanr, '', raw_html) cleantext = cleantext.replace('[vacilación]','...') cleantext = cleantext.replace('&nbsp;',' ') cleantext = urlparse(cleantext).path return cleantext #---------------------------------------------------------------------------------------------------------------------------------- def generateSub(args,_subtitle,_filename): subs = pysrt.from_string(str(_subtitle).decode('utf-8')) output = args.OUTPUT + _filename #file = pysrt.SubRipFile() text = '' for index in range(len(subs)): if subs[index].text != '': if args.VERBOSE: print "Translating line:" + cleanhtml(subs[index].text) subs[index].text = translate(cleanhtml(subs[index].text).encode('utf-8'),args.LANG_TO,args.LANG_FROM) subs.save(output) #---------------------------------------------------------------------------------------------------------------------------------- def generateSubMedia(args): subLangURL= 'https://media.upv.es/rest/plugins/admin-plugin-translectures/langs/' subUrl = 'https://media.upv.es/rest/plugins/admin-plugin-translectures/srt/' langlist =json.loads(urllib2.urlopen(subLangURL + args.SOURCE).read()) for lang in langlist: if lang['lang']==args.LANG_FROM: sub = urllib2.urlopen(subUrl + args.SOURCE +'/' + args.LANG_FROM).read() generateSub(args,sub,args.SOURCE+'_' + args.LANG_TO.lower() + '.srt') return 0 #---------------------------------------------------------------------------------------------------------------------------------- def 
generateSubFile(args,_filename=None): if _filename is None: _source = args.SOURCE else: _source = _filename if _source[-4:]=='.srt': substring = open(_source,'r').read() generateSub(args,substring,_source.replace('.srt','_' + args.LANG_TO + '.srt')) else: print "Incorrect file format" return -1 #---------------------------------------------------------------------------------------------------------------------------------- def generateSubFolder(args): _source = args.SOURCE if args.SOURCE[-1:]=='/' else args.SOURCE + '/' if os.path.isdir(args.SOURCE): for root, dirs, files in os.walk(args.SOURCE): for f in files: if f[-4:]=='.srt': substring = open(root + f if root[-1:]=='/' else root + '/' + f,'r').read() generateSub(args,substring,f.replace('.srt','_' + args.LANG_TO + '.srt')) else: print "Incorrect file format" return -1 #---------------------------------------------------------------------------------------------------------------------------------- def main(): parser = ArgumentParser(description='Translate subtitle from media id, file or folder', parents=[]) parser.add_argument('-v', '--verbose', action='store_true', dest='VERBOSE', default=False, help='Verbose') parser.add_argument('-t', '--sourceType', type=str, dest='SOURCE_TYPE', help='source type, pick between media|file|folder') parser.add_argument('-s', '--source', type=str, dest='SOURCE', help='source of the subtitle/s') parser.add_argument('-langf', '--langFrom', type=str, dest='LANG_FROM', default='es', help='Language that we want to translate') parser.add_argument('-langt', '--langTo', type=str, dest='LANG_TO', default='en', help='Language of the output subtitle') parser.add_argument('-o', '--output', type=str, dest='OUTPUT', default='./', help='Output folder to store the result') args = parser.parse_args() if (args.SOURCE_TYPE.lower()=='file'): try: generateSubFile(args) except: return -1 elif (args.SOURCE_TYPE.lower()=='folder'): try: generateSubFolder(args) except: return -1 elif 
(args.SOURCE_TYPE.lower()=='media'): try: generateSubMedia(args) except: return -1 else: print "Choose a valid source type" return 0 #---------------------------------------------------------------------------------------------------------------------------------- if (__name__ == '__main__'): main()
normal
{ "blob_id": "e51c57f4487a3225936d073142f1f770815c0d47", "index": 7589, "step-1": "#!/usr/bin/env python\n# coding: utf-8\nimport sys,pysrt\nimport urllib2,urllib,json\nimport re\nfrom urlparse import urlparse\nimport os\nfrom mtranslate import translate\nfrom argparse import ArgumentParser\nreload(sys) \nsys.setdefaultencoding('utf8')\n\n#----------------------------------------------------------------------------------------------------------------------------------\ndef cleanhtml(raw_html):\n '''\n TODO: refactor this to make it as generic as possible\n '''\n cleanr = re.compile('<.*?>')\n cleantext = re.sub(cleanr, '', raw_html)\n cleantext = cleantext.replace('[vacilación]','...')\n cleantext = cleantext.replace('&nbsp;',' ')\n cleantext = urlparse(cleantext).path \n return cleantext\n\n#----------------------------------------------------------------------------------------------------------------------------------\ndef generateSub(args,_subtitle,_filename): \n subs = pysrt.from_string(str(_subtitle).decode('utf-8')) \n output = args.OUTPUT + _filename\n #file = pysrt.SubRipFile() \n text = '' \n for index in range(len(subs)): \n if subs[index].text != '': \n if args.VERBOSE:\n print \"Translating line:\" + cleanhtml(subs[index].text) \n subs[index].text = translate(cleanhtml(subs[index].text).encode('utf-8'),args.LANG_TO,args.LANG_FROM) \n subs.save(output)\n\n#----------------------------------------------------------------------------------------------------------------------------------\ndef generateSubMedia(args):\n subLangURL= 'https://media.upv.es/rest/plugins/admin-plugin-translectures/langs/'\n subUrl = 'https://media.upv.es/rest/plugins/admin-plugin-translectures/srt/' \n langlist =json.loads(urllib2.urlopen(subLangURL + args.SOURCE).read()) \n for lang in langlist: \n if lang['lang']==args.LANG_FROM: \n sub = urllib2.urlopen(subUrl + args.SOURCE +'/' + args.LANG_FROM).read() \n generateSub(args,sub,args.SOURCE+'_' + args.LANG_TO.lower() + '.srt') 
\n return 0\n\n#----------------------------------------------------------------------------------------------------------------------------------\ndef generateSubFile(args,_filename=None): \n if _filename is None:\n _source = args.SOURCE\n else:\n _source = _filename\n if _source[-4:]=='.srt': \n substring = open(_source,'r').read() \n generateSub(args,substring,_source.replace('.srt','_' + args.LANG_TO + '.srt')) \n else:\n print \"Incorrect file format\"\n return -1\n\n#----------------------------------------------------------------------------------------------------------------------------------\ndef generateSubFolder(args): \n _source = args.SOURCE if args.SOURCE[-1:]=='/' else args.SOURCE + '/'\n if os.path.isdir(args.SOURCE):\n for root, dirs, files in os.walk(args.SOURCE): \n for f in files: \n if f[-4:]=='.srt':\n substring = open(root + f if root[-1:]=='/' else root + '/' + f,'r').read() \n generateSub(args,substring,f.replace('.srt','_' + args.LANG_TO + '.srt')) \n else:\n print \"Incorrect file format\"\n return -1\n\n#----------------------------------------------------------------------------------------------------------------------------------\ndef main():\n parser = ArgumentParser(description='Translate subtitle from media id, file or folder', parents=[]) \n parser.add_argument('-v', '--verbose', action='store_true', dest='VERBOSE', default=False, help='Verbose') \n parser.add_argument('-t', '--sourceType', type=str, dest='SOURCE_TYPE', help='source type, pick between media|file|folder')\n parser.add_argument('-s', '--source', type=str, dest='SOURCE', help='source of the subtitle/s')\n parser.add_argument('-langf', '--langFrom', type=str, dest='LANG_FROM', default='es', help='Language that we want to translate')\n parser.add_argument('-langt', '--langTo', type=str, dest='LANG_TO', default='en', help='Language of the output subtitle') \n parser.add_argument('-o', '--output', type=str, dest='OUTPUT', default='./', help='Output folder to store the 
result') \n args = parser.parse_args() \n \n \n if (args.SOURCE_TYPE.lower()=='file'):\n try: \n generateSubFile(args)\n except:\n return -1 \n elif (args.SOURCE_TYPE.lower()=='folder'):\n try:\n generateSubFolder(args)\n except:\n return -1\n elif (args.SOURCE_TYPE.lower()=='media'):\n try: \n generateSubMedia(args)\n except: \n return -1\n else:\n print \"Choose a valid source type\" \n\n return 0\n \n#---------------------------------------------------------------------------------------------------------------------------------- \nif (__name__ == '__main__'):\n main() \n\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
""" Function of main.py: config loader hprams loader feature extraction Call model training and validation Model Save and Load Call model validation 载入训练参数 载入指定模型超参数 调用特征提取 调用模型训练和验证 模型保存与载入 调用模型验证 """ """A very simple MNIST classifier. See extensive documentation at https://www.tensorflow.org/get_started/mnist/beginners usage: main.py [options] options: --data_dir=<dir> Where to get training data [default: ./datasets/MNIST/]. --base_log_dir=<dir> Where to save models [default: ./generated/logdir/]. --model Which model to use [default: autoencoder_vae]. --experiment_name Name of experiment defines the log path [default: Date-of-now]. --load_model=<dir> Where to load checkpoint, if necessary [default: None] --total_epoch Max num of training epochs [default: by the model]. --eval_per_epoch Model eval per n epoch [default: by the model]. --save_per_epoch Model save per n epoch [default: by the model]. --batch_size Batch size [default: by the model]. -h, --help Show this help message and exit """ import argparse import sys import datetime from tqdm import tqdm import numpy as np import os import tensorflow as tf from model.model_example import model_example from model.deep_mnist import deep_mnist from model.VAE.autoencoder_vae import autoencoder from model.deep_mnist_with_Res import deep_mnist_with_Res from preprocessing_util import autoencoder_vae_add_noise from training_util import save,load import params FLAGS = None def prepare_params(FLAGS): if FLAGS.experiment_name == "default": now=datetime.datetime.now() FLAGS.experiment_name=now.strftime('%Y%m%d%H%M%S') FLAGS.log_dir = FLAGS.base_log_dir+FLAGS.experiment_name+'_'+FLAGS.model+'/' return FLAGS def main(): #Avoid tensorboard error on IPython tf.reset_default_graph() # Prepare data train_data = np.load(os.path.join(FLAGS.data_dir, 'train_data.npy')) train_labels = np.load(os.path.join(FLAGS.data_dir, 'train_labels.npy')) test_data = np.load(os.path.join(FLAGS.data_dir, 'test_data.npy')) test_labels = 
np.load(os.path.join(FLAGS.data_dir, 'test_labels.npy')) train_set = tf.data.Dataset.from_tensor_slices((train_data, train_labels)) test_set = tf.data.Dataset.from_tensor_slices((test_data, test_labels)) if FLAGS.model == "autoencoder_vae": train_set = train_set.map(autoencoder_vae_add_noise) test_set = test_set.map(autoencoder_vae_add_noise) # Do reshuffle to avoid biased estimation when model reloaded train_set = train_set.shuffle( FLAGS.batch_size,reshuffle_each_iteration=True).batch( FLAGS.batch_size).repeat(10) test_set = test_set.shuffle( FLAGS.batch_size,reshuffle_each_iteration=True).batch( FLAGS.batch_size).repeat(10) trainIter = train_set.make_initializable_iterator() next_examples, next_labels = trainIter.get_next() testIter = test_set.make_initializable_iterator() test_examples, text_labels = testIter.get_next() # Create the model if FLAGS.model == "deep_mnist": hp = params.Deep_MNIST_model_params x = tf.placeholder(tf.float32, [None, hp.input_dim]) y = tf.placeholder(tf.float32, [None, hp.output_dim]) keep_probe = tf.placeholder(tf.float32) model = deep_mnist(hp, x ,y, keep_probe) train_fetch_list = [model.train_step,model.merged] test_fetch_list = [model.accuracy,model.merged] if FLAGS.model == "deep_mnist_AdamW": hp = params.Deep_MNIST_model_params x = tf.placeholder(tf.float32, [None, hp.input_dim]) y = tf.placeholder(tf.float32, [None, hp.output_dim]) keep_probe = tf.placeholder(tf.float32) model = deep_mnist(hp, x ,y, keep_probe,use_adamW = True) train_fetch_list = [model.train_step,model.merged] test_fetch_list = [model.accuracy,model.merged] if FLAGS.model == "deep_mnist_with_Res": hp = params.Deep_MNIST_model_params x = tf.placeholder(tf.float32, [None, hp.input_dim]) y = tf.placeholder(tf.float32, [None, hp.output_dim]) keep_probe = tf.placeholder(tf.float32) model = deep_mnist_with_Res(hp, x ,y, keep_probe) train_fetch_list = [model.train_step,model.merged] test_fetch_list = [model.accuracy,model.merged] if FLAGS.model == "autoencoder_vae": 
hp = params.autoencoder_vae_model_params x = tf.placeholder(tf.float32, [None, hp.input_dim]) x_hat = tf.placeholder(tf.float32, [None, hp.input_dim]) keep_probe = tf.placeholder(tf.float32) model = autoencoder(hp, x ,x_hat, keep_probe) y=x_hat train_fetch_list = [model.train_step,model.merged] test_fetch_list = [model.loss_mean,model.merged] #Prepare tensorboard train_writer = tf.summary.FileWriter(FLAGS.log_dir+'/train',model.train_step.graph) test_writer = tf.summary.FileWriter(FLAGS.log_dir+'/test') print('checkout result of this time with "tensorboard --logdir={}"'.format(FLAGS.log_dir)) print('For result compare run "tensorboard --logdir={}"'.format(FLAGS.base_log_dir)) session_conf = tf.ConfigProto( gpu_options=tf.GPUOptions( allow_growth=True, ), ) saver = tf.train.Saver() #Start tf session with tf.Session(config=session_conf) as sess: try: sess.run(tf.global_variables_initializer()) sess.run(trainIter.initializer) sess.run(testIter.initializer) # Restore variables from disk. if FLAGS.load_model != None: load(saver, sess, FLAGS.load_model) for epoch in tqdm(range(FLAGS.total_epoch)): batch_xs, batch_ys = sess.run([next_examples, next_labels]) train_feed_dict={x: batch_xs, y: batch_ys, keep_probe: hp.keep_probe} _,summary = sess.run(train_fetch_list, feed_dict=train_feed_dict) if epoch % 10 == 0: train_writer.add_summary(summary, epoch) if epoch % FLAGS.eval_per_epoch == 0: batch_xs, batch_ys = sess.run([test_examples, text_labels]) test_feed_dict={x: batch_xs, y: batch_ys, keep_probe: hp.keep_probe_test} mertics,summary = sess.run(test_fetch_list, feed_dict=test_feed_dict) test_writer.add_summary(summary, epoch) if epoch % FLAGS.save_per_epoch == 0: save(saver, sess, FLAGS.log_dir, epoch) except: pass finally: save(saver, sess, FLAGS.log_dir, epoch) train_writer.close() test_writer.close() if __name__ == '__main__': default_hp=params.default_hyper_params parser = argparse.ArgumentParser() parser.add_argument('--data_dir', type=str, 
default="./datasets/MNIST/") parser.add_argument('--experiment_name', type=str, default="deep_mnist_AdamW_wd1e4") parser.add_argument('--base_log_dir', type=str, default="./generated/logdir/") parser.add_argument('--model', type=str, default="deep_mnist_AdamW") parser.add_argument('--load_model', type=str, default=None) parser.add_argument('--total_epoch', type=int, default=default_hp.num_epochs) parser.add_argument('--eval_per_epoch', type=int, default=default_hp.eval_per_epoch) parser.add_argument('--save_per_epoch', type=int, default=default_hp.save_per_epoch) parser.add_argument('--batch_size', type=int, default=default_hp.batch_size) FLAGS, unparsed = parser.parse_known_args() FLAGS = prepare_params(FLAGS) main()
normal
{ "blob_id": "c6174fae929366cabb8da3d810df705b19895c1c", "index": 2763, "step-1": "\"\"\"\nFunction of main.py:\n\nconfig loader\nhprams loader\nfeature extraction\nCall model training and validation\nModel Save and Load\nCall model validation\n\n载入训练参数\n载入指定模型超参数\n调用特征提取\n调用模型训练和验证\n模型保存与载入\n调用模型验证\n\"\"\"\n\n\"\"\"A very simple MNIST classifier.\nSee extensive documentation at\nhttps://www.tensorflow.org/get_started/mnist/beginners\nusage: main.py [options] \noptions:\n --data_dir=<dir> Where to get training data [default: ./datasets/MNIST/].\n --base_log_dir=<dir> Where to save models [default: ./generated/logdir/].\n --model Which model to use [default: autoencoder_vae].\n --experiment_name Name of experiment defines the log path [default: Date-of-now].\n --load_model=<dir> Where to load checkpoint, if necessary [default: None]\n --total_epoch Max num of training epochs [default: by the model].\n --eval_per_epoch Model eval per n epoch [default: by the model].\n --save_per_epoch Model save per n epoch [default: by the model].\n --batch_size Batch size [default: by the model].\n -h, --help Show this help message and exit\n\"\"\"\n\nimport argparse\nimport sys\nimport datetime\nfrom tqdm import tqdm\nimport numpy as np\nimport os\n\nimport tensorflow as tf\n\nfrom model.model_example import model_example\nfrom model.deep_mnist import deep_mnist\nfrom model.VAE.autoencoder_vae import autoencoder\nfrom model.deep_mnist_with_Res import deep_mnist_with_Res\n\nfrom preprocessing_util import autoencoder_vae_add_noise\nfrom training_util import save,load\nimport params \n\nFLAGS = None\n\ndef prepare_params(FLAGS):\n if FLAGS.experiment_name == \"default\":\n now=datetime.datetime.now()\n FLAGS.experiment_name=now.strftime('%Y%m%d%H%M%S')\n FLAGS.log_dir = FLAGS.base_log_dir+FLAGS.experiment_name+'_'+FLAGS.model+'/'\n return FLAGS\n\n\ndef main():\n #Avoid tensorboard error on IPython\n tf.reset_default_graph()\n \n # Prepare data\n train_data = 
np.load(os.path.join(FLAGS.data_dir, 'train_data.npy'))\n train_labels = np.load(os.path.join(FLAGS.data_dir, 'train_labels.npy'))\n test_data = np.load(os.path.join(FLAGS.data_dir, 'test_data.npy'))\n test_labels = np.load(os.path.join(FLAGS.data_dir, 'test_labels.npy'))\n \n train_set = tf.data.Dataset.from_tensor_slices((train_data, train_labels))\n test_set = tf.data.Dataset.from_tensor_slices((test_data, test_labels))\n \n if FLAGS.model == \"autoencoder_vae\":\n train_set = train_set.map(autoencoder_vae_add_noise)\n test_set = test_set.map(autoencoder_vae_add_noise)\n \n # Do reshuffle to avoid biased estimation when model reloaded\n train_set = train_set.shuffle(\n FLAGS.batch_size,reshuffle_each_iteration=True).batch(\n FLAGS.batch_size).repeat(10)\n test_set = test_set.shuffle(\n FLAGS.batch_size,reshuffle_each_iteration=True).batch(\n FLAGS.batch_size).repeat(10)\n \n trainIter = train_set.make_initializable_iterator()\n next_examples, next_labels = trainIter.get_next()\n \n testIter = test_set.make_initializable_iterator()\n test_examples, text_labels = testIter.get_next()\n \n # Create the model\n \n if FLAGS.model == \"deep_mnist\":\n hp = params.Deep_MNIST_model_params\n \n x = tf.placeholder(tf.float32, [None, hp.input_dim])\n y = tf.placeholder(tf.float32, [None, hp.output_dim])\n keep_probe = tf.placeholder(tf.float32)\n \n model = deep_mnist(hp, x ,y, keep_probe)\n \n train_fetch_list = [model.train_step,model.merged]\n test_fetch_list = [model.accuracy,model.merged]\n \n if FLAGS.model == \"deep_mnist_AdamW\":\n hp = params.Deep_MNIST_model_params\n \n x = tf.placeholder(tf.float32, [None, hp.input_dim])\n y = tf.placeholder(tf.float32, [None, hp.output_dim])\n keep_probe = tf.placeholder(tf.float32)\n \n model = deep_mnist(hp, x ,y, keep_probe,use_adamW = True)\n \n train_fetch_list = [model.train_step,model.merged]\n test_fetch_list = [model.accuracy,model.merged]\n \n if FLAGS.model == \"deep_mnist_with_Res\":\n hp = 
params.Deep_MNIST_model_params\n \n x = tf.placeholder(tf.float32, [None, hp.input_dim])\n y = tf.placeholder(tf.float32, [None, hp.output_dim])\n keep_probe = tf.placeholder(tf.float32)\n \n model = deep_mnist_with_Res(hp, x ,y, keep_probe)\n \n train_fetch_list = [model.train_step,model.merged]\n test_fetch_list = [model.accuracy,model.merged]\n \n if FLAGS.model == \"autoencoder_vae\":\n hp = params.autoencoder_vae_model_params\n \n x = tf.placeholder(tf.float32, [None, hp.input_dim])\n x_hat = tf.placeholder(tf.float32, [None, hp.input_dim])\n keep_probe = tf.placeholder(tf.float32)\n \n model = autoencoder(hp, x ,x_hat, keep_probe)\n \n y=x_hat\n train_fetch_list = [model.train_step,model.merged]\n test_fetch_list = [model.loss_mean,model.merged]\n \n #Prepare tensorboard\n train_writer = tf.summary.FileWriter(FLAGS.log_dir+'/train',model.train_step.graph)\n test_writer = tf.summary.FileWriter(FLAGS.log_dir+'/test')\n print('checkout result of this time with \"tensorboard --logdir={}\"'.format(FLAGS.log_dir))\n print('For result compare run \"tensorboard --logdir={}\"'.format(FLAGS.base_log_dir))\n \n \n session_conf = tf.ConfigProto(\n gpu_options=tf.GPUOptions(\n allow_growth=True,\n ),\n )\n saver = tf.train.Saver()\n\n #Start tf session\n with tf.Session(config=session_conf) as sess:\n try:\n sess.run(tf.global_variables_initializer())\n sess.run(trainIter.initializer)\n sess.run(testIter.initializer)\n \n # Restore variables from disk.\n if FLAGS.load_model != None:\n load(saver, sess, FLAGS.load_model)\n \n \n for epoch in tqdm(range(FLAGS.total_epoch)):\n batch_xs, batch_ys = sess.run([next_examples, next_labels])\n train_feed_dict={x: batch_xs,\n y: batch_ys,\n keep_probe: hp.keep_probe}\n _,summary = sess.run(train_fetch_list, feed_dict=train_feed_dict)\n \n if epoch % 10 == 0:\n train_writer.add_summary(summary, epoch)\n \n if epoch % FLAGS.eval_per_epoch == 0:\n batch_xs, batch_ys = sess.run([test_examples, text_labels])\n test_feed_dict={x: 
batch_xs,\n y: batch_ys,\n keep_probe: hp.keep_probe_test}\n mertics,summary = sess.run(test_fetch_list, feed_dict=test_feed_dict)\n test_writer.add_summary(summary, epoch)\n \n if epoch % FLAGS.save_per_epoch == 0:\n save(saver, sess, FLAGS.log_dir, epoch)\n \n except:\n pass\n finally:\n save(saver, sess, FLAGS.log_dir, epoch)\n train_writer.close()\n test_writer.close()\n\n \n \nif __name__ == '__main__':\n default_hp=params.default_hyper_params\n parser = argparse.ArgumentParser()\n parser.add_argument('--data_dir', type=str, default=\"./datasets/MNIST/\")\n parser.add_argument('--experiment_name', type=str, default=\"deep_mnist_AdamW_wd1e4\")\n parser.add_argument('--base_log_dir', type=str, default=\"./generated/logdir/\")\n parser.add_argument('--model', type=str, default=\"deep_mnist_AdamW\")\n parser.add_argument('--load_model', type=str, default=None)\n parser.add_argument('--total_epoch', type=int, default=default_hp.num_epochs)\n parser.add_argument('--eval_per_epoch', type=int, default=default_hp.eval_per_epoch)\n parser.add_argument('--save_per_epoch', type=int, default=default_hp.save_per_epoch)\n parser.add_argument('--batch_size', type=int, default=default_hp.batch_size)\n \n FLAGS, unparsed = parser.parse_known_args()\n FLAGS = prepare_params(FLAGS)\n main()", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
"""autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneRequest.msg. Do not edit.""" import sys python3 = True if sys.hexversion > 0x03000000 else False import genpy import struct import arm_navigation_msgs.msg import geometry_msgs.msg import std_msgs.msg import genpy import sensor_msgs.msg class GetPlanningSceneRequest(genpy.Message): _md5sum = "67ad55e9bed9c8f21dfb4b9b1ca8df7d" _type = "arm_navigation_msgs/GetPlanningSceneRequest" _has_header = False #flag to mark the presence of a Header object _full_text = """ PlanningScene planning_scene_diff arm_navigation_msgs/OrderedCollisionOperations operations ================================================================================ MSG: arm_navigation_msgs/PlanningScene #full robot state arm_navigation_msgs/RobotState robot_state #additional frames for duplicating tf geometry_msgs/TransformStamped[] fixed_frame_transforms #full allowed collision matrix AllowedCollisionMatrix allowed_collision_matrix #allowed contacts arm_navigation_msgs/AllowedContactSpecification[] allowed_contacts #all link paddings arm_navigation_msgs/LinkPadding[] link_padding #collision objects arm_navigation_msgs/CollisionObject[] collision_objects arm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects #the collision map arm_navigation_msgs/CollisionMap collision_map ================================================================================ MSG: arm_navigation_msgs/RobotState # This message contains information about the robot state, i.e. the positions of its joints and links sensor_msgs/JointState joint_state arm_navigation_msgs/MultiDOFJointState multi_dof_joint_state ================================================================================ MSG: sensor_msgs/JointState # This is a message that holds data to describe the state of a set of torque controlled joints. 
# # The state of each joint (revolute or prismatic) is defined by: # * the position of the joint (rad or m), # * the velocity of the joint (rad/s or m/s) and # * the effort that is applied in the joint (Nm or N). # # Each joint is uniquely identified by its name # The header specifies the time at which the joint states were recorded. All the joint states # in one message have to be recorded at the same time. # # This message consists of a multiple arrays, one for each part of the joint state. # The goal is to make each of the fields optional. When e.g. your joints have no # effort associated with them, you can leave the effort array empty. # # All arrays in this message should have the same size, or be empty. # This is the only way to uniquely associate the joint name with the correct # states. Header header string[] name float64[] position float64[] velocity float64[] effort ================================================================================ MSG: std_msgs/Header # Standard metadata for higher-level stamped data types. # This is generally used to communicate timestamped data # in a particular coordinate frame. # # sequence ID: consecutively increasing ID uint32 seq #Two-integer timestamp that is expressed as: # * stamp.secs: seconds (stamp_secs) since epoch # * stamp.nsecs: nanoseconds since stamp_secs # time-handling sugar is provided by the client library time stamp #Frame this data is associated with # 0: no frame # 1: global frame string frame_id ================================================================================ MSG: arm_navigation_msgs/MultiDOFJointState #A representation of a multi-dof joint state time stamp string[] joint_names string[] frame_ids string[] child_frame_ids geometry_msgs/Pose[] poses ================================================================================ MSG: geometry_msgs/Pose # A representation of pose in free space, composed of postion and orientation. 
Point position Quaternion orientation ================================================================================ MSG: geometry_msgs/Point # This contains the position of a point in free space float64 x float64 y float64 z ================================================================================ MSG: geometry_msgs/Quaternion # This represents an orientation in free space in quaternion form. float64 x float64 y float64 z float64 w ================================================================================ MSG: geometry_msgs/TransformStamped # This expresses a transform from coordinate frame header.frame_id # to the coordinate frame child_frame_id # # This message is mostly used by the # <a href="http://www.ros.org/wiki/tf">tf</a> package. # See it's documentation for more information. Header header string child_frame_id # the frame id of the child frame Transform transform ================================================================================ MSG: geometry_msgs/Transform # This represents the transform between two coordinate frames in free space. Vector3 translation Quaternion rotation ================================================================================ MSG: geometry_msgs/Vector3 # This represents a vector in free space. 
float64 x float64 y float64 z ================================================================================ MSG: arm_navigation_msgs/AllowedCollisionMatrix # the list of link names in the matrix string[] link_names # the individual entries in the allowed collision matrix # symmetric, with same order as link_names AllowedCollisionEntry[] entries ================================================================================ MSG: arm_navigation_msgs/AllowedCollisionEntry # whether or not collision checking is enabled bool[] enabled ================================================================================ MSG: arm_navigation_msgs/AllowedContactSpecification # The names of the regions string name # The shape of the region in the environment arm_navigation_msgs/Shape shape # The pose of the space defining the region geometry_msgs/PoseStamped pose_stamped # The set of links that will be allowed to have penetration contact within this region string[] link_names # The maximum penetration depth allowed for every link float64 penetration_depth ================================================================================ MSG: arm_navigation_msgs/Shape byte SPHERE=0 byte BOX=1 byte CYLINDER=2 byte MESH=3 byte type #### define sphere, box, cylinder #### # the origin of each shape is considered at the shape's center # for sphere # radius := dimensions[0] # for cylinder # radius := dimensions[0] # length := dimensions[1] # the length is along the Z axis # for box # size_x := dimensions[0] # size_y := dimensions[1] # size_z := dimensions[2] float64[] dimensions #### define mesh #### # list of triangles; triangle k is defined by tre vertices located # at indices triangles[3k], triangles[3k+1], triangles[3k+2] int32[] triangles geometry_msgs/Point[] vertices ================================================================================ MSG: geometry_msgs/PoseStamped # A Pose with reference coordinate frame and timestamp Header header Pose pose 
================================================================================ MSG: arm_navigation_msgs/LinkPadding #name for the link string link_name # padding to apply to the link float64 padding ================================================================================ MSG: arm_navigation_msgs/CollisionObject # a header, used for interpreting the poses Header header # the id of the object string id # The padding used for filtering points near the object. # This does not affect collision checking for the object. # Set to negative to get zero padding. float32 padding #This contains what is to be done with the object CollisionObjectOperation operation #the shapes associated with the object arm_navigation_msgs/Shape[] shapes #the poses associated with the shapes - will be transformed using the header geometry_msgs/Pose[] poses ================================================================================ MSG: arm_navigation_msgs/CollisionObjectOperation #Puts the object into the environment #or updates the object if already added byte ADD=0 #Removes the object from the environment entirely byte REMOVE=1 #Only valid within the context of a CollisionAttachedObject message #Will be ignored if sent with an CollisionObject message #Takes an attached object, detaches from the attached link #But adds back in as regular object byte DETACH_AND_ADD_AS_OBJECT=2 #Only valid within the context of a CollisionAttachedObject message #Will be ignored if sent with an CollisionObject message #Takes current object in the environment and removes it as #a regular object byte ATTACH_AND_REMOVE_AS_OBJECT=3 # Byte code for operation byte operation ================================================================================ MSG: arm_navigation_msgs/AttachedCollisionObject # The CollisionObject will be attached with a fixed joint to this link # If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation # is set to REMOVE will remove all attached bodies attached to 
any object string link_name #Reserved for indicating that all attached objects should be removed string REMOVE_ALL_ATTACHED_OBJECTS = "all" #This contains the actual shapes and poses for the CollisionObject #to be attached to the link #If action is remove and no object.id is set, all objects #attached to the link indicated by link_name will be removed CollisionObject object # The set of links that the attached objects are allowed to touch # by default - the link_name is included by default string[] touch_links ================================================================================ MSG: arm_navigation_msgs/CollisionMap #header for interpreting box positions Header header #boxes for use in collision testing OrientedBoundingBox[] boxes ================================================================================ MSG: arm_navigation_msgs/OrientedBoundingBox #the center of the box geometry_msgs/Point32 center #the extents of the box, assuming the center is at the point geometry_msgs/Point32 extents #the axis of the box geometry_msgs/Point32 axis #the angle of rotation around the axis float32 angle ================================================================================ MSG: geometry_msgs/Point32 # This contains the position of a point in free space(with 32 bits of precision). # It is recommeded to use Point wherever possible instead of Point32. # # This recommendation is to promote interoperability. # # This message is designed to take up less space when sending # lots of points at once, as in the case of a PointCloud. 
float32 x float32 y float32 z ================================================================================ MSG: arm_navigation_msgs/OrderedCollisionOperations # A set of collision operations that will be performed in the order they are specified CollisionOperation[] collision_operations ================================================================================ MSG: arm_navigation_msgs/CollisionOperation # A definition of a collision operation # E.g. ("gripper",COLLISION_SET_ALL,ENABLE) will enable collisions # between the gripper and all objects in the collision space string object1 string object2 string COLLISION_SET_ALL="all" string COLLISION_SET_OBJECTS="objects" string COLLISION_SET_ATTACHED_OBJECTS="attached" # The penetration distance to which collisions are allowed. This is 0.0 by default. float64 penetration_distance # Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above int32 operation int32 DISABLE=0 int32 ENABLE=1 """ __slots__ = ['planning_scene_diff','operations'] _slot_types = ['arm_navigation_msgs/PlanningScene','arm_navigation_msgs/OrderedCollisionOperations'] def __init__(self, *args, **kwds): """ Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: planning_scene_diff,operations :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields. 
""" if args or kwds: super(GetPlanningSceneRequest, self).__init__(*args, **kwds) #message fields cannot be None, assign default values for those that are if self.planning_scene_diff is None: self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene() if self.operations is None: self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations() else: self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene() self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations() def _get_types(self): """ internal API method """ return self._slot_types def serialize(self, buff): """ serialize message into buffer :param buff: buffer, ``StringIO`` """ try: _x = self buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs)) _x = self.planning_scene_diff.robot_state.joint_state.header.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) length = len(self.planning_scene_diff.robot_state.joint_state.name) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.joint_state.name: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene_diff.robot_state.joint_state.position) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.position)) length = len(self.planning_scene_diff.robot_state.joint_state.velocity) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.velocity)) length = len(self.planning_scene_diff.robot_state.joint_state.effort) 
buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.effort)) _x = self buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs)) length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses: _v1 = val1.position _x = _v1 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v2 = val1.orientation _x = _v2 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene_diff.fixed_frame_transforms) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.fixed_frame_transforms: _v3 = val1.header buff.write(_struct_I.pack(_v3.seq)) _v4 = _v3.stamp _x = 
_v4 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v3.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.child_frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v5 = val1.transform _v6 = _v5.translation _x = _v6 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v7 = _v5.rotation _x = _v7 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene_diff.allowed_collision_matrix.link_names) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene_diff.allowed_collision_matrix.entries) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.allowed_collision_matrix.entries: length = len(val1.enabled) buff.write(_struct_I.pack(length)) pattern = '<%sB'%length buff.write(struct.pack(pattern, *val1.enabled)) length = len(self.planning_scene_diff.allowed_contacts) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.allowed_contacts: _x = val1.name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v8 = val1.shape buff.write(_struct_b.pack(_v8.type)) length = len(_v8.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(struct.pack(pattern, *_v8.dimensions)) length = len(_v8.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(struct.pack(pattern, *_v8.triangles)) length = len(_v8.vertices) buff.write(_struct_I.pack(length)) for val3 in _v8.vertices: _x = val3 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v9 = 
val1.pose_stamped _v10 = _v9.header buff.write(_struct_I.pack(_v10.seq)) _v11 = _v10.stamp _x = _v11 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v10.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v12 = _v9.pose _v13 = _v12.position _x = _v13 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v14 = _v12.orientation _x = _v14 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(val1.link_names) buff.write(_struct_I.pack(length)) for val2 in val1.link_names: length = len(val2) if python3 or type(val2) == unicode: val2 = val2.encode('utf-8') length = len(val2) buff.write(struct.pack('<I%ss'%length, length, val2)) buff.write(_struct_d.pack(val1.penetration_depth)) length = len(self.planning_scene_diff.link_padding) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.link_padding: _x = val1.link_name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_d.pack(val1.padding)) length = len(self.planning_scene_diff.collision_objects) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.collision_objects: _v15 = val1.header buff.write(_struct_I.pack(_v15.seq)) _v16 = _v15.stamp _x = _v16 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v15.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_f.pack(val1.padding)) _v17 = val1.operation buff.write(_struct_b.pack(_v17.operation)) length = len(val1.shapes) buff.write(_struct_I.pack(length)) for val2 in val1.shapes: buff.write(_struct_b.pack(val2.type)) length = len(val2.dimensions) 
buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(struct.pack(pattern, *val2.dimensions)) length = len(val2.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(struct.pack(pattern, *val2.triangles)) length = len(val2.vertices) buff.write(_struct_I.pack(length)) for val3 in val2.vertices: _x = val3 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) length = len(val1.poses) buff.write(_struct_I.pack(length)) for val2 in val1.poses: _v18 = val2.position _x = _v18 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v19 = val2.orientation _x = _v19 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene_diff.attached_collision_objects) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.attached_collision_objects: _x = val1.link_name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v20 = val1.object _v21 = _v20.header buff.write(_struct_I.pack(_v21.seq)) _v22 = _v21.stamp _x = _v22 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v21.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = _v20.id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_f.pack(_v20.padding)) _v23 = _v20.operation buff.write(_struct_b.pack(_v23.operation)) length = len(_v20.shapes) buff.write(_struct_I.pack(length)) for val3 in _v20.shapes: buff.write(_struct_b.pack(val3.type)) length = len(val3.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(struct.pack(pattern, *val3.dimensions)) length = len(val3.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(struct.pack(pattern, *val3.triangles)) length = len(val3.vertices) 
buff.write(_struct_I.pack(length)) for val4 in val3.vertices: _x = val4 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) length = len(_v20.poses) buff.write(_struct_I.pack(length)) for val3 in _v20.poses: _v24 = val3.position _x = _v24 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v25 = val3.orientation _x = _v25 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(val1.touch_links) buff.write(_struct_I.pack(length)) for val2 in val1.touch_links: length = len(val2) if python3 or type(val2) == unicode: val2 = val2.encode('utf-8') length = len(val2) buff.write(struct.pack('<I%ss'%length, length, val2)) _x = self buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs)) _x = self.planning_scene_diff.collision_map.header.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) length = len(self.planning_scene_diff.collision_map.boxes) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.collision_map.boxes: _v26 = val1.center _x = _v26 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) _v27 = val1.extents _x = _v27 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) _v28 = val1.axis _x = _v28 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) buff.write(_struct_f.pack(val1.angle)) length = len(self.operations.collision_operations) buff.write(_struct_I.pack(length)) for val1 in self.operations.collision_operations: _x = val1.object1 length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.object2 length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1 buff.write(_struct_di.pack(_x.penetration_distance, _x.operation)) except struct.error as 
se: self._check_types(se) except TypeError as te: self._check_types(te) def deserialize(self, str): """ unpack serialized message in str into this message instance :param str: byte array of serialized message, ``str`` """ try: if self.planning_scene_diff is None: self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene() if self.operations is None: self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations() end = 0 _x = self start = end end += 12 (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8') else: self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.joint_state.name = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.robot_state.joint_state.name.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene_diff.robot_state.joint_state.position = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene_diff.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) 
self.planning_scene_diff.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end]) _x = self start = end end += 8 (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = [] for i in range(0, length): val1 = geometry_msgs.msg.Pose() _v29 = val1.position _x = _v29 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v30 = val1.orientation _x = _v30 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) 
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.fixed_frame_transforms = [] for i in range(0, length): val1 = geometry_msgs.msg.TransformStamped() _v31 = val1.header start = end end += 4 (_v31.seq,) = _struct_I.unpack(str[start:end]) _v32 = _v31.stamp _x = _v32 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v31.frame_id = str[start:end].decode('utf-8') else: _v31.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.child_frame_id = str[start:end].decode('utf-8') else: val1.child_frame_id = str[start:end] _v33 = val1.transform _v34 = _v33.translation _x = _v34 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v35 = _v33.rotation _x = _v35 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) self.planning_scene_diff.fixed_frame_transforms.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.allowed_collision_matrix.link_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.allowed_collision_matrix.entries = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AllowedCollisionEntry() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sB'%length start = end end += struct.calcsize(pattern) val1.enabled = struct.unpack(pattern, str[start:end]) val1.enabled = map(bool, val1.enabled) 
self.planning_scene_diff.allowed_collision_matrix.entries.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.allowed_contacts = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AllowedContactSpecification() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.name = str[start:end].decode('utf-8') else: val1.name = str[start:end] _v36 = val1.shape start = end end += 1 (_v36.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) _v36.dimensions = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) _v36.triangles = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v36.vertices = [] for i in range(0, length): val3 = geometry_msgs.msg.Point() _x = val3 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v36.vertices.append(val3) _v37 = val1.pose_stamped _v38 = _v37.header start = end end += 4 (_v38.seq,) = _struct_I.unpack(str[start:end]) _v39 = _v38.stamp _x = _v39 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v38.frame_id = str[start:end].decode('utf-8') else: _v38.frame_id = str[start:end] _v40 = _v37.pose _v41 = _v40.position _x = _v41 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v42 = _v40.orientation _x = _v42 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.link_names = [] for i in range(0, length): start = end end += 4 (length,) = 
_struct_I.unpack(str[start:end]) start = end end += length if python3: val2 = str[start:end].decode('utf-8') else: val2 = str[start:end] val1.link_names.append(val2) start = end end += 8 (val1.penetration_depth,) = _struct_d.unpack(str[start:end]) self.planning_scene_diff.allowed_contacts.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.link_padding = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.LinkPadding() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.link_name = str[start:end].decode('utf-8') else: val1.link_name = str[start:end] start = end end += 8 (val1.padding,) = _struct_d.unpack(str[start:end]) self.planning_scene_diff.link_padding.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.collision_objects = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.CollisionObject() _v43 = val1.header start = end end += 4 (_v43.seq,) = _struct_I.unpack(str[start:end]) _v44 = _v43.stamp _x = _v44 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v43.frame_id = str[start:end].decode('utf-8') else: _v43.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.id = str[start:end].decode('utf-8') else: val1.id = str[start:end] start = end end += 4 (val1.padding,) = _struct_f.unpack(str[start:end]) _v45 = val1.operation start = end end += 1 (_v45.operation,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.shapes = [] for i in range(0, length): val2 = arm_navigation_msgs.msg.Shape() start = end end += 1 (val2.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) 
pattern = '<%sd'%length start = end end += struct.calcsize(pattern) val2.dimensions = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) val2.triangles = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val2.vertices = [] for i in range(0, length): val3 = geometry_msgs.msg.Point() _x = val3 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) val2.vertices.append(val3) val1.shapes.append(val2) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.poses = [] for i in range(0, length): val2 = geometry_msgs.msg.Pose() _v46 = val2.position _x = _v46 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v47 = val2.orientation _x = _v47 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) val1.poses.append(val2) self.planning_scene_diff.collision_objects.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.attached_collision_objects = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AttachedCollisionObject() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.link_name = str[start:end].decode('utf-8') else: val1.link_name = str[start:end] _v48 = val1.object _v49 = _v48.header start = end end += 4 (_v49.seq,) = _struct_I.unpack(str[start:end]) _v50 = _v49.stamp _x = _v50 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v49.frame_id = str[start:end].decode('utf-8') else: _v49.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v48.id = str[start:end].decode('utf-8') else: 
_v48.id = str[start:end] start = end end += 4 (_v48.padding,) = _struct_f.unpack(str[start:end]) _v51 = _v48.operation start = end end += 1 (_v51.operation,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v48.shapes = [] for i in range(0, length): val3 = arm_navigation_msgs.msg.Shape() start = end end += 1 (val3.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) val3.dimensions = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) val3.triangles = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val3.vertices = [] for i in range(0, length): val4 = geometry_msgs.msg.Point() _x = val4 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) val3.vertices.append(val4) _v48.shapes.append(val3) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v48.poses = [] for i in range(0, length): val3 = geometry_msgs.msg.Pose() _v52 = val3.position _x = _v52 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v53 = val3.orientation _x = _v53 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) _v48.poses.append(val3) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.touch_links = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val2 = str[start:end].decode('utf-8') else: val2 = str[start:end] val1.touch_links.append(val2) self.planning_scene_diff.attached_collision_objects.append(val1) _x = self start = end end += 12 (_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, 
_x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8') else: self.planning_scene_diff.collision_map.header.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.collision_map.boxes = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.OrientedBoundingBox() _v54 = val1.center _x = _v54 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) _v55 = val1.extents _x = _v55 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) _v56 = val1.axis _x = _v56 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) start = end end += 4 (val1.angle,) = _struct_f.unpack(str[start:end]) self.planning_scene_diff.collision_map.boxes.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.operations.collision_operations = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.CollisionOperation() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.object1 = str[start:end].decode('utf-8') else: val1.object1 = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.object2 = str[start:end].decode('utf-8') else: val1.object2 = str[start:end] _x = val1 start = end end += 12 (_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end]) self.operations.collision_operations.append(val1) return self except struct.error as e: raise genpy.DeserializationError(e) #most likely buffer underfill def serialize_numpy(self, buff, numpy): """ serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module """ 
try: _x = self buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs)) _x = self.planning_scene_diff.robot_state.joint_state.header.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) length = len(self.planning_scene_diff.robot_state.joint_state.name) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.joint_state.name: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene_diff.robot_state.joint_state.position) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(self.planning_scene_diff.robot_state.joint_state.position.tostring()) length = len(self.planning_scene_diff.robot_state.joint_state.velocity) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(self.planning_scene_diff.robot_state.joint_state.velocity.tostring()) length = len(self.planning_scene_diff.robot_state.joint_state.effort) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(self.planning_scene_diff.robot_state.joint_state.effort.tostring()) _x = self buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs)) length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = 
len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses: _v57 = val1.position _x = _v57 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v58 = val1.orientation _x = _v58 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene_diff.fixed_frame_transforms) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.fixed_frame_transforms: _v59 = val1.header buff.write(_struct_I.pack(_v59.seq)) _v60 = _v59.stamp _x = _v60 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v59.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.child_frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v61 = val1.transform _v62 = _v61.translation _x = _v62 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v63 = _v61.rotation _x = _v63 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene_diff.allowed_collision_matrix.link_names) buff.write(_struct_I.pack(length)) for val1 
in self.planning_scene_diff.allowed_collision_matrix.link_names: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene_diff.allowed_collision_matrix.entries) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.allowed_collision_matrix.entries: length = len(val1.enabled) buff.write(_struct_I.pack(length)) pattern = '<%sB'%length buff.write(val1.enabled.tostring()) length = len(self.planning_scene_diff.allowed_contacts) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.allowed_contacts: _x = val1.name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v64 = val1.shape buff.write(_struct_b.pack(_v64.type)) length = len(_v64.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(_v64.dimensions.tostring()) length = len(_v64.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(_v64.triangles.tostring()) length = len(_v64.vertices) buff.write(_struct_I.pack(length)) for val3 in _v64.vertices: _x = val3 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v65 = val1.pose_stamped _v66 = _v65.header buff.write(_struct_I.pack(_v66.seq)) _v67 = _v66.stamp _x = _v67 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v66.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v68 = _v65.pose _v69 = _v68.position _x = _v69 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v70 = _v68.orientation _x = _v70 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(val1.link_names) buff.write(_struct_I.pack(length)) for val2 in val1.link_names: length = len(val2) if python3 or type(val2) == unicode: val2 = val2.encode('utf-8') length = len(val2) 
buff.write(struct.pack('<I%ss'%length, length, val2)) buff.write(_struct_d.pack(val1.penetration_depth)) length = len(self.planning_scene_diff.link_padding) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.link_padding: _x = val1.link_name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_d.pack(val1.padding)) length = len(self.planning_scene_diff.collision_objects) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.collision_objects: _v71 = val1.header buff.write(_struct_I.pack(_v71.seq)) _v72 = _v71.stamp _x = _v72 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v71.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_f.pack(val1.padding)) _v73 = val1.operation buff.write(_struct_b.pack(_v73.operation)) length = len(val1.shapes) buff.write(_struct_I.pack(length)) for val2 in val1.shapes: buff.write(_struct_b.pack(val2.type)) length = len(val2.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(val2.dimensions.tostring()) length = len(val2.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(val2.triangles.tostring()) length = len(val2.vertices) buff.write(_struct_I.pack(length)) for val3 in val2.vertices: _x = val3 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) length = len(val1.poses) buff.write(_struct_I.pack(length)) for val2 in val1.poses: _v74 = val2.position _x = _v74 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v75 = val2.orientation _x = _v75 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene_diff.attached_collision_objects) 
buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.attached_collision_objects: _x = val1.link_name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v76 = val1.object _v77 = _v76.header buff.write(_struct_I.pack(_v77.seq)) _v78 = _v77.stamp _x = _v78 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v77.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = _v76.id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_f.pack(_v76.padding)) _v79 = _v76.operation buff.write(_struct_b.pack(_v79.operation)) length = len(_v76.shapes) buff.write(_struct_I.pack(length)) for val3 in _v76.shapes: buff.write(_struct_b.pack(val3.type)) length = len(val3.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(val3.dimensions.tostring()) length = len(val3.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(val3.triangles.tostring()) length = len(val3.vertices) buff.write(_struct_I.pack(length)) for val4 in val3.vertices: _x = val4 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) length = len(_v76.poses) buff.write(_struct_I.pack(length)) for val3 in _v76.poses: _v80 = val3.position _x = _v80 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v81 = val3.orientation _x = _v81 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(val1.touch_links) buff.write(_struct_I.pack(length)) for val2 in val1.touch_links: length = len(val2) if python3 or type(val2) == unicode: val2 = val2.encode('utf-8') length = len(val2) buff.write(struct.pack('<I%ss'%length, length, val2)) _x = self buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, 
_x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs)) _x = self.planning_scene_diff.collision_map.header.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) length = len(self.planning_scene_diff.collision_map.boxes) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene_diff.collision_map.boxes: _v82 = val1.center _x = _v82 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) _v83 = val1.extents _x = _v83 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) _v84 = val1.axis _x = _v84 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) buff.write(_struct_f.pack(val1.angle)) length = len(self.operations.collision_operations) buff.write(_struct_I.pack(length)) for val1 in self.operations.collision_operations: _x = val1.object1 length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.object2 length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1 buff.write(_struct_di.pack(_x.penetration_distance, _x.operation)) except struct.error as se: self._check_types(se) except TypeError as te: self._check_types(te) def deserialize_numpy(self, str, numpy): """ unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module """ try: if self.planning_scene_diff is None: self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene() if self.operations is None: self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations() end = 0 _x = self start = end end += 12 (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, 
_x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8') else: self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.joint_state.name = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.robot_state.joint_state.name.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene_diff.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene_diff.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene_diff.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) _x = self start = end end += 8 (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = [] for i in range(0, length): start = end end += 4 (length,) = 
_struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = [] for i in range(0, length): val1 = geometry_msgs.msg.Pose() _v85 = val1.position _x = _v85 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v86 = val1.orientation _x = _v86 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.fixed_frame_transforms = [] for i in range(0, length): val1 = geometry_msgs.msg.TransformStamped() _v87 = val1.header start = end end += 4 (_v87.seq,) = _struct_I.unpack(str[start:end]) _v88 = _v87.stamp _x = _v88 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = 
_struct_I.unpack(str[start:end]) start = end end += length if python3: _v87.frame_id = str[start:end].decode('utf-8') else: _v87.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.child_frame_id = str[start:end].decode('utf-8') else: val1.child_frame_id = str[start:end] _v89 = val1.transform _v90 = _v89.translation _x = _v90 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v91 = _v89.rotation _x = _v91 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) self.planning_scene_diff.fixed_frame_transforms.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.allowed_collision_matrix.link_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.allowed_collision_matrix.entries = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AllowedCollisionEntry() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sB'%length start = end end += struct.calcsize(pattern) val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool, count=length) val1.enabled = map(bool, val1.enabled) self.planning_scene_diff.allowed_collision_matrix.entries.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.allowed_contacts = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AllowedContactSpecification() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.name = str[start:end].decode('utf-8') else: val1.name = str[start:end] _v92 = 
val1.shape start = end end += 1 (_v92.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) _v92.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) _v92.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v92.vertices = [] for i in range(0, length): val3 = geometry_msgs.msg.Point() _x = val3 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v92.vertices.append(val3) _v93 = val1.pose_stamped _v94 = _v93.header start = end end += 4 (_v94.seq,) = _struct_I.unpack(str[start:end]) _v95 = _v94.stamp _x = _v95 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v94.frame_id = str[start:end].decode('utf-8') else: _v94.frame_id = str[start:end] _v96 = _v93.pose _v97 = _v96.position _x = _v97 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v98 = _v96.orientation _x = _v98 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.link_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val2 = str[start:end].decode('utf-8') else: val2 = str[start:end] val1.link_names.append(val2) start = end end += 8 (val1.penetration_depth,) = _struct_d.unpack(str[start:end]) self.planning_scene_diff.allowed_contacts.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.link_padding = [] for i in 
range(0, length): val1 = arm_navigation_msgs.msg.LinkPadding() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.link_name = str[start:end].decode('utf-8') else: val1.link_name = str[start:end] start = end end += 8 (val1.padding,) = _struct_d.unpack(str[start:end]) self.planning_scene_diff.link_padding.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.collision_objects = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.CollisionObject() _v99 = val1.header start = end end += 4 (_v99.seq,) = _struct_I.unpack(str[start:end]) _v100 = _v99.stamp _x = _v100 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v99.frame_id = str[start:end].decode('utf-8') else: _v99.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.id = str[start:end].decode('utf-8') else: val1.id = str[start:end] start = end end += 4 (val1.padding,) = _struct_f.unpack(str[start:end]) _v101 = val1.operation start = end end += 1 (_v101.operation,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.shapes = [] for i in range(0, length): val2 = arm_navigation_msgs.msg.Shape() start = end end += 1 (val2.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length) start = end end += 4 (length,) = 
_struct_I.unpack(str[start:end]) val2.vertices = [] for i in range(0, length): val3 = geometry_msgs.msg.Point() _x = val3 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) val2.vertices.append(val3) val1.shapes.append(val2) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.poses = [] for i in range(0, length): val2 = geometry_msgs.msg.Pose() _v102 = val2.position _x = _v102 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v103 = val2.orientation _x = _v103 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) val1.poses.append(val2) self.planning_scene_diff.collision_objects.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.attached_collision_objects = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AttachedCollisionObject() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.link_name = str[start:end].decode('utf-8') else: val1.link_name = str[start:end] _v104 = val1.object _v105 = _v104.header start = end end += 4 (_v105.seq,) = _struct_I.unpack(str[start:end]) _v106 = _v105.stamp _x = _v106 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v105.frame_id = str[start:end].decode('utf-8') else: _v105.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v104.id = str[start:end].decode('utf-8') else: _v104.id = str[start:end] start = end end += 4 (_v104.padding,) = _struct_f.unpack(str[start:end]) _v107 = _v104.operation start = end end += 1 (_v107.operation,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v104.shapes = [] for i in range(0, length): val3 = 
arm_navigation_msgs.msg.Shape() start = end end += 1 (val3.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val3.vertices = [] for i in range(0, length): val4 = geometry_msgs.msg.Point() _x = val4 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) val3.vertices.append(val4) _v104.shapes.append(val3) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v104.poses = [] for i in range(0, length): val3 = geometry_msgs.msg.Pose() _v108 = val3.position _x = _v108 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v109 = val3.orientation _x = _v109 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) _v104.poses.append(val3) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.touch_links = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val2 = str[start:end].decode('utf-8') else: val2 = str[start:end] val1.touch_links.append(val2) self.planning_scene_diff.attached_collision_objects.append(val1) _x = self start = end end += 12 (_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.planning_scene_diff.collision_map.header.frame_id = 
str[start:end].decode('utf-8') else: self.planning_scene_diff.collision_map.header.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene_diff.collision_map.boxes = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.OrientedBoundingBox() _v110 = val1.center _x = _v110 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) _v111 = val1.extents _x = _v111 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) _v112 = val1.axis _x = _v112 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) start = end end += 4 (val1.angle,) = _struct_f.unpack(str[start:end]) self.planning_scene_diff.collision_map.boxes.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.operations.collision_operations = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.CollisionOperation() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.object1 = str[start:end].decode('utf-8') else: val1.object1 = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.object2 = str[start:end].decode('utf-8') else: val1.object2 = str[start:end] _x = val1 start = end end += 12 (_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end]) self.operations.collision_operations.append(val1) return self except struct.error as e: raise genpy.DeserializationError(e) #most likely buffer underfill _struct_I = genpy.struct_I _struct_b = struct.Struct("<b") _struct_d = struct.Struct("<d") _struct_f = struct.Struct("<f") _struct_di = struct.Struct("<di") _struct_3f = struct.Struct("<3f") _struct_3I = struct.Struct("<3I") _struct_4d = struct.Struct("<4d") _struct_2I = struct.Struct("<2I") _struct_3d = struct.Struct("<3d") """autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneResponse.msg. 
Do not edit.""" import sys python3 = True if sys.hexversion > 0x03000000 else False import genpy import struct import arm_navigation_msgs.msg import geometry_msgs.msg import std_msgs.msg import genpy import sensor_msgs.msg class GetPlanningSceneResponse(genpy.Message): _md5sum = "285525c9abe002fbafa99af84a14b4cb" _type = "arm_navigation_msgs/GetPlanningSceneResponse" _has_header = False #flag to mark the presence of a Header object _full_text = """ PlanningScene planning_scene ================================================================================ MSG: arm_navigation_msgs/PlanningScene #full robot state arm_navigation_msgs/RobotState robot_state #additional frames for duplicating tf geometry_msgs/TransformStamped[] fixed_frame_transforms #full allowed collision matrix AllowedCollisionMatrix allowed_collision_matrix #allowed contacts arm_navigation_msgs/AllowedContactSpecification[] allowed_contacts #all link paddings arm_navigation_msgs/LinkPadding[] link_padding #collision objects arm_navigation_msgs/CollisionObject[] collision_objects arm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects #the collision map arm_navigation_msgs/CollisionMap collision_map ================================================================================ MSG: arm_navigation_msgs/RobotState # This message contains information about the robot state, i.e. the positions of its joints and links sensor_msgs/JointState joint_state arm_navigation_msgs/MultiDOFJointState multi_dof_joint_state ================================================================================ MSG: sensor_msgs/JointState # This is a message that holds data to describe the state of a set of torque controlled joints. # # The state of each joint (revolute or prismatic) is defined by: # * the position of the joint (rad or m), # * the velocity of the joint (rad/s or m/s) and # * the effort that is applied in the joint (Nm or N). 
# # Each joint is uniquely identified by its name # The header specifies the time at which the joint states were recorded. All the joint states # in one message have to be recorded at the same time. # # This message consists of a multiple arrays, one for each part of the joint state. # The goal is to make each of the fields optional. When e.g. your joints have no # effort associated with them, you can leave the effort array empty. # # All arrays in this message should have the same size, or be empty. # This is the only way to uniquely associate the joint name with the correct # states. Header header string[] name float64[] position float64[] velocity float64[] effort ================================================================================ MSG: std_msgs/Header # Standard metadata for higher-level stamped data types. # This is generally used to communicate timestamped data # in a particular coordinate frame. # # sequence ID: consecutively increasing ID uint32 seq #Two-integer timestamp that is expressed as: # * stamp.secs: seconds (stamp_secs) since epoch # * stamp.nsecs: nanoseconds since stamp_secs # time-handling sugar is provided by the client library time stamp #Frame this data is associated with # 0: no frame # 1: global frame string frame_id ================================================================================ MSG: arm_navigation_msgs/MultiDOFJointState #A representation of a multi-dof joint state time stamp string[] joint_names string[] frame_ids string[] child_frame_ids geometry_msgs/Pose[] poses ================================================================================ MSG: geometry_msgs/Pose # A representation of pose in free space, composed of postion and orientation. 
Point position Quaternion orientation ================================================================================ MSG: geometry_msgs/Point # This contains the position of a point in free space float64 x float64 y float64 z ================================================================================ MSG: geometry_msgs/Quaternion # This represents an orientation in free space in quaternion form. float64 x float64 y float64 z float64 w ================================================================================ MSG: geometry_msgs/TransformStamped # This expresses a transform from coordinate frame header.frame_id # to the coordinate frame child_frame_id # # This message is mostly used by the # <a href="http://www.ros.org/wiki/tf">tf</a> package. # See it's documentation for more information. Header header string child_frame_id # the frame id of the child frame Transform transform ================================================================================ MSG: geometry_msgs/Transform # This represents the transform between two coordinate frames in free space. Vector3 translation Quaternion rotation ================================================================================ MSG: geometry_msgs/Vector3 # This represents a vector in free space. 
float64 x float64 y float64 z ================================================================================ MSG: arm_navigation_msgs/AllowedCollisionMatrix # the list of link names in the matrix string[] link_names # the individual entries in the allowed collision matrix # symmetric, with same order as link_names AllowedCollisionEntry[] entries ================================================================================ MSG: arm_navigation_msgs/AllowedCollisionEntry # whether or not collision checking is enabled bool[] enabled ================================================================================ MSG: arm_navigation_msgs/AllowedContactSpecification # The names of the regions string name # The shape of the region in the environment arm_navigation_msgs/Shape shape # The pose of the space defining the region geometry_msgs/PoseStamped pose_stamped # The set of links that will be allowed to have penetration contact within this region string[] link_names # The maximum penetration depth allowed for every link float64 penetration_depth ================================================================================ MSG: arm_navigation_msgs/Shape byte SPHERE=0 byte BOX=1 byte CYLINDER=2 byte MESH=3 byte type #### define sphere, box, cylinder #### # the origin of each shape is considered at the shape's center # for sphere # radius := dimensions[0] # for cylinder # radius := dimensions[0] # length := dimensions[1] # the length is along the Z axis # for box # size_x := dimensions[0] # size_y := dimensions[1] # size_z := dimensions[2] float64[] dimensions #### define mesh #### # list of triangles; triangle k is defined by tre vertices located # at indices triangles[3k], triangles[3k+1], triangles[3k+2] int32[] triangles geometry_msgs/Point[] vertices ================================================================================ MSG: geometry_msgs/PoseStamped # A Pose with reference coordinate frame and timestamp Header header Pose pose 
================================================================================ MSG: arm_navigation_msgs/LinkPadding #name for the link string link_name # padding to apply to the link float64 padding ================================================================================ MSG: arm_navigation_msgs/CollisionObject # a header, used for interpreting the poses Header header # the id of the object string id # The padding used for filtering points near the object. # This does not affect collision checking for the object. # Set to negative to get zero padding. float32 padding #This contains what is to be done with the object CollisionObjectOperation operation #the shapes associated with the object arm_navigation_msgs/Shape[] shapes #the poses associated with the shapes - will be transformed using the header geometry_msgs/Pose[] poses ================================================================================ MSG: arm_navigation_msgs/CollisionObjectOperation #Puts the object into the environment #or updates the object if already added byte ADD=0 #Removes the object from the environment entirely byte REMOVE=1 #Only valid within the context of a CollisionAttachedObject message #Will be ignored if sent with an CollisionObject message #Takes an attached object, detaches from the attached link #But adds back in as regular object byte DETACH_AND_ADD_AS_OBJECT=2 #Only valid within the context of a CollisionAttachedObject message #Will be ignored if sent with an CollisionObject message #Takes current object in the environment and removes it as #a regular object byte ATTACH_AND_REMOVE_AS_OBJECT=3 # Byte code for operation byte operation ================================================================================ MSG: arm_navigation_msgs/AttachedCollisionObject # The CollisionObject will be attached with a fixed joint to this link # If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation # is set to REMOVE will remove all attached bodies attached to 
any object string link_name #Reserved for indicating that all attached objects should be removed string REMOVE_ALL_ATTACHED_OBJECTS = "all" #This contains the actual shapes and poses for the CollisionObject #to be attached to the link #If action is remove and no object.id is set, all objects #attached to the link indicated by link_name will be removed CollisionObject object # The set of links that the attached objects are allowed to touch # by default - the link_name is included by default string[] touch_links ================================================================================ MSG: arm_navigation_msgs/CollisionMap #header for interpreting box positions Header header #boxes for use in collision testing OrientedBoundingBox[] boxes ================================================================================ MSG: arm_navigation_msgs/OrientedBoundingBox #the center of the box geometry_msgs/Point32 center #the extents of the box, assuming the center is at the point geometry_msgs/Point32 extents #the axis of the box geometry_msgs/Point32 axis #the angle of rotation around the axis float32 angle ================================================================================ MSG: geometry_msgs/Point32 # This contains the position of a point in free space(with 32 bits of precision). # It is recommeded to use Point wherever possible instead of Point32. # # This recommendation is to promote interoperability. # # This message is designed to take up less space when sending # lots of points at once, as in the case of a PointCloud. float32 x float32 y float32 z """ __slots__ = ['planning_scene'] _slot_types = ['arm_navigation_msgs/PlanningScene'] def __init__(self, *args, **kwds): """ Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. 
    The available fields are:
       planning_scene

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      # Let genpy.Message assign whatever the caller provided (positional or
      # keyword), then backfill any field left as None with a default instance.
      super(GetPlanningSceneResponse, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.planning_scene is None:
        self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
    else:
      # No constructor arguments at all: build the single field with its default.
      self.planning_scene = arm_navigation_msgs.msg.PlanningScene()

  def _get_types(self):
    """
    internal API method

    Returns the list of field type strings, parallel to ``__slots__``.
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``

    Writes the message in the ROS wire format: fixed-width numeric fields are
    packed little-endian via the precompiled module-level ``struct.Struct``
    objects (``_struct_3I`` etc.); every string and variable-length array is
    preceded by a uint32 element/byte count.
    """
    try:
      _x = self
      # joint_state header: seq, stamp.secs, stamp.nsecs as three
      # little-endian uint32s in a single pack call.
      buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))
      _x = self.planning_scene.robot_state.joint_state.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        # On Python 3 (or a py2 unicode string) encode to UTF-8 first and
        # re-measure, so the length prefix counts bytes, not characters.
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      # string[] name: uint32 element count, then each length-prefixed string.
      length = len(self.planning_scene.robot_state.joint_state.name)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene.robot_state.joint_state.name:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss'%length, length, val1))
      # float64[] arrays (position/velocity/effort): uint32 count followed by
      # the packed little-endian doubles.
      length = len(self.planning_scene.robot_state.joint_state.position)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.position))
      length = len(self.planning_scene.robot_state.joint_state.velocity)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
      buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.velocity))
      length = len(self.planning_scene.robot_state.joint_state.effort)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.effort)) _x = self buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs)) length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses: _v113 = val1.position _x = _v113 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v114 = val1.orientation _x = _v114 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene.fixed_frame_transforms) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.fixed_frame_transforms: _v115 = val1.header buff.write(_struct_I.pack(_v115.seq)) _v116 = _v115.stamp _x = _v116 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v115.frame_id length = len(_x) if python3 or 
type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.child_frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v117 = val1.transform _v118 = _v117.translation _x = _v118 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v119 = _v117.rotation _x = _v119 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene.allowed_collision_matrix.link_names) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.allowed_collision_matrix.link_names: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene.allowed_collision_matrix.entries) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.allowed_collision_matrix.entries: length = len(val1.enabled) buff.write(_struct_I.pack(length)) pattern = '<%sB'%length buff.write(struct.pack(pattern, *val1.enabled)) length = len(self.planning_scene.allowed_contacts) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.allowed_contacts: _x = val1.name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v120 = val1.shape buff.write(_struct_b.pack(_v120.type)) length = len(_v120.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(struct.pack(pattern, *_v120.dimensions)) length = len(_v120.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(struct.pack(pattern, *_v120.triangles)) length = len(_v120.vertices) buff.write(_struct_I.pack(length)) for val3 in _v120.vertices: _x = val3 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v121 = val1.pose_stamped _v122 = _v121.header buff.write(_struct_I.pack(_v122.seq)) _v123 = _v122.stamp 
_x = _v123 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v122.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v124 = _v121.pose _v125 = _v124.position _x = _v125 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v126 = _v124.orientation _x = _v126 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(val1.link_names) buff.write(_struct_I.pack(length)) for val2 in val1.link_names: length = len(val2) if python3 or type(val2) == unicode: val2 = val2.encode('utf-8') length = len(val2) buff.write(struct.pack('<I%ss'%length, length, val2)) buff.write(_struct_d.pack(val1.penetration_depth)) length = len(self.planning_scene.link_padding) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.link_padding: _x = val1.link_name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_d.pack(val1.padding)) length = len(self.planning_scene.collision_objects) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.collision_objects: _v127 = val1.header buff.write(_struct_I.pack(_v127.seq)) _v128 = _v127.stamp _x = _v128 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v127.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_f.pack(val1.padding)) _v129 = val1.operation buff.write(_struct_b.pack(_v129.operation)) length = len(val1.shapes) buff.write(_struct_I.pack(length)) for val2 in val1.shapes: buff.write(_struct_b.pack(val2.type)) length = len(val2.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(struct.pack(pattern, 
*val2.dimensions)) length = len(val2.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(struct.pack(pattern, *val2.triangles)) length = len(val2.vertices) buff.write(_struct_I.pack(length)) for val3 in val2.vertices: _x = val3 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) length = len(val1.poses) buff.write(_struct_I.pack(length)) for val2 in val1.poses: _v130 = val2.position _x = _v130 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v131 = val2.orientation _x = _v131 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene.attached_collision_objects) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.attached_collision_objects: _x = val1.link_name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v132 = val1.object _v133 = _v132.header buff.write(_struct_I.pack(_v133.seq)) _v134 = _v133.stamp _x = _v134 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v133.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = _v132.id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_f.pack(_v132.padding)) _v135 = _v132.operation buff.write(_struct_b.pack(_v135.operation)) length = len(_v132.shapes) buff.write(_struct_I.pack(length)) for val3 in _v132.shapes: buff.write(_struct_b.pack(val3.type)) length = len(val3.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(struct.pack(pattern, *val3.dimensions)) length = len(val3.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(struct.pack(pattern, *val3.triangles)) length = len(val3.vertices) buff.write(_struct_I.pack(length)) for val4 in val3.vertices: _x = val4 
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) length = len(_v132.poses) buff.write(_struct_I.pack(length)) for val3 in _v132.poses: _v136 = val3.position _x = _v136 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v137 = val3.orientation _x = _v137 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(val1.touch_links) buff.write(_struct_I.pack(length)) for val2 in val1.touch_links: length = len(val2) if python3 or type(val2) == unicode: val2 = val2.encode('utf-8') length = len(val2) buff.write(struct.pack('<I%ss'%length, length, val2)) _x = self buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs)) _x = self.planning_scene.collision_map.header.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) length = len(self.planning_scene.collision_map.boxes) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.collision_map.boxes: _v138 = val1.center _x = _v138 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) _v139 = val1.extents _x = _v139 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) _v140 = val1.axis _x = _v140 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) buff.write(_struct_f.pack(val1.angle)) except struct.error as se: self._check_types(se) except TypeError as te: self._check_types(te) def deserialize(self, str): """ unpack serialized message in str into this message instance :param str: byte array of serialized message, ``str`` """ try: if self.planning_scene is None: self.planning_scene = arm_navigation_msgs.msg.PlanningScene() end = 0 _x = self start = end end += 12 (_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end]) start = end end += 4 (length,) = 
_struct_I.unpack(str[start:end]) start = end end += length if python3: self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8') else: self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.robot_state.joint_state.name = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.robot_state.joint_state.name.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene.robot_state.joint_state.position = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end]) _x = self start = end end += 8 (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.robot_state.multi_dof_joint_state.joint_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) 
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.robot_state.multi_dof_joint_state.poses = [] for i in range(0, length): val1 = geometry_msgs.msg.Pose() _v141 = val1.position _x = _v141 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v142 = val1.orientation _x = _v142 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.fixed_frame_transforms = [] for i in range(0, length): val1 = geometry_msgs.msg.TransformStamped() _v143 = val1.header start = end end += 4 (_v143.seq,) = _struct_I.unpack(str[start:end]) _v144 = _v143.stamp _x = _v144 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v143.frame_id = str[start:end].decode('utf-8') else: _v143.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.child_frame_id = str[start:end].decode('utf-8') else: 
val1.child_frame_id = str[start:end] _v145 = val1.transform _v146 = _v145.translation _x = _v146 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v147 = _v145.rotation _x = _v147 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) self.planning_scene.fixed_frame_transforms.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.allowed_collision_matrix.link_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.allowed_collision_matrix.link_names.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.allowed_collision_matrix.entries = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AllowedCollisionEntry() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sB'%length start = end end += struct.calcsize(pattern) val1.enabled = struct.unpack(pattern, str[start:end]) val1.enabled = map(bool, val1.enabled) self.planning_scene.allowed_collision_matrix.entries.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.allowed_contacts = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AllowedContactSpecification() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.name = str[start:end].decode('utf-8') else: val1.name = str[start:end] _v148 = val1.shape start = end end += 1 (_v148.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) _v148.dimensions = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end 
end += struct.calcsize(pattern) _v148.triangles = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v148.vertices = [] for i in range(0, length): val3 = geometry_msgs.msg.Point() _x = val3 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v148.vertices.append(val3) _v149 = val1.pose_stamped _v150 = _v149.header start = end end += 4 (_v150.seq,) = _struct_I.unpack(str[start:end]) _v151 = _v150.stamp _x = _v151 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v150.frame_id = str[start:end].decode('utf-8') else: _v150.frame_id = str[start:end] _v152 = _v149.pose _v153 = _v152.position _x = _v153 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v154 = _v152.orientation _x = _v154 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.link_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val2 = str[start:end].decode('utf-8') else: val2 = str[start:end] val1.link_names.append(val2) start = end end += 8 (val1.penetration_depth,) = _struct_d.unpack(str[start:end]) self.planning_scene.allowed_contacts.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.link_padding = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.LinkPadding() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.link_name = str[start:end].decode('utf-8') else: val1.link_name = str[start:end] start = end end += 8 (val1.padding,) = _struct_d.unpack(str[start:end]) self.planning_scene.link_padding.append(val1) start = end end += 4 (length,) = 
_struct_I.unpack(str[start:end]) self.planning_scene.collision_objects = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.CollisionObject() _v155 = val1.header start = end end += 4 (_v155.seq,) = _struct_I.unpack(str[start:end]) _v156 = _v155.stamp _x = _v156 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v155.frame_id = str[start:end].decode('utf-8') else: _v155.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.id = str[start:end].decode('utf-8') else: val1.id = str[start:end] start = end end += 4 (val1.padding,) = _struct_f.unpack(str[start:end]) _v157 = val1.operation start = end end += 1 (_v157.operation,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.shapes = [] for i in range(0, length): val2 = arm_navigation_msgs.msg.Shape() start = end end += 1 (val2.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) val2.dimensions = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) val2.triangles = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val2.vertices = [] for i in range(0, length): val3 = geometry_msgs.msg.Point() _x = val3 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) val2.vertices.append(val3) val1.shapes.append(val2) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.poses = [] for i in range(0, length): val2 = geometry_msgs.msg.Pose() _v158 = val2.position _x = _v158 start = end end += 24 (_x.x, _x.y, _x.z,) = 
_struct_3d.unpack(str[start:end]) _v159 = val2.orientation _x = _v159 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) val1.poses.append(val2) self.planning_scene.collision_objects.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.attached_collision_objects = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AttachedCollisionObject() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.link_name = str[start:end].decode('utf-8') else: val1.link_name = str[start:end] _v160 = val1.object _v161 = _v160.header start = end end += 4 (_v161.seq,) = _struct_I.unpack(str[start:end]) _v162 = _v161.stamp _x = _v162 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v161.frame_id = str[start:end].decode('utf-8') else: _v161.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v160.id = str[start:end].decode('utf-8') else: _v160.id = str[start:end] start = end end += 4 (_v160.padding,) = _struct_f.unpack(str[start:end]) _v163 = _v160.operation start = end end += 1 (_v163.operation,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v160.shapes = [] for i in range(0, length): val3 = arm_navigation_msgs.msg.Shape() start = end end += 1 (val3.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) val3.dimensions = struct.unpack(pattern, str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) val3.triangles = struct.unpack(pattern, str[start:end]) start = end end += 
4 (length,) = _struct_I.unpack(str[start:end]) val3.vertices = [] for i in range(0, length): val4 = geometry_msgs.msg.Point() _x = val4 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) val3.vertices.append(val4) _v160.shapes.append(val3) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v160.poses = [] for i in range(0, length): val3 = geometry_msgs.msg.Pose() _v164 = val3.position _x = _v164 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v165 = val3.orientation _x = _v165 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) _v160.poses.append(val3) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.touch_links = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val2 = str[start:end].decode('utf-8') else: val2 = str[start:end] val1.touch_links.append(val2) self.planning_scene.attached_collision_objects.append(val1) _x = self start = end end += 12 (_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8') else: self.planning_scene.collision_map.header.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.collision_map.boxes = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.OrientedBoundingBox() _v166 = val1.center _x = _v166 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) _v167 = val1.extents _x = _v167 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) _v168 = val1.axis _x = _v168 start = end end += 12 (_x.x, _x.y, _x.z,) = 
_struct_3f.unpack(str[start:end]) start = end end += 4 (val1.angle,) = _struct_f.unpack(str[start:end]) self.planning_scene.collision_map.boxes.append(val1) return self except struct.error as e: raise genpy.DeserializationError(e) #most likely buffer underfill def serialize_numpy(self, buff, numpy): """ serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module """ try: _x = self buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs)) _x = self.planning_scene.robot_state.joint_state.header.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) length = len(self.planning_scene.robot_state.joint_state.name) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.robot_state.joint_state.name: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene.robot_state.joint_state.position) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(self.planning_scene.robot_state.joint_state.position.tostring()) length = len(self.planning_scene.robot_state.joint_state.velocity) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(self.planning_scene.robot_state.joint_state.velocity.tostring()) length = len(self.planning_scene.robot_state.joint_state.effort) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(self.planning_scene.robot_state.joint_state.effort.tostring()) _x = self buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs)) length = 
len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses: _v169 = val1.position _x = _v169 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v170 = val1.orientation _x = _v170 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene.fixed_frame_transforms) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.fixed_frame_transforms: _v171 = val1.header buff.write(_struct_I.pack(_v171.seq)) _v172 = _v171.stamp _x = _v172 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v171.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.child_frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) 
buff.write(struct.pack('<I%ss'%length, length, _x)) _v173 = val1.transform _v174 = _v173.translation _x = _v174 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v175 = _v173.rotation _x = _v175 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene.allowed_collision_matrix.link_names) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.allowed_collision_matrix.link_names: length = len(val1) if python3 or type(val1) == unicode: val1 = val1.encode('utf-8') length = len(val1) buff.write(struct.pack('<I%ss'%length, length, val1)) length = len(self.planning_scene.allowed_collision_matrix.entries) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.allowed_collision_matrix.entries: length = len(val1.enabled) buff.write(_struct_I.pack(length)) pattern = '<%sB'%length buff.write(val1.enabled.tostring()) length = len(self.planning_scene.allowed_contacts) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.allowed_contacts: _x = val1.name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v176 = val1.shape buff.write(_struct_b.pack(_v176.type)) length = len(_v176.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(_v176.dimensions.tostring()) length = len(_v176.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(_v176.triangles.tostring()) length = len(_v176.vertices) buff.write(_struct_I.pack(length)) for val3 in _v176.vertices: _x = val3 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v177 = val1.pose_stamped _v178 = _v177.header buff.write(_struct_I.pack(_v178.seq)) _v179 = _v178.stamp _x = _v179 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v178.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v180 = _v177.pose _v181 = _v180.position _x = 
_v181 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v182 = _v180.orientation _x = _v182 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(val1.link_names) buff.write(_struct_I.pack(length)) for val2 in val1.link_names: length = len(val2) if python3 or type(val2) == unicode: val2 = val2.encode('utf-8') length = len(val2) buff.write(struct.pack('<I%ss'%length, length, val2)) buff.write(_struct_d.pack(val1.penetration_depth)) length = len(self.planning_scene.link_padding) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.link_padding: _x = val1.link_name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_d.pack(val1.padding)) length = len(self.planning_scene.collision_objects) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.collision_objects: _v183 = val1.header buff.write(_struct_I.pack(_v183.seq)) _v184 = _v183.stamp _x = _v184 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v183.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = val1.id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_f.pack(val1.padding)) _v185 = val1.operation buff.write(_struct_b.pack(_v185.operation)) length = len(val1.shapes) buff.write(_struct_I.pack(length)) for val2 in val1.shapes: buff.write(_struct_b.pack(val2.type)) length = len(val2.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(val2.dimensions.tostring()) length = len(val2.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(val2.triangles.tostring()) length = len(val2.vertices) buff.write(_struct_I.pack(length)) for val3 in val2.vertices: _x = val3 buff.write(_struct_3d.pack(_x.x, _x.y, 
_x.z)) length = len(val1.poses) buff.write(_struct_I.pack(length)) for val2 in val1.poses: _v186 = val2.position _x = _v186 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v187 = val2.orientation _x = _v187 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(self.planning_scene.attached_collision_objects) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.attached_collision_objects: _x = val1.link_name length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _v188 = val1.object _v189 = _v188.header buff.write(_struct_I.pack(_v189.seq)) _v190 = _v189.stamp _x = _v190 buff.write(_struct_2I.pack(_x.secs, _x.nsecs)) _x = _v189.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = _v188.id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) buff.write(_struct_f.pack(_v188.padding)) _v191 = _v188.operation buff.write(_struct_b.pack(_v191.operation)) length = len(_v188.shapes) buff.write(_struct_I.pack(length)) for val3 in _v188.shapes: buff.write(_struct_b.pack(val3.type)) length = len(val3.dimensions) buff.write(_struct_I.pack(length)) pattern = '<%sd'%length buff.write(val3.dimensions.tostring()) length = len(val3.triangles) buff.write(_struct_I.pack(length)) pattern = '<%si'%length buff.write(val3.triangles.tostring()) length = len(val3.vertices) buff.write(_struct_I.pack(length)) for val4 in val3.vertices: _x = val4 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) length = len(_v188.poses) buff.write(_struct_I.pack(length)) for val3 in _v188.poses: _v192 = val3.position _x = _v192 buff.write(_struct_3d.pack(_x.x, _x.y, _x.z)) _v193 = val3.orientation _x = _v193 buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w)) length = len(val1.touch_links) 
buff.write(_struct_I.pack(length)) for val2 in val1.touch_links: length = len(val2) if python3 or type(val2) == unicode: val2 = val2.encode('utf-8') length = len(val2) buff.write(struct.pack('<I%ss'%length, length, val2)) _x = self buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs)) _x = self.planning_scene.collision_map.header.frame_id length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) length = len(self.planning_scene.collision_map.boxes) buff.write(_struct_I.pack(length)) for val1 in self.planning_scene.collision_map.boxes: _v194 = val1.center _x = _v194 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) _v195 = val1.extents _x = _v195 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) _v196 = val1.axis _x = _v196 buff.write(_struct_3f.pack(_x.x, _x.y, _x.z)) buff.write(_struct_f.pack(val1.angle)) except struct.error as se: self._check_types(se) except TypeError as te: self._check_types(te) def deserialize_numpy(self, str, numpy): """ unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module """ try: if self.planning_scene is None: self.planning_scene = arm_navigation_msgs.msg.PlanningScene() end = 0 _x = self start = end end += 12 (_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8') else: self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end] start = end end += 4 
(length,) = _struct_I.unpack(str[start:end]) self.planning_scene.robot_state.joint_state.name = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.robot_state.joint_state.name.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) self.planning_scene.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) _x = self start = end end += 8 (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.robot_state.multi_dof_joint_state.joint_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if 
python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.robot_state.multi_dof_joint_state.poses = [] for i in range(0, length): val1 = geometry_msgs.msg.Pose() _v197 = val1.position _x = _v197 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v198 = val1.orientation _x = _v198 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.fixed_frame_transforms = [] for i in range(0, length): val1 = geometry_msgs.msg.TransformStamped() _v199 = val1.header start = end end += 4 (_v199.seq,) = _struct_I.unpack(str[start:end]) _v200 = _v199.stamp _x = _v200 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v199.frame_id = str[start:end].decode('utf-8') else: _v199.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.child_frame_id = str[start:end].decode('utf-8') else: val1.child_frame_id = str[start:end] _v201 = val1.transform _v202 = _v201.translation _x = _v202 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v203 = 
_v201.rotation _x = _v203 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) self.planning_scene.fixed_frame_transforms.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.allowed_collision_matrix.link_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1 = str[start:end].decode('utf-8') else: val1 = str[start:end] self.planning_scene.allowed_collision_matrix.link_names.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.allowed_collision_matrix.entries = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AllowedCollisionEntry() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sB'%length start = end end += struct.calcsize(pattern) val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool, count=length) val1.enabled = map(bool, val1.enabled) self.planning_scene.allowed_collision_matrix.entries.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.allowed_contacts = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AllowedContactSpecification() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.name = str[start:end].decode('utf-8') else: val1.name = str[start:end] _v204 = val1.shape start = end end += 1 (_v204.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) _v204.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) _v204.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length) start = end 
end += 4 (length,) = _struct_I.unpack(str[start:end]) _v204.vertices = [] for i in range(0, length): val3 = geometry_msgs.msg.Point() _x = val3 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v204.vertices.append(val3) _v205 = val1.pose_stamped _v206 = _v205.header start = end end += 4 (_v206.seq,) = _struct_I.unpack(str[start:end]) _v207 = _v206.stamp _x = _v207 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v206.frame_id = str[start:end].decode('utf-8') else: _v206.frame_id = str[start:end] _v208 = _v205.pose _v209 = _v208.position _x = _v209 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v210 = _v208.orientation _x = _v210 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.link_names = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val2 = str[start:end].decode('utf-8') else: val2 = str[start:end] val1.link_names.append(val2) start = end end += 8 (val1.penetration_depth,) = _struct_d.unpack(str[start:end]) self.planning_scene.allowed_contacts.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.link_padding = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.LinkPadding() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.link_name = str[start:end].decode('utf-8') else: val1.link_name = str[start:end] start = end end += 8 (val1.padding,) = _struct_d.unpack(str[start:end]) self.planning_scene.link_padding.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.collision_objects = [] for i in range(0, length): val1 = 
arm_navigation_msgs.msg.CollisionObject() _v211 = val1.header start = end end += 4 (_v211.seq,) = _struct_I.unpack(str[start:end]) _v212 = _v211.stamp _x = _v212 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v211.frame_id = str[start:end].decode('utf-8') else: _v211.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.id = str[start:end].decode('utf-8') else: val1.id = str[start:end] start = end end += 4 (val1.padding,) = _struct_f.unpack(str[start:end]) _v213 = val1.operation start = end end += 1 (_v213.operation,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.shapes = [] for i in range(0, length): val2 = arm_navigation_msgs.msg.Shape() start = end end += 1 (val2.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val2.vertices = [] for i in range(0, length): val3 = geometry_msgs.msg.Point() _x = val3 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) val2.vertices.append(val3) val1.shapes.append(val2) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.poses = [] for i in range(0, length): val2 = geometry_msgs.msg.Pose() _v214 = val2.position _x = _v214 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v215 = val2.orientation _x = 
_v215 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) val1.poses.append(val2) self.planning_scene.collision_objects.append(val1) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.attached_collision_objects = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.AttachedCollisionObject() start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val1.link_name = str[start:end].decode('utf-8') else: val1.link_name = str[start:end] _v216 = val1.object _v217 = _v216.header start = end end += 4 (_v217.seq,) = _struct_I.unpack(str[start:end]) _v218 = _v217.stamp _x = _v218 start = end end += 8 (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v217.frame_id = str[start:end].decode('utf-8') else: _v217.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: _v216.id = str[start:end].decode('utf-8') else: _v216.id = str[start:end] start = end end += 4 (_v216.padding,) = _struct_f.unpack(str[start:end]) _v219 = _v216.operation start = end end += 1 (_v219.operation,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v216.shapes = [] for i in range(0, length): val3 = arm_navigation_msgs.msg.Shape() start = end end += 1 (val3.type,) = _struct_b.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%sd'%length start = end end += struct.calcsize(pattern) val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) pattern = '<%si'%length start = end end += struct.calcsize(pattern) val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length) start = end end += 4 
(length,) = _struct_I.unpack(str[start:end]) val3.vertices = [] for i in range(0, length): val4 = geometry_msgs.msg.Point() _x = val4 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) val3.vertices.append(val4) _v216.shapes.append(val3) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) _v216.poses = [] for i in range(0, length): val3 = geometry_msgs.msg.Pose() _v220 = val3.position _x = _v220 start = end end += 24 (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end]) _v221 = val3.orientation _x = _v221 start = end end += 32 (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end]) _v216.poses.append(val3) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) val1.touch_links = [] for i in range(0, length): start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: val2 = str[start:end].decode('utf-8') else: val2 = str[start:end] val1.touch_links.append(val2) self.planning_scene.attached_collision_objects.append(val1) _x = self start = end end += 12 (_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end]) start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8') else: self.planning_scene.collision_map.header.frame_id = str[start:end] start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) self.planning_scene.collision_map.boxes = [] for i in range(0, length): val1 = arm_navigation_msgs.msg.OrientedBoundingBox() _v222 = val1.center _x = _v222 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) _v223 = val1.extents _x = _v223 start = end end += 12 (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end]) _v224 = val1.axis _x = _v224 start = end end += 12 (_x.x, _x.y, _x.z,) = 
_struct_3f.unpack(str[start:end])
        start = end
        end += 4
        (val1.angle,) = _struct_f.unpack(str[start:end])
        self.planning_scene.collision_map.boxes.append(val1)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill

# Precompiled struct formats shared by the (de)serializers above.
# Compiling each format string once at import time avoids re-parsing it
# on every pack/unpack call inside the hot serialization loops.
_struct_I = genpy.struct_I
_struct_b = struct.Struct("<b")    # little-endian int8 (shape type, operation)
_struct_d = struct.Struct("<d")    # little-endian float64 (padding, penetration_depth)
_struct_f = struct.Struct("<f")    # little-endian float32 (padding, angle)
_struct_3f = struct.Struct("<3f")  # three float32 (collision-map box center/extents/axis)
_struct_3I = struct.Struct("<3I")  # header seq + stamp secs/nsecs
_struct_4d = struct.Struct("<4d")  # quaternion x, y, z, w
_struct_2I = struct.Struct("<2I")  # time secs/nsecs
_struct_3d = struct.Struct("<3d")  # point/vector x, y, z

class GetPlanningScene(object):
  # Service stub generated by genpy: pairs the request and response message
  # classes so the ROS service machinery can (de)serialize both sides of an
  # arm_navigation_msgs/GetPlanningScene call.  The md5sum is the contract
  # checksum computed from the .srv definition; client and server must match.
  _type          = 'arm_navigation_msgs/GetPlanningScene'
  _md5sum = '0a7b07718e4e5c5d35740c730509a151'
  _request_class  = GetPlanningSceneRequest
  _response_class = GetPlanningSceneResponse
normal
{ "blob_id": "b8e18877af990c533c642d4937354198a4676419", "index": 5194, "step-1": "<mask token>\n\n\nclass GetPlanningSceneResponse(genpy.Message):\n _md5sum = '285525c9abe002fbafa99af84a14b4cb'\n _type = 'arm_navigation_msgs/GetPlanningSceneResponse'\n _has_header = False\n _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. 
All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. \n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. 
\nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose 
with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. \n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to 
this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. \n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n __slots__ = ['planning_scene']\n _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. 
Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.position))\n length = 
len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n buff.write(_struct_3d.pack(_x.x, 
_x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v120 = val1.shape\n 
buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n 
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = 
len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(\n pattern, str[start:end])\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n 
val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n _v143.seq, = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = 
str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n _v148.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n _v150.seq, = _struct_I.unpack(str[start:end])\n _v151 = 
_v150.stamp\n _x = _v151\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n _v155.seq, = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end 
+= 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n _v157.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end 
+= 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n _v161.seq, = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id = str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n _v160.padding, = _struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n _v163.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = 
geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v168 = val1.axis\n _x = _v168\n start = 
end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.position\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.velocity\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.effort.\n tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n 
multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v171.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n 
length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v173 = val1.transform\n _v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = _v177.header\n 
buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n 
buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = 
'<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: 
byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, 
count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = 
_struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n _v199.seq, = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % 
length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n _v204.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n _v206.seq, = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = 
_v209\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n _v211.seq, = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = 
str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v213 = val1.operation\n start = end\n end += 1\n _v213.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if 
python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n _v217.seq, = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n _v216.padding, = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n _v219.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n 
_v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise 
genpy.DeserializationError(e)\n\n\n<mask token>\n\n\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n", "step-2": "<mask token>\n\n\nclass GetPlanningSceneRequest(genpy.Message):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.position))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.velocity))\n length = 
len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v1 = val1.position\n _x = _v1\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v2 = val1.orientation\n _x = _v2\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n 
buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v3 = val1.header\n buff.write(_struct_I.pack(_v3.seq))\n _v4 = _v3.stamp\n _x = _v4\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v3.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v5 = val1.transform\n _v6 = _v5.translation\n _x = _v6\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v7 = _v5.rotation\n _x = _v7\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v8 = val1.shape\n buff.write(_struct_b.pack(_v8.type))\n length = len(_v8.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, 
*_v8.dimensions))\n length = len(_v8.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v8.triangles))\n length = len(_v8.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v8.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v9 = val1.pose_stamped\n _v10 = _v9.header\n buff.write(_struct_I.pack(_v10.seq))\n _v11 = _v10.stamp\n _x = _v11\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v10.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v12 = _v9.pose\n _v13 = _v12.position\n _x = _v13\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v14 = _v12.orientation\n _x = _v14\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v15 = val1.header\n buff.write(_struct_I.pack(_v15.seq))\n _v16 = _v15.stamp\n _x = _v16\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v15.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % 
length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v17 = val1.operation\n buff.write(_struct_b.pack(_v17.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v18 = val2.position\n _x = _v18\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v19 = val2.orientation\n _x = _v19\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v20 = val1.object\n _v21 = _v20.header\n buff.write(_struct_I.pack(_v21.seq))\n _v22 = _v21.stamp\n _x = _v22\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v21.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v20.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % 
length, length, _x))\n buff.write(_struct_f.pack(_v20.padding))\n _v23 = _v20.operation\n buff.write(_struct_b.pack(_v23.operation))\n length = len(_v20.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v20.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.poses:\n _v24 = val3.position\n _x = _v24\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v25 = val3.orientation\n _x = _v25\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v26 = val1.center\n _x = _v26\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v27 = val1.extents\n _x = _v27\n buff.write(_struct_3f.pack(_x.x, 
_x.y, _x.z))\n _v28 = val1.axis\n _x = _v28\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n <mask token>\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n 
buff.write(self.planning_scene_diff.robot_state.joint_state.\n position.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n velocity.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n effort.tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for 
val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v57 = val1.position\n _x = _v57\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v58 = val1.orientation\n _x = _v58\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v59 = val1.header\n buff.write(_struct_I.pack(_v59.seq))\n _v60 = _v59.stamp\n _x = _v60\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v59.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v61 = val1.transform\n _v62 = _v61.translation\n _x = _v62\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v63 = _v61.rotation\n _x = _v63\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or 
type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v64 = val1.shape\n buff.write(_struct_b.pack(_v64.type))\n length = len(_v64.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v64.dimensions.tostring())\n length = len(_v64.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v64.triangles.tostring())\n length = len(_v64.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v64.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v65 = val1.pose_stamped\n _v66 = _v65.header\n buff.write(_struct_I.pack(_v66.seq))\n _v67 = _v66.stamp\n _x = _v67\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v66.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v68 = _v65.pose\n _v69 = _v68.position\n _x = _v69\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v70 = _v68.orientation\n _x = _v70\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n 
_v71 = val1.header\n buff.write(_struct_I.pack(_v71.seq))\n _v72 = _v71.stamp\n _x = _v72\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v71.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v73 = val1.operation\n buff.write(_struct_b.pack(_v73.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v74 = val2.position\n _x = _v74\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v75 = val2.orientation\n _x = _v75\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v76 = val1.object\n _v77 = _v76.header\n buff.write(_struct_I.pack(_v77.seq))\n _v78 = _v77.stamp\n _x = _v78\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v77.frame_id\n length = len(_x)\n if python3 or 
type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v76.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v76.padding))\n _v79 = _v76.operation\n buff.write(_struct_b.pack(_v79.operation))\n length = len(_v76.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v76.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.poses:\n _v80 = val3.position\n _x = _v80\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v81 = val3.orientation\n _x = _v81\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n 
buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v82 = val1.center\n _x = _v82\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v83 = val1.extents\n _x = _v83\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v84 = val1.axis\n _x = _v84\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n <mask token>\n\n\n<mask token>\n\n\nclass GetPlanningSceneResponse(genpy.Message):\n _md5sum = '285525c9abe002fbafa99af84a14b4cb'\n _type = 'arm_navigation_msgs/GetPlanningSceneResponse'\n _has_header = False\n _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] 
attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. 
\nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose 
with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. \n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to 
this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. \n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n __slots__ = ['planning_scene']\n _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. 
Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.position))\n length = 
len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n buff.write(_struct_3d.pack(_x.x, 
_x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v120 = val1.shape\n 
buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n 
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = 
len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(\n pattern, str[start:end])\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n 
val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n _v143.seq, = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = 
str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n _v148.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n _v150.seq, = _struct_I.unpack(str[start:end])\n _v151 = 
_v150.stamp\n _x = _v151\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n _v155.seq, = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end 
+= 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n _v157.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end 
+= 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n _v161.seq, = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id = str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n _v160.padding, = _struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n _v163.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = 
geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v168 = val1.axis\n _x = _v168\n start = 
end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.position\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.velocity\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.effort.\n tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n 
multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v171.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n 
length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v173 = val1.transform\n _v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = _v177.header\n 
buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n 
buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = 
'<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: 
byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, 
count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = 
_struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n _v199.seq, = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % 
length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n _v204.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n _v206.seq, = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = 
_v209\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n _v211.seq, = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = 
str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v213 = val1.operation\n start = end\n end += 1\n _v213.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if 
python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n _v217.seq, = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n _v216.padding, = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n _v219.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n 
_v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise 
genpy.DeserializationError(e)\n\n\n<mask token>\n\n\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n", "step-3": "<mask token>\n\n\nclass GetPlanningSceneRequest(genpy.Message):\n _md5sum = '67ad55e9bed9c8f21dfb4b9b1ca8df7d'\n _type = 'arm_navigation_msgs/GetPlanningSceneRequest'\n _has_header = False\n _full_text = \"\"\"\n\n\nPlanningScene planning_scene_diff\n\n\narm_navigation_msgs/OrderedCollisionOperations operations\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. 
\n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. \n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion 
and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose 
with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. \n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to 
this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. 
\n\nfloat32 x\nfloat32 y\nfloat32 z\n================================================================================\nMSG: arm_navigation_msgs/OrderedCollisionOperations\n# A set of collision operations that will be performed in the order they are specified\nCollisionOperation[] collision_operations\n================================================================================\nMSG: arm_navigation_msgs/CollisionOperation\n# A definition of a collision operation\n# E.g. (\"gripper\",COLLISION_SET_ALL,ENABLE) will enable collisions \n# between the gripper and all objects in the collision space\n\nstring object1\nstring object2\nstring COLLISION_SET_ALL=\"all\"\nstring COLLISION_SET_OBJECTS=\"objects\"\nstring COLLISION_SET_ATTACHED_OBJECTS=\"attached\"\n\n# The penetration distance to which collisions are allowed. This is 0.0 by default.\nfloat64 penetration_distance\n\n# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above\nint32 operation\nint32 DISABLE=0\nint32 ENABLE=1\n\n\"\"\"\n __slots__ = ['planning_scene_diff', 'operations']\n _slot_types = ['arm_navigation_msgs/PlanningScene',\n 'arm_navigation_msgs/OrderedCollisionOperations']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. 
You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       planning_scene_diff,operations

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(GetPlanningSceneRequest, self).__init__(*args, **kwds)
      # Fields left unset by the caller get default-constructed message values.
      if self.planning_scene_diff is None:
        self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
      if self.operations is None:
        self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
    else:
      # No arguments at all: every field is default-constructed.
      self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
      self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()

  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    # NOTE: auto-generated by genpy from the .srv definition — do not edit by
    # hand; field order here must match the wire format in ``_full_text``.
    # Encoding conventions used throughout (little-endian):
    #   - strings: uint32 length prefix followed by utf-8 bytes
    #   - arrays:  uint32 element count, then the packed elements
    try:
      # robot_state.joint_state.header: seq + stamp.secs + stamp.nsecs (3 x uint32)
      _x = self
      buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))
      _x = self.planning_scene_diff.robot_state.joint_state.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss' % length, length, _x))
      # joint_state.name: string[]
      length = len(self.planning_scene_diff.robot_state.joint_state.name)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.joint_state.name:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss' % length, length, val1))
      # joint_state.position / velocity / effort: float64[]
      length = len(self.planning_scene_diff.robot_state.joint_state.position)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd' % length
      buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.position))
      length = len(self.planning_scene_diff.robot_state.joint_state.velocity)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd' % length
      buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.velocity))
      length = len(self.planning_scene_diff.robot_state.joint_state.effort)
      buff.write(_struct_I.pack(length))
      pattern = '<%sd' % length
      buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.effort))
      # multi_dof_joint_state: stamp, then joint/frame/child-frame name arrays, then poses
      _x = self
      buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))
      length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss' % length, length, val1))
      length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss' % length, length, val1))
      length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss' % length, length, val1))
      # poses: geometry_msgs/Pose[] — position (3 x float64), orientation (4 x float64)
      length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:
        _v1 = val1.position
        _x = _v1
        buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v2 = val1.orientation
        _x = _v2
        buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
      # fixed_frame_transforms: geometry_msgs/TransformStamped[]
      length = len(self.planning_scene_diff.fixed_frame_transforms)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.fixed_frame_transforms:
        _v3 = val1.header
        buff.write(_struct_I.pack(_v3.seq))
        _v4 = _v3.stamp
        _x = _v4
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v3.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = val1.child_frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _v5 = val1.transform
        _v6 = _v5.translation
        _x = _v6
        buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v7 = _v5.rotation
        _x = _v7
        buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
      # allowed_collision_matrix: link_names[] + boolean entry rows
      length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:
        length = len(val1)
        if python3 or type(val1) == unicode:
          val1 = val1.encode('utf-8')
          length = len(val1)
        buff.write(struct.pack('<I%ss' % length, length, val1))
      length = len(self.planning_scene_diff.allowed_collision_matrix.entries)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:
        length = len(val1.enabled)
        buff.write(_struct_I.pack(length))
        pattern = '<%sB' % length
        buff.write(struct.pack(pattern, *val1.enabled))
      # allowed_contacts: AllowedContactSpecification[]
      length = len(self.planning_scene_diff.allowed_contacts)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.allowed_contacts:
        _x = val1.name
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        # shape: type byte, then dimensions (float64[]), triangles (int32[]), vertices (Point[])
        _v8 = val1.shape
        buff.write(_struct_b.pack(_v8.type))
        length = len(_v8.dimensions)
        buff.write(_struct_I.pack(length))
        pattern = '<%sd' % length
        buff.write(struct.pack(pattern, *_v8.dimensions))
        length = len(_v8.triangles)
        buff.write(_struct_I.pack(length))
        pattern = '<%si' % length
        buff.write(struct.pack(pattern, *_v8.triangles))
        length = len(_v8.vertices)
        buff.write(_struct_I.pack(length))
        for val3 in _v8.vertices:
          _x = val3
          buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        # pose_stamped: header + pose
        _v9 = val1.pose_stamped
        _v10 = _v9.header
        buff.write(_struct_I.pack(_v10.seq))
        _v11 = _v10.stamp
        _x = _v11
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v10.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _v12 = _v9.pose
        _v13 = _v12.position
        _x = _v13
        buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        _v14 = _v12.orientation
        _x = _v14
        buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
        length = len(val1.link_names)
        buff.write(_struct_I.pack(length))
        for val2 in val1.link_names:
          length = len(val2)
          if python3 or type(val2) == unicode:
            val2 = val2.encode('utf-8')
            length = len(val2)
          buff.write(struct.pack('<I%ss' % length, length, val2))
        buff.write(_struct_d.pack(val1.penetration_depth))
      # link_padding: LinkPadding[]
      length = len(self.planning_scene_diff.link_padding)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.link_padding:
        _x = val1.link_name
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        buff.write(_struct_d.pack(val1.padding))
      # collision_objects: CollisionObject[]
      length = len(self.planning_scene_diff.collision_objects)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.collision_objects:
        _v15 = val1.header
        buff.write(_struct_I.pack(_v15.seq))
        _v16 = _v15.stamp
        _x = _v16
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v15.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = val1.id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        buff.write(_struct_f.pack(val1.padding))
        _v17 = val1.operation
        buff.write(_struct_b.pack(_v17.operation))
        length = len(val1.shapes)
        buff.write(_struct_I.pack(length))
        for val2 in val1.shapes:
          buff.write(_struct_b.pack(val2.type))
          length = len(val2.dimensions)
          buff.write(_struct_I.pack(length))
          pattern = '<%sd' % length
          buff.write(struct.pack(pattern, *val2.dimensions))
          length = len(val2.triangles)
          buff.write(_struct_I.pack(length))
          pattern = '<%si' % length
          buff.write(struct.pack(pattern, *val2.triangles))
          length = len(val2.vertices)
          buff.write(_struct_I.pack(length))
          for val3 in val2.vertices:
            _x = val3
            buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        length = len(val1.poses)
        buff.write(_struct_I.pack(length))
        for val2 in val1.poses:
          _v18 = val2.position
          _x = _v18
          buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
          _v19 = val2.orientation
          _x = _v19
          buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
      # attached_collision_objects: AttachedCollisionObject[]
      length = len(self.planning_scene_diff.attached_collision_objects)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.attached_collision_objects:
        _x = val1.link_name
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _v20 = val1.object
        _v21 = _v20.header
        buff.write(_struct_I.pack(_v21.seq))
        _v22 = _v21.stamp
        _x = _v22
        buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
        _x = _v21.frame_id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = _v20.id
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        buff.write(_struct_f.pack(_v20.padding))
        _v23 = _v20.operation
        buff.write(_struct_b.pack(_v23.operation))
        length = len(_v20.shapes)
        buff.write(_struct_I.pack(length))
        for val3 in _v20.shapes:
          buff.write(_struct_b.pack(val3.type))
          length = len(val3.dimensions)
          buff.write(_struct_I.pack(length))
          pattern = '<%sd' % length
          buff.write(struct.pack(pattern, *val3.dimensions))
          length = len(val3.triangles)
          buff.write(_struct_I.pack(length))
          pattern = '<%si' % length
          buff.write(struct.pack(pattern, *val3.triangles))
          length = len(val3.vertices)
          buff.write(_struct_I.pack(length))
          for val4 in val3.vertices:
            _x = val4
            buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
        length = len(_v20.poses)
        buff.write(_struct_I.pack(length))
        for val3 in _v20.poses:
          _v24 = val3.position
          _x = _v24
          buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
          _v25 = val3.orientation
          _x = _v25
          buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
        length = len(val1.touch_links)
        buff.write(_struct_I.pack(length))
        for val2 in val1.touch_links:
          length = len(val2)
          if python3 or type(val2) == unicode:
            val2 = val2.encode('utf-8')
            length = len(val2)
          buff.write(struct.pack('<I%ss' % length, length, val2))
      # collision_map: header + OrientedBoundingBox[] (3 x float32 triplets + angle)
      _x = self
      buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))
      _x = self.planning_scene_diff.collision_map.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss' % length, length, _x))
      length = len(self.planning_scene_diff.collision_map.boxes)
      buff.write(_struct_I.pack(length))
      for val1 in self.planning_scene_diff.collision_map.boxes:
        _v26 = val1.center
        _x = _v26
        buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
        _v27 = val1.extents
        _x = _v27
        buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
        _v28 = val1.axis
        _x = _v28
        buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
        buff.write(_struct_f.pack(val1.angle))
      # operations: CollisionOperation[] (object1, object2, penetration_distance + operation)
      length = len(self.operations.collision_operations)
      buff.write(_struct_I.pack(length))
      for val1 in self.operations.collision_operations:
        _x = val1.object1
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = val1.object2
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss' % length, length, _x))
        _x = val1
        buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))
    except struct.error as se:
      self._check_types(se)
    except TypeError as te:
      self._check_types(te)

  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    # NOTE: auto-generated by genpy — exact mirror of serialize(); `start`/`end`
    # track the byte cursor through the buffer and must advance in lockstep with
    # the field order of the wire format.
    try:
      if self.planning_scene_diff is None:
        self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
      if self.operations is None:
        self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
      end = 0
      # joint_state.header (3 x uint32) + frame_id string
      _x = self
      start = end
      end += 12
      (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs) = _struct_3I.unpack(str[start:end])
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]
      # joint_state.name: string[]
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.robot_state.joint_state.name = []
      for i in range(0, length):
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1 = str[start:end].decode('utf-8')
        else:
          val1 = str[start:end]
        self.planning_scene_diff.robot_state.joint_state.name.append(val1)
      # joint_state.position / velocity / effort: float64[]
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      pattern = '<%sd' % length
      start = end
      end += struct.calcsize(pattern)
      self.planning_scene_diff.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      pattern = '<%sd' % length
      start = end
      end += struct.calcsize(pattern)
      self.planning_scene_diff.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      pattern = '<%sd' % length
      start = end
      end += struct.calcsize(pattern)
      self.planning_scene_diff.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])
      # multi_dof_joint_state
      _x = self
      start = end
      end += 8
      (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs) = _struct_2I.unpack(str[start:end])
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []
      for i in range(0, length):
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1 = str[start:end].decode('utf-8')
        else:
          val1 = str[start:end]
        self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []
      for i in range(0, length):
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1 = str[start:end].decode('utf-8')
        else:
          val1 = str[start:end]
        self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []
      for i in range(0, length):
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1 = str[start:end].decode('utf-8')
        else:
          val1 = str[start:end]
        self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []
      for i in range(0, length):
        val1 = geometry_msgs.msg.Pose()
        _v29 = val1.position
        _x = _v29
        start = end
        end += 24
        _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])
        _v30 = val1.orientation
        _x = _v30
        start = end
        end += 32
        _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])
        self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)
      # fixed_frame_transforms: TransformStamped[]
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.fixed_frame_transforms = []
      for i in range(0, length):
        val1 = geometry_msgs.msg.TransformStamped()
        _v31 = val1.header
        start = end
        end += 4
        _v31.seq, = _struct_I.unpack(str[start:end])
        _v32 = _v31.stamp
        _x = _v32
        start = end
        end += 8
        _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          _v31.frame_id = str[start:end].decode('utf-8')
        else:
          _v31.frame_id = str[start:end]
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.child_frame_id = str[start:end].decode('utf-8')
        else:
          val1.child_frame_id = str[start:end]
        _v33 = val1.transform
        _v34 = _v33.translation
        _x = _v34
        start = end
        end += 24
        _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])
        _v35 = _v33.rotation
        _x = _v35
        start = end
        end += 32
        _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])
        self.planning_scene_diff.fixed_frame_transforms.append(val1)
      # allowed_collision_matrix
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.allowed_collision_matrix.link_names = []
      for i in range(0, length):
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1 = str[start:end].decode('utf-8')
        else:
          val1 = str[start:end]
        self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.allowed_collision_matrix.entries = []
      for i in range(0, length):
        val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        pattern = '<%sB' % length
        start = end
        end += struct.calcsize(pattern)
        val1.enabled = struct.unpack(pattern, str[start:end])
        # wire format stores bools as bytes; convert back to Python booleans
        val1.enabled = map(bool, val1.enabled)
        self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)
      # allowed_contacts: AllowedContactSpecification[]
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.allowed_contacts = []
      for i in range(0, length):
        val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.name = str[start:end].decode('utf-8')
        else:
          val1.name = str[start:end]
        _v36 = val1.shape
        start = end
        end += 1
        _v36.type, = _struct_b.unpack(str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        pattern = '<%sd' % length
        start = end
        end += struct.calcsize(pattern)
        _v36.dimensions = struct.unpack(pattern, str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        pattern = '<%si' % length
        start = end
        end += struct.calcsize(pattern)
        _v36.triangles = struct.unpack(pattern, str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        _v36.vertices = []
        for i in range(0, length):
          val3 = geometry_msgs.msg.Point()
          _x = val3
          start = end
          end += 24
          _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])
          _v36.vertices.append(val3)
        _v37 = val1.pose_stamped
        _v38 = _v37.header
        start = end
        end += 4
        _v38.seq, = _struct_I.unpack(str[start:end])
        _v39 = _v38.stamp
        _x = _v39
        start = end
        end += 8
        _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          _v38.frame_id = str[start:end].decode('utf-8')
        else:
          _v38.frame_id = str[start:end]
        _v40 = _v37.pose
        _v41 = _v40.position
        _x = _v41
        start = end
        end += 24
        _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])
        _v42 = _v40.orientation
        _x = _v42
        start = end
        end += 32
        _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        val1.link_names = []
        for i in range(0, length):
          start = end
          end += 4
          length, = _struct_I.unpack(str[start:end])
          start = end
          end += length
          if python3:
            val2 = str[start:end].decode('utf-8')
          else:
            val2 = str[start:end]
          val1.link_names.append(val2)
        start = end
        end += 8
        val1.penetration_depth, = _struct_d.unpack(str[start:end])
        self.planning_scene_diff.allowed_contacts.append(val1)
      # link_padding: LinkPadding[]
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.link_padding = []
      for i in range(0, length):
        val1 = arm_navigation_msgs.msg.LinkPadding()
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.link_name = str[start:end].decode('utf-8')
        else:
          val1.link_name = str[start:end]
        start = end
        end += 8
        val1.padding, = _struct_d.unpack(str[start:end])
        self.planning_scene_diff.link_padding.append(val1)
      # collision_objects: CollisionObject[]
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.collision_objects = []
      for i in range(0, length):
        val1 = arm_navigation_msgs.msg.CollisionObject()
        _v43 = val1.header
        start = end
        end += 4
        _v43.seq, = _struct_I.unpack(str[start:end])
        _v44 = _v43.stamp
        _x = _v44
        start = end
        end += 8
        _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          _v43.frame_id = str[start:end].decode('utf-8')
        else:
          _v43.frame_id = str[start:end]
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.id = str[start:end].decode('utf-8')
        else:
          val1.id = str[start:end]
        start = end
        end += 4
        val1.padding, = _struct_f.unpack(str[start:end])
        _v45 = val1.operation
        start = end
        end += 1
        _v45.operation, = _struct_b.unpack(str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        val1.shapes = []
        for i in range(0, length):
          val2 = arm_navigation_msgs.msg.Shape()
          start = end
          end += 1
          val2.type, = _struct_b.unpack(str[start:end])
          start = end
          end += 4
          length, = _struct_I.unpack(str[start:end])
          pattern = '<%sd' % length
          start = end
          end += struct.calcsize(pattern)
          val2.dimensions = struct.unpack(pattern, str[start:end])
          start = end
          end += 4
          length, = _struct_I.unpack(str[start:end])
          pattern = '<%si' % length
          start = end
          end += struct.calcsize(pattern)
          val2.triangles = struct.unpack(pattern, str[start:end])
          start = end
          end += 4
          length, = _struct_I.unpack(str[start:end])
          val2.vertices = []
          for i in range(0, length):
            val3 = geometry_msgs.msg.Point()
            _x = val3
            start = end
            end += 24
            _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])
            val2.vertices.append(val3)
          val1.shapes.append(val2)
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        val1.poses = []
        for i in range(0, length):
          val2 = geometry_msgs.msg.Pose()
          _v46 = val2.position
          _x = _v46
          start = end
          end += 24
          _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])
          _v47 = val2.orientation
          _x = _v47
          start = end
          end += 32
          _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])
          val1.poses.append(val2)
        self.planning_scene_diff.collision_objects.append(val1)
      # attached_collision_objects: AttachedCollisionObject[]
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.attached_collision_objects = []
      for i in range(0, length):
        val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.link_name = str[start:end].decode('utf-8')
        else:
          val1.link_name = str[start:end]
        _v48 = val1.object
        _v49 = _v48.header
        start = end
        end += 4
        _v49.seq, = _struct_I.unpack(str[start:end])
        _v50 = _v49.stamp
        _x = _v50
        start = end
        end += 8
        _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          _v49.frame_id = str[start:end].decode('utf-8')
        else:
          _v49.frame_id = str[start:end]
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          _v48.id = str[start:end].decode('utf-8')
        else:
          _v48.id = str[start:end]
        start = end
        end += 4
        _v48.padding, = _struct_f.unpack(str[start:end])
        _v51 = _v48.operation
        start = end
        end += 1
        _v51.operation, = _struct_b.unpack(str[start:end])
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        _v48.shapes = []
        for i in range(0, length):
          val3 = arm_navigation_msgs.msg.Shape()
          start = end
          end += 1
          val3.type, = _struct_b.unpack(str[start:end])
          start = end
          end += 4
          length, = _struct_I.unpack(str[start:end])
          pattern = '<%sd' % length
          start = end
          end += struct.calcsize(pattern)
          val3.dimensions = struct.unpack(pattern, str[start:end])
          start = end
          end += 4
          length, = _struct_I.unpack(str[start:end])
          pattern = '<%si' % length
          start = end
          end += struct.calcsize(pattern)
          val3.triangles = struct.unpack(pattern, str[start:end])
          start = end
          end += 4
          length, = _struct_I.unpack(str[start:end])
          val3.vertices = []
          for i in range(0, length):
            val4 = geometry_msgs.msg.Point()
            _x = val4
            start = end
            end += 24
            _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])
            val3.vertices.append(val4)
          _v48.shapes.append(val3)
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        _v48.poses = []
        for i in range(0, length):
          val3 = geometry_msgs.msg.Pose()
          _v52 = val3.position
          _x = _v52
          start = end
          end += 24
          _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])
          _v53 = val3.orientation
          _x = _v53
          start = end
          end += 32
          _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])
          _v48.poses.append(val3)
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        val1.touch_links = []
        for i in range(0, length):
          start = end
          end += 4
          length, = _struct_I.unpack(str[start:end])
          start = end
          end += length
          if python3:
            val2 = str[start:end].decode('utf-8')
          else:
            val2 = str[start:end]
          val1.touch_links.append(val2)
        self.planning_scene_diff.attached_collision_objects.append(val1)
      # collision_map: header + boxes
      _x = self
      start = end
      end += 12
      (_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[start:end])
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.planning_scene_diff.collision_map.header.frame_id = str[start:end]
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.planning_scene_diff.collision_map.boxes = []
      for i in range(0, length):
        val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
        _v54 = val1.center
        _x = _v54
        start = end
        end += 12
        _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])
        _v55 = val1.extents
        _x = _v55
        start = end
        end += 12
        _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])
        _v56 = val1.axis
        _x = _v56
        start = end
        end += 12
        _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])
        start = end
        end += 4
        val1.angle, = _struct_f.unpack(str[start:end])
        self.planning_scene_diff.collision_map.boxes.append(val1)
      # operations: CollisionOperation[]
      start = end
      end += 4
      length, = _struct_I.unpack(str[start:end])
      self.operations.collision_operations = []
      for i in range(0, length):
        val1 = arm_navigation_msgs.msg.CollisionOperation()
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.object1 = str[start:end].decode('utf-8')
        else:
          val1.object1 = str[start:end]
        start = end
        end += 4
        length, = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.object2 = str[start:end].decode('utf-8')
        else:
          val1.object2 = str[start:end]
        _x = val1
        start = end
        end += 12
        _x.penetration_distance, _x.operation = _struct_di.unpack(str[start:end])
        self.operations.collision_operations.append(val1)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e)

  def 
serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n position.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n velocity.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n effort.tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n 
buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v57 = val1.position\n _x = _v57\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v58 = val1.orientation\n _x = _v58\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v59 = val1.header\n buff.write(_struct_I.pack(_v59.seq))\n _v60 = _v59.stamp\n _x = _v60\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v59.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == 
unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v61 = val1.transform\n _v62 = _v61.translation\n _x = _v62\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v63 = _v61.rotation\n _x = _v63\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v64 = val1.shape\n buff.write(_struct_b.pack(_v64.type))\n length = len(_v64.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v64.dimensions.tostring())\n length = len(_v64.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v64.triangles.tostring())\n length = len(_v64.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v64.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v65 = val1.pose_stamped\n _v66 = _v65.header\n buff.write(_struct_I.pack(_v66.seq))\n _v67 = _v66.stamp\n _x = _v67\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v66.frame_id\n 
length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v68 = _v65.pose\n _v69 = _v68.position\n _x = _v69\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v70 = _v68.orientation\n _x = _v70\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v71 = val1.header\n buff.write(_struct_I.pack(_v71.seq))\n _v72 = _v71.stamp\n _x = _v72\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v71.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v73 = val1.operation\n buff.write(_struct_b.pack(_v73.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n 
pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v74 = val2.position\n _x = _v74\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v75 = val2.orientation\n _x = _v75\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v76 = val1.object\n _v77 = _v76.header\n buff.write(_struct_I.pack(_v77.seq))\n _v78 = _v77.stamp\n _x = _v78\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v77.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v76.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v76.padding))\n _v79 = _v76.operation\n buff.write(_struct_b.pack(_v79.operation))\n length = len(_v76.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n 
buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v76.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.poses:\n _v80 = val3.position\n _x = _v80\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v81 = val3.orientation\n _x = _v81\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v82 = val1.center\n _x = _v82\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v83 = val1.extents\n _x = _v83\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v84 = val1.axis\n _x = _v84\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n 
_x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.\n planning_scene_diff.robot_state.joint_state.header.stamp.\n secs, _x.planning_scene_diff.robot_state.joint_state.header\n .stamp.nsecs) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = (numpy\n 
.frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp\n .secs, _x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.nsecs) = _struct_2I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n joint_names) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.poses\n ) = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v85 = val1.position\n _x = _v85\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v86 = val1.orientation\n _x = _v86\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v87 = val1.header\n start = end\n end += 4\n _v87.seq, = _struct_I.unpack(str[start:end])\n _v88 = _v87.stamp\n _x = _v88\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v87.frame_id = str[start:end].decode('utf-8')\n else:\n _v87.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v89 = val1.transform\n _v90 = _v89.translation\n _x = _v90\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v91 = _v89.rotation\n _x = _v91\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v92 = val1.shape\n start = end\n end += 1\n _v92.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v92.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v92.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n _v92.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v92.vertices.append(val3)\n _v93 = val1.pose_stamped\n _v94 = _v93.header\n start = end\n end += 4\n _v94.seq, = _struct_I.unpack(str[start:end])\n _v95 = _v94.stamp\n _x = _v95\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v94.frame_id = str[start:end].decode('utf-8')\n else:\n _v94.frame_id = str[start:end]\n _v96 = _v93.pose\n _v97 = _v96.position\n _x = _v97\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v98 = _v96.orientation\n _x = _v98\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v99 = val1.header\n start = end\n end += 4\n _v99.seq, = _struct_I.unpack(str[start:end])\n _v100 = _v99.stamp\n _x = _v100\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v99.frame_id = str[start:end].decode('utf-8')\n else:\n _v99.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v101 = val1.operation\n start = end\n end += 1\n _v101.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v102 = val2.position\n _x = _v102\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v103 = val2.orientation\n _x = _v103\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v104 = val1.object\n _v105 = _v104.header\n start = end\n end += 4\n _v105.seq, = _struct_I.unpack(str[start:end])\n _v106 = _v105.stamp\n _x = _v106\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v105.frame_id = str[start:end].decode('utf-8')\n else:\n _v105.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v104.id = str[start:end].decode('utf-8')\n else:\n _v104.id = str[start:end]\n start = end\n end += 4\n _v104.padding, = _struct_f.unpack(str[start:end])\n _v107 = _v104.operation\n start = end\n end += 1\n _v107.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v104.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = 
'<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v104.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v104.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v108 = val3.position\n _x = _v108\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v109 = val3.orientation\n _x = _v109\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v104.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1\n )\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, _x.\n planning_scene_diff.collision_map.header.stamp.secs, _x.\n planning_scene_diff.collision_map.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end].decode('utf-8')\n else:\n 
self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v110 = val1.center\n _x = _v110\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v111 = val1.extents\n _x = _v111\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v112 = val1.axis\n _x = _v112\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 = str[start:end].decode('utf-8')\n else:\n val1.object1 = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object2 = str[start:end].decode('utf-8')\n else:\n val1.object2 = str[start:end]\n _x = val1\n start = end\n end += 12\n _x.penetration_distance, _x.operation = _struct_di.unpack(str\n [start:end])\n self.operations.collision_operations.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n\n<mask token>\n\n\nclass GetPlanningSceneResponse(genpy.Message):\n _md5sum = '285525c9abe002fbafa99af84a14b4cb'\n _type = 'arm_navigation_msgs/GetPlanningSceneResponse'\n _has_header = False\n _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: 
arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. 
\nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by three vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose 
with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. \n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to 
this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. \n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n __slots__ = ['planning_scene']\n _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. 
Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.position))\n length = 
len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n buff.write(_struct_3d.pack(_x.x, 
_x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v120 = val1.shape\n 
buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n 
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = 
len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(\n pattern, str[start:end])\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n 
val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n _v143.seq, = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = 
str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n _v148.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n _v150.seq, = _struct_I.unpack(str[start:end])\n _v151 = 
_v150.stamp\n _x = _v151\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n _v155.seq, = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end 
+= 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n _v157.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end 
+= 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n _v161.seq, = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id = str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n _v160.padding, = _struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n _v163.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = 
geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v168 = val1.axis\n _x = _v168\n start = 
end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.position\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.velocity\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.effort.\n tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n 
multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v171.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n 
length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v173 = val1.transform\n _v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = _v177.header\n 
buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n 
buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = 
'<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: 
byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, 
count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = 
_struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n _v199.seq, = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % 
length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n _v204.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n _v206.seq, = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = 
_v209\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n _v211.seq, = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = 
str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v213 = val1.operation\n start = end\n end += 1\n _v213.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if 
python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n _v217.seq, = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n _v216.padding, = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n _v219.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n 
_v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise 
genpy.DeserializationError(e)\n\n\n<mask token>\n\n\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n", "step-4": "<mask token>\nimport sys\npython3 = True if sys.hexversion > 50331648 else False\nimport genpy\nimport struct\nimport arm_navigation_msgs.msg\nimport geometry_msgs.msg\nimport std_msgs.msg\nimport genpy\nimport sensor_msgs.msg\n\n\nclass GetPlanningSceneRequest(genpy.Message):\n _md5sum = '67ad55e9bed9c8f21dfb4b9b1ca8df7d'\n _type = 'arm_navigation_msgs/GetPlanningSceneRequest'\n _has_header = False\n _full_text = \"\"\"\n\n\nPlanningScene planning_scene_diff\n\n\narm_navigation_msgs/OrderedCollisionOperations operations\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. 
the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. 
\nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose 
with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. \n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to 
this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. 
\n\nfloat32 x\nfloat32 y\nfloat32 z\n================================================================================\nMSG: arm_navigation_msgs/OrderedCollisionOperations\n# A set of collision operations that will be performed in the order they are specified\nCollisionOperation[] collision_operations\n================================================================================\nMSG: arm_navigation_msgs/CollisionOperation\n# A definition of a collision operation\n# E.g. (\"gripper\",COLLISION_SET_ALL,ENABLE) will enable collisions \n# between the gripper and all objects in the collision space\n\nstring object1\nstring object2\nstring COLLISION_SET_ALL=\"all\"\nstring COLLISION_SET_OBJECTS=\"objects\"\nstring COLLISION_SET_ATTACHED_OBJECTS=\"attached\"\n\n# The penetration distance to which collisions are allowed. This is 0.0 by default.\nfloat64 penetration_distance\n\n# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above\nint32 operation\nint32 DISABLE=0\nint32 ENABLE=1\n\n\"\"\"\n __slots__ = ['planning_scene_diff', 'operations']\n _slot_types = ['arm_navigation_msgs/PlanningScene',\n 'arm_navigation_msgs/OrderedCollisionOperations']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. 
You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene_diff,operations\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneRequest, self).__init__(*args, **kwds)\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n else:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n 
robot_state.joint_state.position))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v1 = val1.position\n _x = _v1\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v2 = val1.orientation\n _x = _v2\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v3 = val1.header\n buff.write(_struct_I.pack(_v3.seq))\n _v4 = _v3.stamp\n _x = _v4\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v3.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v5 = val1.transform\n _v6 = _v5.translation\n _x = _v6\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v7 = _v5.rotation\n _x = _v7\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == 
unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v8 = val1.shape\n buff.write(_struct_b.pack(_v8.type))\n length = len(_v8.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v8.dimensions))\n length = len(_v8.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v8.triangles))\n length = len(_v8.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v8.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v9 = val1.pose_stamped\n _v10 = _v9.header\n buff.write(_struct_I.pack(_v10.seq))\n _v11 = _v10.stamp\n _x = _v11\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v10.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v12 = _v9.pose\n _v13 = _v12.position\n _x = _v13\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v14 = _v12.orientation\n _x = _v14\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n 
_v15 = val1.header\n buff.write(_struct_I.pack(_v15.seq))\n _v16 = _v15.stamp\n _x = _v16\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v15.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v17 = val1.operation\n buff.write(_struct_b.pack(_v17.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v18 = val2.position\n _x = _v18\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v19 = val2.orientation\n _x = _v19\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v20 = val1.object\n _v21 = _v20.header\n buff.write(_struct_I.pack(_v21.seq))\n _v22 = _v21.stamp\n _x = _v22\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v21.frame_id\n length = 
len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v20.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v20.padding))\n _v23 = _v20.operation\n buff.write(_struct_b.pack(_v23.operation))\n length = len(_v20.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v20.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.poses:\n _v24 = val3.position\n _x = _v24\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v25 = val3.orientation\n _x = _v25\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = 
len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v26 = val1.center\n _x = _v26\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v27 = val1.extents\n _x = _v27\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v28 = val1.axis\n _x = _v28\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.\n planning_scene_diff.robot_state.joint_state.header.stamp.\n secs, _x.planning_scene_diff.robot_state.joint_state.header\n .stamp.nsecs) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = 
str[start:end].decode('utf-8')\n else:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = (struct\n .unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = (struct\n .unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = (struct\n .unpack(pattern, str[start:end]))\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp\n .secs, _x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.nsecs) = _struct_2I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n joint_names) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n 
end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.poses\n ) = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v29 = val1.position\n _x = _v29\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v30 = val1.orientation\n _x = _v30\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v31 = val1.header\n start = end\n end += 4\n _v31.seq, = _struct_I.unpack(str[start:end])\n _v32 = _v31.stamp\n _x = _v32\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v31.frame_id = 
str[start:end].decode('utf-8')\n else:\n _v31.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v33 = val1.transform\n _v34 = _v33.translation\n _x = _v34\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v35 = _v33.rotation\n _x = _v35\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n 
_v36 = val1.shape\n start = end\n end += 1\n _v36.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v36.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v36.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v36.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v36.vertices.append(val3)\n _v37 = val1.pose_stamped\n _v38 = _v37.header\n start = end\n end += 4\n _v38.seq, = _struct_I.unpack(str[start:end])\n _v39 = _v38.stamp\n _x = _v39\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v38.frame_id = str[start:end].decode('utf-8')\n else:\n _v38.frame_id = str[start:end]\n _v40 = _v37.pose\n _v41 = _v40.position\n _x = _v41\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v42 = _v40.orientation\n _x = _v42\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v43 = val1.header\n start = end\n end += 4\n _v43.seq, = _struct_I.unpack(str[start:end])\n _v44 = _v43.stamp\n _x = _v44\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v43.frame_id = str[start:end].decode('utf-8')\n else:\n _v43.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v45 = val1.operation\n start = end\n end += 1\n _v45.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % 
length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v46 = val2.position\n _x = _v46\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v47 = val2.orientation\n _x = _v47\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v48 = val1.object\n _v49 = _v48.header\n start = end\n end += 4\n _v49.seq, = _struct_I.unpack(str[start:end])\n _v50 = _v49.stamp\n _x = _v50\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v49.frame_id = str[start:end].decode('utf-8')\n else:\n _v49.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v48.id = str[start:end].decode('utf-8')\n else:\n _v48.id = str[start:end]\n start = end\n end += 4\n _v48.padding, = 
_struct_f.unpack(str[start:end])\n _v51 = _v48.operation\n start = end\n end += 1\n _v51.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v48.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v48.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v48.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v52 = val3.position\n _x = _v52\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v53 = val3.orientation\n _x = _v53\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v48.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1\n )\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, 
_x.\n planning_scene_diff.collision_map.header.stamp.secs, _x.\n planning_scene_diff.collision_map.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end].decode('utf-8')\n else:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v54 = val1.center\n _x = _v54\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v55 = val1.extents\n _x = _v55\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v56 = val1.axis\n _x = _v56\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 = str[start:end].decode('utf-8')\n else:\n val1.object1 = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object2 = str[start:end].decode('utf-8')\n else:\n val1.object2 = str[start:end]\n _x = val1\n start = end\n end += 12\n _x.penetration_distance, _x.operation = _struct_di.unpack(str\n [start:end])\n self.operations.collision_operations.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def 
serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.\n joint_state.header.seq, _x.planning_scene_diff.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene_diff.\n robot_state.joint_state.header.stamp.nsecs))\n _x = (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id)\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.\n position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n position.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n velocity.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.\n effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene_diff.robot_state.joint_state.\n effort.tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.joint_names)\n 
buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v57 = val1.position\n _x = _v57\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v58 = val1.orientation\n _x = _v58\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v59 = val1.header\n buff.write(_struct_I.pack(_v59.seq))\n _v60 = _v59.stamp\n _x = _v60\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v59.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == 
unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v61 = val1.transform\n _v62 = _v61.translation\n _x = _v62\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v63 = _v61.rotation\n _x = _v63\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.\n entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v64 = val1.shape\n buff.write(_struct_b.pack(_v64.type))\n length = len(_v64.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v64.dimensions.tostring())\n length = len(_v64.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v64.triangles.tostring())\n length = len(_v64.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v64.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v65 = val1.pose_stamped\n _v66 = _v65.header\n buff.write(_struct_I.pack(_v66.seq))\n _v67 = _v66.stamp\n _x = _v67\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v66.frame_id\n 
length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v68 = _v65.pose\n _v69 = _v68.position\n _x = _v69\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v70 = _v68.orientation\n _x = _v70\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v71 = val1.header\n buff.write(_struct_I.pack(_v71.seq))\n _v72 = _v71.stamp\n _x = _v72\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v71.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v73 = val1.operation\n buff.write(_struct_b.pack(_v73.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n 
pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v74 = val2.position\n _x = _v74\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v75 = val2.orientation\n _x = _v75\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v76 = val1.object\n _v77 = _v76.header\n buff.write(_struct_I.pack(_v77.seq))\n _v78 = _v77.stamp\n _x = _v78\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v77.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v76.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v76.padding))\n _v79 = _v76.operation\n buff.write(_struct_b.pack(_v79.operation))\n length = len(_v76.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n 
buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v76.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.poses:\n _v80 = val3.position\n _x = _v80\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v81 = val3.orientation\n _x = _v81\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map\n .header.seq, _x.planning_scene_diff.collision_map.header.\n stamp.secs, _x.planning_scene_diff.collision_map.header.\n stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v82 = val1.center\n _x = _v82\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v83 = val1.extents\n _x = _v83\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v84 = val1.axis\n _x = _v84\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n 
_x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.\n operation))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = (arm_navigation_msgs.msg.\n PlanningScene())\n if self.operations is None:\n self.operations = (arm_navigation_msgs.msg.\n OrderedCollisionOperations())\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.\n planning_scene_diff.robot_state.joint_state.header.stamp.\n secs, _x.planning_scene_diff.robot_state.joint_state.header\n .stamp.nsecs) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene_diff.robot_state.joint_state.header.\n frame_id) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = (numpy\n 
.frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = (numpy\n .frombuffer(str[start:end], dtype=numpy.float64, count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp\n .secs, _x.planning_scene_diff.robot_state.\n multi_dof_joint_state.stamp.nsecs) = _struct_2I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n joint_names) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene_diff.robot_state.multi_dof_joint_state.poses\n ) = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v85 = val1.position\n _x = _v85\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v86 = val1.orientation\n _x = _v86\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v87 = val1.header\n start = end\n end += 4\n _v87.seq, = _struct_I.unpack(str[start:end])\n _v88 = _v87.stamp\n _x = _v88\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v87.frame_id = str[start:end].decode('utf-8')\n else:\n _v87.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v89 = val1.transform\n _v90 = _v89.translation\n _x = _v90\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v91 = _v89.rotation\n _x = _v91\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v92 = val1.shape\n start = end\n end += 1\n _v92.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v92.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v92.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n _v92.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v92.vertices.append(val3)\n _v93 = val1.pose_stamped\n _v94 = _v93.header\n start = end\n end += 4\n _v94.seq, = _struct_I.unpack(str[start:end])\n _v95 = _v94.stamp\n _x = _v95\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v94.frame_id = str[start:end].decode('utf-8')\n else:\n _v94.frame_id = str[start:end]\n _v96 = _v93.pose\n _v97 = _v96.position\n _x = _v97\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v98 = _v96.orientation\n _x = _v98\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v99 = val1.header\n start = end\n end += 4\n _v99.seq, = _struct_I.unpack(str[start:end])\n _v100 = _v99.stamp\n _x = _v100\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v99.frame_id = str[start:end].decode('utf-8')\n else:\n _v99.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v101 = val1.operation\n start = end\n end += 1\n _v101.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v102 = val2.position\n _x = _v102\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v103 = val2.orientation\n _x = _v103\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v104 = val1.object\n _v105 = _v104.header\n start = end\n end += 4\n _v105.seq, = _struct_I.unpack(str[start:end])\n _v106 = _v105.stamp\n _x = _v106\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v105.frame_id = str[start:end].decode('utf-8')\n else:\n _v105.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v104.id = str[start:end].decode('utf-8')\n else:\n _v104.id = str[start:end]\n start = end\n end += 4\n _v104.padding, = _struct_f.unpack(str[start:end])\n _v107 = _v104.operation\n start = end\n end += 1\n _v107.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v104.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = 
'<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v104.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v104.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v108 = val3.position\n _x = _v108\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v109 = val3.orientation\n _x = _v109\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v104.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1\n )\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, _x.\n planning_scene_diff.collision_map.header.stamp.secs, _x.\n planning_scene_diff.collision_map.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end].decode('utf-8')\n else:\n 
self.planning_scene_diff.collision_map.header.frame_id = str[\n start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v110 = val1.center\n _x = _v110\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v111 = val1.extents\n _x = _v111\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v112 = val1.axis\n _x = _v112\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 = str[start:end].decode('utf-8')\n else:\n val1.object1 = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object2 = str[start:end].decode('utf-8')\n else:\n val1.object2 = str[start:end]\n _x = val1\n start = end\n end += 12\n _x.penetration_distance, _x.operation = _struct_di.unpack(str\n [start:end])\n self.operations.collision_operations.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n\n_struct_I = genpy.struct_I\n_struct_b = struct.Struct('<b')\n_struct_d = struct.Struct('<d')\n_struct_f = struct.Struct('<f')\n_struct_di = struct.Struct('<di')\n_struct_3f = struct.Struct('<3f')\n_struct_3I = struct.Struct('<3I')\n_struct_4d = struct.Struct('<4d')\n_struct_2I = struct.Struct('<2I')\n_struct_3d = struct.Struct('<3d')\n<mask token>\nimport sys\npython3 = True if 
sys.hexversion > 50331648 else False\nimport genpy\nimport struct\nimport arm_navigation_msgs.msg\nimport geometry_msgs.msg\nimport std_msgs.msg\nimport genpy\nimport sensor_msgs.msg\n\n\nclass GetPlanningSceneResponse(genpy.Message):\n _md5sum = '285525c9abe002fbafa99af84a14b4cb'\n _type = 'arm_navigation_msgs/GetPlanningSceneResponse'\n _has_header = False\n _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. 
\n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. \n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion 
and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose 
with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. \n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to 
this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. \n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n __slots__ = ['planning_scene']\n _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. 
Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.position))\n length = 
len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *self.planning_scene.\n robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n buff.write(_struct_3d.pack(_x.x, 
_x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v120 = val1.shape\n 
buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n 
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = 
len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = 
_struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (struct.\n unpack(pattern, str[start:end]))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(\n pattern, str[start:end])\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n 
val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n _v143.seq, = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = 
str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n _v148.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n _v150.seq, = _struct_I.unpack(str[start:end])\n _v151 = 
_v150.stamp\n _x = _v151\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n _v155.seq, = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end 
+= 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n _v157.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end 
+= 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n _v161.seq, = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id = str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n _v160.padding, = _struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n _v163.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = 
geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v168 = val1.axis\n _x = _v168\n start = 
end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e)\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.\n joint_state.header.seq, _x.planning_scene.robot_state.\n joint_state.header.stamp.secs, _x.planning_scene.\n robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.position\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.velocity\n .tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(self.planning_scene.robot_state.joint_state.effort.\n tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.\n 
multi_dof_joint_state.stamp.secs, _x.planning_scene.\n robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.robot_state.\n multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v171.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n 
length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v173 = val1.transform\n _v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.\n link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss' % length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB' % length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = _v177.header\n 
buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n 
buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd' % length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = 
'<%sd' % length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si' % length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss' % length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.\n header.seq, _x.planning_scene.collision_map.header.stamp.\n secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss' % length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se:\n self._check_types(se)\n except TypeError as te:\n self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: 
byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.\n planning_scene.robot_state.joint_state.header.stamp.secs,\n _x.planning_scene.robot_state.joint_state.header.stamp.nsecs\n ) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end].decode('utf-8')\n else:\n (self.planning_scene.robot_state.joint_state.header.frame_id\n ) = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, count=length))\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = (numpy.\n frombuffer(str[start:end], dtype=numpy.float64, 
count=length))\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs,\n _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs\n ) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.joint_names\n ) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = [\n ]\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n (self.planning_scene.robot_state.multi_dof_joint_state.\n child_frame_ids) = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = 
_struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n _v199.seq, = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sB' % 
length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy\n .bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(\n val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n _v204.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=\n numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=\n numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n _v206.seq, = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = 
_v209\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n val1.penetration_depth, = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n val1.padding, = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n _v211.seq, = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = 
str[start:end]\n start = end\n end += 4\n val1.padding, = _struct_f.unpack(str[start:end])\n _v213 = val1.operation\n start = end\n end += 1\n _v213.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val2.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if 
python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n _v217.seq, = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n _x.secs, _x.nsecs = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n _v216.padding, = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n _v219.operation, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n val3.type, = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%sd' % length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end],\n dtype=numpy.float64, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n pattern = '<%si' % length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype\n =numpy.int32, count=length)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n 
_v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n _x.x, _x.y, _x.z = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n _x.x, _x.y, _x.z, _x.w = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.\n collision_map.header.stamp.secs, _x.planning_scene.\n collision_map.header.stamp.nsecs) = _struct_3I.unpack(str[\n start:end])\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start\n :end]\n start = end\n end += 4\n length, = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n _x.x, _x.y, _x.z = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n val1.angle, = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise 
genpy.DeserializationError(e)\n\n\n_struct_I = genpy.struct_I\n_struct_b = struct.Struct('<b')\n_struct_d = struct.Struct('<d')\n_struct_f = struct.Struct('<f')\n_struct_3f = struct.Struct('<3f')\n_struct_3I = struct.Struct('<3I')\n_struct_4d = struct.Struct('<4d')\n_struct_2I = struct.Struct('<2I')\n_struct_3d = struct.Struct('<3d')\n\n\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n", "step-5": "\"\"\"autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneRequest.msg. Do not edit.\"\"\"\nimport sys\npython3 = True if sys.hexversion > 0x03000000 else False\nimport genpy\nimport struct\n\nimport arm_navigation_msgs.msg\nimport geometry_msgs.msg\nimport std_msgs.msg\nimport genpy\nimport sensor_msgs.msg\n\nclass GetPlanningSceneRequest(genpy.Message):\n _md5sum = \"67ad55e9bed9c8f21dfb4b9b1ca8df7d\"\n _type = \"arm_navigation_msgs/GetPlanningSceneRequest\"\n _has_header = False #flag to mark the presence of a Header object\n _full_text = \"\"\"\n\n\nPlanningScene planning_scene_diff\n\n\narm_navigation_msgs/OrderedCollisionOperations operations\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap 
collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. \n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. 
\n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion and orientation. 
\nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose 
with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. \n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to 
this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. 
\n\nfloat32 x\nfloat32 y\nfloat32 z\n================================================================================\nMSG: arm_navigation_msgs/OrderedCollisionOperations\n# A set of collision operations that will be performed in the order they are specified\nCollisionOperation[] collision_operations\n================================================================================\nMSG: arm_navigation_msgs/CollisionOperation\n# A definition of a collision operation\n# E.g. (\"gripper\",COLLISION_SET_ALL,ENABLE) will enable collisions \n# between the gripper and all objects in the collision space\n\nstring object1\nstring object2\nstring COLLISION_SET_ALL=\"all\"\nstring COLLISION_SET_OBJECTS=\"objects\"\nstring COLLISION_SET_ATTACHED_OBJECTS=\"attached\"\n\n# The penetration distance to which collisions are allowed. This is 0.0 by default.\nfloat64 penetration_distance\n\n# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above\nint32 operation\nint32 DISABLE=0\nint32 ENABLE=1\n\n\"\"\"\n __slots__ = ['planning_scene_diff','operations']\n _slot_types = ['arm_navigation_msgs/PlanningScene','arm_navigation_msgs/OrderedCollisionOperations']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. 
You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene_diff,operations\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneRequest, self).__init__(*args, **kwds)\n #message fields cannot be None, assign default values for those that are\n if self.planning_scene_diff is None:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n if self.operations is None:\n self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()\n else:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene_diff.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, 
*self.planning_scene_diff.robot_state.joint_state.position))\n length = len(self.planning_scene_diff.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.velocity))\n length = len(self.planning_scene_diff.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v1 = val1.position\n _x = _v1\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v2 = val1.orientation\n _x = _v2\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v3 = val1.header\n buff.write(_struct_I.pack(_v3.seq))\n _v4 = _v3.stamp\n _x = _v4\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v3.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v5 = val1.transform\n _v6 = _v5.translation\n _x = _v6\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v7 = _v5.rotation\n _x = _v7\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB'%length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x 
= _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v8 = val1.shape\n buff.write(_struct_b.pack(_v8.type))\n length = len(_v8.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *_v8.dimensions))\n length = len(_v8.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *_v8.triangles))\n length = len(_v8.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v8.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v9 = val1.pose_stamped\n _v10 = _v9.header\n buff.write(_struct_I.pack(_v10.seq))\n _v11 = _v10.stamp\n _x = _v11\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v10.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v12 = _v9.pose\n _v13 = _v12.position\n _x = _v13\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v14 = _v12.orientation\n _x = _v14\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v15 = val1.header\n 
buff.write(_struct_I.pack(_v15.seq))\n _v16 = _v15.stamp\n _x = _v16\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v15.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v17 = val1.operation\n buff.write(_struct_b.pack(_v17.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v18 = val2.position\n _x = _v18\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v19 = val2.orientation\n _x = _v19\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v20 = val1.object\n _v21 = _v20.header\n buff.write(_struct_I.pack(_v21.seq))\n _v22 = _v21.stamp\n _x = _v22\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v21.frame_id\n length = len(_x)\n if python3 or type(_x) 
== unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = _v20.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(_v20.padding))\n _v23 = _v20.operation\n buff.write(_struct_b.pack(_v23.operation))\n length = len(_v20.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v20.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v20.poses:\n _v24 = val3.position\n _x = _v24\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v25 = val3.orientation\n _x = _v25\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n 
buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v26 = val1.center\n _x = _v26\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v27 = val1.extents\n _x = _v27\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v28 = val1.axis\n _x = _v28\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))\n except struct.error as se: self._check_types(se)\n except TypeError as te: self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n if self.operations is None:\n self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')\n else:\n 
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n 
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v29 = val1.position\n _x = _v29\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v30 = val1.orientation\n _x = _v30\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v31 = val1.header\n start = end\n end += 4\n (_v31.seq,) = _struct_I.unpack(str[start:end])\n _v32 = _v31.stamp\n _x = _v32\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v31.frame_id = str[start:end].decode('utf-8')\n else:\n _v31.frame_id = str[start:end]\n start = 
end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v33 = val1.transform\n _v34 = _v33.translation\n _x = _v34\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v35 = _v33.rotation\n _x = _v35\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sB'%length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v36 = val1.shape\n start = end\n end += 1\n (_v36.type,) = 
_struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n _v36.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n _v36.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v36.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v36.vertices.append(val3)\n _v37 = val1.pose_stamped\n _v38 = _v37.header\n start = end\n end += 4\n (_v38.seq,) = _struct_I.unpack(str[start:end])\n _v39 = _v38.stamp\n _x = _v39\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v38.frame_id = str[start:end].decode('utf-8')\n else:\n _v38.frame_id = str[start:end]\n _v40 = _v37.pose\n _v41 = _v40.position\n _x = _v41\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v42 = _v40.orientation\n _x = _v42\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n (val1.penetration_depth,) = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n 
self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n (val1.padding,) = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v43 = val1.header\n start = end\n end += 4\n (_v43.seq,) = _struct_I.unpack(str[start:end])\n _v44 = _v43.stamp\n _x = _v44\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v43.frame_id = str[start:end].decode('utf-8')\n else:\n _v43.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n (val1.padding,) = _struct_f.unpack(str[start:end])\n _v45 = val1.operation\n start = end\n end += 1\n (_v45.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val2.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start 
= end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v46 = val2.position\n _x = _v46\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v47 = val2.orientation\n _x = _v47\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v48 = val1.object\n _v49 = _v48.header\n start = end\n end += 4\n (_v49.seq,) = _struct_I.unpack(str[start:end])\n _v50 = _v49.stamp\n _x = _v50\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v49.frame_id = str[start:end].decode('utf-8')\n else:\n _v49.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v48.id = str[start:end].decode('utf-8')\n else:\n _v48.id = str[start:end]\n start = end\n end += 4\n (_v48.padding,) = 
_struct_f.unpack(str[start:end])\n _v51 = _v48.operation\n start = end\n end += 1\n (_v51.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v48.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val3.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v48.shapes.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v48.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v52 = val3.position\n _x = _v52\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v53 = val3.orientation\n _x = _v53\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n _v48.poses.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n 
(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene_diff.collision_map.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v54 = val1.center\n _x = _v54\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v55 = val1.extents\n _x = _v55\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v56 = val1.axis\n _x = _v56\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n (val1.angle,) = _struct_f.unpack(str[start:end])\n self.planning_scene_diff.collision_map.boxes.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.operations.collision_operations = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionOperation()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object1 = str[start:end].decode('utf-8')\n else:\n val1.object1 = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.object2 = str[start:end].decode('utf-8')\n else:\n val1.object2 = str[start:end]\n _x = val1\n start = end\n end += 12\n (_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])\n self.operations.collision_operations.append(val1)\n return self\n except struct.error as 
e:\n raise genpy.DeserializationError(e) #most likely buffer underfill\n\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene_diff.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene_diff.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene_diff.robot_state.joint_state.position.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene_diff.robot_state.joint_state.velocity.tostring())\n length = len(self.planning_scene_diff.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene_diff.robot_state.joint_state.effort.tostring())\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)\n 
buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:\n _v57 = val1.position\n _x = _v57\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v58 = val1.orientation\n _x = _v58\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.fixed_frame_transforms:\n _v59 = val1.header\n buff.write(_struct_I.pack(_v59.seq))\n _v60 = _v59.stamp\n _x = _v60\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v59.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = 
_x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v61 = val1.transform\n _v62 = _v61.translation\n _x = _v62\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v63 = _v61.rotation\n _x = _v63\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene_diff.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB'%length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene_diff.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v64 = val1.shape\n buff.write(_struct_b.pack(_v64.type))\n length = len(_v64.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(_v64.dimensions.tostring())\n length = len(_v64.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(_v64.triangles.tostring())\n length = len(_v64.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v64.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v65 = val1.pose_stamped\n _v66 = _v65.header\n buff.write(_struct_I.pack(_v66.seq))\n _v67 = _v66.stamp\n _x = _v67\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v66.frame_id\n length = len(_x)\n if python3 or 
type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v68 = _v65.pose\n _v69 = _v68.position\n _x = _v69\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v70 = _v68.orientation\n _x = _v70\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene_diff.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene_diff.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_objects:\n _v71 = val1.header\n buff.write(_struct_I.pack(_v71.seq))\n _v72 = _v71.stamp\n _x = _v72\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v71.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v73 = val1.operation\n buff.write(_struct_b.pack(_v73.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n 
buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v74 = val2.position\n _x = _v74\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v75 = val2.orientation\n _x = _v75\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene_diff.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v76 = val1.object\n _v77 = _v76.header\n buff.write(_struct_I.pack(_v77.seq))\n _v78 = _v77.stamp\n _x = _v78\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v77.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = _v76.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(_v76.padding))\n _v79 = _v76.operation\n buff.write(_struct_b.pack(_v79.operation))\n length = len(_v76.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n 
buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v76.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v76.poses:\n _v80 = val3.position\n _x = _v80\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v81 = val3.orientation\n _x = _v81\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))\n _x = self.planning_scene_diff.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene_diff.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene_diff.collision_map.boxes:\n _v82 = val1.center\n _x = _v82\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v83 = val1.extents\n _x = _v83\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v84 = val1.axis\n _x = _v84\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n length = len(self.operations.collision_operations)\n buff.write(_struct_I.pack(length))\n for val1 in self.operations.collision_operations:\n _x = val1.object1\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.object2\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, 
length, _x))\n _x = val1\n buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))\n except struct.error as se: self._check_types(se)\n except TypeError as te: self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, ``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene_diff is None:\n self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()\n if self.operations is None:\n self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = 
_struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene_diff.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n _x = self\n start = end\n end += 8\n (_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = 
str[start:end]\n self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v85 = val1.position\n _x = _v85\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v86 = val1.orientation\n _x = _v86\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v87 = val1.header\n start = end\n end += 4\n (_v87.seq,) = _struct_I.unpack(str[start:end])\n _v88 = _v87.stamp\n _x = _v88\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v87.frame_id = str[start:end].decode('utf-8')\n else:\n _v87.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v89 = val1.transform\n _v90 = _v89.translation\n _x = _v90\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v91 = _v89.rotation\n _x = _v91\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene_diff.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end 
+= 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sB'%length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v92 = val1.shape\n start = end\n end += 1\n (_v92.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n _v92.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n _v92.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v92.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = 
_struct_3d.unpack(str[start:end])\n _v92.vertices.append(val3)\n _v93 = val1.pose_stamped\n _v94 = _v93.header\n start = end\n end += 4\n (_v94.seq,) = _struct_I.unpack(str[start:end])\n _v95 = _v94.stamp\n _x = _v95\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v94.frame_id = str[start:end].decode('utf-8')\n else:\n _v94.frame_id = str[start:end]\n _v96 = _v93.pose\n _v97 = _v96.position\n _x = _v97\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v98 = _v96.orientation\n _x = _v98\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n (val1.penetration_depth,) = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.allowed_contacts.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n (val1.padding,) = _struct_d.unpack(str[start:end])\n self.planning_scene_diff.link_padding.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n 
_v99 = val1.header\n start = end\n end += 4\n (_v99.seq,) = _struct_I.unpack(str[start:end])\n _v100 = _v99.stamp\n _x = _v100\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v99.frame_id = str[start:end].decode('utf-8')\n else:\n _v99.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n (val1.padding,) = _struct_f.unpack(str[start:end])\n _v101 = val1.operation\n start = end\n end += 1\n (_v101.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val2.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v102 = val2.position\n _x = _v102\n 
start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v103 = val2.orientation\n _x = _v103\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene_diff.collision_objects.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene_diff.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v104 = val1.object\n _v105 = _v104.header\n start = end\n end += 4\n (_v105.seq,) = _struct_I.unpack(str[start:end])\n _v106 = _v105.stamp\n _x = _v106\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v105.frame_id = str[start:end].decode('utf-8')\n else:\n _v105.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v104.id = str[start:end].decode('utf-8')\n else:\n _v104.id = str[start:end]\n start = end\n end += 4\n (_v104.padding,) = _struct_f.unpack(str[start:end])\n _v107 = _v104.operation\n start = end\n end += 1\n (_v107.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v104.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val3.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end], 
dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v104.shapes.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v104.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v108 = val3.position\n _x = _v108\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v109 = val3.orientation\n _x = _v109\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n _v104.poses.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene_diff.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene_diff.collision_map.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = 
_struct_I.unpack(str[start:end])
      # collision_map.boxes: array of OrientedBoundingBox, each serialized as
      # three Point32 (center, extents, axis = 3x float32 = 12 bytes apiece)
      # followed by a single float32 rotation angle.
      self.planning_scene_diff.collision_map.boxes = []
      for i in range(0, length):
        val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
        _v110 = val1.center
        _x = _v110
        start = end
        end += 12
        (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
        _v111 = val1.extents
        _x = _v111
        start = end
        end += 12
        (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
        _v112 = val1.axis
        _x = _v112
        start = end
        end += 12
        (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
        start = end
        end += 4
        (val1.angle,) = _struct_f.unpack(str[start:end])
        self.planning_scene_diff.collision_map.boxes.append(val1)
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      # operations.collision_operations: array of CollisionOperation, each a
      # pair of length-prefixed strings (object1, object2) followed by a packed
      # float64 penetration_distance + int32 operation code ('<di').
      self.operations.collision_operations = []
      for i in range(0, length):
        val1 = arm_navigation_msgs.msg.CollisionOperation()
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.object1 = str[start:end].decode('utf-8')
        else:
          val1.object1 = str[start:end]
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.object2 = str[start:end].decode('utf-8')
        else:
          val1.object2 = str[start:end]
        _x = val1
        start = end
        end += 12
        (_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])
        self.operations.collision_operations.append(val1)
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill

# Pre-compiled little-endian ('<') struct codecs shared by the (de)serializers
# above; genpy emits one per distinct wire format so struct.Struct parsing
# happens once at import time rather than per pack/unpack call.
_struct_I = genpy.struct_I
_struct_b = struct.Struct("<b")    # int8 (e.g. Shape.type, operation byte codes)
_struct_d = struct.Struct("<d")    # float64 scalar (padding, penetration_depth)
_struct_f = struct.Struct("<f")    # float32 scalar (CollisionObject.padding, angle)
_struct_di = struct.Struct("<di")  # float64 + int32 (CollisionOperation tail)
_struct_3f = struct.Struct("<3f")  # Point32 (x, y, z)
_struct_3I = struct.Struct("<3I")  # Header seq + stamp.secs + stamp.nsecs
_struct_4d = struct.Struct("<4d")  # Quaternion (x, y, z, w)
_struct_2I = struct.Struct("<2I")  # time (secs, nsecs)
_struct_3d = struct.Struct("<3d")  # Point (x, y, z)
"""autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneResponse.msg. 
Do not edit.\"\"\"\nimport sys\npython3 = True if sys.hexversion > 0x03000000 else False\nimport genpy\nimport struct\n\nimport arm_navigation_msgs.msg\nimport geometry_msgs.msg\nimport std_msgs.msg\nimport genpy\nimport sensor_msgs.msg\n\nclass GetPlanningSceneResponse(genpy.Message):\n _md5sum = \"285525c9abe002fbafa99af84a14b4cb\"\n _type = \"arm_navigation_msgs/GetPlanningSceneResponse\"\n _has_header = False #flag to mark the presence of a Header object\n _full_text = \"\"\"\n\nPlanningScene planning_scene\n\n\n\n\n\n================================================================================\nMSG: arm_navigation_msgs/PlanningScene\n#full robot state\narm_navigation_msgs/RobotState robot_state\n\n#additional frames for duplicating tf\ngeometry_msgs/TransformStamped[] fixed_frame_transforms\n\n#full allowed collision matrix\nAllowedCollisionMatrix allowed_collision_matrix\n\n#allowed contacts\narm_navigation_msgs/AllowedContactSpecification[] allowed_contacts\n\n#all link paddings\narm_navigation_msgs/LinkPadding[] link_padding\n\n#collision objects\narm_navigation_msgs/CollisionObject[] collision_objects\narm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects\n\n#the collision map\narm_navigation_msgs/CollisionMap collision_map\n\n================================================================================\nMSG: arm_navigation_msgs/RobotState\n# This message contains information about the robot state, i.e. the positions of its joints and links\nsensor_msgs/JointState joint_state\narm_navigation_msgs/MultiDOFJointState multi_dof_joint_state\n\n================================================================================\nMSG: sensor_msgs/JointState\n# This is a message that holds data to describe the state of a set of torque controlled joints. 
\n#\n# The state of each joint (revolute or prismatic) is defined by:\n# * the position of the joint (rad or m),\n# * the velocity of the joint (rad/s or m/s) and \n# * the effort that is applied in the joint (Nm or N).\n#\n# Each joint is uniquely identified by its name\n# The header specifies the time at which the joint states were recorded. All the joint states\n# in one message have to be recorded at the same time.\n#\n# This message consists of a multiple arrays, one for each part of the joint state. \n# The goal is to make each of the fields optional. When e.g. your joints have no\n# effort associated with them, you can leave the effort array empty. \n#\n# All arrays in this message should have the same size, or be empty.\n# This is the only way to uniquely associate the joint name with the correct\n# states.\n\n\nHeader header\n\nstring[] name\nfloat64[] position\nfloat64[] velocity\nfloat64[] effort\n\n================================================================================\nMSG: std_msgs/Header\n# Standard metadata for higher-level stamped data types.\n# This is generally used to communicate timestamped data \n# in a particular coordinate frame.\n# \n# sequence ID: consecutively increasing ID \nuint32 seq\n#Two-integer timestamp that is expressed as:\n# * stamp.secs: seconds (stamp_secs) since epoch\n# * stamp.nsecs: nanoseconds since stamp_secs\n# time-handling sugar is provided by the client library\ntime stamp\n#Frame this data is associated with\n# 0: no frame\n# 1: global frame\nstring frame_id\n\n================================================================================\nMSG: arm_navigation_msgs/MultiDOFJointState\n#A representation of a multi-dof joint state\ntime stamp\nstring[] joint_names\nstring[] frame_ids\nstring[] child_frame_ids\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: geometry_msgs/Pose\n# A representation of pose in free space, composed of postion 
and orientation. \nPoint position\nQuaternion orientation\n\n================================================================================\nMSG: geometry_msgs/Point\n# This contains the position of a point in free space\nfloat64 x\nfloat64 y\nfloat64 z\n\n================================================================================\nMSG: geometry_msgs/Quaternion\n# This represents an orientation in free space in quaternion form.\n\nfloat64 x\nfloat64 y\nfloat64 z\nfloat64 w\n\n================================================================================\nMSG: geometry_msgs/TransformStamped\n# This expresses a transform from coordinate frame header.frame_id\n# to the coordinate frame child_frame_id\n#\n# This message is mostly used by the \n# <a href=\"http://www.ros.org/wiki/tf\">tf</a> package. \n# See it's documentation for more information.\n\nHeader header\nstring child_frame_id # the frame id of the child frame\nTransform transform\n\n================================================================================\nMSG: geometry_msgs/Transform\n# This represents the transform between two coordinate frames in free space.\n\nVector3 translation\nQuaternion rotation\n\n================================================================================\nMSG: geometry_msgs/Vector3\n# This represents a vector in free space. 
\n\nfloat64 x\nfloat64 y\nfloat64 z\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionMatrix\n# the list of link names in the matrix\nstring[] link_names\n\n# the individual entries in the allowed collision matrix\n# symmetric, with same order as link_names\nAllowedCollisionEntry[] entries\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedCollisionEntry\n# whether or not collision checking is enabled\nbool[] enabled\n\n================================================================================\nMSG: arm_navigation_msgs/AllowedContactSpecification\n# The names of the regions\nstring name\n\n# The shape of the region in the environment\narm_navigation_msgs/Shape shape\n\n# The pose of the space defining the region\ngeometry_msgs/PoseStamped pose_stamped\n\n# The set of links that will be allowed to have penetration contact within this region\nstring[] link_names\n\n# The maximum penetration depth allowed for every link\nfloat64 penetration_depth\n\n================================================================================\nMSG: arm_navigation_msgs/Shape\nbyte SPHERE=0\nbyte BOX=1\nbyte CYLINDER=2\nbyte MESH=3\n\nbyte type\n\n\n#### define sphere, box, cylinder ####\n# the origin of each shape is considered at the shape's center\n\n# for sphere\n# radius := dimensions[0]\n\n# for cylinder\n# radius := dimensions[0]\n# length := dimensions[1]\n# the length is along the Z axis\n\n# for box\n# size_x := dimensions[0]\n# size_y := dimensions[1]\n# size_z := dimensions[2]\nfloat64[] dimensions\n\n\n#### define mesh ####\n\n# list of triangles; triangle k is defined by tre vertices located\n# at indices triangles[3k], triangles[3k+1], triangles[3k+2]\nint32[] triangles\ngeometry_msgs/Point[] vertices\n\n================================================================================\nMSG: geometry_msgs/PoseStamped\n# A Pose 
with reference coordinate frame and timestamp\nHeader header\nPose pose\n\n================================================================================\nMSG: arm_navigation_msgs/LinkPadding\n#name for the link\nstring link_name\n\n# padding to apply to the link\nfloat64 padding\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObject\n# a header, used for interpreting the poses\nHeader header\n\n# the id of the object\nstring id\n\n# The padding used for filtering points near the object.\n# This does not affect collision checking for the object. \n# Set to negative to get zero padding.\nfloat32 padding\n\n#This contains what is to be done with the object\nCollisionObjectOperation operation\n\n#the shapes associated with the object\narm_navigation_msgs/Shape[] shapes\n\n#the poses associated with the shapes - will be transformed using the header\ngeometry_msgs/Pose[] poses\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionObjectOperation\n#Puts the object into the environment\n#or updates the object if already added\nbyte ADD=0\n\n#Removes the object from the environment entirely\nbyte REMOVE=1\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes an attached object, detaches from the attached link\n#But adds back in as regular object\nbyte DETACH_AND_ADD_AS_OBJECT=2\n\n#Only valid within the context of a CollisionAttachedObject message\n#Will be ignored if sent with an CollisionObject message\n#Takes current object in the environment and removes it as\n#a regular object\nbyte ATTACH_AND_REMOVE_AS_OBJECT=3\n\n# Byte code for operation\nbyte operation\n\n================================================================================\nMSG: arm_navigation_msgs/AttachedCollisionObject\n# The CollisionObject will be attached with a fixed joint to 
this link\n# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation \n# is set to REMOVE will remove all attached bodies attached to any object\nstring link_name\n\n#Reserved for indicating that all attached objects should be removed\nstring REMOVE_ALL_ATTACHED_OBJECTS = \"all\"\n\n#This contains the actual shapes and poses for the CollisionObject\n#to be attached to the link\n#If action is remove and no object.id is set, all objects\n#attached to the link indicated by link_name will be removed\nCollisionObject object\n\n# The set of links that the attached objects are allowed to touch\n# by default - the link_name is included by default\nstring[] touch_links\n\n================================================================================\nMSG: arm_navigation_msgs/CollisionMap\n#header for interpreting box positions\nHeader header\n\n#boxes for use in collision testing\nOrientedBoundingBox[] boxes\n\n================================================================================\nMSG: arm_navigation_msgs/OrientedBoundingBox\n#the center of the box\ngeometry_msgs/Point32 center\n\n#the extents of the box, assuming the center is at the point\ngeometry_msgs/Point32 extents\n\n#the axis of the box\ngeometry_msgs/Point32 axis\n\n#the angle of rotation around the axis\nfloat32 angle\n\n================================================================================\nMSG: geometry_msgs/Point32\n# This contains the position of a point in free space(with 32 bits of precision).\n# It is recommeded to use Point wherever possible instead of Point32. \n# \n# This recommendation is to promote interoperability. \n#\n# This message is designed to take up less space when sending\n# lots of points at once, as in the case of a PointCloud. \n\nfloat32 x\nfloat32 y\nfloat32 z\n\"\"\"\n __slots__ = ['planning_scene']\n _slot_types = ['arm_navigation_msgs/PlanningScene']\n\n def __init__(self, *args, **kwds):\n \"\"\"\n Constructor. 
Any message fields that are implicitly/explicitly\n set to None will be assigned a default value. The recommend\n use is keyword arguments as this is more robust to future message\n changes. You cannot mix in-order arguments and keyword arguments.\n\n The available fields are:\n planning_scene\n\n :param args: complete set of field values, in .msg order\n :param kwds: use keyword arguments corresponding to message field names\n to set specific fields.\n \"\"\"\n if args or kwds:\n super(GetPlanningSceneResponse, self).__init__(*args, **kwds)\n #message fields cannot be None, assign default values for those that are\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n else:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n\n def _get_types(self):\n \"\"\"\n internal API method\n \"\"\"\n return self._slot_types\n\n def serialize(self, buff):\n \"\"\"\n serialize message into buffer\n :param buff: buffer, ``StringIO``\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, 
*self.planning_scene.robot_state.joint_state.position))\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.velocity))\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.effort))\n _x = self\n buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v113 = val1.position\n _x = _v113\n 
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v114 = val1.orientation\n _x = _v114\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v115 = val1.header\n buff.write(_struct_I.pack(_v115.seq))\n _v116 = _v115.stamp\n _x = _v116\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v115.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v117 = val1.transform\n _v118 = _v117.translation\n _x = _v118\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v119 = _v117.rotation\n _x = _v119\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB'%length\n buff.write(struct.pack(pattern, *val1.enabled))\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v120 = 
val1.shape\n buff.write(_struct_b.pack(_v120.type))\n length = len(_v120.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *_v120.dimensions))\n length = len(_v120.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *_v120.triangles))\n length = len(_v120.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v120.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v121 = val1.pose_stamped\n _v122 = _v121.header\n buff.write(_struct_I.pack(_v122.seq))\n _v123 = _v122.stamp\n _x = _v123\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v122.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v124 = _v121.pose\n _v125 = _v124.position\n _x = _v125\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v126 = _v124.orientation\n _x = _v126\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v127 = val1.header\n buff.write(_struct_I.pack(_v127.seq))\n _v128 = _v127.stamp\n _x = _v128\n 
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v127.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v129 = val1.operation\n buff.write(_struct_b.pack(_v129.operation))\n length = len(val1.shapes)\n buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *val2.dimensions))\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *val2.triangles))\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v130 = val2.position\n _x = _v130\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v131 = val2.orientation\n _x = _v131\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v132 = val1.object\n _v133 = _v132.header\n buff.write(_struct_I.pack(_v133.seq))\n _v134 = _v133.stamp\n _x = _v134\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v133.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n 
buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = _v132.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(_v132.padding))\n _v135 = _v132.operation\n buff.write(_struct_b.pack(_v135.operation))\n length = len(_v132.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(struct.pack(pattern, *val3.dimensions))\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(struct.pack(pattern, *val3.triangles))\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v132.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v132.poses:\n _v136 = val3.position\n _x = _v136\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v137 = val3.orientation\n _x = _v137\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in 
self.planning_scene.collision_map.boxes:\n _v138 = val1.center\n _x = _v138\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v139 = val1.extents\n _x = _v139\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v140 = val1.axis\n _x = _v140\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se: self._check_types(se)\n except TypeError as te: self._check_types(te)\n\n def deserialize(self, str):\n \"\"\"\n unpack serialized message in str into this message instance\n :param str: byte array of serialized message, ``str``\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = 
'<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])\n _x = self\n start = end\n end += 8\n (_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)\n start = end\n end += 4\n (length,) = 
_struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v141 = val1.position\n _x = _v141\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v142 = val1.orientation\n _x = _v142\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v143 = val1.header\n start = end\n end += 4\n (_v143.seq,) = _struct_I.unpack(str[start:end])\n _v144 = _v143.stamp\n _x = _v144\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v143.frame_id = str[start:end].decode('utf-8')\n else:\n _v143.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v145 = val1.transform\n _v146 = _v145.translation\n _x = _v146\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v147 = _v145.rotation\n _x = _v147\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 
= str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sB'%length\n start = end\n end += struct.calcsize(pattern)\n val1.enabled = struct.unpack(pattern, str[start:end])\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v148 = val1.shape\n start = end\n end += 1\n (_v148.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n _v148.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n _v148.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v148.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v148.vertices.append(val3)\n _v149 = val1.pose_stamped\n _v150 = _v149.header\n start = end\n end += 4\n (_v150.seq,) = _struct_I.unpack(str[start:end])\n _v151 = _v150.stamp\n _x = _v151\n start = end\n end += 
8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v150.frame_id = str[start:end].decode('utf-8')\n else:\n _v150.frame_id = str[start:end]\n _v152 = _v149.pose\n _v153 = _v152.position\n _x = _v153\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v154 = _v152.orientation\n _x = _v154\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n (val1.penetration_depth,) = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n (val1.padding,) = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v155 = val1.header\n start = end\n end += 4\n (_v155.seq,) = _struct_I.unpack(str[start:end])\n _v156 = _v155.stamp\n _x = _v156\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = 
_struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v155.frame_id = str[start:end].decode('utf-8')\n else:\n _v155.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n else:\n val1.id = str[start:end]\n start = end\n end += 4\n (val1.padding,) = _struct_f.unpack(str[start:end])\n _v157 = val1.operation\n start = end\n end += 1\n (_v157.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val2.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v158 = val2.position\n _x = _v158\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v159 = val2.orientation\n _x = _v159\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n 
end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v160 = val1.object\n _v161 = _v160.header\n start = end\n end += 4\n (_v161.seq,) = _struct_I.unpack(str[start:end])\n _v162 = _v161.stamp\n _x = _v162\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v161.frame_id = str[start:end].decode('utf-8')\n else:\n _v161.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v160.id = str[start:end].decode('utf-8')\n else:\n _v160.id = str[start:end]\n start = end\n end += 4\n (_v160.padding,) = _struct_f.unpack(str[start:end])\n _v163 = _v160.operation\n start = end\n end += 1\n (_v163.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v160.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val3.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = struct.unpack(pattern, str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, 
length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v160.shapes.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v160.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v164 = val3.position\n _x = _v164\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v165 = val3.orientation\n _x = _v165\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n _v160.poses.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v166 = val1.center\n _x = _v166\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v167 = val1.extents\n _x = _v167\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v168 = 
val1.axis\n _x = _v168\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n (val1.angle,) = _struct_f.unpack(str[start:end])\n self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill\n\n\n def serialize_numpy(self, buff, numpy):\n \"\"\"\n serialize message with numpy array types into buffer\n :param buff: buffer, ``StringIO``\n :param numpy: numpy python module\n \"\"\"\n try:\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))\n _x = self.planning_scene.robot_state.joint_state.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene.robot_state.joint_state.name)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.joint_state.name:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.joint_state.position)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene.robot_state.joint_state.position.tostring())\n length = len(self.planning_scene.robot_state.joint_state.velocity)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene.robot_state.joint_state.velocity.tostring())\n length = len(self.planning_scene.robot_state.joint_state.effort)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(self.planning_scene.robot_state.joint_state.effort.tostring())\n _x = self\n 
buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:\n _v169 = val1.position\n _x = _v169\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v170 = val1.orientation\n _x = _v170\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.fixed_frame_transforms)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.fixed_frame_transforms:\n _v171 = val1.header\n buff.write(_struct_I.pack(_v171.seq))\n _v172 = _v171.stamp\n _x = _v172\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v171.frame_id\n length = len(_x)\n if python3 or type(_x) == 
unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.child_frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v173 = val1.transform\n _v174 = _v173.translation\n _x = _v174\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v175 = _v173.rotation\n _x = _v175\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.allowed_collision_matrix.link_names)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.link_names:\n length = len(val1)\n if python3 or type(val1) == unicode:\n val1 = val1.encode('utf-8')\n length = len(val1)\n buff.write(struct.pack('<I%ss'%length, length, val1))\n length = len(self.planning_scene.allowed_collision_matrix.entries)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_collision_matrix.entries:\n length = len(val1.enabled)\n buff.write(_struct_I.pack(length))\n pattern = '<%sB'%length\n buff.write(val1.enabled.tostring())\n length = len(self.planning_scene.allowed_contacts)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.allowed_contacts:\n _x = val1.name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v176 = val1.shape\n buff.write(_struct_b.pack(_v176.type))\n length = len(_v176.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(_v176.dimensions.tostring())\n length = len(_v176.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(_v176.triangles.tostring())\n length = len(_v176.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in _v176.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v177 = val1.pose_stamped\n _v178 = 
_v177.header\n buff.write(_struct_I.pack(_v178.seq))\n _v179 = _v178.stamp\n _x = _v179\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v178.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v180 = _v177.pose\n _v181 = _v180.position\n _x = _v181\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v182 = _v180.orientation\n _x = _v182\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.link_names)\n buff.write(_struct_I.pack(length))\n for val2 in val1.link_names:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n buff.write(_struct_d.pack(val1.penetration_depth))\n length = len(self.planning_scene.link_padding)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.link_padding:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_d.pack(val1.padding))\n length = len(self.planning_scene.collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_objects:\n _v183 = val1.header\n buff.write(_struct_I.pack(_v183.seq))\n _v184 = _v183.stamp\n _x = _v184\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v183.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = val1.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(val1.padding))\n _v185 = val1.operation\n buff.write(_struct_b.pack(_v185.operation))\n length = len(val1.shapes)\n 
buff.write(_struct_I.pack(length))\n for val2 in val1.shapes:\n buff.write(_struct_b.pack(val2.type))\n length = len(val2.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n buff.write(val2.dimensions.tostring())\n length = len(val2.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(val2.triangles.tostring())\n length = len(val2.vertices)\n buff.write(_struct_I.pack(length))\n for val3 in val2.vertices:\n _x = val3\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(val1.poses)\n buff.write(_struct_I.pack(length))\n for val2 in val1.poses:\n _v186 = val2.position\n _x = _v186\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v187 = val2.orientation\n _x = _v187\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(self.planning_scene.attached_collision_objects)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.attached_collision_objects:\n _x = val1.link_name\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _v188 = val1.object\n _v189 = _v188.header\n buff.write(_struct_I.pack(_v189.seq))\n _v190 = _v189.stamp\n _x = _v190\n buff.write(_struct_2I.pack(_x.secs, _x.nsecs))\n _x = _v189.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n _x = _v188.id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n buff.write(_struct_f.pack(_v188.padding))\n _v191 = _v188.operation\n buff.write(_struct_b.pack(_v191.operation))\n length = len(_v188.shapes)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.shapes:\n buff.write(_struct_b.pack(val3.type))\n length = len(val3.dimensions)\n buff.write(_struct_I.pack(length))\n pattern = '<%sd'%length\n 
buff.write(val3.dimensions.tostring())\n length = len(val3.triangles)\n buff.write(_struct_I.pack(length))\n pattern = '<%si'%length\n buff.write(val3.triangles.tostring())\n length = len(val3.vertices)\n buff.write(_struct_I.pack(length))\n for val4 in val3.vertices:\n _x = val4\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n length = len(_v188.poses)\n buff.write(_struct_I.pack(length))\n for val3 in _v188.poses:\n _v192 = val3.position\n _x = _v192\n buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))\n _v193 = val3.orientation\n _x = _v193\n buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))\n length = len(val1.touch_links)\n buff.write(_struct_I.pack(length))\n for val2 in val1.touch_links:\n length = len(val2)\n if python3 or type(val2) == unicode:\n val2 = val2.encode('utf-8')\n length = len(val2)\n buff.write(struct.pack('<I%ss'%length, length, val2))\n _x = self\n buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))\n _x = self.planning_scene.collision_map.header.frame_id\n length = len(_x)\n if python3 or type(_x) == unicode:\n _x = _x.encode('utf-8')\n length = len(_x)\n buff.write(struct.pack('<I%ss'%length, length, _x))\n length = len(self.planning_scene.collision_map.boxes)\n buff.write(_struct_I.pack(length))\n for val1 in self.planning_scene.collision_map.boxes:\n _v194 = val1.center\n _x = _v194\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v195 = val1.extents\n _x = _v195\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n _v196 = val1.axis\n _x = _v196\n buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))\n buff.write(_struct_f.pack(val1.angle))\n except struct.error as se: self._check_types(se)\n except TypeError as te: self._check_types(te)\n\n def deserialize_numpy(self, str, numpy):\n \"\"\"\n unpack serialized message in str into this message instance using numpy for array types\n :param str: byte array of serialized message, 
``str``\n :param numpy: numpy python module\n \"\"\"\n try:\n if self.planning_scene is None:\n self.planning_scene = arm_navigation_msgs.msg.PlanningScene()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.joint_state.name = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.joint_state.name.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n self.planning_scene.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n _x = self\n start = end\n end += 8\n 
(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.robot_state.multi_dof_joint_state.poses = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.Pose()\n _v197 = val1.position\n _x = _v197\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v198 = val1.orientation\n _x = _v198\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n 
self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms = []\n for i in range(0, length):\n val1 = geometry_msgs.msg.TransformStamped()\n _v199 = val1.header\n start = end\n end += 4\n (_v199.seq,) = _struct_I.unpack(str[start:end])\n _v200 = _v199.stamp\n _x = _v200\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v199.frame_id = str[start:end].decode('utf-8')\n else:\n _v199.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.child_frame_id = str[start:end].decode('utf-8')\n else:\n val1.child_frame_id = str[start:end]\n _v201 = val1.transform\n _v202 = _v201.translation\n _x = _v202\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v203 = _v201.rotation\n _x = _v203\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n self.planning_scene.fixed_frame_transforms.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1 = str[start:end].decode('utf-8')\n else:\n val1 = str[start:end]\n self.planning_scene.allowed_collision_matrix.link_names.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_collision_matrix.entries = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sB'%length\n start = 
end\n end += struct.calcsize(pattern)\n val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool, count=length)\n val1.enabled = map(bool, val1.enabled)\n self.planning_scene.allowed_collision_matrix.entries.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.allowed_contacts = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AllowedContactSpecification()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.name = str[start:end].decode('utf-8')\n else:\n val1.name = str[start:end]\n _v204 = val1.shape\n start = end\n end += 1\n (_v204.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n _v204.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n _v204.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v204.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v204.vertices.append(val3)\n _v205 = val1.pose_stamped\n _v206 = _v205.header\n start = end\n end += 4\n (_v206.seq,) = _struct_I.unpack(str[start:end])\n _v207 = _v206.stamp\n _x = _v207\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v206.frame_id = str[start:end].decode('utf-8')\n else:\n _v206.frame_id = str[start:end]\n _v208 = _v205.pose\n _v209 = _v208.position\n _x = _v209\n 
start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v210 = _v208.orientation\n _x = _v210\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.link_names = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.link_names.append(val2)\n start = end\n end += 8\n (val1.penetration_depth,) = _struct_d.unpack(str[start:end])\n self.planning_scene.allowed_contacts.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.link_padding = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.LinkPadding()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n start = end\n end += 8\n (val1.padding,) = _struct_d.unpack(str[start:end])\n self.planning_scene.link_padding.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.CollisionObject()\n _v211 = val1.header\n start = end\n end += 4\n (_v211.seq,) = _struct_I.unpack(str[start:end])\n _v212 = _v211.stamp\n _x = _v212\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v211.frame_id = str[start:end].decode('utf-8')\n else:\n _v211.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val1.id = str[start:end].decode('utf-8')\n 
else:\n val1.id = str[start:end]\n start = end\n end += 4\n (val1.padding,) = _struct_f.unpack(str[start:end])\n _v213 = val1.operation\n start = end\n end += 1\n (_v213.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.shapes = []\n for i in range(0, length):\n val2 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val2.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val2.vertices = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Point()\n _x = val3\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val2.vertices.append(val3)\n val1.shapes.append(val2)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.poses = []\n for i in range(0, length):\n val2 = geometry_msgs.msg.Pose()\n _v214 = val2.position\n _x = _v214\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v215 = val2.orientation\n _x = _v215\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n val1.poses.append(val2)\n self.planning_scene.collision_objects.append(val1)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.attached_collision_objects = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.AttachedCollisionObject()\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n 
start = end\n end += length\n if python3:\n val1.link_name = str[start:end].decode('utf-8')\n else:\n val1.link_name = str[start:end]\n _v216 = val1.object\n _v217 = _v216.header\n start = end\n end += 4\n (_v217.seq,) = _struct_I.unpack(str[start:end])\n _v218 = _v217.stamp\n _x = _v218\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v217.frame_id = str[start:end].decode('utf-8')\n else:\n _v217.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v216.id = str[start:end].decode('utf-8')\n else:\n _v216.id = str[start:end]\n start = end\n end += 4\n (_v216.padding,) = _struct_f.unpack(str[start:end])\n _v219 = _v216.operation\n start = end\n end += 1\n (_v219.operation,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n _v216.shapes = []\n for i in range(0, length):\n val3 = arm_navigation_msgs.msg.Shape()\n start = end\n end += 1\n (val3.type,) = _struct_b.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%sd'%length\n start = end\n end += struct.calcsize(pattern)\n val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n pattern = '<%si'%length\n start = end\n end += struct.calcsize(pattern)\n val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val3.vertices = []\n for i in range(0, length):\n val4 = geometry_msgs.msg.Point()\n _x = val4\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n val3.vertices.append(val4)\n _v216.shapes.append(val3)\n start = end\n end += 
4\n (length,) = _struct_I.unpack(str[start:end])\n _v216.poses = []\n for i in range(0, length):\n val3 = geometry_msgs.msg.Pose()\n _v220 = val3.position\n _x = _v220\n start = end\n end += 24\n (_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])\n _v221 = val3.orientation\n _x = _v221\n start = end\n end += 32\n (_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])\n _v216.poses.append(val3)\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n val1.touch_links = []\n for i in range(0, length):\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n val2 = str[start:end].decode('utf-8')\n else:\n val2 = str[start:end]\n val1.touch_links.append(val2)\n self.planning_scene.attached_collision_objects.append(val1)\n _x = self\n start = end\n end += 12\n (_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.planning_scene.collision_map.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.planning_scene.collision_map.boxes = []\n for i in range(0, length):\n val1 = arm_navigation_msgs.msg.OrientedBoundingBox()\n _v222 = val1.center\n _x = _v222\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v223 = val1.extents\n _x = _v223\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n _v224 = val1.axis\n _x = _v224\n start = end\n end += 12\n (_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])\n start = end\n end += 4\n (val1.angle,) = _struct_f.unpack(str[start:end])\n 
self.planning_scene.collision_map.boxes.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill\n\n_struct_I = genpy.struct_I\n_struct_b = struct.Struct(\"<b\")\n_struct_d = struct.Struct(\"<d\")\n_struct_f = struct.Struct(\"<f\")\n_struct_3f = struct.Struct(\"<3f\")\n_struct_3I = struct.Struct(\"<3I\")\n_struct_4d = struct.Struct(\"<4d\")\n_struct_2I = struct.Struct(\"<2I\")\n_struct_3d = struct.Struct(\"<3d\")\nclass GetPlanningScene(object):\n _type = 'arm_navigation_msgs/GetPlanningScene'\n _md5sum = '0a7b07718e4e5c5d35740c730509a151'\n _request_class = GetPlanningSceneRequest\n _response_class = GetPlanningSceneResponse\n", "step-ids": [ 10, 14, 18, 20, 21 ] }
[ 10, 14, 18, 20, 21 ]
<|reserved_special_token_0|> class Zaojiaopage(Crazy): <|reserved_special_token_0|> <|reserved_special_token_0|> def click_zao(self): self.click(self.zao_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_find(self): self.click(self.find_loc) <|reserved_special_token_0|> def click_title_btn(self): self.click(self.title_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_helper(self): self.click(self.helper_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_small_name(self): return self.find_element(self.small_name_loc) def click_small_name(self): self.click(self.small_name_loc) <|reserved_special_token_0|> def click_switching_applet_btn(self): self.click(self.switching_applet_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_experience_version_btn(self): self.clicks(self.experience_version_btn_loc, -1) <|reserved_special_token_0|> def element_audition_class_btn(self): return self.find_element(self.audition_class_btn_loc) def click_audition_class_btn(self): self.click(self.audition_class_btn_loc) <|reserved_special_token_0|> def click_wechat_grant_btn(self): self.click(self.wechat_grant_btn_loc) def double_click_wechat_grant(self): self.double_click(self.wechat_grant_btn_loc) def element_wechat_grant_btn(self): return self.find_element(self.wechat_grant_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_attend_lectures_btn(self): return self.find_element(self.attend_lectures_btn_loc) 
<|reserved_special_token_0|> <|reserved_special_token_0|> def element_class_btn(self): return self.find_element(self.class_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def input_buy_password(self, paw): self.send_keys(self.buy_password_loc, paw) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_success_btn(self): self.click(self.success_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_add_address_btn(self): self.click(self.add_address_btn_loc) <|reserved_special_token_0|> def input_name_btn(self, name): self.send_keys(self.name_loc, name) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def input_detailed_address_btn(self, address): self.send_keys(self.detailed_address_btn_loc, address) <|reserved_special_token_0|> def click_save_btn(self): self.click(self.save_btn_loc) <|reserved_special_token_0|> def click_receive_btn(self): self.click(self.receive_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_addressee(self): self.clicks(self.addressee_loc, 0) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_all_curriculum_btn(self): return self.find_element(self.all_curriculum_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def text_my_baby_title(self): return self.get_text(self.my_baby_title_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> 
<|reserved_special_token_0|> def clicks_new_baby_btn(self, n): self.clicks(self.new_baby_btn_loc, n) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_baby_bir_btn(self): self.click(self.baby_bir_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_my_home(self): self.click(self.my_home_loc) def element_my_home(self): return self.find_element(self.my_home_loc) <|reserved_special_token_0|> def click_switch_btn(self): self.click(self.switch_btn_loc) <|reserved_special_token_0|> def click_baby_bri(self): self.click(self.baby_bri_loc) <|reserved_special_token_0|> def clicks_class_img(self): self.clicks(self.class_img_btn_loc, 0) <|reserved_special_token_0|> def click_collection_btn(self): self.click(self.collection_btn_loc) def clicks_collection_btn(self, n): self.clicks(self.collection_btn_loc, n) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_write_record_btn(self): self.click(self.write_record_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_album_btn(self): return self.find_element(self.album_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_small_video_btn(self): return self.find_element(self.small_video_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_record_info(self, data): record_info_loc = 'xpath', '//*[contains(@text, "{}")]'.format(data) record_info = self.find_element(record_info_loc) if record_info: return True else: return False <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def elements_class_name(self): return self.find_elements(self.class_name_loc) <|reserved_special_token_0|> def click_class2_name(self): 
self.click(self.class_name2_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def elements_choice_album(self): return self.find_elements(self.choice_album_loc) <|reserved_special_token_0|> def click_complete_btn(self): self.click(self.complete_btn_loc) <|reserved_special_token_0|> def click_my_collection_btn(self): self.click(self.my_collection_btn_loc) <|reserved_special_token_0|> def elements_my_collection_english_course_btn(self): return self.find_elements(self.my_collection_english_course_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_my_course_btn(self): self.click(self.my_course_btn_loc) <|reserved_special_token_0|> def elements_my_course_buy_btn(self): return self.find_elements(self.my_course_buy_btn_loc) <|reserved_special_token_0|> def click_my_order_btn(self): self.click(self.my_order_btn_loc) <|reserved_special_token_0|> def elements_my_order_card_btn(self): return self.find_elements(self.my_order_card_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def elements_my_record_class_btn(self): return self.find_elements(self.my_record_class_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_reply_code(self, n): self.clicks(self.reply_code_loc, n) <|reserved_special_token_0|> def element_long_code(self): return self.find_element(self.long_code_loc) def click_long_code(self): self.click(self.long_code_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> 
<|reserved_special_token_0|> def text_class_group(self): return self.get_text(self.class_group_loc) <|reserved_special_token_0|> def element_add_group_chat(self): return self.find_element(self.add_group_chat_loc) <|reserved_special_token_0|> def elements_reply_8(self): return self.find_elements(self.reply_8_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_more_games_btn(self): self.click(self.more_games_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def text_start_fingerprint_buy(self): return self.get_text(self.start_fingerprint_buy_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_cancel_btn(self): self.click(self.cancel_btn_loc) <|reserved_special_token_0|> def element_usd_password(self): return self.find_element(self.usd_password_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_password_error(self): return self.find_element(self.password_error_loc) <|reserved_special_token_0|> def click_again_btn(self): self.click(self.again_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_typewriting_finish_btn(self): return self.find_element(self.typewriting_finish_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_clock_btn(self): return self.find_element(self.clock_btn_loc) <|reserved_special_token_0|> def element_no_clock_btn(self): return self.find_element(self.no_clock_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_upload_card_btn(self): self.click(self.upload_card_btn_loc) <|reserved_special_token_0|> def click_again_upload_card_btn(self): self.click(self.again_upload_card_btn_loc) 
<|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_copy_format_btn(self): return self.find_element(self.copy_format_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_upload_btn(self): self.click(self.upload_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_reset_img_btn(self): self.click(self.reset_img_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_reminder_btn(self): return self.find_element(self.reminder_btn_loc) <|reserved_special_token_0|> def element_page_expired(self): return self.find_element(self.page_expired_loc) <|reserved_special_token_0|> def click_x_btn(self): self.click(self.x_btn_loc) <|reserved_special_token_1|> <|reserved_special_token_0|> class Zaojiaopage(Crazy): <|reserved_special_token_0|> <|reserved_special_token_0|> def click_zao(self): self.click(self.zao_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_find(self): self.click(self.find_loc) <|reserved_special_token_0|> def click_title_btn(self): self.click(self.title_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_helper(self): self.click(self.helper_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_small_name(self): return self.find_element(self.small_name_loc) def click_small_name(self): self.click(self.small_name_loc) <|reserved_special_token_0|> def click_switching_applet_btn(self): self.click(self.switching_applet_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def 
clicks_experience_version_btn(self): self.clicks(self.experience_version_btn_loc, -1) <|reserved_special_token_0|> def element_audition_class_btn(self): return self.find_element(self.audition_class_btn_loc) def click_audition_class_btn(self): self.click(self.audition_class_btn_loc) <|reserved_special_token_0|> def click_wechat_grant_btn(self): self.click(self.wechat_grant_btn_loc) def double_click_wechat_grant(self): self.double_click(self.wechat_grant_btn_loc) def element_wechat_grant_btn(self): return self.find_element(self.wechat_grant_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_mouth_btn(self): self.click(self.month_btn_loc) <|reserved_special_token_0|> def click_sure_btn(self): self.click(self.sure_btn_loc) <|reserved_special_token_0|> def class_info_btn(self): self.click(self.class_info_loc) <|reserved_special_token_0|> def element_attend_lectures_btn(self): return self.find_element(self.attend_lectures_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_class_btn(self): return self.find_element(self.class_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_get_to_know_btn(self): return self.find_element(self.get_to_know_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def input_buy_password(self, paw): self.send_keys(self.buy_password_loc, paw) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_success_btn(self): self.click(self.success_btn_loc) <|reserved_special_token_0|> def click_check_address_btn(self): self.click(self.check_address_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_add_address_btn(self): self.click(self.add_address_btn_loc) <|reserved_special_token_0|> def input_name_btn(self, name): self.send_keys(self.name_loc, name) <|reserved_special_token_0|> def input_phone_btn(self, phone): 
self.send_keys(self.phone_btn_loc, phone) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def input_detailed_address_btn(self, address): self.send_keys(self.detailed_address_btn_loc, address) <|reserved_special_token_0|> def click_save_btn(self): self.click(self.save_btn_loc) <|reserved_special_token_0|> def click_receive_btn(self): self.click(self.receive_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_addressee(self): self.clicks(self.addressee_loc, 0) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_all_curriculum_btn(self): return self.find_element(self.all_curriculum_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_my_btn(self): return self.find_element(self.my_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def text_my_baby_title(self): return self.get_text(self.my_baby_title_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_new_baby_btn(self): return self.find_element(self.new_baby_btn_loc) <|reserved_special_token_0|> def clicks_new_baby_btn(self, n): self.clicks(self.new_baby_btn_loc, n) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_baby_bir_btn(self): self.click(self.baby_bir_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_my_home(self): self.click(self.my_home_loc) def element_my_home(self): return self.find_element(self.my_home_loc) <|reserved_special_token_0|> def click_switch_btn(self): self.click(self.switch_btn_loc) <|reserved_special_token_0|> def 
click_baby_bri(self): self.click(self.baby_bri_loc) <|reserved_special_token_0|> def clicks_class_img(self): self.clicks(self.class_img_btn_loc, 0) <|reserved_special_token_0|> def click_collection_btn(self): self.click(self.collection_btn_loc) def clicks_collection_btn(self, n): self.clicks(self.collection_btn_loc, n) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_write_record_btn(self): self.click(self.write_record_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_album_btn(self): return self.find_element(self.album_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_small_video_btn(self): return self.find_element(self.small_video_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_release_btn(self, n): self.clicks(self.release_btn_loc, n) def element_record_info(self, data): record_info_loc = 'xpath', '//*[contains(@text, "{}")]'.format(data) record_info = self.find_element(record_info_loc) if record_info: return True else: return False <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def elements_class_name(self): return self.find_elements(self.class_name_loc) <|reserved_special_token_0|> def click_class2_name(self): self.click(self.class_name2_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_choice_album(self, n): self.clicks(self.choice_album_loc, n) def elements_choice_album(self): return self.find_elements(self.choice_album_loc) <|reserved_special_token_0|> def click_complete_btn(self): self.click(self.complete_btn_loc) <|reserved_special_token_0|> def click_my_collection_btn(self): self.click(self.my_collection_btn_loc) <|reserved_special_token_0|> def elements_my_collection_english_course_btn(self): return self.find_elements(self.my_collection_english_course_btn_loc) 
<|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_my_course_btn(self): self.click(self.my_course_btn_loc) <|reserved_special_token_0|> def elements_my_course_buy_btn(self): return self.find_elements(self.my_course_buy_btn_loc) <|reserved_special_token_0|> def click_my_order_btn(self): self.click(self.my_order_btn_loc) <|reserved_special_token_0|> def elements_my_order_card_btn(self): return self.find_elements(self.my_order_card_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def elements_my_record_class_btn(self): return self.find_elements(self.my_record_class_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_back_btn(self): self.click(self.back_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_send(self): self.click(self.send_5_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_reply_code(self, n): self.clicks(self.reply_code_loc, n) <|reserved_special_token_0|> def element_long_code(self): return self.find_element(self.long_code_loc) def click_long_code(self): self.click(self.long_code_loc) <|reserved_special_token_0|> def click_discern_code(self): self.click(self.discern_code_loc) <|reserved_special_token_0|> def text_class_group(self): return self.get_text(self.class_group_loc) <|reserved_special_token_0|> def element_add_group_chat(self): return self.find_element(self.add_group_chat_loc) <|reserved_special_token_0|> def elements_reply_8(self): return self.find_elements(self.reply_8_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_more_games_btn(self): 
self.click(self.more_games_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def text_start_fingerprint_buy(self): return self.get_text(self.start_fingerprint_buy_loc) <|reserved_special_token_0|> def click_no_more_reminder_btn(self): self.click(self.no_more_reminder_btn_loc) <|reserved_special_token_0|> def click_cancel_btn(self): self.click(self.cancel_btn_loc) <|reserved_special_token_0|> def element_usd_password(self): return self.find_element(self.usd_password_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_password_error(self): return self.find_element(self.password_error_loc) <|reserved_special_token_0|> def click_again_btn(self): self.click(self.again_btn_loc) <|reserved_special_token_0|> def text_payment(self): return self.get_text(self.payment_loc) <|reserved_special_token_0|> def element_typewriting_finish_btn(self): return self.find_element(self.typewriting_finish_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_clock_btn(self): return self.find_element(self.clock_btn_loc) <|reserved_special_token_0|> def element_no_clock_btn(self): return self.find_element(self.no_clock_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_upload_card_btn(self): self.click(self.upload_card_btn_loc) <|reserved_special_token_0|> def click_again_upload_card_btn(self): self.click(self.again_upload_card_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_copy_text_btn(self): self.click(self.copy_text_btn_loc) <|reserved_special_token_0|> def element_copy_format_btn(self): return self.find_element(self.copy_format_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_upload_btn(self): self.click(self.upload_btn_loc) <|reserved_special_token_0|> 
<|reserved_special_token_0|> <|reserved_special_token_0|> def click_reset_img_btn(self): self.click(self.reset_img_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_reminder_btn(self): return self.find_element(self.reminder_btn_loc) <|reserved_special_token_0|> def element_page_expired(self): return self.find_element(self.page_expired_loc) <|reserved_special_token_0|> def click_x_btn(self): self.click(self.x_btn_loc) <|reserved_special_token_1|> <|reserved_special_token_0|> class Zaojiaopage(Crazy): <|reserved_special_token_0|> <|reserved_special_token_0|> def click_zao(self): self.click(self.zao_btn_loc) def element_zao(self): return self.find_element(self.zao_btn_loc) <|reserved_special_token_0|> def click_find(self): self.click(self.find_loc) <|reserved_special_token_0|> def click_title_btn(self): self.click(self.title_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_helper(self): self.click(self.helper_loc) <|reserved_special_token_0|> def click_small_help_btn(self): self.click(self.small_help_btn_loc) <|reserved_special_token_0|> def element_small_name(self): return self.find_element(self.small_name_loc) def click_small_name(self): self.click(self.small_name_loc) <|reserved_special_token_0|> def click_switching_applet_btn(self): self.click(self.switching_applet_btn_loc) <|reserved_special_token_0|> def click_delete_small_btn(self): self.click(self.delete_small_btn_loc) <|reserved_special_token_0|> def element_edition_btn(self): return self.find_element(self.edition_btn_loc) <|reserved_special_token_0|> def element_delete_small1_btn(self): return self.find_element(self.delete_small1_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_experience_version_btn(self): self.clicks(self.experience_version_btn_loc, -1) <|reserved_special_token_0|> def element_audition_class_btn(self): return 
self.find_element(self.audition_class_btn_loc) def click_audition_class_btn(self): self.click(self.audition_class_btn_loc) <|reserved_special_token_0|> def click_wechat_grant_btn(self): self.click(self.wechat_grant_btn_loc) def double_click_wechat_grant(self): self.double_click(self.wechat_grant_btn_loc) def element_wechat_grant_btn(self): return self.find_element(self.wechat_grant_btn_loc) <|reserved_special_token_0|> def click_allow_btn(self): self.click(self.allow_btn_loc) <|reserved_special_token_0|> def click_mouth_btn(self): self.click(self.month_btn_loc) <|reserved_special_token_0|> def click_sure_btn(self): self.click(self.sure_btn_loc) <|reserved_special_token_0|> def class_info_btn(self): self.click(self.class_info_loc) <|reserved_special_token_0|> def element_attend_lectures_btn(self): return self.find_element(self.attend_lectures_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_class_btn(self): return self.find_element(self.class_btn_loc) <|reserved_special_token_0|> def click_get_to_know_btn(self): self.click(self.get_to_know_btn_loc) def element_get_to_know_btn(self): return self.find_element(self.get_to_know_btn_loc) <|reserved_special_token_0|> def click_sure_buy_btn(self): self.click(self.sure_buy_btn_loc) <|reserved_special_token_0|> def input_buy_password(self, paw): self.send_keys(self.buy_password_loc, paw) <|reserved_special_token_0|> def text_buy_money(self): return self.get_text(self.check_buy_money_loc) <|reserved_special_token_0|> def click_success_btn(self): self.click(self.success_btn_loc) <|reserved_special_token_0|> def click_check_address_btn(self): self.click(self.check_address_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_add_address_btn(self): self.click(self.add_address_btn_loc) <|reserved_special_token_0|> def input_name_btn(self, name): self.send_keys(self.name_loc, name) <|reserved_special_token_0|> def input_phone_btn(self, phone): self.send_keys(self.phone_btn_loc, 
phone) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def input_detailed_address_btn(self, address): self.send_keys(self.detailed_address_btn_loc, address) <|reserved_special_token_0|> def click_save_btn(self): self.click(self.save_btn_loc) <|reserved_special_token_0|> def click_receive_btn(self): self.click(self.receive_btn_loc) <|reserved_special_token_0|> def elements_addressee(self): return self.find_elements(self.addressee_loc) def clicks_addressee(self): self.clicks(self.addressee_loc, 0) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_know(self): self.click(self.know_btn_loc) <|reserved_special_token_0|> def element_all_curriculum_btn(self): return self.find_element(self.all_curriculum_btn_loc) def click_all_curriculum_btn(self): self.click(self.all_curriculum_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_my_btn(self): return self.find_element(self.my_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_my_baby(self): self.click(self.my_baby_btn_loc) <|reserved_special_token_0|> def text_my_baby_title(self): return self.get_text(self.my_baby_title_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_new_baby_btn(self): return self.find_element(self.new_baby_btn_loc) def click_new_baby_btn(self): self.click(self.new_baby_btn_loc) def clicks_new_baby_btn(self, n): self.clicks(self.new_baby_btn_loc, n) <|reserved_special_token_0|> def element_get_set(self): return self.find_element(self.get_set_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def inputs_baby_name(self, name, n): self.sends_keys(self.baby_name_loc, name, n) <|reserved_special_token_0|> def click_baby_bir_btn(self): self.click(self.baby_bir_btn_loc) <|reserved_special_token_0|> def click_finish_btn(self): self.click(self.finish_btn_loc) <|reserved_special_token_0|> 
<|reserved_special_token_0|> def click_my_home(self): self.click(self.my_home_loc) def element_my_home(self): return self.find_element(self.my_home_loc) <|reserved_special_token_0|> def click_switch_btn(self): self.click(self.switch_btn_loc) <|reserved_special_token_0|> def click_baby_bri(self): self.click(self.baby_bri_loc) <|reserved_special_token_0|> def clicks_class_img(self): self.clicks(self.class_img_btn_loc, 0) <|reserved_special_token_0|> def click_collection_btn(self): self.click(self.collection_btn_loc) def clicks_collection_btn(self, n): self.clicks(self.collection_btn_loc, n) <|reserved_special_token_0|> <|reserved_special_token_0|> def click_write_record_btn(self): self.click(self.write_record_btn_loc) def clicks_write_record_btn(self, n): self.clicks(self.write_record_btn_loc, n) <|reserved_special_token_0|> def click_album_btn(self): self.click(self.album_btn_loc) def element_album_btn(self): return self.find_element(self.album_btn_loc) <|reserved_special_token_0|> def click_small_video_btn(self): self.click(self.small_video_btn_loc) def element_small_video_btn(self): return self.find_element(self.small_video_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_release_btn(self, n): self.clicks(self.release_btn_loc, n) def element_record_info(self, data): record_info_loc = 'xpath', '//*[contains(@text, "{}")]'.format(data) record_info = self.find_element(record_info_loc) if record_info: return True else: return False <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def elements_class_name(self): return self.find_elements(self.class_name_loc) <|reserved_special_token_0|> def click_class2_name(self): self.click(self.class_name2_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def input_write_text(self, text): self.send_keys(self.write_text_loc, text) <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_choice_album(self, n): self.clicks(self.choice_album_loc, 
n) def elements_choice_album(self): return self.find_elements(self.choice_album_loc) <|reserved_special_token_0|> def click_complete_btn(self): self.click(self.complete_btn_loc) <|reserved_special_token_0|> def click_my_collection_btn(self): self.click(self.my_collection_btn_loc) <|reserved_special_token_0|> def elements_my_collection_english_course_btn(self): return self.find_elements(self.my_collection_english_course_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_my_course_btn(self): self.click(self.my_course_btn_loc) <|reserved_special_token_0|> def elements_my_course_buy_btn(self): return self.find_elements(self.my_course_buy_btn_loc) <|reserved_special_token_0|> def click_my_order_btn(self): self.click(self.my_order_btn_loc) <|reserved_special_token_0|> def elements_my_order_card_btn(self): return self.find_elements(self.my_order_card_btn_loc) <|reserved_special_token_0|> def click_my_record_btn(self): self.click(self.my_record_btn_loc) <|reserved_special_token_0|> def elements_my_record_class_btn(self): return self.find_elements(self.my_record_class_btn_loc) <|reserved_special_token_0|> def element_back_btn(self): return self.find_element(self.back_btn_loc) def click_back_btn(self): self.click(self.back_btn_loc) <|reserved_special_token_0|> def click_reply_5(self): self.click(self.reply_5_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def input_reply_5(self, num): self.send_keys(self.reply_input_5_loc, num) <|reserved_special_token_0|> def click_send(self): self.click(self.send_5_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def clicks_reply_code(self, n): self.clicks(self.reply_code_loc, n) <|reserved_special_token_0|> def element_long_code(self): return self.find_element(self.long_code_loc) def click_long_code(self): self.click(self.long_code_loc) <|reserved_special_token_0|> def click_discern_code(self): 
self.click(self.discern_code_loc) <|reserved_special_token_0|> def text_class_group(self): return self.get_text(self.class_group_loc) <|reserved_special_token_0|> def element_add_group_chat(self): return self.find_element(self.add_group_chat_loc) <|reserved_special_token_0|> def elements_reply_8(self): return self.find_elements(self.reply_8_loc) <|reserved_special_token_0|> def element_parent_btn(self): return self.find_element(self.parent_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def click_more_games_btn(self): self.click(self.more_games_btn_loc) <|reserved_special_token_0|> def click_look_all_btn(self): self.click(self.look_all_btn_loc) def element_look_all_btn(self): return self.find_elements(self.look_all_btn_loc) <|reserved_special_token_0|> def text_start_fingerprint_buy(self): return self.get_text(self.start_fingerprint_buy_loc) <|reserved_special_token_0|> def click_no_more_reminder_btn(self): self.click(self.no_more_reminder_btn_loc) <|reserved_special_token_0|> def click_cancel_btn(self): self.click(self.cancel_btn_loc) <|reserved_special_token_0|> def element_usd_password(self): return self.find_element(self.usd_password_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> def element_password_error(self): return self.find_element(self.password_error_loc) <|reserved_special_token_0|> def click_again_btn(self): self.click(self.again_btn_loc) <|reserved_special_token_0|> def text_payment(self): return self.get_text(self.payment_loc) <|reserved_special_token_0|> def element_typewriting_finish_btn(self): return self.find_element(self.typewriting_finish_btn_loc) <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> def element_clock_btn(self): return self.find_element(self.clock_btn_loc) <|reserved_special_token_0|> def element_no_clock_btn(self): return self.find_element(self.no_clock_btn_loc) <|reserved_special_token_0|> 
<|reserved_special_token_0|> <|reserved_special_token_0|> def click_upload_card_btn(self): self.click(self.upload_card_btn_loc) <|reserved_special_token_0|> def click_again_upload_card_btn(self): self.click(self.again_upload_card_btn_loc) <|reserved_special_token_0|> def click_save_img_btn(self): self.click(self.save_img_btn_loc) <|reserved_special_token_0|> def click_copy_text_btn(self): self.click(self.copy_text_btn_loc) <|reserved_special_token_0|> def element_copy_format_btn(self): return self.find_element(self.copy_format_btn_loc) <|reserved_special_token_0|> def click_card_go_btn(self): self.click(self.card_go_btn_loc) <|reserved_special_token_0|> def click_upload_btn(self): self.click(self.upload_btn_loc) <|reserved_special_token_0|> def element_today_card_btn(self): return self.find_element(self.today_card_btn_loc) <|reserved_special_token_0|> def click_reset_img_btn(self): self.click(self.reset_img_btn_loc) <|reserved_special_token_0|> def element_generated_loading(self): return self.find_element(self.generated_loading_loc) <|reserved_special_token_0|> def element_reminder_btn(self): return self.find_element(self.reminder_btn_loc) <|reserved_special_token_0|> def element_page_expired(self): return self.find_element(self.page_expired_loc) <|reserved_special_token_0|> def click_x_btn(self): self.click(self.x_btn_loc) <|reserved_special_token_1|> <|reserved_special_token_0|> class Zaojiaopage(Crazy): <|reserved_special_token_0|> <|reserved_special_token_0|> def click_zao(self): self.click(self.zao_btn_loc) def element_zao(self): return self.find_element(self.zao_btn_loc) <|reserved_special_token_0|> def click_find(self): self.click(self.find_loc) <|reserved_special_token_0|> def click_title_btn(self): self.click(self.title_btn_loc) <|reserved_special_token_0|> def element_helper(self): return self.find_element(self.helper_loc) def click_helper(self): self.click(self.helper_loc) <|reserved_special_token_0|> def click_small_help_btn(self): 
self.click(self.small_help_btn_loc) <|reserved_special_token_0|> def element_small_name(self): return self.find_element(self.small_name_loc) def click_small_name(self): self.click(self.small_name_loc) <|reserved_special_token_0|> def click_switching_applet_btn(self): self.click(self.switching_applet_btn_loc) <|reserved_special_token_0|> def click_delete_small_btn(self): self.click(self.delete_small_btn_loc) <|reserved_special_token_0|> def element_edition_btn(self): return self.find_element(self.edition_btn_loc) <|reserved_special_token_0|> def element_delete_small1_btn(self): return self.find_element(self.delete_small1_btn_loc) <|reserved_special_token_0|> def click_version_btn(self): self.click(self.version_btn_loc) <|reserved_special_token_0|> def clicks_experience_version_btn(self): self.clicks(self.experience_version_btn_loc, -1) <|reserved_special_token_0|> def element_audition_class_btn(self): return self.find_element(self.audition_class_btn_loc) def click_audition_class_btn(self): self.click(self.audition_class_btn_loc) <|reserved_special_token_0|> def click_wechat_grant_btn(self): self.click(self.wechat_grant_btn_loc) def double_click_wechat_grant(self): self.double_click(self.wechat_grant_btn_loc) def element_wechat_grant_btn(self): return self.find_element(self.wechat_grant_btn_loc) <|reserved_special_token_0|> def click_allow_btn(self): self.click(self.allow_btn_loc) <|reserved_special_token_0|> def click_mouth_btn(self): self.click(self.month_btn_loc) <|reserved_special_token_0|> def click_sure_btn(self): self.click(self.sure_btn_loc) <|reserved_special_token_0|> def class_info_btn(self): self.click(self.class_info_loc) <|reserved_special_token_0|> def element_attend_lectures_btn(self): return self.find_element(self.attend_lectures_btn_loc) def click_attend_lectures_btn(self): self.click(self.attend_lectures_btn_loc) <|reserved_special_token_0|> def element_class_btn(self): return self.find_element(self.class_btn_loc) <|reserved_special_token_0|> def 
click_get_to_know_btn(self): self.click(self.get_to_know_btn_loc) def element_get_to_know_btn(self): return self.find_element(self.get_to_know_btn_loc) <|reserved_special_token_0|> def click_sure_buy_btn(self): self.click(self.sure_buy_btn_loc) <|reserved_special_token_0|> def input_buy_password(self, paw): self.send_keys(self.buy_password_loc, paw) <|reserved_special_token_0|> def text_buy_money(self): return self.get_text(self.check_buy_money_loc) <|reserved_special_token_0|> def click_success_btn(self): self.click(self.success_btn_loc) <|reserved_special_token_0|> def click_check_address_btn(self): self.click(self.check_address_btn_loc) def element_check_address_btn(self): return self.find_element(self.check_address_btn_loc) <|reserved_special_token_0|> def click_add_address_btn(self): self.click(self.add_address_btn_loc) <|reserved_special_token_0|> def input_name_btn(self, name): self.send_keys(self.name_loc, name) <|reserved_special_token_0|> def input_phone_btn(self, phone): self.send_keys(self.phone_btn_loc, phone) <|reserved_special_token_0|> def click_region_btn(self): self.click(self.region_btn_loc) <|reserved_special_token_0|> def input_detailed_address_btn(self, address): self.send_keys(self.detailed_address_btn_loc, address) <|reserved_special_token_0|> def click_save_btn(self): self.click(self.save_btn_loc) <|reserved_special_token_0|> def click_receive_btn(self): self.click(self.receive_btn_loc) <|reserved_special_token_0|> def elements_addressee(self): return self.find_elements(self.addressee_loc) def clicks_addressee(self): self.clicks(self.addressee_loc, 0) <|reserved_special_token_0|> def element_know(self): return self.find_element(self.know_btn_loc) def click_know(self): self.click(self.know_btn_loc) <|reserved_special_token_0|> def element_all_curriculum_btn(self): return self.find_element(self.all_curriculum_btn_loc) def click_all_curriculum_btn(self): self.click(self.all_curriculum_btn_loc) <|reserved_special_token_0|> def 
element_curriculum_date_btn(self): return self.find_element(self.curriculum_date_btn_loc) <|reserved_special_token_0|> def element_my_btn(self): return self.find_element(self.my_btn_loc) def click_my(self): self.click(self.my_btn_loc) <|reserved_special_token_0|> def click_my_baby(self): self.click(self.my_baby_btn_loc) <|reserved_special_token_0|> def text_my_baby_title(self): return self.get_text(self.my_baby_title_loc) def elements_title(self): return self.find_elements(self.my_baby_title_loc) <|reserved_special_token_0|> def element_new_baby_btn(self): return self.find_element(self.new_baby_btn_loc) def click_new_baby_btn(self): self.click(self.new_baby_btn_loc) def clicks_new_baby_btn(self, n): self.clicks(self.new_baby_btn_loc, n) <|reserved_special_token_0|> def element_get_set(self): return self.find_element(self.get_set_loc) <|reserved_special_token_0|> def click_next(self): self.click(self.next_btn_loc) <|reserved_special_token_0|> def inputs_baby_name(self, name, n): self.sends_keys(self.baby_name_loc, name, n) <|reserved_special_token_0|> def click_baby_bir_btn(self): self.click(self.baby_bir_btn_loc) <|reserved_special_token_0|> def click_finish_btn(self): self.click(self.finish_btn_loc) def clicks_finish_btn(self, n): self.clicks(self.finish_btn_loc, n) <|reserved_special_token_0|> def click_my_home(self): self.click(self.my_home_loc) def element_my_home(self): return self.find_element(self.my_home_loc) <|reserved_special_token_0|> def click_switch_btn(self): self.click(self.switch_btn_loc) <|reserved_special_token_0|> def click_baby_bri(self): self.click(self.baby_bri_loc) <|reserved_special_token_0|> def clicks_class_img(self): self.clicks(self.class_img_btn_loc, 0) <|reserved_special_token_0|> def click_collection_btn(self): self.click(self.collection_btn_loc) def clicks_collection_btn(self, n): self.clicks(self.collection_btn_loc, n) def element_collection_btn(self): return self.find_element(self.collection_btn_loc) <|reserved_special_token_0|> 
def click_write_record_btn(self): self.click(self.write_record_btn_loc) def clicks_write_record_btn(self, n): self.clicks(self.write_record_btn_loc, n) <|reserved_special_token_0|> def click_album_btn(self): self.click(self.album_btn_loc) def element_album_btn(self): return self.find_element(self.album_btn_loc) <|reserved_special_token_0|> def click_small_video_btn(self): self.click(self.small_video_btn_loc) def element_small_video_btn(self): return self.find_element(self.small_video_btn_loc) <|reserved_special_token_0|> def click_release_btn(self): self.click(self.release_btn_loc) def clicks_release_btn(self, n): self.clicks(self.release_btn_loc, n) def element_record_info(self, data): record_info_loc = 'xpath', '//*[contains(@text, "{}")]'.format(data) record_info = self.find_element(record_info_loc) if record_info: return True else: return False <|reserved_special_token_0|> def click_class_name(self): self.click(self.class_name_loc) def clicks_class_name(self, n): self.clicks(self.class_name_loc, n) def elements_class_name(self): return self.find_elements(self.class_name_loc) <|reserved_special_token_0|> def click_class2_name(self): self.click(self.class_name2_loc) def clicks_class2_name(self, n): self.clicks(self.class_name2_loc, n) <|reserved_special_token_0|> def input_write_text(self, text): self.send_keys(self.write_text_loc, text) def inputs_write_text(self, text, n): self.sends_keys(self.write_text_loc, text, n) <|reserved_special_token_0|> def clicks_choice_album(self, n): self.clicks(self.choice_album_loc, n) def elements_choice_album(self): return self.find_elements(self.choice_album_loc) <|reserved_special_token_0|> def click_complete_btn(self): self.click(self.complete_btn_loc) <|reserved_special_token_0|> def click_my_collection_btn(self): self.click(self.my_collection_btn_loc) <|reserved_special_token_0|> def elements_my_collection_english_course_btn(self): return self.find_elements(self.my_collection_english_course_btn_loc) 
<|reserved_special_token_0|> def elements_my_collection_game_course_btn(self): return self.find_elements(self.my_collection_game_course_btn_loc) <|reserved_special_token_0|> def click_my_course_btn(self): self.click(self.my_course_btn_loc) <|reserved_special_token_0|> def elements_my_course_buy_btn(self): return self.find_elements(self.my_course_buy_btn_loc) <|reserved_special_token_0|> def click_my_order_btn(self): self.click(self.my_order_btn_loc) <|reserved_special_token_0|> def elements_my_order_card_btn(self): return self.find_elements(self.my_order_card_btn_loc) <|reserved_special_token_0|> def click_my_record_btn(self): self.click(self.my_record_btn_loc) <|reserved_special_token_0|> def elements_my_record_class_btn(self): return self.find_elements(self.my_record_class_btn_loc) <|reserved_special_token_0|> def element_back_btn(self): return self.find_element(self.back_btn_loc) def click_back_btn(self): self.click(self.back_btn_loc) <|reserved_special_token_0|> def click_reply_5(self): self.click(self.reply_5_loc) def elements_reply_5(self): return self.find_elements(self.reply_5_loc) <|reserved_special_token_0|> def click_add_to_btn(self): self.click(self.add_to_btn_loc) <|reserved_special_token_0|> def input_reply_5(self, num): self.send_keys(self.reply_input_5_loc, num) <|reserved_special_token_0|> def click_send(self): self.click(self.send_5_loc) <|reserved_special_token_0|> def elements_reply_code(self): return self.find_elements(self.reply_code_loc) def clicks_reply_code(self, n): self.clicks(self.reply_code_loc, n) <|reserved_special_token_0|> def element_long_code(self): return self.find_element(self.long_code_loc) def click_long_code(self): self.click(self.long_code_loc) <|reserved_special_token_0|> def click_discern_code(self): self.click(self.discern_code_loc) <|reserved_special_token_0|> def text_class_group(self): return self.get_text(self.class_group_loc) <|reserved_special_token_0|> def element_add_group_chat(self): return 
self.find_element(self.add_group_chat_loc) <|reserved_special_token_0|> def elements_reply_8(self): return self.find_elements(self.reply_8_loc) <|reserved_special_token_0|> def element_parent_btn(self): return self.find_element(self.parent_btn_loc) <|reserved_special_token_0|> def elements_info_btn(self): return self.find_elements(self.info_btn_loc) def clicks_info_btn(self, n): self.clicks(self.info_btn_loc, n) <|reserved_special_token_0|> def click_more_games_btn(self): self.click(self.more_games_btn_loc) <|reserved_special_token_0|> def click_look_all_btn(self): self.click(self.look_all_btn_loc) def element_look_all_btn(self): return self.find_elements(self.look_all_btn_loc) <|reserved_special_token_0|> def text_start_fingerprint_buy(self): return self.get_text(self.start_fingerprint_buy_loc) <|reserved_special_token_0|> def click_no_more_reminder_btn(self): self.click(self.no_more_reminder_btn_loc) <|reserved_special_token_0|> def click_cancel_btn(self): self.click(self.cancel_btn_loc) <|reserved_special_token_0|> def element_usd_password(self): return self.find_element(self.usd_password_loc) def click_usd_password(self): self.click(self.usd_password_loc) <|reserved_special_token_0|> def element_password_error(self): return self.find_element(self.password_error_loc) <|reserved_special_token_0|> def click_again_btn(self): self.click(self.again_btn_loc) <|reserved_special_token_0|> def text_payment(self): return self.get_text(self.payment_loc) <|reserved_special_token_0|> def element_typewriting_finish_btn(self): return self.find_element(self.typewriting_finish_btn_loc) def click_typewriting_finish_btn(self): self.click(self.typewriting_finish_btn_loc) <|reserved_special_token_0|> def click_clock_btn(self): self.click(self.clock_btn_loc) def element_clock_btn(self): return self.find_element(self.clock_btn_loc) <|reserved_special_token_0|> def element_no_clock_btn(self): return self.find_element(self.no_clock_btn_loc) <|reserved_special_token_0|> def 
click_get_card_btn(self): self.click(self.get_card_btn_loc) <|reserved_special_token_0|> def click_upload_card_btn(self): self.click(self.upload_card_btn_loc) <|reserved_special_token_0|> def click_again_upload_card_btn(self): self.click(self.again_upload_card_btn_loc) <|reserved_special_token_0|> def click_save_img_btn(self): self.click(self.save_img_btn_loc) <|reserved_special_token_0|> def click_copy_text_btn(self): self.click(self.copy_text_btn_loc) <|reserved_special_token_0|> def element_copy_format_btn(self): return self.find_element(self.copy_format_btn_loc) <|reserved_special_token_0|> def click_card_go_btn(self): self.click(self.card_go_btn_loc) <|reserved_special_token_0|> def click_upload_btn(self): self.click(self.upload_btn_loc) <|reserved_special_token_0|> def element_today_card_btn(self): return self.find_element(self.today_card_btn_loc) <|reserved_special_token_0|> def click_reset_img_btn(self): self.click(self.reset_img_btn_loc) <|reserved_special_token_0|> def element_generated_loading(self): return self.find_element(self.generated_loading_loc) <|reserved_special_token_0|> def element_reminder_btn(self): return self.find_element(self.reminder_btn_loc) <|reserved_special_token_0|> def element_page_expired(self): return self.find_element(self.page_expired_loc) <|reserved_special_token_0|> def click_x_btn(self): self.click(self.x_btn_loc) <|reserved_special_token_1|> #!/usr/bin/env python # -*- coding: utf-8 -*- # @Time : 2019/4/14 14:31 # @Author : lixiaofeng # @File : page_zaojiao.py # @Software: PyCharm # @desc : from common.basics import Crazy class Zaojiaopage(Crazy): """早教小程序""" zao_btn_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/cx" and @text="包妈优选"]') # zao_btn_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/cx" and @text="小小包早教"]') def click_zao(self): self.click(self.zao_btn_loc) def element_zao(self): return self.find_element(self.zao_btn_loc) find_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/d7b" and @text="发现"]') # 
发现按钮 def click_find(self): self.click(self.find_loc) title_btn_loc = ('xpath', '//*[@resource-id="android:id/title" and @text="小程序"]') # 发现页小程序按钮 def click_title_btn(self): self.click(self.title_btn_loc) helper_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/c5" and @text="小程序助手"]') # 小程序助手 def element_helper(self): return self.find_element(self.helper_loc) def click_helper(self): self.click(self.helper_loc) small_help_btn_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/cx" and @text="小程序助手"]') # 小程序助手 def click_small_help_btn(self): self.click(self.small_help_btn_loc) small_name_loc = ('xpath', '//*[contains(@text, "包妈优选")]') # 包妈优选 def element_small_name(self): return self.find_element(self.small_name_loc) def click_small_name(self): self.click(self.small_name_loc) switching_applet_btn_loc = ('xpath', '//*[contains(@text, "切换小程序")]') # 切换小程序 def click_switching_applet_btn(self): self.click(self.switching_applet_btn_loc) delete_small_btn_loc = ('xpath', '//*[contains(@text, "删除")]') # 删除小程序按钮 def click_delete_small_btn(self): self.click(self.delete_small_btn_loc) edition_btn_loc = ('xpath', '//*[contains(@text, "百宝福利Buy")]') def element_edition_btn(self): return self.find_element(self.edition_btn_loc) delete_small1_btn_loc = ('xpath', '//*[contains(@text, "拖动到此处删除")]') def element_delete_small1_btn(self): return self.find_element(self.delete_small1_btn_loc) version_btn_loc = ('xpath', '//*[contains(@text, "版本查看")]') # 版本查看按钮 def click_version_btn(self): self.click(self.version_btn_loc) experience_version_btn_loc = ('xpath', '//*[contains(@text, "6.0.09")]') # 体验版 def clicks_experience_version_btn(self): self.clicks(self.experience_version_btn_loc, -1) audition_class_btn_loc = ('xpath', '//*[contains(@text, "0元领取10节试听课")]') # 领取试听课 def element_audition_class_btn(self): return self.find_element(self.audition_class_btn_loc) def click_audition_class_btn(self): self.click(self.audition_class_btn_loc) wechat_grant_btn_loc = (('xpath', '//*[contains(@text, 
"微信授权") and @class="android.widget.Button" ]')) # 微信授权 def click_wechat_grant_btn(self): self.click(self.wechat_grant_btn_loc) def double_click_wechat_grant(self): self.double_click(self.wechat_grant_btn_loc) def element_wechat_grant_btn(self): return self.find_element(self.wechat_grant_btn_loc) allow_btn_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/st" and @text="允许"]') # 完成按钮 def click_allow_btn(self): self.click(self.allow_btn_loc) month_btn_loc = ('xpath', '//*[contains(@text, "2018")]') # 选择月份 def click_mouth_btn(self): self.click(self.month_btn_loc) sure_btn_loc = ('xpath', '//*[contains(@text, "确定")]') # 确定按钮 def click_sure_btn(self): self.click(self.sure_btn_loc) class_info_loc = ('xpath', '//*[contains(@text, "课程介绍")]') # 课程介绍 # class_info_loc = ('xpath', '//android.widget.FrameLayout/android.view.ViewGroup[0]') # 课程介绍 def class_info_btn(self): self.click(self.class_info_loc) attend_lectures_btn_loc = ('xpath', '//*[contains(@text, "立即听课")]') # 立即听课 def element_attend_lectures_btn(self): return self.find_element(self.attend_lectures_btn_loc) def click_attend_lectures_btn(self): self.click(self.attend_lectures_btn_loc) class_btn_loc = ('xpath', '//*[contains(@text, "预备课 预备课")]') # 预备课 预备课 def element_class_btn(self): return self.find_element(self.class_btn_loc) get_to_know_btn_loc = ('xpath', '//*[contains(@text, "立即了解正式课 ")]') # 立即了解正式课 def click_get_to_know_btn(self): self.click(self.get_to_know_btn_loc) def element_get_to_know_btn(self): return self.find_element(self.get_to_know_btn_loc) sure_buy_btn_loc = ('xpath', '//*[contains(@text, "立即购买")]') # 立即购买 def click_sure_buy_btn(self): self.click(self.sure_buy_btn_loc) buy_password_loc = ('id', 'com.tencent.mm:id/cfs') # 输入支付密码 def input_buy_password(self, paw): self.send_keys(self.buy_password_loc, paw) check_buy_money_loc = ('id', 'com.tencent.mm:id/dlh') # 获取支付金额 def text_buy_money(self): return self.get_text(self.check_buy_money_loc) success_btn_loc = ('xpath', 
'//*[@resource-id="com.tencent.mm:id/f8o" and @text="完成"]') # 完成按钮 def click_success_btn(self): self.click(self.success_btn_loc) check_address_btn_loc = ('xpath', '//*[contains(@text, "收货地址:请选择地址")]') # 选择收货地址 def click_check_address_btn(self): self.click(self.check_address_btn_loc) def element_check_address_btn(self): return self.find_element(self.check_address_btn_loc) add_address_btn_loc = ('xpath', '//*[contains(@text, "添加地址")]') # 添加地址 def click_add_address_btn(self): self.click(self.add_address_btn_loc) name_loc = ('xpath', '//*[contains(@text, "请输入你的姓名")]') # 请输入你的姓名 def input_name_btn(self, name): self.send_keys(self.name_loc, name) phone_btn_loc = ('xpath', '//*[contains(@text, "请填写收件人电话")]') # 请填写收件人电话 def input_phone_btn(self, phone): self.send_keys(self.phone_btn_loc, phone) region_btn_loc = ('xpath', '//*[contains(@text, "请输入你所在地区")]') # 请输入你所在地区 def click_region_btn(self): self.click(self.region_btn_loc) detailed_address_btn_loc = ('xpath', '//*[contains(@text, "请输入你的详细地址")]') # 请输入你的详细地址 def input_detailed_address_btn(self, address): self.send_keys(self.detailed_address_btn_loc, address) save_btn_loc = ('xpath', '//*[contains(@text, "保存")]') # 保存 def click_save_btn(self): self.click(self.save_btn_loc) receive_btn_loc = ('xpath', '//*[contains(@text, "立即领取")]') # 立即领取 def click_receive_btn(self): self.click(self.receive_btn_loc) addressee_loc = ('xpath', '//*[contains(@text, "收件人:")]') # 地址列表是否有地址信息 def elements_addressee(self): return self.find_elements(self.addressee_loc) def clicks_addressee(self): self.clicks(self.addressee_loc, 0) know_btn_loc = ('xpath', '//*[contains(@text, "知道了")]') # 地址列表是否有地址信息 def element_know(self): return self.find_element(self.know_btn_loc) def click_know(self): self.click(self.know_btn_loc) all_curriculum_btn_loc = ('xpath', '//*[contains(@text, "查看全部课程")]') # 查看全部课程 def element_all_curriculum_btn(self): return self.find_element(self.all_curriculum_btn_loc) def click_all_curriculum_btn(self): 
self.click(self.all_curriculum_btn_loc) curriculum_date_btn_loc = ('xpath', '//*[contains(@text, "2019-0")]') # 历史推送 def element_curriculum_date_btn(self): return self.find_element(self.curriculum_date_btn_loc) my_btn_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/ct" and @text="我的"]') # 我的 def element_my_btn(self): return self.find_element(self.my_btn_loc) def click_my(self): self.click(self.my_btn_loc) my_baby_btn_loc = ('xpath', '//*[contains(@text, "我的宝宝")]') # 我的宝宝 def click_my_baby(self): self.click(self.my_baby_btn_loc) my_baby_title_loc = ('id', 'com.tencent.mm:id/ox') def text_my_baby_title(self): return self.get_text(self.my_baby_title_loc) def elements_title(self): return self.find_elements(self.my_baby_title_loc) new_baby_btn_loc = ('xpath', '//*[contains(@text, "新建宝宝")]') # 新建宝宝 def element_new_baby_btn(self): return self.find_element(self.new_baby_btn_loc) def click_new_baby_btn(self): self.click(self.new_baby_btn_loc) def clicks_new_baby_btn(self, n): self.clicks(self.new_baby_btn_loc, n) get_set_loc = ('xpath', '//*[contains(@text, "预备课 预备课")]') # 新建宝宝 def element_get_set(self): return self.find_element(self.get_set_loc) next_btn_loc = ('xpath', '//*[contains(@text, "下一步")]') # 我的宝宝 def click_next(self): self.click(self.next_btn_loc) baby_name_loc = ('xpath', '//*[contains(@text, "请输入宝宝姓名")]') # 请输入宝宝姓名 def inputs_baby_name(self, name, n): self.sends_keys(self.baby_name_loc, name, n) baby_bir_btn_loc = ('xpath', '//*[contains(@text, "宝宝的生日:")]') # 宝宝的生日 def click_baby_bir_btn(self): self.click(self.baby_bir_btn_loc) finish_btn_loc = ('xpath', '//*[contains(@text, "完成")]') # 完成按钮 def click_finish_btn(self): self.click(self.finish_btn_loc) def clicks_finish_btn(self, n): self.clicks(self.finish_btn_loc, n) my_home_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/ct" and @text="首页"]') # 首页 def click_my_home(self): self.click(self.my_home_loc) def element_my_home(self): return self.find_element(self.my_home_loc) switch_btn_loc = ('xpath', 
'//*[contains(@text, "切换")]') # 切换 def click_switch_btn(self): self.click(self.switch_btn_loc) baby_bri_loc = ('xpath', '//*[contains(@text, "宝宝生日:")]') # 宝宝生日: def click_baby_bri(self): self.click(self.baby_bri_loc) class_img_btn_loc = ('xpath', 'android.widget.Image') def clicks_class_img(self): self.clicks(self.class_img_btn_loc, 0) collection_btn_loc = ('xpath', '//*[contains(@text, "收藏")]') # 收藏 def click_collection_btn(self): self.click(self.collection_btn_loc) def clicks_collection_btn(self, n): self.clicks(self.collection_btn_loc, n) def element_collection_btn(self): return self.find_element(self.collection_btn_loc) write_record_btn_loc = ('xpath', '//*[contains(@text, "写记录") and @class="android.widget.Button" ]') # 写记录按钮 def click_write_record_btn(self): self.click(self.write_record_btn_loc) def clicks_write_record_btn(self, n): self.clicks(self.write_record_btn_loc, n) album_btn_loc = ('xpath', '//*[contains(@text, "相册")]') # 相册 def click_album_btn(self): self.click(self.album_btn_loc) def element_album_btn(self): return self.find_element(self.album_btn_loc) small_video_btn_loc = ('xpath', '//*[contains(@text, "小视频")]') # 小视频 def click_small_video_btn(self): self.click(self.small_video_btn_loc) def element_small_video_btn(self): return self.find_element(self.small_video_btn_loc) release_btn_loc = ('xpath', '//*[contains(@text, "发布")]') # 发布 def click_release_btn(self): self.click(self.release_btn_loc) def clicks_release_btn(self, n): self.clicks(self.release_btn_loc, n) def element_record_info(self, data): # 判断是否定位到包含text的元素 record_info_loc = ('xpath', '//*[contains(@text, "{}")]'.format(data)) record_info = self.find_element(record_info_loc) if record_info: return True else: return False class_name_loc = ('xpath', '//*[contains(@text, "歌曲")]') # 课程名称 # class_name_loc = ('xpath', '//*[contains(@text, "歌曲:Head and shoulders")]') # 课程名称 def click_class_name(self): self.click(self.class_name_loc) def clicks_class_name(self, n): 
self.clicks(self.class_name_loc, n) def elements_class_name(self): return self.find_elements(self.class_name_loc) class_name2_loc = ('xpath', '//*[contains(@text, "一起走")]') # 课程名称 # class_name2_loc = ('xpath', '//*[contains(@text, "弹出来的画")]') # 课程名称 def click_class2_name(self): self.click(self.class_name2_loc) def clicks_class2_name(self, n): self.clicks(self.class_name2_loc, n) write_text_loc = ('xpath', '//*[contains(@text, "0/1000")]') # 写记录 def input_write_text(self, text): self.send_keys(self.write_text_loc, text) def inputs_write_text(self, text, n): self.sends_keys(self.write_text_loc, text, n) choice_album_loc = ('id', 'com.tencent.mm:id/bpy') def clicks_choice_album(self, n): self.clicks(self.choice_album_loc, n) def elements_choice_album(self): return self.find_elements(self.choice_album_loc) complete_btn_loc = ('id', 'com.tencent.mm:id/ki') # 完成 def click_complete_btn(self): self.click(self.complete_btn_loc) my_collection_btn_loc = ('xpath', '//*[contains(@text, "我的收藏")]') # 我的收藏 def click_my_collection_btn(self): self.click(self.my_collection_btn_loc) my_collection_english_course_btn_loc = ('xpath', '//*[contains(@text, "早教")]') # 早教英语课 def elements_my_collection_english_course_btn(self): return self.find_elements(self.my_collection_english_course_btn_loc) my_collection_game_course_btn_loc = ('xpath', '//*[contains(@text, "宝宝游戏馆")]') # 宝宝游戏馆 def elements_my_collection_game_course_btn(self): return self.find_elements(self.my_collection_game_course_btn_loc) my_course_btn_loc = ('xpath', '//*[contains(@text, "我的课程")]') # 我的课程 def click_my_course_btn(self): self.click(self.my_course_btn_loc) my_course_buy_btn_loc = ('xpath', '//*[contains(@text, "早教核心课年卡")]') # 早教核心课年卡 def elements_my_course_buy_btn(self): return self.find_elements(self.my_course_buy_btn_loc) my_order_btn_loc = ('xpath', '//*[contains(@text, "我的订单")]') # 我的订单 def click_my_order_btn(self): self.click(self.my_order_btn_loc) my_order_card_btn_loc = ('xpath', '//*[contains(@text, "订单编号:")]') # 
订单编号: def elements_my_order_card_btn(self): return self.find_elements(self.my_order_card_btn_loc) my_record_btn_loc = ('xpath', '//*[contains(@text, "成长记录")]') # 成长记录 def click_my_record_btn(self): self.click(self.my_record_btn_loc) my_record_class_btn_loc = ('xpath', '//*[contains(@text, "#")]') # # 测试英语课程组 def elements_my_record_class_btn(self): return self.find_elements(self.my_record_class_btn_loc) back_btn_loc = ( 'xpath', '//*[@resource-id="com.tencent.mm:id/on" and @class="android.widget.LinearLayout"]') # 返回按钮 def element_back_btn(self): return self.find_element(self.back_btn_loc) def click_back_btn(self): self.click(self.back_btn_loc) reply_5_loc = ('xpath', '//android.widget.Image') # 回复5 def click_reply_5(self): self.click(self.reply_5_loc) def elements_reply_5(self): return self.find_elements(self.reply_5_loc) add_to_btn_loc = ('xpath', '//*[contains(@text, "立即添加")]') # 立即添加 def click_add_to_btn(self): self.click(self.add_to_btn_loc) reply_input_5_loc = ('id', 'com.tencent.mm:id/ami') def input_reply_5(self, num): self.send_keys(self.reply_input_5_loc, num) send_5_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/amp" and @text="发送"]') # 发送 def click_send(self): self.click(self.send_5_loc) reply_code_loc = ('id', 'com.tencent.mm:id/ap9') # 获取回复的二维码 def elements_reply_code(self): return self.find_elements(self.reply_code_loc) def clicks_reply_code(self, n): self.clicks(self.reply_code_loc, n) long_code_loc = ('id', 'com.tencent.mm:id/adi') # 长按二维码 def element_long_code(self): return self.find_element(self.long_code_loc) def click_long_code(self): self.click(self.long_code_loc) discern_code_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/cx" and @text="识别图中二维码"]') # 识别图中二维码 def click_discern_code(self): self.click(self.discern_code_loc) class_group_loc = ('id', 'android:id/text1') # 群名称 def text_class_group(self): return self.get_text(self.class_group_loc) add_group_chat_loc = ('xpath', '//*[contains(@text, "加入该群聊")]') # 加入该群聊 def 
element_add_group_chat(self): return self.find_element(self.add_group_chat_loc) reply_8_loc = ('xpath', '//android.widget.Image') # 回复8的banner 回复8->进公众号->点击推送 看到的二维码 def elements_reply_8(self): return self.find_elements(self.reply_8_loc) parent_btn_loc = ('xpath', '//*[contains(@text, "亲爱的家长:")]') # 亲爱的家长: def element_parent_btn(self): return self.find_element(self.parent_btn_loc) info_btn_loc = ('id', 'com.tencent.mm:id/a8q') # 详情 def elements_info_btn(self): return self.find_elements(self.info_btn_loc) def clicks_info_btn(self, n): self.clicks(self.info_btn_loc, n) more_games_btn_loc = ('xpath', '//*[contains(@text, "更多亲子游戏")]') # 更多亲子游戏 def click_more_games_btn(self): self.click(self.more_games_btn_loc) look_all_btn_loc = ('xpath', '//*[contains(@text, "查看全部")]') # 查看全部 def click_look_all_btn(self): self.click(self.look_all_btn_loc) def element_look_all_btn(self): return self.find_elements(self.look_all_btn_loc) start_fingerprint_buy_loc = ('id', 'com.tencent.mm:id/btp') # 开启指纹支付弹窗文本 开启指纹支付,支付时可通过验证指纹快速完成付款。 def text_start_fingerprint_buy(self): return self.get_text(self.start_fingerprint_buy_loc) no_more_reminder_btn_loc = ('id', 'com.tencent.mm:id/btq') # 不再提醒 def click_no_more_reminder_btn(self): self.click(self.no_more_reminder_btn_loc) cancel_btn_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/azz" and @text="取消"]') # 取消 def click_cancel_btn(self): self.click(self.cancel_btn_loc) usd_password_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/fg4" and @text="使用密码"]') # 使用密码 def element_usd_password(self): return self.find_element(self.usd_password_loc) def click_usd_password(self): self.click(self.usd_password_loc) password_error_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/d8x" and @text="支付密码错误,请重试"]') # 支付密码错误,请重试 def element_password_error(self): return self.find_element(self.password_error_loc) again_btn_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/azz" and @text="重试"]') # 重试 def click_again_btn(self): 
self.click(self.again_btn_loc) payment_loc = ('id', 'com.tencent.mm:id/fg3') # 请输入支付密码 文本 def text_payment(self): return self.get_text(self.payment_loc) typewriting_finish_btn_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/z2" and @text="完成"]') # 输入法上的完成按钮 def element_typewriting_finish_btn(self): return self.find_element(self.typewriting_finish_btn_loc) def click_typewriting_finish_btn(self): self.click(self.typewriting_finish_btn_loc) # 打卡 clock_btn_loc = ('xpath', '//*[contains(@text, "打卡")]') # 打卡 def click_clock_btn(self): self.click(self.clock_btn_loc) def element_clock_btn(self): return self.find_element(self.clock_btn_loc) # com.tencent.mm:id/ox no_clock_btn_loc = ('xpath', '//*[contains(@text, "你还未开启打卡")]') # 你还未开启打卡 def element_no_clock_btn(self): return self.find_element(self.no_clock_btn_loc) get_card_btn_loc = ('xpath', '//*[@text="获取打卡海报" and @class="android.widget.Button"]') # 获取打卡海报 def click_get_card_btn(self): self.click(self.get_card_btn_loc) upload_card_btn_loc = ('xpath', '//*[@text="上传截图" and @class="android.widget.Button"]') # 上传截图 def click_upload_card_btn(self): self.click(self.upload_card_btn_loc) again_upload_card_btn_loc = ('xpath', '//*[@text="重新上传截图" and @class="android.widget.Button"]') # 重新上传截图 def click_again_upload_card_btn(self): self.click(self.again_upload_card_btn_loc) save_img_btn_loc = ('xpath', '//*[@text="保存图片" and @class="android.widget.Button"]') # 保存图片 def click_save_img_btn(self): self.click(self.save_img_btn_loc) copy_text_btn_loc = ('xpath', '//*[@text="复制发圈文案" and @class="android.widget.Button"]') # 复制发圈文案 def click_copy_text_btn(self): self.click(self.copy_text_btn_loc) copy_format_btn_loc = ('xpath', '//*[contains(@text, "发布朋友圈截图规范")]') # 发布朋友圈截图规范 def element_copy_format_btn(self): return self.find_element(self.copy_format_btn_loc) card_go_btn_loc = ('xpath', '//*[contains(@text, "关闭小程序,去朋友圈打卡截图")]') # 关闭小程序,去朋友圈打卡截图 def click_card_go_btn(self): self.click(self.card_go_btn_loc) upload_btn_loc = ('xpath', 
'//*[@text="上传" and @class="android.widget.Button"]') # 上传 def click_upload_btn(self): self.click(self.upload_btn_loc) today_card_btn_loc = ('xpath', '//*[contains(@text, "今日已提交打卡")]') # 今日已提交打卡 def element_today_card_btn(self): return self.find_element(self.today_card_btn_loc) reset_img_btn_loc = ('xpath', '//*[@text="重新选择截图" and @class="android.widget.Button"]') # 重新选择截图 def click_reset_img_btn(self): self.click(self.reset_img_btn_loc) generated_loading_loc = ('xpath', '//*[@resource-id="com.tencent.mm:id/cx" and @text="正在生成..."]') # 正在生成... def element_generated_loading(self): return self.find_element(self.generated_loading_loc) reminder_btn_loc = ('xpath', '//*[contains(@text, "温馨提示")]') # 温馨提示 def element_reminder_btn(self): return self.find_element(self.reminder_btn_loc) page_expired_loc = ('xpath', '//*[contains(@text, "页面已经过期")]') # 页面已经过期 def element_page_expired(self): return self.find_element(self.page_expired_loc) x_btn_loc = ('id', 'com.tencent.mm:id/kx') def click_x_btn(self): self.click(self.x_btn_loc)
flexible
{ "blob_id": "1980fb4d6e7d3c6fe51f4a242610b5489e553859", "index": 128, "step-1": "<mask token>\n\n\nclass Zaojiaopage(Crazy):\n <mask token>\n <mask token>\n\n def click_zao(self):\n self.click(self.zao_btn_loc)\n <mask token>\n <mask token>\n\n def click_find(self):\n self.click(self.find_loc)\n <mask token>\n\n def click_title_btn(self):\n self.click(self.title_btn_loc)\n <mask token>\n <mask token>\n\n def click_helper(self):\n self.click(self.helper_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_small_name(self):\n return self.find_element(self.small_name_loc)\n\n def click_small_name(self):\n self.click(self.small_name_loc)\n <mask token>\n\n def click_switching_applet_btn(self):\n self.click(self.switching_applet_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def clicks_experience_version_btn(self):\n self.clicks(self.experience_version_btn_loc, -1)\n <mask token>\n\n def element_audition_class_btn(self):\n return self.find_element(self.audition_class_btn_loc)\n\n def click_audition_class_btn(self):\n self.click(self.audition_class_btn_loc)\n <mask token>\n\n def click_wechat_grant_btn(self):\n self.click(self.wechat_grant_btn_loc)\n\n def double_click_wechat_grant(self):\n self.double_click(self.wechat_grant_btn_loc)\n\n def element_wechat_grant_btn(self):\n return self.find_element(self.wechat_grant_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def element_attend_lectures_btn(self):\n return self.find_element(self.attend_lectures_btn_loc)\n <mask token>\n <mask token>\n\n def element_class_btn(self):\n return self.find_element(self.class_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def input_buy_password(self, paw):\n self.send_keys(self.buy_password_loc, paw)\n <mask token>\n <mask 
token>\n <mask token>\n\n def click_success_btn(self):\n self.click(self.success_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def click_add_address_btn(self):\n self.click(self.add_address_btn_loc)\n <mask token>\n\n def input_name_btn(self, name):\n self.send_keys(self.name_loc, name)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def input_detailed_address_btn(self, address):\n self.send_keys(self.detailed_address_btn_loc, address)\n <mask token>\n\n def click_save_btn(self):\n self.click(self.save_btn_loc)\n <mask token>\n\n def click_receive_btn(self):\n self.click(self.receive_btn_loc)\n <mask token>\n <mask token>\n\n def clicks_addressee(self):\n self.clicks(self.addressee_loc, 0)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def element_all_curriculum_btn(self):\n return self.find_element(self.all_curriculum_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def text_my_baby_title(self):\n return self.get_text(self.my_baby_title_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def clicks_new_baby_btn(self, n):\n self.clicks(self.new_baby_btn_loc, n)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def click_baby_bir_btn(self):\n self.click(self.baby_bir_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def click_my_home(self):\n self.click(self.my_home_loc)\n\n def element_my_home(self):\n return self.find_element(self.my_home_loc)\n <mask token>\n\n def click_switch_btn(self):\n self.click(self.switch_btn_loc)\n <mask token>\n\n def click_baby_bri(self):\n self.click(self.baby_bri_loc)\n <mask token>\n\n def clicks_class_img(self):\n self.clicks(self.class_img_btn_loc, 0)\n <mask token>\n\n def click_collection_btn(self):\n self.click(self.collection_btn_loc)\n\n def 
clicks_collection_btn(self, n):\n self.clicks(self.collection_btn_loc, n)\n <mask token>\n <mask token>\n\n def click_write_record_btn(self):\n self.click(self.write_record_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_album_btn(self):\n return self.find_element(self.album_btn_loc)\n <mask token>\n <mask token>\n\n def element_small_video_btn(self):\n return self.find_element(self.small_video_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_record_info(self, data):\n record_info_loc = 'xpath', '//*[contains(@text, \"{}\")]'.format(data)\n record_info = self.find_element(record_info_loc)\n if record_info:\n return True\n else:\n return False\n <mask token>\n <mask token>\n <mask token>\n\n def elements_class_name(self):\n return self.find_elements(self.class_name_loc)\n <mask token>\n\n def click_class2_name(self):\n self.click(self.class_name2_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def elements_choice_album(self):\n return self.find_elements(self.choice_album_loc)\n <mask token>\n\n def click_complete_btn(self):\n self.click(self.complete_btn_loc)\n <mask token>\n\n def click_my_collection_btn(self):\n self.click(self.my_collection_btn_loc)\n <mask token>\n\n def elements_my_collection_english_course_btn(self):\n return self.find_elements(self.my_collection_english_course_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_my_course_btn(self):\n self.click(self.my_course_btn_loc)\n <mask token>\n\n def elements_my_course_buy_btn(self):\n return self.find_elements(self.my_course_buy_btn_loc)\n <mask token>\n\n def click_my_order_btn(self):\n self.click(self.my_order_btn_loc)\n <mask token>\n\n def elements_my_order_card_btn(self):\n return self.find_elements(self.my_order_card_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def elements_my_record_class_btn(self):\n return self.find_elements(self.my_record_class_btn_loc)\n <mask token>\n 
<mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def clicks_reply_code(self, n):\n self.clicks(self.reply_code_loc, n)\n <mask token>\n\n def element_long_code(self):\n return self.find_element(self.long_code_loc)\n\n def click_long_code(self):\n self.click(self.long_code_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def text_class_group(self):\n return self.get_text(self.class_group_loc)\n <mask token>\n\n def element_add_group_chat(self):\n return self.find_element(self.add_group_chat_loc)\n <mask token>\n\n def elements_reply_8(self):\n return self.find_elements(self.reply_8_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def click_more_games_btn(self):\n self.click(self.more_games_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def text_start_fingerprint_buy(self):\n return self.get_text(self.start_fingerprint_buy_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_cancel_btn(self):\n self.click(self.cancel_btn_loc)\n <mask token>\n\n def element_usd_password(self):\n return self.find_element(self.usd_password_loc)\n <mask token>\n <mask token>\n\n def element_password_error(self):\n return self.find_element(self.password_error_loc)\n <mask token>\n\n def click_again_btn(self):\n self.click(self.again_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_typewriting_finish_btn(self):\n return self.find_element(self.typewriting_finish_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_clock_btn(self):\n return self.find_element(self.clock_btn_loc)\n <mask token>\n\n def element_no_clock_btn(self):\n return self.find_element(self.no_clock_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_upload_card_btn(self):\n self.click(self.upload_card_btn_loc)\n <mask token>\n\n def 
click_again_upload_card_btn(self):\n self.click(self.again_upload_card_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def element_copy_format_btn(self):\n return self.find_element(self.copy_format_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_upload_btn(self):\n self.click(self.upload_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_reset_img_btn(self):\n self.click(self.reset_img_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_reminder_btn(self):\n return self.find_element(self.reminder_btn_loc)\n <mask token>\n\n def element_page_expired(self):\n return self.find_element(self.page_expired_loc)\n <mask token>\n\n def click_x_btn(self):\n self.click(self.x_btn_loc)\n", "step-2": "<mask token>\n\n\nclass Zaojiaopage(Crazy):\n <mask token>\n <mask token>\n\n def click_zao(self):\n self.click(self.zao_btn_loc)\n <mask token>\n <mask token>\n\n def click_find(self):\n self.click(self.find_loc)\n <mask token>\n\n def click_title_btn(self):\n self.click(self.title_btn_loc)\n <mask token>\n <mask token>\n\n def click_helper(self):\n self.click(self.helper_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_small_name(self):\n return self.find_element(self.small_name_loc)\n\n def click_small_name(self):\n self.click(self.small_name_loc)\n <mask token>\n\n def click_switching_applet_btn(self):\n self.click(self.switching_applet_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def clicks_experience_version_btn(self):\n self.clicks(self.experience_version_btn_loc, -1)\n <mask token>\n\n def element_audition_class_btn(self):\n return self.find_element(self.audition_class_btn_loc)\n\n def click_audition_class_btn(self):\n self.click(self.audition_class_btn_loc)\n <mask token>\n\n def click_wechat_grant_btn(self):\n self.click(self.wechat_grant_btn_loc)\n\n def 
double_click_wechat_grant(self):\n self.double_click(self.wechat_grant_btn_loc)\n\n def element_wechat_grant_btn(self):\n return self.find_element(self.wechat_grant_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_mouth_btn(self):\n self.click(self.month_btn_loc)\n <mask token>\n\n def click_sure_btn(self):\n self.click(self.sure_btn_loc)\n <mask token>\n\n def class_info_btn(self):\n self.click(self.class_info_loc)\n <mask token>\n\n def element_attend_lectures_btn(self):\n return self.find_element(self.attend_lectures_btn_loc)\n <mask token>\n <mask token>\n\n def element_class_btn(self):\n return self.find_element(self.class_btn_loc)\n <mask token>\n <mask token>\n\n def element_get_to_know_btn(self):\n return self.find_element(self.get_to_know_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def input_buy_password(self, paw):\n self.send_keys(self.buy_password_loc, paw)\n <mask token>\n <mask token>\n <mask token>\n\n def click_success_btn(self):\n self.click(self.success_btn_loc)\n <mask token>\n\n def click_check_address_btn(self):\n self.click(self.check_address_btn_loc)\n <mask token>\n <mask token>\n\n def click_add_address_btn(self):\n self.click(self.add_address_btn_loc)\n <mask token>\n\n def input_name_btn(self, name):\n self.send_keys(self.name_loc, name)\n <mask token>\n\n def input_phone_btn(self, phone):\n self.send_keys(self.phone_btn_loc, phone)\n <mask token>\n <mask token>\n <mask token>\n\n def input_detailed_address_btn(self, address):\n self.send_keys(self.detailed_address_btn_loc, address)\n <mask token>\n\n def click_save_btn(self):\n self.click(self.save_btn_loc)\n <mask token>\n\n def click_receive_btn(self):\n self.click(self.receive_btn_loc)\n <mask token>\n <mask token>\n\n def clicks_addressee(self):\n self.clicks(self.addressee_loc, 0)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def element_all_curriculum_btn(self):\n return self.find_element(self.all_curriculum_btn_loc)\n <mask 
token>\n <mask token>\n <mask token>\n <mask token>\n\n def element_my_btn(self):\n return self.find_element(self.my_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def text_my_baby_title(self):\n return self.get_text(self.my_baby_title_loc)\n <mask token>\n <mask token>\n\n def element_new_baby_btn(self):\n return self.find_element(self.new_baby_btn_loc)\n <mask token>\n\n def clicks_new_baby_btn(self, n):\n self.clicks(self.new_baby_btn_loc, n)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def click_baby_bir_btn(self):\n self.click(self.baby_bir_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def click_my_home(self):\n self.click(self.my_home_loc)\n\n def element_my_home(self):\n return self.find_element(self.my_home_loc)\n <mask token>\n\n def click_switch_btn(self):\n self.click(self.switch_btn_loc)\n <mask token>\n\n def click_baby_bri(self):\n self.click(self.baby_bri_loc)\n <mask token>\n\n def clicks_class_img(self):\n self.clicks(self.class_img_btn_loc, 0)\n <mask token>\n\n def click_collection_btn(self):\n self.click(self.collection_btn_loc)\n\n def clicks_collection_btn(self, n):\n self.clicks(self.collection_btn_loc, n)\n <mask token>\n <mask token>\n\n def click_write_record_btn(self):\n self.click(self.write_record_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_album_btn(self):\n return self.find_element(self.album_btn_loc)\n <mask token>\n <mask token>\n\n def element_small_video_btn(self):\n return self.find_element(self.small_video_btn_loc)\n <mask token>\n <mask token>\n\n def clicks_release_btn(self, n):\n self.clicks(self.release_btn_loc, n)\n\n def element_record_info(self, data):\n record_info_loc = 'xpath', '//*[contains(@text, \"{}\")]'.format(data)\n record_info = self.find_element(record_info_loc)\n if record_info:\n return True\n else:\n return False\n <mask token>\n <mask token>\n <mask token>\n\n def 
elements_class_name(self):\n return self.find_elements(self.class_name_loc)\n <mask token>\n\n def click_class2_name(self):\n self.click(self.class_name2_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def clicks_choice_album(self, n):\n self.clicks(self.choice_album_loc, n)\n\n def elements_choice_album(self):\n return self.find_elements(self.choice_album_loc)\n <mask token>\n\n def click_complete_btn(self):\n self.click(self.complete_btn_loc)\n <mask token>\n\n def click_my_collection_btn(self):\n self.click(self.my_collection_btn_loc)\n <mask token>\n\n def elements_my_collection_english_course_btn(self):\n return self.find_elements(self.my_collection_english_course_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_my_course_btn(self):\n self.click(self.my_course_btn_loc)\n <mask token>\n\n def elements_my_course_buy_btn(self):\n return self.find_elements(self.my_course_buy_btn_loc)\n <mask token>\n\n def click_my_order_btn(self):\n self.click(self.my_order_btn_loc)\n <mask token>\n\n def elements_my_order_card_btn(self):\n return self.find_elements(self.my_order_card_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def elements_my_record_class_btn(self):\n return self.find_elements(self.my_record_class_btn_loc)\n <mask token>\n <mask token>\n\n def click_back_btn(self):\n self.click(self.back_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def click_send(self):\n self.click(self.send_5_loc)\n <mask token>\n <mask token>\n\n def clicks_reply_code(self, n):\n self.clicks(self.reply_code_loc, n)\n <mask token>\n\n def element_long_code(self):\n return self.find_element(self.long_code_loc)\n\n def click_long_code(self):\n self.click(self.long_code_loc)\n <mask token>\n\n def click_discern_code(self):\n self.click(self.discern_code_loc)\n <mask token>\n\n def text_class_group(self):\n return 
self.get_text(self.class_group_loc)\n <mask token>\n\n def element_add_group_chat(self):\n return self.find_element(self.add_group_chat_loc)\n <mask token>\n\n def elements_reply_8(self):\n return self.find_elements(self.reply_8_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def click_more_games_btn(self):\n self.click(self.more_games_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def text_start_fingerprint_buy(self):\n return self.get_text(self.start_fingerprint_buy_loc)\n <mask token>\n\n def click_no_more_reminder_btn(self):\n self.click(self.no_more_reminder_btn_loc)\n <mask token>\n\n def click_cancel_btn(self):\n self.click(self.cancel_btn_loc)\n <mask token>\n\n def element_usd_password(self):\n return self.find_element(self.usd_password_loc)\n <mask token>\n <mask token>\n\n def element_password_error(self):\n return self.find_element(self.password_error_loc)\n <mask token>\n\n def click_again_btn(self):\n self.click(self.again_btn_loc)\n <mask token>\n\n def text_payment(self):\n return self.get_text(self.payment_loc)\n <mask token>\n\n def element_typewriting_finish_btn(self):\n return self.find_element(self.typewriting_finish_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_clock_btn(self):\n return self.find_element(self.clock_btn_loc)\n <mask token>\n\n def element_no_clock_btn(self):\n return self.find_element(self.no_clock_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_upload_card_btn(self):\n self.click(self.upload_card_btn_loc)\n <mask token>\n\n def click_again_upload_card_btn(self):\n self.click(self.again_upload_card_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_copy_text_btn(self):\n self.click(self.copy_text_btn_loc)\n <mask token>\n\n def element_copy_format_btn(self):\n return self.find_element(self.copy_format_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_upload_btn(self):\n 
self.click(self.upload_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_reset_img_btn(self):\n self.click(self.reset_img_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_reminder_btn(self):\n return self.find_element(self.reminder_btn_loc)\n <mask token>\n\n def element_page_expired(self):\n return self.find_element(self.page_expired_loc)\n <mask token>\n\n def click_x_btn(self):\n self.click(self.x_btn_loc)\n", "step-3": "<mask token>\n\n\nclass Zaojiaopage(Crazy):\n <mask token>\n <mask token>\n\n def click_zao(self):\n self.click(self.zao_btn_loc)\n\n def element_zao(self):\n return self.find_element(self.zao_btn_loc)\n <mask token>\n\n def click_find(self):\n self.click(self.find_loc)\n <mask token>\n\n def click_title_btn(self):\n self.click(self.title_btn_loc)\n <mask token>\n <mask token>\n\n def click_helper(self):\n self.click(self.helper_loc)\n <mask token>\n\n def click_small_help_btn(self):\n self.click(self.small_help_btn_loc)\n <mask token>\n\n def element_small_name(self):\n return self.find_element(self.small_name_loc)\n\n def click_small_name(self):\n self.click(self.small_name_loc)\n <mask token>\n\n def click_switching_applet_btn(self):\n self.click(self.switching_applet_btn_loc)\n <mask token>\n\n def click_delete_small_btn(self):\n self.click(self.delete_small_btn_loc)\n <mask token>\n\n def element_edition_btn(self):\n return self.find_element(self.edition_btn_loc)\n <mask token>\n\n def element_delete_small1_btn(self):\n return self.find_element(self.delete_small1_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def clicks_experience_version_btn(self):\n self.clicks(self.experience_version_btn_loc, -1)\n <mask token>\n\n def element_audition_class_btn(self):\n return self.find_element(self.audition_class_btn_loc)\n\n def click_audition_class_btn(self):\n self.click(self.audition_class_btn_loc)\n <mask token>\n\n def click_wechat_grant_btn(self):\n self.click(self.wechat_grant_btn_loc)\n\n def 
double_click_wechat_grant(self):\n self.double_click(self.wechat_grant_btn_loc)\n\n def element_wechat_grant_btn(self):\n return self.find_element(self.wechat_grant_btn_loc)\n <mask token>\n\n def click_allow_btn(self):\n self.click(self.allow_btn_loc)\n <mask token>\n\n def click_mouth_btn(self):\n self.click(self.month_btn_loc)\n <mask token>\n\n def click_sure_btn(self):\n self.click(self.sure_btn_loc)\n <mask token>\n\n def class_info_btn(self):\n self.click(self.class_info_loc)\n <mask token>\n\n def element_attend_lectures_btn(self):\n return self.find_element(self.attend_lectures_btn_loc)\n <mask token>\n <mask token>\n\n def element_class_btn(self):\n return self.find_element(self.class_btn_loc)\n <mask token>\n\n def click_get_to_know_btn(self):\n self.click(self.get_to_know_btn_loc)\n\n def element_get_to_know_btn(self):\n return self.find_element(self.get_to_know_btn_loc)\n <mask token>\n\n def click_sure_buy_btn(self):\n self.click(self.sure_buy_btn_loc)\n <mask token>\n\n def input_buy_password(self, paw):\n self.send_keys(self.buy_password_loc, paw)\n <mask token>\n\n def text_buy_money(self):\n return self.get_text(self.check_buy_money_loc)\n <mask token>\n\n def click_success_btn(self):\n self.click(self.success_btn_loc)\n <mask token>\n\n def click_check_address_btn(self):\n self.click(self.check_address_btn_loc)\n <mask token>\n <mask token>\n\n def click_add_address_btn(self):\n self.click(self.add_address_btn_loc)\n <mask token>\n\n def input_name_btn(self, name):\n self.send_keys(self.name_loc, name)\n <mask token>\n\n def input_phone_btn(self, phone):\n self.send_keys(self.phone_btn_loc, phone)\n <mask token>\n <mask token>\n <mask token>\n\n def input_detailed_address_btn(self, address):\n self.send_keys(self.detailed_address_btn_loc, address)\n <mask token>\n\n def click_save_btn(self):\n self.click(self.save_btn_loc)\n <mask token>\n\n def click_receive_btn(self):\n self.click(self.receive_btn_loc)\n <mask token>\n\n def 
elements_addressee(self):\n return self.find_elements(self.addressee_loc)\n\n def clicks_addressee(self):\n self.clicks(self.addressee_loc, 0)\n <mask token>\n <mask token>\n\n def click_know(self):\n self.click(self.know_btn_loc)\n <mask token>\n\n def element_all_curriculum_btn(self):\n return self.find_element(self.all_curriculum_btn_loc)\n\n def click_all_curriculum_btn(self):\n self.click(self.all_curriculum_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_my_btn(self):\n return self.find_element(self.my_btn_loc)\n <mask token>\n <mask token>\n\n def click_my_baby(self):\n self.click(self.my_baby_btn_loc)\n <mask token>\n\n def text_my_baby_title(self):\n return self.get_text(self.my_baby_title_loc)\n <mask token>\n <mask token>\n\n def element_new_baby_btn(self):\n return self.find_element(self.new_baby_btn_loc)\n\n def click_new_baby_btn(self):\n self.click(self.new_baby_btn_loc)\n\n def clicks_new_baby_btn(self, n):\n self.clicks(self.new_baby_btn_loc, n)\n <mask token>\n\n def element_get_set(self):\n return self.find_element(self.get_set_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def inputs_baby_name(self, name, n):\n self.sends_keys(self.baby_name_loc, name, n)\n <mask token>\n\n def click_baby_bir_btn(self):\n self.click(self.baby_bir_btn_loc)\n <mask token>\n\n def click_finish_btn(self):\n self.click(self.finish_btn_loc)\n <mask token>\n <mask token>\n\n def click_my_home(self):\n self.click(self.my_home_loc)\n\n def element_my_home(self):\n return self.find_element(self.my_home_loc)\n <mask token>\n\n def click_switch_btn(self):\n self.click(self.switch_btn_loc)\n <mask token>\n\n def click_baby_bri(self):\n self.click(self.baby_bri_loc)\n <mask token>\n\n def clicks_class_img(self):\n self.clicks(self.class_img_btn_loc, 0)\n <mask token>\n\n def click_collection_btn(self):\n self.click(self.collection_btn_loc)\n\n def clicks_collection_btn(self, n):\n self.clicks(self.collection_btn_loc, n)\n <mask token>\n <mask 
token>\n\n def click_write_record_btn(self):\n self.click(self.write_record_btn_loc)\n\n def clicks_write_record_btn(self, n):\n self.clicks(self.write_record_btn_loc, n)\n <mask token>\n\n def click_album_btn(self):\n self.click(self.album_btn_loc)\n\n def element_album_btn(self):\n return self.find_element(self.album_btn_loc)\n <mask token>\n\n def click_small_video_btn(self):\n self.click(self.small_video_btn_loc)\n\n def element_small_video_btn(self):\n return self.find_element(self.small_video_btn_loc)\n <mask token>\n <mask token>\n\n def clicks_release_btn(self, n):\n self.clicks(self.release_btn_loc, n)\n\n def element_record_info(self, data):\n record_info_loc = 'xpath', '//*[contains(@text, \"{}\")]'.format(data)\n record_info = self.find_element(record_info_loc)\n if record_info:\n return True\n else:\n return False\n <mask token>\n <mask token>\n <mask token>\n\n def elements_class_name(self):\n return self.find_elements(self.class_name_loc)\n <mask token>\n\n def click_class2_name(self):\n self.click(self.class_name2_loc)\n <mask token>\n <mask token>\n\n def input_write_text(self, text):\n self.send_keys(self.write_text_loc, text)\n <mask token>\n <mask token>\n\n def clicks_choice_album(self, n):\n self.clicks(self.choice_album_loc, n)\n\n def elements_choice_album(self):\n return self.find_elements(self.choice_album_loc)\n <mask token>\n\n def click_complete_btn(self):\n self.click(self.complete_btn_loc)\n <mask token>\n\n def click_my_collection_btn(self):\n self.click(self.my_collection_btn_loc)\n <mask token>\n\n def elements_my_collection_english_course_btn(self):\n return self.find_elements(self.my_collection_english_course_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_my_course_btn(self):\n self.click(self.my_course_btn_loc)\n <mask token>\n\n def elements_my_course_buy_btn(self):\n return self.find_elements(self.my_course_buy_btn_loc)\n <mask token>\n\n def click_my_order_btn(self):\n self.click(self.my_order_btn_loc)\n 
<mask token>\n\n def elements_my_order_card_btn(self):\n return self.find_elements(self.my_order_card_btn_loc)\n <mask token>\n\n def click_my_record_btn(self):\n self.click(self.my_record_btn_loc)\n <mask token>\n\n def elements_my_record_class_btn(self):\n return self.find_elements(self.my_record_class_btn_loc)\n <mask token>\n\n def element_back_btn(self):\n return self.find_element(self.back_btn_loc)\n\n def click_back_btn(self):\n self.click(self.back_btn_loc)\n <mask token>\n\n def click_reply_5(self):\n self.click(self.reply_5_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def input_reply_5(self, num):\n self.send_keys(self.reply_input_5_loc, num)\n <mask token>\n\n def click_send(self):\n self.click(self.send_5_loc)\n <mask token>\n <mask token>\n\n def clicks_reply_code(self, n):\n self.clicks(self.reply_code_loc, n)\n <mask token>\n\n def element_long_code(self):\n return self.find_element(self.long_code_loc)\n\n def click_long_code(self):\n self.click(self.long_code_loc)\n <mask token>\n\n def click_discern_code(self):\n self.click(self.discern_code_loc)\n <mask token>\n\n def text_class_group(self):\n return self.get_text(self.class_group_loc)\n <mask token>\n\n def element_add_group_chat(self):\n return self.find_element(self.add_group_chat_loc)\n <mask token>\n\n def elements_reply_8(self):\n return self.find_elements(self.reply_8_loc)\n <mask token>\n\n def element_parent_btn(self):\n return self.find_element(self.parent_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def click_more_games_btn(self):\n self.click(self.more_games_btn_loc)\n <mask token>\n\n def click_look_all_btn(self):\n self.click(self.look_all_btn_loc)\n\n def element_look_all_btn(self):\n return self.find_elements(self.look_all_btn_loc)\n <mask token>\n\n def text_start_fingerprint_buy(self):\n return self.get_text(self.start_fingerprint_buy_loc)\n <mask token>\n\n def click_no_more_reminder_btn(self):\n 
self.click(self.no_more_reminder_btn_loc)\n <mask token>\n\n def click_cancel_btn(self):\n self.click(self.cancel_btn_loc)\n <mask token>\n\n def element_usd_password(self):\n return self.find_element(self.usd_password_loc)\n <mask token>\n <mask token>\n\n def element_password_error(self):\n return self.find_element(self.password_error_loc)\n <mask token>\n\n def click_again_btn(self):\n self.click(self.again_btn_loc)\n <mask token>\n\n def text_payment(self):\n return self.get_text(self.payment_loc)\n <mask token>\n\n def element_typewriting_finish_btn(self):\n return self.find_element(self.typewriting_finish_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def element_clock_btn(self):\n return self.find_element(self.clock_btn_loc)\n <mask token>\n\n def element_no_clock_btn(self):\n return self.find_element(self.no_clock_btn_loc)\n <mask token>\n <mask token>\n <mask token>\n\n def click_upload_card_btn(self):\n self.click(self.upload_card_btn_loc)\n <mask token>\n\n def click_again_upload_card_btn(self):\n self.click(self.again_upload_card_btn_loc)\n <mask token>\n\n def click_save_img_btn(self):\n self.click(self.save_img_btn_loc)\n <mask token>\n\n def click_copy_text_btn(self):\n self.click(self.copy_text_btn_loc)\n <mask token>\n\n def element_copy_format_btn(self):\n return self.find_element(self.copy_format_btn_loc)\n <mask token>\n\n def click_card_go_btn(self):\n self.click(self.card_go_btn_loc)\n <mask token>\n\n def click_upload_btn(self):\n self.click(self.upload_btn_loc)\n <mask token>\n\n def element_today_card_btn(self):\n return self.find_element(self.today_card_btn_loc)\n <mask token>\n\n def click_reset_img_btn(self):\n self.click(self.reset_img_btn_loc)\n <mask token>\n\n def element_generated_loading(self):\n return self.find_element(self.generated_loading_loc)\n <mask token>\n\n def element_reminder_btn(self):\n return self.find_element(self.reminder_btn_loc)\n <mask token>\n\n def element_page_expired(self):\n return 
self.find_element(self.page_expired_loc)\n <mask token>\n\n def click_x_btn(self):\n self.click(self.x_btn_loc)\n", "step-4": "<mask token>\n\n\nclass Zaojiaopage(Crazy):\n <mask token>\n <mask token>\n\n def click_zao(self):\n self.click(self.zao_btn_loc)\n\n def element_zao(self):\n return self.find_element(self.zao_btn_loc)\n <mask token>\n\n def click_find(self):\n self.click(self.find_loc)\n <mask token>\n\n def click_title_btn(self):\n self.click(self.title_btn_loc)\n <mask token>\n\n def element_helper(self):\n return self.find_element(self.helper_loc)\n\n def click_helper(self):\n self.click(self.helper_loc)\n <mask token>\n\n def click_small_help_btn(self):\n self.click(self.small_help_btn_loc)\n <mask token>\n\n def element_small_name(self):\n return self.find_element(self.small_name_loc)\n\n def click_small_name(self):\n self.click(self.small_name_loc)\n <mask token>\n\n def click_switching_applet_btn(self):\n self.click(self.switching_applet_btn_loc)\n <mask token>\n\n def click_delete_small_btn(self):\n self.click(self.delete_small_btn_loc)\n <mask token>\n\n def element_edition_btn(self):\n return self.find_element(self.edition_btn_loc)\n <mask token>\n\n def element_delete_small1_btn(self):\n return self.find_element(self.delete_small1_btn_loc)\n <mask token>\n\n def click_version_btn(self):\n self.click(self.version_btn_loc)\n <mask token>\n\n def clicks_experience_version_btn(self):\n self.clicks(self.experience_version_btn_loc, -1)\n <mask token>\n\n def element_audition_class_btn(self):\n return self.find_element(self.audition_class_btn_loc)\n\n def click_audition_class_btn(self):\n self.click(self.audition_class_btn_loc)\n <mask token>\n\n def click_wechat_grant_btn(self):\n self.click(self.wechat_grant_btn_loc)\n\n def double_click_wechat_grant(self):\n self.double_click(self.wechat_grant_btn_loc)\n\n def element_wechat_grant_btn(self):\n return self.find_element(self.wechat_grant_btn_loc)\n <mask token>\n\n def click_allow_btn(self):\n 
self.click(self.allow_btn_loc)\n <mask token>\n\n def click_mouth_btn(self):\n self.click(self.month_btn_loc)\n <mask token>\n\n def click_sure_btn(self):\n self.click(self.sure_btn_loc)\n <mask token>\n\n def class_info_btn(self):\n self.click(self.class_info_loc)\n <mask token>\n\n def element_attend_lectures_btn(self):\n return self.find_element(self.attend_lectures_btn_loc)\n\n def click_attend_lectures_btn(self):\n self.click(self.attend_lectures_btn_loc)\n <mask token>\n\n def element_class_btn(self):\n return self.find_element(self.class_btn_loc)\n <mask token>\n\n def click_get_to_know_btn(self):\n self.click(self.get_to_know_btn_loc)\n\n def element_get_to_know_btn(self):\n return self.find_element(self.get_to_know_btn_loc)\n <mask token>\n\n def click_sure_buy_btn(self):\n self.click(self.sure_buy_btn_loc)\n <mask token>\n\n def input_buy_password(self, paw):\n self.send_keys(self.buy_password_loc, paw)\n <mask token>\n\n def text_buy_money(self):\n return self.get_text(self.check_buy_money_loc)\n <mask token>\n\n def click_success_btn(self):\n self.click(self.success_btn_loc)\n <mask token>\n\n def click_check_address_btn(self):\n self.click(self.check_address_btn_loc)\n\n def element_check_address_btn(self):\n return self.find_element(self.check_address_btn_loc)\n <mask token>\n\n def click_add_address_btn(self):\n self.click(self.add_address_btn_loc)\n <mask token>\n\n def input_name_btn(self, name):\n self.send_keys(self.name_loc, name)\n <mask token>\n\n def input_phone_btn(self, phone):\n self.send_keys(self.phone_btn_loc, phone)\n <mask token>\n\n def click_region_btn(self):\n self.click(self.region_btn_loc)\n <mask token>\n\n def input_detailed_address_btn(self, address):\n self.send_keys(self.detailed_address_btn_loc, address)\n <mask token>\n\n def click_save_btn(self):\n self.click(self.save_btn_loc)\n <mask token>\n\n def click_receive_btn(self):\n self.click(self.receive_btn_loc)\n <mask token>\n\n def elements_addressee(self):\n return 
self.find_elements(self.addressee_loc)\n\n def clicks_addressee(self):\n self.clicks(self.addressee_loc, 0)\n <mask token>\n\n def element_know(self):\n return self.find_element(self.know_btn_loc)\n\n def click_know(self):\n self.click(self.know_btn_loc)\n <mask token>\n\n def element_all_curriculum_btn(self):\n return self.find_element(self.all_curriculum_btn_loc)\n\n def click_all_curriculum_btn(self):\n self.click(self.all_curriculum_btn_loc)\n <mask token>\n\n def element_curriculum_date_btn(self):\n return self.find_element(self.curriculum_date_btn_loc)\n <mask token>\n\n def element_my_btn(self):\n return self.find_element(self.my_btn_loc)\n\n def click_my(self):\n self.click(self.my_btn_loc)\n <mask token>\n\n def click_my_baby(self):\n self.click(self.my_baby_btn_loc)\n <mask token>\n\n def text_my_baby_title(self):\n return self.get_text(self.my_baby_title_loc)\n\n def elements_title(self):\n return self.find_elements(self.my_baby_title_loc)\n <mask token>\n\n def element_new_baby_btn(self):\n return self.find_element(self.new_baby_btn_loc)\n\n def click_new_baby_btn(self):\n self.click(self.new_baby_btn_loc)\n\n def clicks_new_baby_btn(self, n):\n self.clicks(self.new_baby_btn_loc, n)\n <mask token>\n\n def element_get_set(self):\n return self.find_element(self.get_set_loc)\n <mask token>\n\n def click_next(self):\n self.click(self.next_btn_loc)\n <mask token>\n\n def inputs_baby_name(self, name, n):\n self.sends_keys(self.baby_name_loc, name, n)\n <mask token>\n\n def click_baby_bir_btn(self):\n self.click(self.baby_bir_btn_loc)\n <mask token>\n\n def click_finish_btn(self):\n self.click(self.finish_btn_loc)\n\n def clicks_finish_btn(self, n):\n self.clicks(self.finish_btn_loc, n)\n <mask token>\n\n def click_my_home(self):\n self.click(self.my_home_loc)\n\n def element_my_home(self):\n return self.find_element(self.my_home_loc)\n <mask token>\n\n def click_switch_btn(self):\n self.click(self.switch_btn_loc)\n <mask token>\n\n def click_baby_bri(self):\n 
self.click(self.baby_bri_loc)\n <mask token>\n\n def clicks_class_img(self):\n self.clicks(self.class_img_btn_loc, 0)\n <mask token>\n\n def click_collection_btn(self):\n self.click(self.collection_btn_loc)\n\n def clicks_collection_btn(self, n):\n self.clicks(self.collection_btn_loc, n)\n\n def element_collection_btn(self):\n return self.find_element(self.collection_btn_loc)\n <mask token>\n\n def click_write_record_btn(self):\n self.click(self.write_record_btn_loc)\n\n def clicks_write_record_btn(self, n):\n self.clicks(self.write_record_btn_loc, n)\n <mask token>\n\n def click_album_btn(self):\n self.click(self.album_btn_loc)\n\n def element_album_btn(self):\n return self.find_element(self.album_btn_loc)\n <mask token>\n\n def click_small_video_btn(self):\n self.click(self.small_video_btn_loc)\n\n def element_small_video_btn(self):\n return self.find_element(self.small_video_btn_loc)\n <mask token>\n\n def click_release_btn(self):\n self.click(self.release_btn_loc)\n\n def clicks_release_btn(self, n):\n self.clicks(self.release_btn_loc, n)\n\n def element_record_info(self, data):\n record_info_loc = 'xpath', '//*[contains(@text, \"{}\")]'.format(data)\n record_info = self.find_element(record_info_loc)\n if record_info:\n return True\n else:\n return False\n <mask token>\n\n def click_class_name(self):\n self.click(self.class_name_loc)\n\n def clicks_class_name(self, n):\n self.clicks(self.class_name_loc, n)\n\n def elements_class_name(self):\n return self.find_elements(self.class_name_loc)\n <mask token>\n\n def click_class2_name(self):\n self.click(self.class_name2_loc)\n\n def clicks_class2_name(self, n):\n self.clicks(self.class_name2_loc, n)\n <mask token>\n\n def input_write_text(self, text):\n self.send_keys(self.write_text_loc, text)\n\n def inputs_write_text(self, text, n):\n self.sends_keys(self.write_text_loc, text, n)\n <mask token>\n\n def clicks_choice_album(self, n):\n self.clicks(self.choice_album_loc, n)\n\n def elements_choice_album(self):\n 
return self.find_elements(self.choice_album_loc)\n <mask token>\n\n def click_complete_btn(self):\n self.click(self.complete_btn_loc)\n <mask token>\n\n def click_my_collection_btn(self):\n self.click(self.my_collection_btn_loc)\n <mask token>\n\n def elements_my_collection_english_course_btn(self):\n return self.find_elements(self.my_collection_english_course_btn_loc)\n <mask token>\n\n def elements_my_collection_game_course_btn(self):\n return self.find_elements(self.my_collection_game_course_btn_loc)\n <mask token>\n\n def click_my_course_btn(self):\n self.click(self.my_course_btn_loc)\n <mask token>\n\n def elements_my_course_buy_btn(self):\n return self.find_elements(self.my_course_buy_btn_loc)\n <mask token>\n\n def click_my_order_btn(self):\n self.click(self.my_order_btn_loc)\n <mask token>\n\n def elements_my_order_card_btn(self):\n return self.find_elements(self.my_order_card_btn_loc)\n <mask token>\n\n def click_my_record_btn(self):\n self.click(self.my_record_btn_loc)\n <mask token>\n\n def elements_my_record_class_btn(self):\n return self.find_elements(self.my_record_class_btn_loc)\n <mask token>\n\n def element_back_btn(self):\n return self.find_element(self.back_btn_loc)\n\n def click_back_btn(self):\n self.click(self.back_btn_loc)\n <mask token>\n\n def click_reply_5(self):\n self.click(self.reply_5_loc)\n\n def elements_reply_5(self):\n return self.find_elements(self.reply_5_loc)\n <mask token>\n\n def click_add_to_btn(self):\n self.click(self.add_to_btn_loc)\n <mask token>\n\n def input_reply_5(self, num):\n self.send_keys(self.reply_input_5_loc, num)\n <mask token>\n\n def click_send(self):\n self.click(self.send_5_loc)\n <mask token>\n\n def elements_reply_code(self):\n return self.find_elements(self.reply_code_loc)\n\n def clicks_reply_code(self, n):\n self.clicks(self.reply_code_loc, n)\n <mask token>\n\n def element_long_code(self):\n return self.find_element(self.long_code_loc)\n\n def click_long_code(self):\n self.click(self.long_code_loc)\n 
<mask token>\n\n def click_discern_code(self):\n self.click(self.discern_code_loc)\n <mask token>\n\n def text_class_group(self):\n return self.get_text(self.class_group_loc)\n <mask token>\n\n def element_add_group_chat(self):\n return self.find_element(self.add_group_chat_loc)\n <mask token>\n\n def elements_reply_8(self):\n return self.find_elements(self.reply_8_loc)\n <mask token>\n\n def element_parent_btn(self):\n return self.find_element(self.parent_btn_loc)\n <mask token>\n\n def elements_info_btn(self):\n return self.find_elements(self.info_btn_loc)\n\n def clicks_info_btn(self, n):\n self.clicks(self.info_btn_loc, n)\n <mask token>\n\n def click_more_games_btn(self):\n self.click(self.more_games_btn_loc)\n <mask token>\n\n def click_look_all_btn(self):\n self.click(self.look_all_btn_loc)\n\n def element_look_all_btn(self):\n return self.find_elements(self.look_all_btn_loc)\n <mask token>\n\n def text_start_fingerprint_buy(self):\n return self.get_text(self.start_fingerprint_buy_loc)\n <mask token>\n\n def click_no_more_reminder_btn(self):\n self.click(self.no_more_reminder_btn_loc)\n <mask token>\n\n def click_cancel_btn(self):\n self.click(self.cancel_btn_loc)\n <mask token>\n\n def element_usd_password(self):\n return self.find_element(self.usd_password_loc)\n\n def click_usd_password(self):\n self.click(self.usd_password_loc)\n <mask token>\n\n def element_password_error(self):\n return self.find_element(self.password_error_loc)\n <mask token>\n\n def click_again_btn(self):\n self.click(self.again_btn_loc)\n <mask token>\n\n def text_payment(self):\n return self.get_text(self.payment_loc)\n <mask token>\n\n def element_typewriting_finish_btn(self):\n return self.find_element(self.typewriting_finish_btn_loc)\n\n def click_typewriting_finish_btn(self):\n self.click(self.typewriting_finish_btn_loc)\n <mask token>\n\n def click_clock_btn(self):\n self.click(self.clock_btn_loc)\n\n def element_clock_btn(self):\n return 
self.find_element(self.clock_btn_loc)\n <mask token>\n\n def element_no_clock_btn(self):\n return self.find_element(self.no_clock_btn_loc)\n <mask token>\n\n def click_get_card_btn(self):\n self.click(self.get_card_btn_loc)\n <mask token>\n\n def click_upload_card_btn(self):\n self.click(self.upload_card_btn_loc)\n <mask token>\n\n def click_again_upload_card_btn(self):\n self.click(self.again_upload_card_btn_loc)\n <mask token>\n\n def click_save_img_btn(self):\n self.click(self.save_img_btn_loc)\n <mask token>\n\n def click_copy_text_btn(self):\n self.click(self.copy_text_btn_loc)\n <mask token>\n\n def element_copy_format_btn(self):\n return self.find_element(self.copy_format_btn_loc)\n <mask token>\n\n def click_card_go_btn(self):\n self.click(self.card_go_btn_loc)\n <mask token>\n\n def click_upload_btn(self):\n self.click(self.upload_btn_loc)\n <mask token>\n\n def element_today_card_btn(self):\n return self.find_element(self.today_card_btn_loc)\n <mask token>\n\n def click_reset_img_btn(self):\n self.click(self.reset_img_btn_loc)\n <mask token>\n\n def element_generated_loading(self):\n return self.find_element(self.generated_loading_loc)\n <mask token>\n\n def element_reminder_btn(self):\n return self.find_element(self.reminder_btn_loc)\n <mask token>\n\n def element_page_expired(self):\n return self.find_element(self.page_expired_loc)\n <mask token>\n\n def click_x_btn(self):\n self.click(self.x_btn_loc)\n", "step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# @Time : 2019/4/14 14:31\n# @Author : lixiaofeng\n# @File : page_zaojiao.py\n# @Software: PyCharm\n# @desc :\n\nfrom common.basics import Crazy\n\n\nclass Zaojiaopage(Crazy):\n \"\"\"早教小程序\"\"\"\n\n zao_btn_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/cx\" and @text=\"包妈优选\"]')\n\n # zao_btn_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/cx\" and @text=\"小小包早教\"]')\n\n def click_zao(self):\n self.click(self.zao_btn_loc)\n\n def element_zao(self):\n return 
self.find_element(self.zao_btn_loc)\n\n find_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/d7b\" and @text=\"发现\"]') # 发现按钮\n\n def click_find(self):\n self.click(self.find_loc)\n\n title_btn_loc = ('xpath', '//*[@resource-id=\"android:id/title\" and @text=\"小程序\"]') # 发现页小程序按钮\n\n def click_title_btn(self):\n self.click(self.title_btn_loc)\n\n helper_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/c5\" and @text=\"小程序助手\"]') # 小程序助手\n\n def element_helper(self):\n return self.find_element(self.helper_loc)\n\n def click_helper(self):\n self.click(self.helper_loc)\n\n small_help_btn_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/cx\" and @text=\"小程序助手\"]') # 小程序助手\n\n def click_small_help_btn(self):\n self.click(self.small_help_btn_loc)\n\n small_name_loc = ('xpath', '//*[contains(@text, \"包妈优选\")]') # 包妈优选\n\n def element_small_name(self):\n return self.find_element(self.small_name_loc)\n\n def click_small_name(self):\n self.click(self.small_name_loc)\n\n switching_applet_btn_loc = ('xpath', '//*[contains(@text, \"切换小程序\")]') # 切换小程序\n\n def click_switching_applet_btn(self):\n self.click(self.switching_applet_btn_loc)\n\n delete_small_btn_loc = ('xpath', '//*[contains(@text, \"删除\")]') # 删除小程序按钮\n\n def click_delete_small_btn(self):\n self.click(self.delete_small_btn_loc)\n\n edition_btn_loc = ('xpath', '//*[contains(@text, \"百宝福利Buy\")]')\n\n def element_edition_btn(self):\n return self.find_element(self.edition_btn_loc)\n\n delete_small1_btn_loc = ('xpath', '//*[contains(@text, \"拖动到此处删除\")]')\n\n def element_delete_small1_btn(self):\n return self.find_element(self.delete_small1_btn_loc)\n\n version_btn_loc = ('xpath', '//*[contains(@text, \"版本查看\")]') # 版本查看按钮\n\n def click_version_btn(self):\n self.click(self.version_btn_loc)\n\n experience_version_btn_loc = ('xpath', '//*[contains(@text, \"6.0.09\")]') # 体验版\n\n def clicks_experience_version_btn(self):\n self.clicks(self.experience_version_btn_loc, -1)\n\n audition_class_btn_loc = 
('xpath', '//*[contains(@text, \"0元领取10节试听课\")]') # 领取试听课\n\n def element_audition_class_btn(self):\n return self.find_element(self.audition_class_btn_loc)\n\n def click_audition_class_btn(self):\n self.click(self.audition_class_btn_loc)\n\n wechat_grant_btn_loc = (('xpath', '//*[contains(@text, \"微信授权\") and @class=\"android.widget.Button\" ]')) # 微信授权\n\n def click_wechat_grant_btn(self):\n self.click(self.wechat_grant_btn_loc)\n\n def double_click_wechat_grant(self):\n self.double_click(self.wechat_grant_btn_loc)\n\n def element_wechat_grant_btn(self):\n return self.find_element(self.wechat_grant_btn_loc)\n\n allow_btn_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/st\" and @text=\"允许\"]') # 完成按钮\n\n def click_allow_btn(self):\n self.click(self.allow_btn_loc)\n\n month_btn_loc = ('xpath', '//*[contains(@text, \"2018\")]') # 选择月份\n\n def click_mouth_btn(self):\n self.click(self.month_btn_loc)\n\n sure_btn_loc = ('xpath', '//*[contains(@text, \"确定\")]') # 确定按钮\n\n def click_sure_btn(self):\n self.click(self.sure_btn_loc)\n\n class_info_loc = ('xpath', '//*[contains(@text, \"课程介绍\")]') # 课程介绍\n\n # class_info_loc = ('xpath', '//android.widget.FrameLayout/android.view.ViewGroup[0]') # 课程介绍\n\n def class_info_btn(self):\n self.click(self.class_info_loc)\n\n attend_lectures_btn_loc = ('xpath', '//*[contains(@text, \"立即听课\")]') # 立即听课\n\n def element_attend_lectures_btn(self):\n return self.find_element(self.attend_lectures_btn_loc)\n\n def click_attend_lectures_btn(self):\n self.click(self.attend_lectures_btn_loc)\n\n class_btn_loc = ('xpath', '//*[contains(@text, \"预备课 预备课\")]') # 预备课 预备课\n\n def element_class_btn(self):\n return self.find_element(self.class_btn_loc)\n\n get_to_know_btn_loc = ('xpath', '//*[contains(@text, \"立即了解正式课 \")]') # 立即了解正式课\n\n def click_get_to_know_btn(self):\n self.click(self.get_to_know_btn_loc)\n\n def element_get_to_know_btn(self):\n return self.find_element(self.get_to_know_btn_loc)\n\n sure_buy_btn_loc = ('xpath', 
'//*[contains(@text, \"立即购买\")]') # 立即购买\n\n def click_sure_buy_btn(self):\n self.click(self.sure_buy_btn_loc)\n\n buy_password_loc = ('id', 'com.tencent.mm:id/cfs') # 输入支付密码\n\n def input_buy_password(self, paw):\n self.send_keys(self.buy_password_loc, paw)\n\n check_buy_money_loc = ('id', 'com.tencent.mm:id/dlh') # 获取支付金额\n\n def text_buy_money(self):\n return self.get_text(self.check_buy_money_loc)\n\n success_btn_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/f8o\" and @text=\"完成\"]') # 完成按钮\n\n def click_success_btn(self):\n self.click(self.success_btn_loc)\n\n check_address_btn_loc = ('xpath', '//*[contains(@text, \"收货地址:请选择地址\")]') # 选择收货地址\n\n def click_check_address_btn(self):\n self.click(self.check_address_btn_loc)\n\n def element_check_address_btn(self):\n return self.find_element(self.check_address_btn_loc)\n\n add_address_btn_loc = ('xpath', '//*[contains(@text, \"添加地址\")]') # 添加地址\n\n def click_add_address_btn(self):\n self.click(self.add_address_btn_loc)\n\n name_loc = ('xpath', '//*[contains(@text, \"请输入你的姓名\")]') # 请输入你的姓名\n\n def input_name_btn(self, name):\n self.send_keys(self.name_loc, name)\n\n phone_btn_loc = ('xpath', '//*[contains(@text, \"请填写收件人电话\")]') # 请填写收件人电话\n\n def input_phone_btn(self, phone):\n self.send_keys(self.phone_btn_loc, phone)\n\n region_btn_loc = ('xpath', '//*[contains(@text, \"请输入你所在地区\")]') # 请输入你所在地区\n\n def click_region_btn(self):\n self.click(self.region_btn_loc)\n\n detailed_address_btn_loc = ('xpath', '//*[contains(@text, \"请输入你的详细地址\")]') # 请输入你的详细地址\n\n def input_detailed_address_btn(self, address):\n self.send_keys(self.detailed_address_btn_loc, address)\n\n save_btn_loc = ('xpath', '//*[contains(@text, \"保存\")]') # 保存\n\n def click_save_btn(self):\n self.click(self.save_btn_loc)\n\n receive_btn_loc = ('xpath', '//*[contains(@text, \"立即领取\")]') # 立即领取\n\n def click_receive_btn(self):\n self.click(self.receive_btn_loc)\n\n addressee_loc = ('xpath', '//*[contains(@text, \"收件人:\")]') # 地址列表是否有地址信息\n\n def 
elements_addressee(self):\n return self.find_elements(self.addressee_loc)\n\n def clicks_addressee(self):\n self.clicks(self.addressee_loc, 0)\n\n know_btn_loc = ('xpath', '//*[contains(@text, \"知道了\")]') # 地址列表是否有地址信息\n\n def element_know(self):\n return self.find_element(self.know_btn_loc)\n\n def click_know(self):\n self.click(self.know_btn_loc)\n\n all_curriculum_btn_loc = ('xpath', '//*[contains(@text, \"查看全部课程\")]') # 查看全部课程\n\n def element_all_curriculum_btn(self):\n return self.find_element(self.all_curriculum_btn_loc)\n\n def click_all_curriculum_btn(self):\n self.click(self.all_curriculum_btn_loc)\n\n curriculum_date_btn_loc = ('xpath', '//*[contains(@text, \"2019-0\")]') # 历史推送\n\n def element_curriculum_date_btn(self):\n return self.find_element(self.curriculum_date_btn_loc)\n\n my_btn_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/ct\" and @text=\"我的\"]') # 我的\n\n def element_my_btn(self):\n return self.find_element(self.my_btn_loc)\n\n def click_my(self):\n self.click(self.my_btn_loc)\n\n my_baby_btn_loc = ('xpath', '//*[contains(@text, \"我的宝宝\")]') # 我的宝宝\n\n def click_my_baby(self):\n self.click(self.my_baby_btn_loc)\n\n my_baby_title_loc = ('id', 'com.tencent.mm:id/ox')\n\n def text_my_baby_title(self):\n return self.get_text(self.my_baby_title_loc)\n\n def elements_title(self):\n return self.find_elements(self.my_baby_title_loc)\n\n new_baby_btn_loc = ('xpath', '//*[contains(@text, \"新建宝宝\")]') # 新建宝宝\n\n def element_new_baby_btn(self):\n return self.find_element(self.new_baby_btn_loc)\n\n def click_new_baby_btn(self):\n self.click(self.new_baby_btn_loc)\n\n def clicks_new_baby_btn(self, n):\n self.clicks(self.new_baby_btn_loc, n)\n\n get_set_loc = ('xpath', '//*[contains(@text, \"预备课 预备课\")]') # 新建宝宝\n\n def element_get_set(self):\n return self.find_element(self.get_set_loc)\n\n next_btn_loc = ('xpath', '//*[contains(@text, \"下一步\")]') # 我的宝宝\n\n def click_next(self):\n self.click(self.next_btn_loc)\n\n baby_name_loc = ('xpath', 
'//*[contains(@text, \"请输入宝宝姓名\")]') # 请输入宝宝姓名\n\n def inputs_baby_name(self, name, n):\n self.sends_keys(self.baby_name_loc, name, n)\n\n baby_bir_btn_loc = ('xpath', '//*[contains(@text, \"宝宝的生日:\")]') # 宝宝的生日\n\n def click_baby_bir_btn(self):\n self.click(self.baby_bir_btn_loc)\n\n finish_btn_loc = ('xpath', '//*[contains(@text, \"完成\")]') # 完成按钮\n\n def click_finish_btn(self):\n self.click(self.finish_btn_loc)\n\n def clicks_finish_btn(self, n):\n self.clicks(self.finish_btn_loc, n)\n\n my_home_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/ct\" and @text=\"首页\"]') # 首页\n\n def click_my_home(self):\n self.click(self.my_home_loc)\n\n def element_my_home(self):\n return self.find_element(self.my_home_loc)\n\n switch_btn_loc = ('xpath', '//*[contains(@text, \"切换\")]') # 切换\n\n def click_switch_btn(self):\n self.click(self.switch_btn_loc)\n\n baby_bri_loc = ('xpath', '//*[contains(@text, \"宝宝生日:\")]') # 宝宝生日:\n\n def click_baby_bri(self):\n self.click(self.baby_bri_loc)\n\n class_img_btn_loc = ('xpath', 'android.widget.Image')\n\n def clicks_class_img(self):\n self.clicks(self.class_img_btn_loc, 0)\n\n collection_btn_loc = ('xpath', '//*[contains(@text, \"收藏\")]') # 收藏\n\n def click_collection_btn(self):\n self.click(self.collection_btn_loc)\n\n def clicks_collection_btn(self, n):\n self.clicks(self.collection_btn_loc, n)\n\n def element_collection_btn(self):\n return self.find_element(self.collection_btn_loc)\n\n write_record_btn_loc = ('xpath', '//*[contains(@text, \"写记录\") and @class=\"android.widget.Button\" ]') # 写记录按钮\n\n def click_write_record_btn(self):\n self.click(self.write_record_btn_loc)\n\n def clicks_write_record_btn(self, n):\n self.clicks(self.write_record_btn_loc, n)\n\n album_btn_loc = ('xpath', '//*[contains(@text, \"相册\")]') # 相册\n\n def click_album_btn(self):\n self.click(self.album_btn_loc)\n\n def element_album_btn(self):\n return self.find_element(self.album_btn_loc)\n\n small_video_btn_loc = ('xpath', '//*[contains(@text, 
\"小视频\")]') # 小视频\n\n def click_small_video_btn(self):\n self.click(self.small_video_btn_loc)\n\n def element_small_video_btn(self):\n return self.find_element(self.small_video_btn_loc)\n\n release_btn_loc = ('xpath', '//*[contains(@text, \"发布\")]') # 发布\n\n def click_release_btn(self):\n self.click(self.release_btn_loc)\n\n def clicks_release_btn(self, n):\n self.clicks(self.release_btn_loc, n)\n\n def element_record_info(self, data): # 判断是否定位到包含text的元素\n record_info_loc = ('xpath', '//*[contains(@text, \"{}\")]'.format(data))\n record_info = self.find_element(record_info_loc)\n if record_info:\n return True\n else:\n return False\n\n class_name_loc = ('xpath', '//*[contains(@text, \"歌曲\")]') # 课程名称\n\n # class_name_loc = ('xpath', '//*[contains(@text, \"歌曲:Head and shoulders\")]') # 课程名称\n\n def click_class_name(self):\n self.click(self.class_name_loc)\n\n def clicks_class_name(self, n):\n self.clicks(self.class_name_loc, n)\n\n def elements_class_name(self):\n return self.find_elements(self.class_name_loc)\n\n class_name2_loc = ('xpath', '//*[contains(@text, \"一起走\")]') # 课程名称\n\n # class_name2_loc = ('xpath', '//*[contains(@text, \"弹出来的画\")]') # 课程名称\n\n def click_class2_name(self):\n self.click(self.class_name2_loc)\n\n def clicks_class2_name(self, n):\n self.clicks(self.class_name2_loc, n)\n\n write_text_loc = ('xpath', '//*[contains(@text, \"0/1000\")]') # 写记录\n\n def input_write_text(self, text):\n self.send_keys(self.write_text_loc, text)\n\n def inputs_write_text(self, text, n):\n self.sends_keys(self.write_text_loc, text, n)\n\n choice_album_loc = ('id', 'com.tencent.mm:id/bpy')\n\n def clicks_choice_album(self, n):\n self.clicks(self.choice_album_loc, n)\n\n def elements_choice_album(self):\n return self.find_elements(self.choice_album_loc)\n\n complete_btn_loc = ('id', 'com.tencent.mm:id/ki') # 完成\n\n def click_complete_btn(self):\n self.click(self.complete_btn_loc)\n\n my_collection_btn_loc = ('xpath', '//*[contains(@text, \"我的收藏\")]') # 我的收藏\n\n def 
click_my_collection_btn(self):\n self.click(self.my_collection_btn_loc)\n\n my_collection_english_course_btn_loc = ('xpath', '//*[contains(@text, \"早教\")]') # 早教英语课\n\n def elements_my_collection_english_course_btn(self):\n return self.find_elements(self.my_collection_english_course_btn_loc)\n\n my_collection_game_course_btn_loc = ('xpath', '//*[contains(@text, \"宝宝游戏馆\")]') # 宝宝游戏馆\n\n def elements_my_collection_game_course_btn(self):\n return self.find_elements(self.my_collection_game_course_btn_loc)\n\n my_course_btn_loc = ('xpath', '//*[contains(@text, \"我的课程\")]') # 我的课程\n\n def click_my_course_btn(self):\n self.click(self.my_course_btn_loc)\n\n my_course_buy_btn_loc = ('xpath', '//*[contains(@text, \"早教核心课年卡\")]') # 早教核心课年卡\n\n def elements_my_course_buy_btn(self):\n return self.find_elements(self.my_course_buy_btn_loc)\n\n my_order_btn_loc = ('xpath', '//*[contains(@text, \"我的订单\")]') # 我的订单\n\n def click_my_order_btn(self):\n self.click(self.my_order_btn_loc)\n\n my_order_card_btn_loc = ('xpath', '//*[contains(@text, \"订单编号:\")]') # 订单编号:\n\n def elements_my_order_card_btn(self):\n return self.find_elements(self.my_order_card_btn_loc)\n\n my_record_btn_loc = ('xpath', '//*[contains(@text, \"成长记录\")]') # 成长记录\n\n def click_my_record_btn(self):\n self.click(self.my_record_btn_loc)\n\n my_record_class_btn_loc = ('xpath', '//*[contains(@text, \"#\")]') # # 测试英语课程组\n\n def elements_my_record_class_btn(self):\n return self.find_elements(self.my_record_class_btn_loc)\n\n back_btn_loc = (\n 'xpath', '//*[@resource-id=\"com.tencent.mm:id/on\" and @class=\"android.widget.LinearLayout\"]') # 返回按钮\n\n def element_back_btn(self):\n return self.find_element(self.back_btn_loc)\n\n def click_back_btn(self):\n self.click(self.back_btn_loc)\n\n reply_5_loc = ('xpath', '//android.widget.Image') # 回复5\n\n def click_reply_5(self):\n self.click(self.reply_5_loc)\n\n def elements_reply_5(self):\n return self.find_elements(self.reply_5_loc)\n\n add_to_btn_loc = ('xpath', 
'//*[contains(@text, \"立即添加\")]') # 立即添加\n\n def click_add_to_btn(self):\n self.click(self.add_to_btn_loc)\n\n reply_input_5_loc = ('id', 'com.tencent.mm:id/ami')\n\n def input_reply_5(self, num):\n self.send_keys(self.reply_input_5_loc, num)\n\n send_5_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/amp\" and @text=\"发送\"]') # 发送\n\n def click_send(self):\n self.click(self.send_5_loc)\n\n reply_code_loc = ('id', 'com.tencent.mm:id/ap9') # 获取回复的二维码\n\n def elements_reply_code(self):\n return self.find_elements(self.reply_code_loc)\n\n def clicks_reply_code(self, n):\n self.clicks(self.reply_code_loc, n)\n\n long_code_loc = ('id', 'com.tencent.mm:id/adi') # 长按二维码\n\n def element_long_code(self):\n return self.find_element(self.long_code_loc)\n\n def click_long_code(self):\n self.click(self.long_code_loc)\n\n discern_code_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/cx\" and @text=\"识别图中二维码\"]') # 识别图中二维码\n\n def click_discern_code(self):\n self.click(self.discern_code_loc)\n\n class_group_loc = ('id', 'android:id/text1') # 群名称\n\n def text_class_group(self):\n return self.get_text(self.class_group_loc)\n\n add_group_chat_loc = ('xpath', '//*[contains(@text, \"加入该群聊\")]') # 加入该群聊\n\n def element_add_group_chat(self):\n return self.find_element(self.add_group_chat_loc)\n\n reply_8_loc = ('xpath', '//android.widget.Image') # 回复8的banner 回复8->进公众号->点击推送 看到的二维码\n\n def elements_reply_8(self):\n return self.find_elements(self.reply_8_loc)\n\n parent_btn_loc = ('xpath', '//*[contains(@text, \"亲爱的家长:\")]') # 亲爱的家长:\n\n def element_parent_btn(self):\n return self.find_element(self.parent_btn_loc)\n\n info_btn_loc = ('id', 'com.tencent.mm:id/a8q') # 详情\n\n def elements_info_btn(self):\n return self.find_elements(self.info_btn_loc)\n\n def clicks_info_btn(self, n):\n self.clicks(self.info_btn_loc, n)\n\n more_games_btn_loc = ('xpath', '//*[contains(@text, \"更多亲子游戏\")]') # 更多亲子游戏\n\n def click_more_games_btn(self):\n self.click(self.more_games_btn_loc)\n\n 
look_all_btn_loc = ('xpath', '//*[contains(@text, \"查看全部\")]') # 查看全部\n\n def click_look_all_btn(self):\n self.click(self.look_all_btn_loc)\n\n def element_look_all_btn(self):\n return self.find_elements(self.look_all_btn_loc)\n\n start_fingerprint_buy_loc = ('id', 'com.tencent.mm:id/btp') # 开启指纹支付弹窗文本 开启指纹支付,支付时可通过验证指纹快速完成付款。\n\n def text_start_fingerprint_buy(self):\n return self.get_text(self.start_fingerprint_buy_loc)\n\n no_more_reminder_btn_loc = ('id', 'com.tencent.mm:id/btq') # 不再提醒\n\n def click_no_more_reminder_btn(self):\n self.click(self.no_more_reminder_btn_loc)\n\n cancel_btn_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/azz\" and @text=\"取消\"]') # 取消\n\n def click_cancel_btn(self):\n self.click(self.cancel_btn_loc)\n\n usd_password_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/fg4\" and @text=\"使用密码\"]') # 使用密码\n\n def element_usd_password(self):\n return self.find_element(self.usd_password_loc)\n\n def click_usd_password(self):\n self.click(self.usd_password_loc)\n\n password_error_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/d8x\" and @text=\"支付密码错误,请重试\"]') # 支付密码错误,请重试\n\n def element_password_error(self):\n return self.find_element(self.password_error_loc)\n\n again_btn_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/azz\" and @text=\"重试\"]') # 重试\n\n def click_again_btn(self):\n self.click(self.again_btn_loc)\n\n payment_loc = ('id', 'com.tencent.mm:id/fg3') # 请输入支付密码 文本\n\n def text_payment(self):\n return self.get_text(self.payment_loc)\n\n typewriting_finish_btn_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/z2\" and @text=\"完成\"]') # 输入法上的完成按钮\n\n def element_typewriting_finish_btn(self):\n return self.find_element(self.typewriting_finish_btn_loc)\n\n def click_typewriting_finish_btn(self):\n self.click(self.typewriting_finish_btn_loc)\n\n # 打卡\n\n clock_btn_loc = ('xpath', '//*[contains(@text, \"打卡\")]') # 打卡\n\n def click_clock_btn(self):\n self.click(self.clock_btn_loc)\n\n def 
element_clock_btn(self):\n return self.find_element(self.clock_btn_loc)\n\n # com.tencent.mm:id/ox\n\n no_clock_btn_loc = ('xpath', '//*[contains(@text, \"你还未开启打卡\")]') # 你还未开启打卡\n\n def element_no_clock_btn(self):\n return self.find_element(self.no_clock_btn_loc)\n\n get_card_btn_loc = ('xpath', '//*[@text=\"获取打卡海报\" and @class=\"android.widget.Button\"]') # 获取打卡海报\n\n def click_get_card_btn(self):\n self.click(self.get_card_btn_loc)\n\n upload_card_btn_loc = ('xpath', '//*[@text=\"上传截图\" and @class=\"android.widget.Button\"]') # 上传截图\n\n def click_upload_card_btn(self):\n self.click(self.upload_card_btn_loc)\n\n again_upload_card_btn_loc = ('xpath', '//*[@text=\"重新上传截图\" and @class=\"android.widget.Button\"]') # 重新上传截图\n\n def click_again_upload_card_btn(self):\n self.click(self.again_upload_card_btn_loc)\n\n save_img_btn_loc = ('xpath', '//*[@text=\"保存图片\" and @class=\"android.widget.Button\"]') # 保存图片\n\n def click_save_img_btn(self):\n self.click(self.save_img_btn_loc)\n\n copy_text_btn_loc = ('xpath', '//*[@text=\"复制发圈文案\" and @class=\"android.widget.Button\"]') # 复制发圈文案\n\n def click_copy_text_btn(self):\n self.click(self.copy_text_btn_loc)\n\n copy_format_btn_loc = ('xpath', '//*[contains(@text, \"发布朋友圈截图规范\")]') # 发布朋友圈截图规范\n\n def element_copy_format_btn(self):\n return self.find_element(self.copy_format_btn_loc)\n\n card_go_btn_loc = ('xpath', '//*[contains(@text, \"关闭小程序,去朋友圈打卡截图\")]') # 关闭小程序,去朋友圈打卡截图\n\n def click_card_go_btn(self):\n self.click(self.card_go_btn_loc)\n\n upload_btn_loc = ('xpath', '//*[@text=\"上传\" and @class=\"android.widget.Button\"]') # 上传\n\n def click_upload_btn(self):\n self.click(self.upload_btn_loc)\n\n today_card_btn_loc = ('xpath', '//*[contains(@text, \"今日已提交打卡\")]') # 今日已提交打卡\n\n def element_today_card_btn(self):\n return self.find_element(self.today_card_btn_loc)\n\n reset_img_btn_loc = ('xpath', '//*[@text=\"重新选择截图\" and @class=\"android.widget.Button\"]') # 重新选择截图\n\n def click_reset_img_btn(self):\n 
self.click(self.reset_img_btn_loc)\n\n generated_loading_loc = ('xpath', '//*[@resource-id=\"com.tencent.mm:id/cx\" and @text=\"正在生成...\"]') # 正在生成...\n\n def element_generated_loading(self):\n return self.find_element(self.generated_loading_loc)\n\n reminder_btn_loc = ('xpath', '//*[contains(@text, \"温馨提示\")]') # 温馨提示\n\n def element_reminder_btn(self):\n return self.find_element(self.reminder_btn_loc)\n\n page_expired_loc = ('xpath', '//*[contains(@text, \"页面已经过期\")]') # 页面已经过期\n\n def element_page_expired(self):\n return self.find_element(self.page_expired_loc)\n\n x_btn_loc = ('id', 'com.tencent.mm:id/kx')\n\n def click_x_btn(self):\n self.click(self.x_btn_loc)\n", "step-ids": [ 73, 89, 121, 148, 152 ] }
[ 73, 89, 121, 148, 152 ]
from django.shortcuts import render from rest_framework import status from rest_framework.decorators import api_view, renderer_classes from rest_framework.renderers import BrowsableAPIRenderer, JSONRenderer from rest_framework.response import Response from feedback.models import Feedback from feedback.serializers import FeedbackSerializer @api_view(['GET', 'POST']) @renderer_classes([JSONRenderer, BrowsableAPIRenderer]) def feedback_list(request, format=None): """ List all feedback or create a new feedback """ if request.method == 'GET': feedback = Feedback.objects.all() serializer = FeedbackSerializer(feedback, many=True) return Response(serializer.data) elif request.method == 'POST': serializer = FeedbackSerializer(data=request.data) if serializer.is_valid(): serializer.save() return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) def feedback_index(request): feedback = Feedback.objects.all() context = {'feedback': feedback} return render(request, 'feedback_index.html', context)
normal
{ "blob_id": "bd6c72c3215265a349c5f47573063a9288f64198", "index": 5227, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\n@api_view(['GET', 'POST'])\n@renderer_classes([JSONRenderer, BrowsableAPIRenderer])\ndef feedback_list(request, format=None):\n \"\"\"\n List all feedback or create a new feedback\n \"\"\"\n if request.method == 'GET':\n feedback = Feedback.objects.all()\n serializer = FeedbackSerializer(feedback, many=True)\n return Response(serializer.data)\n elif request.method == 'POST':\n serializer = FeedbackSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\n@api_view(['GET', 'POST'])\n@renderer_classes([JSONRenderer, BrowsableAPIRenderer])\ndef feedback_list(request, format=None):\n \"\"\"\n List all feedback or create a new feedback\n \"\"\"\n if request.method == 'GET':\n feedback = Feedback.objects.all()\n serializer = FeedbackSerializer(feedback, many=True)\n return Response(serializer.data)\n elif request.method == 'POST':\n serializer = FeedbackSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\ndef feedback_index(request):\n feedback = Feedback.objects.all()\n context = {'feedback': feedback}\n return render(request, 'feedback_index.html', context)\n", "step-4": "from django.shortcuts import render\nfrom rest_framework import status\nfrom rest_framework.decorators import api_view, renderer_classes\nfrom rest_framework.renderers import BrowsableAPIRenderer, JSONRenderer\nfrom rest_framework.response import Response\nfrom feedback.models import Feedback\nfrom feedback.serializers import FeedbackSerializer\n\n\n@api_view(['GET', 
'POST'])\n@renderer_classes([JSONRenderer, BrowsableAPIRenderer])\ndef feedback_list(request, format=None):\n \"\"\"\n List all feedback or create a new feedback\n \"\"\"\n if request.method == 'GET':\n feedback = Feedback.objects.all()\n serializer = FeedbackSerializer(feedback, many=True)\n return Response(serializer.data)\n elif request.method == 'POST':\n serializer = FeedbackSerializer(data=request.data)\n if serializer.is_valid():\n serializer.save()\n return Response(serializer.data, status=status.HTTP_201_CREATED)\n return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)\n\n\ndef feedback_index(request):\n feedback = Feedback.objects.all()\n context = {'feedback': feedback}\n return render(request, 'feedback_index.html', context)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
from framework import * from pebble_game import * from constructive_pebble_game import * from nose.tools import ok_ import numpy as np # initialise the seed for reproducibility np.random.seed(102) fw_2d = create_framework([0,1,2,3], [(0,1), (0,3), (1,2), (1,3), (2,3)], [(2,3), (4,4), (5,2), (1,1)]) # a 3d fw constricted to 2d fw_3d = create_framework([0,1,2,3], [(0,1), (0,3), (1,2), (1,3), (2,3)], [(2,3, 0), (4,4, 0), (5,2, 0), (1,1, 0)]) R = create_rigidity_matrix(fw_3d, 3) fig_39_nodes = [0,1,2,3] fig_39_edges = [(0,1), (0,2), (0,3), (1,2), (2,3)] fig_39_pos = [(0,0), (3,0), (3,2), (0,2)] fig_39_fw = create_framework(fig_39_nodes, fig_39_edges, fig_39_pos) R39 = create_rigidity_matrix(fig_39_fw, 2) rigid_3d = create_framework([0,1,2,3,4], [(0,1), (0,3), (1,2), (1,3), (2,3), (0,2), (0,4), (1,4), (2,4)], [(2,3, 0), (4,4, 5), (5,2, 0), (1,1, 0), (10,10,10)]) fw_1d = create_framework([0,1,2], [(0,1), (1,2), (0,2)], [1,6,20]) ok_(is_inf_rigid(fw_2d, 2)) ok_(not is_inf_rigid(fw_3d, 3)) ok_(is_inf_rigid(fw_1d, 1)) # ok_(not is_inf_rigid(deformable_fw, 2)) # draw_framework(deformable_fw) reduced_fw = create_reduced_fw(50,0.2, 1) # p = pebble_game(reduced_fw, 2, 3) # print(p[1]) # draw_framework(reduced_fw) # draw_comps(reduced_fw, p[1]) # experimenting with reducing a framework gradually and tracking the number of components rand_fw = create_random_fw(10,0.1, 1) print(len(rand_fw.nodes)) draw_framework(rand_fw) # num_comps = constructive_pebble_game(rand_fw, 2, 3) # fig = plt.figure(figsize=(20,10)) # plotting the number of comps(reversed to show removal) # plt.plot(num_comps) # # fig.savefig("comp_numbers.pdf") # plt.show() # draw_framework(rand_fw, "before.pdf") # num_comps = [] # counter = 0 # while len(rand_fw.edges) > 2*len(rand_fw.nodes): # index = np.random.choice(len(rand_fw.edges)) # edge = list(rand_fw.edges)[index] # if rand_fw.degree(edge[0]) > 2 and rand_fw.degree(edge[1]) > 2: # counter += 1 # rand_fw.remove_edge(edge[0], edge[1]) # comps = 
pebble_game(rand_fw, 2, 3)[1] # num_comps.append(len(comps)) # draw_comps(rand_fw, comps, filename="after"+str(counter)+".pdf", show=False) # plt.close("all") # draw_comps(rand_fw, comps, "after.pdf") # Edges are not reported consistently so will always sort them before indexing # of the edges will always be the same def_node = [0,1,2,3] def_edge = [(0,1), (0,3), (1,2), (2,3)] def_pos = [(0,0), (4,0), (4,2), (0,2)] deformable_fw = create_framework(def_node, def_edge, def_pos) R = create_rigidity_matrix(deformable_fw, 2) # creating a force to apply # as an example, move points 0 and 2 towards each other # f is a d*n length vector R = create_rigidity_matrix(rand_fw, 2) f = [0] * len(R[0]) f[2] = -0.1 f[3] = 0.1 f[14] = -0.1 f[15] = 0.1 f = np.array(f) print(R) print(f) print(R.dot(f)) draw_stresses(rand_fw, f) # draw_framework(fw_2d) sq_nodes = [0,1,2,3] sq_edges = [(0,1), (0,3), (1,2), (2,3), (0,2)] sq_pos = [(0,0), (4,0), (4,4), (0,4)] sq_fw = create_framework(sq_nodes, sq_edges, sq_pos) # print(sq_fw.edges) # print(sorted(sq_fw.edges)) f = [0] * len(sq_nodes) * 2 f[0] = 1 f[1] = 1 f[4] = -1 f[5] = -1 draw_stresses(sq_fw, f)
normal
{ "blob_id": "4e31619efcaf6eeab3b32116b21e71de8202aee2", "index": 8646, "step-1": "<mask token>\n", "step-2": "<mask token>\nok_(is_inf_rigid(fw_2d, 2))\nok_(not is_inf_rigid(fw_3d, 3))\nok_(is_inf_rigid(fw_1d, 1))\n<mask token>\nprint(len(rand_fw.nodes))\ndraw_framework(rand_fw)\n<mask token>\nprint(R)\nprint(f)\nprint(R.dot(f))\ndraw_stresses(rand_fw, f)\n<mask token>\ndraw_stresses(sq_fw, f)\n", "step-3": "<mask token>\nfw_2d = create_framework([0, 1, 2, 3], [(0, 1), (0, 3), (1, 2), (1, 3), (2,\n 3)], [(2, 3), (4, 4), (5, 2), (1, 1)])\nfw_3d = create_framework([0, 1, 2, 3], [(0, 1), (0, 3), (1, 2), (1, 3), (2,\n 3)], [(2, 3, 0), (4, 4, 0), (5, 2, 0), (1, 1, 0)])\nR = create_rigidity_matrix(fw_3d, 3)\nfig_39_nodes = [0, 1, 2, 3]\nfig_39_edges = [(0, 1), (0, 2), (0, 3), (1, 2), (2, 3)]\nfig_39_pos = [(0, 0), (3, 0), (3, 2), (0, 2)]\nfig_39_fw = create_framework(fig_39_nodes, fig_39_edges, fig_39_pos)\nR39 = create_rigidity_matrix(fig_39_fw, 2)\nrigid_3d = create_framework([0, 1, 2, 3, 4], [(0, 1), (0, 3), (1, 2), (1, 3\n ), (2, 3), (0, 2), (0, 4), (1, 4), (2, 4)], [(2, 3, 0), (4, 4, 5), (5, \n 2, 0), (1, 1, 0), (10, 10, 10)])\nfw_1d = create_framework([0, 1, 2], [(0, 1), (1, 2), (0, 2)], [1, 6, 20])\nok_(is_inf_rigid(fw_2d, 2))\nok_(not is_inf_rigid(fw_3d, 3))\nok_(is_inf_rigid(fw_1d, 1))\nreduced_fw = create_reduced_fw(50, 0.2, 1)\nrand_fw = create_random_fw(10, 0.1, 1)\nprint(len(rand_fw.nodes))\ndraw_framework(rand_fw)\ndef_node = [0, 1, 2, 3]\ndef_edge = [(0, 1), (0, 3), (1, 2), (2, 3)]\ndef_pos = [(0, 0), (4, 0), (4, 2), (0, 2)]\ndeformable_fw = create_framework(def_node, def_edge, def_pos)\nR = create_rigidity_matrix(deformable_fw, 2)\nR = create_rigidity_matrix(rand_fw, 2)\nf = [0] * len(R[0])\nf[2] = -0.1\nf[3] = 0.1\nf[14] = -0.1\nf[15] = 0.1\nf = np.array(f)\nprint(R)\nprint(f)\nprint(R.dot(f))\ndraw_stresses(rand_fw, f)\nsq_nodes = [0, 1, 2, 3]\nsq_edges = [(0, 1), (0, 3), (1, 2), (2, 3), (0, 2)]\nsq_pos = [(0, 0), (4, 0), (4, 4), (0, 4)]\nsq_fw = 
create_framework(sq_nodes, sq_edges, sq_pos)\nf = [0] * len(sq_nodes) * 2\nf[0] = 1\nf[1] = 1\nf[4] = -1\nf[5] = -1\ndraw_stresses(sq_fw, f)\n", "step-4": "from framework import *\nfrom pebble_game import *\nfrom constructive_pebble_game import *\nfrom nose.tools import ok_\nimport numpy as np\nfw_2d = create_framework([0, 1, 2, 3], [(0, 1), (0, 3), (1, 2), (1, 3), (2,\n 3)], [(2, 3), (4, 4), (5, 2), (1, 1)])\nfw_3d = create_framework([0, 1, 2, 3], [(0, 1), (0, 3), (1, 2), (1, 3), (2,\n 3)], [(2, 3, 0), (4, 4, 0), (5, 2, 0), (1, 1, 0)])\nR = create_rigidity_matrix(fw_3d, 3)\nfig_39_nodes = [0, 1, 2, 3]\nfig_39_edges = [(0, 1), (0, 2), (0, 3), (1, 2), (2, 3)]\nfig_39_pos = [(0, 0), (3, 0), (3, 2), (0, 2)]\nfig_39_fw = create_framework(fig_39_nodes, fig_39_edges, fig_39_pos)\nR39 = create_rigidity_matrix(fig_39_fw, 2)\nrigid_3d = create_framework([0, 1, 2, 3, 4], [(0, 1), (0, 3), (1, 2), (1, 3\n ), (2, 3), (0, 2), (0, 4), (1, 4), (2, 4)], [(2, 3, 0), (4, 4, 5), (5, \n 2, 0), (1, 1, 0), (10, 10, 10)])\nfw_1d = create_framework([0, 1, 2], [(0, 1), (1, 2), (0, 2)], [1, 6, 20])\nok_(is_inf_rigid(fw_2d, 2))\nok_(not is_inf_rigid(fw_3d, 3))\nok_(is_inf_rigid(fw_1d, 1))\nreduced_fw = create_reduced_fw(50, 0.2, 1)\nrand_fw = create_random_fw(10, 0.1, 1)\nprint(len(rand_fw.nodes))\ndraw_framework(rand_fw)\ndef_node = [0, 1, 2, 3]\ndef_edge = [(0, 1), (0, 3), (1, 2), (2, 3)]\ndef_pos = [(0, 0), (4, 0), (4, 2), (0, 2)]\ndeformable_fw = create_framework(def_node, def_edge, def_pos)\nR = create_rigidity_matrix(deformable_fw, 2)\nR = create_rigidity_matrix(rand_fw, 2)\nf = [0] * len(R[0])\nf[2] = -0.1\nf[3] = 0.1\nf[14] = -0.1\nf[15] = 0.1\nf = np.array(f)\nprint(R)\nprint(f)\nprint(R.dot(f))\ndraw_stresses(rand_fw, f)\nsq_nodes = [0, 1, 2, 3]\nsq_edges = [(0, 1), (0, 3), (1, 2), (2, 3), (0, 2)]\nsq_pos = [(0, 0), (4, 0), (4, 4), (0, 4)]\nsq_fw = create_framework(sq_nodes, sq_edges, sq_pos)\nf = [0] * len(sq_nodes) * 2\nf[0] = 1\nf[1] = 1\nf[4] = -1\nf[5] = 
-1\ndraw_stresses(sq_fw, f)\n", "step-5": "from framework import *\nfrom pebble_game import *\nfrom constructive_pebble_game import *\nfrom nose.tools import ok_\nimport numpy as np\n\n# initialise the seed for reproducibility np.random.seed(102)\n\nfw_2d = create_framework([0,1,2,3], [(0,1), (0,3), (1,2), (1,3), (2,3)], [(2,3), (4,4), (5,2), (1,1)])\n# a 3d fw constricted to 2d\nfw_3d = create_framework([0,1,2,3], [(0,1), (0,3), (1,2), (1,3), (2,3)], [(2,3, 0), (4,4, 0), (5,2, 0), (1,1, 0)])\nR = create_rigidity_matrix(fw_3d, 3)\n\nfig_39_nodes = [0,1,2,3]\nfig_39_edges = [(0,1), (0,2), (0,3), (1,2), (2,3)]\nfig_39_pos = [(0,0), (3,0), (3,2), (0,2)]\n\nfig_39_fw = create_framework(fig_39_nodes, fig_39_edges, fig_39_pos)\nR39 = create_rigidity_matrix(fig_39_fw, 2)\n\nrigid_3d = create_framework([0,1,2,3,4],\n [(0,1), (0,3), (1,2), (1,3), (2,3), (0,2), (0,4), (1,4), (2,4)],\n [(2,3, 0), (4,4, 5), (5,2, 0), (1,1, 0), (10,10,10)])\n\nfw_1d = create_framework([0,1,2],\n [(0,1), (1,2), (0,2)],\n [1,6,20])\n\nok_(is_inf_rigid(fw_2d, 2))\nok_(not is_inf_rigid(fw_3d, 3))\nok_(is_inf_rigid(fw_1d, 1))\n# ok_(not is_inf_rigid(deformable_fw, 2))\n# draw_framework(deformable_fw)\n\nreduced_fw = create_reduced_fw(50,0.2, 1)\n\n# p = pebble_game(reduced_fw, 2, 3)\n# print(p[1])\n# draw_framework(reduced_fw)\n# draw_comps(reduced_fw, p[1])\n# experimenting with reducing a framework gradually and tracking the number of components\nrand_fw = create_random_fw(10,0.1, 1) \nprint(len(rand_fw.nodes))\ndraw_framework(rand_fw)\n# num_comps = constructive_pebble_game(rand_fw, 2, 3) \n# fig = plt.figure(figsize=(20,10))\n# plotting the number of comps(reversed to show removal)\n# plt.plot(num_comps)\n# # fig.savefig(\"comp_numbers.pdf\")\n# plt.show()\n# draw_framework(rand_fw, \"before.pdf\")\n# num_comps = []\n# counter = 0\n# while len(rand_fw.edges) > 2*len(rand_fw.nodes):\n# index = np.random.choice(len(rand_fw.edges))\n# edge = list(rand_fw.edges)[index]\n# if rand_fw.degree(edge[0]) 
> 2 and rand_fw.degree(edge[1]) > 2:\n# counter += 1\n# rand_fw.remove_edge(edge[0], edge[1])\n# comps = pebble_game(rand_fw, 2, 3)[1]\n# num_comps.append(len(comps))\n# draw_comps(rand_fw, comps, filename=\"after\"+str(counter)+\".pdf\", show=False)\n# plt.close(\"all\")\n\n# draw_comps(rand_fw, comps, \"after.pdf\")\n\n# Edges are not reported consistently so will always sort them before indexing\n# of the edges will always be the same\ndef_node = [0,1,2,3]\ndef_edge = [(0,1), (0,3), (1,2), (2,3)]\ndef_pos = [(0,0), (4,0), (4,2), (0,2)]\ndeformable_fw = create_framework(def_node, def_edge, def_pos)\nR = create_rigidity_matrix(deformable_fw, 2)\n# creating a force to apply\n# as an example, move points 0 and 2 towards each other\n# f is a d*n length vector\nR = create_rigidity_matrix(rand_fw, 2)\nf = [0] * len(R[0])\nf[2] = -0.1\nf[3] = 0.1\nf[14] = -0.1\nf[15] = 0.1\nf = np.array(f)\nprint(R)\nprint(f)\n\nprint(R.dot(f))\ndraw_stresses(rand_fw, f)\n\n# draw_framework(fw_2d)\nsq_nodes = [0,1,2,3]\nsq_edges = [(0,1), (0,3), (1,2), (2,3), (0,2)]\nsq_pos = [(0,0), (4,0), (4,4), (0,4)]\nsq_fw = create_framework(sq_nodes, sq_edges, sq_pos)\n# print(sq_fw.edges)\n# print(sorted(sq_fw.edges))\nf = [0] * len(sq_nodes) * 2\nf[0] = 1\nf[1] = 1\nf[4] = -1\nf[5] = -1\n\ndraw_stresses(sq_fw, f)\n\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def infer(sample): return {'mean': np.mean(sample), 'std': np.std(sample)} <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def simulate_one_realisation(): return np.random.normal(1, 2, size=n_points) def infer(sample): return {'mean': np.mean(sample), 'std': np.std(sample)} <|reserved_special_token_0|> print(means) plt.hist([x['mean'] for x in inference], bins=25) plt.show() <|reserved_special_token_0|> print(standard_error) plt.hist([x['std'] for x in inference], bins=25) plt.show() <|reserved_special_token_1|> <|reserved_special_token_0|> n_points = 100 n_sims = 1000 def simulate_one_realisation(): return np.random.normal(1, 2, size=n_points) def infer(sample): return {'mean': np.mean(sample), 'std': np.std(sample)} inference = [infer(simulate_one_realisation()) for _ in range(n_sims)] means = np.percentile([x['mean'] for x in inference], [25, 50, 75]) print(means) plt.hist([x['mean'] for x in inference], bins=25) plt.show() standard_error = np.percentile([x['std'] for x in inference], [25, 50, 75]) print(standard_error) plt.hist([x['std'] for x in inference], bins=25) plt.show() <|reserved_special_token_1|> import matplotlib.pyplot as plt import numpy as np n_points = 100 n_sims = 1000 def simulate_one_realisation(): return np.random.normal(1, 2, size=n_points) def infer(sample): return {'mean': np.mean(sample), 'std': np.std(sample)} inference = [infer(simulate_one_realisation()) for _ in range(n_sims)] means = np.percentile([x['mean'] for x in inference], [25, 50, 75]) print(means) plt.hist([x['mean'] for x in inference], bins=25) plt.show() standard_error = np.percentile([x['std'] for x in inference], [25, 50, 75]) print(standard_error) plt.hist([x['std'] for x in inference], bins=25) plt.show() <|reserved_special_token_1|> #!/usr/bin/env python import matplotlib.pyplot as plt import numpy as np n_points = 100 n_sims = 1000 def simulate_one_realisation(): return np.random.normal(1, 2, 
size=n_points) def infer(sample): return {'mean': np.mean(sample), 'std': np.std(sample)} inference = [infer(simulate_one_realisation()) for _ in range(n_sims)] means = np.percentile([x['mean'] for x in inference], [25, 50, 75]) print(means) plt.hist([x['mean'] for x in inference], bins=25) plt.show() standard_error = np.percentile([x['std'] for x in inference], [25, 50, 75]) print(standard_error) plt.hist([x['std'] for x in inference], bins=25) plt.show() # The sample standard deviation and estimated standard error of the sample mean both have a slight right skew. But the skewness of the sample standard deviation is much more than the sample mean. # The sample standar deviation histogram has many modes signifying a random distributionwhereas the sample mean histogram has a more uniform distribution shape with only one mode.
flexible
{ "blob_id": "6e8ef901fc614ecbba25df01f84a43c429f25cf6", "index": 4919, "step-1": "<mask token>\n\n\ndef infer(sample):\n return {'mean': np.mean(sample), 'std': np.std(sample)}\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef simulate_one_realisation():\n return np.random.normal(1, 2, size=n_points)\n\n\ndef infer(sample):\n return {'mean': np.mean(sample), 'std': np.std(sample)}\n\n\n<mask token>\nprint(means)\nplt.hist([x['mean'] for x in inference], bins=25)\nplt.show()\n<mask token>\nprint(standard_error)\nplt.hist([x['std'] for x in inference], bins=25)\nplt.show()\n", "step-3": "<mask token>\nn_points = 100\nn_sims = 1000\n\n\ndef simulate_one_realisation():\n return np.random.normal(1, 2, size=n_points)\n\n\ndef infer(sample):\n return {'mean': np.mean(sample), 'std': np.std(sample)}\n\n\ninference = [infer(simulate_one_realisation()) for _ in range(n_sims)]\nmeans = np.percentile([x['mean'] for x in inference], [25, 50, 75])\nprint(means)\nplt.hist([x['mean'] for x in inference], bins=25)\nplt.show()\nstandard_error = np.percentile([x['std'] for x in inference], [25, 50, 75])\nprint(standard_error)\nplt.hist([x['std'] for x in inference], bins=25)\nplt.show()\n", "step-4": "import matplotlib.pyplot as plt\nimport numpy as np\nn_points = 100\nn_sims = 1000\n\n\ndef simulate_one_realisation():\n return np.random.normal(1, 2, size=n_points)\n\n\ndef infer(sample):\n return {'mean': np.mean(sample), 'std': np.std(sample)}\n\n\ninference = [infer(simulate_one_realisation()) for _ in range(n_sims)]\nmeans = np.percentile([x['mean'] for x in inference], [25, 50, 75])\nprint(means)\nplt.hist([x['mean'] for x in inference], bins=25)\nplt.show()\nstandard_error = np.percentile([x['std'] for x in inference], [25, 50, 75])\nprint(standard_error)\nplt.hist([x['std'] for x in inference], bins=25)\nplt.show()\n", "step-5": "#!/usr/bin/env python\n\nimport matplotlib.pyplot as plt\nimport numpy as np\n\nn_points = 100\nn_sims = 1000\n\ndef 
simulate_one_realisation():\n return np.random.normal(1, 2, size=n_points)\n\ndef infer(sample):\n return {'mean': np.mean(sample), 'std': np.std(sample)}\n\ninference = [infer(simulate_one_realisation()) for _ in range(n_sims)]\n\nmeans = np.percentile([x['mean'] for x in inference], [25, 50, 75])\nprint(means)\n\nplt.hist([x['mean'] for x in inference], bins=25)\nplt.show()\n\nstandard_error = np.percentile([x['std'] for x in inference], [25, 50, 75])\nprint(standard_error)\n\nplt.hist([x['std'] for x in inference], bins=25)\nplt.show()\n\n# The sample standard deviation and estimated standard error of the sample mean both have a slight right skew. But the skewness of the sample standard deviation is much more than the sample mean.\n# The sample standar deviation histogram has many modes signifying a random distributionwhereas the sample mean histogram has a more uniform distribution shape with only one mode. \n", "step-ids": [ 1, 3, 4, 5, 6 ] }
[ 1, 3, 4, 5, 6 ]
#!/usr/bin/python3 """takes in a URL and an email address, sends a POST request to the passed URL with the email as a parameter, and finally displays the body of the response. """ import requests import sys if __name__ == "__main__": url_arg = sys.argv[1] email = sys.argv[2] params = {'email': email} response = requests.post(url_arg, data=params) print(response.text)
normal
{ "blob_id": "0d9c50e55df5aa5614bd5a9679729cf7fa69c5df", "index": 1461, "step-1": "<mask token>\n", "step-2": "<mask token>\nif __name__ == '__main__':\n url_arg = sys.argv[1]\n email = sys.argv[2]\n params = {'email': email}\n response = requests.post(url_arg, data=params)\n print(response.text)\n", "step-3": "<mask token>\nimport requests\nimport sys\nif __name__ == '__main__':\n url_arg = sys.argv[1]\n email = sys.argv[2]\n params = {'email': email}\n response = requests.post(url_arg, data=params)\n print(response.text)\n", "step-4": "#!/usr/bin/python3\n\"\"\"takes in a URL and an email address, sends a POST request to the passed\nURL with the email as a parameter, and finally\ndisplays the body of the response.\n\"\"\"\nimport requests\nimport sys\n\n\nif __name__ == \"__main__\":\n url_arg = sys.argv[1]\n email = sys.argv[2]\n params = {'email': email}\n response = requests.post(url_arg, data=params)\n print(response.text)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
###############################################################################
# Programming Essentials B8IT102 Assessment                                   #
# Student: Barry Sheppard  ID: 10387786                                       #
# Problem 1                                                                   #
###############################################################################

###############################################################################
# Functions                                                                   #
###############################################################################


def LimitedInput(message, limit, isNumber=False):
    """
    Prompt the user with *message* until the input is valid, then return it.

    The reply must be at most *limit* characters long.  If the optional
    *isNumber* parameter is True, the reply must also be convertible to a
    number.  The user is re-prompted, with an explanatory message, until
    every rule is satisfied.
    """
    keepAsking = True
    while keepAsking:
        answer = input(message)
        if len(answer) > limit:
            print("The input must be", limit, "characters or less.")
        else:
            keepAsking = False
        # A reply can fail both checks at once; both messages then print,
        # matching the original behaviour.
        if isNumber is True and CheckNumber(answer) is False:
            print("The input must be a number.")
            keepAsking = True
    return answer


def CheckNumber(userInput):
    """Return True if *userInput* can be converted to a float, else False."""
    try:
        float(userInput)
        return True
    except ValueError:
        return False


def ValidDate(answer):
    """
    Return True if *answer* is a date in the strict DD/MM/YYYY format.

    The string must contain exactly two '/' separators, a two-digit day,
    a two-digit month and a four-digit year, with day 1-31, month 1-12
    and year strictly between 2000 and 3000.

    NOTE(review): like the original validation this accepts impossible
    dates such as 31/02/2018 -- the day range is not tied to the month.
    """
    parts = answer.split(sep="/")
    # Bug fix: the original used "is not 3", which tests object identity
    # rather than equality and only worked via CPython's small-int cache.
    if len(parts) != 3:
        return False
    day, month, year = parts
    # Each field must have the exact width and be numeric before int().
    if not (len(day) == 2 and len(month) == 2 and len(year) == 4
            and CheckNumber(day) and CheckNumber(month)
            and CheckNumber(year)):
        return False
    day, month, year = int(day), int(month), int(year)
    return (0 < day < 32) and (0 < month < 13) and (2000 < year < 3000)


def DateInput(message):
    """
    Prompt the user with *message* for a DD/MM/YYYY date and return it.

    The user is re-prompted until ValidDate() accepts the reply.
    """
    askAgainMessage = "The date must be in the format DD/MM/YYYY"
    answer = input(message)
    while not ValidDate(answer):
        print(askAgainMessage)
        answer = input(message)
    return answer


###############################################################################
# Payslip program                                                             #
###############################################################################


def main():
    """Prompt for the week's details, calculate the pay and print a payslip."""

    # -- Prompt the user for the required input ------------------------------
    employeeName = LimitedInput("Employee Name: ", 20)        # e.g. Mark Bate
    employeeNumber = LimitedInput("Employee Number: ", 10)    # e.g. 123456789A
    weekEnding = DateInput("Week ending: ")                   # e.g. 26/01/2018
    hoursWorked = LimitedInput("Number of hours worked: ", 6, True)  # e.g. 42.5
    # There are only 168 hours in a week, so anything larger is an error.
    # This could be lowered to a legal working-time limit if required.
    while float(hoursWorked) > 168:
        print("The number of hours worked is too large.")
        hoursWorked = LimitedInput("Number of hours worked: ", 6, True)
    standardRate = LimitedInput("Hourly Rate: ", 6, True)            # e.g. 10.50
    overtimeMultiplier = LimitedInput("Overtime Rate: ", 3, True)    # e.g. 1.5
    standardTaxRate = LimitedInput("Standard Tax Rate: ", 2, True)   # e.g. 20
    overtimeTaxRate = LimitedInput("Overtime Tax Rate: ", 2, True)   # e.g. 50

    # Convert to numbers; LimitedInput already validated these as numerals.
    hoursWorked = float(hoursWorked)
    standardRate = float(standardRate)
    overtimeMultiplier = float(overtimeMultiplier)
    standardTaxRate = float(standardTaxRate)
    overtimeTaxRate = float(overtimeTaxRate)

    # -- Calculate the required details for output ---------------------------
    # Anything beyond the 37.5 standard hours counts as overtime.
    if hoursWorked > 37.50:
        standardHours = 37.50
        overtimeHours = hoursWorked - 37.50
    else:
        standardHours = hoursWorked
        overtimeHours = 0

    standardPayTotal = standardHours * standardRate
    overtimeRate = overtimeMultiplier * standardRate  # overtime is a multiplier
    overtimePayTotal = overtimeHours * overtimeRate
    standardTaxTotal = (standardPayTotal * standardTaxRate) / 100
    overtimeTaxTotal = (overtimePayTotal * overtimeTaxRate) / 100
    payTotal = standardPayTotal + overtimePayTotal
    totalDeductions = standardTaxTotal + overtimeTaxTotal
    netPay = payTotal - totalDeductions

    # -- Print out the payslip -----------------------------------------------
    # One block of text with the variables inserted via str.format(), which
    # lets the float fields be fixed to two decimal places.
    print("""
                              P A Y S L I P

WEEK ENDING {:}

Employee: {:}
Employee Number: {:}

                 Earnings                               Deductions
            Hours     Rate     Total
Hours (normal)
           {:6.2f}   {:6.2f}   {:6.2f}          Tax @ {:02.0f}%   {:6.2f}
Hours (overtime)
           {:6.2f}   {:6.2f}   {:6.2f}          Tax @ {:02.0f}%   {:6.2f}

                              Total pay:         {:7.2f}
                              Total deductions:  {:7.2f}
                              Net pay:           {:7.2f}
""".format(weekEnding, employeeName, employeeNumber,
           standardHours, standardRate, standardPayTotal,
           standardTaxRate, standardTaxTotal,
           overtimeHours, overtimeRate, overtimePayTotal,
           overtimeTaxRate, overtimeTaxTotal,
           payTotal, totalDeductions, netPay))


if __name__ == "__main__":
    main()
normal
{ "blob_id": "77e985d94d3b47539f046a3a46cb1a197cef86f4", "index": 3409, "step-1": "<mask token>\n\n\ndef CheckNumber(userInput):\n \"\"\" This function returns True if userInput can be converted to a number and\n returns False if it cannot. \"\"\"\n try:\n float(userInput)\n return True\n except ValueError:\n return False\n\n\ndef DateInput(message):\n \"\"\" This function prompts the user for a date using the message variable.\n User will continue to be prompted until the format is correct.\n\n The date format is very specific in the format DD/MM/YYYYY\n This function will confirm there are the right number of characters,\n the / are in the right place, the input are numbers, the days are between\n 1 and 31, the months are between 1 and 12, and the year is between 2000\n and 3000 (roll on year 3k bug!)\n \"\"\"\n askAgainMessage = 'The date must be in the format DD/MM/YYYY'\n keepAsking = True\n while keepAsking:\n answer = input(message)\n dateCheck = answer.split(sep='/')\n if len(dateCheck) is not 3:\n print(askAgainMessage)\n else:\n day = dateCheck[0]\n month = dateCheck[1]\n year = dateCheck[2]\n if len(day) == 2 and len(month) == 2 and len(year\n ) == 4 and CheckNumber(day) and CheckNumber(month\n ) and CheckNumber(year):\n day = int(day)\n month = int(month)\n year = int(year)\n if (day > 0 and day < 32 and month > 0 and month < 13 and \n year > 2000 and year < 3000):\n keepAsking = False\n else:\n print(askAgainMessage)\n else:\n print(askAgainMessage)\n return answer\n\n\n<mask token>\n", "step-2": "def LimitedInput(message, limit, isNumber=False):\n \"\"\" Prompt user for input and continue to do so until input is valid.\n\n This function takes two required inputs, the message to display, and the\n limit of characters required. 
If the user enters something too long, they\n are prompted again until the input is correct.\n If the optional isNumber parameter is True, then it will also continue to\n prompt the user until a valid number is input.\n\n \"\"\"\n keepAsking = True\n while keepAsking:\n answer = input(message)\n if len(answer) > limit:\n print('The input must be', limit, 'characters or less.')\n else:\n keepAsking = False\n if isNumber is True and CheckNumber(answer) is False:\n print('The input must be a number.')\n keepAsking = True\n return answer\n\n\ndef CheckNumber(userInput):\n \"\"\" This function returns True if userInput can be converted to a number and\n returns False if it cannot. \"\"\"\n try:\n float(userInput)\n return True\n except ValueError:\n return False\n\n\ndef DateInput(message):\n \"\"\" This function prompts the user for a date using the message variable.\n User will continue to be prompted until the format is correct.\n\n The date format is very specific in the format DD/MM/YYYYY\n This function will confirm there are the right number of characters,\n the / are in the right place, the input are numbers, the days are between\n 1 and 31, the months are between 1 and 12, and the year is between 2000\n and 3000 (roll on year 3k bug!)\n \"\"\"\n askAgainMessage = 'The date must be in the format DD/MM/YYYY'\n keepAsking = True\n while keepAsking:\n answer = input(message)\n dateCheck = answer.split(sep='/')\n if len(dateCheck) is not 3:\n print(askAgainMessage)\n else:\n day = dateCheck[0]\n month = dateCheck[1]\n year = dateCheck[2]\n if len(day) == 2 and len(month) == 2 and len(year\n ) == 4 and CheckNumber(day) and CheckNumber(month\n ) and CheckNumber(year):\n day = int(day)\n month = int(month)\n year = int(year)\n if (day > 0 and day < 32 and month > 0 and month < 13 and \n year > 2000 and year < 3000):\n keepAsking = False\n else:\n print(askAgainMessage)\n else:\n print(askAgainMessage)\n return answer\n\n\n<mask token>\n", "step-3": "def 
LimitedInput(message, limit, isNumber=False):\n \"\"\" Prompt user for input and continue to do so until input is valid.\n\n This function takes two required inputs, the message to display, and the\n limit of characters required. If the user enters something too long, they\n are prompted again until the input is correct.\n If the optional isNumber parameter is True, then it will also continue to\n prompt the user until a valid number is input.\n\n \"\"\"\n keepAsking = True\n while keepAsking:\n answer = input(message)\n if len(answer) > limit:\n print('The input must be', limit, 'characters or less.')\n else:\n keepAsking = False\n if isNumber is True and CheckNumber(answer) is False:\n print('The input must be a number.')\n keepAsking = True\n return answer\n\n\ndef CheckNumber(userInput):\n \"\"\" This function returns True if userInput can be converted to a number and\n returns False if it cannot. \"\"\"\n try:\n float(userInput)\n return True\n except ValueError:\n return False\n\n\ndef DateInput(message):\n \"\"\" This function prompts the user for a date using the message variable.\n User will continue to be prompted until the format is correct.\n\n The date format is very specific in the format DD/MM/YYYYY\n This function will confirm there are the right number of characters,\n the / are in the right place, the input are numbers, the days are between\n 1 and 31, the months are between 1 and 12, and the year is between 2000\n and 3000 (roll on year 3k bug!)\n \"\"\"\n askAgainMessage = 'The date must be in the format DD/MM/YYYY'\n keepAsking = True\n while keepAsking:\n answer = input(message)\n dateCheck = answer.split(sep='/')\n if len(dateCheck) is not 3:\n print(askAgainMessage)\n else:\n day = dateCheck[0]\n month = dateCheck[1]\n year = dateCheck[2]\n if len(day) == 2 and len(month) == 2 and len(year\n ) == 4 and CheckNumber(day) and CheckNumber(month\n ) and CheckNumber(year):\n day = int(day)\n month = int(month)\n year = int(year)\n if (day > 0 and 
day < 32 and month > 0 and month < 13 and \n year > 2000 and year < 3000):\n keepAsking = False\n else:\n print(askAgainMessage)\n else:\n print(askAgainMessage)\n return answer\n\n\n<mask token>\nwhile float(hoursWorked) > 168:\n print('The number of hours worked is too large.')\n hoursWorked = LimitedInput('Number of hours worked: ', 6, True)\n<mask token>\nif hoursWorked > 37.5:\n standardHours = 37.5\n overtimeHours = hoursWorked - 37.5\nelse:\n standardHours = hoursWorked\n overtimeHours = 0\n<mask token>\nprint(\n \"\"\"\n P A Y S L I P\nWEEK ENDING {:}\nEmployee: {:}\nEmployee Number: {:}\n Earnings Deductions\n Hours Rate Total\nHours (normal) {:6.2f} {:6.2f} {:6.2f} Tax @ {:02.0f}% {:6.2f}\nHours (overtime) {:6.2f} {:6.2f} {:6.2f} Tax @ {:02.0f}% {:6.2f}\n\n Total pay: {:7.2f}\n Total deductions: {:7.2f}\n Net pay: {:7.2f}\n\"\"\"\n .format(weekEnding, employeeName, employeeNumber, standardHours,\n standardRate, standardPayTotal, standardTaxRate, standardTaxTotal,\n overtimeHours, overtimeRate, overtimePayTotal, overtimeTaxRate,\n overtimeTaxTotal, payTotal, totalDeductions, netPay))\n", "step-4": "def LimitedInput(message, limit, isNumber=False):\n \"\"\" Prompt user for input and continue to do so until input is valid.\n\n This function takes two required inputs, the message to display, and the\n limit of characters required. 
If the user enters something too long, they\n are prompted again until the input is correct.\n If the optional isNumber parameter is True, then it will also continue to\n prompt the user until a valid number is input.\n\n \"\"\"\n keepAsking = True\n while keepAsking:\n answer = input(message)\n if len(answer) > limit:\n print('The input must be', limit, 'characters or less.')\n else:\n keepAsking = False\n if isNumber is True and CheckNumber(answer) is False:\n print('The input must be a number.')\n keepAsking = True\n return answer\n\n\ndef CheckNumber(userInput):\n \"\"\" This function returns True if userInput can be converted to a number and\n returns False if it cannot. \"\"\"\n try:\n float(userInput)\n return True\n except ValueError:\n return False\n\n\ndef DateInput(message):\n \"\"\" This function prompts the user for a date using the message variable.\n User will continue to be prompted until the format is correct.\n\n The date format is very specific in the format DD/MM/YYYYY\n This function will confirm there are the right number of characters,\n the / are in the right place, the input are numbers, the days are between\n 1 and 31, the months are between 1 and 12, and the year is between 2000\n and 3000 (roll on year 3k bug!)\n \"\"\"\n askAgainMessage = 'The date must be in the format DD/MM/YYYY'\n keepAsking = True\n while keepAsking:\n answer = input(message)\n dateCheck = answer.split(sep='/')\n if len(dateCheck) is not 3:\n print(askAgainMessage)\n else:\n day = dateCheck[0]\n month = dateCheck[1]\n year = dateCheck[2]\n if len(day) == 2 and len(month) == 2 and len(year\n ) == 4 and CheckNumber(day) and CheckNumber(month\n ) and CheckNumber(year):\n day = int(day)\n month = int(month)\n year = int(year)\n if (day > 0 and day < 32 and month > 0 and month < 13 and \n year > 2000 and year < 3000):\n keepAsking = False\n else:\n print(askAgainMessage)\n else:\n print(askAgainMessage)\n return answer\n\n\nemployeeName = LimitedInput('Employee Name: ', 
20)\nemployeeNumber = LimitedInput('Employee Number: ', 10)\nweekEnding = DateInput('Week ending: ')\nhoursWorked = LimitedInput('Number of hours worked: ', 6, True)\nwhile float(hoursWorked) > 168:\n print('The number of hours worked is too large.')\n hoursWorked = LimitedInput('Number of hours worked: ', 6, True)\nstandardRate = LimitedInput('Hourly Rate: ', 6, True)\novertimeMultiplier = LimitedInput('Overtime Rate: ', 3, True)\nstandardTaxRate = LimitedInput('Standard Tax Rate: ', 2, True)\novertimeTaxRate = LimitedInput('Overtime Tax Rate: ', 2, True)\nhoursWorked = float(hoursWorked)\nstandardRate = float(standardRate)\novertimeMultiplier = float(overtimeMultiplier)\nstandardTaxRate = float(standardTaxRate)\novertimeTaxRate = float(overtimeTaxRate)\nif hoursWorked > 37.5:\n standardHours = 37.5\n overtimeHours = hoursWorked - 37.5\nelse:\n standardHours = hoursWorked\n overtimeHours = 0\nstandardPayTotal = standardHours * standardRate\novertimeRate = overtimeMultiplier * standardRate\novertimePayTotal = overtimeHours * overtimeRate\nstandardTaxTotal = standardPayTotal * standardTaxRate / 100\novertimeTaxTotal = overtimePayTotal * overtimeTaxRate / 100\npayTotal = standardPayTotal + overtimePayTotal\ntotalDeductions = standardTaxTotal + overtimeTaxTotal\nnetPay = payTotal - totalDeductions\nprint(\n \"\"\"\n P A Y S L I P\nWEEK ENDING {:}\nEmployee: {:}\nEmployee Number: {:}\n Earnings Deductions\n Hours Rate Total\nHours (normal) {:6.2f} {:6.2f} {:6.2f} Tax @ {:02.0f}% {:6.2f}\nHours (overtime) {:6.2f} {:6.2f} {:6.2f} Tax @ {:02.0f}% {:6.2f}\n\n Total pay: {:7.2f}\n Total deductions: {:7.2f}\n Net pay: {:7.2f}\n\"\"\"\n .format(weekEnding, employeeName, employeeNumber, standardHours,\n standardRate, standardPayTotal, standardTaxRate, standardTaxTotal,\n overtimeHours, overtimeRate, overtimePayTotal, overtimeTaxRate,\n overtimeTaxTotal, payTotal, totalDeductions, netPay))\n", "step-5": 
"###############################################################################\n# Programming Essentials B8IT102 Assessment #\n# Student: Barry Sheppard ID: 10387786 #\n# Problem 1 #\n###############################################################################\n\n\n###############################################################################\n# Functions #\n###############################################################################\n\ndef LimitedInput(message, limit, isNumber=False):\n \"\"\" Prompt user for input and continue to do so until input is valid.\n\n This function takes two required inputs, the message to display, and the\n limit of characters required. If the user enters something too long, they\n are prompted again until the input is correct.\n If the optional isNumber parameter is True, then it will also continue to\n prompt the user until a valid number is input.\n\n \"\"\"\n keepAsking = True\n while keepAsking:\n answer = input(message)\n if len(answer) > limit:\n print(\"The input must be\", limit, \"characters or less.\")\n else:\n keepAsking = False\n if isNumber is True and CheckNumber(answer) is False:\n print(\"The input must be a number.\")\n keepAsking = True\n return answer\n\n\ndef CheckNumber(userInput):\n \"\"\" This function returns True if userInput can be converted to a number and\n returns False if it cannot. 
\"\"\"\n try:\n float(userInput)\n return True\n except(ValueError):\n return False\n\n\ndef DateInput(message):\n \"\"\" This function prompts the user for a date using the message variable.\n User will continue to be prompted until the format is correct.\n\n The date format is very specific in the format DD/MM/YYYYY\n This function will confirm there are the right number of characters,\n the / are in the right place, the input are numbers, the days are between\n 1 and 31, the months are between 1 and 12, and the year is between 2000\n and 3000 (roll on year 3k bug!)\n \"\"\"\n askAgainMessage = \"The date must be in the format DD/MM/YYYY\"\n keepAsking = True\n while keepAsking:\n answer = input(message)\n # First we check if there are two / by splitting using / and looking\n # for 3 items in the returned list.\n dateCheck = answer.split(sep=\"/\")\n if len(dateCheck) is not 3:\n print(askAgainMessage)\n else:\n # If all is order, we can assign the 3 items to day, month, year\n day = dateCheck[0]\n month = dateCheck[1]\n year = dateCheck[2]\n # Next we check each item has the right amount of characters\n # and they can all be converted into numbers.\n if (len(day) == 2 and len(month) == 2 and len(year) == 4 and\n CheckNumber(day) and CheckNumber(month) and\n CheckNumber(year)):\n day = int(day)\n month = int(month)\n year = int(year)\n if (day > 0 and day < 32 and month > 0 and month < 13 and\n year > 2000 and year < 3000):\n keepAsking = False\n else:\n print(askAgainMessage)\n else:\n print(askAgainMessage)\n return answer\n\n\n###############################################################################\n# Prompt the user for the required input #\n###############################################################################\n\n# Ask the user to input the required details\nemployeeName = LimitedInput(\"Employee Name: \", 20) # Example Mark Bate\nemployeeNumber = LimitedInput(\"Employee Number: \", 10) # Example 123456789A\nweekEnding = DateInput(\"Week 
ending: \") # Example 26/01/2018\nhoursWorked = LimitedInput(\"Number of hours worked: \", 6, True) # Example 42.5\n\n# As there are only 168 hours in the week this is a check to prevent errors\n# This could be modified to a lower number based on legal limit\nwhile float(hoursWorked) > 168:\n print(\"The number of hours worked is too large.\")\n hoursWorked = LimitedInput(\"Number of hours worked: \", 6, True)\n\nstandardRate = LimitedInput(\"Hourly Rate: \", 6, True) # Example 10.50\novertimeMultiplier = LimitedInput(\"Overtime Rate: \", 3, True) # Example 1.5\nstandardTaxRate = LimitedInput(\"Standard Tax Rate: \", 2, True) # Example 20\novertimeTaxRate = LimitedInput(\"Overtime Tax Rate: \", 2, True) # Example 50\n\n# Cnvert input to numbers, during the input we validated these as numerals\nhoursWorked = float(hoursWorked)\nstandardRate = float(standardRate)\novertimeMultiplier = float(overtimeMultiplier)\nstandardTaxRate = float(standardTaxRate)\novertimeTaxRate = float(overtimeTaxRate)\n\n\n###############################################################################\n# Calculate required details for ouput #\n###############################################################################\n\n# Check if more than standard hours have been worked\nif hoursWorked > 37.50:\n standardHours = 37.50\n overtimeHours = hoursWorked - 37.50\nelse:\n standardHours = hoursWorked\n overtimeHours = 0\n# Complete additional calculations for pay and deductions\nstandardPayTotal = standardHours * standardRate\novertimeRate = overtimeMultiplier * standardRate # As overtime is multiplier\novertimePayTotal = overtimeHours * overtimeRate\nstandardTaxTotal = (standardPayTotal * standardTaxRate)/100\novertimeTaxTotal = (overtimePayTotal * overtimeTaxRate)/100\npayTotal = standardPayTotal + overtimePayTotal\ntotalDeductions = standardTaxTotal + overtimeTaxTotal\nnetPay = payTotal - totalDeductions\n\n\n###############################################################################\n# 
Printing out the Payslip #\n###############################################################################\n\n# Output is one big chunk of text with the variables inserted using the format\n# function, this lets us define the float variables as two digit decimals.\n\nprint(\"\"\"\n P A Y S L I P\nWEEK ENDING {:}\nEmployee: {:}\nEmployee Number: {:}\n Earnings Deductions\n Hours Rate Total\nHours (normal) {:6.2f} {:6.2f} {:6.2f} Tax @ {:02.0f}% {:6.2f}\nHours (overtime) {:6.2f} {:6.2f} {:6.2f} Tax @ {:02.0f}% {:6.2f}\n\n Total pay: {:7.2f}\n Total deductions: {:7.2f}\n Net pay: {:7.2f}\n\"\"\".format(weekEnding, employeeName, employeeNumber, standardHours,\n standardRate, standardPayTotal, standardTaxRate, standardTaxTotal,\n overtimeHours, overtimeRate, overtimePayTotal, overtimeTaxRate,\n overtimeTaxTotal, payTotal, totalDeductions, netPay))\n", "step-ids": [ 2, 3, 4, 5, 6 ] }
[ 2, 3, 4, 5, 6 ]
''' Created on 27 Mar 2015 @author: Jon ''' import matplotlib.pyplot as plt from numerical_functions import Timer import numerical_functions.numba_funcs.indexing as indexing import numpy as np import unittest class Test(unittest.TestCase): def test_take(self): x = np.linspace( 0, 100 ) idx = np.random.random_integers( 0, 50, 20 ) result = indexing.take( x, idx ) expected = np.take( x, idx ) np.testing.assert_array_equal( expected, result ) def test_take_comparison(self): x = np.arange( 1e6 ) idx = np.random.random_integers( 0, 1e5, 1e6 ) indexing.take( x, idx ) np.take( x, idx ) with Timer( 'numba' ) as nbtimer: indexing.take( x, idx ) with Timer( 'numpy' ) as nptimer: np.take( x, idx ) ratio = nbtimer.interval / nptimer.interval print( 'numba version of take took %0.2f as long as numpy'%ratio) def test_square_take(self): X = np.random.random_integers( 0, 50, 25 ).reshape( 5, 5 ) idx = np.arange( 0, 4, 2 ) result = np.empty( ( idx.shape[0], idx.shape[0] ) ) indexing.square_take_to_out( X, idx, result ) print( result ) expected = X.take( idx, axis=0 ).take( idx, axis=1 ) print( expected ) np.testing.assert_array_equal( expected, result ) def test_square_take_to_out(self): X = np.arange(25).reshape(5,5) idx = np.arange( 0, 4, 2 ) result = np.empty( ( idx.shape[0], idx.shape[0] ) ) indexing.square_take_to_out( X, idx, result ) print( result ) expected = X.take( idx, axis=0 ).take( idx, axis=1 ) print( expected ) np.testing.assert_array_equal( expected, result ) def test_square_take_performance(self): X = np.arange(25).reshape(5,5) idx = np.arange( 0, 4, 2 ) result = np.empty( ( idx.shape[0], idx.shape[0] ) ) indexing.square_take_to_out( X, idx, result ) result2 = indexing.square_take( X, idx ) np.testing.assert_array_equal( result, result2 ) num_tests = 1000 nbts = [] nbts2 = [] npts = [] ms = ( 10, 20, 40, 80, 160 )#, 320, 640 ) for m in ms: X = np.arange(m*m).reshape(m,m) idx = np.random.random_integers( 0, m-1, m//2 ) result = np.empty( ( idx.shape[0], idx.shape[0] 
) ) with Timer( 'numba' ) as nbt: for _ in range( num_tests ): indexing.square_take_to_out( X, idx, result ) nbts.append( nbt.interval ) with Timer( 'numba2' ) as nbt: for _ in range( num_tests ): r=indexing.square_take( X, idx ) nbts2.append( nbt.interval ) with Timer( 'numpy') as npt: for _ in range(num_tests): X.take( idx, axis=0 ).take( idx, axis=1 ) npts.append( npt.interval ) plt.plot( ms, nbts, label='nb to out' ) plt.plot( ms, nbts2, label='nb new result') plt.plot( ms, npts, label='np' ) plt.title( 'square_take_to_out performance test') plt.legend(loc='center left', bbox_to_anchor=(1, 0.5)) plt.show() def test_square_and_rect_take_to_out(self): X = np.arange( 100 ).reshape( (10, 10 ) ) idx0 = np.arange( 0, 4, 2 ) idx1 = np.arange( 4, 6 ) result = np.empty( ( idx0.shape[0], idx0.shape[0]+idx1.shape[0] ) ) indexing.square_and_rect_take_to_out( X, idx0, idx1, result ) np.testing.assert_array_equal( result[:,:2], indexing.square_take( X, idx0 ) ) r2 = np.array( [ [ 4, 5 ], [24, 25 ] ] ) np.testing.assert_array_equal( r2, result[:,2:]) def test_get_resample_indices(self): raw_index = np.arange( 10 ) resampled_index = np.arange( 1, 10, 2 ) result = indexing.get_resample_indices(raw_index, resampled_index) expected = np.arange( 0, 10, 2 ) np.testing.assert_array_equal( expected, result ) def test_take_upper_off_diagonal(self): X = np.array( [[ 1, 2, 3], [ np.nan, 5, 6], [ np.nan, np.nan, 9]]) idx = np.array( [ 0, 1 ] ) expected = np.array( [ 2 ] ) actual = indexing.take_upper_off_diagonal( X, idx ) np.testing.assert_array_equal( actual, expected ) idx = np.array( [ 1, 2 ] ) expected = np.array( [ 6 ] ) actual = indexing.take_upper_off_diagonal( X, idx ) np.testing.assert_array_equal( actual, expected ) idx = np.array( [ 0, 2 ] ) expected = np.array( [ 3 ] ) actual = indexing.take_upper_off_diagonal( X, idx ) np.testing.assert_array_equal( actual, expected ) idx = np.array( [ 0, 1, 2 ] ) expected = np.array( [ 2, 3, 6 ] ) actual = indexing.take_upper_off_diagonal( 
X, idx ) np.testing.assert_array_equal( actual, expected ) if __name__ == "__main__": #import sys;sys.argv = ['', 'Test.testName'] unittest.main()
normal
{ "blob_id": "ee80169afd4741854eff8619822a857bbf757575", "index": 291, "step-1": "<mask token>\n\n\nclass Test(unittest.TestCase):\n <mask token>\n\n def test_take_comparison(self):\n x = np.arange(1000000.0)\n idx = np.random.random_integers(0, 100000.0, 1000000.0)\n indexing.take(x, idx)\n np.take(x, idx)\n with Timer('numba') as nbtimer:\n indexing.take(x, idx)\n with Timer('numpy') as nptimer:\n np.take(x, idx)\n ratio = nbtimer.interval / nptimer.interval\n print('numba version of take took %0.2f as long as numpy' % ratio)\n\n def test_square_take(self):\n X = np.random.random_integers(0, 50, 25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n print(result)\n expected = X.take(idx, axis=0).take(idx, axis=1)\n print(expected)\n np.testing.assert_array_equal(expected, result)\n\n def test_square_take_to_out(self):\n X = np.arange(25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n print(result)\n expected = X.take(idx, axis=0).take(idx, axis=1)\n print(expected)\n np.testing.assert_array_equal(expected, result)\n <mask token>\n <mask token>\n\n def test_get_resample_indices(self):\n raw_index = np.arange(10)\n resampled_index = np.arange(1, 10, 2)\n result = indexing.get_resample_indices(raw_index, resampled_index)\n expected = np.arange(0, 10, 2)\n np.testing.assert_array_equal(expected, result)\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass Test(unittest.TestCase):\n\n def test_take(self):\n x = np.linspace(0, 100)\n idx = np.random.random_integers(0, 50, 20)\n result = indexing.take(x, idx)\n expected = np.take(x, idx)\n np.testing.assert_array_equal(expected, result)\n\n def test_take_comparison(self):\n x = np.arange(1000000.0)\n idx = np.random.random_integers(0, 100000.0, 1000000.0)\n indexing.take(x, idx)\n np.take(x, idx)\n with Timer('numba') as 
nbtimer:\n indexing.take(x, idx)\n with Timer('numpy') as nptimer:\n np.take(x, idx)\n ratio = nbtimer.interval / nptimer.interval\n print('numba version of take took %0.2f as long as numpy' % ratio)\n\n def test_square_take(self):\n X = np.random.random_integers(0, 50, 25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n print(result)\n expected = X.take(idx, axis=0).take(idx, axis=1)\n print(expected)\n np.testing.assert_array_equal(expected, result)\n\n def test_square_take_to_out(self):\n X = np.arange(25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n print(result)\n expected = X.take(idx, axis=0).take(idx, axis=1)\n print(expected)\n np.testing.assert_array_equal(expected, result)\n <mask token>\n <mask token>\n\n def test_get_resample_indices(self):\n raw_index = np.arange(10)\n resampled_index = np.arange(1, 10, 2)\n result = indexing.get_resample_indices(raw_index, resampled_index)\n expected = np.arange(0, 10, 2)\n np.testing.assert_array_equal(expected, result)\n\n def test_take_upper_off_diagonal(self):\n X = np.array([[1, 2, 3], [np.nan, 5, 6], [np.nan, np.nan, 9]])\n idx = np.array([0, 1])\n expected = np.array([2])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n idx = np.array([1, 2])\n expected = np.array([6])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n idx = np.array([0, 2])\n expected = np.array([3])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n idx = np.array([0, 1, 2])\n expected = np.array([2, 3, 6])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass Test(unittest.TestCase):\n\n def 
test_take(self):\n x = np.linspace(0, 100)\n idx = np.random.random_integers(0, 50, 20)\n result = indexing.take(x, idx)\n expected = np.take(x, idx)\n np.testing.assert_array_equal(expected, result)\n\n def test_take_comparison(self):\n x = np.arange(1000000.0)\n idx = np.random.random_integers(0, 100000.0, 1000000.0)\n indexing.take(x, idx)\n np.take(x, idx)\n with Timer('numba') as nbtimer:\n indexing.take(x, idx)\n with Timer('numpy') as nptimer:\n np.take(x, idx)\n ratio = nbtimer.interval / nptimer.interval\n print('numba version of take took %0.2f as long as numpy' % ratio)\n\n def test_square_take(self):\n X = np.random.random_integers(0, 50, 25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n print(result)\n expected = X.take(idx, axis=0).take(idx, axis=1)\n print(expected)\n np.testing.assert_array_equal(expected, result)\n\n def test_square_take_to_out(self):\n X = np.arange(25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n print(result)\n expected = X.take(idx, axis=0).take(idx, axis=1)\n print(expected)\n np.testing.assert_array_equal(expected, result)\n\n def test_square_take_performance(self):\n X = np.arange(25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n result2 = indexing.square_take(X, idx)\n np.testing.assert_array_equal(result, result2)\n num_tests = 1000\n nbts = []\n nbts2 = []\n npts = []\n ms = 10, 20, 40, 80, 160\n for m in ms:\n X = np.arange(m * m).reshape(m, m)\n idx = np.random.random_integers(0, m - 1, m // 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n with Timer('numba') as nbt:\n for _ in range(num_tests):\n indexing.square_take_to_out(X, idx, result)\n nbts.append(nbt.interval)\n with Timer('numba2') as nbt:\n for _ in range(num_tests):\n r = 
indexing.square_take(X, idx)\n nbts2.append(nbt.interval)\n with Timer('numpy') as npt:\n for _ in range(num_tests):\n X.take(idx, axis=0).take(idx, axis=1)\n npts.append(npt.interval)\n plt.plot(ms, nbts, label='nb to out')\n plt.plot(ms, nbts2, label='nb new result')\n plt.plot(ms, npts, label='np')\n plt.title('square_take_to_out performance test')\n plt.legend(loc='center left', bbox_to_anchor=(1, 0.5))\n plt.show()\n <mask token>\n\n def test_get_resample_indices(self):\n raw_index = np.arange(10)\n resampled_index = np.arange(1, 10, 2)\n result = indexing.get_resample_indices(raw_index, resampled_index)\n expected = np.arange(0, 10, 2)\n np.testing.assert_array_equal(expected, result)\n\n def test_take_upper_off_diagonal(self):\n X = np.array([[1, 2, 3], [np.nan, 5, 6], [np.nan, np.nan, 9]])\n idx = np.array([0, 1])\n expected = np.array([2])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n idx = np.array([1, 2])\n expected = np.array([6])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n idx = np.array([0, 2])\n expected = np.array([3])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n idx = np.array([0, 1, 2])\n expected = np.array([2, 3, 6])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n\n\n<mask token>\n", "step-4": "<mask token>\nimport matplotlib.pyplot as plt\nfrom numerical_functions import Timer\nimport numerical_functions.numba_funcs.indexing as indexing\nimport numpy as np\nimport unittest\n\n\nclass Test(unittest.TestCase):\n\n def test_take(self):\n x = np.linspace(0, 100)\n idx = np.random.random_integers(0, 50, 20)\n result = indexing.take(x, idx)\n expected = np.take(x, idx)\n np.testing.assert_array_equal(expected, result)\n\n def test_take_comparison(self):\n x = np.arange(1000000.0)\n idx = np.random.random_integers(0, 
100000.0, 1000000.0)\n indexing.take(x, idx)\n np.take(x, idx)\n with Timer('numba') as nbtimer:\n indexing.take(x, idx)\n with Timer('numpy') as nptimer:\n np.take(x, idx)\n ratio = nbtimer.interval / nptimer.interval\n print('numba version of take took %0.2f as long as numpy' % ratio)\n\n def test_square_take(self):\n X = np.random.random_integers(0, 50, 25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n print(result)\n expected = X.take(idx, axis=0).take(idx, axis=1)\n print(expected)\n np.testing.assert_array_equal(expected, result)\n\n def test_square_take_to_out(self):\n X = np.arange(25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n print(result)\n expected = X.take(idx, axis=0).take(idx, axis=1)\n print(expected)\n np.testing.assert_array_equal(expected, result)\n\n def test_square_take_performance(self):\n X = np.arange(25).reshape(5, 5)\n idx = np.arange(0, 4, 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n indexing.square_take_to_out(X, idx, result)\n result2 = indexing.square_take(X, idx)\n np.testing.assert_array_equal(result, result2)\n num_tests = 1000\n nbts = []\n nbts2 = []\n npts = []\n ms = 10, 20, 40, 80, 160\n for m in ms:\n X = np.arange(m * m).reshape(m, m)\n idx = np.random.random_integers(0, m - 1, m // 2)\n result = np.empty((idx.shape[0], idx.shape[0]))\n with Timer('numba') as nbt:\n for _ in range(num_tests):\n indexing.square_take_to_out(X, idx, result)\n nbts.append(nbt.interval)\n with Timer('numba2') as nbt:\n for _ in range(num_tests):\n r = indexing.square_take(X, idx)\n nbts2.append(nbt.interval)\n with Timer('numpy') as npt:\n for _ in range(num_tests):\n X.take(idx, axis=0).take(idx, axis=1)\n npts.append(npt.interval)\n plt.plot(ms, nbts, label='nb to out')\n plt.plot(ms, nbts2, label='nb new result')\n plt.plot(ms, npts, label='np')\n 
plt.title('square_take_to_out performance test')\n plt.legend(loc='center left', bbox_to_anchor=(1, 0.5))\n plt.show()\n\n def test_square_and_rect_take_to_out(self):\n X = np.arange(100).reshape((10, 10))\n idx0 = np.arange(0, 4, 2)\n idx1 = np.arange(4, 6)\n result = np.empty((idx0.shape[0], idx0.shape[0] + idx1.shape[0]))\n indexing.square_and_rect_take_to_out(X, idx0, idx1, result)\n np.testing.assert_array_equal(result[:, :2], indexing.square_take(X,\n idx0))\n r2 = np.array([[4, 5], [24, 25]])\n np.testing.assert_array_equal(r2, result[:, 2:])\n\n def test_get_resample_indices(self):\n raw_index = np.arange(10)\n resampled_index = np.arange(1, 10, 2)\n result = indexing.get_resample_indices(raw_index, resampled_index)\n expected = np.arange(0, 10, 2)\n np.testing.assert_array_equal(expected, result)\n\n def test_take_upper_off_diagonal(self):\n X = np.array([[1, 2, 3], [np.nan, 5, 6], [np.nan, np.nan, 9]])\n idx = np.array([0, 1])\n expected = np.array([2])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n idx = np.array([1, 2])\n expected = np.array([6])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n idx = np.array([0, 2])\n expected = np.array([3])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n idx = np.array([0, 1, 2])\n expected = np.array([2, 3, 6])\n actual = indexing.take_upper_off_diagonal(X, idx)\n np.testing.assert_array_equal(actual, expected)\n\n\nif __name__ == '__main__':\n unittest.main()\n", "step-5": "'''\nCreated on 27 Mar 2015\n\n@author: Jon\n'''\n\nimport matplotlib.pyplot as plt\nfrom numerical_functions import Timer\nimport numerical_functions.numba_funcs.indexing as indexing\nimport numpy as np\nimport unittest\n\n\nclass Test(unittest.TestCase):\n \n def test_take(self):\n x = np.linspace( 0, 100 )\n idx = np.random.random_integers( 0, 50, 20 )\n result = indexing.take( 
x, idx )\n expected = np.take( x, idx )\n np.testing.assert_array_equal( expected, result )\n \n def test_take_comparison(self):\n x = np.arange( 1e6 )\n idx = np.random.random_integers( 0, 1e5, 1e6 )\n \n indexing.take( x, idx )\n np.take( x, idx )\n \n with Timer( 'numba' ) as nbtimer:\n indexing.take( x, idx )\n \n with Timer( 'numpy' ) as nptimer:\n np.take( x, idx )\n \n ratio = nbtimer.interval / nptimer.interval\n print( 'numba version of take took %0.2f as long as numpy'%ratio) \n \n \n def test_square_take(self):\n\n X = np.random.random_integers( 0, 50, 25 ).reshape( 5, 5 )\n idx = np.arange( 0, 4, 2 )\n result = np.empty( ( idx.shape[0], idx.shape[0] ) )\n indexing.square_take_to_out( X, idx, result )\n print( result )\n \n expected = X.take( idx, axis=0 ).take( idx, axis=1 )\n print( expected )\n \n np.testing.assert_array_equal( expected, result )\n \n def test_square_take_to_out(self):\n X = np.arange(25).reshape(5,5)\n idx = np.arange( 0, 4, 2 )\n result = np.empty( ( idx.shape[0], idx.shape[0] ) )\n indexing.square_take_to_out( X, idx, result )\n print( result )\n \n expected = X.take( idx, axis=0 ).take( idx, axis=1 )\n print( expected )\n \n np.testing.assert_array_equal( expected, result )\n \n def test_square_take_performance(self):\n X = np.arange(25).reshape(5,5)\n idx = np.arange( 0, 4, 2 )\n result = np.empty( ( idx.shape[0], idx.shape[0] ) )\n indexing.square_take_to_out( X, idx, result )\n \n result2 = indexing.square_take( X, idx )\n \n np.testing.assert_array_equal( result, result2 )\n\n num_tests = 1000\n \n nbts = []\n nbts2 = []\n npts = [] \n \n ms = ( 10, 20, 40, 80, 160 )#, 320, 640 )\n for m in ms:\n X = np.arange(m*m).reshape(m,m)\n idx = np.random.random_integers( 0, m-1, m//2 )\n result = np.empty( ( idx.shape[0], idx.shape[0] ) )\n with Timer( 'numba' ) as nbt:\n for _ in range( num_tests ):\n indexing.square_take_to_out( X, idx, result )\n nbts.append( nbt.interval ) \n \n with Timer( 'numba2' ) as nbt:\n for _ in range( 
num_tests ):\n r=indexing.square_take( X, idx ) \n nbts2.append( nbt.interval ) \n \n with Timer( 'numpy') as npt:\n for _ in range(num_tests):\n X.take( idx, axis=0 ).take( idx, axis=1 )\n npts.append( npt.interval ) \n \n plt.plot( ms, nbts, label='nb to out' )\n plt.plot( ms, nbts2, label='nb new result')\n plt.plot( ms, npts, label='np' )\n plt.title( 'square_take_to_out performance test')\n plt.legend(loc='center left', bbox_to_anchor=(1, 0.5))\n plt.show()\n \n def test_square_and_rect_take_to_out(self):\n \n X = np.arange( 100 ).reshape( (10, 10 ) )\n idx0 = np.arange( 0, 4, 2 )\n idx1 = np.arange( 4, 6 )\n \n result = np.empty( ( idx0.shape[0], idx0.shape[0]+idx1.shape[0] ) )\n indexing.square_and_rect_take_to_out( X, idx0, idx1, result )\n \n np.testing.assert_array_equal( result[:,:2], indexing.square_take( X, idx0 ) )\n r2 = np.array( [ [ 4, 5 ], [24, 25 ] ] )\n np.testing.assert_array_equal( r2, result[:,2:]) \n\n def test_get_resample_indices(self):\n \n raw_index = np.arange( 10 )\n resampled_index = np.arange( 1, 10, 2 )\n\n result = indexing.get_resample_indices(raw_index, resampled_index)\n expected = np.arange( 0, 10, 2 )\n \n np.testing.assert_array_equal( expected, result )\n\n def test_take_upper_off_diagonal(self):\n\n X = np.array( [[ 1, 2, 3],\n [ np.nan, 5, 6],\n [ np.nan, np.nan, 9]])\n\n idx = np.array( [ 0, 1 ] )\n expected = np.array( [ 2 ] )\n actual = indexing.take_upper_off_diagonal( X, idx )\n np.testing.assert_array_equal( actual, expected )\n\n idx = np.array( [ 1, 2 ] )\n expected = np.array( [ 6 ] )\n actual = indexing.take_upper_off_diagonal( X, idx )\n np.testing.assert_array_equal( actual, expected )\n\n idx = np.array( [ 0, 2 ] )\n expected = np.array( [ 3 ] )\n actual = indexing.take_upper_off_diagonal( X, idx )\n np.testing.assert_array_equal( actual, expected )\n\n idx = np.array( [ 0, 1, 2 ] )\n expected = np.array( [ 2, 3, 6 ] )\n actual = indexing.take_upper_off_diagonal( X, idx )\n np.testing.assert_array_equal( 
actual, expected )\n\n\n\nif __name__ == \"__main__\":\n #import sys;sys.argv = ['', 'Test.testName']\n unittest.main()", "step-ids": [ 5, 7, 8, 11, 12 ] }
[ 5, 7, 8, 11, 12 ]
import pygame from pygame.locals import * pygame.init() ttt = pygame.display.set_mode((300,325)) #loome mänguakna pygame.display.set_caption = ("Trips-Traps-Trull") võitja = None def init_tabel(ttt): taust = pygame.Surface(ttt.get_size()) taust = taust.convert() taust.fill((250,250,250)) #tõmbame jooned pygame.draw.line (taust, (0,0,0), (100,0), (100,300), 2) #vertikaalsed jooned pygame.draw.line (taust, (0,0,0), (200,0), (200,300), 2) pygame.draw.line (taust, (0,0,0), (0,100), (300,100), 2) #horisontaalsed jooned pygame.draw.line (taust, (0,0,0), (0,200), (300,200), 2) return taust def näita_tabelit (ttt, tabel): hetkeseis(tabel) ttt.blit (tabel, (0,0)) pygame.display.flip() def hiire_positsioon_tabelis (Xkoordinaat, Ykoordinaat): if (Ykoordinaat < 100): #millisele reale klikib rida = 0 elif (Ykoordinaat < 200): rida = 1 else: rida = 2 if (Xkoordinaat < 100): #millisele veerule klikib veerg = 0 elif (Xkoordinaat < 200): veerg = 1 else: veerg = 2 return (rida, veerg) def klikk_tabelis (tabel): #teeme kindlaks kuhu klikiti global joonestik, XO (Xkoordinaat, Ykoordinaat) = pygame.mouse.get_pos() (rida, veerg) = hiire_positsioon_tabelis (Xkoordinaat, Ykoordinaat) if joonestik[rida][veerg] == 'X' or joonestik[rida][veerg] == 'O': #kontrollime kas lahter on kasutusel return #lahter on juba kasutusel joonistamine (tabel, rida, veerg, XO) #joonista X või O if (XO == 'X'): XO = 'O' #käigu üleandmine teisele inimesele else: XO = 'X' def joonistamine (tabel, tabelirida, tabeliveerg, Tähis): Xkeskkoht = tabeliveerg * 100 + 50 #leiame keskkoha Ykeskkoht = tabelirida * 100 + 50 if (Tähis == 'O'): #joonistame O pygame.draw.circle (tabel, (0,0,0), (Xkeskkoht, Ykeskkoht), 44, 2) else: pygame.draw.line (tabel, (0,0,0), (Xkeskkoht - 22, Ykeskkoht - 22), (Xkeskkoht + 22, Ykeskkoht + 22), 2) #joonistame X pygame.draw.line (tabel, (0,0,0), (Xkeskkoht + 22, Ykeskkoht - 22), (Xkeskkoht - 22, Ykeskkoht + 22), 2) joonestik[tabelirida][tabeliveerg] = Tähis #märgime lahtri kasutatuks def 
mängu_võitja(tabel): #kontrollib, kas kumbki võitis global joonestik, võitja for rida in range (0, 3): #kontrollime ridu if joonestik [rida][0] == joonestik[rida][1] == joonestik[rida][2] and joonestik [rida][0] is not None: võitja = joonestik[rida][0] #see rida võitis pygame.draw.line (tabel, (250,0,0), (0, (rida + 1)*100 - 50), (300, (rida + 1)*100 - 50), 2) break for veerg in range (0, 3): #kontrollime veerge if joonestik[0][veerg] == joonestik[1][veerg] == joonestik[2][veerg] and joonestik[0][veerg] is not None: võitja = joonestik[0][veerg] #see veerg võitis pygame.draw.line (tabel, (250,0,0), ((veerg + 1)* 100 - 50, 0), ((veerg + 1)* 100 - 50, 300), 2) break if joonestik[0][0] == joonestik[1][1] == joonestik[2][2] and joonestik[0][0] is not None: #kontrollime diagonaale võitja = joonestik[0][0] #vasakult paremale diagonaal võitis pygame.draw.line (tabel, (250,0,0), (50, 50), (250, 250), 2) if joonestik[0][2] == joonestik[1][1] == joonestik[2][0] and joonestik[0][2] is not None: võitja = joonestik[0][2] #paremalt vasakule diagonaal võitis pygame.draw.line (tabel, (250,0,0), (250, 50), (50, 250), 2) def hetkeseis (tabel): #kuva hetkeseis(kelle käik/kes võitis) global XO, võitja if võitja is None: sõnum = XO + " käib" else: sõnum = võitja + " võitis!" font = pygame.font.Font(None, 24) tekst = font.render(sõnum, 1, (0,0,0)) #kopeerime sõnumi mänguaknas tabel.fill ((250, 250, 250), (0, 300, 300, 25)) tabel.blit (tekst, (10, 300)) XO = 'X' #X alustab joonestik = [ [ None, None, None ], #tühjad lahtrid [ None, None, None ], [ None, None, None ] ] tabel = init_tabel(ttt) jooksutab = 1 while jooksutab == 1: for event in pygame.event.get(): if event.type is QUIT: jooksutab = 0 elif event.type is MOUSEBUTTONDOWN: klikk_tabelis(tabel) mängu_võitja(tabel) #kontrollib võitjat peale igat käiku näita_tabelit(ttt,tabel) #uuendab mängulauda if võitja is not None: break
normal
{ "blob_id": "a667c4cb0a30ee67fe982bb96ece6bb75f25f110", "index": 7084, "step-1": "<mask token>\n\n\ndef näita_tabelit(ttt, tabel):\n hetkeseis(tabel)\n ttt.blit(tabel, (0, 0))\n pygame.display.flip()\n\n\ndef hiire_positsioon_tabelis(Xkoordinaat, Ykoordinaat):\n if Ykoordinaat < 100:\n rida = 0\n elif Ykoordinaat < 200:\n rida = 1\n else:\n rida = 2\n if Xkoordinaat < 100:\n veerg = 0\n elif Xkoordinaat < 200:\n veerg = 1\n else:\n veerg = 2\n return rida, veerg\n\n\ndef klikk_tabelis(tabel):\n global joonestik, XO\n Xkoordinaat, Ykoordinaat = pygame.mouse.get_pos()\n rida, veerg = hiire_positsioon_tabelis(Xkoordinaat, Ykoordinaat)\n if joonestik[rida][veerg] == 'X' or joonestik[rida][veerg] == 'O':\n return\n joonistamine(tabel, rida, veerg, XO)\n if XO == 'X':\n XO = 'O'\n else:\n XO = 'X'\n\n\ndef joonistamine(tabel, tabelirida, tabeliveerg, Tähis):\n Xkeskkoht = tabeliveerg * 100 + 50\n Ykeskkoht = tabelirida * 100 + 50\n if Tähis == 'O':\n pygame.draw.circle(tabel, (0, 0, 0), (Xkeskkoht, Ykeskkoht), 44, 2)\n else:\n pygame.draw.line(tabel, (0, 0, 0), (Xkeskkoht - 22, Ykeskkoht - 22),\n (Xkeskkoht + 22, Ykeskkoht + 22), 2)\n pygame.draw.line(tabel, (0, 0, 0), (Xkeskkoht + 22, Ykeskkoht - 22),\n (Xkeskkoht - 22, Ykeskkoht + 22), 2)\n joonestik[tabelirida][tabeliveerg] = Tähis\n\n\ndef mängu_võitja(tabel):\n global joonestik, võitja\n for rida in range(0, 3):\n if joonestik[rida][0] == joonestik[rida][1] == joonestik[rida][2\n ] and joonestik[rida][0] is not None:\n võitja = joonestik[rida][0]\n pygame.draw.line(tabel, (250, 0, 0), (0, (rida + 1) * 100 - 50),\n (300, (rida + 1) * 100 - 50), 2)\n break\n for veerg in range(0, 3):\n if joonestik[0][veerg] == joonestik[1][veerg] == joonestik[2][veerg\n ] and joonestik[0][veerg] is not None:\n võitja = joonestik[0][veerg]\n pygame.draw.line(tabel, (250, 0, 0), ((veerg + 1) * 100 - 50, 0\n ), ((veerg + 1) * 100 - 50, 300), 2)\n break\n if joonestik[0][0] == joonestik[1][1] == joonestik[2][2] and joonestik[0][0\n ] is 
not None:\n võitja = joonestik[0][0]\n pygame.draw.line(tabel, (250, 0, 0), (50, 50), (250, 250), 2)\n if joonestik[0][2] == joonestik[1][1] == joonestik[2][0] and joonestik[0][2\n ] is not None:\n võitja = joonestik[0][2]\n pygame.draw.line(tabel, (250, 0, 0), (250, 50), (50, 250), 2)\n\n\ndef hetkeseis(tabel):\n global XO, võitja\n if võitja is None:\n sõnum = XO + ' käib'\n else:\n sõnum = võitja + ' võitis!'\n font = pygame.font.Font(None, 24)\n tekst = font.render(sõnum, 1, (0, 0, 0))\n tabel.fill((250, 250, 250), (0, 300, 300, 25))\n tabel.blit(tekst, (10, 300))\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef init_tabel(ttt):\n taust = pygame.Surface(ttt.get_size())\n taust = taust.convert()\n taust.fill((250, 250, 250))\n pygame.draw.line(taust, (0, 0, 0), (100, 0), (100, 300), 2)\n pygame.draw.line(taust, (0, 0, 0), (200, 0), (200, 300), 2)\n pygame.draw.line(taust, (0, 0, 0), (0, 100), (300, 100), 2)\n pygame.draw.line(taust, (0, 0, 0), (0, 200), (300, 200), 2)\n return taust\n\n\ndef näita_tabelit(ttt, tabel):\n hetkeseis(tabel)\n ttt.blit(tabel, (0, 0))\n pygame.display.flip()\n\n\ndef hiire_positsioon_tabelis(Xkoordinaat, Ykoordinaat):\n if Ykoordinaat < 100:\n rida = 0\n elif Ykoordinaat < 200:\n rida = 1\n else:\n rida = 2\n if Xkoordinaat < 100:\n veerg = 0\n elif Xkoordinaat < 200:\n veerg = 1\n else:\n veerg = 2\n return rida, veerg\n\n\ndef klikk_tabelis(tabel):\n global joonestik, XO\n Xkoordinaat, Ykoordinaat = pygame.mouse.get_pos()\n rida, veerg = hiire_positsioon_tabelis(Xkoordinaat, Ykoordinaat)\n if joonestik[rida][veerg] == 'X' or joonestik[rida][veerg] == 'O':\n return\n joonistamine(tabel, rida, veerg, XO)\n if XO == 'X':\n XO = 'O'\n else:\n XO = 'X'\n\n\ndef joonistamine(tabel, tabelirida, tabeliveerg, Tähis):\n Xkeskkoht = tabeliveerg * 100 + 50\n Ykeskkoht = tabelirida * 100 + 50\n if Tähis == 'O':\n pygame.draw.circle(tabel, (0, 0, 0), (Xkeskkoht, Ykeskkoht), 44, 2)\n else:\n pygame.draw.line(tabel, (0, 0, 0), (Xkeskkoht - 
22, Ykeskkoht - 22),\n (Xkeskkoht + 22, Ykeskkoht + 22), 2)\n pygame.draw.line(tabel, (0, 0, 0), (Xkeskkoht + 22, Ykeskkoht - 22),\n (Xkeskkoht - 22, Ykeskkoht + 22), 2)\n joonestik[tabelirida][tabeliveerg] = Tähis\n\n\ndef mängu_võitja(tabel):\n global joonestik, võitja\n for rida in range(0, 3):\n if joonestik[rida][0] == joonestik[rida][1] == joonestik[rida][2\n ] and joonestik[rida][0] is not None:\n võitja = joonestik[rida][0]\n pygame.draw.line(tabel, (250, 0, 0), (0, (rida + 1) * 100 - 50),\n (300, (rida + 1) * 100 - 50), 2)\n break\n for veerg in range(0, 3):\n if joonestik[0][veerg] == joonestik[1][veerg] == joonestik[2][veerg\n ] and joonestik[0][veerg] is not None:\n võitja = joonestik[0][veerg]\n pygame.draw.line(tabel, (250, 0, 0), ((veerg + 1) * 100 - 50, 0\n ), ((veerg + 1) * 100 - 50, 300), 2)\n break\n if joonestik[0][0] == joonestik[1][1] == joonestik[2][2] and joonestik[0][0\n ] is not None:\n võitja = joonestik[0][0]\n pygame.draw.line(tabel, (250, 0, 0), (50, 50), (250, 250), 2)\n if joonestik[0][2] == joonestik[1][1] == joonestik[2][0] and joonestik[0][2\n ] is not None:\n võitja = joonestik[0][2]\n pygame.draw.line(tabel, (250, 0, 0), (250, 50), (50, 250), 2)\n\n\ndef hetkeseis(tabel):\n global XO, võitja\n if võitja is None:\n sõnum = XO + ' käib'\n else:\n sõnum = võitja + ' võitis!'\n font = pygame.font.Font(None, 24)\n tekst = font.render(sõnum, 1, (0, 0, 0))\n tabel.fill((250, 250, 250), (0, 300, 300, 25))\n tabel.blit(tekst, (10, 300))\n\n\n<mask token>\n", "step-3": "<mask token>\npygame.init()\nttt = pygame.display.set_mode((300, 325))\npygame.display.set_caption = 'Trips-Traps-Trull'\nvõitja = None\n\n\ndef init_tabel(ttt):\n taust = pygame.Surface(ttt.get_size())\n taust = taust.convert()\n taust.fill((250, 250, 250))\n pygame.draw.line(taust, (0, 0, 0), (100, 0), (100, 300), 2)\n pygame.draw.line(taust, (0, 0, 0), (200, 0), (200, 300), 2)\n pygame.draw.line(taust, (0, 0, 0), (0, 100), (300, 100), 2)\n pygame.draw.line(taust, (0, 0, 
0), (0, 200), (300, 200), 2)\n return taust\n\n\ndef näita_tabelit(ttt, tabel):\n hetkeseis(tabel)\n ttt.blit(tabel, (0, 0))\n pygame.display.flip()\n\n\ndef hiire_positsioon_tabelis(Xkoordinaat, Ykoordinaat):\n if Ykoordinaat < 100:\n rida = 0\n elif Ykoordinaat < 200:\n rida = 1\n else:\n rida = 2\n if Xkoordinaat < 100:\n veerg = 0\n elif Xkoordinaat < 200:\n veerg = 1\n else:\n veerg = 2\n return rida, veerg\n\n\ndef klikk_tabelis(tabel):\n global joonestik, XO\n Xkoordinaat, Ykoordinaat = pygame.mouse.get_pos()\n rida, veerg = hiire_positsioon_tabelis(Xkoordinaat, Ykoordinaat)\n if joonestik[rida][veerg] == 'X' or joonestik[rida][veerg] == 'O':\n return\n joonistamine(tabel, rida, veerg, XO)\n if XO == 'X':\n XO = 'O'\n else:\n XO = 'X'\n\n\ndef joonistamine(tabel, tabelirida, tabeliveerg, Tähis):\n Xkeskkoht = tabeliveerg * 100 + 50\n Ykeskkoht = tabelirida * 100 + 50\n if Tähis == 'O':\n pygame.draw.circle(tabel, (0, 0, 0), (Xkeskkoht, Ykeskkoht), 44, 2)\n else:\n pygame.draw.line(tabel, (0, 0, 0), (Xkeskkoht - 22, Ykeskkoht - 22),\n (Xkeskkoht + 22, Ykeskkoht + 22), 2)\n pygame.draw.line(tabel, (0, 0, 0), (Xkeskkoht + 22, Ykeskkoht - 22),\n (Xkeskkoht - 22, Ykeskkoht + 22), 2)\n joonestik[tabelirida][tabeliveerg] = Tähis\n\n\ndef mängu_võitja(tabel):\n global joonestik, võitja\n for rida in range(0, 3):\n if joonestik[rida][0] == joonestik[rida][1] == joonestik[rida][2\n ] and joonestik[rida][0] is not None:\n võitja = joonestik[rida][0]\n pygame.draw.line(tabel, (250, 0, 0), (0, (rida + 1) * 100 - 50),\n (300, (rida + 1) * 100 - 50), 2)\n break\n for veerg in range(0, 3):\n if joonestik[0][veerg] == joonestik[1][veerg] == joonestik[2][veerg\n ] and joonestik[0][veerg] is not None:\n võitja = joonestik[0][veerg]\n pygame.draw.line(tabel, (250, 0, 0), ((veerg + 1) * 100 - 50, 0\n ), ((veerg + 1) * 100 - 50, 300), 2)\n break\n if joonestik[0][0] == joonestik[1][1] == joonestik[2][2] and joonestik[0][0\n ] is not None:\n võitja = joonestik[0][0]\n 
pygame.draw.line(tabel, (250, 0, 0), (50, 50), (250, 250), 2)\n if joonestik[0][2] == joonestik[1][1] == joonestik[2][0] and joonestik[0][2\n ] is not None:\n võitja = joonestik[0][2]\n pygame.draw.line(tabel, (250, 0, 0), (250, 50), (50, 250), 2)\n\n\ndef hetkeseis(tabel):\n global XO, võitja\n if võitja is None:\n sõnum = XO + ' käib'\n else:\n sõnum = võitja + ' võitis!'\n font = pygame.font.Font(None, 24)\n tekst = font.render(sõnum, 1, (0, 0, 0))\n tabel.fill((250, 250, 250), (0, 300, 300, 25))\n tabel.blit(tekst, (10, 300))\n\n\nXO = 'X'\njoonestik = [[None, None, None], [None, None, None], [None, None, None]]\ntabel = init_tabel(ttt)\njooksutab = 1\nwhile jooksutab == 1:\n for event in pygame.event.get():\n if event.type is QUIT:\n jooksutab = 0\n elif event.type is MOUSEBUTTONDOWN:\n klikk_tabelis(tabel)\n mängu_võitja(tabel)\n näita_tabelit(ttt, tabel)\n if võitja is not None:\n break\n", "step-4": "import pygame\nfrom pygame.locals import *\npygame.init()\nttt = pygame.display.set_mode((300, 325))\npygame.display.set_caption = 'Trips-Traps-Trull'\nvõitja = None\n\n\ndef init_tabel(ttt):\n taust = pygame.Surface(ttt.get_size())\n taust = taust.convert()\n taust.fill((250, 250, 250))\n pygame.draw.line(taust, (0, 0, 0), (100, 0), (100, 300), 2)\n pygame.draw.line(taust, (0, 0, 0), (200, 0), (200, 300), 2)\n pygame.draw.line(taust, (0, 0, 0), (0, 100), (300, 100), 2)\n pygame.draw.line(taust, (0, 0, 0), (0, 200), (300, 200), 2)\n return taust\n\n\ndef näita_tabelit(ttt, tabel):\n hetkeseis(tabel)\n ttt.blit(tabel, (0, 0))\n pygame.display.flip()\n\n\ndef hiire_positsioon_tabelis(Xkoordinaat, Ykoordinaat):\n if Ykoordinaat < 100:\n rida = 0\n elif Ykoordinaat < 200:\n rida = 1\n else:\n rida = 2\n if Xkoordinaat < 100:\n veerg = 0\n elif Xkoordinaat < 200:\n veerg = 1\n else:\n veerg = 2\n return rida, veerg\n\n\ndef klikk_tabelis(tabel):\n global joonestik, XO\n Xkoordinaat, Ykoordinaat = pygame.mouse.get_pos()\n rida, veerg = 
hiire_positsioon_tabelis(Xkoordinaat, Ykoordinaat)\n if joonestik[rida][veerg] == 'X' or joonestik[rida][veerg] == 'O':\n return\n joonistamine(tabel, rida, veerg, XO)\n if XO == 'X':\n XO = 'O'\n else:\n XO = 'X'\n\n\ndef joonistamine(tabel, tabelirida, tabeliveerg, Tähis):\n Xkeskkoht = tabeliveerg * 100 + 50\n Ykeskkoht = tabelirida * 100 + 50\n if Tähis == 'O':\n pygame.draw.circle(tabel, (0, 0, 0), (Xkeskkoht, Ykeskkoht), 44, 2)\n else:\n pygame.draw.line(tabel, (0, 0, 0), (Xkeskkoht - 22, Ykeskkoht - 22),\n (Xkeskkoht + 22, Ykeskkoht + 22), 2)\n pygame.draw.line(tabel, (0, 0, 0), (Xkeskkoht + 22, Ykeskkoht - 22),\n (Xkeskkoht - 22, Ykeskkoht + 22), 2)\n joonestik[tabelirida][tabeliveerg] = Tähis\n\n\ndef mängu_võitja(tabel):\n global joonestik, võitja\n for rida in range(0, 3):\n if joonestik[rida][0] == joonestik[rida][1] == joonestik[rida][2\n ] and joonestik[rida][0] is not None:\n võitja = joonestik[rida][0]\n pygame.draw.line(tabel, (250, 0, 0), (0, (rida + 1) * 100 - 50),\n (300, (rida + 1) * 100 - 50), 2)\n break\n for veerg in range(0, 3):\n if joonestik[0][veerg] == joonestik[1][veerg] == joonestik[2][veerg\n ] and joonestik[0][veerg] is not None:\n võitja = joonestik[0][veerg]\n pygame.draw.line(tabel, (250, 0, 0), ((veerg + 1) * 100 - 50, 0\n ), ((veerg + 1) * 100 - 50, 300), 2)\n break\n if joonestik[0][0] == joonestik[1][1] == joonestik[2][2] and joonestik[0][0\n ] is not None:\n võitja = joonestik[0][0]\n pygame.draw.line(tabel, (250, 0, 0), (50, 50), (250, 250), 2)\n if joonestik[0][2] == joonestik[1][1] == joonestik[2][0] and joonestik[0][2\n ] is not None:\n võitja = joonestik[0][2]\n pygame.draw.line(tabel, (250, 0, 0), (250, 50), (50, 250), 2)\n\n\ndef hetkeseis(tabel):\n global XO, võitja\n if võitja is None:\n sõnum = XO + ' käib'\n else:\n sõnum = võitja + ' võitis!'\n font = pygame.font.Font(None, 24)\n tekst = font.render(sõnum, 1, (0, 0, 0))\n tabel.fill((250, 250, 250), (0, 300, 300, 25))\n tabel.blit(tekst, (10, 300))\n\n\nXO = 
'X'\njoonestik = [[None, None, None], [None, None, None], [None, None, None]]\ntabel = init_tabel(ttt)\njooksutab = 1\nwhile jooksutab == 1:\n for event in pygame.event.get():\n if event.type is QUIT:\n jooksutab = 0\n elif event.type is MOUSEBUTTONDOWN:\n klikk_tabelis(tabel)\n mängu_võitja(tabel)\n näita_tabelit(ttt, tabel)\n if võitja is not None:\n break\n", "step-5": "import pygame\n\nfrom pygame.locals import *\n\npygame.init()\nttt = pygame.display.set_mode((300,325)) #loome mänguakna\npygame.display.set_caption = (\"Trips-Traps-Trull\")\n\nvõitja = None\n\n\n\ndef init_tabel(ttt):\n taust = pygame.Surface(ttt.get_size())\n taust = taust.convert()\n taust.fill((250,250,250))\n \n #tõmbame jooned\n \n pygame.draw.line (taust, (0,0,0), (100,0), (100,300), 2) #vertikaalsed jooned\n pygame.draw.line (taust, (0,0,0), (200,0), (200,300), 2)\n\n pygame.draw.line (taust, (0,0,0), (0,100), (300,100), 2) #horisontaalsed jooned\n pygame.draw.line (taust, (0,0,0), (0,200), (300,200), 2)\n return taust\n\n\ndef näita_tabelit (ttt, tabel):\n hetkeseis(tabel)\n ttt.blit (tabel, (0,0))\n pygame.display.flip()\n\ndef hiire_positsioon_tabelis (Xkoordinaat, Ykoordinaat):\n if (Ykoordinaat < 100): #millisele reale klikib\n rida = 0\n elif (Ykoordinaat < 200):\n rida = 1\n else:\n rida = 2\n if (Xkoordinaat < 100): #millisele veerule klikib\n veerg = 0\n elif (Xkoordinaat < 200):\n veerg = 1\n else:\n veerg = 2\n return (rida, veerg)\n\ndef klikk_tabelis (tabel): #teeme kindlaks kuhu klikiti\n global joonestik, XO\n\n (Xkoordinaat, Ykoordinaat) = pygame.mouse.get_pos()\n\n (rida, veerg) = hiire_positsioon_tabelis (Xkoordinaat, Ykoordinaat)\n\n if joonestik[rida][veerg] == 'X' or joonestik[rida][veerg] == 'O': #kontrollime kas lahter on kasutusel\n return #lahter on juba kasutusel\n\n joonistamine (tabel, rida, veerg, XO) #joonista X või O\n \n if (XO == 'X'):\n XO = 'O' #käigu üleandmine teisele inimesele\n else:\n XO = 'X'\n\n\ndef joonistamine (tabel, tabelirida, tabeliveerg, 
Tähis):\n Xkeskkoht = tabeliveerg * 100 + 50\n #leiame keskkoha\n Ykeskkoht = tabelirida * 100 + 50\n\n if (Tähis == 'O'): #joonistame O\n pygame.draw.circle (tabel, (0,0,0), (Xkeskkoht, Ykeskkoht), 44, 2)\n\n else:\n pygame.draw.line (tabel, (0,0,0), (Xkeskkoht - 22, Ykeskkoht - 22), (Xkeskkoht + 22, Ykeskkoht + 22), 2)\n #joonistame X\n pygame.draw.line (tabel, (0,0,0), (Xkeskkoht + 22, Ykeskkoht - 22), (Xkeskkoht - 22, Ykeskkoht + 22), 2)\n\n joonestik[tabelirida][tabeliveerg] = Tähis #märgime lahtri kasutatuks\n\n\ndef mängu_võitja(tabel): #kontrollib, kas kumbki võitis\n global joonestik, võitja\n\n for rida in range (0, 3): #kontrollime ridu\n if joonestik [rida][0] == joonestik[rida][1] == joonestik[rida][2] and joonestik [rida][0] is not None:\n võitja = joonestik[rida][0] #see rida võitis\n pygame.draw.line (tabel, (250,0,0), (0, (rida + 1)*100 - 50), (300, (rida + 1)*100 - 50), 2)\n break\n\n for veerg in range (0, 3): #kontrollime veerge\n if joonestik[0][veerg] == joonestik[1][veerg] == joonestik[2][veerg] and joonestik[0][veerg] is not None:\n võitja = joonestik[0][veerg] #see veerg võitis\n pygame.draw.line (tabel, (250,0,0), ((veerg + 1)* 100 - 50, 0), ((veerg + 1)* 100 - 50, 300), 2)\n break\n\n if joonestik[0][0] == joonestik[1][1] == joonestik[2][2] and joonestik[0][0] is not None: #kontrollime diagonaale\n võitja = joonestik[0][0] #vasakult paremale diagonaal võitis\n pygame.draw.line (tabel, (250,0,0), (50, 50), (250, 250), 2)\n\n if joonestik[0][2] == joonestik[1][1] == joonestik[2][0] and joonestik[0][2] is not None:\n võitja = joonestik[0][2] #paremalt vasakule diagonaal võitis\n pygame.draw.line (tabel, (250,0,0), (250, 50), (50, 250), 2)\n\n\ndef hetkeseis (tabel): #kuva hetkeseis(kelle käik/kes võitis)\n global XO, võitja\n if võitja is None:\n sõnum = XO + \" käib\"\n else:\n sõnum = võitja + \" võitis!\"\n font = pygame.font.Font(None, 24)\n tekst = font.render(sõnum, 1, (0,0,0))\n#kopeerime sõnumi mänguaknas\n tabel.fill ((250, 250, 
250), (0, 300, 300, 25))\n tabel.blit (tekst, (10, 300))\n\n\nXO = 'X' #X alustab\n\njoonestik = [ [ None, None, None ], #tühjad lahtrid\n\n [ None, None, None ],\n\n [ None, None, None ] ]\n\ntabel = init_tabel(ttt)\njooksutab = 1\nwhile jooksutab == 1:\n for event in pygame.event.get():\n if event.type is QUIT:\n jooksutab = 0\n elif event.type is MOUSEBUTTONDOWN:\n klikk_tabelis(tabel)\n\n mängu_võitja(tabel) #kontrollib võitjat peale igat käiku\n\n näita_tabelit(ttt,tabel) #uuendab mängulauda\n if võitja is not None:\n break\n", "step-ids": [ 6, 7, 9, 10, 11 ] }
[ 6, 7, 9, 10, 11 ]
import sys if __name__ == '__main__': cases = sys.stdin.readline() for i in range(int(cases)): sys.stdin.readline() lineas, columnas = sys.stdin.readline().strip().split(" ") lineas = int(lineas) columnas = int(columnas) list_lines = [] for linea in range(lineas): list_lines.append(list(sys.stdin.readline().strip().lower())) numWords = int(sys.stdin.readline().strip()) list_words = [] for word in range(numWords): list_words.append(list(sys.stdin.readline().strip().lower())) for word in list_words: palEncont = False for fila in range(lineas): for colum in range(columnas): if list_lines[fila][colum] == word[0]: tamPalab = len(word) #Centro -> Derecha cont = 0 punt = 0 while( cont < tamPalab ): if colum+punt < columnas and list_lines[fila][colum+punt] == word[cont]: cont += 1 punt += 1 else: break if( cont == tamPalab ): print( "" + str(fila+1) + " " + str(colum+1) ) palEncont = True break #Centro -> Abajo-Derecha cont = 0 punt = 0 while( cont < tamPalab ): if colum+punt < columnas and fila+punt < lineas and list_lines[fila+punt][colum+punt] == word[cont]: cont += 1 punt += 1 else: break if( cont == tamPalab ): print( "" + str(fila+1) + " " + str(colum+1) ) palEncont = True break #Centro -> Abajo cont = 0 punt = 0 while( cont < tamPalab ): if fila+punt < lineas and list_lines[fila+punt][colum] == word[cont]: cont += 1 punt += 1 else: break if( cont == tamPalab ): print( "" + str(fila+1) + " " + str(colum+1) ) palEncont = True break #Centro -> Abajo-Izquierda cont = 0 punt = 0 while( cont < tamPalab ): if colum-punt >= 0 and fila+punt < lineas and list_lines[fila+punt][colum-punt] == word[cont]: cont += 1 punt += 1 else: break if( cont == tamPalab ): print( "" + str(fila+1) + " " + str(colum+1) ) palEncont = True break #Centro -> Izquierda cont = 0 punt = 0 while( cont < tamPalab ): if colum-punt >= 0 and list_lines[fila][colum-punt] == word[cont]: cont += 1 punt += 1 else: break if( cont == tamPalab ): print( "" + str(fila+1) + " " + str(colum+1) ) palEncont = True 
break #Centro -> Arriba-Izquierda cont = 0 punt = 0 while( cont < tamPalab ): if colum-punt >= 0 and fila-punt >= 0 and list_lines[fila-punt][colum-punt] == word[cont]: cont += 1 punt += 1 else: break if( cont == tamPalab ): print( "" + str(fila+1) + " " + str(colum+1) ) palEncont = True break #Centro -> Arriba cont = 0 punt = 0 while( cont < tamPalab ): if fila-punt >= 0 and list_lines[fila-punt][colum] == word[cont]: cont += 1 punt += 1 else: break if( cont == tamPalab ): print( "" + str(fila+1) + " " + str(colum+1) ) palEncont = True break #Centro -> Arriba-Derecha cont = 0 punt = 0 while( cont < tamPalab ): if colum+punt < columnas and fila-punt >= 0 and list_lines[fila-punt][colum+punt] == word[cont]: cont += 1 punt += 1 else: break if( cont == tamPalab ): print( "" + str(fila+1) + " " + str(colum+1) ) palEncont = True break if palEncont: break if palEncont: break; if i < int(cases)-1: print()
normal
{ "blob_id": "22909e41e4f9ad0280c22ec11ecfbccff87efae1", "index": 1402, "step-1": "<mask token>\n", "step-2": "<mask token>\nif __name__ == '__main__':\n cases = sys.stdin.readline()\n for i in range(int(cases)):\n sys.stdin.readline()\n lineas, columnas = sys.stdin.readline().strip().split(' ')\n lineas = int(lineas)\n columnas = int(columnas)\n list_lines = []\n for linea in range(lineas):\n list_lines.append(list(sys.stdin.readline().strip().lower()))\n numWords = int(sys.stdin.readline().strip())\n list_words = []\n for word in range(numWords):\n list_words.append(list(sys.stdin.readline().strip().lower()))\n for word in list_words:\n palEncont = False\n for fila in range(lineas):\n for colum in range(columnas):\n if list_lines[fila][colum] == word[0]:\n tamPalab = len(word)\n cont = 0\n punt = 0\n while cont < tamPalab:\n if colum + punt < columnas and list_lines[fila][\n colum + punt] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if (colum + punt < columnas and fila + punt <\n lineas and list_lines[fila + punt][colum +\n punt] == word[cont]):\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if fila + punt < lineas and list_lines[fila + punt\n ][colum] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if (colum - punt >= 0 and fila + punt < lineas and\n list_lines[fila + punt][colum - punt] ==\n word[cont]):\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if colum - punt >= 
0 and list_lines[fila][colum -\n punt] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if (colum - punt >= 0 and fila - punt >= 0 and \n list_lines[fila - punt][colum - punt] ==\n word[cont]):\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if fila - punt >= 0 and list_lines[fila - punt][\n colum] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if (colum + punt < columnas and fila - punt >= \n 0 and list_lines[fila - punt][colum + punt] ==\n word[cont]):\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n if palEncont:\n break\n if palEncont:\n break\n if i < int(cases) - 1:\n print()\n", "step-3": "import sys\nif __name__ == '__main__':\n cases = sys.stdin.readline()\n for i in range(int(cases)):\n sys.stdin.readline()\n lineas, columnas = sys.stdin.readline().strip().split(' ')\n lineas = int(lineas)\n columnas = int(columnas)\n list_lines = []\n for linea in range(lineas):\n list_lines.append(list(sys.stdin.readline().strip().lower()))\n numWords = int(sys.stdin.readline().strip())\n list_words = []\n for word in range(numWords):\n list_words.append(list(sys.stdin.readline().strip().lower()))\n for word in list_words:\n palEncont = False\n for fila in range(lineas):\n for colum in range(columnas):\n if list_lines[fila][colum] == word[0]:\n tamPalab = len(word)\n cont = 0\n punt = 0\n while cont < tamPalab:\n if colum + punt < columnas and list_lines[fila][\n colum + punt] == word[cont]:\n cont += 1\n punt += 1\n else:\n 
break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if (colum + punt < columnas and fila + punt <\n lineas and list_lines[fila + punt][colum +\n punt] == word[cont]):\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if fila + punt < lineas and list_lines[fila + punt\n ][colum] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if (colum - punt >= 0 and fila + punt < lineas and\n list_lines[fila + punt][colum - punt] ==\n word[cont]):\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if colum - punt >= 0 and list_lines[fila][colum -\n punt] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if (colum - punt >= 0 and fila - punt >= 0 and \n list_lines[fila - punt][colum - punt] ==\n word[cont]):\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if fila - punt >= 0 and list_lines[fila - punt][\n colum] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n cont = 0\n punt = 0\n while cont < tamPalab:\n if (colum + punt < columnas and fila - punt >= \n 0 and list_lines[fila - punt][colum + punt] ==\n word[cont]):\n cont += 1\n punt += 1\n else:\n 
break\n if cont == tamPalab:\n print('' + str(fila + 1) + ' ' + str(colum + 1))\n palEncont = True\n break\n if palEncont:\n break\n if palEncont:\n break\n if i < int(cases) - 1:\n print()\n", "step-4": "import sys\n\n\nif __name__ == '__main__':\n cases = sys.stdin.readline()\n for i in range(int(cases)):\n sys.stdin.readline()\n lineas, columnas = sys.stdin.readline().strip().split(\" \")\n lineas = int(lineas)\n columnas = int(columnas)\n list_lines = []\n\n for linea in range(lineas):\n list_lines.append(list(sys.stdin.readline().strip().lower()))\n \n numWords = int(sys.stdin.readline().strip())\n\n list_words = []\n\n for word in range(numWords):\n list_words.append(list(sys.stdin.readline().strip().lower()))\n\n for word in list_words:\n\n palEncont = False\n for fila in range(lineas):\n for colum in range(columnas):\n if list_lines[fila][colum] == word[0]:\n \n tamPalab = len(word)\n \n #Centro -> Derecha\n cont = 0\n punt = 0\n while( cont < tamPalab ):\n if colum+punt < columnas and list_lines[fila][colum+punt] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n \n if( cont == tamPalab ):\n print( \"\" + str(fila+1) + \" \" + str(colum+1) )\n palEncont = True\n break\n \n #Centro -> Abajo-Derecha\n cont = 0\n punt = 0\n while( cont < tamPalab ):\n if colum+punt < columnas and fila+punt < lineas and list_lines[fila+punt][colum+punt] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n \n if( cont == tamPalab ):\n print( \"\" + str(fila+1) + \" \" + str(colum+1) )\n palEncont = True\n break\n \n #Centro -> Abajo\n cont = 0\n punt = 0\n while( cont < tamPalab ):\n if fila+punt < lineas and list_lines[fila+punt][colum] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n \n if( cont == tamPalab ):\n print( \"\" + str(fila+1) + \" \" + str(colum+1) )\n palEncont = True\n break\n \n #Centro -> Abajo-Izquierda\n cont = 0\n punt = 0\n while( cont < tamPalab ):\n if colum-punt >= 0 and fila+punt < lineas and list_lines[fila+punt][colum-punt] == 
word[cont]:\n cont += 1\n punt += 1\n else:\n break\n \n if( cont == tamPalab ):\n print( \"\" + str(fila+1) + \" \" + str(colum+1) )\n palEncont = True\n break\n \n #Centro -> Izquierda\n cont = 0\n punt = 0\n while( cont < tamPalab ):\n if colum-punt >= 0 and list_lines[fila][colum-punt] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n \n if( cont == tamPalab ):\n print( \"\" + str(fila+1) + \" \" + str(colum+1) )\n palEncont = True\n break\n \n #Centro -> Arriba-Izquierda\n cont = 0\n punt = 0\n while( cont < tamPalab ):\n if colum-punt >= 0 and fila-punt >= 0 and list_lines[fila-punt][colum-punt] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n \n if( cont == tamPalab ):\n print( \"\" + str(fila+1) + \" \" + str(colum+1) )\n palEncont = True\n break\n \n #Centro -> Arriba\n cont = 0\n punt = 0\n while( cont < tamPalab ):\n if fila-punt >= 0 and list_lines[fila-punt][colum] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n \n if( cont == tamPalab ):\n print( \"\" + str(fila+1) + \" \" + str(colum+1) )\n palEncont = True\n break\n\n #Centro -> Arriba-Derecha\n cont = 0\n punt = 0\n while( cont < tamPalab ):\n if colum+punt < columnas and fila-punt >= 0 and list_lines[fila-punt][colum+punt] == word[cont]:\n cont += 1\n punt += 1\n else:\n break\n \n if( cont == tamPalab ):\n print( \"\" + str(fila+1) + \" \" + str(colum+1) )\n palEncont = True\n break\n \n \n if palEncont:\n break\n \n if palEncont:\n break;\n\n if i < int(cases)-1:\n print()\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> @routes.route('/signin', name='signin') class SigninEndpoint(PoolHTTPEndpoint): <|reserved_special_token_0|> <|reserved_special_token_0|> @back_to.setter def back_to(self, value: typing.Optional[str]): self.request.session['back_to'] = value def render_template(self, context: typing.Dict[str, typing.Any]={} ) ->Response: assert self.pool is not None if self.pool.username_attributes: email = (AuxiliaryIdentityAttribute.EMAIL in self.pool. username_attributes) phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self .pool.username_attributes) if email and phone_number: label = 'E-mail address or phone number' elif email: label = 'E-mail address' elif phone_number: label = 'Phone number' else: raise AssertionError() else: label = 'User name' context['username_label'] = label return self.templates(self.template, context=context) async def get(self): assert self.pool is not None back_to = self.request.query_params.get('back_to') reauth = bool_val(self.request.query_params.get('reauth')) if self.request.user.is_authenticated and not reauth: return RedirectResponse(back_to or self.success_page_url) parsed_back_to = urlparse(back_to) if (parsed_back_to.scheme and parsed_back_to.scheme != self.request .url.scheme or parsed_back_to.hostname and parsed_back_to. 
hostname != self.request.url.hostname): raise HTTPException(status_code=HTTP_400_BAD_REQUEST) if back_to is not None: self.back_to = back_to return self.render_template(context={'form': {'reauth': reauth}}) async def post(self): assert self.pool is not None form = await self.request.form() try: user = await async_(lambda : self.pool.query_user(form[ 'username']).one())() self.request.app.state.kdf.verify(user.password, form['password']) except Exception as e: logger.debug(f"failed login attempt: {form['username']} - {e!r}") return self.render_template(context={'form': form, 'alerts': [ 'No user registered with that user name and password.']}) self.per_pool_session['user_id'] = user.id return RedirectResponse(self.back_to or self.success_page_url, status_code=302) @routes.route('/signin/success', name='signin_success') class SignedinEndpoint(PoolHTTPEndpoint): template = 'pools/signin_success.html' async def get(self): return self.templates(self.template) @routes.route('/signout', name='signout', methods=['post']) class SignOutEndpoint(PoolHTTPEndpoint): async def post(self): form = await self.request.form() client_id = form.get('client_id') try: client = await async_(self.pool.clients.filter_by( oauth2_client_id=client_id).one)() except orm_exc.NoResultFound as e: raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e back_to = form.get('back_to') if back_to is None or back_to not in client.logout_uris: back_to = self.request.url_for('pools:signout_success', pool= self.pool.key) if self.request.user.is_authenticated: del self.per_pool_session['user_id'] return RedirectResponse(back_to, status_code=302) @routes.route('/signout/success', name='signout_success') class SignedOutEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates('pools/signout_success.html') @routes.route('/', name='index') class IndexEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates('pools/index.html') @routes.route('/.well-known/jwks.json', 
name='signin_success') class JWKSEndpoint(PoolHTTPEndpoint): async def get(self): keys = [] if isinstance(self.request.app.state.jwt_config.key, dict): public_jwk = build_jwt_public_key_from_private_key(self.request .app.state.jwt_config.key) public_jwk['use'] = 'sig' keys.append(public_jwk) return JSONResponse({'keys': keys}) <|reserved_special_token_1|> <|reserved_special_token_0|> class PoolHTTPEndpoint(ContextualHTTPEndpoint): <|reserved_special_token_0|> <|reserved_special_token_0|> @property def per_pool_session(self) ->typing.Dict[str, typing.Any]: pool = self.pool if pool is not None: return self.request.scope['session'].setdefault(pool.key, {}) else: return self.request.scope['session'] async def dispatch(self): if self.request.get(POOL_KEY) is None: raise HTTPException(status_code=HTTP_404_NOT_FOUND) await super().dispatch() <|reserved_special_token_0|> <|reserved_special_token_0|> @routes.route('/signin', name='signin') class SigninEndpoint(PoolHTTPEndpoint): template = 'pools/signin.html' @property def back_to(self) ->typing.Optional[str]: return self.request.session.get('back_to') @back_to.setter def back_to(self, value: typing.Optional[str]): self.request.session['back_to'] = value def render_template(self, context: typing.Dict[str, typing.Any]={} ) ->Response: assert self.pool is not None if self.pool.username_attributes: email = (AuxiliaryIdentityAttribute.EMAIL in self.pool. 
username_attributes) phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self .pool.username_attributes) if email and phone_number: label = 'E-mail address or phone number' elif email: label = 'E-mail address' elif phone_number: label = 'Phone number' else: raise AssertionError() else: label = 'User name' context['username_label'] = label return self.templates(self.template, context=context) async def get(self): assert self.pool is not None back_to = self.request.query_params.get('back_to') reauth = bool_val(self.request.query_params.get('reauth')) if self.request.user.is_authenticated and not reauth: return RedirectResponse(back_to or self.success_page_url) parsed_back_to = urlparse(back_to) if (parsed_back_to.scheme and parsed_back_to.scheme != self.request .url.scheme or parsed_back_to.hostname and parsed_back_to. hostname != self.request.url.hostname): raise HTTPException(status_code=HTTP_400_BAD_REQUEST) if back_to is not None: self.back_to = back_to return self.render_template(context={'form': {'reauth': reauth}}) async def post(self): assert self.pool is not None form = await self.request.form() try: user = await async_(lambda : self.pool.query_user(form[ 'username']).one())() self.request.app.state.kdf.verify(user.password, form['password']) except Exception as e: logger.debug(f"failed login attempt: {form['username']} - {e!r}") return self.render_template(context={'form': form, 'alerts': [ 'No user registered with that user name and password.']}) self.per_pool_session['user_id'] = user.id return RedirectResponse(self.back_to or self.success_page_url, status_code=302) @routes.route('/signin/success', name='signin_success') class SignedinEndpoint(PoolHTTPEndpoint): template = 'pools/signin_success.html' async def get(self): return self.templates(self.template) @routes.route('/signout', name='signout', methods=['post']) class SignOutEndpoint(PoolHTTPEndpoint): async def post(self): form = await self.request.form() client_id = form.get('client_id') try: 
client = await async_(self.pool.clients.filter_by( oauth2_client_id=client_id).one)() except orm_exc.NoResultFound as e: raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e back_to = form.get('back_to') if back_to is None or back_to not in client.logout_uris: back_to = self.request.url_for('pools:signout_success', pool= self.pool.key) if self.request.user.is_authenticated: del self.per_pool_session['user_id'] return RedirectResponse(back_to, status_code=302) @routes.route('/signout/success', name='signout_success') class SignedOutEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates('pools/signout_success.html') @routes.route('/', name='index') class IndexEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates('pools/index.html') @routes.route('/.well-known/jwks.json', name='signin_success') class JWKSEndpoint(PoolHTTPEndpoint): async def get(self): keys = [] if isinstance(self.request.app.state.jwt_config.key, dict): public_jwk = build_jwt_public_key_from_private_key(self.request .app.state.jwt_config.key) public_jwk['use'] = 'sig' keys.append(public_jwk) return JSONResponse({'keys': keys}) <|reserved_special_token_1|> <|reserved_special_token_0|> class PoolHTTPEndpoint(ContextualHTTPEndpoint): @property def templates(self): return lambda name, context={}, *args, **kwargs: typing.cast( WithTemplates, self.request).templates(name, {**context, 'pool': self.request.scope.get(POOL_KEY)}, *args, **kwargs) <|reserved_special_token_0|> @property def per_pool_session(self) ->typing.Dict[str, typing.Any]: pool = self.pool if pool is not None: return self.request.scope['session'].setdefault(pool.key, {}) else: return self.request.scope['session'] async def dispatch(self): if self.request.get(POOL_KEY) is None: raise HTTPException(status_code=HTTP_404_NOT_FOUND) await super().dispatch() <|reserved_special_token_0|> <|reserved_special_token_0|> @routes.route('/signin', name='signin') class SigninEndpoint(PoolHTTPEndpoint): template = 
'pools/signin.html' @property def back_to(self) ->typing.Optional[str]: return self.request.session.get('back_to') @back_to.setter def back_to(self, value: typing.Optional[str]): self.request.session['back_to'] = value def render_template(self, context: typing.Dict[str, typing.Any]={} ) ->Response: assert self.pool is not None if self.pool.username_attributes: email = (AuxiliaryIdentityAttribute.EMAIL in self.pool. username_attributes) phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self .pool.username_attributes) if email and phone_number: label = 'E-mail address or phone number' elif email: label = 'E-mail address' elif phone_number: label = 'Phone number' else: raise AssertionError() else: label = 'User name' context['username_label'] = label return self.templates(self.template, context=context) async def get(self): assert self.pool is not None back_to = self.request.query_params.get('back_to') reauth = bool_val(self.request.query_params.get('reauth')) if self.request.user.is_authenticated and not reauth: return RedirectResponse(back_to or self.success_page_url) parsed_back_to = urlparse(back_to) if (parsed_back_to.scheme and parsed_back_to.scheme != self.request .url.scheme or parsed_back_to.hostname and parsed_back_to. 
hostname != self.request.url.hostname): raise HTTPException(status_code=HTTP_400_BAD_REQUEST) if back_to is not None: self.back_to = back_to return self.render_template(context={'form': {'reauth': reauth}}) async def post(self): assert self.pool is not None form = await self.request.form() try: user = await async_(lambda : self.pool.query_user(form[ 'username']).one())() self.request.app.state.kdf.verify(user.password, form['password']) except Exception as e: logger.debug(f"failed login attempt: {form['username']} - {e!r}") return self.render_template(context={'form': form, 'alerts': [ 'No user registered with that user name and password.']}) self.per_pool_session['user_id'] = user.id return RedirectResponse(self.back_to or self.success_page_url, status_code=302) @routes.route('/signin/success', name='signin_success') class SignedinEndpoint(PoolHTTPEndpoint): template = 'pools/signin_success.html' async def get(self): return self.templates(self.template) @routes.route('/signout', name='signout', methods=['post']) class SignOutEndpoint(PoolHTTPEndpoint): async def post(self): form = await self.request.form() client_id = form.get('client_id') try: client = await async_(self.pool.clients.filter_by( oauth2_client_id=client_id).one)() except orm_exc.NoResultFound as e: raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e back_to = form.get('back_to') if back_to is None or back_to not in client.logout_uris: back_to = self.request.url_for('pools:signout_success', pool= self.pool.key) if self.request.user.is_authenticated: del self.per_pool_session['user_id'] return RedirectResponse(back_to, status_code=302) @routes.route('/signout/success', name='signout_success') class SignedOutEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates('pools/signout_success.html') @routes.route('/', name='index') class IndexEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates('pools/index.html') @routes.route('/.well-known/jwks.json', 
name='signin_success') class JWKSEndpoint(PoolHTTPEndpoint): async def get(self): keys = [] if isinstance(self.request.app.state.jwt_config.key, dict): public_jwk = build_jwt_public_key_from_private_key(self.request .app.state.jwt_config.key) public_jwk['use'] = 'sig' keys.append(public_jwk) return JSONResponse({'keys': keys}) <|reserved_special_token_1|> import logging import typing from urllib.parse import urlparse from sqlalchemy.orm import exc as orm_exc from starlette.exceptions import HTTPException from starlette.responses import JSONResponse, RedirectResponse, Response from starlette.routing import Router from starlette.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND from ...executor import async_ from ...middlewares import WithTemplates from ...utils import ContextualHTTPEndpoint from ..application import POOL_KEY from ..models import AuxiliaryIdentityAttribute, UserPool from ..utils import build_jwt_public_key_from_private_key logger = logging.getLogger(__name__) routes = Router() class PoolHTTPEndpoint(ContextualHTTPEndpoint): @property def templates(self): return lambda name, context={}, *args, **kwargs: typing.cast( WithTemplates, self.request).templates(name, {**context, 'pool': self.request.scope.get(POOL_KEY)}, *args, **kwargs) @property def pool(self) ->typing.Optional[UserPool]: return typing.cast(typing.Optional[UserPool], self.request.get( POOL_KEY)) @property def per_pool_session(self) ->typing.Dict[str, typing.Any]: pool = self.pool if pool is not None: return self.request.scope['session'].setdefault(pool.key, {}) else: return self.request.scope['session'] async def dispatch(self): if self.request.get(POOL_KEY) is None: raise HTTPException(status_code=HTTP_404_NOT_FOUND) await super().dispatch() @property def success_page_url(self): return self.request.url_for('pools:signin_success', pool=self.pool.key) def bool_val(v: typing.Optional[str]) ->bool: return v not in ('false', 'no', '0', None) @routes.route('/signin', name='signin') class 
SigninEndpoint(PoolHTTPEndpoint): template = 'pools/signin.html' @property def back_to(self) ->typing.Optional[str]: return self.request.session.get('back_to') @back_to.setter def back_to(self, value: typing.Optional[str]): self.request.session['back_to'] = value def render_template(self, context: typing.Dict[str, typing.Any]={} ) ->Response: assert self.pool is not None if self.pool.username_attributes: email = (AuxiliaryIdentityAttribute.EMAIL in self.pool. username_attributes) phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self .pool.username_attributes) if email and phone_number: label = 'E-mail address or phone number' elif email: label = 'E-mail address' elif phone_number: label = 'Phone number' else: raise AssertionError() else: label = 'User name' context['username_label'] = label return self.templates(self.template, context=context) async def get(self): assert self.pool is not None back_to = self.request.query_params.get('back_to') reauth = bool_val(self.request.query_params.get('reauth')) if self.request.user.is_authenticated and not reauth: return RedirectResponse(back_to or self.success_page_url) parsed_back_to = urlparse(back_to) if (parsed_back_to.scheme and parsed_back_to.scheme != self.request .url.scheme or parsed_back_to.hostname and parsed_back_to. 
hostname != self.request.url.hostname): raise HTTPException(status_code=HTTP_400_BAD_REQUEST) if back_to is not None: self.back_to = back_to return self.render_template(context={'form': {'reauth': reauth}}) async def post(self): assert self.pool is not None form = await self.request.form() try: user = await async_(lambda : self.pool.query_user(form[ 'username']).one())() self.request.app.state.kdf.verify(user.password, form['password']) except Exception as e: logger.debug(f"failed login attempt: {form['username']} - {e!r}") return self.render_template(context={'form': form, 'alerts': [ 'No user registered with that user name and password.']}) self.per_pool_session['user_id'] = user.id return RedirectResponse(self.back_to or self.success_page_url, status_code=302) @routes.route('/signin/success', name='signin_success') class SignedinEndpoint(PoolHTTPEndpoint): template = 'pools/signin_success.html' async def get(self): return self.templates(self.template) @routes.route('/signout', name='signout', methods=['post']) class SignOutEndpoint(PoolHTTPEndpoint): async def post(self): form = await self.request.form() client_id = form.get('client_id') try: client = await async_(self.pool.clients.filter_by( oauth2_client_id=client_id).one)() except orm_exc.NoResultFound as e: raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e back_to = form.get('back_to') if back_to is None or back_to not in client.logout_uris: back_to = self.request.url_for('pools:signout_success', pool= self.pool.key) if self.request.user.is_authenticated: del self.per_pool_session['user_id'] return RedirectResponse(back_to, status_code=302) @routes.route('/signout/success', name='signout_success') class SignedOutEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates('pools/signout_success.html') @routes.route('/', name='index') class IndexEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates('pools/index.html') @routes.route('/.well-known/jwks.json', 
name='signin_success') class JWKSEndpoint(PoolHTTPEndpoint): async def get(self): keys = [] if isinstance(self.request.app.state.jwt_config.key, dict): public_jwk = build_jwt_public_key_from_private_key(self.request .app.state.jwt_config.key) public_jwk['use'] = 'sig' keys.append(public_jwk) return JSONResponse({'keys': keys}) <|reserved_special_token_1|> # Copyright (c) 2020 Open Collector, Inc. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to # deal in the Software without restriction, including without limitation the # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or # sell copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER # DEALINGS IN THE SOFTWARE. 
import logging import typing from urllib.parse import urlparse from sqlalchemy.orm import exc as orm_exc from starlette.exceptions import HTTPException from starlette.responses import JSONResponse, RedirectResponse, Response from starlette.routing import Router from starlette.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND from ...executor import async_ from ...middlewares import WithTemplates from ...utils import ContextualHTTPEndpoint from ..application import POOL_KEY from ..models import AuxiliaryIdentityAttribute, UserPool from ..utils import build_jwt_public_key_from_private_key logger = logging.getLogger(__name__) routes = Router() class PoolHTTPEndpoint(ContextualHTTPEndpoint): @property def templates(self): return lambda name, context={}, *args, **kwargs: ( typing.cast(WithTemplates, self.request).templates( name, {**context, "pool": self.request.scope.get(POOL_KEY)}, *args, **kwargs, ) ) @property def pool(self) -> typing.Optional[UserPool]: return typing.cast(typing.Optional[UserPool], self.request.get(POOL_KEY)) @property def per_pool_session(self) -> typing.Dict[str, typing.Any]: pool = self.pool if pool is not None: return self.request.scope["session"].setdefault(pool.key, {}) else: return self.request.scope["session"] async def dispatch(self): if self.request.get(POOL_KEY) is None: raise HTTPException(status_code=HTTP_404_NOT_FOUND) await super().dispatch() @property def success_page_url(self): return self.request.url_for("pools:signin_success", pool=self.pool.key) def bool_val(v: typing.Optional[str]) -> bool: return v not in ("false", "no", "0", None) @routes.route("/signin", name="signin") class SigninEndpoint(PoolHTTPEndpoint): template = "pools/signin.html" @property def back_to(self) -> typing.Optional[str]: return self.request.session.get("back_to") @back_to.setter def back_to(self, value: typing.Optional[str]): self.request.session["back_to"] = value def render_template(self, context: typing.Dict[str, typing.Any] = {}) -> Response: 
assert self.pool is not None if self.pool.username_attributes: email = AuxiliaryIdentityAttribute.EMAIL in self.pool.username_attributes phone_number = ( AuxiliaryIdentityAttribute.PHONE_NUMBER in self.pool.username_attributes ) if email and phone_number: label = "E-mail address or phone number" elif email: label = "E-mail address" elif phone_number: label = "Phone number" else: raise AssertionError() else: label = "User name" context["username_label"] = label return self.templates(self.template, context=context) async def get(self): assert self.pool is not None back_to = self.request.query_params.get("back_to") reauth = bool_val(self.request.query_params.get("reauth")) if self.request.user.is_authenticated and not reauth: return RedirectResponse(back_to or self.success_page_url) parsed_back_to = urlparse(back_to) if ( parsed_back_to.scheme and parsed_back_to.scheme != self.request.url.scheme ) or ( parsed_back_to.hostname and parsed_back_to.hostname != self.request.url.hostname ): raise HTTPException(status_code=HTTP_400_BAD_REQUEST) if back_to is not None: self.back_to = back_to return self.render_template(context={"form": {"reauth": reauth}}) async def post(self): assert self.pool is not None form = await self.request.form() try: user = await async_(lambda: self.pool.query_user(form["username"]).one())() self.request.app.state.kdf.verify(user.password, form["password"]) except Exception as e: logger.debug(f"failed login attempt: {form['username']} - {e!r}") return self.render_template( context={ "form": form, "alerts": ["No user registered with that user name and password."], } ) self.per_pool_session["user_id"] = user.id return RedirectResponse(self.back_to or self.success_page_url, status_code=302) @routes.route("/signin/success", name="signin_success") class SignedinEndpoint(PoolHTTPEndpoint): template = "pools/signin_success.html" async def get(self): return self.templates(self.template) @routes.route("/signout", name="signout", methods=["post"]) class 
SignOutEndpoint(PoolHTTPEndpoint): async def post(self): form = await self.request.form() client_id = form.get("client_id") try: client = await async_( self.pool.clients.filter_by(oauth2_client_id=client_id).one )() except orm_exc.NoResultFound as e: raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e back_to = form.get("back_to") if back_to is None or back_to not in client.logout_uris: back_to = self.request.url_for("pools:signout_success", pool=self.pool.key) if self.request.user.is_authenticated: del self.per_pool_session["user_id"] return RedirectResponse(back_to, status_code=302) @routes.route("/signout/success", name="signout_success") class SignedOutEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates("pools/signout_success.html") @routes.route("/", name="index") class IndexEndpoint(PoolHTTPEndpoint): async def get(self): return self.templates("pools/index.html") @routes.route("/.well-known/jwks.json", name="signin_success") class JWKSEndpoint(PoolHTTPEndpoint): async def get(self): keys = [] if isinstance(self.request.app.state.jwt_config.key, dict): public_jwk = build_jwt_public_key_from_private_key( self.request.app.state.jwt_config.key ) public_jwk["use"] = "sig" keys.append(public_jwk) return JSONResponse( { "keys": keys, } )
flexible
{ "blob_id": "6e01e36170f3f08f2030dbd4dd91019936fb9f5c", "index": 849, "step-1": "<mask token>\n\n\n@routes.route('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n <mask token>\n <mask token>\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return 
self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\n@routes.route('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\n@routes.route('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\n@routes.route('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\n@routes.route('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\n@routes.route('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n", "step-2": "<mask token>\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n <mask token>\n <mask token>\n\n 
@property\n def per_pool_session(self) ->typing.Dict[str, typing.Any]:\n pool = self.pool\n if pool is not None:\n return self.request.scope['session'].setdefault(pool.key, {})\n else:\n return self.request.scope['session']\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n <mask token>\n\n\n<mask token>\n\n\n@routes.route('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return 
self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\n@routes.route('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\n@routes.route('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\n@routes.route('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\n@routes.route('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\n@routes.route('/.well-known/jwks.json', name='signin_success')\nclass 
JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n", "step-3": "<mask token>\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n\n @property\n def templates(self):\n return lambda name, context={}, *args, **kwargs: typing.cast(\n WithTemplates, self.request).templates(name, {**context, 'pool':\n self.request.scope.get(POOL_KEY)}, *args, **kwargs)\n <mask token>\n\n @property\n def per_pool_session(self) ->typing.Dict[str, typing.Any]:\n pool = self.pool\n if pool is not None:\n return self.request.scope['session'].setdefault(pool.key, {})\n else:\n return self.request.scope['session']\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n <mask token>\n\n\n<mask token>\n\n\n@routes.route('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return 
self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\n@routes.route('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\n@routes.route('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in 
client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\n@routes.route('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/signout_success.html')\n\n\n@routes.route('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\n@routes.route('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n", "step-4": "import logging\nimport typing\nfrom urllib.parse import urlparse\nfrom sqlalchemy.orm import exc as orm_exc\nfrom starlette.exceptions import HTTPException\nfrom starlette.responses import JSONResponse, RedirectResponse, Response\nfrom starlette.routing import Router\nfrom starlette.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND\nfrom ...executor import async_\nfrom ...middlewares import WithTemplates\nfrom ...utils import ContextualHTTPEndpoint\nfrom ..application import POOL_KEY\nfrom ..models import AuxiliaryIdentityAttribute, UserPool\nfrom ..utils import build_jwt_public_key_from_private_key\nlogger = logging.getLogger(__name__)\nroutes = Router()\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n\n @property\n def templates(self):\n return lambda name, context={}, *args, **kwargs: typing.cast(\n WithTemplates, self.request).templates(name, {**context, 'pool':\n self.request.scope.get(POOL_KEY)}, *args, **kwargs)\n\n @property\n def pool(self) ->typing.Optional[UserPool]:\n return 
typing.cast(typing.Optional[UserPool], self.request.get(\n POOL_KEY))\n\n @property\n def per_pool_session(self) ->typing.Dict[str, typing.Any]:\n pool = self.pool\n if pool is not None:\n return self.request.scope['session'].setdefault(pool.key, {})\n else:\n return self.request.scope['session']\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for('pools:signin_success', pool=self.pool.key)\n\n\ndef bool_val(v: typing.Optional[str]) ->bool:\n return v not in ('false', 'no', '0', None)\n\n\n@routes.route('/signin', name='signin')\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin.html'\n\n @property\n def back_to(self) ->typing.Optional[str]:\n return self.request.session.get('back_to')\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session['back_to'] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any]={}\n ) ->Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = (AuxiliaryIdentityAttribute.EMAIL in self.pool.\n username_attributes)\n phone_number = (AuxiliaryIdentityAttribute.PHONE_NUMBER in self\n .pool.username_attributes)\n if email and phone_number:\n label = 'E-mail address or phone number'\n elif email:\n label = 'E-mail address'\n elif phone_number:\n label = 'Phone number'\n else:\n raise AssertionError()\n else:\n label = 'User name'\n context['username_label'] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get('back_to')\n reauth = bool_val(self.request.query_params.get('reauth'))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (parsed_back_to.scheme and 
parsed_back_to.scheme != self.request\n .url.scheme or parsed_back_to.hostname and parsed_back_to.\n hostname != self.request.url.hostname):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={'form': {'reauth': reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda : self.pool.query_user(form[\n 'username']).one())()\n self.request.app.state.kdf.verify(user.password, form['password'])\n except Exception as e:\n logger.debug(f\"failed login attempt: {form['username']} - {e!r}\")\n return self.render_template(context={'form': form, 'alerts': [\n 'No user registered with that user name and password.']})\n self.per_pool_session['user_id'] = user.id\n return RedirectResponse(self.back_to or self.success_page_url,\n status_code=302)\n\n\n@routes.route('/signin/success', name='signin_success')\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = 'pools/signin_success.html'\n\n async def get(self):\n return self.templates(self.template)\n\n\n@routes.route('/signout', name='signout', methods=['post'])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n\n async def post(self):\n form = await self.request.form()\n client_id = form.get('client_id')\n try:\n client = await async_(self.pool.clients.filter_by(\n oauth2_client_id=client_id).one)()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get('back_to')\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for('pools:signout_success', pool=\n self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session['user_id']\n return RedirectResponse(back_to, status_code=302)\n\n\n@routes.route('/signout/success', name='signout_success')\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return 
self.templates('pools/signout_success.html')\n\n\n@routes.route('/', name='index')\nclass IndexEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n return self.templates('pools/index.html')\n\n\n@routes.route('/.well-known/jwks.json', name='signin_success')\nclass JWKSEndpoint(PoolHTTPEndpoint):\n\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(self.request\n .app.state.jwt_config.key)\n public_jwk['use'] = 'sig'\n keys.append(public_jwk)\n return JSONResponse({'keys': keys})\n", "step-5": "# Copyright (c) 2020 Open Collector, Inc.\n#\n# Permission is hereby granted, free of charge, to any person obtaining a copy\n# of this software and associated documentation files (the \"Software\"), to\n# deal in the Software without restriction, including without limitation the\n# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n# sell copies of the Software, and to permit persons to whom the Software is\n# furnished to do so, subject to the following conditions:\n#\n# The above copyright notice and this permission notice shall be included in\n# all copies or substantial portions of the Software.\n#\n# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER\n# DEALINGS IN THE SOFTWARE.\n\nimport logging\nimport typing\nfrom urllib.parse import urlparse\n\nfrom sqlalchemy.orm import exc as orm_exc\nfrom starlette.exceptions import HTTPException\nfrom starlette.responses import JSONResponse, RedirectResponse, Response\nfrom starlette.routing import Router\nfrom starlette.status import HTTP_400_BAD_REQUEST, HTTP_404_NOT_FOUND\n\nfrom ...executor import async_\nfrom ...middlewares import WithTemplates\nfrom ...utils import ContextualHTTPEndpoint\nfrom ..application import POOL_KEY\nfrom ..models import AuxiliaryIdentityAttribute, UserPool\nfrom ..utils import build_jwt_public_key_from_private_key\n\nlogger = logging.getLogger(__name__)\nroutes = Router()\n\n\nclass PoolHTTPEndpoint(ContextualHTTPEndpoint):\n @property\n def templates(self):\n return lambda name, context={}, *args, **kwargs: (\n typing.cast(WithTemplates, self.request).templates(\n name,\n {**context, \"pool\": self.request.scope.get(POOL_KEY)},\n *args,\n **kwargs,\n )\n )\n\n @property\n def pool(self) -> typing.Optional[UserPool]:\n return typing.cast(typing.Optional[UserPool], self.request.get(POOL_KEY))\n\n @property\n def per_pool_session(self) -> typing.Dict[str, typing.Any]:\n pool = self.pool\n if pool is not None:\n return self.request.scope[\"session\"].setdefault(pool.key, {})\n else:\n return self.request.scope[\"session\"]\n\n async def dispatch(self):\n if self.request.get(POOL_KEY) is None:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND)\n await super().dispatch()\n\n @property\n def success_page_url(self):\n return self.request.url_for(\"pools:signin_success\", pool=self.pool.key)\n\n\ndef bool_val(v: typing.Optional[str]) -> bool:\n return v not in (\"false\", \"no\", \"0\", 
None)\n\n\n@routes.route(\"/signin\", name=\"signin\")\nclass SigninEndpoint(PoolHTTPEndpoint):\n template = \"pools/signin.html\"\n\n @property\n def back_to(self) -> typing.Optional[str]:\n return self.request.session.get(\"back_to\")\n\n @back_to.setter\n def back_to(self, value: typing.Optional[str]):\n self.request.session[\"back_to\"] = value\n\n def render_template(self, context: typing.Dict[str, typing.Any] = {}) -> Response:\n assert self.pool is not None\n if self.pool.username_attributes:\n email = AuxiliaryIdentityAttribute.EMAIL in self.pool.username_attributes\n phone_number = (\n AuxiliaryIdentityAttribute.PHONE_NUMBER in self.pool.username_attributes\n )\n if email and phone_number:\n label = \"E-mail address or phone number\"\n elif email:\n label = \"E-mail address\"\n elif phone_number:\n label = \"Phone number\"\n else:\n raise AssertionError()\n else:\n label = \"User name\"\n context[\"username_label\"] = label\n return self.templates(self.template, context=context)\n\n async def get(self):\n assert self.pool is not None\n back_to = self.request.query_params.get(\"back_to\")\n reauth = bool_val(self.request.query_params.get(\"reauth\"))\n if self.request.user.is_authenticated and not reauth:\n return RedirectResponse(back_to or self.success_page_url)\n parsed_back_to = urlparse(back_to)\n if (\n parsed_back_to.scheme and parsed_back_to.scheme != self.request.url.scheme\n ) or (\n parsed_back_to.hostname\n and parsed_back_to.hostname != self.request.url.hostname\n ):\n raise HTTPException(status_code=HTTP_400_BAD_REQUEST)\n if back_to is not None:\n self.back_to = back_to\n return self.render_template(context={\"form\": {\"reauth\": reauth}})\n\n async def post(self):\n assert self.pool is not None\n form = await self.request.form()\n try:\n user = await async_(lambda: self.pool.query_user(form[\"username\"]).one())()\n self.request.app.state.kdf.verify(user.password, form[\"password\"])\n except Exception as e:\n logger.debug(f\"failed login 
attempt: {form['username']} - {e!r}\")\n return self.render_template(\n context={\n \"form\": form,\n \"alerts\": [\"No user registered with that user name and password.\"],\n }\n )\n self.per_pool_session[\"user_id\"] = user.id\n return RedirectResponse(self.back_to or self.success_page_url, status_code=302)\n\n\n@routes.route(\"/signin/success\", name=\"signin_success\")\nclass SignedinEndpoint(PoolHTTPEndpoint):\n template = \"pools/signin_success.html\"\n\n async def get(self):\n return self.templates(self.template)\n\n\n@routes.route(\"/signout\", name=\"signout\", methods=[\"post\"])\nclass SignOutEndpoint(PoolHTTPEndpoint):\n async def post(self):\n form = await self.request.form()\n client_id = form.get(\"client_id\")\n try:\n client = await async_(\n self.pool.clients.filter_by(oauth2_client_id=client_id).one\n )()\n except orm_exc.NoResultFound as e:\n raise HTTPException(status_code=HTTP_404_NOT_FOUND) from e\n back_to = form.get(\"back_to\")\n if back_to is None or back_to not in client.logout_uris:\n back_to = self.request.url_for(\"pools:signout_success\", pool=self.pool.key)\n if self.request.user.is_authenticated:\n del self.per_pool_session[\"user_id\"]\n return RedirectResponse(back_to, status_code=302)\n\n\n@routes.route(\"/signout/success\", name=\"signout_success\")\nclass SignedOutEndpoint(PoolHTTPEndpoint):\n async def get(self):\n return self.templates(\"pools/signout_success.html\")\n\n\n@routes.route(\"/\", name=\"index\")\nclass IndexEndpoint(PoolHTTPEndpoint):\n async def get(self):\n return self.templates(\"pools/index.html\")\n\n\n@routes.route(\"/.well-known/jwks.json\", name=\"signin_success\")\nclass JWKSEndpoint(PoolHTTPEndpoint):\n async def get(self):\n keys = []\n if isinstance(self.request.app.state.jwt_config.key, dict):\n public_jwk = build_jwt_public_key_from_private_key(\n self.request.app.state.jwt_config.key\n )\n public_jwk[\"use\"] = \"sig\"\n keys.append(public_jwk)\n return JSONResponse(\n {\n \"keys\": keys,\n }\n 
)\n", "step-ids": [ 9, 13, 14, 19, 20 ] }
[ 9, 13, 14, 19, 20 ]
<|reserved_special_token_0|> class LoginForm(forms.Form): username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'})) <|reserved_special_token_1|> <|reserved_special_token_0|> class SignUpForm(forms.Form): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class Meta: model = AuthUser fields = ('username', 'email', 'password1', 'password2', 'first_name', 'last_name') class LoginForm(forms.Form): username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'})) <|reserved_special_token_1|> <|reserved_special_token_0|> class SignUpForm(forms.Form): username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) email = forms.EmailField(widget=forms.EmailInput(attrs={'class': 'form-control'})) password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'})) password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'})) first_name = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) last_name = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) class Meta: model = AuthUser fields = ('username', 'email', 'password1', 'password2', 'first_name', 'last_name') class LoginForm(forms.Form): username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'})) <|reserved_special_token_1|> from django import forms from django.contrib.auth.forms import UserCreationForm from .models import AuthUser class SignUpForm(forms.Form): username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) email = 
forms.EmailField(widget=forms.EmailInput(attrs={'class': 'form-control'})) password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'})) password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'})) first_name = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) last_name = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) class Meta: model = AuthUser fields = ('username', 'email', 'password1', 'password2', 'first_name', 'last_name') class LoginForm(forms.Form): username = forms.CharField(widget=forms.TextInput(attrs={'class': 'form-control'})) password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'form-control'}))
flexible
{ "blob_id": "7644dcd956e1ad179f42e44870864386744c6cdf", "index": 2553, "step-1": "<mask token>\n\n\nclass LoginForm(forms.Form):\n username = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n password = forms.CharField(widget=forms.PasswordInput(attrs={'class':\n 'form-control'}))\n", "step-2": "<mask token>\n\n\nclass SignUpForm(forms.Form):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\n class Meta:\n model = AuthUser\n fields = ('username', 'email', 'password1', 'password2',\n 'first_name', 'last_name')\n\n\nclass LoginForm(forms.Form):\n username = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n password = forms.CharField(widget=forms.PasswordInput(attrs={'class':\n 'form-control'}))\n", "step-3": "<mask token>\n\n\nclass SignUpForm(forms.Form):\n username = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n email = forms.EmailField(widget=forms.EmailInput(attrs={'class':\n 'form-control'}))\n password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class':\n 'form-control'}))\n password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class':\n 'form-control'}))\n first_name = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n last_name = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n\n\n class Meta:\n model = AuthUser\n fields = ('username', 'email', 'password1', 'password2',\n 'first_name', 'last_name')\n\n\nclass LoginForm(forms.Form):\n username = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n password = forms.CharField(widget=forms.PasswordInput(attrs={'class':\n 'form-control'}))\n", "step-4": "from django import forms\nfrom django.contrib.auth.forms import UserCreationForm\nfrom .models import AuthUser\n\n\nclass SignUpForm(forms.Form):\n username = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n 
email = forms.EmailField(widget=forms.EmailInput(attrs={'class':\n 'form-control'}))\n password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class':\n 'form-control'}))\n password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class':\n 'form-control'}))\n first_name = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n last_name = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n\n\n class Meta:\n model = AuthUser\n fields = ('username', 'email', 'password1', 'password2',\n 'first_name', 'last_name')\n\n\nclass LoginForm(forms.Form):\n username = forms.CharField(widget=forms.TextInput(attrs={'class':\n 'form-control'}))\n password = forms.CharField(widget=forms.PasswordInput(attrs={'class':\n 'form-control'}))\n", "step-5": null, "step-ids": [ 2, 3, 4, 5 ] }
[ 2, 3, 4, 5 ]
import csv import json from urllib import request from urllib.error import HTTPError from urllib.parse import urljoin, urlparse, quote_plus from optparse import OptionParser HEADER = ["id", "module", "channel", "type", "value", "datetime"] def parse_options(): parser = OptionParser() parser.add_option("-H", "--host") parser.add_option("-t", "--token") parser.add_option("-r", "--recursive", action="store_true", default=False) return parser.parse_args() def write_csv(url, recursive=False, writer=None, token=""): response = fetch(url) if recursive: write_rows(writer, response) cursor = next_cursor(response) if cursor is not None: print(f"next cursor exists...{cursor}") ret = urlparse(url) next_url = f"{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}" write_csv(next_url, recursive=True, writer=writer, token=token) else: write_rows(writer, response) def fetch(url): print(f"url...{url}\n") urlData = request.urlopen(url) data = urlData.read() encoding = urlData.info().get_content_charset("utf-8") return json.loads(data.decode(encoding)) def write_rows(writer, response): for msg in response["results"]: values = [msg[k] for k in HEADER] writer.writerow(values) def next_cursor(response): return response["meta"]["cursor"] if __name__ == "__main__": opt, args = parse_options() if opt.host is not None: url = urljoin(f"https://{opt.host}", f"datastore/v1/channels?token={opt.token}") else: url = f"https://api.sakura.io/datastore/v1/channels?token={opt.token}" f = open('./datastore.csv', 'w') writer = csv.writer(f, lineterminator="\n") # write header writer.writerow(HEADER) write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token) f.close()
normal
{ "blob_id": "b47f15a79f7a82304c2be6af00a5854ff0f6ad3e", "index": 6987, "step-1": "<mask token>\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\n<mask token>\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option('-H', '--host')\n parser.add_option('-t', '--token')\n parser.add_option('-r', '--recursive', action='store_true', default=False)\n return parser.parse_args()\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\ndef fetch(url):\n print(f'url...{url}\\n')\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset('utf-8')\n return json.loads(data.decode(encoding))\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\nif __name__ == '__main__':\n opt, args = parse_options()\n 
if opt.host is not None:\n url = urljoin(f'https://{opt.host}',\n f'datastore/v1/channels?token={opt.token}')\n else:\n url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'\n f = open('./datastore.csv', 'w')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()\n", "step-3": "<mask token>\nHEADER = ['id', 'module', 'channel', 'type', 'value', 'datetime']\n\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option('-H', '--host')\n parser.add_option('-t', '--token')\n parser.add_option('-r', '--recursive', action='store_true', default=False)\n return parser.parse_args()\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\ndef fetch(url):\n print(f'url...{url}\\n')\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset('utf-8')\n return json.loads(data.decode(encoding))\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\nif __name__ == '__main__':\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f'https://{opt.host}',\n f'datastore/v1/channels?token={opt.token}')\n else:\n url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'\n f = open('./datastore.csv', 'w')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow(HEADER)\n write_csv(url, writer=writer, 
recursive=opt.recursive, token=opt.token)\n f.close()\n", "step-4": "import csv\nimport json\nfrom urllib import request\nfrom urllib.error import HTTPError\nfrom urllib.parse import urljoin, urlparse, quote_plus\nfrom optparse import OptionParser\nHEADER = ['id', 'module', 'channel', 'type', 'value', 'datetime']\n\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option('-H', '--host')\n parser.add_option('-t', '--token')\n parser.add_option('-r', '--recursive', action='store_true', default=False)\n return parser.parse_args()\n\n\ndef write_csv(url, recursive=False, writer=None, token=''):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f'next cursor exists...{cursor}')\n ret = urlparse(url)\n next_url = (\n f'{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}'\n )\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\n\ndef fetch(url):\n print(f'url...{url}\\n')\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset('utf-8')\n return json.loads(data.decode(encoding))\n\n\ndef write_rows(writer, response):\n for msg in response['results']:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\n\ndef next_cursor(response):\n return response['meta']['cursor']\n\n\nif __name__ == '__main__':\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f'https://{opt.host}',\n f'datastore/v1/channels?token={opt.token}')\n else:\n url = f'https://api.sakura.io/datastore/v1/channels?token={opt.token}'\n f = open('./datastore.csv', 'w')\n writer = csv.writer(f, lineterminator='\\n')\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()\n", "step-5": "import csv\nimport json\nfrom urllib import request\nfrom urllib.error import HTTPError\nfrom urllib.parse import 
urljoin, urlparse, quote_plus\nfrom optparse import OptionParser\n\nHEADER = [\"id\", \"module\", \"channel\", \"type\", \"value\", \"datetime\"]\n\ndef parse_options():\n parser = OptionParser()\n parser.add_option(\"-H\", \"--host\")\n parser.add_option(\"-t\", \"--token\")\n parser.add_option(\"-r\", \"--recursive\", action=\"store_true\", default=False)\n return parser.parse_args()\n\ndef write_csv(url, recursive=False, writer=None, token=\"\"):\n response = fetch(url)\n if recursive:\n write_rows(writer, response)\n cursor = next_cursor(response)\n if cursor is not None:\n print(f\"next cursor exists...{cursor}\")\n ret = urlparse(url)\n next_url = f\"{ret.scheme}://{ret.netloc}{ret.path}?cursor={quote_plus(cursor)}&token={token}\"\n write_csv(next_url, recursive=True, writer=writer, token=token)\n else:\n write_rows(writer, response)\n\ndef fetch(url):\n print(f\"url...{url}\\n\")\n urlData = request.urlopen(url)\n data = urlData.read()\n encoding = urlData.info().get_content_charset(\"utf-8\")\n return json.loads(data.decode(encoding))\n\ndef write_rows(writer, response):\n for msg in response[\"results\"]:\n values = [msg[k] for k in HEADER]\n writer.writerow(values)\n\ndef next_cursor(response):\n return response[\"meta\"][\"cursor\"]\n\nif __name__ == \"__main__\":\n opt, args = parse_options()\n if opt.host is not None:\n url = urljoin(f\"https://{opt.host}\",\n f\"datastore/v1/channels?token={opt.token}\")\n else:\n url = f\"https://api.sakura.io/datastore/v1/channels?token={opt.token}\"\n f = open('./datastore.csv', 'w')\n\n writer = csv.writer(f, lineterminator=\"\\n\")\n # write header\n writer.writerow(HEADER)\n write_csv(url, writer=writer, recursive=opt.recursive, token=opt.token)\n f.close()", "step-ids": [ 3, 6, 7, 8, 9 ] }
[ 3, 6, 7, 8, 9 ]
<|reserved_special_token_0|> class InviteAdmin(admin.ModelAdmin): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class InviteAdmin(admin.ModelAdmin): list_display = ('invitee', 'inviter', 'created_on', 'approved', 'rejected', 'used') <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class InviteAdmin(admin.ModelAdmin): list_display = ('invitee', 'inviter', 'created_on', 'approved', 'rejected', 'used') admin.site.register(Invite, InviteAdmin) <|reserved_special_token_1|> from django.contrib import admin from .models import Invite class InviteAdmin(admin.ModelAdmin): list_display = ('invitee', 'inviter', 'created_on', 'approved', 'rejected', 'used') admin.site.register(Invite, InviteAdmin)
flexible
{ "blob_id": "fcb13b087b9c967ab16b64885411cc4aae98583c", "index": 2130, "step-1": "<mask token>\n\n\nclass InviteAdmin(admin.ModelAdmin):\n <mask token>\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass InviteAdmin(admin.ModelAdmin):\n list_display = ('invitee', 'inviter', 'created_on', 'approved',\n 'rejected', 'used')\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass InviteAdmin(admin.ModelAdmin):\n list_display = ('invitee', 'inviter', 'created_on', 'approved',\n 'rejected', 'used')\n\n\nadmin.site.register(Invite, InviteAdmin)\n", "step-4": "from django.contrib import admin\nfrom .models import Invite\n\n\nclass InviteAdmin(admin.ModelAdmin):\n list_display = ('invitee', 'inviter', 'created_on', 'approved',\n 'rejected', 'used')\n\n\nadmin.site.register(Invite, InviteAdmin)\n", "step-5": null, "step-ids": [ 1, 2, 3, 4 ] }
[ 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> setup(name='isc-dhcpd-parser', version='0.1', description= 'Parser for isc-dhcp config files (dhcpd.conf)', author= 'Pavel Podkorytov', author_email='pod.pavel@gmail.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3'], packages=find_packages(), scripts=['bin/isc_dhcpd_leases.py'], install_requires=['ply']) <|reserved_special_token_1|> from setuptools import setup, find_packages setup(name='isc-dhcpd-parser', version='0.1', description= 'Parser for isc-dhcp config files (dhcpd.conf)', author= 'Pavel Podkorytov', author_email='pod.pavel@gmail.com', classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3'], packages=find_packages(), scripts=['bin/isc_dhcpd_leases.py'], install_requires=['ply']) <|reserved_special_token_1|> #!/usr/bin/python # Always prefer setuptools over distutils from setuptools import setup, find_packages setup( name="isc-dhcpd-parser", version="0.1", description="Parser for isc-dhcp config files (dhcpd.conf)", author="Pavel Podkorytov", author_email="pod.pavel@gmail.com", classifiers=[ "Development Status :: 3 - Alpha", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", ], packages=find_packages(), scripts=["bin/isc_dhcpd_leases.py"], install_requires=["ply"], )
flexible
{ "blob_id": "79141679bb2839de9d4a25b6c6c285905dddbb0d", "index": 6460, "step-1": "<mask token>\n", "step-2": "<mask token>\nsetup(name='isc-dhcpd-parser', version='0.1', description=\n 'Parser for isc-dhcp config files (dhcpd.conf)', author=\n 'Pavel Podkorytov', author_email='pod.pavel@gmail.com', classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3'], packages=find_packages(),\n scripts=['bin/isc_dhcpd_leases.py'], install_requires=['ply'])\n", "step-3": "from setuptools import setup, find_packages\nsetup(name='isc-dhcpd-parser', version='0.1', description=\n 'Parser for isc-dhcp config files (dhcpd.conf)', author=\n 'Pavel Podkorytov', author_email='pod.pavel@gmail.com', classifiers=[\n 'Development Status :: 3 - Alpha',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3'], packages=find_packages(),\n scripts=['bin/isc_dhcpd_leases.py'], install_requires=['ply'])\n", "step-4": "#!/usr/bin/python\n\n# Always prefer setuptools over distutils\nfrom setuptools import setup, find_packages\n\nsetup(\n name=\"isc-dhcpd-parser\",\n version=\"0.1\",\n description=\"Parser for isc-dhcp config files (dhcpd.conf)\",\n author=\"Pavel Podkorytov\",\n author_email=\"pod.pavel@gmail.com\",\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"Programming Language :: Python :: 2\",\n \"Programming Language :: Python :: 3\",\n ],\n packages=find_packages(),\n scripts=[\"bin/isc_dhcpd_leases.py\"],\n install_requires=[\"ply\"],\n)\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class hrsalaryRule(models.Model): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class hrsalaryRule(models.Model): _inherit = 'hr.salary.rule' is_tax_fdfp = fields.Boolean('Est un impôt FDFP') <|reserved_special_token_1|> from odoo import api, models, fields, _ class hrsalaryRule(models.Model): _inherit = 'hr.salary.rule' is_tax_fdfp = fields.Boolean('Est un impôt FDFP') <|reserved_special_token_1|> # -*- coding:utf-8 -*- from odoo import api, models, fields, _ class hrsalaryRule(models.Model): _inherit = "hr.salary.rule" is_tax_fdfp = fields.Boolean("Est un impôt FDFP")
flexible
{ "blob_id": "097a87f7f1346e5db1599e59680232912348aef7", "index": 311, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass hrsalaryRule(models.Model):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass hrsalaryRule(models.Model):\n _inherit = 'hr.salary.rule'\n is_tax_fdfp = fields.Boolean('Est un impôt FDFP')\n", "step-4": "from odoo import api, models, fields, _\n\n\nclass hrsalaryRule(models.Model):\n _inherit = 'hr.salary.rule'\n is_tax_fdfp = fields.Boolean('Est un impôt FDFP')\n", "step-5": "# -*- coding:utf-8 -*-\r\n\r\nfrom odoo import api, models, fields, _\r\n\r\n\r\nclass hrsalaryRule(models.Model):\r\n _inherit = \"hr.salary.rule\"\r\n\r\n is_tax_fdfp = fields.Boolean(\"Est un impôt FDFP\")", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from django import forms class PasswordChangeForm(forms.Form): password = forms.CharField(min_length=8, label="New Password*", strip=False, widget=forms.PasswordInput( attrs={'autocomplete': 'current-password', 'class': 'form-control'}), )
normal
{ "blob_id": "85fff1f6e1f69dd0e2e9b5acc90db31d27329c7c", "index": 3352, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass PasswordChangeForm(forms.Form):\n <mask token>\n", "step-3": "<mask token>\n\n\nclass PasswordChangeForm(forms.Form):\n password = forms.CharField(min_length=8, label='New Password*', strip=\n False, widget=forms.PasswordInput(attrs={'autocomplete':\n 'current-password', 'class': 'form-control'}))\n", "step-4": "from django import forms\n\n\nclass PasswordChangeForm(forms.Form):\n password = forms.CharField(min_length=8, label='New Password*', strip=\n False, widget=forms.PasswordInput(attrs={'autocomplete':\n 'current-password', 'class': 'form-control'}))\n", "step-5": "from django import forms\n\n\nclass PasswordChangeForm(forms.Form):\n password = forms.CharField(min_length=8,\n label=\"New Password*\",\n strip=False,\n widget=forms.PasswordInput(\n attrs={'autocomplete': 'current-password', 'class': 'form-control'}),\n )\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> setup(name='twqq', version=twqq.__version__, description= 'An asynchronous webqq client library based on tornado', long_description=open('README.rst').read(), author='cold', author_email ='wh_linux@126.com', url='http://www.linuxzen.com', license= 'Apache 2.0', platforms='any', packages=packages, package_data={}, entry_points=entry_points, install_requires=requires, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7']) <|reserved_special_token_1|> <|reserved_special_token_0|> requires = ['tornado', 'pycurl', 'tornadohttpclient'] packages = ['twqq'] entry_points = {} setup(name='twqq', version=twqq.__version__, description= 'An asynchronous webqq client library based on tornado', long_description=open('README.rst').read(), author='cold', author_email ='wh_linux@126.com', url='http://www.linuxzen.com', license= 'Apache 2.0', platforms='any', packages=packages, package_data={}, entry_points=entry_points, install_requires=requires, classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7']) <|reserved_special_token_1|> import twqq from setuptools import setup requires = ['tornado', 'pycurl', 'tornadohttpclient'] packages = ['twqq'] entry_points = {} setup(name='twqq', version=twqq.__version__, description= 'An asynchronous webqq client library based on tornado', long_description=open('README.rst').read(), author='cold', author_email ='wh_linux@126.com', url='http://www.linuxzen.com', license= 'Apache 2.0', platforms='any', packages=packages, package_data={}, entry_points=entry_points, install_requires=requires, classifiers=[ 
'Development Status :: 3 - Alpha', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7']) <|reserved_special_token_1|> #!/usr/bin/env python # -*- coding:utf-8 -*- # # Author : cold # E-mail : wh_linux@126.com # Date : 13/09/05 11:16:58 # Desc : # import twqq from setuptools import setup requires = ["tornado", "pycurl", "tornadohttpclient"] packages = ["twqq"] entry_points = { } setup( name = "twqq", version = twqq.__version__, description = 'An asynchronous webqq client library based on tornado', long_description = open("README.rst").read(), author = 'cold', author_email = 'wh_linux@126.com', url = 'http://www.linuxzen.com', license = 'Apache 2.0', platforms = 'any', packages = packages, package_data = { }, entry_points = entry_points, install_requires = requires, classifiers=['Development Status :: 3 - Alpha', 'Environment :: Console', "Intended Audience :: Developers", 'License :: OSI Approved :: Apache Software License', 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7', ], )
flexible
{ "blob_id": "9492142a569da1d21b1927e79d97f9cf6276efdc", "index": 2800, "step-1": "<mask token>\n", "step-2": "<mask token>\nsetup(name='twqq', version=twqq.__version__, description=\n 'An asynchronous webqq client library based on tornado',\n long_description=open('README.rst').read(), author='cold', author_email\n ='wh_linux@126.com', url='http://www.linuxzen.com', license=\n 'Apache 2.0', platforms='any', packages=packages, package_data={},\n entry_points=entry_points, install_requires=requires, classifiers=[\n 'Development Status :: 3 - Alpha', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7'])\n", "step-3": "<mask token>\nrequires = ['tornado', 'pycurl', 'tornadohttpclient']\npackages = ['twqq']\nentry_points = {}\nsetup(name='twqq', version=twqq.__version__, description=\n 'An asynchronous webqq client library based on tornado',\n long_description=open('README.rst').read(), author='cold', author_email\n ='wh_linux@126.com', url='http://www.linuxzen.com', license=\n 'Apache 2.0', platforms='any', packages=packages, package_data={},\n entry_points=entry_points, install_requires=requires, classifiers=[\n 'Development Status :: 3 - Alpha', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7'])\n", "step-4": "import twqq\nfrom setuptools import setup\nrequires = ['tornado', 'pycurl', 'tornadohttpclient']\npackages = ['twqq']\nentry_points = {}\nsetup(name='twqq', version=twqq.__version__, description=\n 'An asynchronous webqq client library based on tornado',\n long_description=open('README.rst').read(), author='cold', author_email\n ='wh_linux@126.com', url='http://www.linuxzen.com', license=\n 'Apache 2.0', platforms='any', packages=packages, package_data={},\n 
entry_points=entry_points, install_requires=requires, classifiers=[\n 'Development Status :: 3 - Alpha', 'Environment :: Console',\n 'Intended Audience :: Developers',\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP', 'Programming Language :: Python :: 2.7'])\n", "step-5": "#!/usr/bin/env python\n# -*- coding:utf-8 -*-\n#\n# Author : cold\n# E-mail : wh_linux@126.com\n# Date : 13/09/05 11:16:58\n# Desc :\n#\nimport twqq\nfrom setuptools import setup\n\nrequires = [\"tornado\", \"pycurl\", \"tornadohttpclient\"]\n\npackages = [\"twqq\"]\n\nentry_points = {\n}\n\n\nsetup(\n name = \"twqq\",\n version = twqq.__version__,\n description = 'An asynchronous webqq client library based on tornado',\n long_description = open(\"README.rst\").read(),\n author = 'cold',\n author_email = 'wh_linux@126.com',\n url = 'http://www.linuxzen.com',\n license = 'Apache 2.0',\n platforms = 'any',\n packages = packages,\n package_data = {\n },\n entry_points = entry_points,\n install_requires = requires,\n classifiers=['Development Status :: 3 - Alpha',\n 'Environment :: Console',\n \"Intended Audience :: Developers\",\n 'License :: OSI Approved :: Apache Software License',\n 'Topic :: Internet :: WWW/HTTP',\n 'Programming Language :: Python :: 2.7',\n ],\n)\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
# coding: utf-8 # 2019/11/27 @ tongshiwei import pytest def test_api(env): assert set(env.parameters.keys()) == {"knowledge_structure", "action_space", "learning_item_base"} @pytest.mark.parametrize("n_step", [True, False]) def test_env(env, tmp_path, n_step): from EduSim.Envs.KSS import kss_train_eval, KSSAgent agent = KSSAgent(env.action_space) kss_train_eval( agent, env, max_steps=20, max_episode_num=10, level="summary", )
normal
{ "blob_id": "b1ae3abb6decf4d70bc2372e70cf4f5b868e805d", "index": 8756, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef test_api(env):\n assert set(env.parameters.keys()) == {'knowledge_structure',\n 'action_space', 'learning_item_base'}\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\ndef test_api(env):\n assert set(env.parameters.keys()) == {'knowledge_structure',\n 'action_space', 'learning_item_base'}\n\n\n@pytest.mark.parametrize('n_step', [True, False])\ndef test_env(env, tmp_path, n_step):\n from EduSim.Envs.KSS import kss_train_eval, KSSAgent\n agent = KSSAgent(env.action_space)\n kss_train_eval(agent, env, max_steps=20, max_episode_num=10, level=\n 'summary')\n", "step-4": "import pytest\n\n\ndef test_api(env):\n assert set(env.parameters.keys()) == {'knowledge_structure',\n 'action_space', 'learning_item_base'}\n\n\n@pytest.mark.parametrize('n_step', [True, False])\ndef test_env(env, tmp_path, n_step):\n from EduSim.Envs.KSS import kss_train_eval, KSSAgent\n agent = KSSAgent(env.action_space)\n kss_train_eval(agent, env, max_steps=20, max_episode_num=10, level=\n 'summary')\n", "step-5": "# coding: utf-8\n# 2019/11/27 @ tongshiwei\n\nimport pytest\n\n\ndef test_api(env):\n assert set(env.parameters.keys()) == {\"knowledge_structure\", \"action_space\", \"learning_item_base\"}\n\n\n@pytest.mark.parametrize(\"n_step\", [True, False])\ndef test_env(env, tmp_path, n_step):\n from EduSim.Envs.KSS import kss_train_eval, KSSAgent\n agent = KSSAgent(env.action_space)\n\n kss_train_eval(\n agent,\n env,\n max_steps=20,\n max_episode_num=10,\n level=\"summary\",\n )\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> class EntryCreateView(CreateView): <|reserved_special_token_0|> <|reserved_special_token_0|> <|reserved_special_token_0|> class EntryUpdateView(UpdateView): model = Entry fields = ['title', 'content'] def get_success_url(self): return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id}) class EntryDeleteView(DeleteView): model = Entry success_url = reverse_lazy('entry-list') <|reserved_special_token_1|> <|reserved_special_token_0|> class EntryCreateView(CreateView): model = Entry fields = ['title', 'content'] success_url = reverse_lazy('entry-list') class EntryUpdateView(UpdateView): model = Entry fields = ['title', 'content'] def get_success_url(self): return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id}) class EntryDeleteView(DeleteView): model = Entry success_url = reverse_lazy('entry-list') <|reserved_special_token_1|> <|reserved_special_token_0|> class EntryListView(ListView): <|reserved_special_token_0|> <|reserved_special_token_0|> class EntryDetailView(DetailView): model = Entry class EntryCreateView(CreateView): model = Entry fields = ['title', 'content'] success_url = reverse_lazy('entry-list') class EntryUpdateView(UpdateView): model = Entry fields = ['title', 'content'] def get_success_url(self): return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id}) class EntryDeleteView(DeleteView): model = Entry success_url = reverse_lazy('entry-list') <|reserved_special_token_1|> <|reserved_special_token_0|> class EntryListView(ListView): model = Entry queryset = Entry.objects.all().order_by('-date_created') class EntryDetailView(DetailView): model = Entry class EntryCreateView(CreateView): model = Entry fields = ['title', 'content'] success_url = reverse_lazy('entry-list') class EntryUpdateView(UpdateView): model = Entry fields = ['title', 'content'] def get_success_url(self): return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id}) class EntryDeleteView(DeleteView): model = Entry success_url = 
reverse_lazy('entry-list') <|reserved_special_token_1|> from django.urls import reverse_lazy from django.views.generic import ( ListView, DetailView, CreateView, UpdateView, DeleteView, ) from .models import Entry class EntryListView(ListView): model = Entry queryset = Entry.objects.all().order_by("-date_created") class EntryDetailView(DetailView): model = Entry class EntryCreateView(CreateView): model = Entry fields = ["title", "content"] success_url = reverse_lazy("entry-list") class EntryUpdateView(UpdateView): model = Entry fields = ["title", "content"] def get_success_url(self): return reverse_lazy("entry-detail", kwargs={"pk": self.entry.id}) class EntryDeleteView(DeleteView): model = Entry success_url = reverse_lazy("entry-list")
flexible
{ "blob_id": "37c03732ae52171fc24aec85c940848b02d76dc1", "index": 1176, "step-1": "<mask token>\n\n\nclass EntryCreateView(CreateView):\n <mask token>\n <mask token>\n <mask token>\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = ['title', 'content']\n\n def get_success_url(self):\n return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy('entry-list')\n", "step-2": "<mask token>\n\n\nclass EntryCreateView(CreateView):\n model = Entry\n fields = ['title', 'content']\n success_url = reverse_lazy('entry-list')\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = ['title', 'content']\n\n def get_success_url(self):\n return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy('entry-list')\n", "step-3": "<mask token>\n\n\nclass EntryListView(ListView):\n <mask token>\n <mask token>\n\n\nclass EntryDetailView(DetailView):\n model = Entry\n\n\nclass EntryCreateView(CreateView):\n model = Entry\n fields = ['title', 'content']\n success_url = reverse_lazy('entry-list')\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = ['title', 'content']\n\n def get_success_url(self):\n return reverse_lazy('entry-detail', kwargs={'pk': self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy('entry-list')\n", "step-4": "<mask token>\n\n\nclass EntryListView(ListView):\n model = Entry\n queryset = Entry.objects.all().order_by('-date_created')\n\n\nclass EntryDetailView(DetailView):\n model = Entry\n\n\nclass EntryCreateView(CreateView):\n model = Entry\n fields = ['title', 'content']\n success_url = reverse_lazy('entry-list')\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = ['title', 'content']\n\n def get_success_url(self):\n return reverse_lazy('entry-detail', kwargs={'pk': 
self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy('entry-list')\n", "step-5": "from django.urls import reverse_lazy\nfrom django.views.generic import (\n ListView,\n DetailView,\n CreateView,\n UpdateView,\n DeleteView,\n)\n\nfrom .models import Entry\n\n\nclass EntryListView(ListView):\n model = Entry\n queryset = Entry.objects.all().order_by(\"-date_created\")\n\n\nclass EntryDetailView(DetailView):\n model = Entry\n\n\nclass EntryCreateView(CreateView):\n model = Entry\n fields = [\"title\", \"content\"]\n success_url = reverse_lazy(\"entry-list\")\n\n\nclass EntryUpdateView(UpdateView):\n model = Entry\n fields = [\"title\", \"content\"]\n\n def get_success_url(self):\n return reverse_lazy(\"entry-detail\", kwargs={\"pk\": self.entry.id})\n\n\nclass EntryDeleteView(DeleteView):\n model = Entry\n success_url = reverse_lazy(\"entry-list\")\n", "step-ids": [ 6, 7, 10, 11, 13 ] }
[ 6, 7, 10, 11, 13 ]
from matplotlib import pyplot as plt from read_and_calculate_speed import get_info_from_mongodb plt.rcParams['font.sans-serif'] = ['SimHei'] plt.rcParams['font.family'] = 'sans-serif' def mat_line(speed_time_info, interface, direction, last_time): # 调节图形大小,宽,高 fig = plt.figure(figsize=(6, 6)) # 一共一行,每行一图,第一图 ax = fig.add_subplot(111) # 处理X轴时间格式 import matplotlib.dates as mdate # 设置时间标签显示格式 # ax.xaxis.set_major_formatter(mdate.DateFormatter('%Y-%m-%d %H:%M:%S')) ax.xaxis.set_major_formatter(mdate.DateFormatter('%H:%M:%S')) # 处理Y轴百分比格式 import matplotlib.ticker as mtick ax.yaxis.set_major_formatter(mtick.FormatStrFormatter('%d')) # ax.set_ylim(0, 100) # 控制Y轴的取值范围 # 把cpu_usage_list的数据,拆分为x轴的时间,与y轴的利用率 x = [] y = [] for time, speed in speed_time_info: x.append(time) y.append(speed) # 添加主题和注释 plt.title('路由器' + interface + '接口,' + direction + '方向,' + str(last_time) + '分钟速率') plt.xlabel('采集时间') plt.ylabel('速率kbps') # 当x轴太拥挤的时候可以让他自适应 fig.autofmt_xdate() # 实线红色 ax.plot(x, y, linestyle='solid', color='r', label='R1') # 虚线黑色 # ax.plot(x, y, linestyle='dashed', color='b', label='R1') # 如果你有两套数据,完全可以在一幅图中绘制双线 # ax.plot(x2, y2, linestyle='dashed', color='b', label='R1') # 设置说明的位置 ax.legend(loc='upper left') # 绘制图形 plt.show() if __name__ == '__main__': list_info = ['GigabitEthernet1', 'out', 2] # 获取数据库两分钟内的信息 time_recode, speed = get_info_from_mongodb(*list_info) speed_time_info = list(zip(time_recode, speed)) # 绘图 mat_line(speed_time_info, list_info[0], list_info[1], list_info[2])
normal
{ "blob_id": "0aa419b0045914b066fbec457c918d83276f2583", "index": 3556, "step-1": "<mask token>\n\n\ndef mat_line(speed_time_info, interface, direction, last_time):\n fig = plt.figure(figsize=(6, 6))\n ax = fig.add_subplot(111)\n import matplotlib.dates as mdate\n ax.xaxis.set_major_formatter(mdate.DateFormatter('%H:%M:%S'))\n import matplotlib.ticker as mtick\n ax.yaxis.set_major_formatter(mtick.FormatStrFormatter('%d'))\n x = []\n y = []\n for time, speed in speed_time_info:\n x.append(time)\n y.append(speed)\n plt.title('路由器' + interface + '接口,' + direction + '方向,' + str(last_time\n ) + '分钟速率')\n plt.xlabel('采集时间')\n plt.ylabel('速率kbps')\n fig.autofmt_xdate()\n ax.plot(x, y, linestyle='solid', color='r', label='R1')\n ax.legend(loc='upper left')\n plt.show()\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef mat_line(speed_time_info, interface, direction, last_time):\n fig = plt.figure(figsize=(6, 6))\n ax = fig.add_subplot(111)\n import matplotlib.dates as mdate\n ax.xaxis.set_major_formatter(mdate.DateFormatter('%H:%M:%S'))\n import matplotlib.ticker as mtick\n ax.yaxis.set_major_formatter(mtick.FormatStrFormatter('%d'))\n x = []\n y = []\n for time, speed in speed_time_info:\n x.append(time)\n y.append(speed)\n plt.title('路由器' + interface + '接口,' + direction + '方向,' + str(last_time\n ) + '分钟速率')\n plt.xlabel('采集时间')\n plt.ylabel('速率kbps')\n fig.autofmt_xdate()\n ax.plot(x, y, linestyle='solid', color='r', label='R1')\n ax.legend(loc='upper left')\n plt.show()\n\n\nif __name__ == '__main__':\n list_info = ['GigabitEthernet1', 'out', 2]\n time_recode, speed = get_info_from_mongodb(*list_info)\n speed_time_info = list(zip(time_recode, speed))\n mat_line(speed_time_info, list_info[0], list_info[1], list_info[2])\n", "step-3": "<mask token>\nplt.rcParams['font.sans-serif'] = ['SimHei']\nplt.rcParams['font.family'] = 'sans-serif'\n\n\ndef mat_line(speed_time_info, interface, direction, last_time):\n fig = plt.figure(figsize=(6, 6))\n ax = fig.add_subplot(111)\n 
import matplotlib.dates as mdate\n ax.xaxis.set_major_formatter(mdate.DateFormatter('%H:%M:%S'))\n import matplotlib.ticker as mtick\n ax.yaxis.set_major_formatter(mtick.FormatStrFormatter('%d'))\n x = []\n y = []\n for time, speed in speed_time_info:\n x.append(time)\n y.append(speed)\n plt.title('路由器' + interface + '接口,' + direction + '方向,' + str(last_time\n ) + '分钟速率')\n plt.xlabel('采集时间')\n plt.ylabel('速率kbps')\n fig.autofmt_xdate()\n ax.plot(x, y, linestyle='solid', color='r', label='R1')\n ax.legend(loc='upper left')\n plt.show()\n\n\nif __name__ == '__main__':\n list_info = ['GigabitEthernet1', 'out', 2]\n time_recode, speed = get_info_from_mongodb(*list_info)\n speed_time_info = list(zip(time_recode, speed))\n mat_line(speed_time_info, list_info[0], list_info[1], list_info[2])\n", "step-4": "from matplotlib import pyplot as plt\nfrom read_and_calculate_speed import get_info_from_mongodb\nplt.rcParams['font.sans-serif'] = ['SimHei']\nplt.rcParams['font.family'] = 'sans-serif'\n\n\ndef mat_line(speed_time_info, interface, direction, last_time):\n fig = plt.figure(figsize=(6, 6))\n ax = fig.add_subplot(111)\n import matplotlib.dates as mdate\n ax.xaxis.set_major_formatter(mdate.DateFormatter('%H:%M:%S'))\n import matplotlib.ticker as mtick\n ax.yaxis.set_major_formatter(mtick.FormatStrFormatter('%d'))\n x = []\n y = []\n for time, speed in speed_time_info:\n x.append(time)\n y.append(speed)\n plt.title('路由器' + interface + '接口,' + direction + '方向,' + str(last_time\n ) + '分钟速率')\n plt.xlabel('采集时间')\n plt.ylabel('速率kbps')\n fig.autofmt_xdate()\n ax.plot(x, y, linestyle='solid', color='r', label='R1')\n ax.legend(loc='upper left')\n plt.show()\n\n\nif __name__ == '__main__':\n list_info = ['GigabitEthernet1', 'out', 2]\n time_recode, speed = get_info_from_mongodb(*list_info)\n speed_time_info = list(zip(time_recode, speed))\n mat_line(speed_time_info, list_info[0], list_info[1], list_info[2])\n", "step-5": "from matplotlib import pyplot as plt\nfrom 
read_and_calculate_speed import get_info_from_mongodb\n\nplt.rcParams['font.sans-serif'] = ['SimHei']\nplt.rcParams['font.family'] = 'sans-serif'\n\n\ndef mat_line(speed_time_info, interface, direction, last_time):\n # 调节图形大小,宽,高\n fig = plt.figure(figsize=(6, 6))\n # 一共一行,每行一图,第一图\n ax = fig.add_subplot(111)\n\n # 处理X轴时间格式\n import matplotlib.dates as mdate\n\n # 设置时间标签显示格式\n # ax.xaxis.set_major_formatter(mdate.DateFormatter('%Y-%m-%d %H:%M:%S'))\n ax.xaxis.set_major_formatter(mdate.DateFormatter('%H:%M:%S'))\n\n # 处理Y轴百分比格式\n import matplotlib.ticker as mtick\n ax.yaxis.set_major_formatter(mtick.FormatStrFormatter('%d'))\n # ax.set_ylim(0, 100) # 控制Y轴的取值范围\n\n # 把cpu_usage_list的数据,拆分为x轴的时间,与y轴的利用率\n x = []\n y = []\n\n for time, speed in speed_time_info:\n x.append(time)\n y.append(speed)\n\n # 添加主题和注释\n plt.title('路由器' + interface + '接口,' + direction + '方向,' + str(last_time) + '分钟速率')\n plt.xlabel('采集时间')\n plt.ylabel('速率kbps')\n\n # 当x轴太拥挤的时候可以让他自适应\n fig.autofmt_xdate()\n\n # 实线红色\n ax.plot(x, y, linestyle='solid', color='r', label='R1')\n # 虚线黑色\n # ax.plot(x, y, linestyle='dashed', color='b', label='R1')\n\n # 如果你有两套数据,完全可以在一幅图中绘制双线\n # ax.plot(x2, y2, linestyle='dashed', color='b', label='R1')\n\n # 设置说明的位置\n ax.legend(loc='upper left')\n\n # 绘制图形\n plt.show()\n\n\nif __name__ == '__main__':\n list_info = ['GigabitEthernet1', 'out', 2]\n # 获取数据库两分钟内的信息\n time_recode, speed = get_info_from_mongodb(*list_info)\n speed_time_info = list(zip(time_recode, speed))\n # 绘图\n mat_line(speed_time_info, list_info[0], list_info[1], list_info[2])\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
<|reserved_special_token_0|> class LinkedinSearch: <|reserved_special_token_0|> <|reserved_special_token_0|> def db_fetch(self, query): self.collection.create_index([('name', 'text')]) lst = [] cursor = self.collection.find({'$text': {'$search': query}}, { 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta': 'textScore'})]) total = cursor.count() n = 0 for i in cursor: i.pop('_id') lst.append(i) n += 1 print('fetched pages from db', len(lst)) return lst <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class LinkedinSearch: def __init__(self): self.client = MongoClient(Config.MONGO_URI) db = self.client.linkedin_db self.collection = db.search self.dict = {} self.obj = SearchClass() def db_check(self, query): r = self.obj.search(query) print(r) t = 0 for i in r['results']: if self.collection.find_one({'userid': i['userid']}): pass else: t += 1 self.collection.insert_one(i) self.client.close() print('no. of stored pages', t) results = self.db_fetch(query) return {'data': results} def db_fetch(self, query): self.collection.create_index([('name', 'text')]) lst = [] cursor = self.collection.find({'$text': {'$search': query}}, { 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta': 'textScore'})]) total = cursor.count() n = 0 for i in cursor: i.pop('_id') lst.append(i) n += 1 print('fetched pages from db', len(lst)) return lst <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class LinkedinSearch: def __init__(self): self.client = MongoClient(Config.MONGO_URI) db = self.client.linkedin_db self.collection = db.search self.dict = {} self.obj = SearchClass() def db_check(self, query): r = self.obj.search(query) print(r) t = 0 for i in r['results']: if self.collection.find_one({'userid': i['userid']}): pass else: t += 1 self.collection.insert_one(i) self.client.close() print('no. 
of stored pages', t) results = self.db_fetch(query) return {'data': results} def db_fetch(self, query): self.collection.create_index([('name', 'text')]) lst = [] cursor = self.collection.find({'$text': {'$search': query}}, { 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta': 'textScore'})]) total = cursor.count() n = 0 for i in cursor: i.pop('_id') lst.append(i) n += 1 print('fetched pages from db', len(lst)) return lst if __name__ == '__main__': obj = LinkedinSearch() print(obj.db_check('mark')) <|reserved_special_token_1|> from pymongo import MongoClient from modules.linkedinSearch import SearchClass from config import Config class LinkedinSearch: def __init__(self): self.client = MongoClient(Config.MONGO_URI) db = self.client.linkedin_db self.collection = db.search self.dict = {} self.obj = SearchClass() def db_check(self, query): r = self.obj.search(query) print(r) t = 0 for i in r['results']: if self.collection.find_one({'userid': i['userid']}): pass else: t += 1 self.collection.insert_one(i) self.client.close() print('no. 
of stored pages', t) results = self.db_fetch(query) return {'data': results} def db_fetch(self, query): self.collection.create_index([('name', 'text')]) lst = [] cursor = self.collection.find({'$text': {'$search': query}}, { 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta': 'textScore'})]) total = cursor.count() n = 0 for i in cursor: i.pop('_id') lst.append(i) n += 1 print('fetched pages from db', len(lst)) return lst if __name__ == '__main__': obj = LinkedinSearch() print(obj.db_check('mark')) <|reserved_special_token_1|> from pymongo import MongoClient from modules.linkedinSearch import SearchClass from config import Config class LinkedinSearch: def __init__(self): self.client = MongoClient(Config.MONGO_URI) db = self.client.linkedin_db self.collection = db.search self.dict = {} self.obj = SearchClass() def db_check(self, query): r = self.obj.search(query) print(r) t = 0 for i in r['results']: if self.collection.find_one({'userid': i['userid']}): pass else: # print(i) t += 1 self.collection.insert_one(i) self.client.close() print('no. of stored pages', t) # self.loop.close() results = self.db_fetch(query) # # # return {'results': m} return {'data': results} # ---------------------fetching total number of query pages from database---------------------------------------- def db_fetch(self, query): self.collection.create_index([("name", "text")]) lst = [] cursor = self.collection.find( {"$text": {"$search": query}}, {'score': {'$meta': "textScore"}}).sort([('score', {'$meta': "textScore"})]) total = cursor.count() n = 0 for i in cursor: # print(i) i.pop('_id') lst.append(i) n += 1 print('fetched pages from db', len(lst)) # return {'results': lst, # 'total': n} return lst if __name__ == '__main__': obj = LinkedinSearch() print(obj.db_check("mark"))
flexible
{ "blob_id": "3e8860c22ff3092304df57aa7f5dbcb6ccda7dd8", "index": 5249, "step-1": "<mask token>\n\n\nclass LinkedinSearch:\n <mask token>\n <mask token>\n\n def db_fetch(self, query):\n self.collection.create_index([('name', 'text')])\n lst = []\n cursor = self.collection.find({'$text': {'$search': query}}, {\n 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta':\n 'textScore'})])\n total = cursor.count()\n n = 0\n for i in cursor:\n i.pop('_id')\n lst.append(i)\n n += 1\n print('fetched pages from db', len(lst))\n return lst\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass LinkedinSearch:\n\n def __init__(self):\n self.client = MongoClient(Config.MONGO_URI)\n db = self.client.linkedin_db\n self.collection = db.search\n self.dict = {}\n self.obj = SearchClass()\n\n def db_check(self, query):\n r = self.obj.search(query)\n print(r)\n t = 0\n for i in r['results']:\n if self.collection.find_one({'userid': i['userid']}):\n pass\n else:\n t += 1\n self.collection.insert_one(i)\n self.client.close()\n print('no. 
of stored pages', t)\n results = self.db_fetch(query)\n return {'data': results}\n\n def db_fetch(self, query):\n self.collection.create_index([('name', 'text')])\n lst = []\n cursor = self.collection.find({'$text': {'$search': query}}, {\n 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta':\n 'textScore'})])\n total = cursor.count()\n n = 0\n for i in cursor:\n i.pop('_id')\n lst.append(i)\n n += 1\n print('fetched pages from db', len(lst))\n return lst\n\n\n<mask token>\n", "step-3": "<mask token>\n\n\nclass LinkedinSearch:\n\n def __init__(self):\n self.client = MongoClient(Config.MONGO_URI)\n db = self.client.linkedin_db\n self.collection = db.search\n self.dict = {}\n self.obj = SearchClass()\n\n def db_check(self, query):\n r = self.obj.search(query)\n print(r)\n t = 0\n for i in r['results']:\n if self.collection.find_one({'userid': i['userid']}):\n pass\n else:\n t += 1\n self.collection.insert_one(i)\n self.client.close()\n print('no. of stored pages', t)\n results = self.db_fetch(query)\n return {'data': results}\n\n def db_fetch(self, query):\n self.collection.create_index([('name', 'text')])\n lst = []\n cursor = self.collection.find({'$text': {'$search': query}}, {\n 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta':\n 'textScore'})])\n total = cursor.count()\n n = 0\n for i in cursor:\n i.pop('_id')\n lst.append(i)\n n += 1\n print('fetched pages from db', len(lst))\n return lst\n\n\nif __name__ == '__main__':\n obj = LinkedinSearch()\n print(obj.db_check('mark'))\n", "step-4": "from pymongo import MongoClient\nfrom modules.linkedinSearch import SearchClass\nfrom config import Config\n\n\nclass LinkedinSearch:\n\n def __init__(self):\n self.client = MongoClient(Config.MONGO_URI)\n db = self.client.linkedin_db\n self.collection = db.search\n self.dict = {}\n self.obj = SearchClass()\n\n def db_check(self, query):\n r = self.obj.search(query)\n print(r)\n t = 0\n for i in r['results']:\n if self.collection.find_one({'userid': 
i['userid']}):\n pass\n else:\n t += 1\n self.collection.insert_one(i)\n self.client.close()\n print('no. of stored pages', t)\n results = self.db_fetch(query)\n return {'data': results}\n\n def db_fetch(self, query):\n self.collection.create_index([('name', 'text')])\n lst = []\n cursor = self.collection.find({'$text': {'$search': query}}, {\n 'score': {'$meta': 'textScore'}}).sort([('score', {'$meta':\n 'textScore'})])\n total = cursor.count()\n n = 0\n for i in cursor:\n i.pop('_id')\n lst.append(i)\n n += 1\n print('fetched pages from db', len(lst))\n return lst\n\n\nif __name__ == '__main__':\n obj = LinkedinSearch()\n print(obj.db_check('mark'))\n", "step-5": "from pymongo import MongoClient\nfrom modules.linkedinSearch import SearchClass\nfrom config import Config\n\n\nclass LinkedinSearch:\n\n def __init__(self):\n\n self.client = MongoClient(Config.MONGO_URI)\n db = self.client.linkedin_db\n self.collection = db.search\n self.dict = {}\n self.obj = SearchClass()\n\n def db_check(self, query):\n\n r = self.obj.search(query)\n print(r)\n t = 0\n for i in r['results']:\n if self.collection.find_one({'userid': i['userid']}):\n pass\n else:\n # print(i)\n t += 1\n self.collection.insert_one(i)\n self.client.close()\n print('no. 
of stored pages', t)\n # self.loop.close()\n\n results = self.db_fetch(query)\n #\n # # return {'results': m}\n return {'data': results}\n\n # ---------------------fetching total number of query pages from database----------------------------------------\n def db_fetch(self, query):\n self.collection.create_index([(\"name\", \"text\")])\n\n lst = []\n cursor = self.collection.find(\n {\"$text\": {\"$search\": query}},\n {'score': {'$meta': \"textScore\"}}).sort([('score', {'$meta': \"textScore\"})])\n total = cursor.count()\n n = 0\n for i in cursor:\n # print(i)\n i.pop('_id')\n lst.append(i)\n n += 1\n\n print('fetched pages from db', len(lst))\n # return {'results': lst,\n # 'total': n}\n return lst\n\n\nif __name__ == '__main__':\n obj = LinkedinSearch()\n print(obj.db_check(\"mark\"))\n\n", "step-ids": [ 2, 4, 5, 6, 7 ] }
[ 2, 4, 5, 6, 7 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> while True: net = Net.FeedForwardNet(input_count=784, layers=[100, 10], activation_function=Net.FeedForwardNet.leaky_relu) try: epoch_num = int(input('Epoch_num:')) batch_size = int(input('Batch_size:')) learning_rate = float(input('Learning rate:')) inertion_factor = float(input('Inertion factor:')) except: print('Parse error') continue for i in range(epoch_num): batch_in, batch_out = net.generate_random_batch(in_values, out_values, batch_size) net.forward_propagation(batch_in) net.backpropagation(batch_out, learning_rate=learning_rate, inertion_factor=inertion_factor) if i % 50 == 0: print() output = net.forward_propagation(in_testing_values) if net.check_total_squared_error(output_values= out_testing_values, epsilon=1000, verbose=True): break output_numbers = mnist_parser.one_hots_to_ints(output) correct = np.sum(out_gt_numbers_test == output_numbers) print('Epoch: ', i, ' br tocnih:', correct, '/', output_numbers .size, '(', correct / output_numbers.size, '%)') output = net.forward_propagation(in_testing_values) conf_mat = net.calculate_confusion_matrix(out_testing_values) output_numbers = mnist_parser.one_hots_to_ints(output) correct = np.sum(out_gt_numbers_test == output_numbers) print('Correct:', correct, '/', output_numbers.size, '(', correct / output_numbers.size, '%)') print(conf_mat) save = int(input('Save?(1/0)')) if save == 1: name = input('Save as?') net.save_state(name) exit = int(input('Exit?(1/0)')) if exit == 1: break <|reserved_special_token_1|> <|reserved_special_token_0|> in_values = np.load('MNIST/mnist_train_images.npy') out_values = np.load('MNIST/mnist_train_labels.npy') out_gt_numbers = mnist_parser.one_hots_to_ints(out_values) in_testing_values = np.load('MNIST/mnist_test_images.npy') out_testing_values = np.load('MNIST/mnist_test_labels.npy') out_gt_numbers_test = mnist_parser.one_hots_to_ints(out_testing_values) while True: net = 
Net.FeedForwardNet(input_count=784, layers=[100, 10], activation_function=Net.FeedForwardNet.leaky_relu) try: epoch_num = int(input('Epoch_num:')) batch_size = int(input('Batch_size:')) learning_rate = float(input('Learning rate:')) inertion_factor = float(input('Inertion factor:')) except: print('Parse error') continue for i in range(epoch_num): batch_in, batch_out = net.generate_random_batch(in_values, out_values, batch_size) net.forward_propagation(batch_in) net.backpropagation(batch_out, learning_rate=learning_rate, inertion_factor=inertion_factor) if i % 50 == 0: print() output = net.forward_propagation(in_testing_values) if net.check_total_squared_error(output_values= out_testing_values, epsilon=1000, verbose=True): break output_numbers = mnist_parser.one_hots_to_ints(output) correct = np.sum(out_gt_numbers_test == output_numbers) print('Epoch: ', i, ' br tocnih:', correct, '/', output_numbers .size, '(', correct / output_numbers.size, '%)') output = net.forward_propagation(in_testing_values) conf_mat = net.calculate_confusion_matrix(out_testing_values) output_numbers = mnist_parser.one_hots_to_ints(output) correct = np.sum(out_gt_numbers_test == output_numbers) print('Correct:', correct, '/', output_numbers.size, '(', correct / output_numbers.size, '%)') print(conf_mat) save = int(input('Save?(1/0)')) if save == 1: name = input('Save as?') net.save_state(name) exit = int(input('Exit?(1/0)')) if exit == 1: break <|reserved_special_token_1|> import Net import mnist_parser import numpy as np in_values = np.load('MNIST/mnist_train_images.npy') out_values = np.load('MNIST/mnist_train_labels.npy') out_gt_numbers = mnist_parser.one_hots_to_ints(out_values) in_testing_values = np.load('MNIST/mnist_test_images.npy') out_testing_values = np.load('MNIST/mnist_test_labels.npy') out_gt_numbers_test = mnist_parser.one_hots_to_ints(out_testing_values) while True: net = Net.FeedForwardNet(input_count=784, layers=[100, 10], activation_function=Net.FeedForwardNet.leaky_relu) 
try: epoch_num = int(input('Epoch_num:')) batch_size = int(input('Batch_size:')) learning_rate = float(input('Learning rate:')) inertion_factor = float(input('Inertion factor:')) except: print('Parse error') continue for i in range(epoch_num): batch_in, batch_out = net.generate_random_batch(in_values, out_values, batch_size) net.forward_propagation(batch_in) net.backpropagation(batch_out, learning_rate=learning_rate, inertion_factor=inertion_factor) if i % 50 == 0: print() output = net.forward_propagation(in_testing_values) if net.check_total_squared_error(output_values= out_testing_values, epsilon=1000, verbose=True): break output_numbers = mnist_parser.one_hots_to_ints(output) correct = np.sum(out_gt_numbers_test == output_numbers) print('Epoch: ', i, ' br tocnih:', correct, '/', output_numbers .size, '(', correct / output_numbers.size, '%)') output = net.forward_propagation(in_testing_values) conf_mat = net.calculate_confusion_matrix(out_testing_values) output_numbers = mnist_parser.one_hots_to_ints(output) correct = np.sum(out_gt_numbers_test == output_numbers) print('Correct:', correct, '/', output_numbers.size, '(', correct / output_numbers.size, '%)') print(conf_mat) save = int(input('Save?(1/0)')) if save == 1: name = input('Save as?') net.save_state(name) exit = int(input('Exit?(1/0)')) if exit == 1: break <|reserved_special_token_1|> import Net import mnist_parser import numpy as np #To use this model it is required to download the MNIST database #The donwloaded base is then needet parse to numpy using mnist_parser.parse_to_npy method #The files genetared using mnist_parser.parse_to_npy are then loaded using np.load in_values = np.load("MNIST/mnist_train_images.npy") out_values = np.load("MNIST/mnist_train_labels.npy") out_gt_numbers=mnist_parser.one_hots_to_ints(out_values) in_testing_values = np.load("MNIST/mnist_test_images.npy") out_testing_values = np.load("MNIST/mnist_test_labels.npy") 
out_gt_numbers_test=mnist_parser.one_hots_to_ints(out_testing_values) while(True): net = Net.FeedForwardNet(input_count=784, layers=[100, 10], activation_function=Net.FeedForwardNet.leaky_relu) try: epoch_num=int(input("Epoch_num:")) batch_size=int(input("Batch_size:")) #30 learning_rate=float(input("Learning rate:")) #0.001 inertion_factor=float(input("Inertion factor:")) #0.5 # max_error=float(input("Maximum error")) except: print("Parse error") continue for i in range(epoch_num): batch_in,batch_out=net.generate_random_batch(in_values,out_values,batch_size) net.forward_propagation(batch_in) net.backpropagation(batch_out, learning_rate=learning_rate, inertion_factor=inertion_factor) # print("X:",net.X[-1]) # net.stochastic_backpropagation(batch_out, learning_rate=learning_rate) if i % 50 == 0: print() output=net.forward_propagation(in_testing_values) if net.check_total_squared_error(output_values=out_testing_values, epsilon=1000, verbose=True): break output_numbers=mnist_parser.one_hots_to_ints(output) correct=np.sum( out_gt_numbers_test == output_numbers) print("Epoch: ", i, " br tocnih:",correct,"/",output_numbers.size,"(",correct/output_numbers.size,"%)") output=net.forward_propagation(in_testing_values) conf_mat=net.calculate_confusion_matrix(out_testing_values) output_numbers = mnist_parser.one_hots_to_ints(output) correct=np.sum(out_gt_numbers_test == output_numbers) print("Correct:",correct,"/",output_numbers.size,"(",correct/output_numbers.size ,"%)") print(conf_mat) save=int(input("Save?(1/0)")) if(save == 1): name=input("Save as?") net.save_state(name) exit=int(input("Exit?(1/0)")) if(exit == 1): break
flexible
{ "blob_id": "49005500b299ca276f663fe8431bb955e5585bbd", "index": 335, "step-1": "<mask token>\n", "step-2": "<mask token>\nwhile True:\n net = Net.FeedForwardNet(input_count=784, layers=[100, 10],\n activation_function=Net.FeedForwardNet.leaky_relu)\n try:\n epoch_num = int(input('Epoch_num:'))\n batch_size = int(input('Batch_size:'))\n learning_rate = float(input('Learning rate:'))\n inertion_factor = float(input('Inertion factor:'))\n except:\n print('Parse error')\n continue\n for i in range(epoch_num):\n batch_in, batch_out = net.generate_random_batch(in_values,\n out_values, batch_size)\n net.forward_propagation(batch_in)\n net.backpropagation(batch_out, learning_rate=learning_rate,\n inertion_factor=inertion_factor)\n if i % 50 == 0:\n print()\n output = net.forward_propagation(in_testing_values)\n if net.check_total_squared_error(output_values=\n out_testing_values, epsilon=1000, verbose=True):\n break\n output_numbers = mnist_parser.one_hots_to_ints(output)\n correct = np.sum(out_gt_numbers_test == output_numbers)\n print('Epoch: ', i, ' br tocnih:', correct, '/', output_numbers\n .size, '(', correct / output_numbers.size, '%)')\n output = net.forward_propagation(in_testing_values)\n conf_mat = net.calculate_confusion_matrix(out_testing_values)\n output_numbers = mnist_parser.one_hots_to_ints(output)\n correct = np.sum(out_gt_numbers_test == output_numbers)\n print('Correct:', correct, '/', output_numbers.size, '(', correct /\n output_numbers.size, '%)')\n print(conf_mat)\n save = int(input('Save?(1/0)'))\n if save == 1:\n name = input('Save as?')\n net.save_state(name)\n exit = int(input('Exit?(1/0)'))\n if exit == 1:\n break\n", "step-3": "<mask token>\nin_values = np.load('MNIST/mnist_train_images.npy')\nout_values = np.load('MNIST/mnist_train_labels.npy')\nout_gt_numbers = mnist_parser.one_hots_to_ints(out_values)\nin_testing_values = np.load('MNIST/mnist_test_images.npy')\nout_testing_values = 
np.load('MNIST/mnist_test_labels.npy')\nout_gt_numbers_test = mnist_parser.one_hots_to_ints(out_testing_values)\nwhile True:\n net = Net.FeedForwardNet(input_count=784, layers=[100, 10],\n activation_function=Net.FeedForwardNet.leaky_relu)\n try:\n epoch_num = int(input('Epoch_num:'))\n batch_size = int(input('Batch_size:'))\n learning_rate = float(input('Learning rate:'))\n inertion_factor = float(input('Inertion factor:'))\n except:\n print('Parse error')\n continue\n for i in range(epoch_num):\n batch_in, batch_out = net.generate_random_batch(in_values,\n out_values, batch_size)\n net.forward_propagation(batch_in)\n net.backpropagation(batch_out, learning_rate=learning_rate,\n inertion_factor=inertion_factor)\n if i % 50 == 0:\n print()\n output = net.forward_propagation(in_testing_values)\n if net.check_total_squared_error(output_values=\n out_testing_values, epsilon=1000, verbose=True):\n break\n output_numbers = mnist_parser.one_hots_to_ints(output)\n correct = np.sum(out_gt_numbers_test == output_numbers)\n print('Epoch: ', i, ' br tocnih:', correct, '/', output_numbers\n .size, '(', correct / output_numbers.size, '%)')\n output = net.forward_propagation(in_testing_values)\n conf_mat = net.calculate_confusion_matrix(out_testing_values)\n output_numbers = mnist_parser.one_hots_to_ints(output)\n correct = np.sum(out_gt_numbers_test == output_numbers)\n print('Correct:', correct, '/', output_numbers.size, '(', correct /\n output_numbers.size, '%)')\n print(conf_mat)\n save = int(input('Save?(1/0)'))\n if save == 1:\n name = input('Save as?')\n net.save_state(name)\n exit = int(input('Exit?(1/0)'))\n if exit == 1:\n break\n", "step-4": "import Net\nimport mnist_parser\nimport numpy as np\nin_values = np.load('MNIST/mnist_train_images.npy')\nout_values = np.load('MNIST/mnist_train_labels.npy')\nout_gt_numbers = mnist_parser.one_hots_to_ints(out_values)\nin_testing_values = np.load('MNIST/mnist_test_images.npy')\nout_testing_values = 
np.load('MNIST/mnist_test_labels.npy')\nout_gt_numbers_test = mnist_parser.one_hots_to_ints(out_testing_values)\nwhile True:\n net = Net.FeedForwardNet(input_count=784, layers=[100, 10],\n activation_function=Net.FeedForwardNet.leaky_relu)\n try:\n epoch_num = int(input('Epoch_num:'))\n batch_size = int(input('Batch_size:'))\n learning_rate = float(input('Learning rate:'))\n inertion_factor = float(input('Inertion factor:'))\n except:\n print('Parse error')\n continue\n for i in range(epoch_num):\n batch_in, batch_out = net.generate_random_batch(in_values,\n out_values, batch_size)\n net.forward_propagation(batch_in)\n net.backpropagation(batch_out, learning_rate=learning_rate,\n inertion_factor=inertion_factor)\n if i % 50 == 0:\n print()\n output = net.forward_propagation(in_testing_values)\n if net.check_total_squared_error(output_values=\n out_testing_values, epsilon=1000, verbose=True):\n break\n output_numbers = mnist_parser.one_hots_to_ints(output)\n correct = np.sum(out_gt_numbers_test == output_numbers)\n print('Epoch: ', i, ' br tocnih:', correct, '/', output_numbers\n .size, '(', correct / output_numbers.size, '%)')\n output = net.forward_propagation(in_testing_values)\n conf_mat = net.calculate_confusion_matrix(out_testing_values)\n output_numbers = mnist_parser.one_hots_to_ints(output)\n correct = np.sum(out_gt_numbers_test == output_numbers)\n print('Correct:', correct, '/', output_numbers.size, '(', correct /\n output_numbers.size, '%)')\n print(conf_mat)\n save = int(input('Save?(1/0)'))\n if save == 1:\n name = input('Save as?')\n net.save_state(name)\n exit = int(input('Exit?(1/0)'))\n if exit == 1:\n break\n", "step-5": "import Net\nimport mnist_parser\nimport numpy as np\n#To use this model it is required to download the MNIST database\n#The donwloaded base is then needet parse to numpy using mnist_parser.parse_to_npy method\n#The files genetared using mnist_parser.parse_to_npy are then loaded using np.load\nin_values = 
np.load(\"MNIST/mnist_train_images.npy\")\nout_values = np.load(\"MNIST/mnist_train_labels.npy\")\nout_gt_numbers=mnist_parser.one_hots_to_ints(out_values)\n\nin_testing_values = np.load(\"MNIST/mnist_test_images.npy\")\nout_testing_values = np.load(\"MNIST/mnist_test_labels.npy\")\nout_gt_numbers_test=mnist_parser.one_hots_to_ints(out_testing_values)\n\nwhile(True):\n\n net = Net.FeedForwardNet(input_count=784, layers=[100, 10], activation_function=Net.FeedForwardNet.leaky_relu)\n\n try:\n epoch_num=int(input(\"Epoch_num:\"))\n batch_size=int(input(\"Batch_size:\")) #30\n learning_rate=float(input(\"Learning rate:\")) #0.001\n inertion_factor=float(input(\"Inertion factor:\")) #0.5\n # max_error=float(input(\"Maximum error\"))\n except:\n print(\"Parse error\")\n continue\n\n for i in range(epoch_num):\n batch_in,batch_out=net.generate_random_batch(in_values,out_values,batch_size)\n net.forward_propagation(batch_in)\n net.backpropagation(batch_out, learning_rate=learning_rate, inertion_factor=inertion_factor)\n # print(\"X:\",net.X[-1])\n # net.stochastic_backpropagation(batch_out, learning_rate=learning_rate)\n\n if i % 50 == 0:\n print()\n output=net.forward_propagation(in_testing_values)\n if net.check_total_squared_error(output_values=out_testing_values, epsilon=1000, verbose=True):\n break\n output_numbers=mnist_parser.one_hots_to_ints(output)\n correct=np.sum( out_gt_numbers_test == output_numbers)\n print(\"Epoch: \", i, \" br tocnih:\",correct,\"/\",output_numbers.size,\"(\",correct/output_numbers.size,\"%)\")\n\n\n output=net.forward_propagation(in_testing_values)\n conf_mat=net.calculate_confusion_matrix(out_testing_values)\n\n output_numbers = mnist_parser.one_hots_to_ints(output)\n correct=np.sum(out_gt_numbers_test == output_numbers)\n print(\"Correct:\",correct,\"/\",output_numbers.size,\"(\",correct/output_numbers.size ,\"%)\")\n print(conf_mat)\n\n\n save=int(input(\"Save?(1/0)\"))\n if(save == 1):\n name=input(\"Save as?\")\n 
net.save_state(name)\n exit=int(input(\"Exit?(1/0)\"))\n if(exit == 1):\n break\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
from helper import * tree_type = TREE_TYPE_SPLIT file_name = '' file_path = '' split_scalars = {} visited = {} adjacency = {} pairs = {} index_map = {} postorder_map = {} preorder_map = {} birth = {} death = {} string = '' class Tree(object): def __init__(self): self.index = None self.children = [] self.parent = None self.label = None self.pair = None self.birth = None self.death = None self.postorder = None self.preorder = None def __str__(self): return str(self.index) def initialize_tree(index): root = Tree() root.index = index root.label = split_scalars[index] root.pair = pairs[index] # add mapping to dictionary index_map[index] = root return root def add_node(index, parent): node = Tree() node.index = index parent.children.append(node) node.parent = parent node.label = split_scalars[index] node.pair = pairs[index] # add mapping to dictionary index_map[index] = node return node def compare_nodes(a, b): # try to sort using the split_scalars # if they are equal, sort using index value if split_scalars[a] > split_scalars[b]: return 1 elif split_scalars[a] == split_scalars[b]: if a > b: return 1 else: return -1 else: return -1 def traverse(index, parent): #print index, split_scalars[index] visited[index] = True adjacency[index].sort(compare_nodes) for node in adjacency[index]: if not visited[node]: current = add_node(node, parent) traverse(node, current) def add_pairs(node): if(node == None): return else: node.pair = index_map[pairs[node.index]] node.birth = index_map[birth[node.index]] node.death = index_map[death[node.index]] for child in node.children: add_pairs(child) def postorder(node): # python needs a mutable object for updation order = {'index': 1} def set_order(node): if(node == None): return else: for child in node.children: set_order(child) node.postorder = order['index'] postorder_map[order['index']] = node order['index'] += 1 set_order(node) def preorder(node): # python needs a mutable object for updation order = {'index': 1} def set_order(node): 
if(node == None): return else: node.preorder = order['index'] preorder_map[order['index']] = node order['index'] += 1 for child in node.children: set_order(child) set_order(node) def stringify_tree(node): global string if(node == None): return else: string += '{' string += str(node.postorder) + '|' string += str(node.index) + '|' string += str(node.label) + '|' string += str(node.birth.label) + '|' string += str(node.death.label) for child in node.children: stringify_tree(child) string += '}' return string def get_merge_tree(): # Get merge tree path tree_file_arguments = [tree_type, TREE_INFIX, file_name, CSV_EXTENSION] tree_file_path = get_output_path(file_path, tree_file_arguments, folder_name = TREES_FOLDER) # Read merge tree file with open(tree_file_path, 'rb') as csvfile: csvfile.readline() spamreader = csv.reader(csvfile, delimiter=' ') for r in spamreader: row = r[0].split(',') node1 = int(row[0]) node2 = int(row[1]) split_scalars[node1] = float(row[2]) split_scalars[node2] = float(row[3]) visited[node1] = False visited[node2] = False if node1 not in adjacency.keys(): adjacency[node1] = [] if node2 not in adjacency.keys(): adjacency[node2] = [] adjacency[node1].append(node2) adjacency[node2].append(node1) for i in adjacency.keys(): if len(adjacency[i]) == 1: if (split_scalars[i] < split_scalars[adjacency[i][0]]): root = i return root def get_persistent_pairs(): # Get persistence pairs pairs_file_arguments = [tree_type, PAIRS_INFIX, file_name, CSV_EXTENSION] pairs_file_path = get_output_path(file_path, pairs_file_arguments, folder_name = PAIRS_FOLDER) with open(pairs_file_path, 'rb') as persistence_pairs: persistence_pairs.readline() spamreader = csv.reader(persistence_pairs, delimiter=' ') for r in spamreader: row = r[0].split(',') node1 = int(row[0]) node2 = int(row[1]) #if (node1 in split_scalars.keys()) and (node2 in split_scalars.keys()): # there will be pairs that do not exist in the merge tree # they will be removed/ignored subsequently pairs[node1] = 
node2 pairs[node2] = node1 # add birth and death values of nodes to dictionaries birth[node1] = node1 death[node1] = node2 birth[node2] = node1 death[node2] = node2 def write_tree(node): tuple_file_arguments = [file_name, TXT_EXTENSION] tuple_file_path = get_output_path(file_path, tuple_file_arguments, folder_name = TUPLES_FOLDER) tuple_file = open(tuple_file_path, 'w') fieldnames = ['timestep', 'postorder', 'value', 'birth', 'death'] writer = csv.writer(tuple_file, delimiter=',') writer.writerow(fieldnames) def pretty_print_tree(node): if(node == None): return else: timestep = file_name.split('tv_')[1] values = [timestep, node.postorder, node.label, node.birth.label, node.death.label] writer.writerow(values) for child in node.children: pretty_print_tree(child) pretty_print_tree(node) def print_treemap(node): processed_nodes = {} treemap_string = {} treemap_value = {} treemap_parent = {} treemap_container = {} def find_treemap_parent(node): if node.preorder not in processed_nodes: parent_node = node.parent paired_node = node.pair parent_found = False # keep going up the merge tree till you find a parent that itself and its pair within the range while((parent_node != None) and (parent_found == False)): if parent_node.preorder < node.preorder < parent_node.pair.preorder: parent_found = True else: parent_node = parent_node.parent if not parent_found: treemap_container[node.preorder] = str(node.preorder) treemap_parent[node] = None treemap_parent[node.pair] = node else: treemap_container[node.preorder] = treemap_container[parent_node.preorder] + "." + str(node.preorder) treemap_parent[node.pair] = node treemap_parent[node] = parent_node treemap_string[node.preorder] = treemap_container[node.preorder] + "." + str(node.preorder) treemap_string[node.pair.preorder] = treemap_container[node.preorder] + "." 
+ str(node.pair.preorder) treemap_value[node.pair.preorder] = node.pair.label treemap_value[node.preorder] = node.label processed_nodes[node.preorder] = True processed_nodes[node.pair.preorder] = True def get_tree_structure(node): if(node == None): return else: find_treemap_parent(node) for child in node.children: get_tree_structure(child) get_tree_structure(node) for key in treemap_container.keys(): print str(treemap_container[key]) + "," for key in treemap_string.keys(): print str(treemap_string[key]) + ","+ str(int((treemap_value[key]+0.05)*1000)) def print_label(node): print str(node.preorder) + " [label=\""+ str(node.preorder) + " \\n["+ str(node.pair.preorder) + "]"+"\"]" def print_edge(node): print str(node.parent.preorder) + "->" + str(node.preorder) def print_tree_dot(node): if(node == None): return else: print_label(node) for child in node.children: print_edge(child) print_tree_dot(child) def make_tree(name, path): global file_name, file_path file_name = name file_path = path root = get_merge_tree() get_persistent_pairs() tree = initialize_tree(root) traverse(root, tree) add_pairs(tree) postorder(tree) preorder(tree) #write_tree(tree) print_treemap(tree) #print "digraph {" #print_tree_dot(tree) #print "}"
normal
{ "blob_id": "4daab8b8db1e394e3132ab5550fe0236b67074d8", "index": 5527, "step-1": "from helper import *\n\ntree_type = TREE_TYPE_SPLIT\n\nfile_name = ''\nfile_path = ''\n\nsplit_scalars = {}\nvisited = {}\nadjacency = {}\npairs = {}\n\nindex_map = {}\npostorder_map = {}\npreorder_map = {}\n\nbirth = {}\ndeath = {}\n\nstring = ''\n\nclass Tree(object):\n\tdef __init__(self):\n\t\tself.index = None\n\t\tself.children = []\n\t\tself.parent = None\n\t\tself.label = None\n\t\tself.pair = None\n\t\tself.birth = None\n\t\tself.death = None\n\t\tself.postorder = None\n\t\tself.preorder = None\n\n\tdef __str__(self):\n\t\treturn str(self.index)\n\ndef initialize_tree(index):\n\troot = Tree()\n\troot.index = index\n\troot.label = split_scalars[index]\n\troot.pair = pairs[index]\n\n\t# add mapping to dictionary\n\tindex_map[index] = root\n\n\treturn root\n\ndef add_node(index, parent):\n\tnode = Tree()\n\tnode.index = index\n\tparent.children.append(node)\n\tnode.parent = parent\n\tnode.label = split_scalars[index]\n\tnode.pair = pairs[index]\n\n\t# add mapping to dictionary\n\tindex_map[index] = node\n\n\treturn node\n\n\ndef compare_nodes(a, b):\n\t# try to sort using the split_scalars\n\t# if they are equal, sort using index value\n\tif split_scalars[a] > split_scalars[b]:\n\t\treturn 1\n\telif split_scalars[a] == split_scalars[b]:\n\t\tif a > b:\n\t\t\treturn 1\n\t\telse:\n\t\t\treturn -1\n\telse:\n\t\treturn -1\n\ndef traverse(index, parent):\n\t#print index, split_scalars[index]\n\tvisited[index] = True\n\tadjacency[index].sort(compare_nodes)\n\tfor node in adjacency[index]:\n\t\tif not visited[node]:\n\t\t\tcurrent = add_node(node, parent)\n\t\t\ttraverse(node, current)\n\ndef add_pairs(node):\n\tif(node == None):\n\t\treturn\n\telse:\n\t\tnode.pair = index_map[pairs[node.index]]\n\t\tnode.birth = index_map[birth[node.index]]\n\t\tnode.death = index_map[death[node.index]]\n\t\tfor child in node.children:\n\t\t\tadd_pairs(child)\n\ndef postorder(node):\n\t# python needs 
a mutable object for updation\n\torder = {'index': 1}\n\n\tdef set_order(node):\n\t\tif(node == None):\n\t\t\treturn\n\t\telse:\n\t\t\tfor child in node.children:\n\t\t\t\tset_order(child)\n\n\t\t\tnode.postorder = order['index']\n\t\t\tpostorder_map[order['index']] = node\n\t\t\torder['index'] += 1\n\n\tset_order(node)\n\ndef preorder(node):\n\t# python needs a mutable object for updation\n\torder = {'index': 1}\n\n\tdef set_order(node):\n\t\tif(node == None):\n\t\t\treturn\n\t\telse:\n\t\t\tnode.preorder = order['index']\n\t\t\tpreorder_map[order['index']] = node\n\t\t\torder['index'] += 1\n\n\t\t\tfor child in node.children:\n\t\t\t\tset_order(child)\n\n\tset_order(node)\n\ndef stringify_tree(node):\n\tglobal string\n\tif(node == None):\n\t\treturn\n\telse:\n\t\tstring += '{'\n\t\tstring += str(node.postorder) + '|'\n\t\tstring += str(node.index) + '|'\n\t\tstring += str(node.label) + '|'\n\t\tstring += str(node.birth.label) + '|'\n\t\tstring += str(node.death.label)\n\n\t\tfor child in node.children:\n\t\t\tstringify_tree(child)\n\n\t\tstring += '}'\n\n\treturn string\n\ndef get_merge_tree():\n\t# Get merge tree path\n\ttree_file_arguments = [tree_type, TREE_INFIX, file_name, CSV_EXTENSION]\n\ttree_file_path = get_output_path(file_path, tree_file_arguments, folder_name = TREES_FOLDER)\n\n\t# Read merge tree file\n\twith open(tree_file_path, 'rb') as csvfile:\n\t\tcsvfile.readline()\n\t\tspamreader = csv.reader(csvfile, delimiter=' ')\n\t\tfor r in spamreader:\n\t\t\trow = r[0].split(',')\n\t\t\tnode1 = int(row[0])\n\t\t\tnode2 = int(row[1])\n\n\t\t\tsplit_scalars[node1] = float(row[2])\n\t\t\tsplit_scalars[node2] = float(row[3])\n\n\t\t\tvisited[node1] = False\n\t\t\tvisited[node2] = False\n\n\t\t\tif node1 not in adjacency.keys():\n\t\t\t\tadjacency[node1] = []\n\n\t\t\tif node2 not in adjacency.keys():\n\t\t\t\tadjacency[node2] = []\n\n\t\t\tadjacency[node1].append(node2)\n\t\t\tadjacency[node2].append(node1)\n\n\tfor i in adjacency.keys():\n\t\tif 
len(adjacency[i]) == 1:\n\t\t\tif (split_scalars[i] < split_scalars[adjacency[i][0]]):\n\t\t\t\troot = i\n\n\treturn root\n\ndef get_persistent_pairs():\n\t# Get persistence pairs\n\tpairs_file_arguments = [tree_type, PAIRS_INFIX, file_name, CSV_EXTENSION]\n\tpairs_file_path = get_output_path(file_path, pairs_file_arguments, folder_name = PAIRS_FOLDER)\n\n\twith open(pairs_file_path, 'rb') as persistence_pairs:\n\t\tpersistence_pairs.readline()\n\t\tspamreader = csv.reader(persistence_pairs, delimiter=' ')\n\t\tfor r in spamreader:\n\t\t\trow = r[0].split(',')\n\t\t\tnode1 = int(row[0])\n\t\t\tnode2 = int(row[1])\n\n\t\t\t#if (node1 in split_scalars.keys()) and (node2 in split_scalars.keys()):\n\t\t\t# there will be pairs that do not exist in the merge tree\n\t\t\t# they will be removed/ignored subsequently\n\n\t\t\tpairs[node1] = node2\n\t\t\tpairs[node2] = node1\n\n\t\t\t# add birth and death values of nodes to dictionaries\n\t\t\tbirth[node1] = node1\n\t\t\tdeath[node1] = node2\n\n\t\t\tbirth[node2] = node1\n\t\t\tdeath[node2] = node2\n\ndef write_tree(node):\n\ttuple_file_arguments = [file_name, TXT_EXTENSION]\n\ttuple_file_path = get_output_path(file_path, tuple_file_arguments, folder_name = TUPLES_FOLDER)\n\n\ttuple_file = open(tuple_file_path, 'w')\n\tfieldnames = ['timestep', 'postorder', 'value', 'birth', 'death']\n\n\twriter = csv.writer(tuple_file, delimiter=',')\n\twriter.writerow(fieldnames)\n\n\tdef pretty_print_tree(node):\n\t\tif(node == None):\n\t\t\treturn\n\t\telse:\n\t\t\ttimestep = file_name.split('tv_')[1]\n\t\t\tvalues = [timestep, node.postorder, node.label, node.birth.label, node.death.label]\n\t\t\twriter.writerow(values)\n\n\t\t\tfor child in node.children:\n\t\t\t\tpretty_print_tree(child)\n\n\tpretty_print_tree(node)\n\ndef print_treemap(node):\n\tprocessed_nodes = {}\n\ttreemap_string = {}\n\ttreemap_value = {}\n\ttreemap_parent = {}\n\ttreemap_container = {}\n\n\tdef find_treemap_parent(node):\n\t\tif node.preorder not in 
processed_nodes:\n\t\t\tparent_node = node.parent\n\t\t\tpaired_node = node.pair\n\t\t\tparent_found = False\n\n\t\t\t# keep going up the merge tree till you find a parent that itself and its pair within the range\n\t\t\twhile((parent_node != None) and (parent_found == False)):\n\t\t\t\tif parent_node.preorder < node.preorder < parent_node.pair.preorder:\n\t\t\t\t\tparent_found = True\n\t\t\t\telse:\n\t\t\t\t\tparent_node = parent_node.parent\n\n\t\t\tif not parent_found:\n\t\t\t\ttreemap_container[node.preorder] = str(node.preorder)\n\t\t\t\ttreemap_parent[node] = None\n\t\t\t\ttreemap_parent[node.pair] = node\n\t\t\telse:\n\t\t\t\ttreemap_container[node.preorder] = treemap_container[parent_node.preorder] + \".\" + str(node.preorder)\n\t\t\t\ttreemap_parent[node.pair] = node\n\t\t\t\ttreemap_parent[node] = parent_node\n\n\t\t\ttreemap_string[node.preorder] = treemap_container[node.preorder] + \".\" + str(node.preorder)\n\t\t\ttreemap_string[node.pair.preorder] = treemap_container[node.preorder] + \".\" + str(node.pair.preorder)\n\n\t\t\ttreemap_value[node.pair.preorder] = node.pair.label\n\t\t\ttreemap_value[node.preorder] = node.label\n\n\t\t\tprocessed_nodes[node.preorder] = True\n\t\t\tprocessed_nodes[node.pair.preorder] = True\n\n\tdef get_tree_structure(node):\n\t\tif(node == None):\n\t\t\treturn\n\t\telse:\n\t\t\tfind_treemap_parent(node)\n\t\t\tfor child in node.children:\n\t\t\t\tget_tree_structure(child)\n\n\tget_tree_structure(node)\n\tfor key in treemap_container.keys():\n\t\tprint str(treemap_container[key]) + \",\"\n\n\tfor key in treemap_string.keys():\n\t\tprint str(treemap_string[key]) + \",\"+ str(int((treemap_value[key]+0.05)*1000))\n\ndef print_label(node):\n\tprint str(node.preorder) + \" [label=\\\"\"+ str(node.preorder) + \" \\\\n[\"+ str(node.pair.preorder) + \"]\"+\"\\\"]\"\n\ndef print_edge(node):\n\tprint str(node.parent.preorder) + \"->\" + str(node.preorder)\n\ndef print_tree_dot(node):\n\tif(node == 
None):\n\t\treturn\n\telse:\n\t\tprint_label(node)\n\t\tfor child in node.children:\n\t\t\tprint_edge(child)\n\t\t\tprint_tree_dot(child)\n\n\ndef make_tree(name, path):\n\tglobal file_name, file_path\n\tfile_name = name\n\tfile_path = path\n\troot = get_merge_tree()\n\tget_persistent_pairs()\n\n\n\ttree = initialize_tree(root)\n\ttraverse(root, tree)\n\tadd_pairs(tree)\n\tpostorder(tree)\n\tpreorder(tree)\n\n\t#write_tree(tree)\n\n\tprint_treemap(tree)\n\n\t#print \"digraph {\"\n\t#print_tree_dot(tree)\n\t#print \"}\"\n", "step-2": null, "step-3": null, "step-4": null, "step-5": null, "step-ids": [ 0 ] }
[ 0 ]
DEBUG = True SQLALCHEMY_DATABASE_URI = "postgresql://username:password@IPOrDomain/databasename" SQLALCHEMY_TRACK_MODIFICATIONS = True DATABASE_CONNECT_OPTIONS = {} THREADS_PER_PAGE = 2
normal
{ "blob_id": "a1b0e72b62abc89d5292f199ec5b6193b544e271", "index": 7813, "step-1": "<mask token>\n", "step-2": "DEBUG = True\nSQLALCHEMY_DATABASE_URI = (\n 'postgresql://username:password@IPOrDomain/databasename')\nSQLALCHEMY_TRACK_MODIFICATIONS = True\nDATABASE_CONNECT_OPTIONS = {}\nTHREADS_PER_PAGE = 2\n", "step-3": "DEBUG = True\nSQLALCHEMY_DATABASE_URI = \"postgresql://username:password@IPOrDomain/databasename\"\n\nSQLALCHEMY_TRACK_MODIFICATIONS = True\nDATABASE_CONNECT_OPTIONS = {}\nTHREADS_PER_PAGE = 2\n", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
#_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-# # PROJECT : RegCEl - Registro para el Consumo Eléctrico # # VERSION : 1.2 # # AUTHOR : Yunior Barceló Chávez barceloch@gmail.com # # DATE : 9/01/2021 # #_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-# """ This file contains different customized widgets Availabe classes: ----------------- - HoverOneLineListItem - LabelForList - LabelForListStudent - AdminInfoLabel - AdminInfoEditField - CustomRecycleView """ from kivymd.uix.list import OneLineListItem from kivy.uix.label import Label from kivy.uix.boxlayout import BoxLayout from kivy.uix.recycleview import RecycleView from kivy.uix.bubble import Bubble, BubbleButton from hoverable import HoverBehavior from kivy.uix.floatlayout import FloatLayout from kivy.app import App class LabelForList(Label): """ This class creates universal label to be used in list items across this application """ pass class TotalsInfoLabel(BoxLayout): """ Customized `Label` to show personal/credential informations of the admin """ pass class PanelInfoLabel(BoxLayout): """ Customized `Label` to show personal/credential informations of the admin """ pass class CustomBubbleButton(BubbleButton): def add_text(self): app= App.get_running_app() index=app.root.ids.registerScreen.ids.input_field.cursor[0]-1 if self.text!="<-": app.root.ids.registerScreen.ids.input_field.text=app.root.ids.registerScreen.ids.input_field.text[:index+1]+self.text + app.root.ids.registerScreen.ids.input_field.text[index+1:] app.root.ids.registerScreen.ids.input_field.cursor=(index+2,0) else: app.root.ids.registerScreen.ids.input_field.text=app.root.ids.registerScreen.ids.input_field.text[:index] + app.root.ids.registerScreen.ids.input_field.text[index+1:] if index != -1 and app.root.ids.registerScreen.ids.input_field.cursor != (0,0) else 
app.root.ids.registerScreen.ids.input_field.text app.root.ids.registerScreen.ids.input_field.cursor=(index,0) pass class NumericKeyboard(Bubble): def on_touch_up(self, touch): app= App.get_running_app() if not self.collide_point(*touch.pos) and not self.parent.collide_point(*touch.pos): self.parent.remove_widget(self.parent.bubb) app.root.ids.registerScreen.ids.input_field.focus=False delattr(app.root.ids.registerScreen.ids.input_field.parent, 'bubb') def __init__(self, **kwargs): super(NumericKeyboard, self).__init__(**kwargs) self.create_bubble_button() def create_bubble_button(self): numeric_keypad = ['7', '8', '9', '4', '5', '6', '1', '2', '3', '0', '', '<-'] for x in numeric_keypad: if x == '': bubb_btn = CustomBubbleButton(disabled=True, text=str(x),font_name='zekton__.ttf', bold=True, font_size="20sp") else: bubb_btn = CustomBubbleButton(text=str(x),font_name='zekton__.ttf', bold=True, font_size="20sp") self.numeric_keyboard_layout.add_widget(bubb_btn) class ShowInputBubble(FloatLayout): def show_bubble(self, *l): if not hasattr(self, 'bubb'): self.bubb = NumericKeyboard() self.bubb.arrow_pos = "top_mid" self.add_widget(self.bubb)
normal
{ "blob_id": "7da8a074704b1851ac352477ef72a4c11cea1a0b", "index": 6737, "step-1": "<mask token>\n\n\nclass NumericKeyboard(Bubble):\n\n def on_touch_up(self, touch):\n app = App.get_running_app()\n if not self.collide_point(*touch.pos\n ) and not self.parent.collide_point(*touch.pos):\n self.parent.remove_widget(self.parent.bubb)\n app.root.ids.registerScreen.ids.input_field.focus = False\n delattr(app.root.ids.registerScreen.ids.input_field.parent, 'bubb')\n <mask token>\n\n def create_bubble_button(self):\n numeric_keypad = ['7', '8', '9', '4', '5', '6', '1', '2', '3', '0',\n '', '<-']\n for x in numeric_keypad:\n if x == '':\n bubb_btn = CustomBubbleButton(disabled=True, text=str(x),\n font_name='zekton__.ttf', bold=True, font_size='20sp')\n else:\n bubb_btn = CustomBubbleButton(text=str(x), font_name=\n 'zekton__.ttf', bold=True, font_size='20sp')\n self.numeric_keyboard_layout.add_widget(bubb_btn)\n\n\nclass ShowInputBubble(FloatLayout):\n\n def show_bubble(self, *l):\n if not hasattr(self, 'bubb'):\n self.bubb = NumericKeyboard()\n self.bubb.arrow_pos = 'top_mid'\n self.add_widget(self.bubb)\n", "step-2": "<mask token>\n\n\nclass PanelInfoLabel(BoxLayout):\n <mask token>\n pass\n\n\nclass CustomBubbleButton(BubbleButton):\n\n def add_text(self):\n app = App.get_running_app()\n index = app.root.ids.registerScreen.ids.input_field.cursor[0] - 1\n if self.text != '<-':\n app.root.ids.registerScreen.ids.input_field.text = (app.root.\n ids.registerScreen.ids.input_field.text[:index + 1] + self.\n text + app.root.ids.registerScreen.ids.input_field.text[\n index + 1:])\n app.root.ids.registerScreen.ids.input_field.cursor = index + 2, 0\n else:\n app.root.ids.registerScreen.ids.input_field.text = (app.root.\n ids.registerScreen.ids.input_field.text[:index] + app.root.\n ids.registerScreen.ids.input_field.text[index + 1:] if \n index != -1 and app.root.ids.registerScreen.ids.input_field\n .cursor != (0, 0) else app.root.ids.registerScreen.ids.\n input_field.text)\n 
app.root.ids.registerScreen.ids.input_field.cursor = index, 0\n pass\n\n\nclass NumericKeyboard(Bubble):\n\n def on_touch_up(self, touch):\n app = App.get_running_app()\n if not self.collide_point(*touch.pos\n ) and not self.parent.collide_point(*touch.pos):\n self.parent.remove_widget(self.parent.bubb)\n app.root.ids.registerScreen.ids.input_field.focus = False\n delattr(app.root.ids.registerScreen.ids.input_field.parent, 'bubb')\n\n def __init__(self, **kwargs):\n super(NumericKeyboard, self).__init__(**kwargs)\n self.create_bubble_button()\n\n def create_bubble_button(self):\n numeric_keypad = ['7', '8', '9', '4', '5', '6', '1', '2', '3', '0',\n '', '<-']\n for x in numeric_keypad:\n if x == '':\n bubb_btn = CustomBubbleButton(disabled=True, text=str(x),\n font_name='zekton__.ttf', bold=True, font_size='20sp')\n else:\n bubb_btn = CustomBubbleButton(text=str(x), font_name=\n 'zekton__.ttf', bold=True, font_size='20sp')\n self.numeric_keyboard_layout.add_widget(bubb_btn)\n\n\nclass ShowInputBubble(FloatLayout):\n\n def show_bubble(self, *l):\n if not hasattr(self, 'bubb'):\n self.bubb = NumericKeyboard()\n self.bubb.arrow_pos = 'top_mid'\n self.add_widget(self.bubb)\n", "step-3": "<mask token>\n\n\nclass PanelInfoLabel(BoxLayout):\n \"\"\"\n Customized `Label` to show personal/credential informations of the admin\n \"\"\"\n pass\n\n\nclass CustomBubbleButton(BubbleButton):\n\n def add_text(self):\n app = App.get_running_app()\n index = app.root.ids.registerScreen.ids.input_field.cursor[0] - 1\n if self.text != '<-':\n app.root.ids.registerScreen.ids.input_field.text = (app.root.\n ids.registerScreen.ids.input_field.text[:index + 1] + self.\n text + app.root.ids.registerScreen.ids.input_field.text[\n index + 1:])\n app.root.ids.registerScreen.ids.input_field.cursor = index + 2, 0\n else:\n app.root.ids.registerScreen.ids.input_field.text = (app.root.\n ids.registerScreen.ids.input_field.text[:index] + app.root.\n ids.registerScreen.ids.input_field.text[index + 1:] 
if \n index != -1 and app.root.ids.registerScreen.ids.input_field\n .cursor != (0, 0) else app.root.ids.registerScreen.ids.\n input_field.text)\n app.root.ids.registerScreen.ids.input_field.cursor = index, 0\n pass\n\n\nclass NumericKeyboard(Bubble):\n\n def on_touch_up(self, touch):\n app = App.get_running_app()\n if not self.collide_point(*touch.pos\n ) and not self.parent.collide_point(*touch.pos):\n self.parent.remove_widget(self.parent.bubb)\n app.root.ids.registerScreen.ids.input_field.focus = False\n delattr(app.root.ids.registerScreen.ids.input_field.parent, 'bubb')\n\n def __init__(self, **kwargs):\n super(NumericKeyboard, self).__init__(**kwargs)\n self.create_bubble_button()\n\n def create_bubble_button(self):\n numeric_keypad = ['7', '8', '9', '4', '5', '6', '1', '2', '3', '0',\n '', '<-']\n for x in numeric_keypad:\n if x == '':\n bubb_btn = CustomBubbleButton(disabled=True, text=str(x),\n font_name='zekton__.ttf', bold=True, font_size='20sp')\n else:\n bubb_btn = CustomBubbleButton(text=str(x), font_name=\n 'zekton__.ttf', bold=True, font_size='20sp')\n self.numeric_keyboard_layout.add_widget(bubb_btn)\n\n\nclass ShowInputBubble(FloatLayout):\n\n def show_bubble(self, *l):\n if not hasattr(self, 'bubb'):\n self.bubb = NumericKeyboard()\n self.bubb.arrow_pos = 'top_mid'\n self.add_widget(self.bubb)\n", "step-4": "<mask token>\n\n\nclass TotalsInfoLabel(BoxLayout):\n <mask token>\n pass\n\n\nclass PanelInfoLabel(BoxLayout):\n \"\"\"\n Customized `Label` to show personal/credential informations of the admin\n \"\"\"\n pass\n\n\nclass CustomBubbleButton(BubbleButton):\n\n def add_text(self):\n app = App.get_running_app()\n index = app.root.ids.registerScreen.ids.input_field.cursor[0] - 1\n if self.text != '<-':\n app.root.ids.registerScreen.ids.input_field.text = (app.root.\n ids.registerScreen.ids.input_field.text[:index + 1] + self.\n text + app.root.ids.registerScreen.ids.input_field.text[\n index + 1:])\n 
app.root.ids.registerScreen.ids.input_field.cursor = index + 2, 0\n else:\n app.root.ids.registerScreen.ids.input_field.text = (app.root.\n ids.registerScreen.ids.input_field.text[:index] + app.root.\n ids.registerScreen.ids.input_field.text[index + 1:] if \n index != -1 and app.root.ids.registerScreen.ids.input_field\n .cursor != (0, 0) else app.root.ids.registerScreen.ids.\n input_field.text)\n app.root.ids.registerScreen.ids.input_field.cursor = index, 0\n pass\n\n\nclass NumericKeyboard(Bubble):\n\n def on_touch_up(self, touch):\n app = App.get_running_app()\n if not self.collide_point(*touch.pos\n ) and not self.parent.collide_point(*touch.pos):\n self.parent.remove_widget(self.parent.bubb)\n app.root.ids.registerScreen.ids.input_field.focus = False\n delattr(app.root.ids.registerScreen.ids.input_field.parent, 'bubb')\n\n def __init__(self, **kwargs):\n super(NumericKeyboard, self).__init__(**kwargs)\n self.create_bubble_button()\n\n def create_bubble_button(self):\n numeric_keypad = ['7', '8', '9', '4', '5', '6', '1', '2', '3', '0',\n '', '<-']\n for x in numeric_keypad:\n if x == '':\n bubb_btn = CustomBubbleButton(disabled=True, text=str(x),\n font_name='zekton__.ttf', bold=True, font_size='20sp')\n else:\n bubb_btn = CustomBubbleButton(text=str(x), font_name=\n 'zekton__.ttf', bold=True, font_size='20sp')\n self.numeric_keyboard_layout.add_widget(bubb_btn)\n\n\nclass ShowInputBubble(FloatLayout):\n\n def show_bubble(self, *l):\n if not hasattr(self, 'bubb'):\n self.bubb = NumericKeyboard()\n self.bubb.arrow_pos = 'top_mid'\n self.add_widget(self.bubb)\n", "step-5": "#_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-#\n# PROJECT : RegCEl - Registro para el Consumo Eléctrico #\n# VERSION : 1.2 #\n# AUTHOR : Yunior Barceló Chávez barceloch@gmail.com #\n# DATE : 9/01/2021 
#\n#_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-#\n\"\"\"\nThis file contains different customized widgets\n\nAvailabe classes:\n-----------------\n - HoverOneLineListItem\n - LabelForList\n - LabelForListStudent\n - AdminInfoLabel\n - AdminInfoEditField\n - CustomRecycleView\n\n\"\"\"\n\nfrom kivymd.uix.list import OneLineListItem\nfrom kivy.uix.label import Label\nfrom kivy.uix.boxlayout import BoxLayout\nfrom kivy.uix.recycleview import RecycleView\nfrom kivy.uix.bubble import Bubble, BubbleButton\nfrom hoverable import HoverBehavior\nfrom kivy.uix.floatlayout import FloatLayout\n\nfrom kivy.app import App\n\n\nclass LabelForList(Label):\n \"\"\"\n This class creates universal label to be used in list items across this application\n \"\"\"\n\n pass\n\nclass TotalsInfoLabel(BoxLayout):\n \"\"\"\n Customized `Label` to show personal/credential informations of the admin\n \"\"\"\n\n pass\n\nclass PanelInfoLabel(BoxLayout):\n \"\"\"\n Customized `Label` to show personal/credential informations of the admin\n \"\"\"\n\n pass\n\n\nclass CustomBubbleButton(BubbleButton):\n \n def add_text(self):\n app= App.get_running_app()\n index=app.root.ids.registerScreen.ids.input_field.cursor[0]-1\n \n if self.text!=\"<-\":\n app.root.ids.registerScreen.ids.input_field.text=app.root.ids.registerScreen.ids.input_field.text[:index+1]+self.text + app.root.ids.registerScreen.ids.input_field.text[index+1:]\n app.root.ids.registerScreen.ids.input_field.cursor=(index+2,0)\n else:\n app.root.ids.registerScreen.ids.input_field.text=app.root.ids.registerScreen.ids.input_field.text[:index] + app.root.ids.registerScreen.ids.input_field.text[index+1:] if index != -1 and app.root.ids.registerScreen.ids.input_field.cursor != (0,0) else app.root.ids.registerScreen.ids.input_field.text\n app.root.ids.registerScreen.ids.input_field.cursor=(index,0)\n \n pass\n\n\nclass NumericKeyboard(Bubble):\n \n def on_touch_up(self, 
touch):\n app= App.get_running_app()\n if not self.collide_point(*touch.pos) and not self.parent.collide_point(*touch.pos):\n self.parent.remove_widget(self.parent.bubb)\n app.root.ids.registerScreen.ids.input_field.focus=False\n delattr(app.root.ids.registerScreen.ids.input_field.parent, 'bubb') \n \n def __init__(self, **kwargs):\n super(NumericKeyboard, self).__init__(**kwargs)\n self.create_bubble_button()\n\n def create_bubble_button(self):\n numeric_keypad = ['7', '8', '9', '4', '5', '6', '1', '2', '3', '0', '', '<-']\n for x in numeric_keypad:\n if x == '':\n bubb_btn = CustomBubbleButton(disabled=True, text=str(x),font_name='zekton__.ttf', bold=True, font_size=\"20sp\")\n else: \n bubb_btn = CustomBubbleButton(text=str(x),font_name='zekton__.ttf', bold=True, font_size=\"20sp\")\n self.numeric_keyboard_layout.add_widget(bubb_btn)\n\n\nclass ShowInputBubble(FloatLayout): \n def show_bubble(self, *l):\n if not hasattr(self, 'bubb'):\n self.bubb = NumericKeyboard()\n self.bubb.arrow_pos = \"top_mid\"\n self.add_widget(self.bubb)", "step-ids": [ 5, 9, 10, 11, 16 ] }
[ 5, 9, 10, 11, 16 ]
import torch import torch_scatter import torchgraphs as tg import textwrap from . import autograd_tricks as lrp def patch(): torch.add = lrp.add torch.cat = lrp.cat torch.index_select = lrp.index_select tg.utils.repeat_tensor = lrp.repeat_tensor torch_scatter.scatter_add = lrp.scatter_add torch_scatter.scatter_mean = lrp.scatter_mean torch_scatter.scatter_max = lrp.scatter_max torch.nn.functional.linear = lrp.linear_eps def computational_graph(op): if op is None: return 'None' res = f'{op.__class__.__name__} at {hex(id(op))}:' if op.__class__.__name__ == 'AccumulateGrad': res += f'variable at {hex(id(op.variable))}' for op in op.next_functions: res += '\n-' + textwrap.indent(computational_graph(op[0]), ' ') return res
normal
{ "blob_id": "faafc7cfd900d3f6fd6df30af5580f71eecfb279", "index": 8298, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef computational_graph(op):\n if op is None:\n return 'None'\n res = f'{op.__class__.__name__} at {hex(id(op))}:'\n if op.__class__.__name__ == 'AccumulateGrad':\n res += f'variable at {hex(id(op.variable))}'\n for op in op.next_functions:\n res += '\\n-' + textwrap.indent(computational_graph(op[0]), ' ')\n return res\n", "step-3": "<mask token>\n\n\ndef patch():\n torch.add = lrp.add\n torch.cat = lrp.cat\n torch.index_select = lrp.index_select\n tg.utils.repeat_tensor = lrp.repeat_tensor\n torch_scatter.scatter_add = lrp.scatter_add\n torch_scatter.scatter_mean = lrp.scatter_mean\n torch_scatter.scatter_max = lrp.scatter_max\n torch.nn.functional.linear = lrp.linear_eps\n\n\ndef computational_graph(op):\n if op is None:\n return 'None'\n res = f'{op.__class__.__name__} at {hex(id(op))}:'\n if op.__class__.__name__ == 'AccumulateGrad':\n res += f'variable at {hex(id(op.variable))}'\n for op in op.next_functions:\n res += '\\n-' + textwrap.indent(computational_graph(op[0]), ' ')\n return res\n", "step-4": "import torch\nimport torch_scatter\nimport torchgraphs as tg\nimport textwrap\nfrom . import autograd_tricks as lrp\n\n\ndef patch():\n torch.add = lrp.add\n torch.cat = lrp.cat\n torch.index_select = lrp.index_select\n tg.utils.repeat_tensor = lrp.repeat_tensor\n torch_scatter.scatter_add = lrp.scatter_add\n torch_scatter.scatter_mean = lrp.scatter_mean\n torch_scatter.scatter_max = lrp.scatter_max\n torch.nn.functional.linear = lrp.linear_eps\n\n\ndef computational_graph(op):\n if op is None:\n return 'None'\n res = f'{op.__class__.__name__} at {hex(id(op))}:'\n if op.__class__.__name__ == 'AccumulateGrad':\n res += f'variable at {hex(id(op.variable))}'\n for op in op.next_functions:\n res += '\\n-' + textwrap.indent(computational_graph(op[0]), ' ')\n return res\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
<|reserved_special_token_0|> class Controller: <|reserved_special_token_0|> <|reserved_special_token_0|> class RandomController(Controller): def __init__(self, env): """ YOUR CODE HERE """ pass def get_action(self, state): """ YOUR CODE HERE """ """ Your code should randomly sample an action uniformly from the action space """ pass class MPCcontroller(Controller): """ Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 """ def __init__(self, env, dyn_model, horizon=5, cost_fn=None, num_simulated_paths=10): self.env = env self.dyn_model = dyn_model self.horizon = horizon self.cost_fn = cost_fn self.num_simulated_paths = num_simulated_paths def get_action(self, state): """ YOUR CODE HERE """ """ Note: be careful to batch your simulations through the model for speed """ <|reserved_special_token_1|> <|reserved_special_token_0|> class Controller: <|reserved_special_token_0|> def get_action(self, state): pass class RandomController(Controller): def __init__(self, env): """ YOUR CODE HERE """ pass def get_action(self, state): """ YOUR CODE HERE """ """ Your code should randomly sample an action uniformly from the action space """ pass class MPCcontroller(Controller): """ Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 """ def __init__(self, env, dyn_model, horizon=5, cost_fn=None, num_simulated_paths=10): self.env = env self.dyn_model = dyn_model self.horizon = horizon self.cost_fn = cost_fn self.num_simulated_paths = num_simulated_paths def get_action(self, state): """ YOUR CODE HERE """ """ Note: be careful to batch your simulations through the model for speed """ <|reserved_special_token_1|> <|reserved_special_token_0|> class Controller: def __init__(self): pass def get_action(self, state): pass class RandomController(Controller): def __init__(self, env): """ YOUR CODE HERE """ pass def get_action(self, state): """ YOUR CODE HERE """ """ Your code should randomly sample an action uniformly from the 
action space """ pass class MPCcontroller(Controller): """ Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 """ def __init__(self, env, dyn_model, horizon=5, cost_fn=None, num_simulated_paths=10): self.env = env self.dyn_model = dyn_model self.horizon = horizon self.cost_fn = cost_fn self.num_simulated_paths = num_simulated_paths def get_action(self, state): """ YOUR CODE HERE """ """ Note: be careful to batch your simulations through the model for speed """ <|reserved_special_token_1|> import numpy as np from cost_functions import trajectory_cost_fn import time class Controller: def __init__(self): pass def get_action(self, state): pass class RandomController(Controller): def __init__(self, env): """ YOUR CODE HERE """ pass def get_action(self, state): """ YOUR CODE HERE """ """ Your code should randomly sample an action uniformly from the action space """ pass class MPCcontroller(Controller): """ Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 """ def __init__(self, env, dyn_model, horizon=5, cost_fn=None, num_simulated_paths=10): self.env = env self.dyn_model = dyn_model self.horizon = horizon self.cost_fn = cost_fn self.num_simulated_paths = num_simulated_paths def get_action(self, state): """ YOUR CODE HERE """ """ Note: be careful to batch your simulations through the model for speed """ <|reserved_special_token_1|> import numpy as np from cost_functions import trajectory_cost_fn import time class Controller(): def __init__(self): pass # Get the appropriate action(s) for this state(s) def get_action(self, state): pass class RandomController(Controller): def __init__(self, env): """ YOUR CODE HERE """ pass def get_action(self, state): """ YOUR CODE HERE """ """ Your code should randomly sample an action uniformly from the action space """ pass class MPCcontroller(Controller): """ Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 """ def __init__(self, env, 
dyn_model, horizon=5, cost_fn=None, num_simulated_paths=10, ): self.env = env self.dyn_model = dyn_model self.horizon = horizon self.cost_fn = cost_fn self.num_simulated_paths = num_simulated_paths def get_action(self, state): """ YOUR CODE HERE """ """ Note: be careful to batch your simulations through the model for speed """
flexible
{ "blob_id": "7112eb52aea9be6f8e682b4dacc6b615365c8cea", "index": 7510, "step-1": "<mask token>\n\n\nclass Controller:\n <mask token>\n <mask token>\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n", "step-2": "<mask token>\n\n\nclass Controller:\n <mask token>\n\n def get_action(self, state):\n pass\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n", "step-3": "<mask token>\n\n\nclass Controller:\n\n def __init__(self):\n pass\n\n def get_action(self, state):\n pass\n\n\nclass 
RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n", "step-4": "import numpy as np\nfrom cost_functions import trajectory_cost_fn\nimport time\n\n\nclass Controller:\n\n def __init__(self):\n pass\n\n def get_action(self, state):\n pass\n\n\nclass RandomController(Controller):\n\n def __init__(self, env):\n \"\"\" YOUR CODE HERE \"\"\"\n pass\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\n pass\n\n\nclass MPCcontroller(Controller):\n \"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\n\n def __init__(self, env, dyn_model, horizon=5, cost_fn=None,\n num_simulated_paths=10):\n self.env = env\n self.dyn_model = dyn_model\n self.horizon = horizon\n self.cost_fn = cost_fn\n self.num_simulated_paths = num_simulated_paths\n\n def get_action(self, state):\n \"\"\" YOUR CODE HERE \"\"\"\n \"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\n", "step-5": "import numpy as np\r\nfrom cost_functions import trajectory_cost_fn\r\nimport time\r\n\r\nclass Controller():\r\n\tdef __init__(self):\r\n\t\tpass\r\n\r\n\t# Get the appropriate action(s) for this state(s)\r\n\tdef 
get_action(self, state):\r\n\t\tpass\r\n\r\n\r\nclass RandomController(Controller):\r\n\tdef __init__(self, env):\r\n\t\t\"\"\" YOUR CODE HERE \"\"\"\r\n\t\tpass\r\n\r\n\tdef get_action(self, state):\r\n\t\t\"\"\" YOUR CODE HERE \"\"\"\r\n\t\t\"\"\" Your code should randomly sample an action uniformly from the action space \"\"\"\r\n\t\tpass\r\n\r\n\r\nclass MPCcontroller(Controller):\r\n\t\"\"\" Controller built using the MPC method outlined in https://arxiv.org/abs/1708.02596 \"\"\"\r\n\tdef __init__(self, \r\n\t\t\t\t env, \r\n\t\t\t\t dyn_model, \r\n\t\t\t\t horizon=5, \r\n\t\t\t\t cost_fn=None, \r\n\t\t\t\t num_simulated_paths=10,\r\n\t\t\t\t ):\r\n\t\tself.env = env\r\n\t\tself.dyn_model = dyn_model\r\n\t\tself.horizon = horizon\r\n\t\tself.cost_fn = cost_fn\r\n\t\tself.num_simulated_paths = num_simulated_paths\r\n\r\n\tdef get_action(self, state):\r\n\t\t\"\"\" YOUR CODE HERE \"\"\"\r\n\t\t\"\"\" Note: be careful to batch your simulations through the model for speed \"\"\"\r\n\r\n", "step-ids": [ 8, 9, 10, 11, 12 ] }
[ 8, 9, 10, 11, 12 ]
# Generated by Django 3.0.5 on 2020-05-12 13:26 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='idcard', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=20, null=True)), ('employment_id', models.CharField(max_length=20, null=True)), ('customer_account_no', models.CharField(max_length=20, null=True)), ('circle', models.CharField(max_length=20, null=True)), ('company_name', models.CharField(max_length=20, null=True)), ('department', models.CharField(max_length=20, null=True)), ('certificate_no', models.CharField(max_length=20)), ('date', models.CharField(max_length=20, null=True)), ], ), ]
normal
{ "blob_id": "422873f89468b1faabed96f72f463b6294b85276", "index": 5314, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='idcard', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=20,\n null=True)), ('employment_id', models.CharField(max_length=20, null\n =True)), ('customer_account_no', models.CharField(max_length=20,\n null=True)), ('circle', models.CharField(max_length=20, null=True)),\n ('company_name', models.CharField(max_length=20, null=True)), (\n 'department', models.CharField(max_length=20, null=True)), (\n 'certificate_no', models.CharField(max_length=20)), ('date', models\n .CharField(max_length=20, null=True))])]\n", "step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = []\n operations = [migrations.CreateModel(name='idcard', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('name', models.CharField(max_length=20,\n null=True)), ('employment_id', models.CharField(max_length=20, null\n =True)), ('customer_account_no', models.CharField(max_length=20,\n null=True)), ('circle', models.CharField(max_length=20, null=True)),\n ('company_name', models.CharField(max_length=20, null=True)), (\n 'department', models.CharField(max_length=20, null=True)), (\n 'certificate_no', models.CharField(max_length=20)), ('date', models\n .CharField(max_length=20, null=True))])]\n", "step-5": "# Generated by Django 3.0.5 on 2020-05-12 13:26\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n 
migrations.CreateModel(\n name='idcard',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('name', models.CharField(max_length=20, null=True)),\n ('employment_id', models.CharField(max_length=20, null=True)),\n ('customer_account_no', models.CharField(max_length=20, null=True)),\n ('circle', models.CharField(max_length=20, null=True)),\n ('company_name', models.CharField(max_length=20, null=True)),\n ('department', models.CharField(max_length=20, null=True)),\n ('certificate_no', models.CharField(max_length=20)),\n ('date', models.CharField(max_length=20, null=True)),\n ],\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def gen_function(b=[0]): a = 0 global carry_on while (a < 100) & carry_on: yield a a = a + 1 <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> with open('in.txt', newline='') as raster: dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC) environment = [] for row in dataset: rowlist = [] for value in row: rowlist.append(value) environment.append(rowlist) <|reserved_special_token_0|> ax.set_autoscale_on(False) for i in range(num_of_agents): agents.append(agentframeworkanimate.Agent(environment, agents)) <|reserved_special_token_0|> def update(frame_number): fig.clear() global carry_on for j in range(num_of_iterations): random.shuffle(agents) for i in range(num_of_agents): agents[i].move() agents[i].eat() agents[i].share_with_neighbours(neighbourhood) if agents[i].store == 100: carry_on = False print('Stopping condition met') matplotlib.pyplot.xlim(0, 99) matplotlib.pyplot.ylim(0, 99) matplotlib.pyplot.imshow(environment) for i in range(num_of_agents): matplotlib.pyplot.scatter(agents[i].x, agents[i].y) def gen_function(b=[0]): a = 0 global carry_on while (a < 100) & carry_on: yield a a = a + 1 <|reserved_special_token_0|> matplotlib.pyplot.show() with open('out.txt', 'w', newline='') as finalenviron: writer = csv.writer(finalenviron, delimiter=',') for row in environment: writer.writerow(row) <|reserved_special_token_1|> <|reserved_special_token_0|> with open('in.txt', newline='') as raster: dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC) environment = [] for row in dataset: rowlist = [] for value in row: rowlist.append(value) environment.append(rowlist) num_of_agents = 10 num_of_iterations = 100 neighbourhood = 20 agents = [] fig = matplotlib.pyplot.figure(figsize=(7, 7)) ax = fig.add_axes([0, 0, 1, 1]) ax.set_autoscale_on(False) for i in range(num_of_agents): agents.append(agentframeworkanimate.Agent(environment, agents)) carry_on = True def update(frame_number): fig.clear() global 
carry_on for j in range(num_of_iterations): random.shuffle(agents) for i in range(num_of_agents): agents[i].move() agents[i].eat() agents[i].share_with_neighbours(neighbourhood) if agents[i].store == 100: carry_on = False print('Stopping condition met') matplotlib.pyplot.xlim(0, 99) matplotlib.pyplot.ylim(0, 99) matplotlib.pyplot.imshow(environment) for i in range(num_of_agents): matplotlib.pyplot.scatter(agents[i].x, agents[i].y) def gen_function(b=[0]): a = 0 global carry_on while (a < 100) & carry_on: yield a a = a + 1 animation = matplotlib.animation.FuncAnimation(fig, update, frames= gen_function, repeat=False) matplotlib.pyplot.show() with open('out.txt', 'w', newline='') as finalenviron: writer = csv.writer(finalenviron, delimiter=',') for row in environment: writer.writerow(row) <|reserved_special_token_1|> <|reserved_special_token_0|> import random import operator import matplotlib.pyplot import matplotlib.animation import agentframeworkanimate import csv with open('in.txt', newline='') as raster: dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC) environment = [] for row in dataset: rowlist = [] for value in row: rowlist.append(value) environment.append(rowlist) num_of_agents = 10 num_of_iterations = 100 neighbourhood = 20 agents = [] fig = matplotlib.pyplot.figure(figsize=(7, 7)) ax = fig.add_axes([0, 0, 1, 1]) ax.set_autoscale_on(False) for i in range(num_of_agents): agents.append(agentframeworkanimate.Agent(environment, agents)) carry_on = True def update(frame_number): fig.clear() global carry_on for j in range(num_of_iterations): random.shuffle(agents) for i in range(num_of_agents): agents[i].move() agents[i].eat() agents[i].share_with_neighbours(neighbourhood) if agents[i].store == 100: carry_on = False print('Stopping condition met') matplotlib.pyplot.xlim(0, 99) matplotlib.pyplot.ylim(0, 99) matplotlib.pyplot.imshow(environment) for i in range(num_of_agents): matplotlib.pyplot.scatter(agents[i].x, agents[i].y) def gen_function(b=[0]): a = 
0 global carry_on while (a < 100) & carry_on: yield a a = a + 1 animation = matplotlib.animation.FuncAnimation(fig, update, frames= gen_function, repeat=False) matplotlib.pyplot.show() with open('out.txt', 'w', newline='') as finalenviron: writer = csv.writer(finalenviron, delimiter=',') for row in environment: writer.writerow(row) <|reserved_special_token_1|> # -*- coding: utf-8 -*- """ Animation practical output The code that follows builds on the "Communications.py" file Additional code that follows has in part been modified from that of https://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/index.html https://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/examples/animatedmodel.py https://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/examples/animatedmodel2.py """ import random import operator import matplotlib.pyplot import matplotlib.animation import agentframeworkanimate import csv # Reading the in.txt file to create the environment. with open("in.txt", newline="") as raster: dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC) environment = [] for row in dataset: rowlist = [] for value in row: rowlist.append(value) environment.append(rowlist) # Setting initial parameters. num_of_agents = 10 num_of_iterations = 100 neighbourhood = 20 agents = [] # Variables to animate the model. fig = matplotlib.pyplot.figure(figsize=(7, 7)) ax = fig.add_axes([0, 0, 1, 1]) ax.set_autoscale_on(False) # Make the agents. # Addition of environment as argument for Agent class to allow interaction between agents and environment. # Addition of agents as argument for Agent class to allow agents to interact with each other. for i in range(num_of_agents): agents.append(agentframeworkanimate.Agent(environment, agents)) carry_on = True # Creating model animation. 
def update(frame_number): fig.clear() global carry_on # Move the agents and store what they eat for j in range(num_of_iterations): # Shuffle function used to randomise the order agents are processed with each iteration. random.shuffle(agents) for i in range(num_of_agents): agents[i].move() agents[i].eat() agents[i].share_with_neighbours(neighbourhood) # Stopping condition for animation when all agents have 100 in their store. if agents[i].store == 100: carry_on = False print("Stopping condition met") # Generate scatterplot of agents after model iterations. matplotlib.pyplot.xlim(0, 99) matplotlib.pyplot.ylim(0, 99) matplotlib.pyplot.imshow(environment) for i in range(num_of_agents): matplotlib.pyplot.scatter(agents[i].x,agents[i].y) # Generator function to stop animation. # Will stop animation after 10 iterations unless carry_on variable is set to False. def gen_function(b = [0]): a = 0 global carry_on while (a < 100) & (carry_on): yield a a = a + 1 # Animation will run until generator function condition is met #animation = matplotlib.animation.FuncAnimation(fig, update, interval=1, repeat=False, frames=10) animation = matplotlib.animation.FuncAnimation(fig, update, frames=gen_function, repeat=False) matplotlib.pyplot.show() # Writing the final environment to a text file. with open("out.txt", "w", newline="") as finalenviron: writer = csv.writer(finalenviron, delimiter=",") for row in environment: writer.writerow(row)
flexible
{ "blob_id": "4ea266d4f4c18efbba4204d7301652f8966c18a5", "index": 9724, "step-1": "<mask token>\n\n\ndef gen_function(b=[0]):\n a = 0\n global carry_on\n while (a < 100) & carry_on:\n yield a\n a = a + 1\n\n\n<mask token>\n", "step-2": "<mask token>\nwith open('in.txt', newline='') as raster:\n dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC)\n environment = []\n for row in dataset:\n rowlist = []\n for value in row:\n rowlist.append(value)\n environment.append(rowlist)\n<mask token>\nax.set_autoscale_on(False)\nfor i in range(num_of_agents):\n agents.append(agentframeworkanimate.Agent(environment, agents))\n<mask token>\n\n\ndef update(frame_number):\n fig.clear()\n global carry_on\n for j in range(num_of_iterations):\n random.shuffle(agents)\n for i in range(num_of_agents):\n agents[i].move()\n agents[i].eat()\n agents[i].share_with_neighbours(neighbourhood)\n if agents[i].store == 100:\n carry_on = False\n print('Stopping condition met')\n matplotlib.pyplot.xlim(0, 99)\n matplotlib.pyplot.ylim(0, 99)\n matplotlib.pyplot.imshow(environment)\n for i in range(num_of_agents):\n matplotlib.pyplot.scatter(agents[i].x, agents[i].y)\n\n\ndef gen_function(b=[0]):\n a = 0\n global carry_on\n while (a < 100) & carry_on:\n yield a\n a = a + 1\n\n\n<mask token>\nmatplotlib.pyplot.show()\nwith open('out.txt', 'w', newline='') as finalenviron:\n writer = csv.writer(finalenviron, delimiter=',')\n for row in environment:\n writer.writerow(row)\n", "step-3": "<mask token>\nwith open('in.txt', newline='') as raster:\n dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC)\n environment = []\n for row in dataset:\n rowlist = []\n for value in row:\n rowlist.append(value)\n environment.append(rowlist)\nnum_of_agents = 10\nnum_of_iterations = 100\nneighbourhood = 20\nagents = []\nfig = matplotlib.pyplot.figure(figsize=(7, 7))\nax = fig.add_axes([0, 0, 1, 1])\nax.set_autoscale_on(False)\nfor i in range(num_of_agents):\n 
agents.append(agentframeworkanimate.Agent(environment, agents))\ncarry_on = True\n\n\ndef update(frame_number):\n fig.clear()\n global carry_on\n for j in range(num_of_iterations):\n random.shuffle(agents)\n for i in range(num_of_agents):\n agents[i].move()\n agents[i].eat()\n agents[i].share_with_neighbours(neighbourhood)\n if agents[i].store == 100:\n carry_on = False\n print('Stopping condition met')\n matplotlib.pyplot.xlim(0, 99)\n matplotlib.pyplot.ylim(0, 99)\n matplotlib.pyplot.imshow(environment)\n for i in range(num_of_agents):\n matplotlib.pyplot.scatter(agents[i].x, agents[i].y)\n\n\ndef gen_function(b=[0]):\n a = 0\n global carry_on\n while (a < 100) & carry_on:\n yield a\n a = a + 1\n\n\nanimation = matplotlib.animation.FuncAnimation(fig, update, frames=\n gen_function, repeat=False)\nmatplotlib.pyplot.show()\nwith open('out.txt', 'w', newline='') as finalenviron:\n writer = csv.writer(finalenviron, delimiter=',')\n for row in environment:\n writer.writerow(row)\n", "step-4": "<mask token>\nimport random\nimport operator\nimport matplotlib.pyplot\nimport matplotlib.animation\nimport agentframeworkanimate\nimport csv\nwith open('in.txt', newline='') as raster:\n dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC)\n environment = []\n for row in dataset:\n rowlist = []\n for value in row:\n rowlist.append(value)\n environment.append(rowlist)\nnum_of_agents = 10\nnum_of_iterations = 100\nneighbourhood = 20\nagents = []\nfig = matplotlib.pyplot.figure(figsize=(7, 7))\nax = fig.add_axes([0, 0, 1, 1])\nax.set_autoscale_on(False)\nfor i in range(num_of_agents):\n agents.append(agentframeworkanimate.Agent(environment, agents))\ncarry_on = True\n\n\ndef update(frame_number):\n fig.clear()\n global carry_on\n for j in range(num_of_iterations):\n random.shuffle(agents)\n for i in range(num_of_agents):\n agents[i].move()\n agents[i].eat()\n agents[i].share_with_neighbours(neighbourhood)\n if agents[i].store == 100:\n carry_on = False\n print('Stopping 
condition met')\n matplotlib.pyplot.xlim(0, 99)\n matplotlib.pyplot.ylim(0, 99)\n matplotlib.pyplot.imshow(environment)\n for i in range(num_of_agents):\n matplotlib.pyplot.scatter(agents[i].x, agents[i].y)\n\n\ndef gen_function(b=[0]):\n a = 0\n global carry_on\n while (a < 100) & carry_on:\n yield a\n a = a + 1\n\n\nanimation = matplotlib.animation.FuncAnimation(fig, update, frames=\n gen_function, repeat=False)\nmatplotlib.pyplot.show()\nwith open('out.txt', 'w', newline='') as finalenviron:\n writer = csv.writer(finalenviron, delimiter=',')\n for row in environment:\n writer.writerow(row)\n", "step-5": "# -*- coding: utf-8 -*-\n\"\"\"\nAnimation practical output\n\nThe code that follows builds on the \"Communications.py\" file\n\nAdditional code that follows has in part been modified from that of\nhttps://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/index.html\nhttps://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/examples/animatedmodel.py\nhttps://www.geog.leeds.ac.uk/courses/computing/practicals/python/agent-framework/part8/examples/animatedmodel2.py\n\"\"\"\n\nimport random\nimport operator\nimport matplotlib.pyplot\nimport matplotlib.animation\nimport agentframeworkanimate\nimport csv\n\n\n# Reading the in.txt file to create the environment.\nwith open(\"in.txt\", newline=\"\") as raster:\n dataset = csv.reader(raster, quoting=csv.QUOTE_NONNUMERIC)\n environment = []\n for row in dataset:\n rowlist = []\n for value in row:\n rowlist.append(value)\n environment.append(rowlist)\n \n# Setting initial parameters.\nnum_of_agents = 10\nnum_of_iterations = 100\nneighbourhood = 20\nagents = []\n\n# Variables to animate the model.\nfig = matplotlib.pyplot.figure(figsize=(7, 7))\nax = fig.add_axes([0, 0, 1, 1])\n\nax.set_autoscale_on(False)\n\n# Make the agents.\n# Addition of environment as argument for Agent class to allow interaction between agents and environment.\n# Addition of agents as argument 
for Agent class to allow agents to interact with each other.\nfor i in range(num_of_agents):\n agents.append(agentframeworkanimate.Agent(environment, agents))\n\n\ncarry_on = True\n\n# Creating model animation.\ndef update(frame_number):\n fig.clear()\n global carry_on \n\n# Move the agents and store what they eat\n for j in range(num_of_iterations):\n # Shuffle function used to randomise the order agents are processed with each iteration.\n random.shuffle(agents)\n for i in range(num_of_agents):\n agents[i].move()\n agents[i].eat()\n agents[i].share_with_neighbours(neighbourhood)\n \n # Stopping condition for animation when all agents have 100 in their store.\n if agents[i].store == 100:\n carry_on = False\n print(\"Stopping condition met\")\n\n # Generate scatterplot of agents after model iterations.\n matplotlib.pyplot.xlim(0, 99)\n matplotlib.pyplot.ylim(0, 99)\n matplotlib.pyplot.imshow(environment) \n for i in range(num_of_agents):\n matplotlib.pyplot.scatter(agents[i].x,agents[i].y)\n \n# Generator function to stop animation.\n# Will stop animation after 10 iterations unless carry_on variable is set to False.\ndef gen_function(b = [0]):\n a = 0\n global carry_on\n while (a < 100) & (carry_on):\n yield a\n a = a + 1 \n\n# Animation will run until generator function condition is met\n#animation = matplotlib.animation.FuncAnimation(fig, update, interval=1, repeat=False, frames=10)\nanimation = matplotlib.animation.FuncAnimation(fig, update, frames=gen_function, repeat=False)\n\nmatplotlib.pyplot.show()\n\n \n# Writing the final environment to a text file.\nwith open(\"out.txt\", \"w\", newline=\"\") as finalenviron:\n writer = csv.writer(finalenviron, delimiter=\",\")\n for row in environment:\n writer.writerow(row)\n", "step-ids": [ 1, 3, 4, 5, 6 ] }
[ 1, 3, 4, 5, 6 ]
<|reserved_special_token_0|> @pytest.fixture def families_fixture(): ped_content = io.StringIO(convert_to_tab_separated( """ familyId personId dadId momId sex status role f1 mom1 0 0 2 1 mom f1 dad1 0 0 1 1 dad f1 prb1 dad1 mom1 1 2 prb f1 sib1 dad1 mom1 2 2 sib f1 sib2 dad1 mom1 2 2 sib f2 grmom2 0 0 2 0 maternal_grandmother f2 grdad2 0 0 1 0 maternal_grandfather f2 mom2 grdad2 grmom2 2 1 mom f2 dad2 0 0 1 1 dad f2 prb2 dad2 mom2 1 2 prb f2 sib2_3 dad2 mom2 2 2 sib """ )) families = FamiliesLoader(ped_content).load() assert families is not None return families def get_person_set_collections_config(content: str): return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads( content)), {'person_set_collections': person_set_collections_schema} ).person_set_collections <|reserved_special_token_0|> def test_status_person_set_collection(status_collection): assert status_collection is not None psc = status_collection assert len(psc.person_sets) == 3 assert len(psc.person_sets['unknown'].persons) == 2 assert len(psc.person_sets['affected'].persons) == 5 assert len(psc.person_sets['unaffected'].persons) == 4 def test_status_person_set_collection_all_selected(status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected', 'unaffected', 'unknown'})) assert query == () <|reserved_special_token_0|> def test_status_person_set_collection_some_selected_and_default( status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected', 'unknown'})) assert query == ([], [{'status': 'unaffected'}]) @pytest.fixture def status_sex_collection(families_fixture): config = get_person_set_collections_config(textwrap.dedent( """ [person_set_collections] selected_person_set_collections = ["status_sex"] status_sex.id = "status_sex" status_sex.name = "Affected Status and Sex" status_sex.sources = [ { from = "pedigree", source = "status" }, { from = "pedigree", source = "sex" }, ] 
status_sex.domain = [ { id = "affected_male", name = "Affected Male", values = ["affected", "M"], color = "#ffffff" }, { id = "affected_female", name = "Affected Female", values = ["affected", "F"], color = "#ffffff" }, { id = "unaffected_male", name = "Unaffected Male", values = ["unaffected", "M"], color = "#ffffff" }, { id = "unaffected_female", name = "Unaffected Female", values = ["unaffected", "F"], color = "#ffffff" }, ] status_sex.default = { id="other", name="Other", color="#aaaaaa"} """ )) return PersonSetCollection.from_families(config.status_sex, families_fixture) <|reserved_special_token_0|> def test_status_sex_person_set_collection_some_selected_with_default( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'affected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M', 'status': 'unaffected'}]) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'unaffected_male', 'unaffected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'affected'}]) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'unaffected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'unaffected'}]) <|reserved_special_token_1|> <|reserved_special_token_0|> @pytest.fixture def families_fixture(): ped_content = io.StringIO(convert_to_tab_separated( """ familyId personId dadId momId sex status role f1 mom1 0 0 2 1 mom f1 dad1 0 0 1 1 dad f1 prb1 dad1 mom1 1 2 prb f1 sib1 dad1 mom1 2 2 sib f1 sib2 dad1 mom1 2 2 sib f2 grmom2 0 0 2 0 maternal_grandmother f2 grdad2 0 0 1 0 maternal_grandfather f2 mom2 grdad2 grmom2 2 1 mom f2 dad2 0 0 1 1 dad f2 prb2 dad2 mom2 1 2 prb f2 sib2_3 dad2 mom2 2 2 sib """ )) families = FamiliesLoader(ped_content).load() assert 
families is not None return families def get_person_set_collections_config(content: str): return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads( content)), {'person_set_collections': person_set_collections_schema} ).person_set_collections <|reserved_special_token_0|> def test_status_person_set_collection(status_collection): assert status_collection is not None psc = status_collection assert len(psc.person_sets) == 3 assert len(psc.person_sets['unknown'].persons) == 2 assert len(psc.person_sets['affected'].persons) == 5 assert len(psc.person_sets['unaffected'].persons) == 4 def test_status_person_set_collection_all_selected(status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected', 'unaffected', 'unknown'})) assert query == () <|reserved_special_token_0|> def test_status_person_set_collection_some_selected_and_default( status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected', 'unknown'})) assert query == ([], [{'status': 'unaffected'}]) @pytest.fixture def status_sex_collection(families_fixture): config = get_person_set_collections_config(textwrap.dedent( """ [person_set_collections] selected_person_set_collections = ["status_sex"] status_sex.id = "status_sex" status_sex.name = "Affected Status and Sex" status_sex.sources = [ { from = "pedigree", source = "status" }, { from = "pedigree", source = "sex" }, ] status_sex.domain = [ { id = "affected_male", name = "Affected Male", values = ["affected", "M"], color = "#ffffff" }, { id = "affected_female", name = "Affected Female", values = ["affected", "F"], color = "#ffffff" }, { id = "unaffected_male", name = "Unaffected Male", values = ["unaffected", "M"], color = "#ffffff" }, { id = "unaffected_female", name = "Unaffected Female", values = ["unaffected", "F"], color = "#ffffff" }, ] status_sex.default = { id="other", name="Other", color="#aaaaaa"} """ )) return 
PersonSetCollection.from_families(config.status_sex, families_fixture) <|reserved_special_token_0|> def test_status_sex_person_set_collection_some_selected_no_default( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'affected_female'})) assert query == ([{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'affected'}], []) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'unaffected_male', 'unaffected_female'})) assert query == ([{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M', 'status': 'unaffected'}], []) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'unaffected_female'})) assert query == ([{'sex': 'M', 'status': 'affected'}, {'sex': 'F', 'status': 'unaffected'}], []) def test_status_sex_person_set_collection_some_selected_with_default( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'affected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M', 'status': 'unaffected'}]) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'unaffected_male', 'unaffected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'affected'}]) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'unaffected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'unaffected'}]) <|reserved_special_token_1|> <|reserved_special_token_0|> @pytest.fixture def families_fixture(): ped_content = io.StringIO(convert_to_tab_separated( """ familyId personId dadId momId sex status role f1 mom1 0 0 2 1 mom f1 dad1 0 0 1 1 dad f1 prb1 dad1 mom1 1 2 prb f1 sib1 dad1 
mom1 2 2 sib f1 sib2 dad1 mom1 2 2 sib f2 grmom2 0 0 2 0 maternal_grandmother f2 grdad2 0 0 1 0 maternal_grandfather f2 mom2 grdad2 grmom2 2 1 mom f2 dad2 0 0 1 1 dad f2 prb2 dad2 mom2 1 2 prb f2 sib2_3 dad2 mom2 2 2 sib """ )) families = FamiliesLoader(ped_content).load() assert families is not None return families def get_person_set_collections_config(content: str): return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads( content)), {'person_set_collections': person_set_collections_schema} ).person_set_collections @pytest.fixture def status_collection(families_fixture): content = textwrap.dedent( """ [person_set_collections] selected_person_set_collections = ["status"] status.id = "status" status.name = "Affected Status" status.sources = [{ from = "pedigree", source = "status" }] status.domain = [ { id = "affected", name = "Affected", values = ["affected"], color = "#aabbcc" }, { id = "unaffected", name = "Unaffected", values = ["unaffected"], color = "#ffffff" }, ] status.default = {id = "unknown",name = "Unknown",color = "#aaaaaa"} """ ) config = get_person_set_collections_config(content) collection = PersonSetCollection.from_families(config.status, families_fixture) return collection def test_status_person_set_collection(status_collection): assert status_collection is not None psc = status_collection assert len(psc.person_sets) == 3 assert len(psc.person_sets['unknown'].persons) == 2 assert len(psc.person_sets['affected'].persons) == 5 assert len(psc.person_sets['unaffected'].persons) == 4 def test_status_person_set_collection_all_selected(status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected', 'unaffected', 'unknown'})) assert query == () def test_status_person_set_collection_some_selected_no_default( status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected'})) assert query == ([{'status': 'affected'}], []) def 
test_status_person_set_collection_some_selected_and_default( status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected', 'unknown'})) assert query == ([], [{'status': 'unaffected'}]) @pytest.fixture def status_sex_collection(families_fixture): config = get_person_set_collections_config(textwrap.dedent( """ [person_set_collections] selected_person_set_collections = ["status_sex"] status_sex.id = "status_sex" status_sex.name = "Affected Status and Sex" status_sex.sources = [ { from = "pedigree", source = "status" }, { from = "pedigree", source = "sex" }, ] status_sex.domain = [ { id = "affected_male", name = "Affected Male", values = ["affected", "M"], color = "#ffffff" }, { id = "affected_female", name = "Affected Female", values = ["affected", "F"], color = "#ffffff" }, { id = "unaffected_male", name = "Unaffected Male", values = ["unaffected", "M"], color = "#ffffff" }, { id = "unaffected_female", name = "Unaffected Female", values = ["unaffected", "F"], color = "#ffffff" }, ] status_sex.default = { id="other", name="Other", color="#aaaaaa"} """ )) return PersonSetCollection.from_families(config.status_sex, families_fixture) <|reserved_special_token_0|> def test_status_sex_person_set_collection_some_selected_no_default( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'affected_female'})) assert query == ([{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'affected'}], []) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'unaffected_male', 'unaffected_female'})) assert query == ([{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M', 'status': 'unaffected'}], []) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'unaffected_female'})) assert query == ([{'sex': 'M', 'status': 'affected'}, {'sex': 'F', 
'status': 'unaffected'}], []) def test_status_sex_person_set_collection_some_selected_with_default( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'affected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M', 'status': 'unaffected'}]) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'unaffected_male', 'unaffected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'affected'}]) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'unaffected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'unaffected'}]) <|reserved_special_token_1|> <|reserved_special_token_0|> @pytest.fixture def families_fixture(): ped_content = io.StringIO(convert_to_tab_separated( """ familyId personId dadId momId sex status role f1 mom1 0 0 2 1 mom f1 dad1 0 0 1 1 dad f1 prb1 dad1 mom1 1 2 prb f1 sib1 dad1 mom1 2 2 sib f1 sib2 dad1 mom1 2 2 sib f2 grmom2 0 0 2 0 maternal_grandmother f2 grdad2 0 0 1 0 maternal_grandfather f2 mom2 grdad2 grmom2 2 1 mom f2 dad2 0 0 1 1 dad f2 prb2 dad2 mom2 1 2 prb f2 sib2_3 dad2 mom2 2 2 sib """ )) families = FamiliesLoader(ped_content).load() assert families is not None return families def get_person_set_collections_config(content: str): return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads( content)), {'person_set_collections': person_set_collections_schema} ).person_set_collections @pytest.fixture def status_collection(families_fixture): content = textwrap.dedent( """ [person_set_collections] selected_person_set_collections = ["status"] status.id = "status" status.name = "Affected Status" status.sources = [{ from = "pedigree", source = "status" }] status.domain = [ { id = "affected", name = "Affected", 
values = ["affected"], color = "#aabbcc" }, { id = "unaffected", name = "Unaffected", values = ["unaffected"], color = "#ffffff" }, ] status.default = {id = "unknown",name = "Unknown",color = "#aaaaaa"} """ ) config = get_person_set_collections_config(content) collection = PersonSetCollection.from_families(config.status, families_fixture) return collection def test_status_person_set_collection(status_collection): assert status_collection is not None psc = status_collection assert len(psc.person_sets) == 3 assert len(psc.person_sets['unknown'].persons) == 2 assert len(psc.person_sets['affected'].persons) == 5 assert len(psc.person_sets['unaffected'].persons) == 4 def test_status_person_set_collection_all_selected(status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected', 'unaffected', 'unknown'})) assert query == () def test_status_person_set_collection_some_selected_no_default( status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected'})) assert query == ([{'status': 'affected'}], []) def test_status_person_set_collection_some_selected_and_default( status_collection): query = ImpalaVariants.build_person_set_collection_query(status_collection, ('status', {'affected', 'unknown'})) assert query == ([], [{'status': 'unaffected'}]) @pytest.fixture def status_sex_collection(families_fixture): config = get_person_set_collections_config(textwrap.dedent( """ [person_set_collections] selected_person_set_collections = ["status_sex"] status_sex.id = "status_sex" status_sex.name = "Affected Status and Sex" status_sex.sources = [ { from = "pedigree", source = "status" }, { from = "pedigree", source = "sex" }, ] status_sex.domain = [ { id = "affected_male", name = "Affected Male", values = ["affected", "M"], color = "#ffffff" }, { id = "affected_female", name = "Affected Female", values = ["affected", "F"], color = "#ffffff" }, { id = "unaffected_male", name = 
"Unaffected Male", values = ["unaffected", "M"], color = "#ffffff" }, { id = "unaffected_female", name = "Unaffected Female", values = ["unaffected", "F"], color = "#ffffff" }, ] status_sex.default = { id="other", name="Other", color="#aaaaaa"} """ )) return PersonSetCollection.from_families(config.status_sex, families_fixture) def test_status_sex_person_set_collection_all_selected(status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'affected_female', 'unaffected_male', 'unaffected_female', 'other'})) assert query == () def test_status_sex_person_set_collection_some_selected_no_default( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'affected_female'})) assert query == ([{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'affected'}], []) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'unaffected_male', 'unaffected_female'})) assert query == ([{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M', 'status': 'unaffected'}], []) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'unaffected_female'})) assert query == ([{'sex': 'M', 'status': 'affected'}, {'sex': 'F', 'status': 'unaffected'}], []) def test_status_sex_person_set_collection_some_selected_with_default( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'affected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M', 'status': 'unaffected'}]) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'unaffected_male', 'unaffected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'affected'}]) query = 
ImpalaVariants.build_person_set_collection_query( status_sex_collection, ('status_sex', {'affected_male', 'unaffected_female', 'other'})) assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M', 'status': 'unaffected'}]) <|reserved_special_token_1|> # pylint: disable=W0621,C0114,C0116,W0212,W0613 import io import textwrap from typing import cast, Any, Dict import toml import pytest from dae.testing import convert_to_tab_separated from dae.configuration.gpf_config_parser import GPFConfigParser from dae.configuration.schemas.person_sets import person_set_collections_schema from dae.pedigrees.loader import FamiliesLoader from dae.person_sets import PersonSetCollection from impala_storage.schema1.impala_variants import ImpalaVariants @pytest.fixture def families_fixture(): ped_content = io.StringIO(convert_to_tab_separated( """ familyId personId dadId momId sex status role f1 mom1 0 0 2 1 mom f1 dad1 0 0 1 1 dad f1 prb1 dad1 mom1 1 2 prb f1 sib1 dad1 mom1 2 2 sib f1 sib2 dad1 mom1 2 2 sib f2 grmom2 0 0 2 0 maternal_grandmother f2 grdad2 0 0 1 0 maternal_grandfather f2 mom2 grdad2 grmom2 2 1 mom f2 dad2 0 0 1 1 dad f2 prb2 dad2 mom2 1 2 prb f2 sib2_3 dad2 mom2 2 2 sib """)) families = FamiliesLoader(ped_content).load() assert families is not None return families def get_person_set_collections_config(content: str): return GPFConfigParser.process_config( cast(Dict[str, Any], toml.loads(content)), {"person_set_collections": person_set_collections_schema}, ).person_set_collections @pytest.fixture def status_collection(families_fixture): content = textwrap.dedent( """ [person_set_collections] selected_person_set_collections = ["status"] status.id = "status" status.name = "Affected Status" status.sources = [{ from = "pedigree", source = "status" }] status.domain = [ { id = "affected", name = "Affected", values = ["affected"], color = "#aabbcc" }, { id = "unaffected", name = "Unaffected", values = ["unaffected"], color = "#ffffff" }, ] status.default = {id = 
"unknown",name = "Unknown",color = "#aaaaaa"} """) config = get_person_set_collections_config(content) collection = PersonSetCollection.from_families( config.status, families_fixture) return collection def test_status_person_set_collection(status_collection): assert status_collection is not None psc = status_collection assert len(psc.person_sets) == 3 assert len(psc.person_sets["unknown"].persons) == 2 assert len(psc.person_sets["affected"].persons) == 5 assert len(psc.person_sets["unaffected"].persons) == 4 def test_status_person_set_collection_all_selected( status_collection): query = ImpalaVariants.build_person_set_collection_query( status_collection, ("status", {"affected", "unaffected", "unknown"}) ) assert query == () def test_status_person_set_collection_some_selected_no_default( status_collection): query = ImpalaVariants.build_person_set_collection_query( status_collection, ("status", {"affected"}) ) assert query == ([{"status": "affected"}], []) def test_status_person_set_collection_some_selected_and_default( status_collection): query = ImpalaVariants.build_person_set_collection_query( status_collection, ("status", {"affected", "unknown"}) ) assert query == ([], [{"status": "unaffected"}]) @pytest.fixture def status_sex_collection(families_fixture): config = get_person_set_collections_config(textwrap.dedent(""" [person_set_collections] selected_person_set_collections = ["status_sex"] status_sex.id = "status_sex" status_sex.name = "Affected Status and Sex" status_sex.sources = [ { from = "pedigree", source = "status" }, { from = "pedigree", source = "sex" }, ] status_sex.domain = [ { id = "affected_male", name = "Affected Male", values = ["affected", "M"], color = "#ffffff" }, { id = "affected_female", name = "Affected Female", values = ["affected", "F"], color = "#ffffff" }, { id = "unaffected_male", name = "Unaffected Male", values = ["unaffected", "M"], color = "#ffffff" }, { id = "unaffected_female", name = "Unaffected Female", values = ["unaffected", 
"F"], color = "#ffffff" }, ] status_sex.default = { id="other", name="Other", color="#aaaaaa"} """)) return PersonSetCollection.from_families( config.status_sex, families_fixture ) def test_status_sex_person_set_collection_all_selected( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ("status_sex", { "affected_male", "affected_female", "unaffected_male", "unaffected_female", "other"}) ) assert query == () def test_status_sex_person_set_collection_some_selected_no_default( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ("status_sex", { "affected_male", "affected_female"}) ) assert query == ( [ {"sex": "F", "status": "affected"}, {"sex": "M", "status": "affected"}, ], []) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ("status_sex", { "unaffected_male", "unaffected_female"}) ) assert query == ( [ {"sex": "F", "status": "unaffected"}, {"sex": "M", "status": "unaffected"} ], []) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ("status_sex", { "affected_male", "unaffected_female"}) ) assert query == ([ {"sex": "M", "status": "affected"}, {"sex": "F", "status": "unaffected"}, ], []) def test_status_sex_person_set_collection_some_selected_with_default( status_sex_collection): query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ("status_sex", { "affected_male", "affected_female", "other"}) ) assert query == ([], [ {"sex": "F", "status": "unaffected"}, {"sex": "M", "status": "unaffected"}, ]) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ("status_sex", { "unaffected_male", "unaffected_female", "other"})) assert query == ([], [ {"sex": "F", "status": "affected"}, {"sex": "M", "status": "affected"}, ]) query = ImpalaVariants.build_person_set_collection_query( status_sex_collection, ("status_sex", { "affected_male", 
"unaffected_female", "other"}) ) assert query == ([], [ {"sex": "F", "status": "affected"}, {"sex": "M", "status": "unaffected"}, ])
flexible
{ "blob_id": "6c8f690e1b43d459535238e24cccc8aa118e2d57", "index": 3038, "step-1": "<mask token>\n\n\n@pytest.fixture\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"\n ))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads(\n content)), {'person_set_collections': person_set_collections_schema}\n ).person_set_collections\n\n\n<mask token>\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = status_collection\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets['unknown'].persons) == 2\n assert len(psc.person_sets['affected'].persons) == 5\n assert len(psc.person_sets['unaffected'].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unaffected', 'unknown'}))\n assert query == ()\n\n\n<mask token>\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unknown'}))\n assert query == ([], [{'status': 'unaffected'}])\n\n\n@pytest.fixture\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n 
status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = \"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"\n ))\n return PersonSetCollection.from_families(config.status_sex,\n families_fixture)\n\n\n<mask token>\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n", "step-2": "<mask token>\n\n\n@pytest.fixture\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 
prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"\n ))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads(\n content)), {'person_set_collections': person_set_collections_schema}\n ).person_set_collections\n\n\n<mask token>\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = status_collection\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets['unknown'].persons) == 2\n assert len(psc.person_sets['affected'].persons) == 5\n assert len(psc.person_sets['unaffected'].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unaffected', 'unknown'}))\n assert query == ()\n\n\n<mask token>\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unknown'}))\n assert query == ([], [{'status': 'unaffected'}])\n\n\n@pytest.fixture\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = 
\"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"\n ))\n return PersonSetCollection.from_families(config.status_sex,\n families_fixture)\n\n\n<mask token>\n\n\ndef test_status_sex_person_set_collection_some_selected_no_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female'}))\n assert query == ([{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'M', 'status': 'affected'}, {'sex': 'F',\n 'status': 'unaffected'}], [])\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}])\n 
query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n", "step-3": "<mask token>\n\n\n@pytest.fixture\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"\n ))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads(\n content)), {'person_set_collections': person_set_collections_schema}\n ).person_set_collections\n\n\n@pytest.fixture\ndef status_collection(families_fixture):\n content = textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status\"]\n status.id = \"status\"\n status.name = \"Affected Status\"\n status.sources = [{ from = \"pedigree\", source = \"status\" }]\n status.domain = [\n {\n id = \"affected\",\n name = \"Affected\",\n values = [\"affected\"],\n color = \"#aabbcc\"\n },\n {\n id = \"unaffected\",\n name = \"Unaffected\",\n values = [\"unaffected\"],\n color = \"#ffffff\"\n },\n ]\n status.default = {id = \"unknown\",name = \"Unknown\",color = \"#aaaaaa\"}\n\n \"\"\"\n )\n config = get_person_set_collections_config(content)\n collection = PersonSetCollection.from_families(config.status,\n families_fixture)\n return collection\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = 
status_collection\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets['unknown'].persons) == 2\n assert len(psc.person_sets['affected'].persons) == 5\n assert len(psc.person_sets['unaffected'].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unaffected', 'unknown'}))\n assert query == ()\n\n\ndef test_status_person_set_collection_some_selected_no_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected'}))\n assert query == ([{'status': 'affected'}], [])\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unknown'}))\n assert query == ([], [{'status': 'unaffected'}])\n\n\n@pytest.fixture\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = \"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"\n ))\n return 
PersonSetCollection.from_families(config.status_sex,\n families_fixture)\n\n\n<mask token>\n\n\ndef test_status_sex_person_set_collection_some_selected_no_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female'}))\n assert query == ([{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'M', 'status': 'affected'}, {'sex': 'F',\n 'status': 'unaffected'}], [])\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n", "step-4": "<mask token>\n\n\n@pytest.fixture\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 
dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"\n ))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(cast(Dict[str, Any], toml.loads(\n content)), {'person_set_collections': person_set_collections_schema}\n ).person_set_collections\n\n\n@pytest.fixture\ndef status_collection(families_fixture):\n content = textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status\"]\n status.id = \"status\"\n status.name = \"Affected Status\"\n status.sources = [{ from = \"pedigree\", source = \"status\" }]\n status.domain = [\n {\n id = \"affected\",\n name = \"Affected\",\n values = [\"affected\"],\n color = \"#aabbcc\"\n },\n {\n id = \"unaffected\",\n name = \"Unaffected\",\n values = [\"unaffected\"],\n color = \"#ffffff\"\n },\n ]\n status.default = {id = \"unknown\",name = \"Unknown\",color = \"#aaaaaa\"}\n\n \"\"\"\n )\n config = get_person_set_collections_config(content)\n collection = PersonSetCollection.from_families(config.status,\n families_fixture)\n return collection\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = status_collection\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets['unknown'].persons) == 2\n assert len(psc.person_sets['affected'].persons) == 5\n assert len(psc.person_sets['unaffected'].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unaffected', 'unknown'}))\n assert query == ()\n\n\ndef 
test_status_person_set_collection_some_selected_no_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected'}))\n assert query == ([{'status': 'affected'}], [])\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n query = ImpalaVariants.build_person_set_collection_query(status_collection,\n ('status', {'affected', 'unknown'}))\n assert query == ([], [{'status': 'unaffected'}])\n\n\n@pytest.fixture\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = \"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"\n ))\n return PersonSetCollection.from_families(config.status_sex,\n families_fixture)\n\n\ndef test_status_sex_person_set_collection_all_selected(status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'unaffected_male', 'unaffected_female', 'other'}))\n assert query == ()\n\n\ndef test_status_sex_person_set_collection_some_selected_no_default(\n status_sex_collection):\n query = 
ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female'}))\n assert query == ([{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}], [])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female'}))\n assert query == ([{'sex': 'M', 'status': 'affected'}, {'sex': 'F',\n 'status': 'unaffected'}], [])\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'affected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'unaffected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'unaffected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'affected'}])\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection, ('status_sex', {'affected_male',\n 'unaffected_female', 'other'}))\n assert query == ([], [{'sex': 'F', 'status': 'affected'}, {'sex': 'M',\n 'status': 'unaffected'}])\n", "step-5": "# pylint: disable=W0621,C0114,C0116,W0212,W0613\nimport io\nimport textwrap\nfrom typing import cast, Any, Dict\n\nimport toml\nimport pytest\n\nfrom dae.testing import convert_to_tab_separated\nfrom dae.configuration.gpf_config_parser import GPFConfigParser\nfrom dae.configuration.schemas.person_sets import person_set_collections_schema\nfrom dae.pedigrees.loader import FamiliesLoader\nfrom 
dae.person_sets import PersonSetCollection\n\nfrom impala_storage.schema1.impala_variants import ImpalaVariants\n\n\n@pytest.fixture\ndef families_fixture():\n ped_content = io.StringIO(convert_to_tab_separated(\n \"\"\"\n familyId personId dadId\t momId\tsex status role\n f1 mom1 0 0 2 1 mom\n f1 dad1 0 0 1 1 dad\n f1 prb1 dad1 mom1 1 2 prb\n f1 sib1 dad1 mom1 2 2 sib\n f1 sib2 dad1 mom1 2 2 sib\n f2 grmom2 0 0 2 0 maternal_grandmother\n f2 grdad2 0 0 1 0 maternal_grandfather\n f2 mom2 grdad2 grmom2 2 1 mom\n f2 dad2 0 0 1 1 dad\n f2 prb2 dad2 mom2 1 2 prb\n f2 sib2_3 dad2 mom2 2 2 sib\n \"\"\"))\n families = FamiliesLoader(ped_content).load()\n assert families is not None\n return families\n\n\ndef get_person_set_collections_config(content: str):\n return GPFConfigParser.process_config(\n cast(Dict[str, Any], toml.loads(content)),\n {\"person_set_collections\": person_set_collections_schema},\n ).person_set_collections\n\n\n@pytest.fixture\ndef status_collection(families_fixture):\n content = textwrap.dedent(\n \"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status\"]\n status.id = \"status\"\n status.name = \"Affected Status\"\n status.sources = [{ from = \"pedigree\", source = \"status\" }]\n status.domain = [\n {\n id = \"affected\",\n name = \"Affected\",\n values = [\"affected\"],\n color = \"#aabbcc\"\n },\n {\n id = \"unaffected\",\n name = \"Unaffected\",\n values = [\"unaffected\"],\n color = \"#ffffff\"\n },\n ]\n status.default = {id = \"unknown\",name = \"Unknown\",color = \"#aaaaaa\"}\n\n \"\"\")\n\n config = get_person_set_collections_config(content)\n\n collection = PersonSetCollection.from_families(\n config.status, families_fixture)\n return collection\n\n\ndef test_status_person_set_collection(status_collection):\n assert status_collection is not None\n psc = status_collection\n\n assert len(psc.person_sets) == 3\n assert len(psc.person_sets[\"unknown\"].persons) == 2\n assert len(psc.person_sets[\"affected\"].persons) 
== 5\n assert len(psc.person_sets[\"unaffected\"].persons) == 4\n\n\ndef test_status_person_set_collection_all_selected(\n status_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_collection,\n (\"status\", {\"affected\", \"unaffected\", \"unknown\"})\n )\n\n assert query == ()\n\n\ndef test_status_person_set_collection_some_selected_no_default(\n status_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_collection,\n (\"status\", {\"affected\"})\n )\n\n assert query == ([{\"status\": \"affected\"}], [])\n\n\ndef test_status_person_set_collection_some_selected_and_default(\n status_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_collection,\n (\"status\", {\"affected\", \"unknown\"})\n )\n\n assert query == ([], [{\"status\": \"unaffected\"}])\n\n\n@pytest.fixture\ndef status_sex_collection(families_fixture):\n config = get_person_set_collections_config(textwrap.dedent(\"\"\"\n [person_set_collections]\n selected_person_set_collections = [\"status_sex\"]\n\n status_sex.id = \"status_sex\"\n status_sex.name = \"Affected Status and Sex\"\n status_sex.sources = [\n { from = \"pedigree\", source = \"status\" },\n { from = \"pedigree\", source = \"sex\" },\n ]\n status_sex.domain = [\n { id = \"affected_male\", name = \"Affected Male\",\n values = [\"affected\", \"M\"], color = \"#ffffff\" },\n { id = \"affected_female\", name = \"Affected Female\",\n values = [\"affected\", \"F\"], color = \"#ffffff\" },\n { id = \"unaffected_male\", name = \"Unaffected Male\",\n values = [\"unaffected\", \"M\"], color = \"#ffffff\" },\n { id = \"unaffected_female\", name = \"Unaffected Female\",\n values = [\"unaffected\", \"F\"], color = \"#ffffff\" },\n ]\n status_sex.default = { id=\"other\", name=\"Other\", color=\"#aaaaaa\"}\n \"\"\"))\n\n return PersonSetCollection.from_families(\n config.status_sex, families_fixture\n )\n\n\ndef 
test_status_sex_person_set_collection_all_selected(\n status_sex_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"affected_female\",\n \"unaffected_male\", \"unaffected_female\",\n \"other\"})\n )\n\n assert query == ()\n\n\ndef test_status_sex_person_set_collection_some_selected_no_default(\n status_sex_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"affected_female\"})\n )\n\n assert query == (\n [\n {\"sex\": \"F\", \"status\": \"affected\"},\n {\"sex\": \"M\", \"status\": \"affected\"},\n ], [])\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"unaffected_male\", \"unaffected_female\"})\n )\n\n assert query == (\n [\n {\"sex\": \"F\", \"status\": \"unaffected\"},\n {\"sex\": \"M\", \"status\": \"unaffected\"}\n ], [])\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"unaffected_female\"})\n )\n\n assert query == ([\n {\"sex\": \"M\", \"status\": \"affected\"},\n {\"sex\": \"F\", \"status\": \"unaffected\"},\n ], [])\n\n\ndef test_status_sex_person_set_collection_some_selected_with_default(\n status_sex_collection):\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"affected_female\", \"other\"})\n )\n\n assert query == ([], [\n {\"sex\": \"F\", \"status\": \"unaffected\"},\n {\"sex\": \"M\", \"status\": \"unaffected\"},\n ])\n\n query = ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"unaffected_male\", \"unaffected_female\", \"other\"}))\n\n assert query == ([], [\n {\"sex\": \"F\", \"status\": \"affected\"},\n {\"sex\": \"M\", \"status\": \"affected\"},\n ])\n\n query = 
ImpalaVariants.build_person_set_collection_query(\n status_sex_collection,\n (\"status_sex\", {\n \"affected_male\", \"unaffected_female\", \"other\"})\n )\n\n assert query == ([], [\n {\"sex\": \"F\", \"status\": \"affected\"},\n {\"sex\": \"M\", \"status\": \"unaffected\"},\n ])\n", "step-ids": [ 7, 8, 10, 11, 13 ] }
[ 7, 8, 10, 11, 13 ]
#!/usr/bin/env python import pathlib from blastsight.view.viewer import Viewer """ In this demo, we'll show how you can create a basic animation. An animation is interpreted as changing the state of the viewer one frame at the time. That means we'll define a function that makes a change in one single frame. The function must receive a single argument, of the same type of the 'start' and 'end' values. """ v = Viewer() path = f'{pathlib.Path(__file__).parent.parent}/test_files/caseron.off' mesh = v.load_mesh(path, highlight=True) def autorotate(angle): v.set_rotation_angle([0.0, -angle, 0.0]) """ The animate() method receives a 'start' value, an 'end' value, a 'method' (the function that changes one frame in the viewer), and two optional kwargs: 'milliseconds' (how much time should the animation last) and 'steps' (smoothness of the animation depends on this). """ # Start animation v.animate(0, 360, autorotate, milliseconds=3000, steps=100) # Show viewer v.show()
normal
{ "blob_id": "00be3d813ce4335ff9ea02ed9f1884d3210f3d5a", "index": 3101, "step-1": "<mask token>\n\n\ndef autorotate(angle):\n v.set_rotation_angle([0.0, -angle, 0.0])\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\ndef autorotate(angle):\n v.set_rotation_angle([0.0, -angle, 0.0])\n\n\n<mask token>\nv.animate(0, 360, autorotate, milliseconds=3000, steps=100)\nv.show()\n", "step-3": "<mask token>\nv = Viewer()\npath = f'{pathlib.Path(__file__).parent.parent}/test_files/caseron.off'\nmesh = v.load_mesh(path, highlight=True)\n\n\ndef autorotate(angle):\n v.set_rotation_angle([0.0, -angle, 0.0])\n\n\n<mask token>\nv.animate(0, 360, autorotate, milliseconds=3000, steps=100)\nv.show()\n", "step-4": "import pathlib\nfrom blastsight.view.viewer import Viewer\n<mask token>\nv = Viewer()\npath = f'{pathlib.Path(__file__).parent.parent}/test_files/caseron.off'\nmesh = v.load_mesh(path, highlight=True)\n\n\ndef autorotate(angle):\n v.set_rotation_angle([0.0, -angle, 0.0])\n\n\n<mask token>\nv.animate(0, 360, autorotate, milliseconds=3000, steps=100)\nv.show()\n", "step-5": "#!/usr/bin/env python\n\nimport pathlib\n\nfrom blastsight.view.viewer import Viewer\n\n\"\"\"\nIn this demo, we'll show how you can create a basic animation.\n\nAn animation is interpreted as changing the state of the viewer one frame at the time.\nThat means we'll define a function that makes a change in one single frame.\nThe function must receive a single argument, of the same type of the 'start' and 'end' values.\n\"\"\"\n\nv = Viewer()\npath = f'{pathlib.Path(__file__).parent.parent}/test_files/caseron.off'\nmesh = v.load_mesh(path, highlight=True)\n\n\ndef autorotate(angle):\n v.set_rotation_angle([0.0, -angle, 0.0])\n\n\n\"\"\"\nThe animate() method receives a 'start' value, an 'end' value, a 'method' (the function that changes\none frame in the viewer), and two optional kwargs: 'milliseconds' (how much time should the\nanimation last) and 'steps' (smoothness of the animation depends on 
this).\n\"\"\"\n\n# Start animation\nv.animate(0, 360, autorotate, milliseconds=3000, steps=100)\n\n# Show viewer\nv.show()\n", "step-ids": [ 1, 2, 3, 4, 5 ] }
[ 1, 2, 3, 4, 5 ]
# -*- coding: utf-8 -*- # Generated by Django 1.10.3 on 2016-11-17 14:47 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('votes', '0003_choice_votes'), ] operations = [ migrations.CreateModel( name='Token', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('token', models.CharField(editable=False, max_length=6)), ('used', models.BooleanField(default=False, editable=False)), ], ), migrations.AlterField( model_name='choice', name='votes', field=models.IntegerField(default=0, editable=False), ), ]
normal
{ "blob_id": "781cb59fb9b6d22547fd4acf895457868342e125", "index": 8290, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('votes', '0003_choice_votes')]\n operations = [migrations.CreateModel(name='Token', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('token', models.CharField(editable=\n False, max_length=6)), ('used', models.BooleanField(default=False,\n editable=False))]), migrations.AlterField(model_name='choice', name\n ='votes', field=models.IntegerField(default=0, editable=False))]\n", "step-4": "from __future__ import unicode_literals\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('votes', '0003_choice_votes')]\n operations = [migrations.CreateModel(name='Token', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID')), ('token', models.CharField(editable=\n False, max_length=6)), ('used', models.BooleanField(default=False,\n editable=False))]), migrations.AlterField(model_name='choice', name\n ='votes', field=models.IntegerField(default=0, editable=False))]\n", "step-5": "# -*- coding: utf-8 -*-\n# Generated by Django 1.10.3 on 2016-11-17 14:47\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('votes', '0003_choice_votes'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Token',\n fields=[\n ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('token', models.CharField(editable=False, max_length=6)),\n ('used', models.BooleanField(default=False, editable=False)),\n ],\n ),\n migrations.AlterField(\n model_name='choice',\n name='votes',\n 
field=models.IntegerField(default=0, editable=False),\n ),\n ]\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> def contact(request): form_class = ContactForm if request.method == 'POST': form = form_class(data=request.POST) if form.is_valid(): contact_name = request.POST.get('contact_name', '') contact_email = request.POST.get('contact_email', '') form_content = request.POST.get('content', '') form_content = request.POST.get('content', '') template = get_template('contact_template.txt') context = {'contact_name': contact_name, 'contact_email': contact_email, 'form_content': form_content} content = template.render(context) email = EmailMessage('New contact form submission', content, 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To': contact_email}) email.send() return redirect('/success') return render(request, 'contact.html', {'form': form_class}) <|reserved_special_token_0|> @login_required(login_url='/login/') def services(request): return render(request, 'services.html') <|reserved_special_token_0|> @login_required(login_url='/login/') def creditcard(request): return render(request, 'creditcard.html') @login_required(login_url='/login/') def mobilefraud(request): return render(request, 'mobile.html') @login_required(login_url='/login/') def bankresult(request): age = request.POST.get('age') job = request.POST.get('job') print(job) if job == 'Unemployed': new_job = 1 elif job == 'Management': new_job = 2 elif job == 'Services': new_job = 3 elif job == 'Blue-Collar': new_job = 4 elif job == 'Entrepreneur': new_job = 5 elif job == 'Admin': new_job = 6 elif job == 'Unknown': new_job = 7 elif job == 'Self-employed': new_job = 8 elif job == 'Student': new_job = 9 elif job == 'House maid': new_job = 10 elif job == 'Technician': new_job = 11 elif job == 'Retired': new_job = 12 print(new_job) marital = request.POST.get('marital') if marital == 'Single': new_marital = 1 elif marital == 'Divorced': new_marital = 2 elif marital == 'Married': new_marital = 3 print(new_marital) education = request.POST.get('education') if education == 'Unknown': 
new_education = 1 elif education == 'Primary': new_education = 2 elif education == 'Secondary': new_education = 3 elif education == 'Graduate': new_education = 4 print(new_education) balance = request.POST.get('balance') housing = request.POST.get('housing') if housing == 'Yes': new_housing = 1 elif housing == 'No': new_housing = 2 print(new_housing) loan = request.POST.get('loan') if loan == 'Yes': new_loan = 1 elif loan == 'No': new_loan = 2 print(new_loan) duration = int(request.POST.get('duration')) campaign = int(request.POST.get('campaign')) pdays = int(request.POST.get('pdays')) previous = int(request.POST.get('previous')) poutcome = request.POST.get('poutcome') if poutcome == 'Unknown': new_poutcome = 3 elif poutcome == 'Failure': new_poutcome = 1 elif poutcome == 'Successs': new_poutcome = 4 elif poutcome == 'Failure': new_poutcome = 2 print(new_poutcome) bank_data = np.array([age, new_job, new_marital, new_education, balance, new_housing, new_loan, duration, campaign, pdays, previous, new_poutcome]) clf = bank_model() c = clf.predict([bank_data]) print(c) if c == [1]: response = 'Not Fraud' else: response = 'Fraud' accuracy = 0.8962983425414365 return render(request, 'bank/result.html', {'result': response, 'accuracy': accuracy}) <|reserved_special_token_0|> @login_required(login_url='/login/') def mobileresult(request): step = request.POST.get('step') type = request.POST.get('type') if type == 'Payment': new_type = 1 elif type == 'Transfer': new_type = 4 elif type == 'Cash-out': new_type = 5 elif type == 'Debit': new_type = 2 print(new_type) amount = request.POST.get('amount') nameOrig = request.POST.get('nameOrig') oldbalanceOrg = request.POST.get('oldbalanceOrg') newbalanceOrig = request.POST.get('newbalanceOrig') nameDest = request.POST.get('nameDest') oldbalanceDest = request.POST.get('oldbalanceDest') newbalanceDest = request.POST.get('newbalanceDest') isFlaggedFraud = 1 mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg, 
newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest, isFlaggedFraud]) clf = mobile_model() c = clf.predict([mobile_data]) print(c) if c == [0]: response = 'Not Fraud' else: response = 'Fraud' return render(request, 'mobile/result.html', {'result': response}) def analytics(request): return render(request, 'analytics.html', {'analytics': result, 'mobile_analytics': mobile_result, 'creditcard_analytics': creditcard_result}) <|reserved_special_token_1|> <|reserved_special_token_0|> def index(request): return render(request, 'index.html') <|reserved_special_token_0|> def contact(request): form_class = ContactForm if request.method == 'POST': form = form_class(data=request.POST) if form.is_valid(): contact_name = request.POST.get('contact_name', '') contact_email = request.POST.get('contact_email', '') form_content = request.POST.get('content', '') form_content = request.POST.get('content', '') template = get_template('contact_template.txt') context = {'contact_name': contact_name, 'contact_email': contact_email, 'form_content': form_content} content = template.render(context) email = EmailMessage('New contact form submission', content, 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To': contact_email}) email.send() return redirect('/success') return render(request, 'contact.html', {'form': form_class}) <|reserved_special_token_0|> @login_required(login_url='/login/') def services(request): return render(request, 'services.html') <|reserved_special_token_0|> @login_required(login_url='/login/') def creditcard(request): return render(request, 'creditcard.html') @login_required(login_url='/login/') def mobilefraud(request): return render(request, 'mobile.html') @login_required(login_url='/login/') def bankresult(request): age = request.POST.get('age') job = request.POST.get('job') print(job) if job == 'Unemployed': new_job = 1 elif job == 'Management': new_job = 2 elif job == 'Services': new_job = 3 elif job == 'Blue-Collar': new_job = 4 elif job == 
'Entrepreneur': new_job = 5 elif job == 'Admin': new_job = 6 elif job == 'Unknown': new_job = 7 elif job == 'Self-employed': new_job = 8 elif job == 'Student': new_job = 9 elif job == 'House maid': new_job = 10 elif job == 'Technician': new_job = 11 elif job == 'Retired': new_job = 12 print(new_job) marital = request.POST.get('marital') if marital == 'Single': new_marital = 1 elif marital == 'Divorced': new_marital = 2 elif marital == 'Married': new_marital = 3 print(new_marital) education = request.POST.get('education') if education == 'Unknown': new_education = 1 elif education == 'Primary': new_education = 2 elif education == 'Secondary': new_education = 3 elif education == 'Graduate': new_education = 4 print(new_education) balance = request.POST.get('balance') housing = request.POST.get('housing') if housing == 'Yes': new_housing = 1 elif housing == 'No': new_housing = 2 print(new_housing) loan = request.POST.get('loan') if loan == 'Yes': new_loan = 1 elif loan == 'No': new_loan = 2 print(new_loan) duration = int(request.POST.get('duration')) campaign = int(request.POST.get('campaign')) pdays = int(request.POST.get('pdays')) previous = int(request.POST.get('previous')) poutcome = request.POST.get('poutcome') if poutcome == 'Unknown': new_poutcome = 3 elif poutcome == 'Failure': new_poutcome = 1 elif poutcome == 'Successs': new_poutcome = 4 elif poutcome == 'Failure': new_poutcome = 2 print(new_poutcome) bank_data = np.array([age, new_job, new_marital, new_education, balance, new_housing, new_loan, duration, campaign, pdays, previous, new_poutcome]) clf = bank_model() c = clf.predict([bank_data]) print(c) if c == [1]: response = 'Not Fraud' else: response = 'Fraud' accuracy = 0.8962983425414365 return render(request, 'bank/result.html', {'result': response, 'accuracy': accuracy}) @login_required(login_url='/login/') def creditresult(request): if request.method == 'POST': limit_balance = request.POST.get('limit_balance') sex = request.POST.get('sex') print(sex) 
if sex == 'Male': new_sex = 1 else: new_sex = 2 print(new_sex) education = request.POST.get('education') if education == 'Primary': new_education = 1 elif education == 'Secondary': new_education = 2 elif education == 'Graduate': new_education = 3 print(new_education) marriage = request.POST.get('marriage') if marriage == 'Single': new_marriage = 1 elif marriage == 'Married': new_marriage = 2 elif education == 'Divorced': new_marriage = 3 print(new_marriage) age = request.POST.get('age') pay_1 = int(request.POST.get('pay_1')) pay_2 = int(request.POST.get('pay_2')) pay_3 = int(request.POST.get('pay_3')) pay_4 = int(request.POST.get('pay_4')) pay_5 = int(request.POST.get('pay_5')) pay_6 = int(request.POST.get('pay_6')) Bill_Amt_1 = int(request.POST.get('Bill_Amt_1')) Bill_Amt_2 = int(request.POST.get('Bill_Amt_2')) Bill_Amt_3 = int(request.POST.get('Bill_Amt_3')) Bill_Amt_4 = int(request.POST.get('Bill_Amt_4')) Bill_Amt_5 = int(request.POST.get('Bill_Amt_5')) Bill_Amt_6 = int(request.POST.get('Bill_Amt_6')) Pay_Amt_1 = int(request.POST.get('Pay_Amt_1')) Pay_Amt_2 = int(request.POST.get('Pay_Amt_2')) Pay_Amt_3 = int(request.POST.get('Pay_Amt_3')) Pay_Amt_4 = int(request.POST.get('Pay_Amt_4')) Pay_Amt_5 = int(request.POST.get('Pay_Amt_5')) Pay_Amt_6 = int(request.POST.get('Pay_Amt_6')) credit_data = np.array([limit_balance, new_sex, new_education, new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6, Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5, Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4, Pay_Amt_5, Pay_Amt_6]) print(credit_data) clf = credit_model() c = clf.predict([credit_data]) print(c) if c == [0]: response = 'Not a Fraud' else: response = 'fraud' return render(request, 'creditcard/result.html', {'result': response}) else: return redirect('/creditcard', request) @login_required(login_url='/login/') def mobileresult(request): step = request.POST.get('step') type = request.POST.get('type') if type == 'Payment': new_type = 1 elif type 
== 'Transfer': new_type = 4 elif type == 'Cash-out': new_type = 5 elif type == 'Debit': new_type = 2 print(new_type) amount = request.POST.get('amount') nameOrig = request.POST.get('nameOrig') oldbalanceOrg = request.POST.get('oldbalanceOrg') newbalanceOrig = request.POST.get('newbalanceOrig') nameDest = request.POST.get('nameDest') oldbalanceDest = request.POST.get('oldbalanceDest') newbalanceDest = request.POST.get('newbalanceDest') isFlaggedFraud = 1 mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg, newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest, isFlaggedFraud]) clf = mobile_model() c = clf.predict([mobile_data]) print(c) if c == [0]: response = 'Not Fraud' else: response = 'Fraud' return render(request, 'mobile/result.html', {'result': response}) def analytics(request): return render(request, 'analytics.html', {'analytics': result, 'mobile_analytics': mobile_result, 'creditcard_analytics': creditcard_result}) <|reserved_special_token_1|> <|reserved_special_token_0|> def index(request): return render(request, 'index.html') <|reserved_special_token_0|> def contact(request): form_class = ContactForm if request.method == 'POST': form = form_class(data=request.POST) if form.is_valid(): contact_name = request.POST.get('contact_name', '') contact_email = request.POST.get('contact_email', '') form_content = request.POST.get('content', '') form_content = request.POST.get('content', '') template = get_template('contact_template.txt') context = {'contact_name': contact_name, 'contact_email': contact_email, 'form_content': form_content} content = template.render(context) email = EmailMessage('New contact form submission', content, 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To': contact_email}) email.send() return redirect('/success') return render(request, 'contact.html', {'form': form_class}) <|reserved_special_token_0|> def login_view(request): next = request.GET.get('next') form = UserLoginForm(request.POST or None) if 
form.is_valid(): username = form.cleaned_data.get('username') password = form.cleaned_data.get('password') user = authenticate(username=username, password=password) login(request, user) if next: return redirect(next) return redirect('/') return render(request, 'login.html', {'form': form}) <|reserved_special_token_0|> @login_required(login_url='/login/') def services(request): return render(request, 'services.html') @login_required(login_url='/login/') def bank(request): return render(request, 'bank.html') @login_required(login_url='/login/') def creditcard(request): return render(request, 'creditcard.html') @login_required(login_url='/login/') def mobilefraud(request): return render(request, 'mobile.html') @login_required(login_url='/login/') def bankresult(request): age = request.POST.get('age') job = request.POST.get('job') print(job) if job == 'Unemployed': new_job = 1 elif job == 'Management': new_job = 2 elif job == 'Services': new_job = 3 elif job == 'Blue-Collar': new_job = 4 elif job == 'Entrepreneur': new_job = 5 elif job == 'Admin': new_job = 6 elif job == 'Unknown': new_job = 7 elif job == 'Self-employed': new_job = 8 elif job == 'Student': new_job = 9 elif job == 'House maid': new_job = 10 elif job == 'Technician': new_job = 11 elif job == 'Retired': new_job = 12 print(new_job) marital = request.POST.get('marital') if marital == 'Single': new_marital = 1 elif marital == 'Divorced': new_marital = 2 elif marital == 'Married': new_marital = 3 print(new_marital) education = request.POST.get('education') if education == 'Unknown': new_education = 1 elif education == 'Primary': new_education = 2 elif education == 'Secondary': new_education = 3 elif education == 'Graduate': new_education = 4 print(new_education) balance = request.POST.get('balance') housing = request.POST.get('housing') if housing == 'Yes': new_housing = 1 elif housing == 'No': new_housing = 2 print(new_housing) loan = request.POST.get('loan') if loan == 'Yes': new_loan = 1 elif loan == 'No': 
new_loan = 2 print(new_loan) duration = int(request.POST.get('duration')) campaign = int(request.POST.get('campaign')) pdays = int(request.POST.get('pdays')) previous = int(request.POST.get('previous')) poutcome = request.POST.get('poutcome') if poutcome == 'Unknown': new_poutcome = 3 elif poutcome == 'Failure': new_poutcome = 1 elif poutcome == 'Successs': new_poutcome = 4 elif poutcome == 'Failure': new_poutcome = 2 print(new_poutcome) bank_data = np.array([age, new_job, new_marital, new_education, balance, new_housing, new_loan, duration, campaign, pdays, previous, new_poutcome]) clf = bank_model() c = clf.predict([bank_data]) print(c) if c == [1]: response = 'Not Fraud' else: response = 'Fraud' accuracy = 0.8962983425414365 return render(request, 'bank/result.html', {'result': response, 'accuracy': accuracy}) @login_required(login_url='/login/') def creditresult(request): if request.method == 'POST': limit_balance = request.POST.get('limit_balance') sex = request.POST.get('sex') print(sex) if sex == 'Male': new_sex = 1 else: new_sex = 2 print(new_sex) education = request.POST.get('education') if education == 'Primary': new_education = 1 elif education == 'Secondary': new_education = 2 elif education == 'Graduate': new_education = 3 print(new_education) marriage = request.POST.get('marriage') if marriage == 'Single': new_marriage = 1 elif marriage == 'Married': new_marriage = 2 elif education == 'Divorced': new_marriage = 3 print(new_marriage) age = request.POST.get('age') pay_1 = int(request.POST.get('pay_1')) pay_2 = int(request.POST.get('pay_2')) pay_3 = int(request.POST.get('pay_3')) pay_4 = int(request.POST.get('pay_4')) pay_5 = int(request.POST.get('pay_5')) pay_6 = int(request.POST.get('pay_6')) Bill_Amt_1 = int(request.POST.get('Bill_Amt_1')) Bill_Amt_2 = int(request.POST.get('Bill_Amt_2')) Bill_Amt_3 = int(request.POST.get('Bill_Amt_3')) Bill_Amt_4 = int(request.POST.get('Bill_Amt_4')) Bill_Amt_5 = int(request.POST.get('Bill_Amt_5')) Bill_Amt_6 = 
int(request.POST.get('Bill_Amt_6')) Pay_Amt_1 = int(request.POST.get('Pay_Amt_1')) Pay_Amt_2 = int(request.POST.get('Pay_Amt_2')) Pay_Amt_3 = int(request.POST.get('Pay_Amt_3')) Pay_Amt_4 = int(request.POST.get('Pay_Amt_4')) Pay_Amt_5 = int(request.POST.get('Pay_Amt_5')) Pay_Amt_6 = int(request.POST.get('Pay_Amt_6')) credit_data = np.array([limit_balance, new_sex, new_education, new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6, Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5, Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4, Pay_Amt_5, Pay_Amt_6]) print(credit_data) clf = credit_model() c = clf.predict([credit_data]) print(c) if c == [0]: response = 'Not a Fraud' else: response = 'fraud' return render(request, 'creditcard/result.html', {'result': response}) else: return redirect('/creditcard', request) @login_required(login_url='/login/') def mobileresult(request): step = request.POST.get('step') type = request.POST.get('type') if type == 'Payment': new_type = 1 elif type == 'Transfer': new_type = 4 elif type == 'Cash-out': new_type = 5 elif type == 'Debit': new_type = 2 print(new_type) amount = request.POST.get('amount') nameOrig = request.POST.get('nameOrig') oldbalanceOrg = request.POST.get('oldbalanceOrg') newbalanceOrig = request.POST.get('newbalanceOrig') nameDest = request.POST.get('nameDest') oldbalanceDest = request.POST.get('oldbalanceDest') newbalanceDest = request.POST.get('newbalanceDest') isFlaggedFraud = 1 mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg, newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest, isFlaggedFraud]) clf = mobile_model() c = clf.predict([mobile_data]) print(c) if c == [0]: response = 'Not Fraud' else: response = 'Fraud' return render(request, 'mobile/result.html', {'result': response}) def analytics(request): return render(request, 'analytics.html', {'analytics': result, 'mobile_analytics': mobile_result, 'creditcard_analytics': creditcard_result}) 
<|reserved_special_token_1|> <|reserved_special_token_0|> def index(request): return render(request, 'index.html') def about(request): return render(request, 'about.html') def contact(request): form_class = ContactForm if request.method == 'POST': form = form_class(data=request.POST) if form.is_valid(): contact_name = request.POST.get('contact_name', '') contact_email = request.POST.get('contact_email', '') form_content = request.POST.get('content', '') form_content = request.POST.get('content', '') template = get_template('contact_template.txt') context = {'contact_name': contact_name, 'contact_email': contact_email, 'form_content': form_content} content = template.render(context) email = EmailMessage('New contact form submission', content, 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To': contact_email}) email.send() return redirect('/success') return render(request, 'contact.html', {'form': form_class}) def success(request): return render(request, 'success.html') def login_view(request): next = request.GET.get('next') form = UserLoginForm(request.POST or None) if form.is_valid(): username = form.cleaned_data.get('username') password = form.cleaned_data.get('password') user = authenticate(username=username, password=password) login(request, user) if next: return redirect(next) return redirect('/') return render(request, 'login.html', {'form': form}) @login_required(login_url='/login/') def logout_view(request): logout(request) return render(request, 'index.html') @login_required(login_url='/login/') def services(request): return render(request, 'services.html') @login_required(login_url='/login/') def bank(request): return render(request, 'bank.html') @login_required(login_url='/login/') def creditcard(request): return render(request, 'creditcard.html') @login_required(login_url='/login/') def mobilefraud(request): return render(request, 'mobile.html') @login_required(login_url='/login/') def bankresult(request): age = request.POST.get('age') job = 
request.POST.get('job') print(job) if job == 'Unemployed': new_job = 1 elif job == 'Management': new_job = 2 elif job == 'Services': new_job = 3 elif job == 'Blue-Collar': new_job = 4 elif job == 'Entrepreneur': new_job = 5 elif job == 'Admin': new_job = 6 elif job == 'Unknown': new_job = 7 elif job == 'Self-employed': new_job = 8 elif job == 'Student': new_job = 9 elif job == 'House maid': new_job = 10 elif job == 'Technician': new_job = 11 elif job == 'Retired': new_job = 12 print(new_job) marital = request.POST.get('marital') if marital == 'Single': new_marital = 1 elif marital == 'Divorced': new_marital = 2 elif marital == 'Married': new_marital = 3 print(new_marital) education = request.POST.get('education') if education == 'Unknown': new_education = 1 elif education == 'Primary': new_education = 2 elif education == 'Secondary': new_education = 3 elif education == 'Graduate': new_education = 4 print(new_education) balance = request.POST.get('balance') housing = request.POST.get('housing') if housing == 'Yes': new_housing = 1 elif housing == 'No': new_housing = 2 print(new_housing) loan = request.POST.get('loan') if loan == 'Yes': new_loan = 1 elif loan == 'No': new_loan = 2 print(new_loan) duration = int(request.POST.get('duration')) campaign = int(request.POST.get('campaign')) pdays = int(request.POST.get('pdays')) previous = int(request.POST.get('previous')) poutcome = request.POST.get('poutcome') if poutcome == 'Unknown': new_poutcome = 3 elif poutcome == 'Failure': new_poutcome = 1 elif poutcome == 'Successs': new_poutcome = 4 elif poutcome == 'Failure': new_poutcome = 2 print(new_poutcome) bank_data = np.array([age, new_job, new_marital, new_education, balance, new_housing, new_loan, duration, campaign, pdays, previous, new_poutcome]) clf = bank_model() c = clf.predict([bank_data]) print(c) if c == [1]: response = 'Not Fraud' else: response = 'Fraud' accuracy = 0.8962983425414365 return render(request, 'bank/result.html', {'result': response, 'accuracy': 
accuracy}) @login_required(login_url='/login/') def creditresult(request): if request.method == 'POST': limit_balance = request.POST.get('limit_balance') sex = request.POST.get('sex') print(sex) if sex == 'Male': new_sex = 1 else: new_sex = 2 print(new_sex) education = request.POST.get('education') if education == 'Primary': new_education = 1 elif education == 'Secondary': new_education = 2 elif education == 'Graduate': new_education = 3 print(new_education) marriage = request.POST.get('marriage') if marriage == 'Single': new_marriage = 1 elif marriage == 'Married': new_marriage = 2 elif education == 'Divorced': new_marriage = 3 print(new_marriage) age = request.POST.get('age') pay_1 = int(request.POST.get('pay_1')) pay_2 = int(request.POST.get('pay_2')) pay_3 = int(request.POST.get('pay_3')) pay_4 = int(request.POST.get('pay_4')) pay_5 = int(request.POST.get('pay_5')) pay_6 = int(request.POST.get('pay_6')) Bill_Amt_1 = int(request.POST.get('Bill_Amt_1')) Bill_Amt_2 = int(request.POST.get('Bill_Amt_2')) Bill_Amt_3 = int(request.POST.get('Bill_Amt_3')) Bill_Amt_4 = int(request.POST.get('Bill_Amt_4')) Bill_Amt_5 = int(request.POST.get('Bill_Amt_5')) Bill_Amt_6 = int(request.POST.get('Bill_Amt_6')) Pay_Amt_1 = int(request.POST.get('Pay_Amt_1')) Pay_Amt_2 = int(request.POST.get('Pay_Amt_2')) Pay_Amt_3 = int(request.POST.get('Pay_Amt_3')) Pay_Amt_4 = int(request.POST.get('Pay_Amt_4')) Pay_Amt_5 = int(request.POST.get('Pay_Amt_5')) Pay_Amt_6 = int(request.POST.get('Pay_Amt_6')) credit_data = np.array([limit_balance, new_sex, new_education, new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6, Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5, Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4, Pay_Amt_5, Pay_Amt_6]) print(credit_data) clf = credit_model() c = clf.predict([credit_data]) print(c) if c == [0]: response = 'Not a Fraud' else: response = 'fraud' return render(request, 'creditcard/result.html', {'result': response}) else: return 
redirect('/creditcard', request) @login_required(login_url='/login/') def mobileresult(request): step = request.POST.get('step') type = request.POST.get('type') if type == 'Payment': new_type = 1 elif type == 'Transfer': new_type = 4 elif type == 'Cash-out': new_type = 5 elif type == 'Debit': new_type = 2 print(new_type) amount = request.POST.get('amount') nameOrig = request.POST.get('nameOrig') oldbalanceOrg = request.POST.get('oldbalanceOrg') newbalanceOrig = request.POST.get('newbalanceOrig') nameDest = request.POST.get('nameDest') oldbalanceDest = request.POST.get('oldbalanceDest') newbalanceDest = request.POST.get('newbalanceDest') isFlaggedFraud = 1 mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg, newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest, isFlaggedFraud]) clf = mobile_model() c = clf.predict([mobile_data]) print(c) if c == [0]: response = 'Not Fraud' else: response = 'Fraud' return render(request, 'mobile/result.html', {'result': response}) def analytics(request): return render(request, 'analytics.html', {'analytics': result, 'mobile_analytics': mobile_result, 'creditcard_analytics': creditcard_result}) <|reserved_special_token_1|> import numpy as np from django.contrib.auth import logout, login, authenticate from django.contrib.auth.decorators import login_required from django.core.mail import EmailMessage from django.shortcuts import render, redirect from django.template.loader import get_template from dashboard.notebook.creditcard import credit_model from dashboard.notebook.bank import bank_model from dashboard.notebook.mobile_data import mobile_model from dashboard.notebook.graphs import result from dashboard.notebook.mobile_analytics import mobile_result from dashboard.notebook.creditcard_analytics import creditcard_result from .forms import ContactForm, UserLoginForm # view for index page def index(request): return render(request, 'index.html') # view for about page def about(request): return render(request, 
'about.html') ### contact view def contact(request): form_class = ContactForm # new logic! if request.method == 'POST': form = form_class(data=request.POST) if form.is_valid(): contact_name = request.POST.get('contact_name', '') contact_email = request.POST.get('contact_email', '') form_content = request.POST.get('content', '') form_content = request.POST.get('content', '') # Email the profile with the # contact information template = get_template('contact_template.txt') context = { 'contact_name': contact_name, 'contact_email': contact_email, 'form_content': form_content, } content = template.render(context) email = EmailMessage( "New contact form submission", content, "FDS" + '', ['b200jst@gmail.com'], headers={'Reply-To': contact_email} ) email.send() return redirect('/success') return render(request, 'contact.html', { 'form': form_class, }) # success page def success(request): return render(request, 'success.html') # login page def login_view(request): next = request.GET.get('next') form = UserLoginForm(request.POST or None) if form.is_valid(): username = form.cleaned_data.get('username') password = form.cleaned_data.get('password') user = authenticate(username=username, password=password) login(request, user) if next: return redirect(next) return redirect("/") return render(request, 'login.html',{"form":form}) # logout view @login_required(login_url='/login/') def logout_view(request): logout(request) return render(request, "index.html") # service view @login_required(login_url='/login/') def services(request): return render(request, 'services.html') # bank fraud page @login_required(login_url='/login/') def bank(request): return render(request, 'bank.html') # creditcard fraud page @login_required(login_url='/login/') def creditcard(request): return render(request, 'creditcard.html') # mobile transaction @login_required(login_url='/login/') def mobilefraud(request): return render(request, 'mobile.html') #banking services @login_required(login_url='/login/') 
def bankresult(request): # get the data and print prediction age = request.POST.get("age") job = request.POST.get("job") print(job) if (job == "Unemployed"): new_job = 1 elif (job == "Management"): new_job = 2 elif (job == "Services"): new_job = 3 elif (job == "Blue-Collar"): new_job = 4 elif (job == "Entrepreneur"): new_job = 5 elif (job == "Admin"): new_job = 6 elif (job == "Unknown"): new_job = 7 elif (job == "Self-employed"): new_job = 8 elif (job == "Student"): new_job = 9 elif (job == "House maid"): new_job = 10 elif (job == "Technician"): new_job = 11 elif (job == "Retired"): new_job = 12 print(new_job) marital = request.POST.get("marital") if (marital == "Single"): new_marital = 1 elif (marital == "Divorced"): new_marital = 2 elif (marital == "Married"): new_marital = 3 print(new_marital) education = request.POST.get("education") if (education == "Unknown"): new_education = 1 elif (education == "Primary"): new_education = 2 elif (education == "Secondary"): new_education = 3 elif (education == "Graduate"): new_education = 4 print(new_education) balance = request.POST.get("balance") housing = request.POST.get("housing") if (housing == "Yes"): new_housing = 1 elif (housing == "No"): new_housing = 2 print(new_housing) loan = request.POST.get("loan") if (loan == "Yes"): new_loan = 1 elif (loan == "No"): new_loan = 2 print(new_loan) duration = int(request.POST.get("duration")) campaign = int(request.POST.get('campaign')) pdays = int(request.POST.get('pdays')) previous = int(request.POST.get('previous')) poutcome = (request.POST.get("poutcome")) if (poutcome == "Unknown"): new_poutcome = 3 elif (poutcome == "Failure"): new_poutcome = 1 elif (poutcome == "Successs"): new_poutcome = 4 elif (poutcome == "Failure"): new_poutcome = 2 print(new_poutcome) bank_data = np.array([age,new_job,new_marital,new_education,balance,new_housing,new_loan,duration,campaign,pdays,previous,new_poutcome]) clf = bank_model() c = clf.predict([bank_data]) print(c) if c == [1]: # print("Not 
fraud") response = 'Not Fraud' else: # print("Fraud") response = 'Fraud' accuracy = 0.8962983425414365 return render(request, 'bank/result.html', {"result": response, 'accuracy':accuracy}) # analytics # def analysis(request): # return render(request, 'analysis.html', {'accuracy': accuracy}) # credit card services @login_required(login_url='/login/') def creditresult(request): if request.method == "POST": # get the data and print limit_balance = request.POST.get("limit_balance") sex = request.POST.get("sex") print(sex) if(sex=="Male"): new_sex = 1 else: new_sex = 2 print(new_sex) education = request.POST.get("education") if (education == "Primary"): new_education = 1 elif (education == "Secondary"): new_education = 2 elif (education == "Graduate"): new_education = 3 print(new_education) marriage = request.POST.get("marriage") if (marriage == "Single"): new_marriage = 1 elif (marriage == "Married"): new_marriage = 2 elif (education == "Divorced"): new_marriage = 3 print(new_marriage) age = request.POST.get("age") pay_1 = int(request.POST.get("pay_1")) pay_2 = int(request.POST.get("pay_2")) pay_3 = int(request.POST.get("pay_3")) pay_4 = int(request.POST.get("pay_4")) pay_5 = int(request.POST.get("pay_5")) pay_6 = int(request.POST.get("pay_6")) Bill_Amt_1 = int(request.POST.get("Bill_Amt_1")) Bill_Amt_2 = int(request.POST.get("Bill_Amt_2")) Bill_Amt_3 = int(request.POST.get("Bill_Amt_3")) Bill_Amt_4 = int(request.POST.get("Bill_Amt_4")) Bill_Amt_5 = int(request.POST.get("Bill_Amt_5")) Bill_Amt_6 = int(request.POST.get("Bill_Amt_6")) Pay_Amt_1 = int(request.POST.get("Pay_Amt_1")) Pay_Amt_2 = int(request.POST.get("Pay_Amt_2")) Pay_Amt_3 = int(request.POST.get("Pay_Amt_3")) Pay_Amt_4 = int(request.POST.get("Pay_Amt_4")) Pay_Amt_5 = int(request.POST.get("Pay_Amt_5")) Pay_Amt_6 = int(request.POST.get("Pay_Amt_6")) credit_data = np.array([limit_balance, new_sex, new_education, new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6, Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, 
Bill_Amt_4, Bill_Amt_5, Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4, Pay_Amt_5, Pay_Amt_6]) print(credit_data) clf = credit_model() c = clf.predict([credit_data]) print(c) if c == [0]: response = 'Not a Fraud' else: response = 'fraud' # print(c) return render(request, 'creditcard/result.html', {"result": response}) else: return redirect('/creditcard',request) # mobile fraud services @login_required(login_url='/login/') def mobileresult(request): # get the data and print step = request.POST.get("step") type = request.POST.get("type") if (type == "Payment"): new_type = 1 elif (type == "Transfer"): new_type = 4 elif (type == "Cash-out"): new_type = 5 elif (type == "Debit"): new_type = 2 print(new_type) amount = request.POST.get("amount") nameOrig = request.POST.get("nameOrig") oldbalanceOrg = request.POST.get("oldbalanceOrg") newbalanceOrig = request.POST.get("newbalanceOrig") nameDest = request.POST.get("nameDest") oldbalanceDest = request.POST.get("oldbalanceDest") newbalanceDest = request.POST.get("newbalanceDest") # isFraud = int(request.POST.get("isFraud"))) isFlaggedFraud = 1 mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg, newbalanceOrig, nameDest,oldbalanceDest, newbalanceDest, isFlaggedFraud]) # print(bank_data) clf = mobile_model() c = clf.predict([mobile_data]) print(c) if c == [0]: # print("Not fraud") response = 'Not Fraud' else: # print("Fraud") response = 'Fraud' return render(request, 'mobile/result.html', {"result": response}) # analytics page def analytics(request): return render(request, 'analytics.html', {'analytics':result, "mobile_analytics": mobile_result, "creditcard_analytics": creditcard_result})
flexible
{ "blob_id": "26bb5dc2679a4375d0950667ed02369df10857a8", "index": 8410, "step-1": "<mask token>\n\n\ndef contact(request):\n form_class = ContactForm\n if request.method == 'POST':\n form = form_class(data=request.POST)\n if form.is_valid():\n contact_name = request.POST.get('contact_name', '')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n template = get_template('contact_template.txt')\n context = {'contact_name': contact_name, 'contact_email':\n contact_email, 'form_content': form_content}\n content = template.render(context)\n email = EmailMessage('New contact form submission', content, \n 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To':\n contact_email})\n email.send()\n return redirect('/success')\n return render(request, 'contact.html', {'form': form_class})\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 'creditcard.html')\n\n\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n\n@login_required(login_url='/login/')\ndef bankresult(request):\n age = request.POST.get('age')\n job = request.POST.get('job')\n print(job)\n if job == 'Unemployed':\n new_job = 1\n elif job == 'Management':\n new_job = 2\n elif job == 'Services':\n new_job = 3\n elif job == 'Blue-Collar':\n new_job = 4\n elif job == 'Entrepreneur':\n new_job = 5\n elif job == 'Admin':\n new_job = 6\n elif job == 'Unknown':\n new_job = 7\n elif job == 'Self-employed':\n new_job = 8\n elif job == 'Student':\n new_job = 9\n elif job == 'House maid':\n new_job = 10\n elif job == 'Technician':\n new_job = 11\n elif job == 'Retired':\n new_job = 12\n print(new_job)\n marital = request.POST.get('marital')\n if marital == 'Single':\n new_marital = 1\n 
elif marital == 'Divorced':\n new_marital = 2\n elif marital == 'Married':\n new_marital = 3\n print(new_marital)\n education = request.POST.get('education')\n if education == 'Unknown':\n new_education = 1\n elif education == 'Primary':\n new_education = 2\n elif education == 'Secondary':\n new_education = 3\n elif education == 'Graduate':\n new_education = 4\n print(new_education)\n balance = request.POST.get('balance')\n housing = request.POST.get('housing')\n if housing == 'Yes':\n new_housing = 1\n elif housing == 'No':\n new_housing = 2\n print(new_housing)\n loan = request.POST.get('loan')\n if loan == 'Yes':\n new_loan = 1\n elif loan == 'No':\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get('duration'))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = request.POST.get('poutcome')\n if poutcome == 'Unknown':\n new_poutcome = 3\n elif poutcome == 'Failure':\n new_poutcome = 1\n elif poutcome == 'Successs':\n new_poutcome = 4\n elif poutcome == 'Failure':\n new_poutcome = 2\n print(new_poutcome)\n bank_data = np.array([age, new_job, new_marital, new_education, balance,\n new_housing, new_loan, duration, campaign, pdays, previous,\n new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {'result': response,\n 'accuracy': accuracy})\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef mobileresult(request):\n step = request.POST.get('step')\n type = request.POST.get('type')\n if type == 'Payment':\n new_type = 1\n elif type == 'Transfer':\n new_type = 4\n elif type == 'Cash-out':\n new_type = 5\n elif type == 'Debit':\n new_type = 2\n print(new_type)\n amount = request.POST.get('amount')\n nameOrig = request.POST.get('nameOrig')\n oldbalanceOrg = 
request.POST.get('oldbalanceOrg')\n newbalanceOrig = request.POST.get('newbalanceOrig')\n nameDest = request.POST.get('nameDest')\n oldbalanceDest = request.POST.get('oldbalanceDest')\n newbalanceDest = request.POST.get('newbalanceDest')\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg,\n newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest,\n isFlaggedFraud])\n clf = mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n return render(request, 'mobile/result.html', {'result': response})\n\n\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics': result,\n 'mobile_analytics': mobile_result, 'creditcard_analytics':\n creditcard_result})\n", "step-2": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\n<mask token>\n\n\ndef contact(request):\n form_class = ContactForm\n if request.method == 'POST':\n form = form_class(data=request.POST)\n if form.is_valid():\n contact_name = request.POST.get('contact_name', '')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n template = get_template('contact_template.txt')\n context = {'contact_name': contact_name, 'contact_email':\n contact_email, 'form_content': form_content}\n content = template.render(context)\n email = EmailMessage('New contact form submission', content, \n 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To':\n contact_email})\n email.send()\n return redirect('/success')\n return render(request, 'contact.html', {'form': form_class})\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 
'creditcard.html')\n\n\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n\n@login_required(login_url='/login/')\ndef bankresult(request):\n age = request.POST.get('age')\n job = request.POST.get('job')\n print(job)\n if job == 'Unemployed':\n new_job = 1\n elif job == 'Management':\n new_job = 2\n elif job == 'Services':\n new_job = 3\n elif job == 'Blue-Collar':\n new_job = 4\n elif job == 'Entrepreneur':\n new_job = 5\n elif job == 'Admin':\n new_job = 6\n elif job == 'Unknown':\n new_job = 7\n elif job == 'Self-employed':\n new_job = 8\n elif job == 'Student':\n new_job = 9\n elif job == 'House maid':\n new_job = 10\n elif job == 'Technician':\n new_job = 11\n elif job == 'Retired':\n new_job = 12\n print(new_job)\n marital = request.POST.get('marital')\n if marital == 'Single':\n new_marital = 1\n elif marital == 'Divorced':\n new_marital = 2\n elif marital == 'Married':\n new_marital = 3\n print(new_marital)\n education = request.POST.get('education')\n if education == 'Unknown':\n new_education = 1\n elif education == 'Primary':\n new_education = 2\n elif education == 'Secondary':\n new_education = 3\n elif education == 'Graduate':\n new_education = 4\n print(new_education)\n balance = request.POST.get('balance')\n housing = request.POST.get('housing')\n if housing == 'Yes':\n new_housing = 1\n elif housing == 'No':\n new_housing = 2\n print(new_housing)\n loan = request.POST.get('loan')\n if loan == 'Yes':\n new_loan = 1\n elif loan == 'No':\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get('duration'))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = request.POST.get('poutcome')\n if poutcome == 'Unknown':\n new_poutcome = 3\n elif poutcome == 'Failure':\n new_poutcome = 1\n elif poutcome == 'Successs':\n new_poutcome = 4\n elif poutcome == 'Failure':\n new_poutcome = 2\n 
print(new_poutcome)\n bank_data = np.array([age, new_job, new_marital, new_education, balance,\n new_housing, new_loan, duration, campaign, pdays, previous,\n new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {'result': response,\n 'accuracy': accuracy})\n\n\n@login_required(login_url='/login/')\ndef creditresult(request):\n if request.method == 'POST':\n limit_balance = request.POST.get('limit_balance')\n sex = request.POST.get('sex')\n print(sex)\n if sex == 'Male':\n new_sex = 1\n else:\n new_sex = 2\n print(new_sex)\n education = request.POST.get('education')\n if education == 'Primary':\n new_education = 1\n elif education == 'Secondary':\n new_education = 2\n elif education == 'Graduate':\n new_education = 3\n print(new_education)\n marriage = request.POST.get('marriage')\n if marriage == 'Single':\n new_marriage = 1\n elif marriage == 'Married':\n new_marriage = 2\n elif education == 'Divorced':\n new_marriage = 3\n print(new_marriage)\n age = request.POST.get('age')\n pay_1 = int(request.POST.get('pay_1'))\n pay_2 = int(request.POST.get('pay_2'))\n pay_3 = int(request.POST.get('pay_3'))\n pay_4 = int(request.POST.get('pay_4'))\n pay_5 = int(request.POST.get('pay_5'))\n pay_6 = int(request.POST.get('pay_6'))\n Bill_Amt_1 = int(request.POST.get('Bill_Amt_1'))\n Bill_Amt_2 = int(request.POST.get('Bill_Amt_2'))\n Bill_Amt_3 = int(request.POST.get('Bill_Amt_3'))\n Bill_Amt_4 = int(request.POST.get('Bill_Amt_4'))\n Bill_Amt_5 = int(request.POST.get('Bill_Amt_5'))\n Bill_Amt_6 = int(request.POST.get('Bill_Amt_6'))\n Pay_Amt_1 = int(request.POST.get('Pay_Amt_1'))\n Pay_Amt_2 = int(request.POST.get('Pay_Amt_2'))\n Pay_Amt_3 = int(request.POST.get('Pay_Amt_3'))\n Pay_Amt_4 = int(request.POST.get('Pay_Amt_4'))\n Pay_Amt_5 = int(request.POST.get('Pay_Amt_5'))\n Pay_Amt_6 = 
int(request.POST.get('Pay_Amt_6'))\n credit_data = np.array([limit_balance, new_sex, new_education,\n new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6,\n Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5,\n Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4,\n Pay_Amt_5, Pay_Amt_6])\n print(credit_data)\n clf = credit_model()\n c = clf.predict([credit_data])\n print(c)\n if c == [0]:\n response = 'Not a Fraud'\n else:\n response = 'fraud'\n return render(request, 'creditcard/result.html', {'result': response})\n else:\n return redirect('/creditcard', request)\n\n\n@login_required(login_url='/login/')\ndef mobileresult(request):\n step = request.POST.get('step')\n type = request.POST.get('type')\n if type == 'Payment':\n new_type = 1\n elif type == 'Transfer':\n new_type = 4\n elif type == 'Cash-out':\n new_type = 5\n elif type == 'Debit':\n new_type = 2\n print(new_type)\n amount = request.POST.get('amount')\n nameOrig = request.POST.get('nameOrig')\n oldbalanceOrg = request.POST.get('oldbalanceOrg')\n newbalanceOrig = request.POST.get('newbalanceOrig')\n nameDest = request.POST.get('nameDest')\n oldbalanceDest = request.POST.get('oldbalanceDest')\n newbalanceDest = request.POST.get('newbalanceDest')\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg,\n newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest,\n isFlaggedFraud])\n clf = mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n return render(request, 'mobile/result.html', {'result': response})\n\n\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics': result,\n 'mobile_analytics': mobile_result, 'creditcard_analytics':\n creditcard_result})\n", "step-3": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\n<mask token>\n\n\ndef contact(request):\n form_class = ContactForm\n if request.method == 'POST':\n 
form = form_class(data=request.POST)\n if form.is_valid():\n contact_name = request.POST.get('contact_name', '')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n template = get_template('contact_template.txt')\n context = {'contact_name': contact_name, 'contact_email':\n contact_email, 'form_content': form_content}\n content = template.render(context)\n email = EmailMessage('New contact form submission', content, \n 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To':\n contact_email})\n email.send()\n return redirect('/success')\n return render(request, 'contact.html', {'form': form_class})\n\n\n<mask token>\n\n\ndef login_view(request):\n next = request.GET.get('next')\n form = UserLoginForm(request.POST or None)\n if form.is_valid():\n username = form.cleaned_data.get('username')\n password = form.cleaned_data.get('password')\n user = authenticate(username=username, password=password)\n login(request, user)\n if next:\n return redirect(next)\n return redirect('/')\n return render(request, 'login.html', {'form': form})\n\n\n<mask token>\n\n\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n\n@login_required(login_url='/login/')\ndef bank(request):\n return render(request, 'bank.html')\n\n\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 'creditcard.html')\n\n\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n\n@login_required(login_url='/login/')\ndef bankresult(request):\n age = request.POST.get('age')\n job = request.POST.get('job')\n print(job)\n if job == 'Unemployed':\n new_job = 1\n elif job == 'Management':\n new_job = 2\n elif job == 'Services':\n new_job = 3\n elif job == 'Blue-Collar':\n new_job = 4\n elif job == 'Entrepreneur':\n new_job = 5\n elif job == 'Admin':\n new_job = 6\n elif job == 
'Unknown':\n new_job = 7\n elif job == 'Self-employed':\n new_job = 8\n elif job == 'Student':\n new_job = 9\n elif job == 'House maid':\n new_job = 10\n elif job == 'Technician':\n new_job = 11\n elif job == 'Retired':\n new_job = 12\n print(new_job)\n marital = request.POST.get('marital')\n if marital == 'Single':\n new_marital = 1\n elif marital == 'Divorced':\n new_marital = 2\n elif marital == 'Married':\n new_marital = 3\n print(new_marital)\n education = request.POST.get('education')\n if education == 'Unknown':\n new_education = 1\n elif education == 'Primary':\n new_education = 2\n elif education == 'Secondary':\n new_education = 3\n elif education == 'Graduate':\n new_education = 4\n print(new_education)\n balance = request.POST.get('balance')\n housing = request.POST.get('housing')\n if housing == 'Yes':\n new_housing = 1\n elif housing == 'No':\n new_housing = 2\n print(new_housing)\n loan = request.POST.get('loan')\n if loan == 'Yes':\n new_loan = 1\n elif loan == 'No':\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get('duration'))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = request.POST.get('poutcome')\n if poutcome == 'Unknown':\n new_poutcome = 3\n elif poutcome == 'Failure':\n new_poutcome = 1\n elif poutcome == 'Successs':\n new_poutcome = 4\n elif poutcome == 'Failure':\n new_poutcome = 2\n print(new_poutcome)\n bank_data = np.array([age, new_job, new_marital, new_education, balance,\n new_housing, new_loan, duration, campaign, pdays, previous,\n new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {'result': response,\n 'accuracy': accuracy})\n\n\n@login_required(login_url='/login/')\ndef creditresult(request):\n if request.method == 'POST':\n limit_balance = 
request.POST.get('limit_balance')\n sex = request.POST.get('sex')\n print(sex)\n if sex == 'Male':\n new_sex = 1\n else:\n new_sex = 2\n print(new_sex)\n education = request.POST.get('education')\n if education == 'Primary':\n new_education = 1\n elif education == 'Secondary':\n new_education = 2\n elif education == 'Graduate':\n new_education = 3\n print(new_education)\n marriage = request.POST.get('marriage')\n if marriage == 'Single':\n new_marriage = 1\n elif marriage == 'Married':\n new_marriage = 2\n elif education == 'Divorced':\n new_marriage = 3\n print(new_marriage)\n age = request.POST.get('age')\n pay_1 = int(request.POST.get('pay_1'))\n pay_2 = int(request.POST.get('pay_2'))\n pay_3 = int(request.POST.get('pay_3'))\n pay_4 = int(request.POST.get('pay_4'))\n pay_5 = int(request.POST.get('pay_5'))\n pay_6 = int(request.POST.get('pay_6'))\n Bill_Amt_1 = int(request.POST.get('Bill_Amt_1'))\n Bill_Amt_2 = int(request.POST.get('Bill_Amt_2'))\n Bill_Amt_3 = int(request.POST.get('Bill_Amt_3'))\n Bill_Amt_4 = int(request.POST.get('Bill_Amt_4'))\n Bill_Amt_5 = int(request.POST.get('Bill_Amt_5'))\n Bill_Amt_6 = int(request.POST.get('Bill_Amt_6'))\n Pay_Amt_1 = int(request.POST.get('Pay_Amt_1'))\n Pay_Amt_2 = int(request.POST.get('Pay_Amt_2'))\n Pay_Amt_3 = int(request.POST.get('Pay_Amt_3'))\n Pay_Amt_4 = int(request.POST.get('Pay_Amt_4'))\n Pay_Amt_5 = int(request.POST.get('Pay_Amt_5'))\n Pay_Amt_6 = int(request.POST.get('Pay_Amt_6'))\n credit_data = np.array([limit_balance, new_sex, new_education,\n new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6,\n Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5,\n Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4,\n Pay_Amt_5, Pay_Amt_6])\n print(credit_data)\n clf = credit_model()\n c = clf.predict([credit_data])\n print(c)\n if c == [0]:\n response = 'Not a Fraud'\n else:\n response = 'fraud'\n return render(request, 'creditcard/result.html', {'result': response})\n else:\n return 
redirect('/creditcard', request)\n\n\n@login_required(login_url='/login/')\ndef mobileresult(request):\n step = request.POST.get('step')\n type = request.POST.get('type')\n if type == 'Payment':\n new_type = 1\n elif type == 'Transfer':\n new_type = 4\n elif type == 'Cash-out':\n new_type = 5\n elif type == 'Debit':\n new_type = 2\n print(new_type)\n amount = request.POST.get('amount')\n nameOrig = request.POST.get('nameOrig')\n oldbalanceOrg = request.POST.get('oldbalanceOrg')\n newbalanceOrig = request.POST.get('newbalanceOrig')\n nameDest = request.POST.get('nameDest')\n oldbalanceDest = request.POST.get('oldbalanceDest')\n newbalanceDest = request.POST.get('newbalanceDest')\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg,\n newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest,\n isFlaggedFraud])\n clf = mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n return render(request, 'mobile/result.html', {'result': response})\n\n\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics': result,\n 'mobile_analytics': mobile_result, 'creditcard_analytics':\n creditcard_result})\n", "step-4": "<mask token>\n\n\ndef index(request):\n return render(request, 'index.html')\n\n\ndef about(request):\n return render(request, 'about.html')\n\n\ndef contact(request):\n form_class = ContactForm\n if request.method == 'POST':\n form = form_class(data=request.POST)\n if form.is_valid():\n contact_name = request.POST.get('contact_name', '')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n template = get_template('contact_template.txt')\n context = {'contact_name': contact_name, 'contact_email':\n contact_email, 'form_content': form_content}\n content = template.render(context)\n email = EmailMessage('New contact form 
submission', content, \n 'FDS' + '', ['b200jst@gmail.com'], headers={'Reply-To':\n contact_email})\n email.send()\n return redirect('/success')\n return render(request, 'contact.html', {'form': form_class})\n\n\ndef success(request):\n return render(request, 'success.html')\n\n\ndef login_view(request):\n next = request.GET.get('next')\n form = UserLoginForm(request.POST or None)\n if form.is_valid():\n username = form.cleaned_data.get('username')\n password = form.cleaned_data.get('password')\n user = authenticate(username=username, password=password)\n login(request, user)\n if next:\n return redirect(next)\n return redirect('/')\n return render(request, 'login.html', {'form': form})\n\n\n@login_required(login_url='/login/')\ndef logout_view(request):\n logout(request)\n return render(request, 'index.html')\n\n\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n\n@login_required(login_url='/login/')\ndef bank(request):\n return render(request, 'bank.html')\n\n\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 'creditcard.html')\n\n\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n\n@login_required(login_url='/login/')\ndef bankresult(request):\n age = request.POST.get('age')\n job = request.POST.get('job')\n print(job)\n if job == 'Unemployed':\n new_job = 1\n elif job == 'Management':\n new_job = 2\n elif job == 'Services':\n new_job = 3\n elif job == 'Blue-Collar':\n new_job = 4\n elif job == 'Entrepreneur':\n new_job = 5\n elif job == 'Admin':\n new_job = 6\n elif job == 'Unknown':\n new_job = 7\n elif job == 'Self-employed':\n new_job = 8\n elif job == 'Student':\n new_job = 9\n elif job == 'House maid':\n new_job = 10\n elif job == 'Technician':\n new_job = 11\n elif job == 'Retired':\n new_job = 12\n print(new_job)\n marital = request.POST.get('marital')\n if marital == 'Single':\n new_marital = 1\n elif 
marital == 'Divorced':\n new_marital = 2\n elif marital == 'Married':\n new_marital = 3\n print(new_marital)\n education = request.POST.get('education')\n if education == 'Unknown':\n new_education = 1\n elif education == 'Primary':\n new_education = 2\n elif education == 'Secondary':\n new_education = 3\n elif education == 'Graduate':\n new_education = 4\n print(new_education)\n balance = request.POST.get('balance')\n housing = request.POST.get('housing')\n if housing == 'Yes':\n new_housing = 1\n elif housing == 'No':\n new_housing = 2\n print(new_housing)\n loan = request.POST.get('loan')\n if loan == 'Yes':\n new_loan = 1\n elif loan == 'No':\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get('duration'))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = request.POST.get('poutcome')\n if poutcome == 'Unknown':\n new_poutcome = 3\n elif poutcome == 'Failure':\n new_poutcome = 1\n elif poutcome == 'Successs':\n new_poutcome = 4\n elif poutcome == 'Failure':\n new_poutcome = 2\n print(new_poutcome)\n bank_data = np.array([age, new_job, new_marital, new_education, balance,\n new_housing, new_loan, duration, campaign, pdays, previous,\n new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {'result': response,\n 'accuracy': accuracy})\n\n\n@login_required(login_url='/login/')\ndef creditresult(request):\n if request.method == 'POST':\n limit_balance = request.POST.get('limit_balance')\n sex = request.POST.get('sex')\n print(sex)\n if sex == 'Male':\n new_sex = 1\n else:\n new_sex = 2\n print(new_sex)\n education = request.POST.get('education')\n if education == 'Primary':\n new_education = 1\n elif education == 'Secondary':\n new_education = 2\n elif education == 'Graduate':\n 
new_education = 3\n print(new_education)\n marriage = request.POST.get('marriage')\n if marriage == 'Single':\n new_marriage = 1\n elif marriage == 'Married':\n new_marriage = 2\n elif education == 'Divorced':\n new_marriage = 3\n print(new_marriage)\n age = request.POST.get('age')\n pay_1 = int(request.POST.get('pay_1'))\n pay_2 = int(request.POST.get('pay_2'))\n pay_3 = int(request.POST.get('pay_3'))\n pay_4 = int(request.POST.get('pay_4'))\n pay_5 = int(request.POST.get('pay_5'))\n pay_6 = int(request.POST.get('pay_6'))\n Bill_Amt_1 = int(request.POST.get('Bill_Amt_1'))\n Bill_Amt_2 = int(request.POST.get('Bill_Amt_2'))\n Bill_Amt_3 = int(request.POST.get('Bill_Amt_3'))\n Bill_Amt_4 = int(request.POST.get('Bill_Amt_4'))\n Bill_Amt_5 = int(request.POST.get('Bill_Amt_5'))\n Bill_Amt_6 = int(request.POST.get('Bill_Amt_6'))\n Pay_Amt_1 = int(request.POST.get('Pay_Amt_1'))\n Pay_Amt_2 = int(request.POST.get('Pay_Amt_2'))\n Pay_Amt_3 = int(request.POST.get('Pay_Amt_3'))\n Pay_Amt_4 = int(request.POST.get('Pay_Amt_4'))\n Pay_Amt_5 = int(request.POST.get('Pay_Amt_5'))\n Pay_Amt_6 = int(request.POST.get('Pay_Amt_6'))\n credit_data = np.array([limit_balance, new_sex, new_education,\n new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6,\n Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5,\n Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4,\n Pay_Amt_5, Pay_Amt_6])\n print(credit_data)\n clf = credit_model()\n c = clf.predict([credit_data])\n print(c)\n if c == [0]:\n response = 'Not a Fraud'\n else:\n response = 'fraud'\n return render(request, 'creditcard/result.html', {'result': response})\n else:\n return redirect('/creditcard', request)\n\n\n@login_required(login_url='/login/')\ndef mobileresult(request):\n step = request.POST.get('step')\n type = request.POST.get('type')\n if type == 'Payment':\n new_type = 1\n elif type == 'Transfer':\n new_type = 4\n elif type == 'Cash-out':\n new_type = 5\n elif type == 'Debit':\n new_type = 2\n 
print(new_type)\n amount = request.POST.get('amount')\n nameOrig = request.POST.get('nameOrig')\n oldbalanceOrg = request.POST.get('oldbalanceOrg')\n newbalanceOrig = request.POST.get('newbalanceOrig')\n nameDest = request.POST.get('nameDest')\n oldbalanceDest = request.POST.get('oldbalanceDest')\n newbalanceDest = request.POST.get('newbalanceDest')\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg,\n newbalanceOrig, nameDest, oldbalanceDest, newbalanceDest,\n isFlaggedFraud])\n clf = mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n response = 'Not Fraud'\n else:\n response = 'Fraud'\n return render(request, 'mobile/result.html', {'result': response})\n\n\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics': result,\n 'mobile_analytics': mobile_result, 'creditcard_analytics':\n creditcard_result})\n", "step-5": "import numpy as np\nfrom django.contrib.auth import logout, login, authenticate\nfrom django.contrib.auth.decorators import login_required\nfrom django.core.mail import EmailMessage\nfrom django.shortcuts import render, redirect\nfrom django.template.loader import get_template\n\nfrom dashboard.notebook.creditcard import credit_model\nfrom dashboard.notebook.bank import bank_model\nfrom dashboard.notebook.mobile_data import mobile_model\n\nfrom dashboard.notebook.graphs import result\n\nfrom dashboard.notebook.mobile_analytics import mobile_result\n\nfrom dashboard.notebook.creditcard_analytics import creditcard_result\nfrom .forms import ContactForm, UserLoginForm\n\n\n# view for index page\ndef index(request):\n return render(request, 'index.html')\n# view for about page\ndef about(request):\n return render(request, 'about.html')\n\n### contact view\ndef contact(request):\n form_class = ContactForm\n\n # new logic!\n if request.method == 'POST':\n form = form_class(data=request.POST)\n\n if form.is_valid():\n contact_name = request.POST.get('contact_name', 
'')\n contact_email = request.POST.get('contact_email', '')\n form_content = request.POST.get('content', '')\n form_content = request.POST.get('content', '')\n\n # Email the profile with the\n # contact information\n template = get_template('contact_template.txt')\n context = {\n 'contact_name': contact_name,\n 'contact_email': contact_email,\n 'form_content': form_content,\n }\n\n content = template.render(context)\n email = EmailMessage(\n \"New contact form submission\",\n content,\n \"FDS\" + '',\n ['b200jst@gmail.com'],\n headers={'Reply-To': contact_email}\n )\n email.send()\n return redirect('/success')\n\n return render(request, 'contact.html', {\n 'form': form_class,\n })\n\n# success page\ndef success(request):\n return render(request, 'success.html')\n\n# login page\ndef login_view(request):\n next = request.GET.get('next')\n form = UserLoginForm(request.POST or None)\n if form.is_valid():\n username = form.cleaned_data.get('username')\n password = form.cleaned_data.get('password')\n user = authenticate(username=username, password=password)\n login(request, user)\n if next:\n return redirect(next)\n return redirect(\"/\")\n return render(request, 'login.html',{\"form\":form})\n\n# logout view\n@login_required(login_url='/login/')\ndef logout_view(request):\n logout(request)\n return render(request, \"index.html\")\n\n# service view\n@login_required(login_url='/login/')\ndef services(request):\n return render(request, 'services.html')\n\n# bank fraud page\n@login_required(login_url='/login/')\ndef bank(request):\n return render(request, 'bank.html')\n# creditcard fraud page\n@login_required(login_url='/login/')\ndef creditcard(request):\n return render(request, 'creditcard.html')\n# mobile transaction\n@login_required(login_url='/login/')\ndef mobilefraud(request):\n return render(request, 'mobile.html')\n\n#banking services\n@login_required(login_url='/login/')\ndef bankresult(request):\n # get the data and print prediction\n age = 
request.POST.get(\"age\")\n job = request.POST.get(\"job\")\n print(job)\n if (job == \"Unemployed\"):\n new_job = 1\n elif (job == \"Management\"):\n new_job = 2\n elif (job == \"Services\"):\n new_job = 3\n elif (job == \"Blue-Collar\"):\n new_job = 4\n elif (job == \"Entrepreneur\"):\n new_job = 5\n elif (job == \"Admin\"):\n new_job = 6\n elif (job == \"Unknown\"):\n new_job = 7\n elif (job == \"Self-employed\"):\n new_job = 8\n elif (job == \"Student\"):\n new_job = 9\n elif (job == \"House maid\"):\n new_job = 10\n elif (job == \"Technician\"):\n new_job = 11\n elif (job == \"Retired\"):\n new_job = 12\n print(new_job)\n marital = request.POST.get(\"marital\")\n if (marital == \"Single\"):\n new_marital = 1\n elif (marital == \"Divorced\"):\n new_marital = 2\n elif (marital == \"Married\"):\n new_marital = 3\n print(new_marital)\n education = request.POST.get(\"education\")\n if (education == \"Unknown\"):\n new_education = 1\n elif (education == \"Primary\"):\n new_education = 2\n elif (education == \"Secondary\"):\n new_education = 3\n elif (education == \"Graduate\"):\n new_education = 4\n print(new_education)\n balance = request.POST.get(\"balance\")\n housing = request.POST.get(\"housing\")\n if (housing == \"Yes\"):\n new_housing = 1\n elif (housing == \"No\"):\n new_housing = 2\n print(new_housing)\n loan = request.POST.get(\"loan\")\n if (loan == \"Yes\"):\n new_loan = 1\n elif (loan == \"No\"):\n new_loan = 2\n print(new_loan)\n duration = int(request.POST.get(\"duration\"))\n campaign = int(request.POST.get('campaign'))\n pdays = int(request.POST.get('pdays'))\n previous = int(request.POST.get('previous'))\n poutcome = (request.POST.get(\"poutcome\"))\n if (poutcome == \"Unknown\"):\n new_poutcome = 3\n elif (poutcome == \"Failure\"):\n new_poutcome = 1\n elif (poutcome == \"Successs\"):\n new_poutcome = 4\n elif (poutcome == \"Failure\"):\n new_poutcome = 2\n print(new_poutcome)\n bank_data = 
np.array([age,new_job,new_marital,new_education,balance,new_housing,new_loan,duration,campaign,pdays,previous,new_poutcome])\n clf = bank_model()\n c = clf.predict([bank_data])\n print(c)\n if c == [1]:\n # print(\"Not fraud\")\n response = 'Not Fraud'\n else:\n # print(\"Fraud\")\n response = 'Fraud'\n\n\n accuracy = 0.8962983425414365\n return render(request, 'bank/result.html', {\"result\": response, 'accuracy':accuracy})\n\n# analytics\n# def analysis(request):\n# return render(request, 'analysis.html', {'accuracy': accuracy})\n\n# credit card services\n@login_required(login_url='/login/')\ndef creditresult(request):\n if request.method == \"POST\":\n # get the data and print\n limit_balance = request.POST.get(\"limit_balance\")\n sex = request.POST.get(\"sex\")\n print(sex)\n if(sex==\"Male\"):\n new_sex = 1\n else:\n new_sex = 2\n print(new_sex)\n education = request.POST.get(\"education\")\n if (education == \"Primary\"):\n new_education = 1\n elif (education == \"Secondary\"):\n new_education = 2\n elif (education == \"Graduate\"):\n new_education = 3\n print(new_education)\n marriage = request.POST.get(\"marriage\")\n if (marriage == \"Single\"):\n new_marriage = 1\n elif (marriage == \"Married\"):\n new_marriage = 2\n elif (education == \"Divorced\"):\n new_marriage = 3\n print(new_marriage)\n age = request.POST.get(\"age\")\n pay_1 = int(request.POST.get(\"pay_1\"))\n pay_2 = int(request.POST.get(\"pay_2\"))\n pay_3 = int(request.POST.get(\"pay_3\"))\n pay_4 = int(request.POST.get(\"pay_4\"))\n pay_5 = int(request.POST.get(\"pay_5\"))\n pay_6 = int(request.POST.get(\"pay_6\"))\n Bill_Amt_1 = int(request.POST.get(\"Bill_Amt_1\"))\n Bill_Amt_2 = int(request.POST.get(\"Bill_Amt_2\"))\n Bill_Amt_3 = int(request.POST.get(\"Bill_Amt_3\"))\n Bill_Amt_4 = int(request.POST.get(\"Bill_Amt_4\"))\n Bill_Amt_5 = int(request.POST.get(\"Bill_Amt_5\"))\n Bill_Amt_6 = int(request.POST.get(\"Bill_Amt_6\"))\n Pay_Amt_1 = int(request.POST.get(\"Pay_Amt_1\"))\n Pay_Amt_2 = 
int(request.POST.get(\"Pay_Amt_2\"))\n Pay_Amt_3 = int(request.POST.get(\"Pay_Amt_3\"))\n Pay_Amt_4 = int(request.POST.get(\"Pay_Amt_4\"))\n Pay_Amt_5 = int(request.POST.get(\"Pay_Amt_5\"))\n Pay_Amt_6 = int(request.POST.get(\"Pay_Amt_6\"))\n credit_data = np.array([limit_balance, new_sex, new_education, new_marriage, age, pay_1, pay_2, pay_3, pay_4, pay_5, pay_6, Bill_Amt_1, Bill_Amt_2, Bill_Amt_3, Bill_Amt_4, Bill_Amt_5, Bill_Amt_6, Pay_Amt_1, Pay_Amt_2, Pay_Amt_3, Pay_Amt_4, Pay_Amt_5, Pay_Amt_6])\n print(credit_data)\n clf = credit_model()\n c = clf.predict([credit_data])\n print(c)\n if c == [0]:\n response = 'Not a Fraud'\n else:\n response = 'fraud'\n # print(c)\n return render(request, 'creditcard/result.html', {\"result\": response})\n else:\n return redirect('/creditcard',request)\n\n# mobile fraud services\n@login_required(login_url='/login/')\ndef mobileresult(request):\n # get the data and print\n step = request.POST.get(\"step\")\n type = request.POST.get(\"type\")\n if (type == \"Payment\"):\n new_type = 1\n elif (type == \"Transfer\"):\n new_type = 4\n elif (type == \"Cash-out\"):\n new_type = 5\n elif (type == \"Debit\"):\n new_type = 2\n print(new_type)\n amount = request.POST.get(\"amount\")\n nameOrig = request.POST.get(\"nameOrig\")\n oldbalanceOrg = request.POST.get(\"oldbalanceOrg\")\n newbalanceOrig = request.POST.get(\"newbalanceOrig\")\n nameDest = request.POST.get(\"nameDest\")\n oldbalanceDest = request.POST.get(\"oldbalanceDest\")\n newbalanceDest = request.POST.get(\"newbalanceDest\")\n # isFraud = int(request.POST.get(\"isFraud\")))\n isFlaggedFraud = 1\n mobile_data = np.array([step, new_type, amount, nameOrig, oldbalanceOrg, newbalanceOrig, nameDest,oldbalanceDest, newbalanceDest, isFlaggedFraud])\n # print(bank_data)\n clf = mobile_model()\n c = clf.predict([mobile_data])\n print(c)\n if c == [0]:\n # print(\"Not fraud\")\n response = 'Not Fraud'\n else:\n # print(\"Fraud\")\n response = 'Fraud'\n return render(request, 
'mobile/result.html', {\"result\": response})\n\n# analytics page\ndef analytics(request):\n return render(request, 'analytics.html', {'analytics':result, \"mobile_analytics\": mobile_result, \"creditcard_analytics\": creditcard_result})", "step-ids": [ 7, 9, 11, 14, 16 ] }
[ 7, 9, 11, 14, 16 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def index_get(request, user_id, user, pto): schedules = Schedule.to_calendar(Schedule.objects.filter(pto=pto)) context = pto.__dict__ context.update({'schedules': schedules, 'current_user': user}) return render(request, 'users/paid_time_off.html', context=context) def index_post(request, user_id, user, pto): form = request.POST if not form: return HttpResponse('No form found') err_msg = PaidTimeOff.validate_PTO_form(form) if len(err_msg) > 0: messages.add_message(request, messages.INFO, err_msg) else: try: date_begin = Schedule.reformat(form['date_begin']) date_end = Schedule.reformat(form['date_end']) Schedule.objects.create(user=user, pto=pto, date_begin= date_begin, date_end=date_end, event_name=form['event_name' ], event_type='PTO', event_desc=form['event_description'], created_at=timezone.now(), updated_at=timezone.now()) messages.add_message(request, messages.INFO, 'Information successfully updated') except Exception as e: messages.add_message(request, messages.INFO, str(e)) url = '/users/%s/paid_time_off/' % user_id return redirect(url, permanent=False) <|reserved_special_token_1|> <|reserved_special_token_0|> @require_http_methods(['GET', 'POST']) @user_is_authenticated def index(request, user_id): user = utils.current_user(request) if not user: return HttpResponse('User ' + str(user_id) + ' NOT FOUND') pto = PaidTimeOff.objects.filter(user=user).first() if not pto: return HttpResponse('PTO ' + str(user_id) + ' NOT FOUND') if request.method == 'GET': return index_get(request, user_id, user, pto) elif request.method == 'POST': return index_post(request, user_id, user, pto) else: return HttpResponse('Invalid HTTP method') def index_get(request, user_id, user, pto): schedules = Schedule.to_calendar(Schedule.objects.filter(pto=pto)) context = pto.__dict__ context.update({'schedules': schedules, 'current_user': user}) return render(request, 'users/paid_time_off.html', 
context=context) def index_post(request, user_id, user, pto): form = request.POST if not form: return HttpResponse('No form found') err_msg = PaidTimeOff.validate_PTO_form(form) if len(err_msg) > 0: messages.add_message(request, messages.INFO, err_msg) else: try: date_begin = Schedule.reformat(form['date_begin']) date_end = Schedule.reformat(form['date_end']) Schedule.objects.create(user=user, pto=pto, date_begin= date_begin, date_end=date_end, event_name=form['event_name' ], event_type='PTO', event_desc=form['event_description'], created_at=timezone.now(), updated_at=timezone.now()) messages.add_message(request, messages.INFO, 'Information successfully updated') except Exception as e: messages.add_message(request, messages.INFO, str(e)) url = '/users/%s/paid_time_off/' % user_id return redirect(url, permanent=False) <|reserved_special_token_1|> from django.http import HttpResponse from django.views.decorators.http import require_http_methods from django.shortcuts import render, redirect from app.models import PaidTimeOff, Schedule from django.utils import timezone from django.contrib import messages from app.decorators import user_is_authenticated from app.views import utils @require_http_methods(['GET', 'POST']) @user_is_authenticated def index(request, user_id): user = utils.current_user(request) if not user: return HttpResponse('User ' + str(user_id) + ' NOT FOUND') pto = PaidTimeOff.objects.filter(user=user).first() if not pto: return HttpResponse('PTO ' + str(user_id) + ' NOT FOUND') if request.method == 'GET': return index_get(request, user_id, user, pto) elif request.method == 'POST': return index_post(request, user_id, user, pto) else: return HttpResponse('Invalid HTTP method') def index_get(request, user_id, user, pto): schedules = Schedule.to_calendar(Schedule.objects.filter(pto=pto)) context = pto.__dict__ context.update({'schedules': schedules, 'current_user': user}) return render(request, 'users/paid_time_off.html', context=context) def 
index_post(request, user_id, user, pto): form = request.POST if not form: return HttpResponse('No form found') err_msg = PaidTimeOff.validate_PTO_form(form) if len(err_msg) > 0: messages.add_message(request, messages.INFO, err_msg) else: try: date_begin = Schedule.reformat(form['date_begin']) date_end = Schedule.reformat(form['date_end']) Schedule.objects.create(user=user, pto=pto, date_begin= date_begin, date_end=date_end, event_name=form['event_name' ], event_type='PTO', event_desc=form['event_description'], created_at=timezone.now(), updated_at=timezone.now()) messages.add_message(request, messages.INFO, 'Information successfully updated') except Exception as e: messages.add_message(request, messages.INFO, str(e)) url = '/users/%s/paid_time_off/' % user_id return redirect(url, permanent=False) <|reserved_special_token_1|> from django.http import HttpResponse from django.views.decorators.http import require_http_methods from django.shortcuts import render, redirect from app.models import PaidTimeOff, Schedule from django.utils import timezone from django.contrib import messages from app.decorators import user_is_authenticated from app.views import utils @require_http_methods(["GET", "POST"]) @user_is_authenticated def index(request, user_id): user = utils.current_user(request) if not user: return HttpResponse("User " + str(user_id) + " NOT FOUND") pto = PaidTimeOff.objects.filter(user=user).first() if not pto: return HttpResponse("PTO " + str(user_id) + " NOT FOUND") if request.method == "GET": return index_get(request, user_id, user, pto) elif request.method == "POST": return index_post(request, user_id, user, pto) else: return HttpResponse("Invalid HTTP method") def index_get(request, user_id, user, pto): # pylint: disable=unused-argument schedules = Schedule.to_calendar((Schedule.objects.filter(pto=pto))) context = pto.__dict__ context.update({"schedules": schedules, "current_user": user}) return render(request, "users/paid_time_off.html", context=context) def 
index_post(request, user_id, user, pto): form = request.POST if not form: return HttpResponse("No form found") err_msg = PaidTimeOff.validate_PTO_form(form) if len(err_msg) > 0: messages.add_message(request, messages.INFO, err_msg) else: try: date_begin = Schedule.reformat(form['date_begin']) date_end = Schedule.reformat(form['date_end']) Schedule.objects.create( user=user, pto=pto, date_begin=date_begin, date_end=date_end, event_name=form['event_name'], event_type='PTO', event_desc=form['event_description'], created_at=timezone.now(), updated_at=timezone.now()) messages.add_message(request, messages.INFO, "Information successfully updated") except Exception as e: messages.add_message(request, messages.INFO, str(e)) url = "/users/%s/paid_time_off/" % user_id return redirect(url, permanent=False)
flexible
{ "blob_id": "7245d4db6440d38b9302907a6203c1507c373112", "index": 6970, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef index_get(request, user_id, user, pto):\n schedules = Schedule.to_calendar(Schedule.objects.filter(pto=pto))\n context = pto.__dict__\n context.update({'schedules': schedules, 'current_user': user})\n return render(request, 'users/paid_time_off.html', context=context)\n\n\ndef index_post(request, user_id, user, pto):\n form = request.POST\n if not form:\n return HttpResponse('No form found')\n err_msg = PaidTimeOff.validate_PTO_form(form)\n if len(err_msg) > 0:\n messages.add_message(request, messages.INFO, err_msg)\n else:\n try:\n date_begin = Schedule.reformat(form['date_begin'])\n date_end = Schedule.reformat(form['date_end'])\n Schedule.objects.create(user=user, pto=pto, date_begin=\n date_begin, date_end=date_end, event_name=form['event_name'\n ], event_type='PTO', event_desc=form['event_description'],\n created_at=timezone.now(), updated_at=timezone.now())\n messages.add_message(request, messages.INFO,\n 'Information successfully updated')\n except Exception as e:\n messages.add_message(request, messages.INFO, str(e))\n url = '/users/%s/paid_time_off/' % user_id\n return redirect(url, permanent=False)\n", "step-3": "<mask token>\n\n\n@require_http_methods(['GET', 'POST'])\n@user_is_authenticated\ndef index(request, user_id):\n user = utils.current_user(request)\n if not user:\n return HttpResponse('User ' + str(user_id) + ' NOT FOUND')\n pto = PaidTimeOff.objects.filter(user=user).first()\n if not pto:\n return HttpResponse('PTO ' + str(user_id) + ' NOT FOUND')\n if request.method == 'GET':\n return index_get(request, user_id, user, pto)\n elif request.method == 'POST':\n return index_post(request, user_id, user, pto)\n else:\n return HttpResponse('Invalid HTTP method')\n\n\ndef index_get(request, user_id, user, pto):\n schedules = Schedule.to_calendar(Schedule.objects.filter(pto=pto))\n context = pto.__dict__\n 
context.update({'schedules': schedules, 'current_user': user})\n return render(request, 'users/paid_time_off.html', context=context)\n\n\ndef index_post(request, user_id, user, pto):\n form = request.POST\n if not form:\n return HttpResponse('No form found')\n err_msg = PaidTimeOff.validate_PTO_form(form)\n if len(err_msg) > 0:\n messages.add_message(request, messages.INFO, err_msg)\n else:\n try:\n date_begin = Schedule.reformat(form['date_begin'])\n date_end = Schedule.reformat(form['date_end'])\n Schedule.objects.create(user=user, pto=pto, date_begin=\n date_begin, date_end=date_end, event_name=form['event_name'\n ], event_type='PTO', event_desc=form['event_description'],\n created_at=timezone.now(), updated_at=timezone.now())\n messages.add_message(request, messages.INFO,\n 'Information successfully updated')\n except Exception as e:\n messages.add_message(request, messages.INFO, str(e))\n url = '/users/%s/paid_time_off/' % user_id\n return redirect(url, permanent=False)\n", "step-4": "from django.http import HttpResponse\nfrom django.views.decorators.http import require_http_methods\nfrom django.shortcuts import render, redirect\nfrom app.models import PaidTimeOff, Schedule\nfrom django.utils import timezone\nfrom django.contrib import messages\nfrom app.decorators import user_is_authenticated\nfrom app.views import utils\n\n\n@require_http_methods(['GET', 'POST'])\n@user_is_authenticated\ndef index(request, user_id):\n user = utils.current_user(request)\n if not user:\n return HttpResponse('User ' + str(user_id) + ' NOT FOUND')\n pto = PaidTimeOff.objects.filter(user=user).first()\n if not pto:\n return HttpResponse('PTO ' + str(user_id) + ' NOT FOUND')\n if request.method == 'GET':\n return index_get(request, user_id, user, pto)\n elif request.method == 'POST':\n return index_post(request, user_id, user, pto)\n else:\n return HttpResponse('Invalid HTTP method')\n\n\ndef index_get(request, user_id, user, pto):\n schedules = 
Schedule.to_calendar(Schedule.objects.filter(pto=pto))\n context = pto.__dict__\n context.update({'schedules': schedules, 'current_user': user})\n return render(request, 'users/paid_time_off.html', context=context)\n\n\ndef index_post(request, user_id, user, pto):\n form = request.POST\n if not form:\n return HttpResponse('No form found')\n err_msg = PaidTimeOff.validate_PTO_form(form)\n if len(err_msg) > 0:\n messages.add_message(request, messages.INFO, err_msg)\n else:\n try:\n date_begin = Schedule.reformat(form['date_begin'])\n date_end = Schedule.reformat(form['date_end'])\n Schedule.objects.create(user=user, pto=pto, date_begin=\n date_begin, date_end=date_end, event_name=form['event_name'\n ], event_type='PTO', event_desc=form['event_description'],\n created_at=timezone.now(), updated_at=timezone.now())\n messages.add_message(request, messages.INFO,\n 'Information successfully updated')\n except Exception as e:\n messages.add_message(request, messages.INFO, str(e))\n url = '/users/%s/paid_time_off/' % user_id\n return redirect(url, permanent=False)\n", "step-5": "\n\nfrom django.http import HttpResponse\nfrom django.views.decorators.http import require_http_methods\nfrom django.shortcuts import render, redirect\nfrom app.models import PaidTimeOff, Schedule\nfrom django.utils import timezone\nfrom django.contrib import messages\nfrom app.decorators import user_is_authenticated\nfrom app.views import utils\n\n\n@require_http_methods([\"GET\", \"POST\"])\n@user_is_authenticated\ndef index(request, user_id):\n user = utils.current_user(request)\n if not user:\n return HttpResponse(\"User \" + str(user_id) + \" NOT FOUND\")\n pto = PaidTimeOff.objects.filter(user=user).first()\n if not pto:\n return HttpResponse(\"PTO \" + str(user_id) + \" NOT FOUND\")\n if request.method == \"GET\":\n return index_get(request, user_id, user, pto)\n elif request.method == \"POST\":\n return index_post(request, user_id, user, pto)\n else:\n return HttpResponse(\"Invalid HTTP 
method\")\n\n\ndef index_get(request, user_id, user, pto): # pylint: disable=unused-argument\n schedules = Schedule.to_calendar((Schedule.objects.filter(pto=pto)))\n context = pto.__dict__\n context.update({\"schedules\": schedules, \"current_user\": user})\n return render(request, \"users/paid_time_off.html\",\n context=context)\n\n\ndef index_post(request, user_id, user, pto):\n form = request.POST\n if not form:\n return HttpResponse(\"No form found\")\n err_msg = PaidTimeOff.validate_PTO_form(form)\n if len(err_msg) > 0:\n messages.add_message(request, messages.INFO, err_msg)\n else:\n try:\n date_begin = Schedule.reformat(form['date_begin'])\n date_end = Schedule.reformat(form['date_end'])\n Schedule.objects.create(\n user=user, pto=pto, date_begin=date_begin,\n date_end=date_end, event_name=form['event_name'],\n event_type='PTO', event_desc=form['event_description'],\n created_at=timezone.now(), updated_at=timezone.now())\n messages.add_message(request, messages.INFO,\n \"Information successfully updated\")\n except Exception as e:\n messages.add_message(request, messages.INFO, str(e))\n url = \"/users/%s/paid_time_off/\" % user_id\n return redirect(url, permanent=False)\n", "step-ids": [ 0, 2, 3, 4, 5 ] }
[ 0, 2, 3, 4, 5 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> print('hello world') print('lol') print('new changes in vis') <|reserved_special_token_1|> print("hello world") print("lol") print("new changes in vis")
flexible
{ "blob_id": "6c88e55a76cbd84cee0ebd6c51d930cc2da100d2", "index": 2945, "step-1": "<mask token>\n", "step-2": "print('hello world')\nprint('lol')\nprint('new changes in vis')\n", "step-3": "print(\"hello world\")\nprint(\"lol\")\nprint(\"new changes in vis\")", "step-4": null, "step-5": null, "step-ids": [ 0, 1, 2 ] }
[ 0, 1, 2 ]
<|reserved_special_token_0|> class LLKEventsBot(Bot): <|reserved_special_token_0|> async def on_ready(self): if not os.path.exists('db'): os.makedirs('db') if not os.path.exists('logs'): os.makedirs('logs') print('\nLoading extensions...') for extension in extensions: print(f'Loading {extension}') bot.load_extension(extension) await bot.change_presence(activity=discord.Game(f'{PREFIX}help')) print( f'\nLogged in as: {bot.user.name} - {bot.user.id}\nVersion: {discord.__version__}\n' ) async def on_command_error(self, ctx, error): if isinstance(error, commands.BotMissingPermissions): await ctx.send(f'I have no permission to do that') return elif isinstance(error, commands.CheckFailure): await ctx.send(f'You have no permission to use this command') return elif isinstance(error, commands.MissingRequiredArgument): await ctx.send( f'You forgot to inform the following parameter: {error.param}') else: d = datetime.datetime.now() with open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding ='utf8') as f: f.write( f"""------------- {d.hour}:{d.minute}:{d.second}.{d.microsecond} Command: {ctx.message.content} Author: {ctx.author} Exception: {type(error)} Description: {error} ------------- """ ) await ctx.send(f'It seems something went wrong:```{error}```') return <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class LLKEventsBot(Bot): def __init__(self): super().__init__(description='Bot created by Oto#2494', command_prefix=PREFIX, owner_id=271992863175344130, intents= intents, help_command=None) print('\nLoading embed data...') try: with open(f'{dir_path}/db/embed_id.json', 'r+') as f: try: self.embed_data = json.load(f) if self.embed_data: self.embed_id = self.embed_data['eventEmbed']['id'] except: self.embed_data = {'eventEmbed': {'id': None}} self.embed_id = self.embed_data['eventEmbed']['id'] json.dump(self.embed_data, f, indent=4) except: with open(f'{dir_path}/db/embed_id.json', 'w+'): self.embed_data = {'eventEmbed': {'id': 
self.bot.embed_id}} self.embed_id = self.embed_data['eventEmbed']['id'] json.dump(self.embed_data, f, indent=4) print('Loading permissions data...') try: with open('db/roles.json', 'r+') as f: try: self.perms_data = json.load(f) if self.perms_data: self.perms = self.perms_data['permissions'] except Exception as e: print(f'{e}') except: with open(f'{dir_path}/db/roles.json', 'w+') as f: self.perms_data = {'permissions': {'admins': [], 'mods': [], 'hosts': []}} self.perms = self.perms_data['permissions'] json.dump(self.perms_data, f, indent=4) print('Loading roles DB...') self.conn = sqlite3.connect(f'{dir_path}/db/events.db') self.cursor = self.conn.cursor() self.cursor.execute( """ CREATE TABLE IF NOT EXISTS events ( event_id STRING NOT NULL, user_id STRING NOT NULL, description STRING NOT NULL, target STRING NOT NULL ) """ ) async def on_ready(self): if not os.path.exists('db'): os.makedirs('db') if not os.path.exists('logs'): os.makedirs('logs') print('\nLoading extensions...') for extension in extensions: print(f'Loading {extension}') bot.load_extension(extension) await bot.change_presence(activity=discord.Game(f'{PREFIX}help')) print( f'\nLogged in as: {bot.user.name} - {bot.user.id}\nVersion: {discord.__version__}\n' ) async def on_command_error(self, ctx, error): if isinstance(error, commands.BotMissingPermissions): await ctx.send(f'I have no permission to do that') return elif isinstance(error, commands.CheckFailure): await ctx.send(f'You have no permission to use this command') return elif isinstance(error, commands.MissingRequiredArgument): await ctx.send( f'You forgot to inform the following parameter: {error.param}') else: d = datetime.datetime.now() with open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding ='utf8') as f: f.write( f"""------------- {d.hour}:{d.minute}:{d.second}.{d.microsecond} Command: {ctx.message.content} Author: {ctx.author} Exception: {type(error)} Description: {error} ------------- """ ) await ctx.send(f'It seems something 
went wrong:```{error}```') return <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> intents = discord.Intents.default() intents.members = True load_dotenv() TOKEN = os.getenv('DISCORD_TOKEN') GUILD = os.getenv('DISCORD_GUILD') PREFIX = os.getenv('BOT_PREFIX') dir_path = os.path.dirname(os.path.realpath(__file__)) extensions = ['cogs.general', 'cogs.events', 'cogs.moderation'] class LLKEventsBot(Bot): def __init__(self): super().__init__(description='Bot created by Oto#2494', command_prefix=PREFIX, owner_id=271992863175344130, intents= intents, help_command=None) print('\nLoading embed data...') try: with open(f'{dir_path}/db/embed_id.json', 'r+') as f: try: self.embed_data = json.load(f) if self.embed_data: self.embed_id = self.embed_data['eventEmbed']['id'] except: self.embed_data = {'eventEmbed': {'id': None}} self.embed_id = self.embed_data['eventEmbed']['id'] json.dump(self.embed_data, f, indent=4) except: with open(f'{dir_path}/db/embed_id.json', 'w+'): self.embed_data = {'eventEmbed': {'id': self.bot.embed_id}} self.embed_id = self.embed_data['eventEmbed']['id'] json.dump(self.embed_data, f, indent=4) print('Loading permissions data...') try: with open('db/roles.json', 'r+') as f: try: self.perms_data = json.load(f) if self.perms_data: self.perms = self.perms_data['permissions'] except Exception as e: print(f'{e}') except: with open(f'{dir_path}/db/roles.json', 'w+') as f: self.perms_data = {'permissions': {'admins': [], 'mods': [], 'hosts': []}} self.perms = self.perms_data['permissions'] json.dump(self.perms_data, f, indent=4) print('Loading roles DB...') self.conn = sqlite3.connect(f'{dir_path}/db/events.db') self.cursor = self.conn.cursor() self.cursor.execute( """ CREATE TABLE IF NOT EXISTS events ( event_id STRING NOT NULL, user_id STRING NOT NULL, description STRING NOT NULL, target STRING NOT NULL ) """ ) async def on_ready(self): if not os.path.exists('db'): os.makedirs('db') if not os.path.exists('logs'): 
os.makedirs('logs') print('\nLoading extensions...') for extension in extensions: print(f'Loading {extension}') bot.load_extension(extension) await bot.change_presence(activity=discord.Game(f'{PREFIX}help')) print( f'\nLogged in as: {bot.user.name} - {bot.user.id}\nVersion: {discord.__version__}\n' ) async def on_command_error(self, ctx, error): if isinstance(error, commands.BotMissingPermissions): await ctx.send(f'I have no permission to do that') return elif isinstance(error, commands.CheckFailure): await ctx.send(f'You have no permission to use this command') return elif isinstance(error, commands.MissingRequiredArgument): await ctx.send( f'You forgot to inform the following parameter: {error.param}') else: d = datetime.datetime.now() with open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding ='utf8') as f: f.write( f"""------------- {d.hour}:{d.minute}:{d.second}.{d.microsecond} Command: {ctx.message.content} Author: {ctx.author} Exception: {type(error)} Description: {error} ------------- """ ) await ctx.send(f'It seems something went wrong:```{error}```') return bot = LLKEventsBot() bot.run(TOKEN) <|reserved_special_token_1|> import os import sqlite3 import json import datetime from dotenv import load_dotenv import discord from discord.ext import commands from discord.ext.commands import Bot from cogs.utils import helper as h intents = discord.Intents.default() intents.members = True load_dotenv() TOKEN = os.getenv('DISCORD_TOKEN') GUILD = os.getenv('DISCORD_GUILD') PREFIX = os.getenv('BOT_PREFIX') dir_path = os.path.dirname(os.path.realpath(__file__)) extensions = ['cogs.general', 'cogs.events', 'cogs.moderation'] class LLKEventsBot(Bot): def __init__(self): super().__init__(description='Bot created by Oto#2494', command_prefix=PREFIX, owner_id=271992863175344130, intents= intents, help_command=None) print('\nLoading embed data...') try: with open(f'{dir_path}/db/embed_id.json', 'r+') as f: try: self.embed_data = json.load(f) if self.embed_data: 
self.embed_id = self.embed_data['eventEmbed']['id'] except: self.embed_data = {'eventEmbed': {'id': None}} self.embed_id = self.embed_data['eventEmbed']['id'] json.dump(self.embed_data, f, indent=4) except: with open(f'{dir_path}/db/embed_id.json', 'w+'): self.embed_data = {'eventEmbed': {'id': self.bot.embed_id}} self.embed_id = self.embed_data['eventEmbed']['id'] json.dump(self.embed_data, f, indent=4) print('Loading permissions data...') try: with open('db/roles.json', 'r+') as f: try: self.perms_data = json.load(f) if self.perms_data: self.perms = self.perms_data['permissions'] except Exception as e: print(f'{e}') except: with open(f'{dir_path}/db/roles.json', 'w+') as f: self.perms_data = {'permissions': {'admins': [], 'mods': [], 'hosts': []}} self.perms = self.perms_data['permissions'] json.dump(self.perms_data, f, indent=4) print('Loading roles DB...') self.conn = sqlite3.connect(f'{dir_path}/db/events.db') self.cursor = self.conn.cursor() self.cursor.execute( """ CREATE TABLE IF NOT EXISTS events ( event_id STRING NOT NULL, user_id STRING NOT NULL, description STRING NOT NULL, target STRING NOT NULL ) """ ) async def on_ready(self): if not os.path.exists('db'): os.makedirs('db') if not os.path.exists('logs'): os.makedirs('logs') print('\nLoading extensions...') for extension in extensions: print(f'Loading {extension}') bot.load_extension(extension) await bot.change_presence(activity=discord.Game(f'{PREFIX}help')) print( f'\nLogged in as: {bot.user.name} - {bot.user.id}\nVersion: {discord.__version__}\n' ) async def on_command_error(self, ctx, error): if isinstance(error, commands.BotMissingPermissions): await ctx.send(f'I have no permission to do that') return elif isinstance(error, commands.CheckFailure): await ctx.send(f'You have no permission to use this command') return elif isinstance(error, commands.MissingRequiredArgument): await ctx.send( f'You forgot to inform the following parameter: {error.param}') else: d = datetime.datetime.now() with 
open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding ='utf8') as f: f.write( f"""------------- {d.hour}:{d.minute}:{d.second}.{d.microsecond} Command: {ctx.message.content} Author: {ctx.author} Exception: {type(error)} Description: {error} ------------- """ ) await ctx.send(f'It seems something went wrong:```{error}```') return bot = LLKEventsBot() bot.run(TOKEN) <|reserved_special_token_1|> # bot.py import os import sqlite3 import json import datetime from dotenv import load_dotenv import discord from discord.ext import commands from discord.ext.commands import Bot from cogs.utils import helper as h intents = discord.Intents.default() intents.members = True load_dotenv() TOKEN = os.getenv('DISCORD_TOKEN') GUILD = os.getenv('DISCORD_GUILD') PREFIX = os.getenv('BOT_PREFIX') dir_path = os.path.dirname(os.path.realpath(__file__)) extensions = ['cogs.general', 'cogs.events', 'cogs.moderation'] class LLKEventsBot(Bot): def __init__(self): super().__init__( description="Bot created by Oto#2494", command_prefix=PREFIX, owner_id=271992863175344130, intents=intents, help_command=None ) print('\nLoading embed data...') try: with open(f'{dir_path}/db/embed_id.json', 'r+') as f: try: self.embed_data = json.load(f) if self.embed_data: self.embed_id = self.embed_data['eventEmbed']['id'] except: self.embed_data = {"eventEmbed":{ "id": None }} self.embed_id = self.embed_data['eventEmbed']['id'] json.dump(self.embed_data, f, indent=4) except: with open(f'{dir_path}/db/embed_id.json', 'w+'): self.embed_data = {"eventEmbed":{ "id": self.bot.embed_id }} self.embed_id = self.embed_data['eventEmbed']['id'] json.dump(self.embed_data, f, indent=4) print('Loading permissions data...') try: with open('db/roles.json', 'r+') as f: try: self.perms_data = json.load(f) if self.perms_data: self.perms = self.perms_data['permissions'] except Exception as e: print(f'{e}') except: with open(f'{dir_path}/db/roles.json', 'w+') as f: self.perms_data = {"permissions":{ "admins": [], "mods": [], 
"hosts": [] }} self.perms = self.perms_data['permissions'] json.dump(self.perms_data, f, indent=4) print('Loading roles DB...') self.conn = sqlite3.connect(f'{dir_path}/db/events.db') self.cursor = self.conn.cursor() self.cursor.execute(""" CREATE TABLE IF NOT EXISTS events ( event_id STRING NOT NULL, user_id STRING NOT NULL, description STRING NOT NULL, target STRING NOT NULL ) """) # print('Loading embed data...') # try: # with open('db/embed_id.json', 'r+') as f: # try: # self.embed_data = json.load(f) # if self.embed_data: # self.embed_id = self.embed_data['eventEmbed']['id'] # except Exception as e: # print(f'{e}') # except: # open('db/embed_id.json', 'w+') async def on_ready(self): if not os.path.exists('db'): os.makedirs('db') if not os.path.exists('logs'): os.makedirs('logs') print('\nLoading extensions...') for extension in extensions: print(f'Loading {extension}') bot.load_extension(extension) await bot.change_presence(activity=discord.Game(f'{PREFIX}help')) print(f'\nLogged in as: {bot.user.name} - {bot.user.id}\nVersion: {discord.__version__}\n') # async def on_message(self, msg): # if msg.author.bot: # return async def on_command_error(self, ctx, error): if isinstance(error, commands.BotMissingPermissions): await ctx.send(f'I have no permission to do that') return elif isinstance(error, commands.CheckFailure): await ctx.send(f'You have no permission to use this command') return elif isinstance(error, commands.MissingRequiredArgument): await ctx.send(f'You forgot to inform the following parameter: {error.param}') else: d = datetime.datetime.now() with open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding='utf8') as f: # f.write(f'''-------------\n{d.hour}:{d.minute}:{d.second}.{d.microsecond}\n{type(error)}\n{error}\n-------------\n\n'''') f.write( '-------------\n' f'{d.hour}:{d.minute}:{d.second}.{d.microsecond}\n' f'Command: {ctx.message.content}\n' f'Author: {ctx.author}\n' f'Exception: {type(error)}\n' f'Description: {error}\n' 
'-------------\n\n' ) await ctx.send(f'It seems something went wrong:```{error}```') return bot = LLKEventsBot() bot.run(TOKEN)
flexible
{ "blob_id": "849343561dd9bdcfc1da66c604e1bfa4aa10ddf3", "index": 5359, "step-1": "<mask token>\n\n\nclass LLKEventsBot(Bot):\n <mask token>\n\n async def on_ready(self):\n if not os.path.exists('db'):\n os.makedirs('db')\n if not os.path.exists('logs'):\n os.makedirs('logs')\n print('\\nLoading extensions...')\n for extension in extensions:\n print(f'Loading {extension}')\n bot.load_extension(extension)\n await bot.change_presence(activity=discord.Game(f'{PREFIX}help'))\n print(\n f'\\nLogged in as: {bot.user.name} - {bot.user.id}\\nVersion: {discord.__version__}\\n'\n )\n\n async def on_command_error(self, ctx, error):\n if isinstance(error, commands.BotMissingPermissions):\n await ctx.send(f'I have no permission to do that')\n return\n elif isinstance(error, commands.CheckFailure):\n await ctx.send(f'You have no permission to use this command')\n return\n elif isinstance(error, commands.MissingRequiredArgument):\n await ctx.send(\n f'You forgot to inform the following parameter: {error.param}')\n else:\n d = datetime.datetime.now()\n with open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding\n ='utf8') as f:\n f.write(\n f\"\"\"-------------\n{d.hour}:{d.minute}:{d.second}.{d.microsecond}\nCommand: {ctx.message.content}\nAuthor: {ctx.author}\nException: {type(error)}\nDescription: {error}\n-------------\n\n\"\"\"\n )\n await ctx.send(f'It seems something went wrong:```{error}```')\n return\n\n\n<mask token>\n", "step-2": "<mask token>\n\n\nclass LLKEventsBot(Bot):\n\n def __init__(self):\n super().__init__(description='Bot created by Oto#2494',\n command_prefix=PREFIX, owner_id=271992863175344130, intents=\n intents, help_command=None)\n print('\\nLoading embed data...')\n try:\n with open(f'{dir_path}/db/embed_id.json', 'r+') as f:\n try:\n self.embed_data = json.load(f)\n if self.embed_data:\n self.embed_id = self.embed_data['eventEmbed']['id']\n except:\n self.embed_data = {'eventEmbed': {'id': None}}\n self.embed_id = 
self.embed_data['eventEmbed']['id']\n json.dump(self.embed_data, f, indent=4)\n except:\n with open(f'{dir_path}/db/embed_id.json', 'w+'):\n self.embed_data = {'eventEmbed': {'id': self.bot.embed_id}}\n self.embed_id = self.embed_data['eventEmbed']['id']\n json.dump(self.embed_data, f, indent=4)\n print('Loading permissions data...')\n try:\n with open('db/roles.json', 'r+') as f:\n try:\n self.perms_data = json.load(f)\n if self.perms_data:\n self.perms = self.perms_data['permissions']\n except Exception as e:\n print(f'{e}')\n except:\n with open(f'{dir_path}/db/roles.json', 'w+') as f:\n self.perms_data = {'permissions': {'admins': [], 'mods': [],\n 'hosts': []}}\n self.perms = self.perms_data['permissions']\n json.dump(self.perms_data, f, indent=4)\n print('Loading roles DB...')\n self.conn = sqlite3.connect(f'{dir_path}/db/events.db')\n self.cursor = self.conn.cursor()\n self.cursor.execute(\n \"\"\"\n CREATE TABLE IF NOT EXISTS events (\n event_id STRING NOT NULL,\n user_id STRING NOT NULL,\n description STRING NOT NULL,\n target STRING NOT NULL\n )\n \"\"\"\n )\n\n async def on_ready(self):\n if not os.path.exists('db'):\n os.makedirs('db')\n if not os.path.exists('logs'):\n os.makedirs('logs')\n print('\\nLoading extensions...')\n for extension in extensions:\n print(f'Loading {extension}')\n bot.load_extension(extension)\n await bot.change_presence(activity=discord.Game(f'{PREFIX}help'))\n print(\n f'\\nLogged in as: {bot.user.name} - {bot.user.id}\\nVersion: {discord.__version__}\\n'\n )\n\n async def on_command_error(self, ctx, error):\n if isinstance(error, commands.BotMissingPermissions):\n await ctx.send(f'I have no permission to do that')\n return\n elif isinstance(error, commands.CheckFailure):\n await ctx.send(f'You have no permission to use this command')\n return\n elif isinstance(error, commands.MissingRequiredArgument):\n await ctx.send(\n f'You forgot to inform the following parameter: {error.param}')\n else:\n d = datetime.datetime.now()\n 
with open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding\n ='utf8') as f:\n f.write(\n f\"\"\"-------------\n{d.hour}:{d.minute}:{d.second}.{d.microsecond}\nCommand: {ctx.message.content}\nAuthor: {ctx.author}\nException: {type(error)}\nDescription: {error}\n-------------\n\n\"\"\"\n )\n await ctx.send(f'It seems something went wrong:```{error}```')\n return\n\n\n<mask token>\n", "step-3": "<mask token>\nintents = discord.Intents.default()\nintents.members = True\nload_dotenv()\nTOKEN = os.getenv('DISCORD_TOKEN')\nGUILD = os.getenv('DISCORD_GUILD')\nPREFIX = os.getenv('BOT_PREFIX')\ndir_path = os.path.dirname(os.path.realpath(__file__))\nextensions = ['cogs.general', 'cogs.events', 'cogs.moderation']\n\n\nclass LLKEventsBot(Bot):\n\n def __init__(self):\n super().__init__(description='Bot created by Oto#2494',\n command_prefix=PREFIX, owner_id=271992863175344130, intents=\n intents, help_command=None)\n print('\\nLoading embed data...')\n try:\n with open(f'{dir_path}/db/embed_id.json', 'r+') as f:\n try:\n self.embed_data = json.load(f)\n if self.embed_data:\n self.embed_id = self.embed_data['eventEmbed']['id']\n except:\n self.embed_data = {'eventEmbed': {'id': None}}\n self.embed_id = self.embed_data['eventEmbed']['id']\n json.dump(self.embed_data, f, indent=4)\n except:\n with open(f'{dir_path}/db/embed_id.json', 'w+'):\n self.embed_data = {'eventEmbed': {'id': self.bot.embed_id}}\n self.embed_id = self.embed_data['eventEmbed']['id']\n json.dump(self.embed_data, f, indent=4)\n print('Loading permissions data...')\n try:\n with open('db/roles.json', 'r+') as f:\n try:\n self.perms_data = json.load(f)\n if self.perms_data:\n self.perms = self.perms_data['permissions']\n except Exception as e:\n print(f'{e}')\n except:\n with open(f'{dir_path}/db/roles.json', 'w+') as f:\n self.perms_data = {'permissions': {'admins': [], 'mods': [],\n 'hosts': []}}\n self.perms = self.perms_data['permissions']\n json.dump(self.perms_data, f, indent=4)\n print('Loading roles 
DB...')\n self.conn = sqlite3.connect(f'{dir_path}/db/events.db')\n self.cursor = self.conn.cursor()\n self.cursor.execute(\n \"\"\"\n CREATE TABLE IF NOT EXISTS events (\n event_id STRING NOT NULL,\n user_id STRING NOT NULL,\n description STRING NOT NULL,\n target STRING NOT NULL\n )\n \"\"\"\n )\n\n async def on_ready(self):\n if not os.path.exists('db'):\n os.makedirs('db')\n if not os.path.exists('logs'):\n os.makedirs('logs')\n print('\\nLoading extensions...')\n for extension in extensions:\n print(f'Loading {extension}')\n bot.load_extension(extension)\n await bot.change_presence(activity=discord.Game(f'{PREFIX}help'))\n print(\n f'\\nLogged in as: {bot.user.name} - {bot.user.id}\\nVersion: {discord.__version__}\\n'\n )\n\n async def on_command_error(self, ctx, error):\n if isinstance(error, commands.BotMissingPermissions):\n await ctx.send(f'I have no permission to do that')\n return\n elif isinstance(error, commands.CheckFailure):\n await ctx.send(f'You have no permission to use this command')\n return\n elif isinstance(error, commands.MissingRequiredArgument):\n await ctx.send(\n f'You forgot to inform the following parameter: {error.param}')\n else:\n d = datetime.datetime.now()\n with open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding\n ='utf8') as f:\n f.write(\n f\"\"\"-------------\n{d.hour}:{d.minute}:{d.second}.{d.microsecond}\nCommand: {ctx.message.content}\nAuthor: {ctx.author}\nException: {type(error)}\nDescription: {error}\n-------------\n\n\"\"\"\n )\n await ctx.send(f'It seems something went wrong:```{error}```')\n return\n\n\nbot = LLKEventsBot()\nbot.run(TOKEN)\n", "step-4": "import os\nimport sqlite3\nimport json\nimport datetime\nfrom dotenv import load_dotenv\nimport discord\nfrom discord.ext import commands\nfrom discord.ext.commands import Bot\nfrom cogs.utils import helper as h\nintents = discord.Intents.default()\nintents.members = True\nload_dotenv()\nTOKEN = os.getenv('DISCORD_TOKEN')\nGUILD = 
os.getenv('DISCORD_GUILD')\nPREFIX = os.getenv('BOT_PREFIX')\ndir_path = os.path.dirname(os.path.realpath(__file__))\nextensions = ['cogs.general', 'cogs.events', 'cogs.moderation']\n\n\nclass LLKEventsBot(Bot):\n\n def __init__(self):\n super().__init__(description='Bot created by Oto#2494',\n command_prefix=PREFIX, owner_id=271992863175344130, intents=\n intents, help_command=None)\n print('\\nLoading embed data...')\n try:\n with open(f'{dir_path}/db/embed_id.json', 'r+') as f:\n try:\n self.embed_data = json.load(f)\n if self.embed_data:\n self.embed_id = self.embed_data['eventEmbed']['id']\n except:\n self.embed_data = {'eventEmbed': {'id': None}}\n self.embed_id = self.embed_data['eventEmbed']['id']\n json.dump(self.embed_data, f, indent=4)\n except:\n with open(f'{dir_path}/db/embed_id.json', 'w+'):\n self.embed_data = {'eventEmbed': {'id': self.bot.embed_id}}\n self.embed_id = self.embed_data['eventEmbed']['id']\n json.dump(self.embed_data, f, indent=4)\n print('Loading permissions data...')\n try:\n with open('db/roles.json', 'r+') as f:\n try:\n self.perms_data = json.load(f)\n if self.perms_data:\n self.perms = self.perms_data['permissions']\n except Exception as e:\n print(f'{e}')\n except:\n with open(f'{dir_path}/db/roles.json', 'w+') as f:\n self.perms_data = {'permissions': {'admins': [], 'mods': [],\n 'hosts': []}}\n self.perms = self.perms_data['permissions']\n json.dump(self.perms_data, f, indent=4)\n print('Loading roles DB...')\n self.conn = sqlite3.connect(f'{dir_path}/db/events.db')\n self.cursor = self.conn.cursor()\n self.cursor.execute(\n \"\"\"\n CREATE TABLE IF NOT EXISTS events (\n event_id STRING NOT NULL,\n user_id STRING NOT NULL,\n description STRING NOT NULL,\n target STRING NOT NULL\n )\n \"\"\"\n )\n\n async def on_ready(self):\n if not os.path.exists('db'):\n os.makedirs('db')\n if not os.path.exists('logs'):\n os.makedirs('logs')\n print('\\nLoading extensions...')\n for extension in extensions:\n print(f'Loading 
{extension}')\n bot.load_extension(extension)\n await bot.change_presence(activity=discord.Game(f'{PREFIX}help'))\n print(\n f'\\nLogged in as: {bot.user.name} - {bot.user.id}\\nVersion: {discord.__version__}\\n'\n )\n\n async def on_command_error(self, ctx, error):\n if isinstance(error, commands.BotMissingPermissions):\n await ctx.send(f'I have no permission to do that')\n return\n elif isinstance(error, commands.CheckFailure):\n await ctx.send(f'You have no permission to use this command')\n return\n elif isinstance(error, commands.MissingRequiredArgument):\n await ctx.send(\n f'You forgot to inform the following parameter: {error.param}')\n else:\n d = datetime.datetime.now()\n with open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding\n ='utf8') as f:\n f.write(\n f\"\"\"-------------\n{d.hour}:{d.minute}:{d.second}.{d.microsecond}\nCommand: {ctx.message.content}\nAuthor: {ctx.author}\nException: {type(error)}\nDescription: {error}\n-------------\n\n\"\"\"\n )\n await ctx.send(f'It seems something went wrong:```{error}```')\n return\n\n\nbot = LLKEventsBot()\nbot.run(TOKEN)\n", "step-5": "# bot.py\nimport os\nimport sqlite3\nimport json\n\nimport datetime\n\nfrom dotenv import load_dotenv\n\nimport discord\nfrom discord.ext import commands\nfrom discord.ext.commands import Bot\n\nfrom cogs.utils import helper as h\n\nintents = discord.Intents.default()\nintents.members = True\n\nload_dotenv()\nTOKEN = os.getenv('DISCORD_TOKEN')\nGUILD = os.getenv('DISCORD_GUILD')\nPREFIX = os.getenv('BOT_PREFIX')\n\ndir_path = os.path.dirname(os.path.realpath(__file__))\n\nextensions = ['cogs.general', 'cogs.events', 'cogs.moderation']\n\nclass LLKEventsBot(Bot):\n\n def __init__(self):\n super().__init__(\n description=\"Bot created by Oto#2494\",\n command_prefix=PREFIX,\n owner_id=271992863175344130,\n intents=intents,\n help_command=None\n )\n print('\\nLoading embed data...')\n try:\n with open(f'{dir_path}/db/embed_id.json', 'r+') as f:\n try:\n self.embed_data = 
json.load(f)\n if self.embed_data:\n self.embed_id = self.embed_data['eventEmbed']['id']\n except:\n self.embed_data = {\"eventEmbed\":{\n \"id\": None }}\n self.embed_id = self.embed_data['eventEmbed']['id']\n json.dump(self.embed_data, f, indent=4)\n except:\n with open(f'{dir_path}/db/embed_id.json', 'w+'):\n self.embed_data = {\"eventEmbed\":{\n \"id\": self.bot.embed_id\n }}\n self.embed_id = self.embed_data['eventEmbed']['id']\n json.dump(self.embed_data, f, indent=4)\n\n print('Loading permissions data...')\n try:\n with open('db/roles.json', 'r+') as f:\n try:\n self.perms_data = json.load(f)\n if self.perms_data:\n self.perms = self.perms_data['permissions']\n except Exception as e:\n print(f'{e}')\n except:\n with open(f'{dir_path}/db/roles.json', 'w+') as f:\n self.perms_data = {\"permissions\":{\n \"admins\": [],\n \"mods\": [],\n \"hosts\": []\n }}\n self.perms = self.perms_data['permissions']\n json.dump(self.perms_data, f, indent=4)\n\n print('Loading roles DB...')\n self.conn = sqlite3.connect(f'{dir_path}/db/events.db')\n self.cursor = self.conn.cursor()\n self.cursor.execute(\"\"\"\n CREATE TABLE IF NOT EXISTS events (\n event_id STRING NOT NULL,\n user_id STRING NOT NULL,\n description STRING NOT NULL,\n target STRING NOT NULL\n )\n \"\"\")\n\n # print('Loading embed data...')\n # try:\n # with open('db/embed_id.json', 'r+') as f:\n # try:\n # self.embed_data = json.load(f)\n # if self.embed_data:\n # self.embed_id = self.embed_data['eventEmbed']['id']\n # except Exception as e:\n # print(f'{e}')\n # except:\n # open('db/embed_id.json', 'w+')\n\n async def on_ready(self):\n if not os.path.exists('db'):\n os.makedirs('db')\n if not os.path.exists('logs'):\n os.makedirs('logs')\n\n print('\\nLoading extensions...')\n for extension in extensions:\n print(f'Loading {extension}')\n bot.load_extension(extension)\n\n await bot.change_presence(activity=discord.Game(f'{PREFIX}help'))\n\n print(f'\\nLogged in as: {bot.user.name} - {bot.user.id}\\nVersion: 
{discord.__version__}\\n')\n\n # async def on_message(self, msg):\n # if msg.author.bot:\n # return\n\n async def on_command_error(self, ctx, error):\n if isinstance(error, commands.BotMissingPermissions):\n await ctx.send(f'I have no permission to do that')\n return\n elif isinstance(error, commands.CheckFailure):\n await ctx.send(f'You have no permission to use this command')\n return\n elif isinstance(error, commands.MissingRequiredArgument):\n await ctx.send(f'You forgot to inform the following parameter: {error.param}')\n else:\n d = datetime.datetime.now()\n with open(f'logs/{d.year}-{d.month}-{d.day}.log', 'a', encoding='utf8') as f:\n # f.write(f'''-------------\\n{d.hour}:{d.minute}:{d.second}.{d.microsecond}\\n{type(error)}\\n{error}\\n-------------\\n\\n'''')\n f.write(\n '-------------\\n'\n f'{d.hour}:{d.minute}:{d.second}.{d.microsecond}\\n'\n f'Command: {ctx.message.content}\\n'\n f'Author: {ctx.author}\\n'\n f'Exception: {type(error)}\\n'\n f'Description: {error}\\n'\n '-------------\\n\\n'\n )\n await ctx.send(f'It seems something went wrong:```{error}```')\n return\n\n\nbot = LLKEventsBot()\nbot.run(TOKEN)\n", "step-ids": [ 1, 2, 4, 5, 6 ] }
[ 1, 2, 4, 5, 6 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> driver.maximize_window() driver.get('http://zero.webappsecurity.com/') <|reserved_special_token_0|> driver.find_element_by_xpath("(//a[contains(text(),'privacy')])[1]").click() <|reserved_special_token_0|> for window in windows: driver.switch_to.window(window) if driver.title == 'Legal Information | Micro Focus': break driver.find_element_by_link_text('Free Trials').click() driver.close() driver.switch_to.window(parent_window_handle) driver.find_element_by_id('signin_button').click() <|reserved_special_token_1|> <|reserved_special_token_0|> driver = webdriver.Chrome(executable_path= 'D:\\Naveen\\Selenium\\chromedriver_win32\\chromedriver.exe') driver.maximize_window() driver.get('http://zero.webappsecurity.com/') parent_window_handle = driver.current_window_handle driver.find_element_by_xpath("(//a[contains(text(),'privacy')])[1]").click() windows = driver.window_handles for window in windows: driver.switch_to.window(window) if driver.title == 'Legal Information | Micro Focus': break driver.find_element_by_link_text('Free Trials').click() driver.close() driver.switch_to.window(parent_window_handle) driver.find_element_by_id('signin_button').click() <|reserved_special_token_1|> from selenium import webdriver driver = webdriver.Chrome(executable_path= 'D:\\Naveen\\Selenium\\chromedriver_win32\\chromedriver.exe') driver.maximize_window() driver.get('http://zero.webappsecurity.com/') parent_window_handle = driver.current_window_handle driver.find_element_by_xpath("(//a[contains(text(),'privacy')])[1]").click() windows = driver.window_handles for window in windows: driver.switch_to.window(window) if driver.title == 'Legal Information | Micro Focus': break driver.find_element_by_link_text('Free Trials').click() driver.close() driver.switch_to.window(parent_window_handle) driver.find_element_by_id('signin_button').click() <|reserved_special_token_1|> from selenium import webdriver 
driver = webdriver.Chrome(executable_path=r'D:\Naveen\Selenium\chromedriver_win32\chromedriver.exe') driver.maximize_window() driver.get('http://zero.webappsecurity.com/') parent_window_handle = driver.current_window_handle driver.find_element_by_xpath("(//a[contains(text(),'privacy')])[1]").click() windows = driver.window_handles #driver.switch_to.window(windows[1]) for window in windows: driver.switch_to.window(window) if driver.title == "Legal Information | Micro Focus": break driver.find_element_by_link_text('Free Trials').click() driver.close() driver.switch_to.window(parent_window_handle) driver.find_element_by_id('signin_button').click()
flexible
{ "blob_id": "223413918ba2a49cd13a34026d39b17fb5944572", "index": 5849, "step-1": "<mask token>\n", "step-2": "<mask token>\ndriver.maximize_window()\ndriver.get('http://zero.webappsecurity.com/')\n<mask token>\ndriver.find_element_by_xpath(\"(//a[contains(text(),'privacy')])[1]\").click()\n<mask token>\nfor window in windows:\n driver.switch_to.window(window)\n if driver.title == 'Legal Information | Micro Focus':\n break\ndriver.find_element_by_link_text('Free Trials').click()\ndriver.close()\ndriver.switch_to.window(parent_window_handle)\ndriver.find_element_by_id('signin_button').click()\n", "step-3": "<mask token>\ndriver = webdriver.Chrome(executable_path=\n 'D:\\\\Naveen\\\\Selenium\\\\chromedriver_win32\\\\chromedriver.exe')\ndriver.maximize_window()\ndriver.get('http://zero.webappsecurity.com/')\nparent_window_handle = driver.current_window_handle\ndriver.find_element_by_xpath(\"(//a[contains(text(),'privacy')])[1]\").click()\nwindows = driver.window_handles\nfor window in windows:\n driver.switch_to.window(window)\n if driver.title == 'Legal Information | Micro Focus':\n break\ndriver.find_element_by_link_text('Free Trials').click()\ndriver.close()\ndriver.switch_to.window(parent_window_handle)\ndriver.find_element_by_id('signin_button').click()\n", "step-4": "from selenium import webdriver\ndriver = webdriver.Chrome(executable_path=\n 'D:\\\\Naveen\\\\Selenium\\\\chromedriver_win32\\\\chromedriver.exe')\ndriver.maximize_window()\ndriver.get('http://zero.webappsecurity.com/')\nparent_window_handle = driver.current_window_handle\ndriver.find_element_by_xpath(\"(//a[contains(text(),'privacy')])[1]\").click()\nwindows = driver.window_handles\nfor window in windows:\n driver.switch_to.window(window)\n if driver.title == 'Legal Information | Micro Focus':\n break\ndriver.find_element_by_link_text('Free Trials').click()\ndriver.close()\ndriver.switch_to.window(parent_window_handle)\ndriver.find_element_by_id('signin_button').click()\n", "step-5": "from selenium 
import webdriver\n\ndriver = webdriver.Chrome(executable_path=r'D:\\Naveen\\Selenium\\chromedriver_win32\\chromedriver.exe')\ndriver.maximize_window()\ndriver.get('http://zero.webappsecurity.com/')\n\nparent_window_handle = driver.current_window_handle\ndriver.find_element_by_xpath(\"(//a[contains(text(),'privacy')])[1]\").click()\n\nwindows = driver.window_handles\n#driver.switch_to.window(windows[1])\n\nfor window in windows:\n driver.switch_to.window(window)\n if driver.title == \"Legal Information | Micro Focus\":\n break\n\ndriver.find_element_by_link_text('Free Trials').click()\ndriver.close()\ndriver.switch_to.window(parent_window_handle)\ndriver.find_element_by_id('signin_button').click()", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import os import pathlib from global_settings import * def get_bits(x): return np.where(x < 0, 0, 1) def check_wrong_bits(bits, bits_estimated): return len(np.argwhere(bits != bits_estimated)) def mkdir(file_path): folder = os.path.dirname(file_path) if not os.path.exists(folder): os.makedirs(folder) def mkfile(file_path): mkdir(file_path) filename = pathlib.Path(file_path) filename.touch(exist_ok=True) def concatenate(total, part): return part if total is None else np.concatenate((total, part)) def complex_channel(m=NUM_ANT, n=NUM_ANT): real = np.random.randn(m, n) imag = np.random.randn(m, n) h = np.row_stack( ( np.column_stack((real, -imag)), np.column_stack((imag, real)), ) ) return h def make_channel_batch(): h_batch = None for _ in range(PACKETS_PER_BATCH): h = complex_channel().reshape([1, 2 * NUM_ANT, 2 * NUM_ANT]) for _ in range(TIME_SLOTS_PER_PACKET): h_batch = concatenate(h_batch, h) return h_batch def signal_batch(batch_size=TIMES_SLOTS_PER_BATCH): s_batch = None random_indexes = np.random.uniform(low=0, high=QPSK_CANDIDATE_SIZE, size=batch_size) for t in range(batch_size): i = int(random_indexes[t]) s = QPSK_CANDIDATES[:, i:i + 1].reshape([1, 2 * NUM_ANT, 1]) s_batch = concatenate(s_batch, s) return s_batch def random_distance(n, length): x = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2 y = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2 return np.sqrt(x ** 2 + y ** 2) def zf_batch(y, h): h_t = np.transpose(h, axes=[0, 2, 1]) f = np.linalg.inv(h_t @ h) @ h_t z = f @ y return np.where(z < 0, -1, 1) / np.sqrt(2) def lmmse_batch(y, h): assert len(h.shape) == 3 batch_size, m, n = h.shape eye = np.concatenate([np.eye(n).reshape([1, n, n]) * batch_size], axis=0) ht = np.transpose(h, axes=[0, 2, 1]) z = np.linalg.inv(ht @ h + eye) @ ht @ y return np.where(z < 0, -1, 1) / np.sqrt(2) def maximum_likelihood_detect_bits(y, h): assert len(h.shape) == 3 batch_size, m, n = h.shape s_mld = np.zeros([batch_size, n, 1]) if True: dst = np.sum(np.square(y - h @ 
QPSK_CANDIDATES), axis=1) else: dst = None for j in range(QPSK_CANDIDATE_SIZE): s_cand = QPSK_CANDIDATES[:, j:j + 1].reshape([1, 2 * NUM_ANT, 1]) dj = np.sum(np.square(y - h @ s_cand), axis=(1, 2)).reshape([-1, 1]) if dst is None: dst = dj else: dst = np.concatenate((dst, dj), axis=1) min_indexes = dst.argmin(1) for i, t in enumerate(min_indexes): s_mld[i:i + 1, :, :] = QPSK_CANDIDATES[:, t].reshape([1, 2 * NUM_ANT, 1]) return get_bits(s_mld)
normal
{ "blob_id": "74ffbd55867c4b2c6ccbef7d94e0c65aef139057", "index": 7602, "step-1": "<mask token>\n\n\ndef get_bits(x):\n return np.where(x < 0, 0, 1)\n\n\n<mask token>\n\n\ndef mkdir(file_path):\n folder = os.path.dirname(file_path)\n if not os.path.exists(folder):\n os.makedirs(folder)\n\n\n<mask token>\n\n\ndef concatenate(total, part):\n return part if total is None else np.concatenate((total, part))\n\n\ndef complex_channel(m=NUM_ANT, n=NUM_ANT):\n real = np.random.randn(m, n)\n imag = np.random.randn(m, n)\n h = np.row_stack((np.column_stack((real, -imag)), np.column_stack((imag,\n real))))\n return h\n\n\n<mask token>\n\n\ndef random_distance(n, length):\n x = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n y = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n return np.sqrt(x ** 2 + y ** 2)\n\n\ndef zf_batch(y, h):\n h_t = np.transpose(h, axes=[0, 2, 1])\n f = np.linalg.inv(h_t @ h) @ h_t\n z = f @ y\n return np.where(z < 0, -1, 1) / np.sqrt(2)\n\n\n<mask token>\n\n\ndef maximum_likelihood_detect_bits(y, h):\n assert len(h.shape) == 3\n batch_size, m, n = h.shape\n s_mld = np.zeros([batch_size, n, 1])\n if True:\n dst = np.sum(np.square(y - h @ QPSK_CANDIDATES), axis=1)\n else:\n dst = None\n for j in range(QPSK_CANDIDATE_SIZE):\n s_cand = QPSK_CANDIDATES[:, j:j + 1].reshape([1, 2 * NUM_ANT, 1])\n dj = np.sum(np.square(y - h @ s_cand), axis=(1, 2)).reshape([-1, 1]\n )\n if dst is None:\n dst = dj\n else:\n dst = np.concatenate((dst, dj), axis=1)\n min_indexes = dst.argmin(1)\n for i, t in enumerate(min_indexes):\n s_mld[i:i + 1, :, :] = QPSK_CANDIDATES[:, t].reshape([1, 2 *\n NUM_ANT, 1])\n return get_bits(s_mld)\n", "step-2": "<mask token>\n\n\ndef get_bits(x):\n return np.where(x < 0, 0, 1)\n\n\n<mask token>\n\n\ndef mkdir(file_path):\n folder = os.path.dirname(file_path)\n if not os.path.exists(folder):\n os.makedirs(folder)\n\n\ndef mkfile(file_path):\n mkdir(file_path)\n filename = pathlib.Path(file_path)\n filename.touch(exist_ok=True)\n\n\ndef 
concatenate(total, part):\n return part if total is None else np.concatenate((total, part))\n\n\ndef complex_channel(m=NUM_ANT, n=NUM_ANT):\n real = np.random.randn(m, n)\n imag = np.random.randn(m, n)\n h = np.row_stack((np.column_stack((real, -imag)), np.column_stack((imag,\n real))))\n return h\n\n\n<mask token>\n\n\ndef signal_batch(batch_size=TIMES_SLOTS_PER_BATCH):\n s_batch = None\n random_indexes = np.random.uniform(low=0, high=QPSK_CANDIDATE_SIZE,\n size=batch_size)\n for t in range(batch_size):\n i = int(random_indexes[t])\n s = QPSK_CANDIDATES[:, i:i + 1].reshape([1, 2 * NUM_ANT, 1])\n s_batch = concatenate(s_batch, s)\n return s_batch\n\n\ndef random_distance(n, length):\n x = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n y = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n return np.sqrt(x ** 2 + y ** 2)\n\n\ndef zf_batch(y, h):\n h_t = np.transpose(h, axes=[0, 2, 1])\n f = np.linalg.inv(h_t @ h) @ h_t\n z = f @ y\n return np.where(z < 0, -1, 1) / np.sqrt(2)\n\n\n<mask token>\n\n\ndef maximum_likelihood_detect_bits(y, h):\n assert len(h.shape) == 3\n batch_size, m, n = h.shape\n s_mld = np.zeros([batch_size, n, 1])\n if True:\n dst = np.sum(np.square(y - h @ QPSK_CANDIDATES), axis=1)\n else:\n dst = None\n for j in range(QPSK_CANDIDATE_SIZE):\n s_cand = QPSK_CANDIDATES[:, j:j + 1].reshape([1, 2 * NUM_ANT, 1])\n dj = np.sum(np.square(y - h @ s_cand), axis=(1, 2)).reshape([-1, 1]\n )\n if dst is None:\n dst = dj\n else:\n dst = np.concatenate((dst, dj), axis=1)\n min_indexes = dst.argmin(1)\n for i, t in enumerate(min_indexes):\n s_mld[i:i + 1, :, :] = QPSK_CANDIDATES[:, t].reshape([1, 2 *\n NUM_ANT, 1])\n return get_bits(s_mld)\n", "step-3": "<mask token>\n\n\ndef get_bits(x):\n return np.where(x < 0, 0, 1)\n\n\ndef check_wrong_bits(bits, bits_estimated):\n return len(np.argwhere(bits != bits_estimated))\n\n\ndef mkdir(file_path):\n folder = os.path.dirname(file_path)\n if not os.path.exists(folder):\n os.makedirs(folder)\n\n\ndef 
mkfile(file_path):\n mkdir(file_path)\n filename = pathlib.Path(file_path)\n filename.touch(exist_ok=True)\n\n\ndef concatenate(total, part):\n return part if total is None else np.concatenate((total, part))\n\n\ndef complex_channel(m=NUM_ANT, n=NUM_ANT):\n real = np.random.randn(m, n)\n imag = np.random.randn(m, n)\n h = np.row_stack((np.column_stack((real, -imag)), np.column_stack((imag,\n real))))\n return h\n\n\ndef make_channel_batch():\n h_batch = None\n for _ in range(PACKETS_PER_BATCH):\n h = complex_channel().reshape([1, 2 * NUM_ANT, 2 * NUM_ANT])\n for _ in range(TIME_SLOTS_PER_PACKET):\n h_batch = concatenate(h_batch, h)\n return h_batch\n\n\ndef signal_batch(batch_size=TIMES_SLOTS_PER_BATCH):\n s_batch = None\n random_indexes = np.random.uniform(low=0, high=QPSK_CANDIDATE_SIZE,\n size=batch_size)\n for t in range(batch_size):\n i = int(random_indexes[t])\n s = QPSK_CANDIDATES[:, i:i + 1].reshape([1, 2 * NUM_ANT, 1])\n s_batch = concatenate(s_batch, s)\n return s_batch\n\n\ndef random_distance(n, length):\n x = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n y = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n return np.sqrt(x ** 2 + y ** 2)\n\n\ndef zf_batch(y, h):\n h_t = np.transpose(h, axes=[0, 2, 1])\n f = np.linalg.inv(h_t @ h) @ h_t\n z = f @ y\n return np.where(z < 0, -1, 1) / np.sqrt(2)\n\n\n<mask token>\n\n\ndef maximum_likelihood_detect_bits(y, h):\n assert len(h.shape) == 3\n batch_size, m, n = h.shape\n s_mld = np.zeros([batch_size, n, 1])\n if True:\n dst = np.sum(np.square(y - h @ QPSK_CANDIDATES), axis=1)\n else:\n dst = None\n for j in range(QPSK_CANDIDATE_SIZE):\n s_cand = QPSK_CANDIDATES[:, j:j + 1].reshape([1, 2 * NUM_ANT, 1])\n dj = np.sum(np.square(y - h @ s_cand), axis=(1, 2)).reshape([-1, 1]\n )\n if dst is None:\n dst = dj\n else:\n dst = np.concatenate((dst, dj), axis=1)\n min_indexes = dst.argmin(1)\n for i, t in enumerate(min_indexes):\n s_mld[i:i + 1, :, :] = QPSK_CANDIDATES[:, t].reshape([1, 2 *\n NUM_ANT, 1])\n return 
get_bits(s_mld)\n", "step-4": "import os\nimport pathlib\nfrom global_settings import *\n\n\ndef get_bits(x):\n return np.where(x < 0, 0, 1)\n\n\ndef check_wrong_bits(bits, bits_estimated):\n return len(np.argwhere(bits != bits_estimated))\n\n\ndef mkdir(file_path):\n folder = os.path.dirname(file_path)\n if not os.path.exists(folder):\n os.makedirs(folder)\n\n\ndef mkfile(file_path):\n mkdir(file_path)\n filename = pathlib.Path(file_path)\n filename.touch(exist_ok=True)\n\n\ndef concatenate(total, part):\n return part if total is None else np.concatenate((total, part))\n\n\ndef complex_channel(m=NUM_ANT, n=NUM_ANT):\n real = np.random.randn(m, n)\n imag = np.random.randn(m, n)\n h = np.row_stack((np.column_stack((real, -imag)), np.column_stack((imag,\n real))))\n return h\n\n\ndef make_channel_batch():\n h_batch = None\n for _ in range(PACKETS_PER_BATCH):\n h = complex_channel().reshape([1, 2 * NUM_ANT, 2 * NUM_ANT])\n for _ in range(TIME_SLOTS_PER_PACKET):\n h_batch = concatenate(h_batch, h)\n return h_batch\n\n\ndef signal_batch(batch_size=TIMES_SLOTS_PER_BATCH):\n s_batch = None\n random_indexes = np.random.uniform(low=0, high=QPSK_CANDIDATE_SIZE,\n size=batch_size)\n for t in range(batch_size):\n i = int(random_indexes[t])\n s = QPSK_CANDIDATES[:, i:i + 1].reshape([1, 2 * NUM_ANT, 1])\n s_batch = concatenate(s_batch, s)\n return s_batch\n\n\ndef random_distance(n, length):\n x = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n y = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n return np.sqrt(x ** 2 + y ** 2)\n\n\ndef zf_batch(y, h):\n h_t = np.transpose(h, axes=[0, 2, 1])\n f = np.linalg.inv(h_t @ h) @ h_t\n z = f @ y\n return np.where(z < 0, -1, 1) / np.sqrt(2)\n\n\ndef lmmse_batch(y, h):\n assert len(h.shape) == 3\n batch_size, m, n = h.shape\n eye = np.concatenate([np.eye(n).reshape([1, n, n]) * batch_size], axis=0)\n ht = np.transpose(h, axes=[0, 2, 1])\n z = np.linalg.inv(ht @ h + eye) @ ht @ y\n return np.where(z < 0, -1, 1) / np.sqrt(2)\n\n\ndef 
maximum_likelihood_detect_bits(y, h):\n assert len(h.shape) == 3\n batch_size, m, n = h.shape\n s_mld = np.zeros([batch_size, n, 1])\n if True:\n dst = np.sum(np.square(y - h @ QPSK_CANDIDATES), axis=1)\n else:\n dst = None\n for j in range(QPSK_CANDIDATE_SIZE):\n s_cand = QPSK_CANDIDATES[:, j:j + 1].reshape([1, 2 * NUM_ANT, 1])\n dj = np.sum(np.square(y - h @ s_cand), axis=(1, 2)).reshape([-1, 1]\n )\n if dst is None:\n dst = dj\n else:\n dst = np.concatenate((dst, dj), axis=1)\n min_indexes = dst.argmin(1)\n for i, t in enumerate(min_indexes):\n s_mld[i:i + 1, :, :] = QPSK_CANDIDATES[:, t].reshape([1, 2 *\n NUM_ANT, 1])\n return get_bits(s_mld)\n", "step-5": "import os\nimport pathlib\n\nfrom global_settings import *\n\n\ndef get_bits(x):\n return np.where(x < 0, 0, 1)\n\n\ndef check_wrong_bits(bits, bits_estimated):\n return len(np.argwhere(bits != bits_estimated))\n\n\ndef mkdir(file_path):\n folder = os.path.dirname(file_path)\n if not os.path.exists(folder):\n os.makedirs(folder)\n\n\ndef mkfile(file_path):\n mkdir(file_path)\n filename = pathlib.Path(file_path)\n filename.touch(exist_ok=True)\n\n\ndef concatenate(total, part):\n return part if total is None else np.concatenate((total, part))\n\n\ndef complex_channel(m=NUM_ANT, n=NUM_ANT):\n real = np.random.randn(m, n)\n imag = np.random.randn(m, n)\n h = np.row_stack(\n (\n np.column_stack((real, -imag)),\n np.column_stack((imag, real)),\n )\n )\n return h\n\n\ndef make_channel_batch():\n h_batch = None\n for _ in range(PACKETS_PER_BATCH):\n h = complex_channel().reshape([1, 2 * NUM_ANT, 2 * NUM_ANT])\n for _ in range(TIME_SLOTS_PER_PACKET):\n h_batch = concatenate(h_batch, h)\n return h_batch\n\n\ndef signal_batch(batch_size=TIMES_SLOTS_PER_BATCH):\n s_batch = None\n random_indexes = np.random.uniform(low=0, high=QPSK_CANDIDATE_SIZE, size=batch_size)\n for t in range(batch_size):\n i = int(random_indexes[t])\n s = QPSK_CANDIDATES[:, i:i + 1].reshape([1, 2 * NUM_ANT, 1])\n s_batch = concatenate(s_batch, 
s)\n return s_batch\n\n\ndef random_distance(n, length):\n x = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n y = np.random.uniform(-1, 1, [n, 1, 1]) * length / 2\n return np.sqrt(x ** 2 + y ** 2)\n\n\ndef zf_batch(y, h):\n h_t = np.transpose(h, axes=[0, 2, 1])\n f = np.linalg.inv(h_t @ h) @ h_t\n z = f @ y\n return np.where(z < 0, -1, 1) / np.sqrt(2)\n\n\ndef lmmse_batch(y, h):\n assert len(h.shape) == 3\n batch_size, m, n = h.shape\n eye = np.concatenate([np.eye(n).reshape([1, n, n]) * batch_size], axis=0)\n ht = np.transpose(h, axes=[0, 2, 1])\n z = np.linalg.inv(ht @ h + eye) @ ht @ y\n return np.where(z < 0, -1, 1) / np.sqrt(2)\n\n\ndef maximum_likelihood_detect_bits(y, h):\n assert len(h.shape) == 3\n batch_size, m, n = h.shape\n s_mld = np.zeros([batch_size, n, 1])\n\n if True:\n dst = np.sum(np.square(y - h @ QPSK_CANDIDATES), axis=1)\n else:\n dst = None\n for j in range(QPSK_CANDIDATE_SIZE):\n s_cand = QPSK_CANDIDATES[:, j:j + 1].reshape([1, 2 * NUM_ANT, 1])\n dj = np.sum(np.square(y - h @ s_cand), axis=(1, 2)).reshape([-1, 1])\n\n if dst is None:\n dst = dj\n else:\n dst = np.concatenate((dst, dj), axis=1)\n\n min_indexes = dst.argmin(1)\n for i, t in enumerate(min_indexes):\n s_mld[i:i + 1, :, :] = QPSK_CANDIDATES[:, t].reshape([1, 2 * NUM_ANT, 1])\n\n return get_bits(s_mld)\n\n\n", "step-ids": [ 7, 9, 11, 13, 14 ] }
[ 7, 9, 11, 13, 14 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> def flask_adapter(request: any, api_route: Type[Route]) ->any: """Adapter pattern for Flask :param - Flask Request :api_route: Composite Routes """ try: query_string_params = request.args.to_dict() if 'account_id' in query_string_params.keys(): body = None query_string_params['account_id'] = int(query_string_params[ 'account_id']) else: body = request.json except: http_error = HttpErrors.error_400() return HttpResponse(status_code=http_error['status_code'], body= http_error['body']) http_request = HttpRequest(header=request.headers, body=body, query= query_string_params) try: response = api_route.route(http_request) except IntegrityError: http_error = HttpErrors.error_400() return HttpResponse(status_code=http_error['status_code'], body= http_error['body']) except Exception as exc: print(exc) http_error = HttpErrors.error_500() return HttpResponse(status_code=http_error['status_code'], body= http_error['body']) return response <|reserved_special_token_1|> from typing import Type from sqlalchemy.exc import IntegrityError from src.main.interface import RouteInterface as Route from src.presenters.helpers import HttpRequest, HttpResponse from src.presenters.errors import HttpErrors def flask_adapter(request: any, api_route: Type[Route]) ->any: """Adapter pattern for Flask :param - Flask Request :api_route: Composite Routes """ try: query_string_params = request.args.to_dict() if 'account_id' in query_string_params.keys(): body = None query_string_params['account_id'] = int(query_string_params[ 'account_id']) else: body = request.json except: http_error = HttpErrors.error_400() return HttpResponse(status_code=http_error['status_code'], body= http_error['body']) http_request = HttpRequest(header=request.headers, body=body, query= query_string_params) try: response = api_route.route(http_request) except IntegrityError: http_error = HttpErrors.error_400() return 
HttpResponse(status_code=http_error['status_code'], body= http_error['body']) except Exception as exc: print(exc) http_error = HttpErrors.error_500() return HttpResponse(status_code=http_error['status_code'], body= http_error['body']) return response <|reserved_special_token_1|> from typing import Type from sqlalchemy.exc import IntegrityError from src.main.interface import RouteInterface as Route from src.presenters.helpers import HttpRequest, HttpResponse from src.presenters.errors import HttpErrors def flask_adapter(request: any, api_route: Type[Route]) -> any: """Adapter pattern for Flask :param - Flask Request :api_route: Composite Routes """ try: query_string_params = request.args.to_dict() if "account_id" in query_string_params.keys(): body = None query_string_params["account_id"] = int(query_string_params["account_id"]) else: body = request.json except: http_error = HttpErrors.error_400() return HttpResponse( status_code=http_error["status_code"], body=http_error["body"] ) http_request = HttpRequest( header=request.headers, body=body, query=query_string_params ) try: response = api_route.route(http_request) except IntegrityError: http_error = HttpErrors.error_400() return HttpResponse( status_code=http_error["status_code"], body=http_error["body"] ) except Exception as exc: print(exc) http_error = HttpErrors.error_500() return HttpResponse( status_code=http_error["status_code"], body=http_error["body"] ) return response
flexible
{ "blob_id": "3212bb7df990ad7d075b8ca49a99e1072eab2a90", "index": 595, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\ndef flask_adapter(request: any, api_route: Type[Route]) ->any:\n \"\"\"Adapter pattern for Flask\n :param - Flask Request\n :api_route: Composite Routes\n \"\"\"\n try:\n query_string_params = request.args.to_dict()\n if 'account_id' in query_string_params.keys():\n body = None\n query_string_params['account_id'] = int(query_string_params[\n 'account_id'])\n else:\n body = request.json\n except:\n http_error = HttpErrors.error_400()\n return HttpResponse(status_code=http_error['status_code'], body=\n http_error['body'])\n http_request = HttpRequest(header=request.headers, body=body, query=\n query_string_params)\n try:\n response = api_route.route(http_request)\n except IntegrityError:\n http_error = HttpErrors.error_400()\n return HttpResponse(status_code=http_error['status_code'], body=\n http_error['body'])\n except Exception as exc:\n print(exc)\n http_error = HttpErrors.error_500()\n return HttpResponse(status_code=http_error['status_code'], body=\n http_error['body'])\n return response\n", "step-3": "from typing import Type\nfrom sqlalchemy.exc import IntegrityError\nfrom src.main.interface import RouteInterface as Route\nfrom src.presenters.helpers import HttpRequest, HttpResponse\nfrom src.presenters.errors import HttpErrors\n\n\ndef flask_adapter(request: any, api_route: Type[Route]) ->any:\n \"\"\"Adapter pattern for Flask\n :param - Flask Request\n :api_route: Composite Routes\n \"\"\"\n try:\n query_string_params = request.args.to_dict()\n if 'account_id' in query_string_params.keys():\n body = None\n query_string_params['account_id'] = int(query_string_params[\n 'account_id'])\n else:\n body = request.json\n except:\n http_error = HttpErrors.error_400()\n return HttpResponse(status_code=http_error['status_code'], body=\n http_error['body'])\n http_request = HttpRequest(header=request.headers, body=body, query=\n 
query_string_params)\n try:\n response = api_route.route(http_request)\n except IntegrityError:\n http_error = HttpErrors.error_400()\n return HttpResponse(status_code=http_error['status_code'], body=\n http_error['body'])\n except Exception as exc:\n print(exc)\n http_error = HttpErrors.error_500()\n return HttpResponse(status_code=http_error['status_code'], body=\n http_error['body'])\n return response\n", "step-4": "from typing import Type\nfrom sqlalchemy.exc import IntegrityError\nfrom src.main.interface import RouteInterface as Route\nfrom src.presenters.helpers import HttpRequest, HttpResponse\nfrom src.presenters.errors import HttpErrors\n\n\ndef flask_adapter(request: any, api_route: Type[Route]) -> any:\n \"\"\"Adapter pattern for Flask\n :param - Flask Request\n :api_route: Composite Routes\n \"\"\"\n\n try:\n query_string_params = request.args.to_dict()\n\n if \"account_id\" in query_string_params.keys():\n body = None\n query_string_params[\"account_id\"] = int(query_string_params[\"account_id\"])\n else:\n body = request.json\n\n except:\n http_error = HttpErrors.error_400()\n return HttpResponse(\n status_code=http_error[\"status_code\"], body=http_error[\"body\"]\n )\n\n http_request = HttpRequest(\n header=request.headers, body=body, query=query_string_params\n )\n\n try:\n response = api_route.route(http_request)\n except IntegrityError:\n http_error = HttpErrors.error_400()\n return HttpResponse(\n status_code=http_error[\"status_code\"], body=http_error[\"body\"]\n )\n except Exception as exc:\n print(exc)\n http_error = HttpErrors.error_500()\n return HttpResponse(\n status_code=http_error[\"status_code\"], body=http_error[\"body\"]\n )\n\n return response\n", "step-5": null, "step-ids": [ 0, 1, 2, 3 ] }
[ 0, 1, 2, 3 ]
""" WINRM Module to connect to windows host """ from winrm.protocol import Protocol from lib import logger class WINRM(object): """ WINRM Module to connect to windows host """ def __init__(self, host_ip, usr, pwd): """ - **parameters**, **types**, **return** and **return types**:: :param os_type : windows/linux :param host_ip: ip address of the Windows host :param usr: username of the Windows Host :param pwd: Password of the Windows Host :type os_type: string :type host_ip: string :type u_name: string :type pwd: string """ self.os_type = 'windows' self.host_ip = host_ip self.usr = usr self.pwd = pwd self.shell_id = None self.host_win_ip = None self.conn = None def connect(self): """ Method to connect to a Windows machine. """ try: self.host_win_ip = "http://" + self.host_ip + ":5985/wsman" self.conn = Protocol( endpoint=self.host_win_ip, transport="ntlm", username=self.usr, password=self.pwd, server_cert_validation="ignore") logger.warn("Connecting Windows ...") self.shell_id = self.conn.open_shell() logger.warn(self.shell_id) logger.warn('Connected to Windows.') except Exception as error: msg_exception_error = "Exception raised: %s " % error raise(msg_exception_error) def run_cmd(self, cmd): """ Generic Method for passing command and run it on windows machine and return output. - **parameters**, **types**, **return** and **return types**:: :param cmd: Command to be executed on windows machine. :return stdout,stderr,status_code : output,errormessage and statuscode of output. :rtype stdout,stderr,status_code: tuple """ if 'shell_id' in dir(self): #checking for the shell_id created in winrm object command_id = self.conn.run_command(self.shell_id, cmd) std_out, std_err, status_code = self.conn.get_command_output( self.shell_id, command_id) #runs the command and returns output,error,statuscode return std_out, std_err, status_code
normal
{ "blob_id": "96ac9088650490a7da00c7a20f634b76e673ca2d", "index": 1174, "step-1": "<mask token>\n\n\nclass WINRM(object):\n <mask token>\n <mask token>\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n", "step-2": "<mask token>\n\n\nclass WINRM(object):\n <mask token>\n\n def __init__(self, host_ip, usr, pwd):\n \"\"\"\n - **parameters**, **types**, **return** and **return types**::\n :param os_type : windows/linux\n :param host_ip: ip address of the Windows host\n :param usr: username of the Windows Host\n :param pwd: Password of the Windows Host\n :type os_type: string\n :type host_ip: string\n :type u_name: string\n :type pwd: string\n \"\"\"\n self.os_type = 'windows'\n self.host_ip = host_ip\n self.usr = usr\n self.pwd = pwd\n self.shell_id = None\n self.host_win_ip = None\n self.conn = None\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n 
\"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n", "step-3": "<mask token>\n\n\nclass WINRM(object):\n \"\"\"\n WINRM Module to connect to windows host\n \"\"\"\n\n def __init__(self, host_ip, usr, pwd):\n \"\"\"\n - **parameters**, **types**, **return** and **return types**::\n :param os_type : windows/linux\n :param host_ip: ip address of the Windows host\n :param usr: username of the Windows Host\n :param pwd: Password of the Windows Host\n :type os_type: string\n :type host_ip: string\n :type u_name: string\n :type pwd: string\n \"\"\"\n self.os_type = 'windows'\n self.host_ip = host_ip\n self.usr = usr\n self.pwd = pwd\n self.shell_id = None\n self.host_win_ip = None\n self.conn = None\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n 
server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n", "step-4": "<mask token>\nfrom winrm.protocol import Protocol\nfrom lib import logger\n\n\nclass WINRM(object):\n \"\"\"\n WINRM Module to connect to windows host\n \"\"\"\n\n def __init__(self, host_ip, usr, pwd):\n \"\"\"\n - **parameters**, **types**, **return** and **return types**::\n :param os_type : windows/linux\n :param host_ip: ip address of the Windows host\n :param usr: username of the Windows Host\n :param pwd: Password of the Windows Host\n :type os_type: string\n :type host_ip: string\n :type u_name: string\n :type pwd: string\n \"\"\"\n self.os_type = 'windows'\n self.host_ip = host_ip\n self.usr = usr\n self.pwd = pwd\n self.shell_id = None\n self.host_win_ip = None\n self.conn = None\n\n def connect(self):\n \"\"\"\n Method to connect to a Windows machine.\n \"\"\"\n try:\n self.host_win_ip = 'http://' + self.host_ip + ':5985/wsman'\n self.conn = Protocol(endpoint=self.host_win_ip, transport=\n 'ntlm', username=self.usr, password=self.pwd,\n server_cert_validation='ignore')\n logger.warn('Connecting Windows ...')\n self.shell_id = self.conn.open_shell()\n 
logger.warn(self.shell_id)\n logger.warn('Connected to Windows.')\n except Exception as error:\n msg_exception_error = 'Exception raised: %s ' % error\n raise msg_exception_error\n\n def run_cmd(self, cmd):\n \"\"\"\n Generic Method for passing command and run it on windows machine and return output.\n - **parameters**, **types**, **return** and **return types**::\n :param cmd: Command to be executed on windows machine.\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\n :rtype stdout,stderr,status_code: tuple\n \"\"\"\n if 'shell_id' in dir(self):\n command_id = self.conn.run_command(self.shell_id, cmd)\n std_out, std_err, status_code = self.conn.get_command_output(self\n .shell_id, command_id)\n return std_out, std_err, status_code\n", "step-5": "\"\"\"\r\nWINRM Module to connect to windows host\r\n\"\"\"\r\nfrom winrm.protocol import Protocol\r\nfrom lib import logger\r\n\r\n\r\nclass WINRM(object):\r\n \"\"\"\r\n WINRM Module to connect to windows host\r\n \"\"\"\r\n def __init__(self, host_ip, usr, pwd):\r\n \"\"\"\r\n - **parameters**, **types**, **return** and **return types**::\r\n :param os_type : windows/linux\r\n :param host_ip: ip address of the Windows host\r\n :param usr: username of the Windows Host\r\n :param pwd: Password of the Windows Host\r\n :type os_type: string\r\n :type host_ip: string\r\n :type u_name: string\r\n :type pwd: string\r\n \"\"\"\r\n self.os_type = 'windows'\r\n self.host_ip = host_ip\r\n self.usr = usr\r\n self.pwd = pwd\r\n self.shell_id = None\r\n self.host_win_ip = None\r\n self.conn = None\r\n\r\n def connect(self):\r\n \"\"\"\r\n Method to connect to a Windows machine.\r\n \"\"\"\r\n try:\r\n self.host_win_ip = \"http://\" + self.host_ip + \":5985/wsman\"\r\n self.conn = Protocol(\r\n endpoint=self.host_win_ip,\r\n transport=\"ntlm\",\r\n username=self.usr,\r\n password=self.pwd,\r\n server_cert_validation=\"ignore\")\r\n logger.warn(\"Connecting Windows ...\")\r\n self.shell_id = 
self.conn.open_shell()\r\n logger.warn(self.shell_id)\r\n logger.warn('Connected to Windows.')\r\n except Exception as error:\r\n msg_exception_error = \"Exception raised: %s \" % error\r\n raise(msg_exception_error)\r\n\r\n def run_cmd(self, cmd):\r\n \"\"\"\r\n Generic Method for passing command and run it on windows machine and return output.\r\n - **parameters**, **types**, **return** and **return types**::\r\n :param cmd: Command to be executed on windows machine.\r\n :return stdout,stderr,status_code : output,errormessage and statuscode of output.\r\n :rtype stdout,stderr,status_code: tuple\r\n \"\"\"\r\n if 'shell_id' in dir(self):\r\n #checking for the shell_id created in winrm object\r\n command_id = self.conn.run_command(self.shell_id, cmd)\r\n std_out, std_err, status_code = self.conn.get_command_output(\r\n self.shell_id, command_id)\r\n #runs the command and returns output,error,statuscode\r\n return std_out, std_err, status_code\r\n", "step-ids": [ 3, 4, 5, 6, 7 ] }
[ 3, 4, 5, 6, 7 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> racine.title('listPhoto') <|reserved_special_token_0|> for index in range(0, len(dirImage)): img = ImageCoord(cheminDossier + '\\' + dirImage[index]) if img.has_coord(): listImage.append(img) listImage.sort() <|reserved_special_token_1|> <|reserved_special_token_0|> racine = tkinter.Tk() racine.title('listPhoto') racine.directory = filedialog.askdirectory() cheminDossier = racine.directory dirImage = os.listdir(cheminDossier) listImage = [] for index in range(0, len(dirImage)): img = ImageCoord(cheminDossier + '\\' + dirImage[index]) if img.has_coord(): listImage.append(img) listImage.sort() <|reserved_special_token_1|> from ImageCoord import ImageCoord import os import sys from folium.features import DivIcon racine = tkinter.Tk() racine.title('listPhoto') racine.directory = filedialog.askdirectory() cheminDossier = racine.directory dirImage = os.listdir(cheminDossier) listImage = [] for index in range(0, len(dirImage)): img = ImageCoord(cheminDossier + '\\' + dirImage[index]) if img.has_coord(): listImage.append(img) listImage.sort() <|reserved_special_token_1|> from ImageCoord import ImageCoord import os import sys from folium.features import DivIcon # Chemin du dossier ou l'on recupere les images racine = tkinter.Tk() racine.title("listPhoto") racine.directory = filedialog.askdirectory() cheminDossier = racine.directory dirImage = os.listdir(cheminDossier) listImage = [] # Parcour du dossier d'images for index in range(0,len(dirImage)) : #parcours du dossier img = ImageCoord(cheminDossier + '\\' + dirImage[index]) #Insertion des image avec coordonné if img.has_coord() : listImage.append(img) # Tri des images listImage.sort()
flexible
{ "blob_id": "f5b8d8c291d18c6f320704a89985acbcae97ca2f", "index": 2954, "step-1": "<mask token>\n", "step-2": "<mask token>\nracine.title('listPhoto')\n<mask token>\nfor index in range(0, len(dirImage)):\n img = ImageCoord(cheminDossier + '\\\\' + dirImage[index])\n if img.has_coord():\n listImage.append(img)\nlistImage.sort()\n", "step-3": "<mask token>\nracine = tkinter.Tk()\nracine.title('listPhoto')\nracine.directory = filedialog.askdirectory()\ncheminDossier = racine.directory\ndirImage = os.listdir(cheminDossier)\nlistImage = []\nfor index in range(0, len(dirImage)):\n img = ImageCoord(cheminDossier + '\\\\' + dirImage[index])\n if img.has_coord():\n listImage.append(img)\nlistImage.sort()\n", "step-4": "from ImageCoord import ImageCoord\nimport os\nimport sys\nfrom folium.features import DivIcon\nracine = tkinter.Tk()\nracine.title('listPhoto')\nracine.directory = filedialog.askdirectory()\ncheminDossier = racine.directory\ndirImage = os.listdir(cheminDossier)\nlistImage = []\nfor index in range(0, len(dirImage)):\n img = ImageCoord(cheminDossier + '\\\\' + dirImage[index])\n if img.has_coord():\n listImage.append(img)\nlistImage.sort()\n", "step-5": "from ImageCoord import ImageCoord\nimport os\nimport sys\nfrom folium.features import DivIcon\n\n# Chemin du dossier ou l'on recupere les images\n\nracine = tkinter.Tk()\nracine.title(\"listPhoto\")\nracine.directory = filedialog.askdirectory()\ncheminDossier = racine.directory\ndirImage = os.listdir(cheminDossier)\n\nlistImage = []\n\n# Parcour du dossier d'images\nfor index in range(0,len(dirImage)) :\n #parcours du dossier\n img = ImageCoord(cheminDossier + '\\\\' + dirImage[index])\n\n #Insertion des image avec coordonné\n if img.has_coord() :\n listImage.append(img)\n\n# Tri des images\nlistImage.sort()\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
import math import pandas as pd from matplotlib import pyplot as plt tests = [ { "task": "listsort", "prompt": "examples", "length": 5, "shots": 0, "accuracy": 0.28, "trials": 50}, { "task": "listsort", "prompt": "examples", "length": 5, "shots": 1, "accuracy": 0.40, "trials": 50}, { "task": "listsort", "prompt": "examples", "length": 5, "shots": 3, "accuracy": 0.30, "trials": 50}, { "task": "listsort", "prompt": "examples", "length": 5, "shots": 5, "accuracy": 0.28, "trials": 50}, { "task": "listsort", "prompt": "examples", "length": 5, "shots": 7, "accuracy": 0.32, "trials": 50}, { "task": "listsort", "prompt": "examples", "length": 5, "shots": 10, "accuracy": 0.50, "trials": 50}, { "task": "listsort", "prompt": "examples", "length": 5, "shots": 13, "accuracy": 0.36, "trials": 50}, { "task": "listsort", "prompt": "examples", "length": 5, "shots": 16, "accuracy": 0.22, "trials": 50}, { "task": "listsort", "prompt": "examples", "length": 5, "shots": 32, "accuracy": 0.20, "trials": 50}, { "task": "listsort", "prompt": "code", "length": 5, "shots": 0, "accuracy": 0.76, "trials": 50}, { "task": "listsort", "prompt": "code", "length": 5, "shots": 1, "accuracy": 0.66, "trials": 50}, { "task": "listsort", "prompt": "code", "length": 5, "shots": 3, "accuracy": 0.46, "trials": 50}, { "task": "listsort", "prompt": "code", "length": 5, "shots": 5, "accuracy": 0.44, "trials": 50}, { "task": "listsort", "prompt": "code", "length": 5, "shots": 7, "accuracy": 0.44, "trials": 50}, { "task": "listsort", "prompt": "code", "length": 5, "shots": 10, "accuracy": 0.42, "trials": 50}, { "task": "listsort", "prompt": "code", "length": 5, "shots": 13, "accuracy": 0.30, "trials": 50}, { "task": "listsort", "prompt": "code", "length": 5, "shots": 16, "accuracy": 0.32, "trials": 50}, # { "task": "listsort", "prompt": "examples", "length": 10, "shots": 0, "accuracy": 0.04, "trials": 50}, # { "task": "listsort", "prompt": "examples", "length": 10, "shots": 1, "accuracy": 0.04, "trials": 50}, # 
{ "task": "listsort", "prompt": "examples", "length": 10, "shots": 10, "accuracy": 0.00, "trials": 50}, # { "task": "listsort", "prompt": "examples", "length": 10, "shots": 32, "accuracy": 0.00, "trials": 50}, # { "task": "listsort", "prompt": "code", "length": 10, "shots": 0, "accuracy": 0.04, "trials": 50}, # { "task": "listsort", "prompt": "code", "length": 10, "shots": 1, "accuracy": 0.14, "trials": 50}, # { "task": "listsort", "prompt": "code", "length": 10, "shots": 10, "accuracy": 0.00, "trials": 50}, ] for d in tests: d["code"] = d["prompt"] == "code" d["correct"] = d["accuracy"] * d["trials"] p = d["accuracy"] # 80% confidence: 0.842 # 95% confidence: d["err"] = 0.842 * math.sqrt(p * (1-p) / d["trials"]) df = pd.DataFrame(tests) plt.style.use('dark_background') examples_df = df[df["prompt"] == "examples"] plt.errorbar('shots', 'accuracy', yerr=examples_df["err"], data=examples_df, marker='o', capsize=2, color='mediumorchid', markersize=4, linewidth=1, linestyle='-', label="Examples") code_df = df[df["prompt"] == "code"] plt.errorbar('shots', 'accuracy', yerr=code_df["err"], data=code_df, marker='o', capsize=4, color='darkcyan', markersize=4, linewidth=1, label="Coding") plt.legend() plt.xlabel("Shots") plt.ylabel("Accuracy") plt.title("List Sort Length 5") # plt.savefig('Fig2.png', dpi=300, bbox_inches='tight') plt.show() # seaborn.lineplot(data=df, x="shots", y="correct", hue="prompt", ci="sd") # length 99 # { "task": "listsort", "prompt": "examples", "length": 5, "shots" 10, "accuracy": 0.46, "trials": 50}, # { "task": "listsort", "prompt": "code", "length": 5, "shots": 0, "accuracy": 0.50, "trials": 50}, # { "task": "listsort", "prompt": "code", "length": 10, "shots": 0, "accuracy": 0.02, "trials": 50},
normal
{ "blob_id": "6b6397fd18848ffa2ae9c0ec1443d20f2cbeb8b0", "index": 3637, "step-1": "<mask token>\n", "step-2": "<mask token>\nfor d in tests:\n d['code'] = d['prompt'] == 'code'\n d['correct'] = d['accuracy'] * d['trials']\n p = d['accuracy']\n d['err'] = 0.842 * math.sqrt(p * (1 - p) / d['trials'])\n<mask token>\nplt.style.use('dark_background')\n<mask token>\nplt.errorbar('shots', 'accuracy', yerr=examples_df['err'], data=examples_df,\n marker='o', capsize=2, color='mediumorchid', markersize=4, linewidth=1,\n linestyle='-', label='Examples')\n<mask token>\nplt.errorbar('shots', 'accuracy', yerr=code_df['err'], data=code_df, marker\n ='o', capsize=4, color='darkcyan', markersize=4, linewidth=1, label=\n 'Coding')\nplt.legend()\nplt.xlabel('Shots')\nplt.ylabel('Accuracy')\nplt.title('List Sort Length 5')\nplt.show()\n", "step-3": "<mask token>\ntests = [{'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 0,\n 'accuracy': 0.28, 'trials': 50}, {'task': 'listsort', 'prompt':\n 'examples', 'length': 5, 'shots': 1, 'accuracy': 0.4, 'trials': 50}, {\n 'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 3,\n 'accuracy': 0.3, 'trials': 50}, {'task': 'listsort', 'prompt':\n 'examples', 'length': 5, 'shots': 5, 'accuracy': 0.28, 'trials': 50}, {\n 'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 7,\n 'accuracy': 0.32, 'trials': 50}, {'task': 'listsort', 'prompt':\n 'examples', 'length': 5, 'shots': 10, 'accuracy': 0.5, 'trials': 50}, {\n 'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 13,\n 'accuracy': 0.36, 'trials': 50}, {'task': 'listsort', 'prompt':\n 'examples', 'length': 5, 'shots': 16, 'accuracy': 0.22, 'trials': 50},\n {'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 32,\n 'accuracy': 0.2, 'trials': 50}, {'task': 'listsort', 'prompt': 'code',\n 'length': 5, 'shots': 0, 'accuracy': 0.76, 'trials': 50}, {'task':\n 'listsort', 'prompt': 'code', 'length': 5, 'shots': 1, 'accuracy': 0.66,\n 'trials': 
50}, {'task': 'listsort', 'prompt': 'code', 'length': 5,\n 'shots': 3, 'accuracy': 0.46, 'trials': 50}, {'task': 'listsort',\n 'prompt': 'code', 'length': 5, 'shots': 5, 'accuracy': 0.44, 'trials': \n 50}, {'task': 'listsort', 'prompt': 'code', 'length': 5, 'shots': 7,\n 'accuracy': 0.44, 'trials': 50}, {'task': 'listsort', 'prompt': 'code',\n 'length': 5, 'shots': 10, 'accuracy': 0.42, 'trials': 50}, {'task':\n 'listsort', 'prompt': 'code', 'length': 5, 'shots': 13, 'accuracy': 0.3,\n 'trials': 50}, {'task': 'listsort', 'prompt': 'code', 'length': 5,\n 'shots': 16, 'accuracy': 0.32, 'trials': 50}]\nfor d in tests:\n d['code'] = d['prompt'] == 'code'\n d['correct'] = d['accuracy'] * d['trials']\n p = d['accuracy']\n d['err'] = 0.842 * math.sqrt(p * (1 - p) / d['trials'])\ndf = pd.DataFrame(tests)\nplt.style.use('dark_background')\nexamples_df = df[df['prompt'] == 'examples']\nplt.errorbar('shots', 'accuracy', yerr=examples_df['err'], data=examples_df,\n marker='o', capsize=2, color='mediumorchid', markersize=4, linewidth=1,\n linestyle='-', label='Examples')\ncode_df = df[df['prompt'] == 'code']\nplt.errorbar('shots', 'accuracy', yerr=code_df['err'], data=code_df, marker\n ='o', capsize=4, color='darkcyan', markersize=4, linewidth=1, label=\n 'Coding')\nplt.legend()\nplt.xlabel('Shots')\nplt.ylabel('Accuracy')\nplt.title('List Sort Length 5')\nplt.show()\n", "step-4": "import math\nimport pandas as pd\nfrom matplotlib import pyplot as plt\ntests = [{'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 0,\n 'accuracy': 0.28, 'trials': 50}, {'task': 'listsort', 'prompt':\n 'examples', 'length': 5, 'shots': 1, 'accuracy': 0.4, 'trials': 50}, {\n 'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 3,\n 'accuracy': 0.3, 'trials': 50}, {'task': 'listsort', 'prompt':\n 'examples', 'length': 5, 'shots': 5, 'accuracy': 0.28, 'trials': 50}, {\n 'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 7,\n 'accuracy': 0.32, 'trials': 50}, 
{'task': 'listsort', 'prompt':\n 'examples', 'length': 5, 'shots': 10, 'accuracy': 0.5, 'trials': 50}, {\n 'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 13,\n 'accuracy': 0.36, 'trials': 50}, {'task': 'listsort', 'prompt':\n 'examples', 'length': 5, 'shots': 16, 'accuracy': 0.22, 'trials': 50},\n {'task': 'listsort', 'prompt': 'examples', 'length': 5, 'shots': 32,\n 'accuracy': 0.2, 'trials': 50}, {'task': 'listsort', 'prompt': 'code',\n 'length': 5, 'shots': 0, 'accuracy': 0.76, 'trials': 50}, {'task':\n 'listsort', 'prompt': 'code', 'length': 5, 'shots': 1, 'accuracy': 0.66,\n 'trials': 50}, {'task': 'listsort', 'prompt': 'code', 'length': 5,\n 'shots': 3, 'accuracy': 0.46, 'trials': 50}, {'task': 'listsort',\n 'prompt': 'code', 'length': 5, 'shots': 5, 'accuracy': 0.44, 'trials': \n 50}, {'task': 'listsort', 'prompt': 'code', 'length': 5, 'shots': 7,\n 'accuracy': 0.44, 'trials': 50}, {'task': 'listsort', 'prompt': 'code',\n 'length': 5, 'shots': 10, 'accuracy': 0.42, 'trials': 50}, {'task':\n 'listsort', 'prompt': 'code', 'length': 5, 'shots': 13, 'accuracy': 0.3,\n 'trials': 50}, {'task': 'listsort', 'prompt': 'code', 'length': 5,\n 'shots': 16, 'accuracy': 0.32, 'trials': 50}]\nfor d in tests:\n d['code'] = d['prompt'] == 'code'\n d['correct'] = d['accuracy'] * d['trials']\n p = d['accuracy']\n d['err'] = 0.842 * math.sqrt(p * (1 - p) / d['trials'])\ndf = pd.DataFrame(tests)\nplt.style.use('dark_background')\nexamples_df = df[df['prompt'] == 'examples']\nplt.errorbar('shots', 'accuracy', yerr=examples_df['err'], data=examples_df,\n marker='o', capsize=2, color='mediumorchid', markersize=4, linewidth=1,\n linestyle='-', label='Examples')\ncode_df = df[df['prompt'] == 'code']\nplt.errorbar('shots', 'accuracy', yerr=code_df['err'], data=code_df, marker\n ='o', capsize=4, color='darkcyan', markersize=4, linewidth=1, label=\n 'Coding')\nplt.legend()\nplt.xlabel('Shots')\nplt.ylabel('Accuracy')\nplt.title('List Sort Length 5')\nplt.show()\n", 
"step-5": "import math\n\nimport pandas as pd\nfrom matplotlib import pyplot as plt\n\ntests = [\n { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\": 0, \"accuracy\": 0.28, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\": 1, \"accuracy\": 0.40, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\": 3, \"accuracy\": 0.30, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\": 5, \"accuracy\": 0.28, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\": 7, \"accuracy\": 0.32, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\": 10, \"accuracy\": 0.50, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\": 13, \"accuracy\": 0.36, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\": 16, \"accuracy\": 0.22, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\": 32, \"accuracy\": 0.20, \"trials\": 50},\n\n { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 5, \"shots\": 0, \"accuracy\": 0.76, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 5, \"shots\": 1, \"accuracy\": 0.66, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 5, \"shots\": 3, \"accuracy\": 0.46, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 5, \"shots\": 5, \"accuracy\": 0.44, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 5, \"shots\": 7, \"accuracy\": 0.44, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 5, \"shots\": 10, \"accuracy\": 0.42, \"trials\": 50},\n { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 5, \"shots\": 13, \"accuracy\": 0.30, \"trials\": 50},\n 
{ \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 5, \"shots\": 16, \"accuracy\": 0.32, \"trials\": 50},\n\n\n # { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 10, \"shots\": 0, \"accuracy\": 0.04, \"trials\": 50},\n # { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 10, \"shots\": 1, \"accuracy\": 0.04, \"trials\": 50},\n # { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 10, \"shots\": 10, \"accuracy\": 0.00, \"trials\": 50},\n # { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 10, \"shots\": 32, \"accuracy\": 0.00, \"trials\": 50},\n # { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 10, \"shots\": 0, \"accuracy\": 0.04, \"trials\": 50},\n # { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 10, \"shots\": 1, \"accuracy\": 0.14, \"trials\": 50},\n # { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 10, \"shots\": 10, \"accuracy\": 0.00, \"trials\": 50},\n]\nfor d in tests:\n d[\"code\"] = d[\"prompt\"] == \"code\"\n d[\"correct\"] = d[\"accuracy\"] * d[\"trials\"]\n p = d[\"accuracy\"]\n # 80% confidence: 0.842\n # 95% confidence:\n d[\"err\"] = 0.842 * math.sqrt(p * (1-p) / d[\"trials\"])\n\ndf = pd.DataFrame(tests)\n\n\nplt.style.use('dark_background')\nexamples_df = df[df[\"prompt\"] == \"examples\"]\nplt.errorbar('shots', 'accuracy', yerr=examples_df[\"err\"], data=examples_df, marker='o', capsize=2,\n color='mediumorchid', markersize=4, linewidth=1, linestyle='-', label=\"Examples\")\n\ncode_df = df[df[\"prompt\"] == \"code\"]\nplt.errorbar('shots', 'accuracy', yerr=code_df[\"err\"], data=code_df, marker='o', capsize=4,\n color='darkcyan', markersize=4, linewidth=1, label=\"Coding\")\n\n\nplt.legend()\nplt.xlabel(\"Shots\")\nplt.ylabel(\"Accuracy\")\nplt.title(\"List Sort Length 5\")\n# plt.savefig('Fig2.png', dpi=300, bbox_inches='tight')\nplt.show()\n\n\n\n# seaborn.lineplot(data=df, x=\"shots\", y=\"correct\", hue=\"prompt\", ci=\"sd\")\n\n\n\n\n# length 
99\n# { \"task\": \"listsort\", \"prompt\": \"examples\", \"length\": 5, \"shots\" 10, \"accuracy\": 0.46, \"trials\": 50},\n# { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 5, \"shots\": 0, \"accuracy\": 0.50, \"trials\": 50},\n# { \"task\": \"listsort\", \"prompt\": \"code\", \"length\": 10, \"shots\": 0, \"accuracy\": 0.02, \"trials\": 50},\n\n", "step-ids": [ 0, 1, 2, 3, 4 ] }
[ 0, 1, 2, 3, 4 ]
<|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Animation: def __init__(self): self.next_frame = pygame.time.get_ticks() self.frame = 0 self.frame_time = 1000 // ANIMATION_RATE <|reserved_special_token_0|> <|reserved_special_token_1|> <|reserved_special_token_0|> class Animation: def __init__(self): self.next_frame = pygame.time.get_ticks() self.frame = 0 self.frame_time = 1000 // ANIMATION_RATE def anim_sprite(self): if pygame.time.get_ticks() > self.next_frame: self.frame = (self.frame + 1) % (24 * ANIMATION_RATE) self.next_frame += self.frame_time return self.frame <|reserved_special_token_1|> from zelda_utilities.constants import * class Animation: def __init__(self): self.next_frame = pygame.time.get_ticks() self.frame = 0 self.frame_time = 1000 // ANIMATION_RATE def anim_sprite(self): if pygame.time.get_ticks() > self.next_frame: self.frame = (self.frame + 1) % (24 * ANIMATION_RATE) self.next_frame += self.frame_time return self.frame <|reserved_special_token_1|> # Import other modules from zelda_utilities.constants import * # Helps establish the current frame for sprite animation/image changing class Animation: def __init__(self): # Animation clock self.next_frame = pygame.time.get_ticks() # Starting frame self.frame = 0 # ~12 frames/sec (1000ms // 12) self.frame_time = 1000 // ANIMATION_RATE def anim_sprite(self): if pygame.time.get_ticks() > self.next_frame: self.frame = (self.frame + 1) % (24 * ANIMATION_RATE) # reset > 20 sec self.next_frame += self.frame_time return self.frame
flexible
{ "blob_id": "0b36bf9ac7887101be5503a0edce19e1111e5ca0", "index": 6607, "step-1": "<mask token>\n", "step-2": "<mask token>\n\n\nclass Animation:\n\n def __init__(self):\n self.next_frame = pygame.time.get_ticks()\n self.frame = 0\n self.frame_time = 1000 // ANIMATION_RATE\n <mask token>\n", "step-3": "<mask token>\n\n\nclass Animation:\n\n def __init__(self):\n self.next_frame = pygame.time.get_ticks()\n self.frame = 0\n self.frame_time = 1000 // ANIMATION_RATE\n\n def anim_sprite(self):\n if pygame.time.get_ticks() > self.next_frame:\n self.frame = (self.frame + 1) % (24 * ANIMATION_RATE)\n self.next_frame += self.frame_time\n return self.frame\n", "step-4": "from zelda_utilities.constants import *\n\n\nclass Animation:\n\n def __init__(self):\n self.next_frame = pygame.time.get_ticks()\n self.frame = 0\n self.frame_time = 1000 // ANIMATION_RATE\n\n def anim_sprite(self):\n if pygame.time.get_ticks() > self.next_frame:\n self.frame = (self.frame + 1) % (24 * ANIMATION_RATE)\n self.next_frame += self.frame_time\n return self.frame\n", "step-5": "# Import other modules\nfrom zelda_utilities.constants import *\n\n\n# Helps establish the current frame for sprite animation/image changing\nclass Animation:\n def __init__(self):\n # Animation clock\n self.next_frame = pygame.time.get_ticks()\n\n # Starting frame\n self.frame = 0\n\n # ~12 frames/sec (1000ms // 12)\n self.frame_time = 1000 // ANIMATION_RATE\n\n def anim_sprite(self):\n if pygame.time.get_ticks() > self.next_frame:\n self.frame = (self.frame + 1) % (24 * ANIMATION_RATE) # reset > 20 sec\n self.next_frame += self.frame_time\n return self.frame\n\n", "step-ids": [ 0, 2, 3, 4, 5 ] }
[ 0, 2, 3, 4, 5 ]