hexsha
stringlengths
40
40
size
int64
2
1.02M
ext
stringclasses
10 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
245
max_stars_repo_name
stringlengths
6
130
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
245
max_issues_repo_name
stringlengths
6
130
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
67k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
245
max_forks_repo_name
stringlengths
6
130
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
2
1.02M
avg_line_length
float64
1
417k
max_line_length
int64
1
987k
alphanum_fraction
float64
0
1
content_no_comment
stringlengths
0
1.01M
is_comment_constant_removed
bool
1 class
is_sharp_comment_removed
bool
1 class
f70c25e9166e6ebd1af0d494cb03847dc405a4fb
285
py
Python
setup.py
DK1515/cli
f89be9083c4f44306118a3ef27c21e63ab21388c
[ "MIT" ]
1
2018-11-29T23:21:47.000Z
2018-11-29T23:21:47.000Z
setup.py
DK1515/cli
f89be9083c4f44306118a3ef27c21e63ab21388c
[ "MIT" ]
7
2018-11-15T17:45:06.000Z
2018-11-15T17:45:30.000Z
ClickTest/setup.py
DK1515/REST_API_Project_5
708b13d5b81cbb1a5efeef3093186159327b2043
[ "MIT" ]
3
2018-10-23T17:34:03.000Z
2018-11-12T23:14:13.000Z
from setuptools import setup setup( name="myhello", version='0.1', py_modules=['colors'], include_package_data=True, install_requires=[ 'Click', 'colorama', ], entry_points=''' [console_scripts] myhello=hello:cli ''', )
16.764706
30
0.564912
from setuptools import setup setup( name="myhello", version='0.1', py_modules=['colors'], include_package_data=True, install_requires=[ 'Click', 'colorama', ], entry_points=''' [console_scripts] myhello=hello:cli ''', )
true
true
f70c25f3f908dc76375bcf8842ae66d0664b7932
2,421
py
Python
zcrmsdk/src/com/zoho/crm/api/modules/argument.py
zoho/zohocrm-python-sdk-2.0
3a93eb3b57fed4e08f26bd5b311e101cb2995411
[ "Apache-2.0" ]
null
null
null
zcrmsdk/src/com/zoho/crm/api/modules/argument.py
zoho/zohocrm-python-sdk-2.0
3a93eb3b57fed4e08f26bd5b311e101cb2995411
[ "Apache-2.0" ]
null
null
null
zcrmsdk/src/com/zoho/crm/api/modules/argument.py
zoho/zohocrm-python-sdk-2.0
3a93eb3b57fed4e08f26bd5b311e101cb2995411
[ "Apache-2.0" ]
null
null
null
try: from zcrmsdk.src.com.zoho.crm.api.exception import SDKException from zcrmsdk.src.com.zoho.crm.api.util import Constants except Exception: from ..exception import SDKException from ..util import Constants class Argument(object): def __init__(self): """Creates an instance of Argument""" self.__name = None self.__value = None self.__key_modified = dict() def get_name(self): """ The method to get the name Returns: string: A string representing the name """ return self.__name def set_name(self, name): """ The method to set the value to name Parameters: name (string) : A string representing the name """ if name is not None and not isinstance(name, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: name EXPECTED TYPE: str', None, None) self.__name = name self.__key_modified['name'] = 1 def get_value(self): """ The method to get the value Returns: string: A string representing the value """ return self.__value def set_value(self, value): """ The method to set the value to value Parameters: value (string) : A string representing the value """ if value is not None and not isinstance(value, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: value EXPECTED TYPE: str', None, None) self.__value = value self.__key_modified['value'] = 1 def is_key_modified(self, key): """ The method to check if the user has modified the given key Parameters: key (string) : A string representing the key Returns: int: An int representing the modification """ if key is not None and not isinstance(key, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None) if key in self.__key_modified: return self.__key_modified.get(key) return None def set_key_modified(self, key, modification): """ The method to mark the given key as modified Parameters: key (string) : A string representing the key modification (int) : An int representing the modification """ if key is not None and not isinstance(key, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: 
key EXPECTED TYPE: str', None, None) if modification is not None and not isinstance(modification, int): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modification EXPECTED TYPE: int', None, None) self.__key_modified[key] = modification
24.21
100
0.712515
try: from zcrmsdk.src.com.zoho.crm.api.exception import SDKException from zcrmsdk.src.com.zoho.crm.api.util import Constants except Exception: from ..exception import SDKException from ..util import Constants class Argument(object): def __init__(self): self.__name = None self.__value = None self.__key_modified = dict() def get_name(self): return self.__name def set_name(self, name): if name is not None and not isinstance(name, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: name EXPECTED TYPE: str', None, None) self.__name = name self.__key_modified['name'] = 1 def get_value(self): return self.__value def set_value(self, value): if value is not None and not isinstance(value, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: value EXPECTED TYPE: str', None, None) self.__value = value self.__key_modified['value'] = 1 def is_key_modified(self, key): if key is not None and not isinstance(key, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None) if key in self.__key_modified: return self.__key_modified.get(key) return None def set_key_modified(self, key, modification): if key is not None and not isinstance(key, str): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: key EXPECTED TYPE: str', None, None) if modification is not None and not isinstance(modification, int): raise SDKException(Constants.DATA_TYPE_ERROR, 'KEY: modification EXPECTED TYPE: int', None, None) self.__key_modified[key] = modification
true
true
f70c2728edd3ea8062890acf9c7820743b28fe2a
9,753
py
Python
nssrc/com/citrix/netscaler/nitro/resource/config/bfd/bfdsession.py
guardicore/nitro-python
5346a5086134aead80968f15a41ff527adaa0ec1
[ "Apache-2.0" ]
null
null
null
nssrc/com/citrix/netscaler/nitro/resource/config/bfd/bfdsession.py
guardicore/nitro-python
5346a5086134aead80968f15a41ff527adaa0ec1
[ "Apache-2.0" ]
null
null
null
nssrc/com/citrix/netscaler/nitro/resource/config/bfd/bfdsession.py
guardicore/nitro-python
5346a5086134aead80968f15a41ff527adaa0ec1
[ "Apache-2.0" ]
null
null
null
# # Copyright (c) 2021 Citrix Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License") # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response from nssrc.com.citrix.netscaler.nitro.service.options import options from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util class bfdsession(base_resource) : """ Configuration for BFD configuration resource. """ def __init__(self) : self._localip = None self._remoteip = None self._state = None self._localport = None self._remoteport = None self._minimumtransmitinterval = None self._negotiatedminimumtransmitinterval = None self._minimumreceiveinterval = None self._negotiatedminimumreceiveinterval = None self._multiplier = None self._remotemultiplier = None self._vlan = None self._localdiagnotic = None self._localdiscriminator = None self._remotediscriminator = None self._passive = None self._multihop = None self._admindown = None self._originalownerpe = None self._currentownerpe = None self._ownernode = None self.___count = None @property def localip(self) : r"""IPV4 or IPV6 Address of Local Node. """ try : return self._localip except Exception as e: raise e @localip.setter def localip(self, localip) : r"""IPV4 or IPV6 Address of Local Node. 
""" try : self._localip = localip except Exception as e: raise e @property def remoteip(self) : r"""IPV4 or IPV6 Address of Remote Node. """ try : return self._remoteip except Exception as e: raise e @remoteip.setter def remoteip(self, remoteip) : r"""IPV4 or IPV6 Address of Remote Node. """ try : self._remoteip = remoteip except Exception as e: raise e @property def state(self) : r"""Current state of the BFD session.<br/>Possible values = ADMIN DOWN, DOWN, INIT, UP. """ try : return self._state except Exception as e: raise e @property def localport(self) : r"""Source Port used by Local node to send Control packets for the BFD session. """ try : return self._localport except Exception as e: raise e @property def remoteport(self) : r"""Source Port used by Remote node to send Control packets for the BFD session. """ try : return self._remoteport except Exception as e: raise e @property def minimumtransmitinterval(self) : r"""Minimum trasmit interval, in milliseconds, the local node would like to use when transmitting BFD Control packets. """ try : return self._minimumtransmitinterval except Exception as e: raise e @property def negotiatedminimumtransmitinterval(self) : r"""Negotiated Minimum Transmit Interval. This is the interval at which the local node will be sending out BFD control packets. """ try : return self._negotiatedminimumtransmitinterval except Exception as e: raise e @property def minimumreceiveinterval(self) : r"""Minimum receive interval, in milliseconds, between received BFD Control packets that the local node is capable of supporting. """ try : return self._minimumreceiveinterval except Exception as e: raise e @property def negotiatedminimumreceiveinterval(self) : r"""Negotiated Minimum Receive Interval. This is the interval at which the local node will be expecting BFD control packets. """ try : return self._negotiatedminimumreceiveinterval except Exception as e: raise e @property def multiplier(self) : r"""Detection Multiplier. 
The negotiated transmit interval multiplied by Detection multiplier provides the Detection Time for the remote node for the BFD session. """ try : return self._multiplier except Exception as e: raise e @property def remotemultiplier(self) : r"""Your Multiplier. The negotiated receive interval multiplied by Your Multiplier provides the Detection Time for the local node for the BFD session. """ try : return self._remotemultiplier except Exception as e: raise e @property def vlan(self) : r"""VLAN ID on which the BDS session is configured. """ try : return self._vlan except Exception as e: raise e @property def localdiagnotic(self) : r"""Diagnostic Code specifying the local system's reason for the last change in session state. """ try : return self._localdiagnotic except Exception as e: raise e @property def localdiscriminator(self) : r"""A unique discriminator value generated by the local node for the session. """ try : return self._localdiscriminator except Exception as e: raise e @property def remotediscriminator(self) : r"""A unique discriminator value as received from the remote node for the session. """ try : return self._remotediscriminator except Exception as e: raise e @property def passive(self) : r"""Flag indicating that the session is passive. """ try : return self._passive except Exception as e: raise e @property def multihop(self) : r"""Flag indicating if the session is multihop. """ try : return self._multihop except Exception as e: raise e @property def admindown(self) : r"""Flag indicating if admin down is being sent. """ try : return self._admindown except Exception as e: raise e @property def originalownerpe(self) : r"""Original Owner PE of the BFD session. """ try : return self._originalownerpe except Exception as e: raise e @property def currentownerpe(self) : r"""Current Owner PE of the BFD session. 
""" try : return self._currentownerpe except Exception as e: raise e @property def ownernode(self) : r"""The owner node in a Cluster for this BFD session. Owner node can vary from 0 to 31. If ownernode is not specified then the session is treated as Striped session.<br/>Default value: 255. """ try : return self._ownernode except Exception as e: raise e def _get_nitro_response(self, service, response) : r""" converts nitro response into object and returns the object array in case of get request. """ try : result = service.payload_formatter.string_to_resource(bfdsession_response, response, self.__class__.__name__) if(result.errorcode != 0) : if (result.errorcode == 444) : service.clear_session(self) if result.severity : if (result.severity == "ERROR") : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) else : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) return result.bfdsession except Exception as e : raise e def _get_object_name(self) : r""" Returns the value of object identifier argument """ try : return 0 except Exception as e : raise e @classmethod def get(cls, client, name="", option_="") : r""" Use this API to fetch all the bfdsession resources that are configured on netscaler. """ try : if not name : obj = bfdsession() response = obj.get_resources(client, option_) return response except Exception as e : raise e @classmethod def get_args(cls, client, args) : r""" Use this API to fetch all the bfdsession resources that are configured on netscaler. # This uses bfdsession_args which is a way to provide additional arguments while fetching the resources. """ try : obj = bfdsession() option_ = options() option_.args = nitro_util.object_to_string_withoutquotes(args) response = obj.get_resources(client, option_) return response except Exception as e : raise e @classmethod def get_filtered(cls, client, filter_) : r""" Use this API to fetch filtered set of bfdsession resources. 
filter string should be in JSON format.eg: "port:80,servicetype:HTTP". """ try : obj = bfdsession() option_ = options() option_.filter = filter_ response = obj.getfiltered(client, option_) return response except Exception as e : raise e @classmethod def count(cls, client) : r""" Use this API to count the bfdsession resources configured on NetScaler. """ try : obj = bfdsession() option_ = options() option_.count = True response = obj.get_resources(client, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e : raise e @classmethod def count_filtered(cls, client, filter_) : r""" Use this API to count filtered the set of bfdsession resources. Filter string should be in JSON format.eg: "port:80,servicetype:HTTP". """ try : obj = bfdsession() option_ = options() option_.count = True option_.filter = filter_ response = obj.getfiltered(client, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e : raise e class State: ADMIN_DOWN = "ADMIN DOWN" DOWN = "DOWN" INIT = "INIT" UP = "UP" class bfdsession_response(base_response) : def __init__(self, length=1) : self.bfdsession = [] self.errorcode = 0 self.message = "" self.severity = "" self.sessionid = "" self.bfdsession = [bfdsession() for _ in range(length)]
26.008
191
0.715985
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response from nssrc.com.citrix.netscaler.nitro.service.options import options from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util class bfdsession(base_resource) : def __init__(self) : self._localip = None self._remoteip = None self._state = None self._localport = None self._remoteport = None self._minimumtransmitinterval = None self._negotiatedminimumtransmitinterval = None self._minimumreceiveinterval = None self._negotiatedminimumreceiveinterval = None self._multiplier = None self._remotemultiplier = None self._vlan = None self._localdiagnotic = None self._localdiscriminator = None self._remotediscriminator = None self._passive = None self._multihop = None self._admindown = None self._originalownerpe = None self._currentownerpe = None self._ownernode = None self.___count = None @property def localip(self) : try : return self._localip except Exception as e: raise e @localip.setter def localip(self, localip) : try : self._localip = localip except Exception as e: raise e @property def remoteip(self) : try : return self._remoteip except Exception as e: raise e @remoteip.setter def remoteip(self, remoteip) : try : self._remoteip = remoteip except Exception as e: raise e @property def state(self) : try : return self._state except Exception as e: raise e @property def localport(self) : try : return self._localport except Exception as e: raise e @property def remoteport(self) : try : return self._remoteport except Exception as e: raise e @property def minimumtransmitinterval(self) : try : return self._minimumtransmitinterval except Exception as e: raise e @property def negotiatedminimumtransmitinterval(self) : try : return self._negotiatedminimumtransmitinterval except Exception as e: raise e @property 
def minimumreceiveinterval(self) : try : return self._minimumreceiveinterval except Exception as e: raise e @property def negotiatedminimumreceiveinterval(self) : try : return self._negotiatedminimumreceiveinterval except Exception as e: raise e @property def multiplier(self) : try : return self._multiplier except Exception as e: raise e @property def remotemultiplier(self) : try : return self._remotemultiplier except Exception as e: raise e @property def vlan(self) : try : return self._vlan except Exception as e: raise e @property def localdiagnotic(self) : try : return self._localdiagnotic except Exception as e: raise e @property def localdiscriminator(self) : try : return self._localdiscriminator except Exception as e: raise e @property def remotediscriminator(self) : try : return self._remotediscriminator except Exception as e: raise e @property def passive(self) : try : return self._passive except Exception as e: raise e @property def multihop(self) : try : return self._multihop except Exception as e: raise e @property def admindown(self) : try : return self._admindown except Exception as e: raise e @property def originalownerpe(self) : try : return self._originalownerpe except Exception as e: raise e @property def currentownerpe(self) : try : return self._currentownerpe except Exception as e: raise e @property def ownernode(self) : try : return self._ownernode except Exception as e: raise e def _get_nitro_response(self, service, response) : try : result = service.payload_formatter.string_to_resource(bfdsession_response, response, self.__class__.__name__) if(result.errorcode != 0) : if (result.errorcode == 444) : service.clear_session(self) if result.severity : if (result.severity == "ERROR") : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) else : raise nitro_exception(result.errorcode, str(result.message), str(result.severity)) return result.bfdsession except Exception as e : raise e def _get_object_name(self) : try : 
return 0 except Exception as e : raise e @classmethod def get(cls, client, name="", option_="") : try : if not name : obj = bfdsession() response = obj.get_resources(client, option_) return response except Exception as e : raise e @classmethod def get_args(cls, client, args) : try : obj = bfdsession() option_ = options() option_.args = nitro_util.object_to_string_withoutquotes(args) response = obj.get_resources(client, option_) return response except Exception as e : raise e @classmethod def get_filtered(cls, client, filter_) : try : obj = bfdsession() option_ = options() option_.filter = filter_ response = obj.getfiltered(client, option_) return response except Exception as e : raise e @classmethod def count(cls, client) : try : obj = bfdsession() option_ = options() option_.count = True response = obj.get_resources(client, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e : raise e @classmethod def count_filtered(cls, client, filter_) : try : obj = bfdsession() option_ = options() option_.count = True option_.filter = filter_ response = obj.getfiltered(client, option_) if response : return response[0].__dict__['___count'] return 0 except Exception as e : raise e class State: ADMIN_DOWN = "ADMIN DOWN" DOWN = "DOWN" INIT = "INIT" UP = "UP" class bfdsession_response(base_response) : def __init__(self, length=1) : self.bfdsession = [] self.errorcode = 0 self.message = "" self.severity = "" self.sessionid = "" self.bfdsession = [bfdsession() for _ in range(length)]
true
true
f70c274faeae977083489369b7de5cf47e642696
2,548
py
Python
api/modules/holidays/views.py
HemabhKamboj/server
4ca2666cd49694245aeee0e658bf5ebce448e50e
[ "MIT" ]
null
null
null
api/modules/holidays/views.py
HemabhKamboj/server
4ca2666cd49694245aeee0e658bf5ebce448e50e
[ "MIT" ]
null
null
null
api/modules/holidays/views.py
HemabhKamboj/server
4ca2666cd49694245aeee0e658bf5ebce448e50e
[ "MIT" ]
null
null
null
import datetime import math import requests_cache from bs4 import BeautifulSoup from rest_framework import status from rest_framework.decorators import api_view from rest_framework.response import Response from api.modules.holidays.constants import HOLIDAYS_PAGE_URL, HINDI_DAY_STRING_MAP, HINDI_MONTH_STRING_MAP from api.modules.holidays.utils import load_url_content week_difference = datetime.timedelta(days=7) requests_cache.install_cache(expire_after=week_difference) @api_view(['GET']) def get_upcoming_holidays(request, year): """ Returns a list of all the holidays in a given year :param request: :param year: :return: 400 if unable to get response from Holidays Page :return: 503 if unable to correctly parse Holidays Page :return: 200 successful """ holiday_data = [] try: html = load_url_content(HOLIDAYS_PAGE_URL.format(year)) if html: soup = BeautifulSoup(html, 'html.parser') cells = soup.findAll(['th', 'td']) row_len = int(math.ceil(len(cells) / 4)) for ctr in range(row_len): if ctr == 0: continue offset = ctr * 4 holiday_type = cells[offset + 3].text.split() date_string = cells[offset + 0].text.strip().split(" ") day_string = cells[offset + 1].text.strip() # Check if HTML response is in Hindi # If in Hindi, replace with English counterpart if date_string[1] in HINDI_MONTH_STRING_MAP.keys(): date_string[1] = HINDI_MONTH_STRING_MAP[date_string[1]][:3] day_string = HINDI_DAY_STRING_MAP[day_string] try: dt = datetime.datetime.strptime(" ".join(date_string), '%d %b') except ValueError: dt = datetime.datetime.strptime(" ".join(date_string), '%b %d') holiday_obj = { 'month': dt.strftime('%B'), 'date': int(dt.strftime('%d')), 'day': day_string, 'name': cells[offset + 2].text.strip(), 'type': holiday_type[0] } holiday_data.append(holiday_obj) else: return Response(holiday_data, status=status.HTTP_400_BAD_REQUEST) except Exception as e: return Response(str(e), status=status.HTTP_500_INTERNAL_SERVER_ERROR) return Response(holiday_data, status=status.HTTP_200_OK)
36.927536
106
0.605965
import datetime import math import requests_cache from bs4 import BeautifulSoup from rest_framework import status from rest_framework.decorators import api_view from rest_framework.response import Response from api.modules.holidays.constants import HOLIDAYS_PAGE_URL, HINDI_DAY_STRING_MAP, HINDI_MONTH_STRING_MAP from api.modules.holidays.utils import load_url_content week_difference = datetime.timedelta(days=7) requests_cache.install_cache(expire_after=week_difference) @api_view(['GET']) def get_upcoming_holidays(request, year): holiday_data = [] try: html = load_url_content(HOLIDAYS_PAGE_URL.format(year)) if html: soup = BeautifulSoup(html, 'html.parser') cells = soup.findAll(['th', 'td']) row_len = int(math.ceil(len(cells) / 4)) for ctr in range(row_len): if ctr == 0: continue offset = ctr * 4 holiday_type = cells[offset + 3].text.split() date_string = cells[offset + 0].text.strip().split(" ") day_string = cells[offset + 1].text.strip() if date_string[1] in HINDI_MONTH_STRING_MAP.keys(): date_string[1] = HINDI_MONTH_STRING_MAP[date_string[1]][:3] day_string = HINDI_DAY_STRING_MAP[day_string] try: dt = datetime.datetime.strptime(" ".join(date_string), '%d %b') except ValueError: dt = datetime.datetime.strptime(" ".join(date_string), '%b %d') holiday_obj = { 'month': dt.strftime('%B'), 'date': int(dt.strftime('%d')), 'day': day_string, 'name': cells[offset + 2].text.strip(), 'type': holiday_type[0] } holiday_data.append(holiday_obj) else: return Response(holiday_data, status=status.HTTP_400_BAD_REQUEST) except Exception as e: return Response(str(e), status=status.HTTP_500_INTERNAL_SERVER_ERROR) return Response(holiday_data, status=status.HTTP_200_OK)
true
true
f70c286a9902002e58cba1233b064d8d8b626ec2
2,583
py
Python
xarray/backends/pynio_.py
martindurant/xarray
98a05f11c6f38489c82e86c9e9df796e7fb65fd2
[ "PSF-2.0", "Apache-2.0", "BSD-2-Clause", "BSD-3-Clause" ]
null
null
null
xarray/backends/pynio_.py
martindurant/xarray
98a05f11c6f38489c82e86c9e9df796e7fb65fd2
[ "PSF-2.0", "Apache-2.0", "BSD-2-Clause", "BSD-3-Clause" ]
null
null
null
xarray/backends/pynio_.py
martindurant/xarray
98a05f11c6f38489c82e86c9e9df796e7fb65fd2
[ "PSF-2.0", "Apache-2.0", "BSD-2-Clause", "BSD-3-Clause" ]
null
null
null
from __future__ import absolute_import from __future__ import division from __future__ import print_function import functools import numpy as np from .. import Variable from ..core.utils import (FrozenOrderedDict, Frozen, NdimSizeLenMixin, DunderArrayMixin) from ..core import indexing from .common import AbstractDataStore, DataStorePickleMixin class NioArrayWrapper(NdimSizeLenMixin, DunderArrayMixin): def __init__(self, variable_name, datastore): self.datastore = datastore self.variable_name = variable_name array = self.get_array() self.shape = array.shape self.dtype = np.dtype(array.typecode()) def get_array(self): self.datastore.assert_open() return self.datastore.ds.variables[self.variable_name] def __getitem__(self, key): with self.datastore.ensure_open(autoclose=True): array = self.get_array() if key == () and self.ndim == 0: return array.get_value() return array[key] class NioDataStore(AbstractDataStore, DataStorePickleMixin): """Store for accessing datasets via PyNIO """ def __init__(self, filename, mode='r', autoclose=False): import Nio opener = functools.partial(Nio.open_file, filename, mode=mode) self.ds = opener() # xarray provides its own support for FillValue, # so turn off PyNIO's support for the same. 
self.ds.set_option('MaskedArrayMode', 'MaskedNever') self._autoclose = autoclose self._isopen = True self._opener = opener self._mode = mode def open_store_variable(self, name, var): data = indexing.LazilyIndexedArray(NioArrayWrapper(name, self)) return Variable(var.dimensions, data, var.attributes) def get_variables(self): with self.ensure_open(autoclose=False): return FrozenOrderedDict((k, self.open_store_variable(k, v)) for k, v in self.ds.variables.iteritems()) def get_attrs(self): with self.ensure_open(autoclose=True): return Frozen(self.ds.attributes) def get_dimensions(self): with self.ensure_open(autoclose=True): return Frozen(self.ds.dimensions) def get_encoding(self): encoding = {} encoding['unlimited_dims'] = set( [k for k in self.ds.dimensions if self.ds.unlimited(k)]) return encoding def close(self): if self._isopen: self.ds.close() self._isopen = False
32.2875
79
0.650019
from __future__ import absolute_import from __future__ import division from __future__ import print_function import functools import numpy as np from .. import Variable from ..core.utils import (FrozenOrderedDict, Frozen, NdimSizeLenMixin, DunderArrayMixin) from ..core import indexing from .common import AbstractDataStore, DataStorePickleMixin class NioArrayWrapper(NdimSizeLenMixin, DunderArrayMixin): def __init__(self, variable_name, datastore): self.datastore = datastore self.variable_name = variable_name array = self.get_array() self.shape = array.shape self.dtype = np.dtype(array.typecode()) def get_array(self): self.datastore.assert_open() return self.datastore.ds.variables[self.variable_name] def __getitem__(self, key): with self.datastore.ensure_open(autoclose=True): array = self.get_array() if key == () and self.ndim == 0: return array.get_value() return array[key] class NioDataStore(AbstractDataStore, DataStorePickleMixin): def __init__(self, filename, mode='r', autoclose=False): import Nio opener = functools.partial(Nio.open_file, filename, mode=mode) self.ds = opener() self.ds.set_option('MaskedArrayMode', 'MaskedNever') self._autoclose = autoclose self._isopen = True self._opener = opener self._mode = mode def open_store_variable(self, name, var): data = indexing.LazilyIndexedArray(NioArrayWrapper(name, self)) return Variable(var.dimensions, data, var.attributes) def get_variables(self): with self.ensure_open(autoclose=False): return FrozenOrderedDict((k, self.open_store_variable(k, v)) for k, v in self.ds.variables.iteritems()) def get_attrs(self): with self.ensure_open(autoclose=True): return Frozen(self.ds.attributes) def get_dimensions(self): with self.ensure_open(autoclose=True): return Frozen(self.ds.dimensions) def get_encoding(self): encoding = {} encoding['unlimited_dims'] = set( [k for k in self.ds.dimensions if self.ds.unlimited(k)]) return encoding def close(self): if self._isopen: self.ds.close() self._isopen = False
true
true
f70c28cf5ae8ad999f86f2c1d054ba9d5b3f343a
3,237
py
Python
Scenario2-fan-out/python/subscribe_2.py
rido-min/IoTHubMQTTBrokerPreviewSamples
ee5e556822c1f3abf7faa5a597f25429022625fe
[ "MIT" ]
null
null
null
Scenario2-fan-out/python/subscribe_2.py
rido-min/IoTHubMQTTBrokerPreviewSamples
ee5e556822c1f3abf7faa5a597f25429022625fe
[ "MIT" ]
null
null
null
Scenario2-fan-out/python/subscribe_2.py
rido-min/IoTHubMQTTBrokerPreviewSamples
ee5e556822c1f3abf7faa5a597f25429022625fe
[ "MIT" ]
null
null
null
# Copyright (c) Microsoft Corporation. All rights reserved.S # Licensed under the MIT License. See License.txt in the project root for # license information. import os import sys import logging # noqa: F401 import json import time from concurrent.futures import ThreadPoolExecutor from paho_client import PahoClient """ Uncomment the following lines to enable debug logging """ # logging.basicConfig(level=logging.INFO) # logging.getLogger("paho").setLevel(level=logging.DEBUG) ################################## # CREATE CLIENTS ################################## client_1 = PahoClient.create_from_connection_string( os.environ["CS_VEHICLE_1"], clean_session=True ) client_2 = PahoClient.create_from_connection_string( os.environ["CS_VEHICLE_2"], clean_session=True ) all_clients = (client_1, client_2) def listen(client: PahoClient) -> None: ################################## # CONNECT ################################## print("{}: Connecting".format(client.auth.device_id)) client.start_connect() if not client.connection_status.wait_for_connected(timeout=20): print("{}: failed to connect. 
exiting".format(client.auth.device_id)) sys.exit(1) ################################## # SUBSCRIBE ################################## qos = 1 topic_filter = "fleet/alerts/#" print( "{}: Subscribing to {} at qos {}".format( client.auth.device_id, topic_filter, qos ) ) (rc, mid) = client.subscribe(topic_filter, qos) ack_result = client.incoming_subacks.wait_for_ack(mid, timeout=20) if not ack_result: print("{}: SUBACK not received within 20 seconds".format(client.auth.device_id)) client.disconnect() client.connection_status.wait_for_disconnected() sys.exit(1) elif ack_result[0] == -1: print("{}: Subscription was rejected".format(client.auth.device_id)) client.disconnect() client.connection_status.wait_for_disconnected() sys.exit(1) else: print( "{}: Subscription was granted with qos {}".format( client.auth.device_id, ack_result[0] ) ) ################################## # LISTEN ################################## time_to_listen_in_seconds = 600 end_time = time.time() + time_to_listen_in_seconds while time.time() <= end_time: remaining_time = end_time - time.time() message = client.incoming_messages.pop_next_message(timeout=remaining_time) if message: payload_object = json.loads(message.payload) print( "{}: Message received on topic {}: {}".format( client.auth.device_id, message.topic, payload_object ) ) ################################## # DISCONNECT ################################## print("{}: Disconnecting".format(client.auth.device_id)) client.disconnect() client.connection_status.wait_for_disconnected() ################################## # CREATE_THREADS ################################## with ThreadPoolExecutor() as tp: for client in all_clients: tp.submit(listen, client)
29.427273
88
0.574915
import os import sys import logging import json import time from concurrent.futures import ThreadPoolExecutor from paho_client import PahoClient client_1 = PahoClient.create_from_connection_string( os.environ["CS_VEHICLE_1"], clean_session=True ) client_2 = PahoClient.create_from_connection_string( os.environ["CS_VEHICLE_2"], clean_session=True ) all_clients = (client_1, client_2) def listen(client: PahoClient) -> None: print("{}: Connecting".format(client.auth.device_id)) client.start_connect() if not client.connection_status.wait_for_connected(timeout=20): print("{}: failed to connect. exiting".format(client.auth.device_id)) sys.exit(1) qos = 1 topic_filter = "fleet/alerts/#" print( "{}: Subscribing to {} at qos {}".format( client.auth.device_id, topic_filter, qos ) ) (rc, mid) = client.subscribe(topic_filter, qos) ack_result = client.incoming_subacks.wait_for_ack(mid, timeout=20) if not ack_result: print("{}: SUBACK not received within 20 seconds".format(client.auth.device_id)) client.disconnect() client.connection_status.wait_for_disconnected() sys.exit(1) elif ack_result[0] == -1: print("{}: Subscription was rejected".format(client.auth.device_id)) client.disconnect() client.connection_status.wait_for_disconnected() sys.exit(1) else: print( "{}: Subscription was granted with qos {}".format( client.auth.device_id, ack_result[0] ) ) time_to_listen_in_seconds = 600 end_time = time.time() + time_to_listen_in_seconds while time.time() <= end_time: remaining_time = end_time - time.time() message = client.incoming_messages.pop_next_message(timeout=remaining_time) if message: payload_object = json.loads(message.payload) print( "{}: Message received on topic {}: {}".format( client.auth.device_id, message.topic, payload_object ) ) print("{}: Disconnecting".format(client.auth.device_id)) client.disconnect() client.connection_status.wait_for_disconnected() with ThreadPoolExecutor() as tp: for client in all_clients: tp.submit(listen, client)
true
true
f70c2904f29d66cf924fa7d4d4333085ff320187
1,786
py
Python
ee/models/explicit_team_membership.py
brave-care/posthog
8edd14a16ad936fb241dcf856925e9f2ea87cba4
[ "MIT" ]
7,409
2020-02-09T23:18:10.000Z
2022-03-31T22:36:25.000Z
ee/models/explicit_team_membership.py
brave-care/posthog
8edd14a16ad936fb241dcf856925e9f2ea87cba4
[ "MIT" ]
5,709
2020-02-09T23:26:13.000Z
2022-03-31T20:20:01.000Z
ee/models/explicit_team_membership.py
brave-care/posthog
8edd14a16ad936fb241dcf856925e9f2ea87cba4
[ "MIT" ]
647
2020-02-13T17:50:55.000Z
2022-03-31T11:24:19.000Z
from typing import TYPE_CHECKING from django.db import models from posthog.models.utils import UUIDModel, sane_repr if TYPE_CHECKING: from posthog.models.organization import OrganizationMembership class ExplicitTeamMembership(UUIDModel): class Level(models.IntegerChoices): """Keep in sync with OrganizationMembership.Level (only difference being organizations having an Owner).""" MEMBER = 1, "member" ADMIN = 8, "administrator" team: models.ForeignKey = models.ForeignKey( "posthog.Team", on_delete=models.CASCADE, related_name="explicit_memberships", related_query_name="explicit_membership", ) parent_membership: models.ForeignKey = models.ForeignKey( "posthog.OrganizationMembership", on_delete=models.CASCADE, related_name="explicit_team_memberships", related_query_name="explicit_team_membership", ) level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( default=Level.MEMBER, choices=Level.choices ) joined_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) class Meta: constraints = [ models.UniqueConstraint(fields=["team", "parent_membership"], name="unique_explicit_team_membership"), ] def __str__(self): return str(self.Level(self.level)) @property def effective_level(self) -> "OrganizationMembership.Level": """If organization level is higher than project level, then that takes precedence over explicit project level. """ return max(self.level, self.parent_membership.level) __repr__ = sane_repr("team", "parent_membership", "level")
35.019608
118
0.715566
from typing import TYPE_CHECKING from django.db import models from posthog.models.utils import UUIDModel, sane_repr if TYPE_CHECKING: from posthog.models.organization import OrganizationMembership class ExplicitTeamMembership(UUIDModel): class Level(models.IntegerChoices): MEMBER = 1, "member" ADMIN = 8, "administrator" team: models.ForeignKey = models.ForeignKey( "posthog.Team", on_delete=models.CASCADE, related_name="explicit_memberships", related_query_name="explicit_membership", ) parent_membership: models.ForeignKey = models.ForeignKey( "posthog.OrganizationMembership", on_delete=models.CASCADE, related_name="explicit_team_memberships", related_query_name="explicit_team_membership", ) level: models.PositiveSmallIntegerField = models.PositiveSmallIntegerField( default=Level.MEMBER, choices=Level.choices ) joined_at: models.DateTimeField = models.DateTimeField(auto_now_add=True) updated_at: models.DateTimeField = models.DateTimeField(auto_now=True) class Meta: constraints = [ models.UniqueConstraint(fields=["team", "parent_membership"], name="unique_explicit_team_membership"), ] def __str__(self): return str(self.Level(self.level)) @property def effective_level(self) -> "OrganizationMembership.Level": return max(self.level, self.parent_membership.level) __repr__ = sane_repr("team", "parent_membership", "level")
true
true
f70c29e07b84aee9f07e33726a5727e28f2dadee
1,789
py
Python
print_utils.py
FieryRider/matrix-archive
77c49b9ac56346a2e8136851b71512fb53096cc2
[ "Apache-2.0" ]
null
null
null
print_utils.py
FieryRider/matrix-archive
77c49b9ac56346a2e8136851b71512fb53096cc2
[ "Apache-2.0" ]
null
null
null
print_utils.py
FieryRider/matrix-archive
77c49b9ac56346a2e8136851b71512fb53096cc2
[ "Apache-2.0" ]
1
2021-04-22T06:27:04.000Z
2021-04-22T06:27:04.000Z
import sys class PrintUtils: progress = 0 total_progress = 0 @classmethod def print_progress_bar(cls, iteration, total, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█', print_end = "\r"): """ Call in a loop to create terminal progress bar @params: iteration - Required : current iteration (Int) total - Required : total iterations (Int) prefix - Optional : prefix string (Str) suffix - Optional : suffix string (Str) decimals - Optional : positive number of decimals in percent complete (Int) length - Optional : character length of bar (Int) fill - Optional : bar fill character (Str) print_end - Optional : end character (e.g. "\r", "\r\n") (Str) """ cls.progress = iteration cls.total_progress = total if prefix != "": prefix += " " percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total))) filledLength = int(length * iteration // total) bar = fill * filledLength + '-' * (length - filledLength) print(f'{prefix}[{bar}] {percent}% {suffix}', end = print_end) if iteration == total: print() @classmethod def smart_print(cls, msg, end = "\x1b[2K\r\n", file = sys.stdout): if not isinstance(msg, str): msg = str(msg) print("\x1b[2K", end = "\r", flush = True) if file == sys.stderr: print("\x1b[31m" + msg + "\x1b[37m", file = sys.stderr, flush = True) else: print(msg, flush = True) if cls.total_progress != 0: cls.print_progress_bar(cls.progress, cls.total_progress)
39.755556
134
0.539966
import sys class PrintUtils: progress = 0 total_progress = 0 @classmethod def print_progress_bar(cls, iteration, total, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█', print_end = "\r"): cls.progress = iteration cls.total_progress = total if prefix != "": prefix += " " percent = ("{0:." + str(decimals) + "f}").format(100 * (iteration / float(total))) filledLength = int(length * iteration // total) bar = fill * filledLength + '-' * (length - filledLength) print(f'{prefix}[{bar}] {percent}% {suffix}', end = print_end) if iteration == total: print() @classmethod def smart_print(cls, msg, end = "\x1b[2K\r\n", file = sys.stdout): if not isinstance(msg, str): msg = str(msg) print("\x1b[2K", end = "\r", flush = True) if file == sys.stderr: print("\x1b[31m" + msg + "\x1b[37m", file = sys.stderr, flush = True) else: print(msg, flush = True) if cls.total_progress != 0: cls.print_progress_bar(cls.progress, cls.total_progress)
true
true
f70c2a289f03467d3edc480afd9ba976b3622b9d
354
py
Python
PythonExercicios/des034.py
leuosilveira/Exercicios-Python_Curso-em-Video
17793a5dde868dd6c9a34604ade4b68582db4563
[ "MIT" ]
null
null
null
PythonExercicios/des034.py
leuosilveira/Exercicios-Python_Curso-em-Video
17793a5dde868dd6c9a34604ade4b68582db4563
[ "MIT" ]
null
null
null
PythonExercicios/des034.py
leuosilveira/Exercicios-Python_Curso-em-Video
17793a5dde868dd6c9a34604ade4b68582db4563
[ "MIT" ]
null
null
null
print('Olá!\nvou te ajudar a calcular o valor do aumento que você receberá.') sal = float(input('Qual o seu salário atual? \n R$')) if sal > 1250.00: print('Seu salário final será de R${:.2f}, pois seu aumento foi de 10%.'.format(sal+sal*0.10)) else: print('Seu salário final será de R${:.2f}, pois seu aumento foi de 15%.'.format(sal+sal*0.15))
50.571429
98
0.675141
print('Olá!\nvou te ajudar a calcular o valor do aumento que você receberá.') sal = float(input('Qual o seu salário atual? \n R$')) if sal > 1250.00: print('Seu salário final será de R${:.2f}, pois seu aumento foi de 10%.'.format(sal+sal*0.10)) else: print('Seu salário final será de R${:.2f}, pois seu aumento foi de 15%.'.format(sal+sal*0.15))
true
true
f70c2b3d4122d9726b7335d3f84e1321b0abeb30
3,040
py
Python
src/HttpCtrl/http_handler.py
annoviko/robotframework-httpctrl
5514d2d567a8b817df8b3598ab559419e6825acd
[ "BSD-3-Clause" ]
16
2019-03-15T12:29:06.000Z
2022-02-25T21:17:27.000Z
src/HttpCtrl/http_handler.py
annoviko/robotframework-httpctrl
5514d2d567a8b817df8b3598ab559419e6825acd
[ "BSD-3-Clause" ]
29
2019-03-15T12:50:18.000Z
2022-01-03T13:28:34.000Z
src/HttpCtrl/http_handler.py
annoviko/robotframework-httpctrl
5514d2d567a8b817df8b3598ab559419e6825acd
[ "BSD-3-Clause" ]
3
2019-11-22T21:06:48.000Z
2021-06-16T10:00:58.000Z
""" HttpCtrl library provides HTTP/HTTPS client and server API to Robot Framework to make REST API testing easy. Authors: Andrei Novikov Date: 2018-2021 Copyright: The 3-Clause BSD License """ from http.server import SimpleHTTPRequestHandler from robot.api import logger from HttpCtrl.internal_messages import TerminationRequest, IgnoreRequest from HttpCtrl.request import Request from HttpCtrl.request_storage import RequestStorage from HttpCtrl.response_storage import ResponseStorage class HttpHandler(SimpleHTTPRequestHandler): def __init__(self, *args, **kwargs): self.server_version = "HttpCtrl.Server/" self.sys_version = "" SimpleHTTPRequestHandler.__init__(self, *args, **kwargs) def do_GET(self): self.__default_handler('GET') def do_POST(self): self.__default_handler('POST') def do_PUT(self): self.__default_handler('PUT') def do_OPTIONS(self): self.__default_handler('OPTIONS') def do_HEAD(self): self.__default_handler('HEAD') def do_PATCH(self): self.__default_handler('PATCH') def do_DELETE(self): self.__default_handler('DELETE') def log_message(self, format, *args): return def log_error(self, format, *args): return def log_request(self, code='-', size='-'): return def __extract_body(self): body_length = int(self.headers.get('Content-Length', 0)) if body_length > 0: return self.rfile.read(body_length).decode('utf-8') return None def __default_handler(self, method): host, port = self.client_address[:2] body = self.__extract_body() logger.info("'%s' request is received from '%s:%s'." % (method, host, port)) request = Request(host, port, method, self.path, self.headers, body) RequestStorage().push(request) response = ResponseStorage().pop() if isinstance(response, TerminationRequest) or isinstance(response, IgnoreRequest): return try: self.__send_response(response) except Exception as exception: logger.info("Response was not sent to client due to reason: '%s'." 
% str(exception)) def __send_response(self, response): if response is None: logger.error("Response is not provided for incoming request.") return self.send_response(response.get_status()) headers = response.get_headers() for key, value in headers.items(): self.send_header(key, value) body = response.get_body() if body is not None: if isinstance(response.get_body(), str): body = response.get_body().encode("utf-8") self.send_header('Content-Length', str(len(body))) self.end_headers() if body is not None: self.wfile.write(body)
25.982906
109
0.622368
from http.server import SimpleHTTPRequestHandler from robot.api import logger from HttpCtrl.internal_messages import TerminationRequest, IgnoreRequest from HttpCtrl.request import Request from HttpCtrl.request_storage import RequestStorage from HttpCtrl.response_storage import ResponseStorage class HttpHandler(SimpleHTTPRequestHandler): def __init__(self, *args, **kwargs): self.server_version = "HttpCtrl.Server/" self.sys_version = "" SimpleHTTPRequestHandler.__init__(self, *args, **kwargs) def do_GET(self): self.__default_handler('GET') def do_POST(self): self.__default_handler('POST') def do_PUT(self): self.__default_handler('PUT') def do_OPTIONS(self): self.__default_handler('OPTIONS') def do_HEAD(self): self.__default_handler('HEAD') def do_PATCH(self): self.__default_handler('PATCH') def do_DELETE(self): self.__default_handler('DELETE') def log_message(self, format, *args): return def log_error(self, format, *args): return def log_request(self, code='-', size='-'): return def __extract_body(self): body_length = int(self.headers.get('Content-Length', 0)) if body_length > 0: return self.rfile.read(body_length).decode('utf-8') return None def __default_handler(self, method): host, port = self.client_address[:2] body = self.__extract_body() logger.info("'%s' request is received from '%s:%s'." % (method, host, port)) request = Request(host, port, method, self.path, self.headers, body) RequestStorage().push(request) response = ResponseStorage().pop() if isinstance(response, TerminationRequest) or isinstance(response, IgnoreRequest): return try: self.__send_response(response) except Exception as exception: logger.info("Response was not sent to client due to reason: '%s'." 
% str(exception)) def __send_response(self, response): if response is None: logger.error("Response is not provided for incoming request.") return self.send_response(response.get_status()) headers = response.get_headers() for key, value in headers.items(): self.send_header(key, value) body = response.get_body() if body is not None: if isinstance(response.get_body(), str): body = response.get_body().encode("utf-8") self.send_header('Content-Length', str(len(body))) self.end_headers() if body is not None: self.wfile.write(body)
true
true
f70c2bde40e80afd180cc8ccd64d2e3d06547f1d
677
py
Python
tests/qtest.py
henry232323/henrio
315225094d4d1fdad2142534b8cd640462e8b81f
[ "MIT" ]
6
2017-12-12T00:49:02.000Z
2020-12-03T11:12:40.000Z
tests/qtest.py
henry232323/henrio
315225094d4d1fdad2142534b8cd640462e8b81f
[ "MIT" ]
3
2017-10-04T02:05:31.000Z
2020-10-03T04:28:18.000Z
tests/qtest.py
henry232323/henrio
315225094d4d1fdad2142534b8cd640462e8b81f
[ "MIT" ]
2
2017-10-04T01:59:10.000Z
2017-12-23T21:24:22.000Z
from henrio import * import unittest class QueueTest(unittest.TestCase): def test_queue(self): try: l = get_default_loop() q = HeapQueue(50) print(q) async def d(): return await q.get() async def a(i): await sleep(3) await q.put(i) for x in range(100): l.create_task(a(x)) l.create_task(d()) async def task(): await sleep(5) print(len(l._queue), len(l._tasks)) l.run_until_complete(task()) finally: self.assertEqual(len(q), 0)
20.515152
51
0.456425
from henrio import * import unittest class QueueTest(unittest.TestCase): def test_queue(self): try: l = get_default_loop() q = HeapQueue(50) print(q) async def d(): return await q.get() async def a(i): await sleep(3) await q.put(i) for x in range(100): l.create_task(a(x)) l.create_task(d()) async def task(): await sleep(5) print(len(l._queue), len(l._tasks)) l.run_until_complete(task()) finally: self.assertEqual(len(q), 0)
true
true
f70c2ca88960421c39d473ad7ec4ca9d39d7ff13
11,031
py
Python
data/utils/sample.py
NielsOerbaek/leaf
db007a6310da41d73c5dec071ffe1453519d028b
[ "BSD-2-Clause" ]
null
null
null
data/utils/sample.py
NielsOerbaek/leaf
db007a6310da41d73c5dec071ffe1453519d028b
[ "BSD-2-Clause" ]
null
null
null
data/utils/sample.py
NielsOerbaek/leaf
db007a6310da41d73c5dec071ffe1453519d028b
[ "BSD-2-Clause" ]
null
null
null
''' samples from all raw data; by default samples in a non-iid manner; namely, randomly selects users from raw data until their cumulative amount of data exceeds the given number of datapoints to sample (specified by --fraction argument); ordering of original data points is not preserved in sampled data ''' import argparse import json import os import random import time from collections import OrderedDict from constants import DATASETS, SEED_FILES from util import iid_divide parser = argparse.ArgumentParser() parser.add_argument('--name', help='name of dataset to parse; default: sent140;', type=str, choices=DATASETS, default='sent140') parser.add_argument('--iid', help='sample iid;', action="store_true") parser.add_argument('--niid', help="sample niid;", dest='iid', action='store_false') parser.add_argument('--union', help="sample from union lists;", dest='union', action='store_true') parser.add_argument('--fraction', help='fraction of all data to sample; default: 0.1;', type=float, default=0.1) parser.add_argument('--u', help=('number of users in iid data set; ignored in niid case;' 'represented as fraction of original total number of users; ' 'default: 0.01;'), type=float, default=0.01) parser.add_argument('--seed', help='seed for random sampling of data', type=int, default=None) parser.set_defaults(iid=False) args = parser.parse_args() print('------------------------------') print('sampling data') parent_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) data_dir = os.path.join(parent_path, args.name, 'data') subdir = os.path.join(data_dir, 'all_data') files = os.listdir(subdir) files = [f for f in files if f.endswith('.json')] rng_seed = (args.seed if (args.seed is not None and args.seed >= 0) else int(time.time())) print ("Using seed {}".format(rng_seed)) rng = random.Random(rng_seed) print (os.environ.get('LEAF_DATA_META_DIR')) if os.environ.get('LEAF_DATA_META_DIR') is not None: seed_fname = os.path.join(os.environ.get('LEAF_DATA_META_DIR'), 
SEED_FILES['sampling']) with open(seed_fname, 'w+') as f: f.write("# sampling_seed used by sampling script - supply as " "--smplseed to preprocess.sh or --seed to utils/sample.py\n") f.write(str(rng_seed)) print ("- random seed written out to {file}".format(file=seed_fname)) else: print ("- using random seed '{seed}' for sampling".format(seed=rng_seed)) if args.union: print("=== Sampling users for each union") union_dir = os.path.join(data_dir, 'union') path_file = os.path.join(union_dir, "union_list_path") with open(path_file, "r") as f: union_list_file = f.read() os.remove(path_file) with open(union_list_file, "r") as f: union_list = json.load(f) total_samples = sum(map(lambda union: sum(map(lambda c: c[1], union)), union_list)) print("total_samples:", total_samples) unions = list(filter(lambda l: len(l) > 1, union_list)) singles = list(filter(lambda l: len(l) == 1, union_list)) print("Number of unions:", len(unions)) print("Number of singles:", len(singles)) union_num_samples = [] union_sample = [] samples_so_far = 0 for union in unions: print("-"*80) print("\tusers:", len(union)) samples_in_this_union = sum(map(lambda c: c[1], union)) print("\tsamples_in_this_union", samples_in_this_union) frac = args.fraction * samples_in_this_union print("\tfrac", frac) selected_users = [] sample_count = 0 for id, samples in union: if sample_count + samples > frac: break selected_users.append(id) sample_count += samples print("\tusers in sample:", len(selected_users)) print("\tsamples in sample:", sample_count) union_sample.append(selected_users) union_num_samples.append(sample_count) samples_so_far += sample_count samples_remain = total_samples * args.fraction - samples_so_far print("samples remain:", samples_remain) num_singles = 0 for single in singles: samples_in_this_user = single[0][1] id = single[0][0] if samples_remain - samples_in_this_user < 0: break union_sample.append([id]) union_num_samples.append(samples_in_this_user) samples_remain -= samples_in_this_user 
num_singles += 1 union_names = ["union_%d" % i for i in range(len(unions))] singles_names = ["single_%d" % i for i in range(num_singles)] names = union_names + singles_names print("NAMES AND LISTS MATCH:", len(union_sample) == len(names)) print("number of selected singles:", num_singles, "- total singles: ", len(singles)) union_data = dict([(name, {"x": [], "y": []}) for name in names]) for f in files: print("Looking for users in",f) file_dir = os.path.join(subdir, f) with open(file_dir, 'r') as inf: data = json.load(inf, object_pairs_hook=OrderedDict) for user, user_data in data['user_data'].items(): for name, union in zip(names,union_sample): if user in union: union_data[name]['x'] += user_data['x'] union_data[name]['y'] += user_data['y'] #print([(n,len(d["x"])) for n,d in union_data.items()]) # ------------ # create .json file all_data = {} all_data['users'] = names all_data['num_samples'] = union_num_samples all_data['unions'] = union_sample all_data['user_data'] = union_data slabel = 'union' arg_frac = str(args.fraction) arg_frac = arg_frac[2:] arg_nu = str(args.u) arg_nu = arg_nu[2:] arg_label = arg_frac file_name = '%s_%s.json' % (slabel, arg_label) ouf_dir = os.path.join(data_dir, 'sampled_data', file_name) # NOTE: For now, we just write everything to one big json. # This will give us issues if we use a large sample. 
print('writing %s' % file_name) with open(ouf_dir, 'w') as outfile: json.dump(all_data, outfile) if not args.union: new_user_count = 0 # for iid case for f in files: file_dir = os.path.join(subdir, f) with open(file_dir, 'r') as inf: # Load data into an OrderedDict, to prevent ordering changes # and enable reproducibility data = json.load(inf, object_pairs_hook=OrderedDict) num_users = len(data['users']) tot_num_samples = sum(data['num_samples']) num_new_samples = int(args.fraction * tot_num_samples) hierarchies = None if(args.iid): raw_list = list(data['user_data'].values()) raw_x = [elem['x'] for elem in raw_list] raw_y = [elem['y'] for elem in raw_list] x_list = [item for sublist in raw_x for item in sublist] # flatten raw_x y_list = [item for sublist in raw_y for item in sublist] # flatten raw_y num_new_users = int(round(args.u * num_users)) if num_new_users == 0: num_new_users += 1 indices = [i for i in range(tot_num_samples)] new_indices = rng.sample(indices, num_new_samples) users = [str(i+new_user_count) for i in range(num_new_users)] user_data = {} for user in users: user_data[user] = {'x': [], 'y': []} all_x_samples = [x_list[i] for i in new_indices] all_y_samples = [y_list[i] for i in new_indices] x_groups = iid_divide(all_x_samples, num_new_users) y_groups = iid_divide(all_y_samples, num_new_users) for i in range(num_new_users): user_data[users[i]]['x'] = x_groups[i] user_data[users[i]]['y'] = y_groups[i] num_samples = [len(user_data[u]['y']) for u in users] new_user_count += num_new_users else: ctot_num_samples = 0 users = data['users'] users_and_hiers = None if 'hierarchies' in data: users_and_hiers = list(zip(users, data['hierarchies'])) rng.shuffle(users_and_hiers) else: rng.shuffle(users) user_i = 0 num_samples = [] user_data = {} if 'hierarchies' in data: hierarchies = [] while(ctot_num_samples < num_new_samples): hierarchy = None if users_and_hiers is not None: user, hier = users_and_hiers[user_i] else: user = users[user_i] cdata = 
data['user_data'][user] cnum_samples = len(data['user_data'][user]['y']) if (ctot_num_samples + cnum_samples > num_new_samples): cnum_samples = num_new_samples - ctot_num_samples indices = [i for i in range(cnum_samples)] new_indices = rng.sample(indices, cnum_samples) x = [] y = [] for i in new_indices: x.append(data['user_data'][user]['x'][i]) y.append(data['user_data'][user]['y'][i]) cdata = {'x': x, 'y': y} if 'hierarchies' in data: hierarchies.append(hier) num_samples.append(cnum_samples) user_data[user] = cdata ctot_num_samples += cnum_samples user_i += 1 if 'hierarchies' in data: users = [u for u, h in users_and_hiers][:user_i] else: users = users[:user_i] # ------------ # create .json file all_data = {} all_data['users'] = users if hierarchies is not None: all_data['hierarchies'] = hierarchies all_data['num_samples'] = num_samples all_data['user_data'] = user_data slabel = '' if(args.iid): slabel = 'iid' else: slabel = 'niid' arg_frac = str(args.fraction) arg_frac = arg_frac[2:] arg_nu = str(args.u) arg_nu = arg_nu[2:] arg_label = arg_frac if(args.iid): arg_label = '%s_%s' % (arg_nu, arg_label) file_name = '%s_%s_%s.json' % ((f[:-5]), slabel, arg_label) ouf_dir = os.path.join(data_dir, 'sampled_data', file_name) print('writing %s' % file_name) with open(ouf_dir, 'w') as outfile: json.dump(all_data, outfile)
34.908228
91
0.573928
import argparse import json import os import random import time from collections import OrderedDict from constants import DATASETS, SEED_FILES from util import iid_divide parser = argparse.ArgumentParser() parser.add_argument('--name', help='name of dataset to parse; default: sent140;', type=str, choices=DATASETS, default='sent140') parser.add_argument('--iid', help='sample iid;', action="store_true") parser.add_argument('--niid', help="sample niid;", dest='iid', action='store_false') parser.add_argument('--union', help="sample from union lists;", dest='union', action='store_true') parser.add_argument('--fraction', help='fraction of all data to sample; default: 0.1;', type=float, default=0.1) parser.add_argument('--u', help=('number of users in iid data set; ignored in niid case;' 'represented as fraction of original total number of users; ' 'default: 0.01;'), type=float, default=0.01) parser.add_argument('--seed', help='seed for random sampling of data', type=int, default=None) parser.set_defaults(iid=False) args = parser.parse_args() print('------------------------------') print('sampling data') parent_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) data_dir = os.path.join(parent_path, args.name, 'data') subdir = os.path.join(data_dir, 'all_data') files = os.listdir(subdir) files = [f for f in files if f.endswith('.json')] rng_seed = (args.seed if (args.seed is not None and args.seed >= 0) else int(time.time())) print ("Using seed {}".format(rng_seed)) rng = random.Random(rng_seed) print (os.environ.get('LEAF_DATA_META_DIR')) if os.environ.get('LEAF_DATA_META_DIR') is not None: seed_fname = os.path.join(os.environ.get('LEAF_DATA_META_DIR'), SEED_FILES['sampling']) with open(seed_fname, 'w+') as f: f.write("# sampling_seed used by sampling script - supply as " "--smplseed to preprocess.sh or --seed to utils/sample.py\n") f.write(str(rng_seed)) print ("- random seed written out to {file}".format(file=seed_fname)) else: print ("- using random 
seed '{seed}' for sampling".format(seed=rng_seed)) if args.union: print("=== Sampling users for each union") union_dir = os.path.join(data_dir, 'union') path_file = os.path.join(union_dir, "union_list_path") with open(path_file, "r") as f: union_list_file = f.read() os.remove(path_file) with open(union_list_file, "r") as f: union_list = json.load(f) total_samples = sum(map(lambda union: sum(map(lambda c: c[1], union)), union_list)) print("total_samples:", total_samples) unions = list(filter(lambda l: len(l) > 1, union_list)) singles = list(filter(lambda l: len(l) == 1, union_list)) print("Number of unions:", len(unions)) print("Number of singles:", len(singles)) union_num_samples = [] union_sample = [] samples_so_far = 0 for union in unions: print("-"*80) print("\tusers:", len(union)) samples_in_this_union = sum(map(lambda c: c[1], union)) print("\tsamples_in_this_union", samples_in_this_union) frac = args.fraction * samples_in_this_union print("\tfrac", frac) selected_users = [] sample_count = 0 for id, samples in union: if sample_count + samples > frac: break selected_users.append(id) sample_count += samples print("\tusers in sample:", len(selected_users)) print("\tsamples in sample:", sample_count) union_sample.append(selected_users) union_num_samples.append(sample_count) samples_so_far += sample_count samples_remain = total_samples * args.fraction - samples_so_far print("samples remain:", samples_remain) num_singles = 0 for single in singles: samples_in_this_user = single[0][1] id = single[0][0] if samples_remain - samples_in_this_user < 0: break union_sample.append([id]) union_num_samples.append(samples_in_this_user) samples_remain -= samples_in_this_user num_singles += 1 union_names = ["union_%d" % i for i in range(len(unions))] singles_names = ["single_%d" % i for i in range(num_singles)] names = union_names + singles_names print("NAMES AND LISTS MATCH:", len(union_sample) == len(names)) print("number of selected singles:", num_singles, "- total singles: ", 
len(singles)) union_data = dict([(name, {"x": [], "y": []}) for name in names]) for f in files: print("Looking for users in",f) file_dir = os.path.join(subdir, f) with open(file_dir, 'r') as inf: data = json.load(inf, object_pairs_hook=OrderedDict) for user, user_data in data['user_data'].items(): for name, union in zip(names,union_sample): if user in union: union_data[name]['x'] += user_data['x'] union_data[name]['y'] += user_data['y'] all_data = {} all_data['users'] = names all_data['num_samples'] = union_num_samples all_data['unions'] = union_sample all_data['user_data'] = union_data slabel = 'union' arg_frac = str(args.fraction) arg_frac = arg_frac[2:] arg_nu = str(args.u) arg_nu = arg_nu[2:] arg_label = arg_frac file_name = '%s_%s.json' % (slabel, arg_label) ouf_dir = os.path.join(data_dir, 'sampled_data', file_name) print('writing %s' % file_name) with open(ouf_dir, 'w') as outfile: json.dump(all_data, outfile) if not args.union: new_user_count = 0 for f in files: file_dir = os.path.join(subdir, f) with open(file_dir, 'r') as inf: data = json.load(inf, object_pairs_hook=OrderedDict) num_users = len(data['users']) tot_num_samples = sum(data['num_samples']) num_new_samples = int(args.fraction * tot_num_samples) hierarchies = None if(args.iid): raw_list = list(data['user_data'].values()) raw_x = [elem['x'] for elem in raw_list] raw_y = [elem['y'] for elem in raw_list] x_list = [item for sublist in raw_x for item in sublist] y_list = [item for sublist in raw_y for item in sublist] num_new_users = int(round(args.u * num_users)) if num_new_users == 0: num_new_users += 1 indices = [i for i in range(tot_num_samples)] new_indices = rng.sample(indices, num_new_samples) users = [str(i+new_user_count) for i in range(num_new_users)] user_data = {} for user in users: user_data[user] = {'x': [], 'y': []} all_x_samples = [x_list[i] for i in new_indices] all_y_samples = [y_list[i] for i in new_indices] x_groups = iid_divide(all_x_samples, num_new_users) y_groups = 
iid_divide(all_y_samples, num_new_users) for i in range(num_new_users): user_data[users[i]]['x'] = x_groups[i] user_data[users[i]]['y'] = y_groups[i] num_samples = [len(user_data[u]['y']) for u in users] new_user_count += num_new_users else: ctot_num_samples = 0 users = data['users'] users_and_hiers = None if 'hierarchies' in data: users_and_hiers = list(zip(users, data['hierarchies'])) rng.shuffle(users_and_hiers) else: rng.shuffle(users) user_i = 0 num_samples = [] user_data = {} if 'hierarchies' in data: hierarchies = [] while(ctot_num_samples < num_new_samples): hierarchy = None if users_and_hiers is not None: user, hier = users_and_hiers[user_i] else: user = users[user_i] cdata = data['user_data'][user] cnum_samples = len(data['user_data'][user]['y']) if (ctot_num_samples + cnum_samples > num_new_samples): cnum_samples = num_new_samples - ctot_num_samples indices = [i for i in range(cnum_samples)] new_indices = rng.sample(indices, cnum_samples) x = [] y = [] for i in new_indices: x.append(data['user_data'][user]['x'][i]) y.append(data['user_data'][user]['y'][i]) cdata = {'x': x, 'y': y} if 'hierarchies' in data: hierarchies.append(hier) num_samples.append(cnum_samples) user_data[user] = cdata ctot_num_samples += cnum_samples user_i += 1 if 'hierarchies' in data: users = [u for u, h in users_and_hiers][:user_i] else: users = users[:user_i] all_data = {} all_data['users'] = users if hierarchies is not None: all_data['hierarchies'] = hierarchies all_data['num_samples'] = num_samples all_data['user_data'] = user_data slabel = '' if(args.iid): slabel = 'iid' else: slabel = 'niid' arg_frac = str(args.fraction) arg_frac = arg_frac[2:] arg_nu = str(args.u) arg_nu = arg_nu[2:] arg_label = arg_frac if(args.iid): arg_label = '%s_%s' % (arg_nu, arg_label) file_name = '%s_%s_%s.json' % ((f[:-5]), slabel, arg_label) ouf_dir = os.path.join(data_dir, 'sampled_data', file_name) print('writing %s' % file_name) with open(ouf_dir, 'w') as outfile: json.dump(all_data, outfile)
true
true
f70c2cbe9e3ad8b0d91c44d57ee280780565d023
5,297
py
Python
firststreet/api/historic.py
opleban/fsf_api_access_python
ebe4af99e0f1dd59f7273fa62e6f05953aa8a510
[ "MIT" ]
null
null
null
firststreet/api/historic.py
opleban/fsf_api_access_python
ebe4af99e0f1dd59f7273fa62e6f05953aa8a510
[ "MIT" ]
null
null
null
firststreet/api/historic.py
opleban/fsf_api_access_python
ebe4af99e0f1dd59f7273fa62e6f05953aa8a510
[ "MIT" ]
null
null
null
# Author: Kelvin Lai <kelvin@firststreet.org> # Copyright: This module is owned by First Street Foundation # Standard Imports import logging # Internal Imports from firststreet.api import csv_format from firststreet.api.api import Api from firststreet.errors import InvalidArgument from firststreet.models.historic import HistoricEvent, HistoricSummary class Historic(Api): """This class receives a list of search_items and handles the creation of a historic product from the request. Methods: get_event: Retrieves a list of Historic Event for the given list of IDs get_summary: Retrieves a list of Historic Summary for the given list of IDs """ def get_event(self, search_item, csv=False, output_dir=None, extra_param=None): """Retrieves historic event product data from the First Street Foundation API given a list of search_items and returns a list of Historic Event objects. Args: search_item (list/file): A First Street Foundation IDs, lat/lng pair, address, or a file of First Street Foundation IDs csv (bool): To output extracted data to a csv or not output_dir (str): The output directory to save the generated csvs extra_param (str): Extra parameter to be added to the url Returns: A list of Historic Event """ # Get data from api and create objects api_datas = self.call_api(search_item, "historic", "event", None, extra_param=extra_param) product = [HistoricEvent(api_data) for api_data in api_datas] if csv: csv_format.to_csv(product, "historic", "event", output_dir=output_dir) logging.info("Historic Event Data Ready.") return product def get_events_by_location(self, search_item, location_type, csv=False, output_dir=None, extra_param=None): """Retrieves historic summary product data from the First Street Foundation API given a list of location search_items and returns a list of Historic Summary objects. 
Args: search_item (list/file): A First Street Foundation IDs, lat/lng pair, address, or a file of First Street Foundation IDs location_type (str): The location lookup type csv (bool): To output extracted data to a csv or not output_dir (str): The output directory to save the generated csvs extra_param (str): Extra parameter to be added to the url Returns: A list of Historic Event Raises: InvalidArgument: The location provided is empty TypeError: The location provided is not a string """ if not location_type: raise InvalidArgument(location_type) elif not isinstance(location_type, str): raise TypeError("location is not a string") # Get data from api and create objects api_datas = self.call_api(search_item, "historic", "summary", location_type) summary = [HistoricSummary(api_data) for api_data in api_datas] search_item = list(set([event.get("eventId") for sum_hist in summary if sum_hist.historic for event in sum_hist.historic])) if search_item: api_datas_event = self.call_api(search_item, "historic", "event", None, extra_param=extra_param) else: api_datas_event = [{"eventId": None}] event = [HistoricEvent(api_data) for api_data in api_datas_event] if csv: csv_format.to_csv([summary, event], "historic", "summary_event", location_type, output_dir=output_dir) logging.info("Historic Summary Event Data Ready.") return [summary, event] def get_summary(self, search_item, location_type, csv=False, output_dir=None, extra_param=None): """Retrieves historic summary product data from the First Street Foundation API given a list of search_items and returns a list of Historic Summary objects. 
Args: search_item (list/file): A First Street Foundation IDs, lat/lng pair, address, or a file of First Street Foundation IDs location_type (str): The location lookup type csv (bool): To output extracted data to a csv or not output_dir (str): The output directory to save the generated csvs extra_param (str): Extra parameter to be added to the url Returns: A list of Historic Summary Raises: InvalidArgument: The location provided is empty TypeError: The location provided is not a string """ if not location_type: raise InvalidArgument(location_type) elif not isinstance(location_type, str): raise TypeError("location is not a string") # Get data from api and create objects api_datas = self.call_api(search_item, "historic", "summary", location_type, extra_param=extra_param) product = [HistoricSummary(api_data) for api_data in api_datas] if csv: csv_format.to_csv(product, "historic", "summary", location_type, output_dir=output_dir) logging.info("Historic Summary Data Ready.") return product
41.382813
120
0.666037
import logging from firststreet.api import csv_format from firststreet.api.api import Api from firststreet.errors import InvalidArgument from firststreet.models.historic import HistoricEvent, HistoricSummary class Historic(Api): def get_event(self, search_item, csv=False, output_dir=None, extra_param=None): api_datas = self.call_api(search_item, "historic", "event", None, extra_param=extra_param) product = [HistoricEvent(api_data) for api_data in api_datas] if csv: csv_format.to_csv(product, "historic", "event", output_dir=output_dir) logging.info("Historic Event Data Ready.") return product def get_events_by_location(self, search_item, location_type, csv=False, output_dir=None, extra_param=None): if not location_type: raise InvalidArgument(location_type) elif not isinstance(location_type, str): raise TypeError("location is not a string") api_datas = self.call_api(search_item, "historic", "summary", location_type) summary = [HistoricSummary(api_data) for api_data in api_datas] search_item = list(set([event.get("eventId") for sum_hist in summary if sum_hist.historic for event in sum_hist.historic])) if search_item: api_datas_event = self.call_api(search_item, "historic", "event", None, extra_param=extra_param) else: api_datas_event = [{"eventId": None}] event = [HistoricEvent(api_data) for api_data in api_datas_event] if csv: csv_format.to_csv([summary, event], "historic", "summary_event", location_type, output_dir=output_dir) logging.info("Historic Summary Event Data Ready.") return [summary, event] def get_summary(self, search_item, location_type, csv=False, output_dir=None, extra_param=None): if not location_type: raise InvalidArgument(location_type) elif not isinstance(location_type, str): raise TypeError("location is not a string") api_datas = self.call_api(search_item, "historic", "summary", location_type, extra_param=extra_param) product = [HistoricSummary(api_data) for api_data in api_datas] if csv: csv_format.to_csv(product, "historic", "summary", 
location_type, output_dir=output_dir) logging.info("Historic Summary Data Ready.") return product
true
true
f70c2ccaa66136c4e168b5107d277a7a896ae606
72,840
py
Python
intersight/model/virtualization_iwe_host_interface_relationship.py
CiscoDevNet/intersight-python
04b721f37c3044646a91c185c7259edfb991557a
[ "Apache-2.0" ]
5
2021-12-16T15:13:32.000Z
2022-03-29T16:09:54.000Z
intersight/model/virtualization_iwe_host_interface_relationship.py
CiscoDevNet/intersight-python
04b721f37c3044646a91c185c7259edfb991557a
[ "Apache-2.0" ]
4
2022-01-25T19:05:51.000Z
2022-03-29T20:18:37.000Z
intersight/model/virtualization_iwe_host_interface_relationship.py
CiscoDevNet/intersight-python
04b721f37c3044646a91c185c7259edfb991557a
[ "Apache-2.0" ]
2
2020-07-07T15:01:08.000Z
2022-01-31T04:27:35.000Z
""" Cisco Intersight Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. 
# noqa: E501 The version of the OpenAPI document: 1.0.9-4950 Contact: intersight@cisco.com Generated by: https://openapi-generator.tech """ import re # noqa: F401 import sys # noqa: F401 from intersight.model_utils import ( # noqa: F401 ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) def lazy_import(): from intersight.model.display_names import DisplayNames from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship from intersight.model.mo_mo_ref import MoMoRef from intersight.model.mo_tag import MoTag from intersight.model.mo_version_context import MoVersionContext from intersight.model.virtualization_bond_state import VirtualizationBondState from intersight.model.virtualization_iwe_cluster_relationship import VirtualizationIweClusterRelationship from intersight.model.virtualization_iwe_dv_uplink_relationship import VirtualizationIweDvUplinkRelationship from intersight.model.virtualization_iwe_host_interface import VirtualizationIweHostInterface from intersight.model.virtualization_iwe_host_relationship import VirtualizationIweHostRelationship from intersight.model.virtualization_iwe_network_relationship import VirtualizationIweNetworkRelationship globals()['DisplayNames'] = DisplayNames globals()['MoBaseMoRelationship'] = MoBaseMoRelationship globals()['MoMoRef'] = MoMoRef globals()['MoTag'] = MoTag globals()['MoVersionContext'] = MoVersionContext globals()['VirtualizationBondState'] = VirtualizationBondState globals()['VirtualizationIweClusterRelationship'] = VirtualizationIweClusterRelationship globals()['VirtualizationIweDvUplinkRelationship'] = VirtualizationIweDvUplinkRelationship globals()['VirtualizationIweHostInterface'] = VirtualizationIweHostInterface globals()['VirtualizationIweHostRelationship'] = VirtualizationIweHostRelationship globals()['VirtualizationIweNetworkRelationship'] = 
VirtualizationIweNetworkRelationship class VirtualizationIweHostInterfaceRelationship(ModelComposed): """NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and the for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values. 
""" allowed_values = { ('class_id',): { 'MO.MOREF': "mo.MoRef", }, ('link_state',): { 'UNKNOWN': "unknown", 'UP': "up", 'DOWN': "down", 'DEGRADED': "degraded", }, ('object_type',): { 'AAA.AUDITRECORD': "aaa.AuditRecord", 'AAA.RETENTIONCONFIG': "aaa.RetentionConfig", 'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy", 'ACCESS.POLICY': "access.Policy", 'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy", 'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface", 'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface", 'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface", 'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface", 'ADAPTER.UNIT': "adapter.Unit", 'ADAPTER.UNITEXPANDER': "adapter.UnitExpander", 'APPLIANCE.APPSTATUS': "appliance.AppStatus", 'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy", 'APPLIANCE.BACKUP': "appliance.Backup", 'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy", 'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting", 'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy", 'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate", 'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim", 'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy", 'APPLIANCE.DIAGSETTING': "appliance.DiagSetting", 'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting", 'APPLIANCE.FILEGATEWAY': "appliance.FileGateway", 'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus", 'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus", 'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle", 'APPLIANCE.NODEINFO': "appliance.NodeInfo", 'APPLIANCE.NODESTATUS': "appliance.NodeStatus", 'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote", 'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport", 'APPLIANCE.RESTORE': "appliance.Restore", 'APPLIANCE.SETUPINFO': "appliance.SetupInfo", 'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo", 'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus", 'APPLIANCE.UPGRADE': "appliance.Upgrade", 'APPLIANCE.UPGRADEPOLICY': 
"appliance.UpgradePolicy", 'ASSET.CLUSTERMEMBER': "asset.ClusterMember", 'ASSET.DEPLOYMENT': "asset.Deployment", 'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice", 'ASSET.DEVICECLAIM': "asset.DeviceClaim", 'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration", 'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager", 'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation", 'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification", 'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration", 'ASSET.SUBSCRIPTION': "asset.Subscription", 'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount", 'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation", 'ASSET.TARGET': "asset.Target", 'BIOS.BOOTDEVICE': "bios.BootDevice", 'BIOS.BOOTMODE': "bios.BootMode", 'BIOS.POLICY': "bios.Policy", 'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder", 'BIOS.TOKENSETTINGS': "bios.TokenSettings", 'BIOS.UNIT': "bios.Unit", 'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration", 'BOOT.CDDDEVICE': "boot.CddDevice", 'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode", 'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity", 'BOOT.HDDDEVICE': "boot.HddDevice", 'BOOT.ISCSIDEVICE': "boot.IscsiDevice", 'BOOT.NVMEDEVICE': "boot.NvmeDevice", 'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice", 'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy", 'BOOT.PXEDEVICE': "boot.PxeDevice", 'BOOT.SANDEVICE': "boot.SanDevice", 'BOOT.SDDEVICE': "boot.SdDevice", 'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice", 'BOOT.USBDEVICE': "boot.UsbDevice", 'BOOT.VMEDIADEVICE': "boot.VmediaDevice", 'BULK.EXPORT': "bulk.Export", 'BULK.EXPORTEDITEM': "bulk.ExportedItem", 'BULK.MOCLONER': "bulk.MoCloner", 'BULK.MOMERGER': "bulk.MoMerger", 'BULK.REQUEST': "bulk.Request", 'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj", 'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor", 'CAPABILITY.CATALOG': "capability.Catalog", 'CAPABILITY.CHASSISDESCRIPTOR': 
"capability.ChassisDescriptor", 'CAPABILITY.CHASSISMANUFACTURINGDEF': "capability.ChassisManufacturingDef", 'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor", 'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef", 'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray", 'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor", 'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef", 'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef", 'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor", 'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef", 'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef", 'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor", 'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef", 'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef", 'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor", 'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef", 'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor", 'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef", 'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability", 'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor", 'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef", 'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy", 'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail", 'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport", 'CHASSIS.CONFIGRESULT': "chassis.ConfigResult", 'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry", 'CHASSIS.IOMPROFILE': "chassis.IomProfile", 'CHASSIS.PROFILE': "chassis.Profile", 'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit", 'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair", 'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface", 'CLOUD.AWSORGANIZATIONALUNIT': 
"cloud.AwsOrganizationalUnit", 'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup", 'CLOUD.AWSSUBNET': "cloud.AwsSubnet", 'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine", 'CLOUD.AWSVOLUME': "cloud.AwsVolume", 'CLOUD.AWSVPC': "cloud.AwsVpc", 'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory", 'CLOUD.REGIONS': "cloud.Regions", 'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType", 'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType", 'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType", 'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType", 'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards", 'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType", 'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool", 'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization", 'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace", 'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy", 'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy", 'COMPUTE.BLADE': "compute.Blade", 'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity", 'COMPUTE.BOARD': "compute.Board", 'COMPUTE.MAPPING': "compute.Mapping", 'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary", 'COMPUTE.RACKUNIT': "compute.RackUnit", 'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity", 'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy", 'COMPUTE.SERVERSETTING': "compute.ServerSetting", 'COMPUTE.VMEDIA': "compute.Vmedia", 'COND.ALARM': "cond.Alarm", 'COND.ALARMAGGREGATION': "cond.AlarmAggregation", 'COND.HCLSTATUS': "cond.HclStatus", 'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail", 'COND.HCLSTATUSJOB': "cond.HclStatusJob", 'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade", 'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact", 'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition", 'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution", 'CONVERGEDINFRA.POD': "convergedinfra.Pod", 'CRD.CUSTOMRESOURCE': "crd.CustomResource", 'DEVICECONNECTOR.POLICY': "deviceconnector.Policy", 'EQUIPMENT.CHASSIS': 
"equipment.Chassis", 'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity", 'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation", 'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary", 'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule", 'EQUIPMENT.FAN': "equipment.Fan", 'EQUIPMENT.FANCONTROL': "equipment.FanControl", 'EQUIPMENT.FANMODULE': "equipment.FanModule", 'EQUIPMENT.FEX': "equipment.Fex", 'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity", 'EQUIPMENT.FEXOPERATION': "equipment.FexOperation", 'EQUIPMENT.FRU': "equipment.Fru", 'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary", 'EQUIPMENT.IOCARD': "equipment.IoCard", 'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation", 'EQUIPMENT.IOEXPANDER': "equipment.IoExpander", 'EQUIPMENT.LOCATORLED': "equipment.LocatorLed", 'EQUIPMENT.PSU': "equipment.Psu", 'EQUIPMENT.PSUCONTROL': "equipment.PsuControl", 'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure", 'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot", 'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule", 'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard", 'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController", 'EQUIPMENT.TPM': "equipment.Tpm", 'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver", 'ETHER.HOSTPORT': "ether.HostPort", 'ETHER.NETWORKPORT': "ether.NetworkPort", 'ETHER.PHYSICALPORT': "ether.PhysicalPort", 'ETHER.PORTCHANNEL': "ether.PortChannel", 'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization", 'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole", 'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole", 'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail", 'FABRIC.CONFIGRESULT': "fabric.ConfigResult", 'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry", 'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity", 'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact", 'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy", 'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy", 'FABRIC.ETHNETWORKPOLICY': 
"fabric.EthNetworkPolicy", 'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy", 'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole", 'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole", 'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole", 'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole", 'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole", 'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy", 'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy", 'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy", 'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy", 'FABRIC.PCMEMBER': "fabric.PcMember", 'FABRIC.PCOPERATION': "fabric.PcOperation", 'FABRIC.PORTMODE': "fabric.PortMode", 'FABRIC.PORTOPERATION': "fabric.PortOperation", 'FABRIC.PORTPOLICY': "fabric.PortPolicy", 'FABRIC.SERVERROLE': "fabric.ServerRole", 'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile", 'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy", 'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile", 'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy", 'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole", 'FABRIC.UPLINKROLE': "fabric.UplinkRole", 'FABRIC.VLAN': "fabric.Vlan", 'FABRIC.VSAN': "fabric.Vsan", 'FAULT.INSTANCE': "fault.Instance", 'FC.PHYSICALPORT': "fc.PhysicalPort", 'FC.PORTCHANNEL': "fc.PortChannel", 'FCPOOL.FCBLOCK': "fcpool.FcBlock", 'FCPOOL.LEASE': "fcpool.Lease", 'FCPOOL.POOL': "fcpool.Pool", 'FCPOOL.POOLMEMBER': "fcpool.PoolMember", 'FCPOOL.UNIVERSE': "fcpool.Universe", 'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost", 'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor", 'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor", 'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade", 'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor", 'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor", 'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable", 'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta", 'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor", 
'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable", 'FIRMWARE.EULA': "firmware.Eula", 'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary", 'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor", 'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor", 'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor", 'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor", 'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor", 'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor", 'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor", 'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware", 'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor", 'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable", 'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor", 'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade", 'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade", 'FIRMWARE.UPGRADE': "firmware.Upgrade", 'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact", 'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus", 'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus", 'FORECAST.CATALOG': "forecast.Catalog", 'FORECAST.DEFINITION': "forecast.Definition", 'FORECAST.INSTANCE': "forecast.Instance", 'GRAPHICS.CARD': "graphics.Card", 'GRAPHICS.CONTROLLER': "graphics.Controller", 'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus", 'HCL.DRIVERIMAGE': "hcl.DriverImage", 'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog", 'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo", 'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem", 'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor", 'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName", 'HYPERFLEX.ALARM': "hyperflex.Alarm", 'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog", 'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy", 'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster", 'HYPERFLEX.CAPABILITYINFO': 
"hyperflex.CapabilityInfo", 'HYPERFLEX.CLUSTER': "hyperflex.Cluster", 'HYPERFLEX.CLUSTERBACKUPPOLICY': "hyperflex.ClusterBackupPolicy", 'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment", 'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory", 'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot", 'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy", 'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile", 'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy", 'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment", 'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy", 'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult", 'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry", 'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer", 'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic", 'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState", 'HYPERFLEX.DRIVE': "hyperflex.Drive", 'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy", 'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy", 'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal", 'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal", 'HYPERFLEX.HEALTH': "hyperflex.Health", 'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition", 'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution", 'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot", 'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum", 'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion", 'HYPERFLEX.LICENSE': "hyperflex.License", 'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy", 'HYPERFLEX.NODE': "hyperflex.Node", 'HYPERFLEX.NODECONFIGPOLICY': 
"hyperflex.NodeConfigPolicy", 'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile", 'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster", 'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy", 'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion", 'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry", 'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel", 'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken", 'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent", 'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry", 'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion", 'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy", 'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer", 'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy", 'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy", 'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy", 'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo", 'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation", 'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation", 'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo", 'HYPERFLEX.VOLUME': "hyperflex.Volume", 'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration", 'IAAS.CONNECTORPACK': "iaas.ConnectorPack", 'IAAS.DEVICESTATUS': "iaas.DeviceStatus", 'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages", 'IAAS.LICENSEINFO': "iaas.LicenseInfo", 'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks", 'IAAS.SERVICEREQUEST': "iaas.ServiceRequest", 'IAAS.UCSDINFO': "iaas.UcsdInfo", 'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra", 'IAAS.UCSDMESSAGES': "iaas.UcsdMessages", 'IAM.ACCOUNT': "iam.Account", 'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience", 'IAM.APIKEY': "iam.ApiKey", 'IAM.APPREGISTRATION': "iam.AppRegistration", 'IAM.BANNERMESSAGE': "iam.BannerMessage", 'IAM.CERTIFICATE': "iam.Certificate", 
'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest", 'IAM.DOMAINGROUP': "iam.DomainGroup", 'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege", 'IAM.ENDPOINTROLE': "iam.EndPointRole", 'IAM.ENDPOINTUSER': "iam.EndPointUser", 'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy", 'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole", 'IAM.IDP': "iam.Idp", 'IAM.IDPREFERENCE': "iam.IdpReference", 'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement", 'IAM.IPADDRESS': "iam.IpAddress", 'IAM.LDAPGROUP': "iam.LdapGroup", 'IAM.LDAPPOLICY': "iam.LdapPolicy", 'IAM.LDAPPROVIDER': "iam.LdapProvider", 'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword", 'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy", 'IAM.OAUTHTOKEN': "iam.OAuthToken", 'IAM.PERMISSION': "iam.Permission", 'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec", 'IAM.PRIVILEGE': "iam.Privilege", 'IAM.PRIVILEGESET': "iam.PrivilegeSet", 'IAM.QUALIFIER': "iam.Qualifier", 'IAM.RESOURCELIMITS': "iam.ResourceLimits", 'IAM.RESOURCEPERMISSION': "iam.ResourcePermission", 'IAM.RESOURCEROLES': "iam.ResourceRoles", 'IAM.ROLE': "iam.Role", 'IAM.SECURITYHOLDER': "iam.SecurityHolder", 'IAM.SERVICEPROVIDER': "iam.ServiceProvider", 'IAM.SESSION': "iam.Session", 'IAM.SESSIONLIMITS': "iam.SessionLimits", 'IAM.SYSTEM': "iam.System", 'IAM.TRUSTPOINT': "iam.TrustPoint", 'IAM.USER': "iam.User", 'IAM.USERGROUP': "iam.UserGroup", 'IAM.USERPREFERENCE': "iam.UserPreference", 'INVENTORY.DEVICEINFO': "inventory.DeviceInfo", 'INVENTORY.DNMOBINDING': "inventory.DnMoBinding", 'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory", 'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder", 'INVENTORY.REQUEST': "inventory.Request", 'IPMIOVERLAN.POLICY': "ipmioverlan.Policy", 'IPPOOL.BLOCKLEASE': "ippool.BlockLease", 'IPPOOL.IPLEASE': "ippool.IpLease", 'IPPOOL.POOL': "ippool.Pool", 'IPPOOL.POOLMEMBER': "ippool.PoolMember", 'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock", 'IPPOOL.SHADOWPOOL': "ippool.ShadowPool", 'IPPOOL.UNIVERSE': "ippool.Universe", 
'IQNPOOL.BLOCK': "iqnpool.Block", 'IQNPOOL.LEASE': "iqnpool.Lease", 'IQNPOOL.POOL': "iqnpool.Pool", 'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember", 'IQNPOOL.UNIVERSE': "iqnpool.Universe", 'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus", 'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic", 'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile", 'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation", 'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition", 'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy", 'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository", 'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile", 'KUBERNETES.CATALOG': "kubernetes.Catalog", 'KUBERNETES.CLUSTER': "kubernetes.Cluster", 'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile", 'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile", 'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult", 'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry", 'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy", 'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet", 'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment", 'KUBERNETES.INGRESS': "kubernetes.Ingress", 'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy", 'KUBERNETES.NODE': "kubernetes.Node", 'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile", 'KUBERNETES.POD': "kubernetes.Pod", 'KUBERNETES.SERVICE': "kubernetes.Service", 'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet", 'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy", 'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy", 'KUBERNETES.VERSION': "kubernetes.Version", 'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy", 'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy", 'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider", 'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': 
"kubernetes.VirtualMachineInstanceType", 'KUBERNETES.VIRTUALMACHINENODEPROFILE': "kubernetes.VirtualMachineNodeProfile", 'KVM.POLICY': "kvm.Policy", 'KVM.SESSION': "kvm.Session", 'KVM.TUNNEL': "kvm.Tunnel", 'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData", 'LICENSE.CUSTOMEROP': "license.CustomerOp", 'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp", 'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount", 'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp", 'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount", 'LICENSE.LICENSEINFO': "license.LicenseInfo", 'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp", 'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken", 'LS.SERVICEPROFILE': "ls.ServiceProfile", 'MACPOOL.IDBLOCK': "macpool.IdBlock", 'MACPOOL.LEASE': "macpool.Lease", 'MACPOOL.POOL': "macpool.Pool", 'MACPOOL.POOLMEMBER': "macpool.PoolMember", 'MACPOOL.UNIVERSE': "macpool.Universe", 'MANAGEMENT.CONTROLLER': "management.Controller", 'MANAGEMENT.ENTITY': "management.Entity", 'MANAGEMENT.INTERFACE': "management.Interface", 'MEMORY.ARRAY': "memory.Array", 'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult", 'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration", 'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace", 'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult", 'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy", 'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion", 'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit", 'MEMORY.UNIT': "memory.Unit", 'META.DEFINITION': "meta.Definition", 'NETWORK.ELEMENT': "network.Element", 'NETWORK.ELEMENTSUMMARY': "network.ElementSummary", 'NETWORK.FCZONEINFO': "network.FcZoneInfo", 'NETWORK.VLANPORTINFO': "network.VlanPortInfo", 'NETWORKCONFIG.POLICY': "networkconfig.Policy", 'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost", 'NIAAPI.APICFIELDNOTICE': 
"niaapi.ApicFieldNotice", 'NIAAPI.APICHWEOL': "niaapi.ApicHweol", 'NIAAPI.APICLATESTMAINTAINEDRELEASE': "niaapi.ApicLatestMaintainedRelease", 'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend", 'NIAAPI.APICSWEOL': "niaapi.ApicSweol", 'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost", 'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice", 'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol", 'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease", 'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend", 'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol", 'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader", 'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata", 'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader", 'NIAAPI.NIBMETADATA': "niaapi.NibMetadata", 'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex", 'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails", 'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails", 'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails", 'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails", 'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails", 'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest", 'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler", 'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails", 'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails", 'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails", 'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth", 'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails", 'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails", 'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails", 'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails", 'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails", 
'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails", 'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': "niatelemetry.ApicSnmpTrapFwdServerDetails", 'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails", 'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp", 'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc", 'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails", 'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts", 'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails", 'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies", 'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails", 'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails", 'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails", 'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails", 'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails", 'NIATELEMETRY.EPG': "niatelemetry.Epg", 'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails", 'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile", 'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs", 'NIATELEMETRY.FAULT': "niatelemetry.Fault", 'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails", 'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap", 'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap", 'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails", 'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails", 'NIATELEMETRY.LC': "niatelemetry.Lc", 'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails", 'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails", 'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails", 'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails", 'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails", 
'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails", 'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails", 'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails", 'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards", 'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage", 'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory", 'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm", 'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric", 'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState", 'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck", 'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies", 'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies", 'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies", 'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory", 'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc", 'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo", 'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails", 'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest", 'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg", 'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter", 'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails", 'NIATELEMETRY.TENANT': "niatelemetry.Tenant", 'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription", 'NTP.POLICY': "ntp.Policy", 'OAUTH.ACCESSTOKEN': "oauth.AccessToken", 'OAUTH.AUTHORIZATION': "oauth.Authorization", 'OPRS.DEPLOYMENT': "oprs.Deployment", 'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage", 'ORGANIZATION.ORGANIZATION': "organization.Organization", 'OS.BULKINSTALLINFO': "os.BulkInstallInfo", 'OS.CATALOG': "os.Catalog", 'OS.CONFIGURATIONFILE': "os.ConfigurationFile", 'OS.DISTRIBUTION': 
"os.Distribution", 'OS.INSTALL': "os.Install", 'OS.OSSUPPORT': "os.OsSupport", 'OS.SUPPORTEDVERSION': "os.SupportedVersion", 'OS.TEMPLATEFILE': "os.TemplateFile", 'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget", 'PCI.COPROCESSORCARD': "pci.CoprocessorCard", 'PCI.DEVICE': "pci.Device", 'PCI.LINK': "pci.Link", 'PCI.SWITCH': "pci.Switch", 'PORT.GROUP': "port.Group", 'PORT.MACBINDING': "port.MacBinding", 'PORT.SUBGROUP': "port.SubGroup", 'POWER.CONTROLSTATE': "power.ControlState", 'POWER.POLICY': "power.Policy", 'PROCESSOR.UNIT': "processor.Unit", 'RACK.UNITPERSONALITY': "rack.UnitPersonality", 'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway", 'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem", 'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy", 'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile", 'RECOVERY.CONFIGRESULT': "recovery.ConfigResult", 'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry", 'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup", 'RECOVERY.RESTORE': "recovery.Restore", 'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy", 'RESOURCE.GROUP': "resource.Group", 'RESOURCE.GROUPMEMBER': "resource.GroupMember", 'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount", 'RESOURCE.MEMBERSHIP': "resource.Membership", 'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder", 'RESOURCE.RESERVATION': "resource.Reservation", 'RESOURCEPOOL.LEASE': "resourcepool.Lease", 'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource", 'RESOURCEPOOL.POOL': "resourcepool.Pool", 'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember", 'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe", 'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy", 'SDCARD.POLICY': "sdcard.Policy", 'SDWAN.PROFILE': "sdwan.Profile", 'SDWAN.ROUTERNODE': "sdwan.RouterNode", 'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy", 'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy", 'SEARCH.SEARCHITEM': "search.SearchItem", 'SEARCH.TAGITEM': "search.TagItem", 
'SECURITY.UNIT': "security.Unit", 'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail", 'SERVER.CONFIGIMPORT': "server.ConfigImport", 'SERVER.CONFIGRESULT': "server.ConfigResult", 'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry", 'SERVER.PROFILE': "server.Profile", 'SERVER.PROFILETEMPLATE': "server.ProfileTemplate", 'SMTP.POLICY': "smtp.Policy", 'SNMP.POLICY': "snmp.Policy", 'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable", 'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory", 'SOFTWARE.HCLMETA': "software.HclMeta", 'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable", 'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable", 'SOFTWARE.RELEASEMETA': "software.ReleaseMeta", 'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable", 'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable", 'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable", 'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization", 'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage", 'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog", 'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper", 'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel", 'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint", 'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec", 'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile", 'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release", 'SOL.POLICY': "sol.Policy", 'SSH.POLICY': "ssh.Policy", 'STORAGE.CONTROLLER': "storage.Controller", 'STORAGE.DISKGROUP': "storage.DiskGroup", 'STORAGE.DISKSLOT': "storage.DiskSlot", 'STORAGE.DRIVEGROUP': "storage.DriveGroup", 'STORAGE.ENCLOSURE': "storage.Enclosure", 'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk", 'STORAGE.ENCLOSUREDISKSLOTEP': 
"storage.EnclosureDiskSlotEp", 'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController", 'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps", 'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive", 'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive", 'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController", 'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive", 'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive", 'STORAGE.HITACHIARRAY': "storage.HitachiArray", 'STORAGE.HITACHICONTROLLER': "storage.HitachiController", 'STORAGE.HITACHIDISK': "storage.HitachiDisk", 'STORAGE.HITACHIHOST': "storage.HitachiHost", 'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun", 'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup", 'STORAGE.HITACHIPOOL': "storage.HitachiPool", 'STORAGE.HITACHIPORT': "storage.HitachiPort", 'STORAGE.HITACHIVOLUME': "storage.HitachiVolume", 'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer", 'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume", 'STORAGE.ITEM': "storage.Item", 'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate", 'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk", 'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster", 'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort", 'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy", 'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface", 'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort", 'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup", 'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface", 'STORAGE.NETAPPLICENSE': "storage.NetAppLicense", 'STORAGE.NETAPPLUN': "storage.NetAppLun", 'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap", 'STORAGE.NETAPPNODE': "storage.NetAppNode", 'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer", 'STORAGE.NETAPPSENSOR': "storage.NetAppSensor", 'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm", 'STORAGE.NETAPPVOLUME': "storage.NetAppVolume", 
'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot", 'STORAGE.PHYSICALDISK': "storage.PhysicalDisk", 'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension", 'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage", 'STORAGE.PUREARRAY': "storage.PureArray", 'STORAGE.PURECONTROLLER': "storage.PureController", 'STORAGE.PUREDISK': "storage.PureDisk", 'STORAGE.PUREHOST': "storage.PureHost", 'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup", 'STORAGE.PUREHOSTLUN': "storage.PureHostLun", 'STORAGE.PUREPORT': "storage.PurePort", 'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup", 'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot", 'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule", 'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule", 'STORAGE.PUREVOLUME': "storage.PureVolume", 'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot", 'STORAGE.SASEXPANDER': "storage.SasExpander", 'STORAGE.SASPORT': "storage.SasPort", 'STORAGE.SPAN': "storage.Span", 'STORAGE.STORAGEPOLICY': "storage.StoragePolicy", 'STORAGE.VDMEMBEREP': "storage.VdMemberEp", 'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive", 'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer", 'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension", 'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity", 'SYSLOG.POLICY': "syslog.Policy", 'TAM.ADVISORYCOUNT': "tam.AdvisoryCount", 'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition", 'TAM.ADVISORYINFO': "tam.AdvisoryInfo", 'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance", 'TAM.SECURITYADVISORY': "tam.SecurityAdvisory", 'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory", 'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory", 'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory", 'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory", 'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory", 'TASK.PURESCOPEDINVENTORY': 
"task.PureScopedInventory", 'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory", 'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy", 'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download", 'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle", 'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus", 'TERMINAL.AUDITLOG': "terminal.AuditLog", 'TERRAFORM.EXECUTOR': "terraform.Executor", 'THERMAL.POLICY': "thermal.Policy", 'TOP.SYSTEM': "top.System", 'UCSD.BACKUPINFO': "ucsd.BackupInfo", 'UUIDPOOL.BLOCK': "uuidpool.Block", 'UUIDPOOL.POOL': "uuidpool.Pool", 'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember", 'UUIDPOOL.UNIVERSE': "uuidpool.Universe", 'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease", 'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager", 'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole", 'VIRTUALIZATION.HOST': "virtualization.Host", 'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster", 'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter", 'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink", 'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch", 'VIRTUALIZATION.IWEHOST': "virtualization.IweHost", 'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface", 'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch", 'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork", 'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk", 'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine", 'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface", 'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk", 'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine", 'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork", 'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster", 
'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter", 'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore", 'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster", 'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork", 'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch", 'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder", 'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost", 'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork", 'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork", 'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface", 'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort", 'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter", 'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk", 'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine", 'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot", 'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface", 'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch", 'VMEDIA.POLICY': "vmedia.Policy", 'VMRC.CONSOLE': "vmrc.Console", 'VNC.CONSOLE': "vnc.Console", 'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy", 'VNIC.ETHIF': "vnic.EthIf", 'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy", 'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy", 'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy", 'VNIC.FCIF': "vnic.FcIf", 'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy", 'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy", 'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy", 'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy", 'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy", 'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy", 'VNIC.LCPSTATUS': "vnic.LcpStatus", 
'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy", 'VNIC.SCPSTATUS': "vnic.ScpStatus", 'VRF.VRF': "vrf.Vrf", 'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor", 'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor", 'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta", 'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner", 'WORKFLOW.CATALOG': "workflow.Catalog", 'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition", 'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler", 'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo", 'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow", 'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition", 'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance", 'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition", 'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance", 'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput", 'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor", 'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog", 'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition", 'WORKFLOW.TASKINFO': "workflow.TaskInfo", 'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata", 'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification", 'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation", 'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta", 'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition", 'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo", 'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta", 'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata", 'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification", }, } validations = { ('mac',): { 'regex': { 'pattern': r'^$|^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$', # noqa: E501 }, }, } @cached_property def additional_properties_type(): """ This must be a method because a model may have properties that are of type self, this must run after 
the class is loaded """ lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501 _nullable = False @cached_property def openapi_types(): """ This must be a method because a model may have properties that are of type self, this must run after the class is loaded Returns openapi_types (dict): The key is attribute name and the value is attribute type. """ lazy_import() return { 'class_id': (str,), # noqa: E501 'moid': (str,), # noqa: E501 'selector': (str,), # noqa: E501 'link': (str,), # noqa: E501 'account_moid': (str,), # noqa: E501 'create_time': (datetime,), # noqa: E501 'domain_group_moid': (str,), # noqa: E501 'mod_time': (datetime,), # noqa: E501 'owners': ([str], none_type,), # noqa: E501 'shared_scope': (str,), # noqa: E501 'tags': ([MoTag], none_type,), # noqa: E501 'version_context': (MoVersionContext,), # noqa: E501 'ancestors': ([MoBaseMoRelationship], none_type,), # noqa: E501 'parent': (MoBaseMoRelationship,), # noqa: E501 'permission_resources': ([MoBaseMoRelationship], none_type,), # noqa: E501 'display_names': (DisplayNames,), # noqa: E501 'bond_state': (VirtualizationBondState,), # noqa: E501 'host_name': (str,), # noqa: E501 'host_uuid': (str,), # noqa: E501 'if_type': (str,), # noqa: E501 'ip_addresses': ([str], none_type,), # noqa: E501 'link_state': (str,), # noqa: E501 'mac': (str,), # noqa: E501 'mtu': (int,), # noqa: E501 'name': (str,), # noqa: E501 'vlans': (str,), # noqa: E501 'cluster': (VirtualizationIweClusterRelationship,), # noqa: E501 'dv_uplink': (VirtualizationIweDvUplinkRelationship,), # noqa: E501 'host': (VirtualizationIweHostRelationship,), # noqa: E501 'network': (VirtualizationIweNetworkRelationship,), # noqa: E501 'object_type': (str,), # noqa: E501 } @cached_property def discriminator(): lazy_import() val = { 'mo.MoRef': MoMoRef, 'virtualization.IweHostInterface': VirtualizationIweHostInterface, } if not val: return None return {'class_id': val} attribute_map = { 'class_id': 
'ClassId', # noqa: E501 'moid': 'Moid', # noqa: E501 'selector': 'Selector', # noqa: E501 'link': 'link', # noqa: E501 'account_moid': 'AccountMoid', # noqa: E501 'create_time': 'CreateTime', # noqa: E501 'domain_group_moid': 'DomainGroupMoid', # noqa: E501 'mod_time': 'ModTime', # noqa: E501 'owners': 'Owners', # noqa: E501 'shared_scope': 'SharedScope', # noqa: E501 'tags': 'Tags', # noqa: E501 'version_context': 'VersionContext', # noqa: E501 'ancestors': 'Ancestors', # noqa: E501 'parent': 'Parent', # noqa: E501 'permission_resources': 'PermissionResources', # noqa: E501 'display_names': 'DisplayNames', # noqa: E501 'bond_state': 'BondState', # noqa: E501 'host_name': 'HostName', # noqa: E501 'host_uuid': 'HostUuid', # noqa: E501 'if_type': 'IfType', # noqa: E501 'ip_addresses': 'IpAddresses', # noqa: E501 'link_state': 'LinkState', # noqa: E501 'mac': 'Mac', # noqa: E501 'mtu': 'Mtu', # noqa: E501 'name': 'Name', # noqa: E501 'vlans': 'Vlans', # noqa: E501 'cluster': 'Cluster', # noqa: E501 'dv_uplink': 'DvUplink', # noqa: E501 'host': 'Host', # noqa: E501 'network': 'Network', # noqa: E501 'object_type': 'ObjectType', # noqa: E501 } required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances', ]) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): # noqa: E501 """VirtualizationIweHostInterfaceRelationship - a model defined in OpenAPI Args: Keyword Args: class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "mo.MoRef", must be one of ["mo.MoRef", ] # noqa: E501 _check_type (bool): if True, values for parameters in openapi_types will be type checked and a TypeError will be raised if the wrong type is input. 
Defaults to True _path_to_item (tuple/list): This is a list of keys or values to drill down to the model in received_data when deserializing a response _spec_property_naming (bool): True if the variable names in the input data are serialized names, as specified in the OpenAPI document. False if the variable names in the input data are pythonic names, e.g. snake case (default) _configuration (Configuration): the instance to use when deserializing a file_type parameter. If passed, type conversion is attempted If omitted no type conversion is done. _visited_composed_classes (tuple): This stores a tuple of classes that we have traveled through so that if we see that class again we will not use its discriminator again. When traveling through a discriminator, the composed schema that is is traveled through is added to this set. For example if Animal has a discriminator petType and we pass in "Dog", and the class Dog allOf includes Animal, we move through Animal once using the discriminator, and pick Dog. Then in Dog, we will make an instance of the Animal class but this time we won't travel through its discriminator because we passed in _visited_composed_classes = (Animal,) moid (str): The Moid of the referenced REST resource.. [optional] # noqa: E501 selector (str): An OData $filter expression which describes the REST resource to be referenced. This field may be set instead of 'moid' by clients. 1. If 'moid' is set this field is ignored. 1. If 'selector' is set and 'moid' is empty/absent from the request, Intersight determines the Moid of the resource matching the filter expression and populates it in the MoRef that is part of the object instance being inserted/updated to fulfill the REST request. An error is returned if the filter matches zero or more than one REST resource. An example filter string is: Serial eq '3AA8B7T11'.. [optional] # noqa: E501 link (str): A URL to an instance of the 'mo.MoRef' class.. 
[optional] # noqa: E501 account_moid (str): The Account ID for this managed object.. [optional] # noqa: E501 create_time (datetime): The time when this managed object was created.. [optional] # noqa: E501 domain_group_moid (str): The DomainGroup ID for this managed object.. [optional] # noqa: E501 mod_time (datetime): The time when this managed object was last modified.. [optional] # noqa: E501 owners ([str], none_type): [optional] # noqa: E501 shared_scope (str): Intersight provides pre-built workflows, tasks and policies to end users through global catalogs. Objects that are made available through global catalogs are said to have a 'shared' ownership. Shared objects are either made globally available to all end users or restricted to end users based on their license entitlement. Users can use this property to differentiate the scope (global or a specific license tier) to which a shared MO belongs.. [optional] # noqa: E501 tags ([MoTag], none_type): [optional] # noqa: E501 version_context (MoVersionContext): [optional] # noqa: E501 ancestors ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501 parent (MoBaseMoRelationship): [optional] # noqa: E501 permission_resources ([MoBaseMoRelationship], none_type): An array of relationships to moBaseMo resources.. [optional] # noqa: E501 display_names (DisplayNames): [optional] # noqa: E501 bond_state (VirtualizationBondState): [optional] # noqa: E501 host_name (str): The UUID of the host to which this interface belongs to.. [optional] # noqa: E501 host_uuid (str): The UUID of the host to which this interface belongs to.. [optional] # noqa: E501 if_type (str): A hint of the interface type, such as \"ovs-bond\", \"device\", \"openvswitch\".. [optional] # noqa: E501 ip_addresses ([str], none_type): [optional] # noqa: E501 link_state (str): Link state information such as \"up\", \"down\". * `unknown` - The interface line is unknown. * `up` - The interface line is up. 
* `down` - The interface line is down. * `degraded` - For a bond/team interface, not all member interface is up.. [optional] if omitted the server will use the default value of "unknown" # noqa: E501 mac (str): The MAC address of the interface.. [optional] # noqa: E501 mtu (int): The MTU size of the interface.. [optional] # noqa: E501 name (str): The name of the host to which this interface belongs to.. [optional] # noqa: E501 vlans (str): A list of vlans allowed on this interface.. [optional] # noqa: E501 cluster (VirtualizationIweClusterRelationship): [optional] # noqa: E501 dv_uplink (VirtualizationIweDvUplinkRelationship): [optional] # noqa: E501 host (VirtualizationIweHostRelationship): [optional] # noqa: E501 network (VirtualizationIweNetworkRelationship): [optional] # noqa: E501 object_type (str): The fully-qualified name of the remote type referred by this relationship.. [optional] # noqa: E501 """ class_id = kwargs.get('class_id', "mo.MoRef") _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) constant_args = { '_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes, } required_args = { 'class_id': class_id, } model_args = {} model_args.update(required_args) model_args.update(kwargs) composed_info = validate_get_composed_info( constant_args, model_args, self) self._composed_instances = composed_info[0] self._var_name_to_model_instances = composed_info[1] self._additional_properties_model_instances = composed_info[2] unused_args = composed_info[3] for var_name, var_value in required_args.items(): setattr(self, var_name, var_value) for var_name, var_value in kwargs.items(): if var_name in unused_args and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ not self._additional_properties_model_instances: # discard variable. continue setattr(self, var_name, var_value) @cached_property def _composed_schemas(): # we need this here to make our import statements work # we must store _composed_schemas in here so the code is only run # when we invoke this method. If we kept this at the class # level we would get an error beause the class level # code would be run when this module is imported, and these composed # classes don't exist yet because their module has not finished # loading lazy_import() return { 'anyOf': [ ], 'allOf': [ ], 'oneOf': [ MoMoRef, VirtualizationIweHostInterface, none_type, ], }
62.685026
1,678
0.657606
import re import sys from intersight.model_utils import ( ApiTypeError, ModelComposed, ModelNormal, ModelSimple, cached_property, change_keys_js_to_python, convert_js_args_to_python_args, date, datetime, file_type, none_type, validate_get_composed_info, ) def lazy_import(): from intersight.model.display_names import DisplayNames from intersight.model.mo_base_mo_relationship import MoBaseMoRelationship from intersight.model.mo_mo_ref import MoMoRef from intersight.model.mo_tag import MoTag from intersight.model.mo_version_context import MoVersionContext from intersight.model.virtualization_bond_state import VirtualizationBondState from intersight.model.virtualization_iwe_cluster_relationship import VirtualizationIweClusterRelationship from intersight.model.virtualization_iwe_dv_uplink_relationship import VirtualizationIweDvUplinkRelationship from intersight.model.virtualization_iwe_host_interface import VirtualizationIweHostInterface from intersight.model.virtualization_iwe_host_relationship import VirtualizationIweHostRelationship from intersight.model.virtualization_iwe_network_relationship import VirtualizationIweNetworkRelationship globals()['DisplayNames'] = DisplayNames globals()['MoBaseMoRelationship'] = MoBaseMoRelationship globals()['MoMoRef'] = MoMoRef globals()['MoTag'] = MoTag globals()['MoVersionContext'] = MoVersionContext globals()['VirtualizationBondState'] = VirtualizationBondState globals()['VirtualizationIweClusterRelationship'] = VirtualizationIweClusterRelationship globals()['VirtualizationIweDvUplinkRelationship'] = VirtualizationIweDvUplinkRelationship globals()['VirtualizationIweHostInterface'] = VirtualizationIweHostInterface globals()['VirtualizationIweHostRelationship'] = VirtualizationIweHostRelationship globals()['VirtualizationIweNetworkRelationship'] = VirtualizationIweNetworkRelationship class VirtualizationIweHostInterfaceRelationship(ModelComposed): allowed_values = { ('class_id',): { 'MO.MOREF': "mo.MoRef", }, ('link_state',): { 
'UNKNOWN': "unknown", 'UP': "up", 'DOWN': "down", 'DEGRADED': "degraded", }, ('object_type',): { 'AAA.AUDITRECORD': "aaa.AuditRecord", 'AAA.RETENTIONCONFIG': "aaa.RetentionConfig", 'AAA.RETENTIONPOLICY': "aaa.RetentionPolicy", 'ACCESS.POLICY': "access.Policy", 'ADAPTER.CONFIGPOLICY': "adapter.ConfigPolicy", 'ADAPTER.EXTETHINTERFACE': "adapter.ExtEthInterface", 'ADAPTER.HOSTETHINTERFACE': "adapter.HostEthInterface", 'ADAPTER.HOSTFCINTERFACE': "adapter.HostFcInterface", 'ADAPTER.HOSTISCSIINTERFACE': "adapter.HostIscsiInterface", 'ADAPTER.UNIT': "adapter.Unit", 'ADAPTER.UNITEXPANDER': "adapter.UnitExpander", 'APPLIANCE.APPSTATUS': "appliance.AppStatus", 'APPLIANCE.AUTORMAPOLICY': "appliance.AutoRmaPolicy", 'APPLIANCE.BACKUP': "appliance.Backup", 'APPLIANCE.BACKUPPOLICY': "appliance.BackupPolicy", 'APPLIANCE.CERTIFICATESETTING': "appliance.CertificateSetting", 'APPLIANCE.DATAEXPORTPOLICY': "appliance.DataExportPolicy", 'APPLIANCE.DEVICECERTIFICATE': "appliance.DeviceCertificate", 'APPLIANCE.DEVICECLAIM': "appliance.DeviceClaim", 'APPLIANCE.DEVICEUPGRADEPOLICY': "appliance.DeviceUpgradePolicy", 'APPLIANCE.DIAGSETTING': "appliance.DiagSetting", 'APPLIANCE.EXTERNALSYSLOGSETTING': "appliance.ExternalSyslogSetting", 'APPLIANCE.FILEGATEWAY': "appliance.FileGateway", 'APPLIANCE.FILESYSTEMSTATUS': "appliance.FileSystemStatus", 'APPLIANCE.GROUPSTATUS': "appliance.GroupStatus", 'APPLIANCE.IMAGEBUNDLE': "appliance.ImageBundle", 'APPLIANCE.NODEINFO': "appliance.NodeInfo", 'APPLIANCE.NODESTATUS': "appliance.NodeStatus", 'APPLIANCE.RELEASENOTE': "appliance.ReleaseNote", 'APPLIANCE.REMOTEFILEIMPORT': "appliance.RemoteFileImport", 'APPLIANCE.RESTORE': "appliance.Restore", 'APPLIANCE.SETUPINFO': "appliance.SetupInfo", 'APPLIANCE.SYSTEMINFO': "appliance.SystemInfo", 'APPLIANCE.SYSTEMSTATUS': "appliance.SystemStatus", 'APPLIANCE.UPGRADE': "appliance.Upgrade", 'APPLIANCE.UPGRADEPOLICY': "appliance.UpgradePolicy", 'ASSET.CLUSTERMEMBER': "asset.ClusterMember", 'ASSET.DEPLOYMENT': 
"asset.Deployment", 'ASSET.DEPLOYMENTDEVICE': "asset.DeploymentDevice", 'ASSET.DEVICECLAIM': "asset.DeviceClaim", 'ASSET.DEVICECONFIGURATION': "asset.DeviceConfiguration", 'ASSET.DEVICECONNECTORMANAGER': "asset.DeviceConnectorManager", 'ASSET.DEVICECONTRACTINFORMATION': "asset.DeviceContractInformation", 'ASSET.DEVICECONTRACTNOTIFICATION': "asset.DeviceContractNotification", 'ASSET.DEVICEREGISTRATION': "asset.DeviceRegistration", 'ASSET.SUBSCRIPTION': "asset.Subscription", 'ASSET.SUBSCRIPTIONACCOUNT': "asset.SubscriptionAccount", 'ASSET.SUBSCRIPTIONDEVICECONTRACTINFORMATION': "asset.SubscriptionDeviceContractInformation", 'ASSET.TARGET': "asset.Target", 'BIOS.BOOTDEVICE': "bios.BootDevice", 'BIOS.BOOTMODE': "bios.BootMode", 'BIOS.POLICY': "bios.Policy", 'BIOS.SYSTEMBOOTORDER': "bios.SystemBootOrder", 'BIOS.TOKENSETTINGS': "bios.TokenSettings", 'BIOS.UNIT': "bios.Unit", 'BIOS.VFSELECTMEMORYRASCONFIGURATION': "bios.VfSelectMemoryRasConfiguration", 'BOOT.CDDDEVICE': "boot.CddDevice", 'BOOT.DEVICEBOOTMODE': "boot.DeviceBootMode", 'BOOT.DEVICEBOOTSECURITY': "boot.DeviceBootSecurity", 'BOOT.HDDDEVICE': "boot.HddDevice", 'BOOT.ISCSIDEVICE': "boot.IscsiDevice", 'BOOT.NVMEDEVICE': "boot.NvmeDevice", 'BOOT.PCHSTORAGEDEVICE': "boot.PchStorageDevice", 'BOOT.PRECISIONPOLICY': "boot.PrecisionPolicy", 'BOOT.PXEDEVICE': "boot.PxeDevice", 'BOOT.SANDEVICE': "boot.SanDevice", 'BOOT.SDDEVICE': "boot.SdDevice", 'BOOT.UEFISHELLDEVICE': "boot.UefiShellDevice", 'BOOT.USBDEVICE': "boot.UsbDevice", 'BOOT.VMEDIADEVICE': "boot.VmediaDevice", 'BULK.EXPORT': "bulk.Export", 'BULK.EXPORTEDITEM': "bulk.ExportedItem", 'BULK.MOCLONER': "bulk.MoCloner", 'BULK.MOMERGER': "bulk.MoMerger", 'BULK.REQUEST': "bulk.Request", 'BULK.SUBREQUESTOBJ': "bulk.SubRequestObj", 'CAPABILITY.ADAPTERUNITDESCRIPTOR': "capability.AdapterUnitDescriptor", 'CAPABILITY.CATALOG': "capability.Catalog", 'CAPABILITY.CHASSISDESCRIPTOR': "capability.ChassisDescriptor", 'CAPABILITY.CHASSISMANUFACTURINGDEF': 
"capability.ChassisManufacturingDef", 'CAPABILITY.CIMCFIRMWAREDESCRIPTOR': "capability.CimcFirmwareDescriptor", 'CAPABILITY.EQUIPMENTPHYSICALDEF': "capability.EquipmentPhysicalDef", 'CAPABILITY.EQUIPMENTSLOTARRAY': "capability.EquipmentSlotArray", 'CAPABILITY.FANMODULEDESCRIPTOR': "capability.FanModuleDescriptor", 'CAPABILITY.FANMODULEMANUFACTURINGDEF': "capability.FanModuleManufacturingDef", 'CAPABILITY.IOCARDCAPABILITYDEF': "capability.IoCardCapabilityDef", 'CAPABILITY.IOCARDDESCRIPTOR': "capability.IoCardDescriptor", 'CAPABILITY.IOCARDMANUFACTURINGDEF': "capability.IoCardManufacturingDef", 'CAPABILITY.PORTGROUPAGGREGATIONDEF': "capability.PortGroupAggregationDef", 'CAPABILITY.PSUDESCRIPTOR': "capability.PsuDescriptor", 'CAPABILITY.PSUMANUFACTURINGDEF': "capability.PsuManufacturingDef", 'CAPABILITY.SERVERMODELSCAPABILITYDEF': "capability.ServerModelsCapabilityDef", 'CAPABILITY.SERVERSCHEMADESCRIPTOR': "capability.ServerSchemaDescriptor", 'CAPABILITY.SIOCMODULECAPABILITYDEF': "capability.SiocModuleCapabilityDef", 'CAPABILITY.SIOCMODULEDESCRIPTOR': "capability.SiocModuleDescriptor", 'CAPABILITY.SIOCMODULEMANUFACTURINGDEF': "capability.SiocModuleManufacturingDef", 'CAPABILITY.SWITCHCAPABILITY': "capability.SwitchCapability", 'CAPABILITY.SWITCHDESCRIPTOR': "capability.SwitchDescriptor", 'CAPABILITY.SWITCHMANUFACTURINGDEF': "capability.SwitchManufacturingDef", 'CERTIFICATEMANAGEMENT.POLICY': "certificatemanagement.Policy", 'CHASSIS.CONFIGCHANGEDETAIL': "chassis.ConfigChangeDetail", 'CHASSIS.CONFIGIMPORT': "chassis.ConfigImport", 'CHASSIS.CONFIGRESULT': "chassis.ConfigResult", 'CHASSIS.CONFIGRESULTENTRY': "chassis.ConfigResultEntry", 'CHASSIS.IOMPROFILE': "chassis.IomProfile", 'CHASSIS.PROFILE': "chassis.Profile", 'CLOUD.AWSBILLINGUNIT': "cloud.AwsBillingUnit", 'CLOUD.AWSKEYPAIR': "cloud.AwsKeyPair", 'CLOUD.AWSNETWORKINTERFACE': "cloud.AwsNetworkInterface", 'CLOUD.AWSORGANIZATIONALUNIT': "cloud.AwsOrganizationalUnit", 'CLOUD.AWSSECURITYGROUP': "cloud.AwsSecurityGroup", 
'CLOUD.AWSSUBNET': "cloud.AwsSubnet", 'CLOUD.AWSVIRTUALMACHINE': "cloud.AwsVirtualMachine", 'CLOUD.AWSVOLUME': "cloud.AwsVolume", 'CLOUD.AWSVPC': "cloud.AwsVpc", 'CLOUD.COLLECTINVENTORY': "cloud.CollectInventory", 'CLOUD.REGIONS': "cloud.Regions", 'CLOUD.SKUCONTAINERTYPE': "cloud.SkuContainerType", 'CLOUD.SKUDATABASETYPE': "cloud.SkuDatabaseType", 'CLOUD.SKUINSTANCETYPE': "cloud.SkuInstanceType", 'CLOUD.SKUNETWORKTYPE': "cloud.SkuNetworkType", 'CLOUD.SKUREGIONRATECARDS': "cloud.SkuRegionRateCards", 'CLOUD.SKUVOLUMETYPE': "cloud.SkuVolumeType", 'CLOUD.TFCAGENTPOOL': "cloud.TfcAgentpool", 'CLOUD.TFCORGANIZATION': "cloud.TfcOrganization", 'CLOUD.TFCWORKSPACE': "cloud.TfcWorkspace", 'COMM.HTTPPROXYPOLICY': "comm.HttpProxyPolicy", 'COMPUTE.BIOSPOSTPOLICY': "compute.BiosPostPolicy", 'COMPUTE.BLADE': "compute.Blade", 'COMPUTE.BLADEIDENTITY': "compute.BladeIdentity", 'COMPUTE.BOARD': "compute.Board", 'COMPUTE.MAPPING': "compute.Mapping", 'COMPUTE.PHYSICALSUMMARY': "compute.PhysicalSummary", 'COMPUTE.RACKUNIT': "compute.RackUnit", 'COMPUTE.RACKUNITIDENTITY': "compute.RackUnitIdentity", 'COMPUTE.SERVERPOWERPOLICY': "compute.ServerPowerPolicy", 'COMPUTE.SERVERSETTING': "compute.ServerSetting", 'COMPUTE.VMEDIA': "compute.Vmedia", 'COND.ALARM': "cond.Alarm", 'COND.ALARMAGGREGATION': "cond.AlarmAggregation", 'COND.HCLSTATUS': "cond.HclStatus", 'COND.HCLSTATUSDETAIL': "cond.HclStatusDetail", 'COND.HCLSTATUSJOB': "cond.HclStatusJob", 'CONNECTORPACK.CONNECTORPACKUPGRADE': "connectorpack.ConnectorPackUpgrade", 'CONNECTORPACK.UPGRADEIMPACT': "connectorpack.UpgradeImpact", 'CONVERGEDINFRA.HEALTHCHECKDEFINITION': "convergedinfra.HealthCheckDefinition", 'CONVERGEDINFRA.HEALTHCHECKEXECUTION': "convergedinfra.HealthCheckExecution", 'CONVERGEDINFRA.POD': "convergedinfra.Pod", 'CRD.CUSTOMRESOURCE': "crd.CustomResource", 'DEVICECONNECTOR.POLICY': "deviceconnector.Policy", 'EQUIPMENT.CHASSIS': "equipment.Chassis", 'EQUIPMENT.CHASSISIDENTITY': "equipment.ChassisIdentity", 
'EQUIPMENT.CHASSISOPERATION': "equipment.ChassisOperation", 'EQUIPMENT.DEVICESUMMARY': "equipment.DeviceSummary", 'EQUIPMENT.EXPANDERMODULE': "equipment.ExpanderModule", 'EQUIPMENT.FAN': "equipment.Fan", 'EQUIPMENT.FANCONTROL': "equipment.FanControl", 'EQUIPMENT.FANMODULE': "equipment.FanModule", 'EQUIPMENT.FEX': "equipment.Fex", 'EQUIPMENT.FEXIDENTITY': "equipment.FexIdentity", 'EQUIPMENT.FEXOPERATION': "equipment.FexOperation", 'EQUIPMENT.FRU': "equipment.Fru", 'EQUIPMENT.IDENTITYSUMMARY': "equipment.IdentitySummary", 'EQUIPMENT.IOCARD': "equipment.IoCard", 'EQUIPMENT.IOCARDOPERATION': "equipment.IoCardOperation", 'EQUIPMENT.IOEXPANDER': "equipment.IoExpander", 'EQUIPMENT.LOCATORLED': "equipment.LocatorLed", 'EQUIPMENT.PSU': "equipment.Psu", 'EQUIPMENT.PSUCONTROL': "equipment.PsuControl", 'EQUIPMENT.RACKENCLOSURE': "equipment.RackEnclosure", 'EQUIPMENT.RACKENCLOSURESLOT': "equipment.RackEnclosureSlot", 'EQUIPMENT.SHAREDIOMODULE': "equipment.SharedIoModule", 'EQUIPMENT.SWITCHCARD': "equipment.SwitchCard", 'EQUIPMENT.SYSTEMIOCONTROLLER': "equipment.SystemIoController", 'EQUIPMENT.TPM': "equipment.Tpm", 'EQUIPMENT.TRANSCEIVER': "equipment.Transceiver", 'ETHER.HOSTPORT': "ether.HostPort", 'ETHER.NETWORKPORT': "ether.NetworkPort", 'ETHER.PHYSICALPORT': "ether.PhysicalPort", 'ETHER.PORTCHANNEL': "ether.PortChannel", 'EXTERNALSITE.AUTHORIZATION': "externalsite.Authorization", 'FABRIC.APPLIANCEPCROLE': "fabric.AppliancePcRole", 'FABRIC.APPLIANCEROLE': "fabric.ApplianceRole", 'FABRIC.CONFIGCHANGEDETAIL': "fabric.ConfigChangeDetail", 'FABRIC.CONFIGRESULT': "fabric.ConfigResult", 'FABRIC.CONFIGRESULTENTRY': "fabric.ConfigResultEntry", 'FABRIC.ELEMENTIDENTITY': "fabric.ElementIdentity", 'FABRIC.ESTIMATEIMPACT': "fabric.EstimateImpact", 'FABRIC.ETHNETWORKCONTROLPOLICY': "fabric.EthNetworkControlPolicy", 'FABRIC.ETHNETWORKGROUPPOLICY': "fabric.EthNetworkGroupPolicy", 'FABRIC.ETHNETWORKPOLICY': "fabric.EthNetworkPolicy", 'FABRIC.FCNETWORKPOLICY': "fabric.FcNetworkPolicy", 
'FABRIC.FCSTORAGEROLE': "fabric.FcStorageRole", 'FABRIC.FCUPLINKPCROLE': "fabric.FcUplinkPcRole", 'FABRIC.FCUPLINKROLE': "fabric.FcUplinkRole", 'FABRIC.FCOEUPLINKPCROLE': "fabric.FcoeUplinkPcRole", 'FABRIC.FCOEUPLINKROLE': "fabric.FcoeUplinkRole", 'FABRIC.FLOWCONTROLPOLICY': "fabric.FlowControlPolicy", 'FABRIC.LINKAGGREGATIONPOLICY': "fabric.LinkAggregationPolicy", 'FABRIC.LINKCONTROLPOLICY': "fabric.LinkControlPolicy", 'FABRIC.MULTICASTPOLICY': "fabric.MulticastPolicy", 'FABRIC.PCMEMBER': "fabric.PcMember", 'FABRIC.PCOPERATION': "fabric.PcOperation", 'FABRIC.PORTMODE': "fabric.PortMode", 'FABRIC.PORTOPERATION': "fabric.PortOperation", 'FABRIC.PORTPOLICY': "fabric.PortPolicy", 'FABRIC.SERVERROLE': "fabric.ServerRole", 'FABRIC.SWITCHCLUSTERPROFILE': "fabric.SwitchClusterProfile", 'FABRIC.SWITCHCONTROLPOLICY': "fabric.SwitchControlPolicy", 'FABRIC.SWITCHPROFILE': "fabric.SwitchProfile", 'FABRIC.SYSTEMQOSPOLICY': "fabric.SystemQosPolicy", 'FABRIC.UPLINKPCROLE': "fabric.UplinkPcRole", 'FABRIC.UPLINKROLE': "fabric.UplinkRole", 'FABRIC.VLAN': "fabric.Vlan", 'FABRIC.VSAN': "fabric.Vsan", 'FAULT.INSTANCE': "fault.Instance", 'FC.PHYSICALPORT': "fc.PhysicalPort", 'FC.PORTCHANNEL': "fc.PortChannel", 'FCPOOL.FCBLOCK': "fcpool.FcBlock", 'FCPOOL.LEASE': "fcpool.Lease", 'FCPOOL.POOL': "fcpool.Pool", 'FCPOOL.POOLMEMBER': "fcpool.PoolMember", 'FCPOOL.UNIVERSE': "fcpool.Universe", 'FEEDBACK.FEEDBACKPOST': "feedback.FeedbackPost", 'FIRMWARE.BIOSDESCRIPTOR': "firmware.BiosDescriptor", 'FIRMWARE.BOARDCONTROLLERDESCRIPTOR': "firmware.BoardControllerDescriptor", 'FIRMWARE.CHASSISUPGRADE': "firmware.ChassisUpgrade", 'FIRMWARE.CIMCDESCRIPTOR': "firmware.CimcDescriptor", 'FIRMWARE.DIMMDESCRIPTOR': "firmware.DimmDescriptor", 'FIRMWARE.DISTRIBUTABLE': "firmware.Distributable", 'FIRMWARE.DISTRIBUTABLEMETA': "firmware.DistributableMeta", 'FIRMWARE.DRIVEDESCRIPTOR': "firmware.DriveDescriptor", 'FIRMWARE.DRIVERDISTRIBUTABLE': "firmware.DriverDistributable", 'FIRMWARE.EULA': "firmware.Eula", 
'FIRMWARE.FIRMWARESUMMARY': "firmware.FirmwareSummary", 'FIRMWARE.GPUDESCRIPTOR': "firmware.GpuDescriptor", 'FIRMWARE.HBADESCRIPTOR': "firmware.HbaDescriptor", 'FIRMWARE.IOMDESCRIPTOR': "firmware.IomDescriptor", 'FIRMWARE.MSWITCHDESCRIPTOR': "firmware.MswitchDescriptor", 'FIRMWARE.NXOSDESCRIPTOR': "firmware.NxosDescriptor", 'FIRMWARE.PCIEDESCRIPTOR': "firmware.PcieDescriptor", 'FIRMWARE.PSUDESCRIPTOR': "firmware.PsuDescriptor", 'FIRMWARE.RUNNINGFIRMWARE': "firmware.RunningFirmware", 'FIRMWARE.SASEXPANDERDESCRIPTOR': "firmware.SasExpanderDescriptor", 'FIRMWARE.SERVERCONFIGURATIONUTILITYDISTRIBUTABLE': "firmware.ServerConfigurationUtilityDistributable", 'FIRMWARE.STORAGECONTROLLERDESCRIPTOR': "firmware.StorageControllerDescriptor", 'FIRMWARE.SWITCHUPGRADE': "firmware.SwitchUpgrade", 'FIRMWARE.UNSUPPORTEDVERSIONUPGRADE': "firmware.UnsupportedVersionUpgrade", 'FIRMWARE.UPGRADE': "firmware.Upgrade", 'FIRMWARE.UPGRADEIMPACT': "firmware.UpgradeImpact", 'FIRMWARE.UPGRADEIMPACTSTATUS': "firmware.UpgradeImpactStatus", 'FIRMWARE.UPGRADESTATUS': "firmware.UpgradeStatus", 'FORECAST.CATALOG': "forecast.Catalog", 'FORECAST.DEFINITION': "forecast.Definition", 'FORECAST.INSTANCE': "forecast.Instance", 'GRAPHICS.CARD': "graphics.Card", 'GRAPHICS.CONTROLLER': "graphics.Controller", 'HCL.COMPATIBILITYSTATUS': "hcl.CompatibilityStatus", 'HCL.DRIVERIMAGE': "hcl.DriverImage", 'HCL.EXEMPTEDCATALOG': "hcl.ExemptedCatalog", 'HCL.HYPERFLEXSOFTWARECOMPATIBILITYINFO': "hcl.HyperflexSoftwareCompatibilityInfo", 'HCL.OPERATINGSYSTEM': "hcl.OperatingSystem", 'HCL.OPERATINGSYSTEMVENDOR': "hcl.OperatingSystemVendor", 'HCL.SUPPORTEDDRIVERNAME': "hcl.SupportedDriverName", 'HYPERFLEX.ALARM': "hyperflex.Alarm", 'HYPERFLEX.APPCATALOG': "hyperflex.AppCatalog", 'HYPERFLEX.AUTOSUPPORTPOLICY': "hyperflex.AutoSupportPolicy", 'HYPERFLEX.BACKUPCLUSTER': "hyperflex.BackupCluster", 'HYPERFLEX.CAPABILITYINFO': "hyperflex.CapabilityInfo", 'HYPERFLEX.CLUSTER': "hyperflex.Cluster", 'HYPERFLEX.CLUSTERBACKUPPOLICY': 
"hyperflex.ClusterBackupPolicy", 'HYPERFLEX.CLUSTERBACKUPPOLICYDEPLOYMENT': "hyperflex.ClusterBackupPolicyDeployment", 'HYPERFLEX.CLUSTERBACKUPPOLICYINVENTORY': "hyperflex.ClusterBackupPolicyInventory", 'HYPERFLEX.CLUSTERHEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.ClusterHealthCheckExecutionSnapshot", 'HYPERFLEX.CLUSTERNETWORKPOLICY': "hyperflex.ClusterNetworkPolicy", 'HYPERFLEX.CLUSTERPROFILE': "hyperflex.ClusterProfile", 'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICY': "hyperflex.ClusterReplicationNetworkPolicy", 'HYPERFLEX.CLUSTERREPLICATIONNETWORKPOLICYDEPLOYMENT': "hyperflex.ClusterReplicationNetworkPolicyDeployment", 'HYPERFLEX.CLUSTERSTORAGEPOLICY': "hyperflex.ClusterStoragePolicy", 'HYPERFLEX.CONFIGRESULT': "hyperflex.ConfigResult", 'HYPERFLEX.CONFIGRESULTENTRY': "hyperflex.ConfigResultEntry", 'HYPERFLEX.DATAPROTECTIONPEER': "hyperflex.DataProtectionPeer", 'HYPERFLEX.DATASTORESTATISTIC': "hyperflex.DatastoreStatistic", 'HYPERFLEX.DEVICEPACKAGEDOWNLOADSTATE': "hyperflex.DevicePackageDownloadState", 'HYPERFLEX.DRIVE': "hyperflex.Drive", 'HYPERFLEX.EXTFCSTORAGEPOLICY': "hyperflex.ExtFcStoragePolicy", 'HYPERFLEX.EXTISCSISTORAGEPOLICY': "hyperflex.ExtIscsiStoragePolicy", 'HYPERFLEX.FEATURELIMITEXTERNAL': "hyperflex.FeatureLimitExternal", 'HYPERFLEX.FEATURELIMITINTERNAL': "hyperflex.FeatureLimitInternal", 'HYPERFLEX.HEALTH': "hyperflex.Health", 'HYPERFLEX.HEALTHCHECKDEFINITION': "hyperflex.HealthCheckDefinition", 'HYPERFLEX.HEALTHCHECKEXECUTION': "hyperflex.HealthCheckExecution", 'HYPERFLEX.HEALTHCHECKEXECUTIONSNAPSHOT': "hyperflex.HealthCheckExecutionSnapshot", 'HYPERFLEX.HEALTHCHECKPACKAGECHECKSUM': "hyperflex.HealthCheckPackageChecksum", 'HYPERFLEX.HXDPVERSION': "hyperflex.HxdpVersion", 'HYPERFLEX.LICENSE': "hyperflex.License", 'HYPERFLEX.LOCALCREDENTIALPOLICY': "hyperflex.LocalCredentialPolicy", 'HYPERFLEX.NODE': "hyperflex.Node", 'HYPERFLEX.NODECONFIGPOLICY': "hyperflex.NodeConfigPolicy", 'HYPERFLEX.NODEPROFILE': "hyperflex.NodeProfile", 
'HYPERFLEX.PROTECTEDCLUSTER': "hyperflex.ProtectedCluster", 'HYPERFLEX.PROXYSETTINGPOLICY': "hyperflex.ProxySettingPolicy", 'HYPERFLEX.SERVERFIRMWAREVERSION': "hyperflex.ServerFirmwareVersion", 'HYPERFLEX.SERVERFIRMWAREVERSIONENTRY': "hyperflex.ServerFirmwareVersionEntry", 'HYPERFLEX.SERVERMODEL': "hyperflex.ServerModel", 'HYPERFLEX.SERVICEAUTHTOKEN': "hyperflex.ServiceAuthToken", 'HYPERFLEX.SOFTWAREDISTRIBUTIONCOMPONENT': "hyperflex.SoftwareDistributionComponent", 'HYPERFLEX.SOFTWAREDISTRIBUTIONENTRY': "hyperflex.SoftwareDistributionEntry", 'HYPERFLEX.SOFTWAREDISTRIBUTIONVERSION': "hyperflex.SoftwareDistributionVersion", 'HYPERFLEX.SOFTWAREVERSIONPOLICY': "hyperflex.SoftwareVersionPolicy", 'HYPERFLEX.STORAGECONTAINER': "hyperflex.StorageContainer", 'HYPERFLEX.SYSCONFIGPOLICY': "hyperflex.SysConfigPolicy", 'HYPERFLEX.UCSMCONFIGPOLICY': "hyperflex.UcsmConfigPolicy", 'HYPERFLEX.VCENTERCONFIGPOLICY': "hyperflex.VcenterConfigPolicy", 'HYPERFLEX.VMBACKUPINFO': "hyperflex.VmBackupInfo", 'HYPERFLEX.VMIMPORTOPERATION': "hyperflex.VmImportOperation", 'HYPERFLEX.VMRESTOREOPERATION': "hyperflex.VmRestoreOperation", 'HYPERFLEX.VMSNAPSHOTINFO': "hyperflex.VmSnapshotInfo", 'HYPERFLEX.VOLUME': "hyperflex.Volume", 'HYPERFLEX.WITNESSCONFIGURATION': "hyperflex.WitnessConfiguration", 'IAAS.CONNECTORPACK': "iaas.ConnectorPack", 'IAAS.DEVICESTATUS': "iaas.DeviceStatus", 'IAAS.DIAGNOSTICMESSAGES': "iaas.DiagnosticMessages", 'IAAS.LICENSEINFO': "iaas.LicenseInfo", 'IAAS.MOSTRUNTASKS': "iaas.MostRunTasks", 'IAAS.SERVICEREQUEST': "iaas.ServiceRequest", 'IAAS.UCSDINFO': "iaas.UcsdInfo", 'IAAS.UCSDMANAGEDINFRA': "iaas.UcsdManagedInfra", 'IAAS.UCSDMESSAGES': "iaas.UcsdMessages", 'IAM.ACCOUNT': "iam.Account", 'IAM.ACCOUNTEXPERIENCE': "iam.AccountExperience", 'IAM.APIKEY': "iam.ApiKey", 'IAM.APPREGISTRATION': "iam.AppRegistration", 'IAM.BANNERMESSAGE': "iam.BannerMessage", 'IAM.CERTIFICATE': "iam.Certificate", 'IAM.CERTIFICATEREQUEST': "iam.CertificateRequest", 'IAM.DOMAINGROUP': 
"iam.DomainGroup", 'IAM.ENDPOINTPRIVILEGE': "iam.EndPointPrivilege", 'IAM.ENDPOINTROLE': "iam.EndPointRole", 'IAM.ENDPOINTUSER': "iam.EndPointUser", 'IAM.ENDPOINTUSERPOLICY': "iam.EndPointUserPolicy", 'IAM.ENDPOINTUSERROLE': "iam.EndPointUserRole", 'IAM.IDP': "iam.Idp", 'IAM.IDPREFERENCE': "iam.IdpReference", 'IAM.IPACCESSMANAGEMENT': "iam.IpAccessManagement", 'IAM.IPADDRESS': "iam.IpAddress", 'IAM.LDAPGROUP': "iam.LdapGroup", 'IAM.LDAPPOLICY': "iam.LdapPolicy", 'IAM.LDAPPROVIDER': "iam.LdapProvider", 'IAM.LOCALUSERPASSWORD': "iam.LocalUserPassword", 'IAM.LOCALUSERPASSWORDPOLICY': "iam.LocalUserPasswordPolicy", 'IAM.OAUTHTOKEN': "iam.OAuthToken", 'IAM.PERMISSION': "iam.Permission", 'IAM.PRIVATEKEYSPEC': "iam.PrivateKeySpec", 'IAM.PRIVILEGE': "iam.Privilege", 'IAM.PRIVILEGESET': "iam.PrivilegeSet", 'IAM.QUALIFIER': "iam.Qualifier", 'IAM.RESOURCELIMITS': "iam.ResourceLimits", 'IAM.RESOURCEPERMISSION': "iam.ResourcePermission", 'IAM.RESOURCEROLES': "iam.ResourceRoles", 'IAM.ROLE': "iam.Role", 'IAM.SECURITYHOLDER': "iam.SecurityHolder", 'IAM.SERVICEPROVIDER': "iam.ServiceProvider", 'IAM.SESSION': "iam.Session", 'IAM.SESSIONLIMITS': "iam.SessionLimits", 'IAM.SYSTEM': "iam.System", 'IAM.TRUSTPOINT': "iam.TrustPoint", 'IAM.USER': "iam.User", 'IAM.USERGROUP': "iam.UserGroup", 'IAM.USERPREFERENCE': "iam.UserPreference", 'INVENTORY.DEVICEINFO': "inventory.DeviceInfo", 'INVENTORY.DNMOBINDING': "inventory.DnMoBinding", 'INVENTORY.GENERICINVENTORY': "inventory.GenericInventory", 'INVENTORY.GENERICINVENTORYHOLDER': "inventory.GenericInventoryHolder", 'INVENTORY.REQUEST': "inventory.Request", 'IPMIOVERLAN.POLICY': "ipmioverlan.Policy", 'IPPOOL.BLOCKLEASE': "ippool.BlockLease", 'IPPOOL.IPLEASE': "ippool.IpLease", 'IPPOOL.POOL': "ippool.Pool", 'IPPOOL.POOLMEMBER': "ippool.PoolMember", 'IPPOOL.SHADOWBLOCK': "ippool.ShadowBlock", 'IPPOOL.SHADOWPOOL': "ippool.ShadowPool", 'IPPOOL.UNIVERSE': "ippool.Universe", 'IQNPOOL.BLOCK': "iqnpool.Block", 'IQNPOOL.LEASE': "iqnpool.Lease", 
'IQNPOOL.POOL': "iqnpool.Pool", 'IQNPOOL.POOLMEMBER': "iqnpool.PoolMember", 'IQNPOOL.UNIVERSE': "iqnpool.Universe", 'IWOTENANT.TENANTSTATUS': "iwotenant.TenantStatus", 'KUBERNETES.ACICNIAPIC': "kubernetes.AciCniApic", 'KUBERNETES.ACICNIPROFILE': "kubernetes.AciCniProfile", 'KUBERNETES.ACICNITENANTCLUSTERALLOCATION': "kubernetes.AciCniTenantClusterAllocation", 'KUBERNETES.ADDONDEFINITION': "kubernetes.AddonDefinition", 'KUBERNETES.ADDONPOLICY': "kubernetes.AddonPolicy", 'KUBERNETES.ADDONREPOSITORY': "kubernetes.AddonRepository", 'KUBERNETES.BAREMETALNODEPROFILE': "kubernetes.BaremetalNodeProfile", 'KUBERNETES.CATALOG': "kubernetes.Catalog", 'KUBERNETES.CLUSTER': "kubernetes.Cluster", 'KUBERNETES.CLUSTERADDONPROFILE': "kubernetes.ClusterAddonProfile", 'KUBERNETES.CLUSTERPROFILE': "kubernetes.ClusterProfile", 'KUBERNETES.CONFIGRESULT': "kubernetes.ConfigResult", 'KUBERNETES.CONFIGRESULTENTRY': "kubernetes.ConfigResultEntry", 'KUBERNETES.CONTAINERRUNTIMEPOLICY': "kubernetes.ContainerRuntimePolicy", 'KUBERNETES.DAEMONSET': "kubernetes.DaemonSet", 'KUBERNETES.DEPLOYMENT': "kubernetes.Deployment", 'KUBERNETES.INGRESS': "kubernetes.Ingress", 'KUBERNETES.NETWORKPOLICY': "kubernetes.NetworkPolicy", 'KUBERNETES.NODE': "kubernetes.Node", 'KUBERNETES.NODEGROUPPROFILE': "kubernetes.NodeGroupProfile", 'KUBERNETES.POD': "kubernetes.Pod", 'KUBERNETES.SERVICE': "kubernetes.Service", 'KUBERNETES.STATEFULSET': "kubernetes.StatefulSet", 'KUBERNETES.SYSCONFIGPOLICY': "kubernetes.SysConfigPolicy", 'KUBERNETES.TRUSTEDREGISTRIESPOLICY': "kubernetes.TrustedRegistriesPolicy", 'KUBERNETES.VERSION': "kubernetes.Version", 'KUBERNETES.VERSIONPOLICY': "kubernetes.VersionPolicy", 'KUBERNETES.VIRTUALMACHINEINFRACONFIGPOLICY': "kubernetes.VirtualMachineInfraConfigPolicy", 'KUBERNETES.VIRTUALMACHINEINFRASTRUCTUREPROVIDER': "kubernetes.VirtualMachineInfrastructureProvider", 'KUBERNETES.VIRTUALMACHINEINSTANCETYPE': "kubernetes.VirtualMachineInstanceType", 'KUBERNETES.VIRTUALMACHINENODEPROFILE': 
"kubernetes.VirtualMachineNodeProfile", 'KVM.POLICY': "kvm.Policy", 'KVM.SESSION': "kvm.Session", 'KVM.TUNNEL': "kvm.Tunnel", 'LICENSE.ACCOUNTLICENSEDATA': "license.AccountLicenseData", 'LICENSE.CUSTOMEROP': "license.CustomerOp", 'LICENSE.IKSCUSTOMEROP': "license.IksCustomerOp", 'LICENSE.IKSLICENSECOUNT': "license.IksLicenseCount", 'LICENSE.IWOCUSTOMEROP': "license.IwoCustomerOp", 'LICENSE.IWOLICENSECOUNT': "license.IwoLicenseCount", 'LICENSE.LICENSEINFO': "license.LicenseInfo", 'LICENSE.LICENSERESERVATIONOP': "license.LicenseReservationOp", 'LICENSE.SMARTLICENSETOKEN': "license.SmartlicenseToken", 'LS.SERVICEPROFILE': "ls.ServiceProfile", 'MACPOOL.IDBLOCK': "macpool.IdBlock", 'MACPOOL.LEASE': "macpool.Lease", 'MACPOOL.POOL': "macpool.Pool", 'MACPOOL.POOLMEMBER': "macpool.PoolMember", 'MACPOOL.UNIVERSE': "macpool.Universe", 'MANAGEMENT.CONTROLLER': "management.Controller", 'MANAGEMENT.ENTITY': "management.Entity", 'MANAGEMENT.INTERFACE': "management.Interface", 'MEMORY.ARRAY': "memory.Array", 'MEMORY.PERSISTENTMEMORYCONFIGRESULT': "memory.PersistentMemoryConfigResult", 'MEMORY.PERSISTENTMEMORYCONFIGURATION': "memory.PersistentMemoryConfiguration", 'MEMORY.PERSISTENTMEMORYNAMESPACE': "memory.PersistentMemoryNamespace", 'MEMORY.PERSISTENTMEMORYNAMESPACECONFIGRESULT': "memory.PersistentMemoryNamespaceConfigResult", 'MEMORY.PERSISTENTMEMORYPOLICY': "memory.PersistentMemoryPolicy", 'MEMORY.PERSISTENTMEMORYREGION': "memory.PersistentMemoryRegion", 'MEMORY.PERSISTENTMEMORYUNIT': "memory.PersistentMemoryUnit", 'MEMORY.UNIT': "memory.Unit", 'META.DEFINITION': "meta.Definition", 'NETWORK.ELEMENT': "network.Element", 'NETWORK.ELEMENTSUMMARY': "network.ElementSummary", 'NETWORK.FCZONEINFO': "network.FcZoneInfo", 'NETWORK.VLANPORTINFO': "network.VlanPortInfo", 'NETWORKCONFIG.POLICY': "networkconfig.Policy", 'NIAAPI.APICCCOPOST': "niaapi.ApicCcoPost", 'NIAAPI.APICFIELDNOTICE': "niaapi.ApicFieldNotice", 'NIAAPI.APICHWEOL': "niaapi.ApicHweol", 'NIAAPI.APICLATESTMAINTAINEDRELEASE': 
"niaapi.ApicLatestMaintainedRelease", 'NIAAPI.APICRELEASERECOMMEND': "niaapi.ApicReleaseRecommend", 'NIAAPI.APICSWEOL': "niaapi.ApicSweol", 'NIAAPI.DCNMCCOPOST': "niaapi.DcnmCcoPost", 'NIAAPI.DCNMFIELDNOTICE': "niaapi.DcnmFieldNotice", 'NIAAPI.DCNMHWEOL': "niaapi.DcnmHweol", 'NIAAPI.DCNMLATESTMAINTAINEDRELEASE': "niaapi.DcnmLatestMaintainedRelease", 'NIAAPI.DCNMRELEASERECOMMEND': "niaapi.DcnmReleaseRecommend", 'NIAAPI.DCNMSWEOL': "niaapi.DcnmSweol", 'NIAAPI.FILEDOWNLOADER': "niaapi.FileDownloader", 'NIAAPI.NIAMETADATA': "niaapi.NiaMetadata", 'NIAAPI.NIBFILEDOWNLOADER': "niaapi.NibFileDownloader", 'NIAAPI.NIBMETADATA': "niaapi.NibMetadata", 'NIAAPI.VERSIONREGEX': "niaapi.VersionRegex", 'NIATELEMETRY.AAALDAPPROVIDERDETAILS': "niatelemetry.AaaLdapProviderDetails", 'NIATELEMETRY.AAARADIUSPROVIDERDETAILS': "niatelemetry.AaaRadiusProviderDetails", 'NIATELEMETRY.AAATACACSPROVIDERDETAILS': "niatelemetry.AaaTacacsProviderDetails", 'NIATELEMETRY.APICAPPPLUGINDETAILS': "niatelemetry.ApicAppPluginDetails", 'NIATELEMETRY.APICCOREFILEDETAILS': "niatelemetry.ApicCoreFileDetails", 'NIATELEMETRY.APICDBGEXPRSEXPORTDEST': "niatelemetry.ApicDbgexpRsExportDest", 'NIATELEMETRY.APICDBGEXPRSTSSCHEDULER': "niatelemetry.ApicDbgexpRsTsScheduler", 'NIATELEMETRY.APICFANDETAILS': "niatelemetry.ApicFanDetails", 'NIATELEMETRY.APICFEXDETAILS': "niatelemetry.ApicFexDetails", 'NIATELEMETRY.APICFLASHDETAILS': "niatelemetry.ApicFlashDetails", 'NIATELEMETRY.APICNTPAUTH': "niatelemetry.ApicNtpAuth", 'NIATELEMETRY.APICPSUDETAILS': "niatelemetry.ApicPsuDetails", 'NIATELEMETRY.APICREALMDETAILS': "niatelemetry.ApicRealmDetails", 'NIATELEMETRY.APICSNMPCLIENTGRPDETAILS': "niatelemetry.ApicSnmpClientGrpDetails", 'NIATELEMETRY.APICSNMPCOMMUNITYACCESSDETAILS': "niatelemetry.ApicSnmpCommunityAccessDetails", 'NIATELEMETRY.APICSNMPCOMMUNITYDETAILS': "niatelemetry.ApicSnmpCommunityDetails", 'NIATELEMETRY.APICSNMPTRAPDETAILS': "niatelemetry.ApicSnmpTrapDetails", 'NIATELEMETRY.APICSNMPTRAPFWDSERVERDETAILS': 
"niatelemetry.ApicSnmpTrapFwdServerDetails", 'NIATELEMETRY.APICSNMPVERSIONTHREEDETAILS': "niatelemetry.ApicSnmpVersionThreeDetails", 'NIATELEMETRY.APICSYSLOGGRP': "niatelemetry.ApicSysLogGrp", 'NIATELEMETRY.APICSYSLOGSRC': "niatelemetry.ApicSysLogSrc", 'NIATELEMETRY.APICTRANSCEIVERDETAILS': "niatelemetry.ApicTransceiverDetails", 'NIATELEMETRY.APICUIPAGECOUNTS': "niatelemetry.ApicUiPageCounts", 'NIATELEMETRY.APPDETAILS': "niatelemetry.AppDetails", 'NIATELEMETRY.COMMONPOLICIES': "niatelemetry.CommonPolicies", 'NIATELEMETRY.DCNMFANDETAILS': "niatelemetry.DcnmFanDetails", 'NIATELEMETRY.DCNMFEXDETAILS': "niatelemetry.DcnmFexDetails", 'NIATELEMETRY.DCNMMODULEDETAILS': "niatelemetry.DcnmModuleDetails", 'NIATELEMETRY.DCNMPSUDETAILS': "niatelemetry.DcnmPsuDetails", 'NIATELEMETRY.DCNMTRANSCEIVERDETAILS': "niatelemetry.DcnmTransceiverDetails", 'NIATELEMETRY.EPG': "niatelemetry.Epg", 'NIATELEMETRY.FABRICMODULEDETAILS': "niatelemetry.FabricModuleDetails", 'NIATELEMETRY.FABRICPODPROFILE': "niatelemetry.FabricPodProfile", 'NIATELEMETRY.FABRICPODSS': "niatelemetry.FabricPodSs", 'NIATELEMETRY.FAULT': "niatelemetry.Fault", 'NIATELEMETRY.HTTPSACLCONTRACTDETAILS': "niatelemetry.HttpsAclContractDetails", 'NIATELEMETRY.HTTPSACLCONTRACTFILTERMAP': "niatelemetry.HttpsAclContractFilterMap", 'NIATELEMETRY.HTTPSACLEPGCONTRACTMAP': "niatelemetry.HttpsAclEpgContractMap", 'NIATELEMETRY.HTTPSACLEPGDETAILS': "niatelemetry.HttpsAclEpgDetails", 'NIATELEMETRY.HTTPSACLFILTERDETAILS': "niatelemetry.HttpsAclFilterDetails", 'NIATELEMETRY.LC': "niatelemetry.Lc", 'NIATELEMETRY.MSOCONTRACTDETAILS': "niatelemetry.MsoContractDetails", 'NIATELEMETRY.MSOEPGDETAILS': "niatelemetry.MsoEpgDetails", 'NIATELEMETRY.MSOSCHEMADETAILS': "niatelemetry.MsoSchemaDetails", 'NIATELEMETRY.MSOSITEDETAILS': "niatelemetry.MsoSiteDetails", 'NIATELEMETRY.MSOTENANTDETAILS': "niatelemetry.MsoTenantDetails", 'NIATELEMETRY.NEXUSDASHBOARDCONTROLLERDETAILS': "niatelemetry.NexusDashboardControllerDetails", 
'NIATELEMETRY.NEXUSDASHBOARDDETAILS': "niatelemetry.NexusDashboardDetails", 'NIATELEMETRY.NEXUSDASHBOARDMEMORYDETAILS': "niatelemetry.NexusDashboardMemoryDetails", 'NIATELEMETRY.NEXUSDASHBOARDS': "niatelemetry.NexusDashboards", 'NIATELEMETRY.NIAFEATUREUSAGE': "niatelemetry.NiaFeatureUsage", 'NIATELEMETRY.NIAINVENTORY': "niatelemetry.NiaInventory", 'NIATELEMETRY.NIAINVENTORYDCNM': "niatelemetry.NiaInventoryDcnm", 'NIATELEMETRY.NIAINVENTORYFABRIC': "niatelemetry.NiaInventoryFabric", 'NIATELEMETRY.NIALICENSESTATE': "niatelemetry.NiaLicenseState", 'NIATELEMETRY.PASSWORDSTRENGTHCHECK': "niatelemetry.PasswordStrengthCheck", 'NIATELEMETRY.PODCOMMPOLICIES': "niatelemetry.PodCommPolicies", 'NIATELEMETRY.PODSNMPPOLICIES': "niatelemetry.PodSnmpPolicies", 'NIATELEMETRY.PODTIMESERVERPOLICIES': "niatelemetry.PodTimeServerPolicies", 'NIATELEMETRY.SITEINVENTORY': "niatelemetry.SiteInventory", 'NIATELEMETRY.SNMPSRC': "niatelemetry.SnmpSrc", 'NIATELEMETRY.SSHVERSIONTWO': "niatelemetry.SshVersionTwo", 'NIATELEMETRY.SUPERVISORMODULEDETAILS': "niatelemetry.SupervisorModuleDetails", 'NIATELEMETRY.SYSLOGREMOTEDEST': "niatelemetry.SyslogRemoteDest", 'NIATELEMETRY.SYSLOGSYSMSG': "niatelemetry.SyslogSysMsg", 'NIATELEMETRY.SYSLOGSYSMSGFACFILTER': "niatelemetry.SyslogSysMsgFacFilter", 'NIATELEMETRY.SYSTEMCONTROLLERDETAILS': "niatelemetry.SystemControllerDetails", 'NIATELEMETRY.TENANT': "niatelemetry.Tenant", 'NOTIFICATION.ACCOUNTSUBSCRIPTION': "notification.AccountSubscription", 'NTP.POLICY': "ntp.Policy", 'OAUTH.ACCESSTOKEN': "oauth.AccessToken", 'OAUTH.AUTHORIZATION': "oauth.Authorization", 'OPRS.DEPLOYMENT': "oprs.Deployment", 'OPRS.SYNCTARGETLISTMESSAGE': "oprs.SyncTargetListMessage", 'ORGANIZATION.ORGANIZATION': "organization.Organization", 'OS.BULKINSTALLINFO': "os.BulkInstallInfo", 'OS.CATALOG': "os.Catalog", 'OS.CONFIGURATIONFILE': "os.ConfigurationFile", 'OS.DISTRIBUTION': "os.Distribution", 'OS.INSTALL': "os.Install", 'OS.OSSUPPORT': "os.OsSupport", 'OS.SUPPORTEDVERSION': 
"os.SupportedVersion", 'OS.TEMPLATEFILE': "os.TemplateFile", 'OS.VALIDINSTALLTARGET': "os.ValidInstallTarget", 'PCI.COPROCESSORCARD': "pci.CoprocessorCard", 'PCI.DEVICE': "pci.Device", 'PCI.LINK': "pci.Link", 'PCI.SWITCH': "pci.Switch", 'PORT.GROUP': "port.Group", 'PORT.MACBINDING': "port.MacBinding", 'PORT.SUBGROUP': "port.SubGroup", 'POWER.CONTROLSTATE': "power.ControlState", 'POWER.POLICY': "power.Policy", 'PROCESSOR.UNIT': "processor.Unit", 'RACK.UNITPERSONALITY': "rack.UnitPersonality", 'RECOMMENDATION.CAPACITYRUNWAY': "recommendation.CapacityRunway", 'RECOMMENDATION.PHYSICALITEM': "recommendation.PhysicalItem", 'RECOVERY.BACKUPCONFIGPOLICY': "recovery.BackupConfigPolicy", 'RECOVERY.BACKUPPROFILE': "recovery.BackupProfile", 'RECOVERY.CONFIGRESULT': "recovery.ConfigResult", 'RECOVERY.CONFIGRESULTENTRY': "recovery.ConfigResultEntry", 'RECOVERY.ONDEMANDBACKUP': "recovery.OnDemandBackup", 'RECOVERY.RESTORE': "recovery.Restore", 'RECOVERY.SCHEDULECONFIGPOLICY': "recovery.ScheduleConfigPolicy", 'RESOURCE.GROUP': "resource.Group", 'RESOURCE.GROUPMEMBER': "resource.GroupMember", 'RESOURCE.LICENSERESOURCECOUNT': "resource.LicenseResourceCount", 'RESOURCE.MEMBERSHIP': "resource.Membership", 'RESOURCE.MEMBERSHIPHOLDER': "resource.MembershipHolder", 'RESOURCE.RESERVATION': "resource.Reservation", 'RESOURCEPOOL.LEASE': "resourcepool.Lease", 'RESOURCEPOOL.LEASERESOURCE': "resourcepool.LeaseResource", 'RESOURCEPOOL.POOL': "resourcepool.Pool", 'RESOURCEPOOL.POOLMEMBER': "resourcepool.PoolMember", 'RESOURCEPOOL.UNIVERSE': "resourcepool.Universe", 'RPROXY.REVERSEPROXY': "rproxy.ReverseProxy", 'SDCARD.POLICY': "sdcard.Policy", 'SDWAN.PROFILE': "sdwan.Profile", 'SDWAN.ROUTERNODE': "sdwan.RouterNode", 'SDWAN.ROUTERPOLICY': "sdwan.RouterPolicy", 'SDWAN.VMANAGEACCOUNTPOLICY': "sdwan.VmanageAccountPolicy", 'SEARCH.SEARCHITEM': "search.SearchItem", 'SEARCH.TAGITEM': "search.TagItem", 'SECURITY.UNIT': "security.Unit", 'SERVER.CONFIGCHANGEDETAIL': "server.ConfigChangeDetail", 
'SERVER.CONFIGIMPORT': "server.ConfigImport", 'SERVER.CONFIGRESULT': "server.ConfigResult", 'SERVER.CONFIGRESULTENTRY': "server.ConfigResultEntry", 'SERVER.PROFILE': "server.Profile", 'SERVER.PROFILETEMPLATE': "server.ProfileTemplate", 'SMTP.POLICY': "smtp.Policy", 'SNMP.POLICY': "snmp.Policy", 'SOFTWARE.APPLIANCEDISTRIBUTABLE': "software.ApplianceDistributable", 'SOFTWARE.DOWNLOADHISTORY': "software.DownloadHistory", 'SOFTWARE.HCLMETA': "software.HclMeta", 'SOFTWARE.HYPERFLEXBUNDLEDISTRIBUTABLE': "software.HyperflexBundleDistributable", 'SOFTWARE.HYPERFLEXDISTRIBUTABLE': "software.HyperflexDistributable", 'SOFTWARE.RELEASEMETA': "software.ReleaseMeta", 'SOFTWARE.SOLUTIONDISTRIBUTABLE': "software.SolutionDistributable", 'SOFTWARE.UCSDBUNDLEDISTRIBUTABLE': "software.UcsdBundleDistributable", 'SOFTWARE.UCSDDISTRIBUTABLE': "software.UcsdDistributable", 'SOFTWAREREPOSITORY.AUTHORIZATION': "softwarerepository.Authorization", 'SOFTWAREREPOSITORY.CACHEDIMAGE': "softwarerepository.CachedImage", 'SOFTWAREREPOSITORY.CATALOG': "softwarerepository.Catalog", 'SOFTWAREREPOSITORY.CATEGORYMAPPER': "softwarerepository.CategoryMapper", 'SOFTWAREREPOSITORY.CATEGORYMAPPERMODEL': "softwarerepository.CategoryMapperModel", 'SOFTWAREREPOSITORY.CATEGORYSUPPORTCONSTRAINT': "softwarerepository.CategorySupportConstraint", 'SOFTWAREREPOSITORY.DOWNLOADSPEC': "softwarerepository.DownloadSpec", 'SOFTWAREREPOSITORY.OPERATINGSYSTEMFILE': "softwarerepository.OperatingSystemFile", 'SOFTWAREREPOSITORY.RELEASE': "softwarerepository.Release", 'SOL.POLICY': "sol.Policy", 'SSH.POLICY': "ssh.Policy", 'STORAGE.CONTROLLER': "storage.Controller", 'STORAGE.DISKGROUP': "storage.DiskGroup", 'STORAGE.DISKSLOT': "storage.DiskSlot", 'STORAGE.DRIVEGROUP': "storage.DriveGroup", 'STORAGE.ENCLOSURE': "storage.Enclosure", 'STORAGE.ENCLOSUREDISK': "storage.EnclosureDisk", 'STORAGE.ENCLOSUREDISKSLOTEP': "storage.EnclosureDiskSlotEp", 'STORAGE.FLEXFLASHCONTROLLER': "storage.FlexFlashController", 
'STORAGE.FLEXFLASHCONTROLLERPROPS': "storage.FlexFlashControllerProps", 'STORAGE.FLEXFLASHPHYSICALDRIVE': "storage.FlexFlashPhysicalDrive", 'STORAGE.FLEXFLASHVIRTUALDRIVE': "storage.FlexFlashVirtualDrive", 'STORAGE.FLEXUTILCONTROLLER': "storage.FlexUtilController", 'STORAGE.FLEXUTILPHYSICALDRIVE': "storage.FlexUtilPhysicalDrive", 'STORAGE.FLEXUTILVIRTUALDRIVE': "storage.FlexUtilVirtualDrive", 'STORAGE.HITACHIARRAY': "storage.HitachiArray", 'STORAGE.HITACHICONTROLLER': "storage.HitachiController", 'STORAGE.HITACHIDISK': "storage.HitachiDisk", 'STORAGE.HITACHIHOST': "storage.HitachiHost", 'STORAGE.HITACHIHOSTLUN': "storage.HitachiHostLun", 'STORAGE.HITACHIPARITYGROUP': "storage.HitachiParityGroup", 'STORAGE.HITACHIPOOL': "storage.HitachiPool", 'STORAGE.HITACHIPORT': "storage.HitachiPort", 'STORAGE.HITACHIVOLUME': "storage.HitachiVolume", 'STORAGE.HYPERFLEXSTORAGECONTAINER': "storage.HyperFlexStorageContainer", 'STORAGE.HYPERFLEXVOLUME': "storage.HyperFlexVolume", 'STORAGE.ITEM': "storage.Item", 'STORAGE.NETAPPAGGREGATE': "storage.NetAppAggregate", 'STORAGE.NETAPPBASEDISK': "storage.NetAppBaseDisk", 'STORAGE.NETAPPCLUSTER': "storage.NetAppCluster", 'STORAGE.NETAPPETHERNETPORT': "storage.NetAppEthernetPort", 'STORAGE.NETAPPEXPORTPOLICY': "storage.NetAppExportPolicy", 'STORAGE.NETAPPFCINTERFACE': "storage.NetAppFcInterface", 'STORAGE.NETAPPFCPORT': "storage.NetAppFcPort", 'STORAGE.NETAPPINITIATORGROUP': "storage.NetAppInitiatorGroup", 'STORAGE.NETAPPIPINTERFACE': "storage.NetAppIpInterface", 'STORAGE.NETAPPLICENSE': "storage.NetAppLicense", 'STORAGE.NETAPPLUN': "storage.NetAppLun", 'STORAGE.NETAPPLUNMAP': "storage.NetAppLunMap", 'STORAGE.NETAPPNODE': "storage.NetAppNode", 'STORAGE.NETAPPNTPSERVER': "storage.NetAppNtpServer", 'STORAGE.NETAPPSENSOR': "storage.NetAppSensor", 'STORAGE.NETAPPSTORAGEVM': "storage.NetAppStorageVm", 'STORAGE.NETAPPVOLUME': "storage.NetAppVolume", 'STORAGE.NETAPPVOLUMESNAPSHOT': "storage.NetAppVolumeSnapshot", 'STORAGE.PHYSICALDISK': 
"storage.PhysicalDisk", 'STORAGE.PHYSICALDISKEXTENSION': "storage.PhysicalDiskExtension", 'STORAGE.PHYSICALDISKUSAGE': "storage.PhysicalDiskUsage", 'STORAGE.PUREARRAY': "storage.PureArray", 'STORAGE.PURECONTROLLER': "storage.PureController", 'STORAGE.PUREDISK': "storage.PureDisk", 'STORAGE.PUREHOST': "storage.PureHost", 'STORAGE.PUREHOSTGROUP': "storage.PureHostGroup", 'STORAGE.PUREHOSTLUN': "storage.PureHostLun", 'STORAGE.PUREPORT': "storage.PurePort", 'STORAGE.PUREPROTECTIONGROUP': "storage.PureProtectionGroup", 'STORAGE.PUREPROTECTIONGROUPSNAPSHOT': "storage.PureProtectionGroupSnapshot", 'STORAGE.PUREREPLICATIONSCHEDULE': "storage.PureReplicationSchedule", 'STORAGE.PURESNAPSHOTSCHEDULE': "storage.PureSnapshotSchedule", 'STORAGE.PUREVOLUME': "storage.PureVolume", 'STORAGE.PUREVOLUMESNAPSHOT': "storage.PureVolumeSnapshot", 'STORAGE.SASEXPANDER': "storage.SasExpander", 'STORAGE.SASPORT': "storage.SasPort", 'STORAGE.SPAN': "storage.Span", 'STORAGE.STORAGEPOLICY': "storage.StoragePolicy", 'STORAGE.VDMEMBEREP': "storage.VdMemberEp", 'STORAGE.VIRTUALDRIVE': "storage.VirtualDrive", 'STORAGE.VIRTUALDRIVECONTAINER': "storage.VirtualDriveContainer", 'STORAGE.VIRTUALDRIVEEXTENSION': "storage.VirtualDriveExtension", 'STORAGE.VIRTUALDRIVEIDENTITY': "storage.VirtualDriveIdentity", 'SYSLOG.POLICY': "syslog.Policy", 'TAM.ADVISORYCOUNT': "tam.AdvisoryCount", 'TAM.ADVISORYDEFINITION': "tam.AdvisoryDefinition", 'TAM.ADVISORYINFO': "tam.AdvisoryInfo", 'TAM.ADVISORYINSTANCE': "tam.AdvisoryInstance", 'TAM.SECURITYADVISORY': "tam.SecurityAdvisory", 'TASK.HITACHISCOPEDINVENTORY': "task.HitachiScopedInventory", 'TASK.HYPERFLEXSCOPEDINVENTORY': "task.HyperflexScopedInventory", 'TASK.IWESCOPEDINVENTORY': "task.IweScopedInventory", 'TASK.NETAPPSCOPEDINVENTORY': "task.NetAppScopedInventory", 'TASK.PUBLICCLOUDSCOPEDINVENTORY': "task.PublicCloudScopedInventory", 'TASK.PURESCOPEDINVENTORY': "task.PureScopedInventory", 'TASK.SERVERSCOPEDINVENTORY': "task.ServerScopedInventory", 
'TECHSUPPORTMANAGEMENT.COLLECTIONCONTROLPOLICY': "techsupportmanagement.CollectionControlPolicy", 'TECHSUPPORTMANAGEMENT.DOWNLOAD': "techsupportmanagement.Download", 'TECHSUPPORTMANAGEMENT.TECHSUPPORTBUNDLE': "techsupportmanagement.TechSupportBundle", 'TECHSUPPORTMANAGEMENT.TECHSUPPORTSTATUS': "techsupportmanagement.TechSupportStatus", 'TERMINAL.AUDITLOG': "terminal.AuditLog", 'TERRAFORM.EXECUTOR': "terraform.Executor", 'THERMAL.POLICY': "thermal.Policy", 'TOP.SYSTEM': "top.System", 'UCSD.BACKUPINFO': "ucsd.BackupInfo", 'UUIDPOOL.BLOCK': "uuidpool.Block", 'UUIDPOOL.POOL': "uuidpool.Pool", 'UUIDPOOL.POOLMEMBER': "uuidpool.PoolMember", 'UUIDPOOL.UNIVERSE': "uuidpool.Universe", 'UUIDPOOL.UUIDLEASE': "uuidpool.UuidLease", 'VIRTUALIZATION.CISCOHYPERVISORMANAGER': "virtualization.CiscoHypervisorManager", 'VIRTUALIZATION.ESXICONSOLE': "virtualization.EsxiConsole", 'VIRTUALIZATION.HOST': "virtualization.Host", 'VIRTUALIZATION.IWECLUSTER': "virtualization.IweCluster", 'VIRTUALIZATION.IWEDATACENTER': "virtualization.IweDatacenter", 'VIRTUALIZATION.IWEDVUPLINK': "virtualization.IweDvUplink", 'VIRTUALIZATION.IWEDVSWITCH': "virtualization.IweDvswitch", 'VIRTUALIZATION.IWEHOST': "virtualization.IweHost", 'VIRTUALIZATION.IWEHOSTINTERFACE': "virtualization.IweHostInterface", 'VIRTUALIZATION.IWEHOSTVSWITCH': "virtualization.IweHostVswitch", 'VIRTUALIZATION.IWENETWORK': "virtualization.IweNetwork", 'VIRTUALIZATION.IWEVIRTUALDISK': "virtualization.IweVirtualDisk", 'VIRTUALIZATION.IWEVIRTUALMACHINE': "virtualization.IweVirtualMachine", 'VIRTUALIZATION.IWEVIRTUALMACHINENETWORKINTERFACE': "virtualization.IweVirtualMachineNetworkInterface", 'VIRTUALIZATION.VIRTUALDISK': "virtualization.VirtualDisk", 'VIRTUALIZATION.VIRTUALMACHINE': "virtualization.VirtualMachine", 'VIRTUALIZATION.VIRTUALNETWORK': "virtualization.VirtualNetwork", 'VIRTUALIZATION.VMWARECLUSTER': "virtualization.VmwareCluster", 'VIRTUALIZATION.VMWAREDATACENTER': "virtualization.VmwareDatacenter", 
'VIRTUALIZATION.VMWAREDATASTORE': "virtualization.VmwareDatastore", 'VIRTUALIZATION.VMWAREDATASTORECLUSTER': "virtualization.VmwareDatastoreCluster", 'VIRTUALIZATION.VMWAREDISTRIBUTEDNETWORK': "virtualization.VmwareDistributedNetwork", 'VIRTUALIZATION.VMWAREDISTRIBUTEDSWITCH': "virtualization.VmwareDistributedSwitch", 'VIRTUALIZATION.VMWAREFOLDER': "virtualization.VmwareFolder", 'VIRTUALIZATION.VMWAREHOST': "virtualization.VmwareHost", 'VIRTUALIZATION.VMWAREKERNELNETWORK': "virtualization.VmwareKernelNetwork", 'VIRTUALIZATION.VMWARENETWORK': "virtualization.VmwareNetwork", 'VIRTUALIZATION.VMWAREPHYSICALNETWORKINTERFACE': "virtualization.VmwarePhysicalNetworkInterface", 'VIRTUALIZATION.VMWAREUPLINKPORT': "virtualization.VmwareUplinkPort", 'VIRTUALIZATION.VMWAREVCENTER': "virtualization.VmwareVcenter", 'VIRTUALIZATION.VMWAREVIRTUALDISK': "virtualization.VmwareVirtualDisk", 'VIRTUALIZATION.VMWAREVIRTUALMACHINE': "virtualization.VmwareVirtualMachine", 'VIRTUALIZATION.VMWAREVIRTUALMACHINESNAPSHOT': "virtualization.VmwareVirtualMachineSnapshot", 'VIRTUALIZATION.VMWAREVIRTUALNETWORKINTERFACE': "virtualization.VmwareVirtualNetworkInterface", 'VIRTUALIZATION.VMWAREVIRTUALSWITCH': "virtualization.VmwareVirtualSwitch", 'VMEDIA.POLICY': "vmedia.Policy", 'VMRC.CONSOLE': "vmrc.Console", 'VNC.CONSOLE': "vnc.Console", 'VNIC.ETHADAPTERPOLICY': "vnic.EthAdapterPolicy", 'VNIC.ETHIF': "vnic.EthIf", 'VNIC.ETHNETWORKPOLICY': "vnic.EthNetworkPolicy", 'VNIC.ETHQOSPOLICY': "vnic.EthQosPolicy", 'VNIC.FCADAPTERPOLICY': "vnic.FcAdapterPolicy", 'VNIC.FCIF': "vnic.FcIf", 'VNIC.FCNETWORKPOLICY': "vnic.FcNetworkPolicy", 'VNIC.FCQOSPOLICY': "vnic.FcQosPolicy", 'VNIC.ISCSIADAPTERPOLICY': "vnic.IscsiAdapterPolicy", 'VNIC.ISCSIBOOTPOLICY': "vnic.IscsiBootPolicy", 'VNIC.ISCSISTATICTARGETPOLICY': "vnic.IscsiStaticTargetPolicy", 'VNIC.LANCONNECTIVITYPOLICY': "vnic.LanConnectivityPolicy", 'VNIC.LCPSTATUS': "vnic.LcpStatus", 'VNIC.SANCONNECTIVITYPOLICY': "vnic.SanConnectivityPolicy", 'VNIC.SCPSTATUS': 
"vnic.ScpStatus", 'VRF.VRF': "vrf.Vrf", 'WORKFLOW.ANSIBLEBATCHEXECUTOR': "workflow.AnsibleBatchExecutor", 'WORKFLOW.BATCHAPIEXECUTOR': "workflow.BatchApiExecutor", 'WORKFLOW.BUILDTASKMETA': "workflow.BuildTaskMeta", 'WORKFLOW.BUILDTASKMETAOWNER': "workflow.BuildTaskMetaOwner", 'WORKFLOW.CATALOG': "workflow.Catalog", 'WORKFLOW.CUSTOMDATATYPEDEFINITION': "workflow.CustomDataTypeDefinition", 'WORKFLOW.ERRORRESPONSEHANDLER': "workflow.ErrorResponseHandler", 'WORKFLOW.PENDINGDYNAMICWORKFLOWINFO': "workflow.PendingDynamicWorkflowInfo", 'WORKFLOW.ROLLBACKWORKFLOW': "workflow.RollbackWorkflow", 'WORKFLOW.SOLUTIONACTIONDEFINITION': "workflow.SolutionActionDefinition", 'WORKFLOW.SOLUTIONACTIONINSTANCE': "workflow.SolutionActionInstance", 'WORKFLOW.SOLUTIONDEFINITION': "workflow.SolutionDefinition", 'WORKFLOW.SOLUTIONINSTANCE': "workflow.SolutionInstance", 'WORKFLOW.SOLUTIONOUTPUT': "workflow.SolutionOutput", 'WORKFLOW.SSHBATCHEXECUTOR': "workflow.SshBatchExecutor", 'WORKFLOW.TASKDEBUGLOG': "workflow.TaskDebugLog", 'WORKFLOW.TASKDEFINITION': "workflow.TaskDefinition", 'WORKFLOW.TASKINFO': "workflow.TaskInfo", 'WORKFLOW.TASKMETADATA': "workflow.TaskMetadata", 'WORKFLOW.TASKNOTIFICATION': "workflow.TaskNotification", 'WORKFLOW.TEMPLATEEVALUATION': "workflow.TemplateEvaluation", 'WORKFLOW.TEMPLATEFUNCTIONMETA': "workflow.TemplateFunctionMeta", 'WORKFLOW.WORKFLOWDEFINITION': "workflow.WorkflowDefinition", 'WORKFLOW.WORKFLOWINFO': "workflow.WorkflowInfo", 'WORKFLOW.WORKFLOWMETA': "workflow.WorkflowMeta", 'WORKFLOW.WORKFLOWMETADATA': "workflow.WorkflowMetadata", 'WORKFLOW.WORKFLOWNOTIFICATION': "workflow.WorkflowNotification", }, } validations = { ('mac',): { 'regex': { 'pattern': r'^$|^([0-9A-Fa-f]{2}[:-]){5}([0-9A-Fa-f]{2})$', }, }, } @cached_property def additional_properties_type(): lazy_import() return (bool, date, datetime, dict, float, int, list, str, none_type,) _nullable = False @cached_property def openapi_types(): lazy_import() return { 'class_id': (str,), 'moid': 
(str,), 'selector': (str,), 'link': (str,), 'account_moid': (str,), 'create_time': (datetime,), 'domain_group_moid': (str,), 'mod_time': (datetime,), 'owners': ([str], none_type,), 'shared_scope': (str,), 'tags': ([MoTag], none_type,), 'version_context': (MoVersionContext,), 'ancestors': ([MoBaseMoRelationship], none_type,), 'parent': (MoBaseMoRelationship,), 'permission_resources': ([MoBaseMoRelationship], none_type,), 'display_names': (DisplayNames,), 'bond_state': (VirtualizationBondState,), 'host_name': (str,), 'host_uuid': (str,), 'if_type': (str,), 'ip_addresses': ([str], none_type,), 'link_state': (str,), 'mac': (str,), 'mtu': (int,), 'name': (str,), 'vlans': (str,), 'cluster': (VirtualizationIweClusterRelationship,), 'dv_uplink': (VirtualizationIweDvUplinkRelationship,), 'host': (VirtualizationIweHostRelationship,), 'network': (VirtualizationIweNetworkRelationship,), 'object_type': (str,), } @cached_property def discriminator(): lazy_import() val = { 'mo.MoRef': MoMoRef, 'virtualization.IweHostInterface': VirtualizationIweHostInterface, } if not val: return None return {'class_id': val} attribute_map = { 'class_id': 'ClassId', 'moid': 'Moid', 'selector': 'Selector', 'link': 'link', 'account_moid': 'AccountMoid', 'create_time': 'CreateTime', 'domain_group_moid': 'DomainGroupMoid', 'mod_time': 'ModTime', 'owners': 'Owners', 'shared_scope': 'SharedScope', 'tags': 'Tags', 'version_context': 'VersionContext', 'ancestors': 'Ancestors', 'parent': 'Parent', 'permission_resources': 'PermissionResources', 'display_names': 'DisplayNames', 'bond_state': 'BondState', 'host_name': 'HostName', 'host_uuid': 'HostUuid', 'if_type': 'IfType', 'ip_addresses': 'IpAddresses', 'link_state': 'LinkState', 'mac': 'Mac', 'mtu': 'Mtu', 'name': 'Name', 'vlans': 'Vlans', 'cluster': 'Cluster', 'dv_uplink': 'DvUplink', 'host': 'Host', 'network': 'Network', 'object_type': 'ObjectType', } required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', 
'_configuration', '_visited_composed_classes', '_composed_instances', '_var_name_to_model_instances', '_additional_properties_model_instances', ]) @convert_js_args_to_python_args def __init__(self, *args, **kwargs): class_id = kwargs.get('class_id', "mo.MoRef") _check_type = kwargs.pop('_check_type', True) _spec_property_naming = kwargs.pop('_spec_property_naming', False) _path_to_item = kwargs.pop('_path_to_item', ()) _configuration = kwargs.pop('_configuration', None) _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) if args: raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) self._data_store = {} self._check_type = _check_type self._spec_property_naming = _spec_property_naming self._path_to_item = _path_to_item self._configuration = _configuration self._visited_composed_classes = _visited_composed_classes + (self.__class__,) constant_args = { '_check_type': _check_type, '_path_to_item': _path_to_item, '_spec_property_naming': _spec_property_naming, '_configuration': _configuration, '_visited_composed_classes': self._visited_composed_classes, } required_args = { 'class_id': class_id, } model_args = {} model_args.update(required_args) model_args.update(kwargs) composed_info = validate_get_composed_info( constant_args, model_args, self) self._composed_instances = composed_info[0] self._var_name_to_model_instances = composed_info[1] self._additional_properties_model_instances = composed_info[2] unused_args = composed_info[3] for var_name, var_value in required_args.items(): setattr(self, var_name, var_value) for var_name, var_value in kwargs.items(): if var_name in unused_args and \ self._configuration is not None and \ self._configuration.discard_unknown_keys and \ not self._additional_properties_model_instances: continue setattr(self, var_name, var_value) @cached_property def 
_composed_schemas(): # loading lazy_import() return { 'anyOf': [ ], 'allOf': [ ], 'oneOf': [ MoMoRef, VirtualizationIweHostInterface, none_type, ], }
true
true
f70c2d8d45be6f5330ff2d3db61a1152fce3e388
4,299
py
Python
common-python/bcctools/bcc_ansible/bcc_switch_agent_datasource.py
LaudateCorpus1/atg-commerce-iaas
f1ae31657fc0111a5c019d46a28a3c81aae1acb2
[ "MIT" ]
28
2016-11-07T14:03:25.000Z
2022-02-01T08:46:52.000Z
common-python/bcctools/bcc_ansible/bcc_switch_agent_datasource.py
LaudateCorpus1/atg-commerce-iaas
f1ae31657fc0111a5c019d46a28a3c81aae1acb2
[ "MIT" ]
3
2016-11-09T13:23:03.000Z
2018-04-05T15:49:22.000Z
common-python/bcctools/bcc_ansible/bcc_switch_agent_datasource.py
LaudateCorpus1/atg-commerce-iaas
f1ae31657fc0111a5c019d46a28a3c81aae1acb2
[ "MIT" ]
13
2016-10-27T17:59:38.000Z
2022-02-18T04:38:38.000Z
#!/usr/bin/python # The MIT License (MIT) # # Copyright (c) 2017 Oracle # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~# __author__ = "Michael Shanley (Oracle A-Team)" __copyright__ = "Copyright (c) 2017 Oracle" __version__ = "1.0.0.0" __date__ = "@BUILDDATE@" __status__ = "Development" __module__ = "switch_agent_datasource" # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~# ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: bcc_switch_agent_datasource short_description: Switch an agents datasource version_added: "1.0" description: - Switch an agents datasource options: action: description: - Action to be executed against the BCC required: false default: switch_agent_datasource choices: ['switch_agent_datasource'] endpoint: description: - BCC REST Endpoint required: true default: null type: str cookie: description: - ATG Cookie required: false default: null type: str switchTargetID: description: - ID of the target the agent(s) you want to switch belong to required: true default: null type: str agentIDs: description: - ID's of the agents you want to switch datasources on - Comma separated list of ID's required: true default: null type: str requirements: - "python >= 2.6" - "ATG BCCTools module" ... 
''' EXAMPLES = ''' ''' from bcc_rest.bcc_get_agent_id import getAgentID # Main processing function def main(): module = AnsibleModule( argument_spec = dict( action = dict(default='switch_agent_datasource', choices=['switch_agent_datasource']), endpoint = dict(required=True, type='str'), switchTargetID = dict(required=True, type='str'), agentIDs = dict(required=True, type='str'), cookie = dict(required=False, type='str') ) ) endpoint = module.params['endpoint'] switchTargetID = module.params['switchTargetID'] agentIDs = module.params['agentIDs'] cookie = module.params['cookie'] changed = False try: if module.params['action'] == 'switch_agent_datasource': response = getAgentID(endpoint, switchTargetID, agentIDs, cookie) jsonobj = json.loads(response.text) if ('formExceptions' in jsonobj): module.fail_json(msg=jsonobj) module.exit_json(changed=changed, targetID=jsonobj) else: module.fail_json(msg="Unknown action") except Exception as e: module.fail_json(msg=str(e.message)) return # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.splitter import * # Main function to kick off processing if __name__ == "__main__": main()
32.816794
115
0.612933
__author__ = "Michael Shanley (Oracle A-Team)" __copyright__ = "Copyright (c) 2017 Oracle" __version__ = "1.0.0.0" __date__ = "@BUILDDATE@" __status__ = "Development" __module__ = "switch_agent_datasource" ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: bcc_switch_agent_datasource short_description: Switch an agents datasource version_added: "1.0" description: - Switch an agents datasource options: action: description: - Action to be executed against the BCC required: false default: switch_agent_datasource choices: ['switch_agent_datasource'] endpoint: description: - BCC REST Endpoint required: true default: null type: str cookie: description: - ATG Cookie required: false default: null type: str switchTargetID: description: - ID of the target the agent(s) you want to switch belong to required: true default: null type: str agentIDs: description: - ID's of the agents you want to switch datasources on - Comma separated list of ID's required: true default: null type: str requirements: - "python >= 2.6" - "ATG BCCTools module" ... 
''' EXAMPLES = ''' ''' from bcc_rest.bcc_get_agent_id import getAgentID def main(): module = AnsibleModule( argument_spec = dict( action = dict(default='switch_agent_datasource', choices=['switch_agent_datasource']), endpoint = dict(required=True, type='str'), switchTargetID = dict(required=True, type='str'), agentIDs = dict(required=True, type='str'), cookie = dict(required=False, type='str') ) ) endpoint = module.params['endpoint'] switchTargetID = module.params['switchTargetID'] agentIDs = module.params['agentIDs'] cookie = module.params['cookie'] changed = False try: if module.params['action'] == 'switch_agent_datasource': response = getAgentID(endpoint, switchTargetID, agentIDs, cookie) jsonobj = json.loads(response.text) if ('formExceptions' in jsonobj): module.fail_json(msg=jsonobj) module.exit_json(changed=changed, targetID=jsonobj) else: module.fail_json(msg="Unknown action") except Exception as e: module.fail_json(msg=str(e.message)) return from ansible.module_utils.basic import * from ansible.module_utils.splitter import * if __name__ == "__main__": main()
true
true
f70c2d9f2af9f994a71e25fe38bb042a3b41ce76
86
py
Python
todo/admin.py
Overexm/django_git_merey
85ca0fa1c38e4dee40259a31fcb904897f6d3cbc
[ "MIT" ]
null
null
null
todo/admin.py
Overexm/django_git_merey
85ca0fa1c38e4dee40259a31fcb904897f6d3cbc
[ "MIT" ]
null
null
null
todo/admin.py
Overexm/django_git_merey
85ca0fa1c38e4dee40259a31fcb904897f6d3cbc
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import ToDo admin.site.register(ToDo)
14.333333
32
0.802326
from django.contrib import admin from .models import ToDo admin.site.register(ToDo)
true
true
f70c2ddc8e681bddcb67b9de0718cd3fd68ded9f
33,074
py
Python
python-wamp-client/labby/rpc.py
Simon-Deuring/labgrid-frotend-mle
11dd53167c9fea06c6ad9af07b87f0cae1df5fd0
[ "MIT" ]
null
null
null
python-wamp-client/labby/rpc.py
Simon-Deuring/labgrid-frotend-mle
11dd53167c9fea06c6ad9af07b87f0cae1df5fd0
[ "MIT" ]
19
2021-10-15T08:31:51.000Z
2021-10-19T12:53:46.000Z
python-wamp-client/labby/rpc.py
Simon-Deuring/labgrid-frotend-mle
11dd53167c9fea06c6ad9af07b87f0cae1df5fd0
[ "MIT" ]
null
null
null
""" Generic RPC functions for labby """ # import asyncio import asyncio from cgi import print_exception import os from pathlib import Path from typing import Any, Callable, Dict, Iterable, List, Optional, Type, Union import yaml from attr import attrib, attrs from autobahn.wamp.exception import ApplicationError from labby.console import Console from labby.resource import LabbyResource, NetworkSerialPort, PowerAction, power_resources, power_resource_from_name from .labby_error import (LabbyError, failed, invalid_parameter, not_found) from .labby_types import (ExporterName, GroupName, LabbyPlace, PlaceName, PowerState, Resource, ResourceName, Session, Place) from .labby_util import flatten def _check_not_none(*args, **kwargs) -> Optional[LabbyError]: return next((invalid_parameter(f"Missing required parameter: {name}.") for name, val in vars().items() if val is None), None) @attrs() class RPCDesc(): name: str = attrib(default=None) endpoint: str = attrib(default=None) remote_endpoint: str = attrib(default=None) info: Optional[str] = attrib(default=None) parameter: Optional[List[Dict[str, str]]] = attrib(default=None) return_type: Optional[str] = attrib(default=None) def _localfile(path): return Path(os.path.dirname(os.path.realpath(__file__))).joinpath(path) FUNCTION_INFO = {} with open(_localfile('rpc_desc.yaml'), 'r', encoding='utf-8') as file: FUNCTION_INFO = {key: RPCDesc(**val) for key, val in yaml.load(file, yaml.loader.FullLoader).items() if val is not None} # non exhaustive list of serializable primitive types _serializable_primitive: List[Type] = [int, float, str, bool] def invalidates_cache(attribute, *rec_args, reconstitute: Optional[Callable] = None): """ on call clear attribute (e.g. 
set to None) """ def decorator(func: Callable): def wrapped(self: Session, *args, **kwargs): setattr(self, attribute, None) return func(self, *args, **kwargs) return wrapped return decorator def cached(attribute: str): """ Decorator defintion to cache data in labby context and fetch data from server """ assert attribute is not None def decorator(func: Callable): async def wrapped(context: Session, *args, **kwargs): assert context is not None if not hasattr(context, attribute): context.__dict__.update({attribute: None}) data = None else: data: Optional[Dict] = context.__getattribute__( attribute) if data is None: data: Optional[Dict] = await func(context, *args, **kwargs) if not isinstance(data, LabbyError): context.__setattr__(attribute, data) return data return wrapped return decorator def labby_serialized(func): """ Custom serializer decorator for labby rpc functions to make sure returned values are cbor/json serializable """ async def wrapped(*args, **kwargs) -> Union[None, List, Dict, int, float, str, bool]: ret = await func(*args, **kwargs) if ret is None: return None if isinstance(ret, LabbyError): return ret.to_json() if isinstance(ret, LabbyPlace): return ret.to_json() if isinstance(ret, (dict, list)) or type(ret) in _serializable_primitive: return ret raise NotImplementedError( f"{type(ret)} can currently not be serialized!") return wrapped async def fetch(context: Session, attribute: str, endpoint: str, *args, **kwargs) -> Any: """ QoL function to fetch data drom Coordinator and store in attribute member in Session """ assert context is not None assert attribute is not None assert endpoint is not None data: Optional[Dict] = getattr(context, attribute) if data is None: data: Optional[Dict] = await context.call(endpoint, *args, **kwargs) setattr(context, attribute, data) return data async def fetch_places(context: Session, place: Optional[PlaceName]) -> Union[Dict[PlaceName, Place], LabbyError]: """ Fetch places from coordinator, update if missing and 
handle possible errors """ assert context is not None _data = await context.places.get(context) # type: ignore if _data is None: if place is None: return not_found("Could not find any places.") return not_found(f"Could not find place with name {place}.") if place is not None: if place in _data.keys(): return {place: _data[place]} return not_found(f"Could not find place with name {place}.") return _data async def fetch_resources(context: Session, place: Optional[PlaceName], resource_key: Optional[ResourceName]) -> Union[Dict, LabbyError]: """ Fetch resources from coordinator, update if missing and handle possible errors """ assert context is not None data: Optional[Dict] = await context.resources.get(context) if data is None: if place is None: return not_found("Could not find any resources.") return not_found(f"No resources found for place {place}.") if place is not None: data = {exporter: {k: v for k, v in exporter_data.items() if k == place and v} for exporter, exporter_data in data.items()} if resource_key is not None: data = {exporter: {place_name: {k: v for k, v in place_res.items() if k == resource_key if v} for place_name, place_res in exporter_data.items() if place_res} for exporter, exporter_data in data.items()} return data @cached("peers") async def fetch_peers(context: Session) -> Union[Dict, LabbyError]: session_ids = await context.call("wamp.session.list") sessions = {} for sess in session_ids: # ['exact']: tmp = await context.call("wamp.session.get", sess) if tmp and 'authid' in tmp: sessions[tmp['authid']] = tmp return sessions async def get_exporters(context: Session) -> Union[List[ExporterName], LabbyError]: peers = await fetch_peers(context) if isinstance(peers, LabbyError): return peers assert peers is not None return [x.replace('exporter/', '') for x in peers if x.startswith('exporter')] def _calc_power_for_place(place_name, resources: Iterable[Dict]): pstate = False for res in resources: if isinstance(res['acquired'], Iterable): pstate |= 
place_name in res['acquired'] else: pstate |= res['acquired'] == place_name return pstate @cached("power_states") async def fetch_power_state(context: Session, place: Optional[PlaceName]) -> Union[PowerState, LabbyError]: """ Use fetch resource to determine power state, this may update context.resource """ _resources = await fetch_resources(context=context, place=place, resource_key=None) if isinstance(_resources, LabbyError): return _resources if len(_resources) > 0: _resources = flatten(_resources) _places = await fetch_places(context, place) if isinstance(_places, LabbyError): return _places power_states = {} assert _places for place_name, place_data in _places.items(): if 'acquired_resources' in place_data: if len(place_data['acquired_resources']) == 0 or place_name not in _resources: power_states[place_name] = {'power_state': False} continue resources_to_check = ((v for k, v in _resources[place_name].items() if any( (k in a for a in place_data['acquired_resources'])))) power_states[place_name] = { 'power_state': _calc_power_for_place(place_name, resources_to_check)} return power_states @labby_serialized async def places(context: Session, place: Optional[PlaceName] = None) -> Union[List[LabbyPlace], LabbyError]: """ returns registered places as dict of lists """ context.log.info("Fetching places.") data = await fetch_places(context, place) if isinstance(data, LabbyError): return data power_states = await fetch_power_state(context=context, place=place) assert power_states is not None if isinstance(power_states, LabbyError): return power_states await get_reservations(context) def token_from_place(name): return next((token for token, x in context.reservations.items() if x['filters']['main']['name'] == name), None) place_res = [] assert data for place_name, place_data in data.items(): # append the place to acquired places if # it has been acquired in a previous session if (place_data and place_data['acquired'] == context.user_name and place_name not in 
context.acquired_places ): context.acquired_places.add(place_name) if place is not None and place_name != place: continue # ??? (Kevin) what if there are more than one or no matches if len(place_data["matches"]) > 0 and 'exporter' in place_data["matches"]: exporter = place_data["matches"][0]["exporter"] else: exporter = None place_data.update({ "name": place_name, "exporter": exporter, "power_state": power_states.get(place_name, {}).get('power_state', None), "reservation": token_from_place(place_name) }) place_res.append(place_data) return place_res @labby_serialized async def list_places(context: Session) -> List[PlaceName]: """ Return all place names """ await fetch_places(context, None) return list(context.places.get_soft().keys()) if context.places else [] @labby_serialized async def resource(context: Session, place: Optional[PlaceName] = None, resource_key=None ) -> Union[Dict[ResourceName, Resource], LabbyError]: """ rpc: returns resources registered for given place """ context.log.info(f"Fetching resources for {place}.") resource_data = await fetch_resources(context=context, place=place, resource_key=resource_key) if isinstance(resource_data, LabbyError): return resource_data if place is None: return resource_data if len(flatten(resource_data)) == 0: return not_found(f"Place {place} not found.") return resource_data @labby_serialized async def power_state(context: Session, place: PlaceName, ) -> Union[PowerState, LabbyError]: """ rpc: return power state for a given place """ if place is None: return invalid_parameter("Missing required parameter: place.").to_json() power_data = await fetch_power_state(context=context, place=place) assert power_data is not None if isinstance(power_data, LabbyError): return power_data if place not in power_data.keys(): return not_found(f"Place {place} not found on Coordinator.").to_json() return power_data[place] @labby_serialized async def resource_overview(context: Session, place: Optional[PlaceName] = None, ) -> 
Union[List[Resource], LabbyError]: """ rpc: returns list of all resources on target """ context.log.info(f"Fetching resources overview for {place}.") targets = await fetch_resources(context=context, place=place, resource_key=None) if isinstance(targets, LabbyError): return targets ret = [] for exporter, resources in targets.items(): for res_place, res in resources.items(): if place is None or place == res_place: ret.extend({'name': key, 'target': exporter, 'place': res_place, **values} for key, values in res.items()) return ret @labby_serialized async def resource_by_name(context: Session, name: ResourceName, # filter by name ) -> Union[List[Resource], LabbyError]: """ rpc: returns list of all resources of given name on target """ if name is None: return invalid_parameter("Missing required parameter: name.") resource_data = await fetch_resources(context, place=None, resource_key=None) if isinstance(resource_data, LabbyError): return resource_data ret = [] for target, resources in resource_data.items(): for place, res in resources.items(): ret.extend( {'name': key, 'target': target, 'place': place, **values} for key, values in res.items() if name == key ) return ret @labby_serialized async def resource_names(context: Session) -> List[Dict[str, str]]: await fetch_resources(context, None, None) data = context.resources or {} def it(x): return x.items() return [ {'exporter': exporter, 'group': grp_name, 'class': x.get('cls'), 'name': name, } for exporter, group in it(data) for grp_name, res in it(group) for name, x in it(res) ] @labby_serialized async def acquire(context: Session, place: PlaceName) -> Union[bool, LabbyError]: """ rpc for acquiring places """ if place is None: return invalid_parameter("Missing required parameter: place.") if place in context.acquired_places: return failed(f"Already acquired place {place}.") # , group, resource_key, place) context.log.info(f"Acquiring place {place}.") try: acquire_successful = await 
context.call("org.labgrid.coordinator.acquire_place", place) except ApplicationError as err: return failed(f"Got exception while trying to call org.labgrid.coordinator.acquire_place. {err}") if acquire_successful: context.acquired_places.add(place) # remove the reservation if there was one if token := next((token for token, x in context.reservations.items() if x['filters']['main']['name'] == place), None,): ret = await cancel_reservation(context, token) if isinstance(ret, LabbyError): # context.log.error(f"Could not cancel reservation after acquire: {ret}") print(f"Could not cancel reservation after acquire: {ret}") del context.reservations[token] return acquire_successful @labby_serialized async def release(context: Session, place: PlaceName) -> Union[bool, LabbyError]: """ rpc for releasing 'acquired' places """ if place is None: return invalid_parameter("Missing required parameter: place.") if place not in context.acquired_places: return failed(f"Place {place} is not acquired") context.log.info(f"Releasing place {place}.") try: release_successful = await context.call('org.labgrid.coordinator.release_place', place) if place in context.acquired_places: # place update was quicker context.acquired_places.remove(place) except ApplicationError as err: return failed(f"Got exception while trying to call org.labgrid.coordinator.release_place. 
{err}") return release_successful @labby_serialized async def info(_context=None, func_key: Optional[str] = None) -> Union[List[Dict], LabbyError]: """ RPC call for general info for RPC function usage """ if func_key is None: return [desc.__dict__ for desc in globals()["FUNCTION_INFO"].values()] if func_key not in globals()["FUNCTION_INFO"]: return not_found(f"Function {func_key} not found in registry.") return globals()["FUNCTION_INFO"][func_key].__dict__ async def get_reservations(context: Session) -> Dict: """ RPC call to list current reservations on the Coordinator """ reservation_data: Dict = await context.call("org.labgrid.coordinator.get_reservations") for token, data in reservation_data.items(): if (data['state'] in ('waiting', 'allocated', 'acquired') and data['owner'] == context.user_name): context.to_refresh.add(token) context.reservations.update(**reservation_data) return reservation_data @labby_serialized async def create_reservation(context: Session, place: PlaceName, priority: float = 0.) -> Union[Dict, LabbyError]: # TODO figure out filters, priorities, etc # TODO should multiple reservations be allowed? 
if place is None: return invalid_parameter("Missing required parameter: place.") await get_reservations(context) # get current state from coordinator if any((place == x['filters']['main']['name'] for x in context.reservations.values() if 'name' in x['filters']['main'] and x['state'] not in ('expired', 'invalid'))): return failed(f"Place {place} is already reserved.") reservation = await context.call("org.labgrid.coordinator.create_reservation", f"name={place}", prio=priority) if not reservation: return failed("Failed to create reservation") context.reservations.update(reservation) context.to_refresh.add((next(iter(reservation.keys())))) return reservation async def refresh_reservations(context: Session): while True: to_remove = set() context.reservations = await context.call("org.labgrid.coordinator.get_reservations") for token in context.to_refresh: if token in context.reservations: # context.log.info(f"Refreshing reservation {token}") state = context.reservations[token]['state'] place_name = context.reservations[token]['filters']['main']['name'] if state == 'waiting': ret = await context.call("org.labgrid.coordinator.poll_reservation", token) if not ret: context.log.error( f"Failed to poll reservation {token}.") context.reservations[token] = ret # acquire the resource, when it has been allocated by the coordinator elif (context.reservations[token]['state'] == 'allocated' or (context.reservations[token]['state'] == 'acquired' and place_name not in context.acquired_places) ): ret = await acquire(context, place_name) await cancel_reservation(context, place_name) if not ret: context.log.error( f"Could not acquire reserved place {token}: {place_name}") to_remove.add(token) else: to_remove.add(token) else: to_remove.add(token) for token in to_remove: context.to_refresh.remove(token) await asyncio.sleep(1.) # !! 
TODO set to 10s @labby_serialized async def cancel_reservation(context: Session, place: PlaceName) -> Union[bool, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") await get_reservations(context) # get current state from coordinator token = next((token for token, x in context.reservations.items() if x['filters']['main']['name'] == place), None) if token is None: return failed(f"No reservations available for place {place}.") del context.reservations[token] return await context.call("org.labgrid.coordinator.cancel_reservation", token) @labby_serialized async def poll_reservation(context: Session, place: PlaceName) -> Union[Dict, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") token = next((token for token, x in context.reservations.items() if x['filters']['main']['name'] == place), None) if token is None: return failed(f"No reservations available for place {place}.") if not token: return failed("Failed to poll reservation.") reservation = await context.call("org.labgrid.coordinator.poll_reservation", token) context.reservations[token] = reservation return reservation @labby_serialized async def reset(context: Session, place: PlaceName) -> Union[bool, LabbyError]: """ Send a reset request to a place matching a given place name Note """ check = _check_not_none() if isinstance(check, LabbyError): return check context.log.info(f"Resetting place {place}") release_later = False if place not in context.acquired_places: release_later = True acq = await acquire(context, place) if isinstance(acquire, LabbyError): return acq if not acq: return failed(f"Could not acquire place {place}.") res = await fetch_resources(context, place, None) if isinstance(res, LabbyError): return failed(f"Failed to get resources for place {place}.") res = flatten(res, 2) # remove exporter and group from res for resname, resdata in res.items(): if resname in power_resources: try: context.log.info(f"Resetting 
{place}/{resname}.") power_resource = power_resource_from_name(resname, resdata) url = power_resource.power(PowerAction.cycle) assert (ssh_session := context.ssh_session) is not None assert ssh_session.client (_, _, serr) = ssh_session.client.exec_command( command=f"curl -Ss '{url}' > /dev/null" ) if len(msg := serr.read()) > 0: context.log.error( f"Got error while resetting console. {msg}") except ValueError: pass # not a valid powerresource after all ?? except Exception as e: raise e # other errors occured o.O if release_later: rel = await release(context, place) if isinstance(rel, LabbyError) or not rel: return failed(f"Failed to release place {place} after reset.") return True @labby_serialized async def console(context: Session, place: PlaceName): # TODO allow selection of resource to connect console to if place is None: return invalid_parameter("Missing required parameter: place.") if place not in context.acquired_places: ret = await acquire(context, place) if isinstance(ret, LabbyError): return ret if not ret: return failed("Failed to acquire Place (It may already have been acquired).") if place in context.open_consoles: return failed(f"There is already a console open for {place}.") # check that place has a console _resources = await fetch_resources(context, place, resource_key=None) if isinstance(_resources, LabbyError): return _resources if len(_resources) == 0: return failed(f"No resources on {place}.") _resources = flatten(_resources, depth=2) # remove exporter and place _resource: Optional[LabbyResource] = next( ( NetworkSerialPort( cls=data['cls'], port=data['params']['port'], host=data['params']['host'], speed=data['params']['speed'], protocol=data['params'].get('protocol', 'rfc2217'), ) for _, data in _resources.items() if 'cls' in data and data['cls'] == 'NetworkSerialPort' ), None, ) if _resource is None: return failed(f"No network serial port on {place}.") assert isinstance(_resource, NetworkSerialPort) assert context.ssh_session.client 
context.open_consoles[place] = (_con := Console(host=_resource.host or 'localhost', speed=_resource.speed, port=_resource.port, ssh_session=context.ssh_session.client)) async def _read(read_fn,): while place in context.open_consoles: try: data = await read_fn() assert context.frontend context.frontend.publish(f"localhost.consoles.{place}", data) except (OSError, EOFError): print_exception() context.log.error(f"Console closed read on {place}.") _con.close() if place in context.open_consoles: del context.open_consoles[place] print("Closing read.") except: print_exception() context.log.error(f"Console on {place} read failed.") _con.close() if place in context.open_consoles: del context.open_consoles[place] print("Closing read exc.") asyncio.run_coroutine_threadsafe( _read(_con.read_stdout), asyncio.get_event_loop()) asyncio.run_coroutine_threadsafe( _read(_con.read_stderr), asyncio.get_event_loop()) return True @labby_serialized async def console_write(context: Session, place: PlaceName, data: str) -> Union[bool, LabbyError]: # TODO implement if place not in context.acquired_places: return failed(f"Place {place} is not acquired.") if not (_console := context.open_consoles.get(place)): return failed(f"Place {place} has no open consoles.") if not data: # data was empty return failed(f"Could not write to Console {place}. 
Data was empty") try: _console.write_to_stdin(data) except Exception as e: context.log.exception(e) return failed(f"Failed to write to Console {place}.") # # do stuff # context.log.info(f"Console on {place} received: {data}.") return True @labby_serialized async def console_close(context: Session, place: PlaceName) -> Optional[LabbyError]: if place not in context.acquired_places: return failed(f"Place {place} is not acquired.") if not context.open_consoles.get(place): return failed(f"Place {place} has no open consoles.") context.log.info(f"Closing console on {place}.") context.open_consoles[place].close() del context.open_consoles[place] async def video(context: Session, *args): pass @labby_serialized async def forward(context: Session, *args): """ Forward a rpc call to the labgrid coordinator """ return await context.call(*args) @labby_serialized async def create_place(context: Session, place: PlaceName) -> Union[bool, LabbyError]: """ Create a new place on the coordinator """ if place is None: return invalid_parameter("Missing required parameter: place.") _places = await fetch_places(context, place=None) if isinstance(_places, LabbyError): return _places assert _places if place in _places: return failed(f"Place {place} already exists.") return await context.call("org.labgrid.coordinator.add_place", place) @labby_serialized async def delete_place(context: Session, place: PlaceName) -> Union[bool, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") _places = await fetch_places(context, place) assert context.places # should have been set with fetch_places if isinstance(_places, LabbyError): return _places return await context.call("org.labgrid.coordinator.del_place", place) @labby_serialized async def create_resource(context: Session, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]: # TODO (Kevin) Find a way to do this without being a exporter/ delegate to exporter if group_name is None: return 
invalid_parameter("Missing required parameter: group_name.") if resource_name is None: return invalid_parameter("Missing required parameter: resource_name.") ret = await context.call("org.labgrid.coordinator.set_resource", group_name, resource_name, {}) return ret @labby_serialized async def delete_resource(context: Session, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]: # TODO (Kevin) Find a way to do this without being a exporter/ delegate to exporter if group_name is None: return invalid_parameter("Missing required parameter: group_name.") if resource_name is None: return invalid_parameter("Missing required parameter: resource_name.") ret = await context.call("org.labgrid.coordinator.update_resource", group_name, resource_name, None) return ret @labby_serialized async def places_names(context: Session) -> Union[List[PlaceName], LabbyError]: _places = await fetch_places(context, None) if isinstance(_places, LabbyError): return _places assert _places return list(_places.keys()) @labby_serialized async def get_alias(context: Session, place: PlaceName) -> Union[List[str], LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") data = await fetch_places(context, place) if isinstance(data, LabbyError): return data assert data if len(data) == 0: return [] return [a for x in data.values() for a in x['aliases']] @labby_serialized async def add_match(context: Session, place: PlaceName, exporter: ExporterName, group: GroupName, cls: ResourceName, name: ResourceName) -> Union[bool, LabbyError]: _check_not_none(**vars()) try: return await context.call("org.labgrid.coordinator.add_place_match", place, f"{exporter}/{group}/{cls}/{name}") except: return failed(f"Failed to add match {exporter}/{group}/{cls}/{name} to place {place}.") @labby_serialized async def del_match(context: Session, place: PlaceName, exporter: ExporterName, group: GroupName, cls: ResourceName, name: ResourceName) -> Union[bool, 
LabbyError]: _check_not_none(**vars()) try: return await context.call("org.labgrid.coordinator.del_place_match", place, f"{exporter}/{group}/{cls}/{name}") except: return failed(f"Failed to add match {exporter}/{group}/{cls}/{name} to place {place}.") @labby_serialized async def acquire_resource(context: Session, place_name: PlaceName, exporter: ExporterName, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]: _check_not_none(**vars()) try: procedure = f"org.labgrid.exporter.{exporter}.acquire" return await context.call(procedure, group_name, resource_name, place_name) except: return failed(f"Failed to acquire resource {exporter}/{place_name}/{resource_name}.") @labby_serialized async def release_resource(context: Session, place_name: PlaceName, exporter: ExporterName, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]: _check_not_none(**vars()) try: procedure = f"org.labgrid.exporter.{exporter}.release" return await context.call(procedure, group_name, resource_name, place_name) except Exception: return failed(f"Failed to release resource {exporter}/{place_name}/{resource_name}.") @labby_serialized async def cli_command(context: Session, command: str) -> Union[str, LabbyError]: if command is None or not command: return failed("Command must not be empty.") assert (ssh_session := context.ssh_session).client context.log.info( f"Issuing labgrid-client command: labgrid-client {command}") try: (_, sout, serr) = ssh_session.client.exec_command( command=f"export LG_USERNAME={context.user_name}; labgrid-client {command}") so = str(sout.read(), encoding='utf-8') if se := str(serr.read(), encoding='utf-8'): so += f"\n\n{se}" return so except Exception: return failed("Failed to execute cli command.") @labby_serialized async def username(context: Session) -> Union[str, LabbyError]: return context.user_name or failed("Username has not been set correctly.")
38.36891
170
0.635151
import asyncio from cgi import print_exception import os from pathlib import Path from typing import Any, Callable, Dict, Iterable, List, Optional, Type, Union import yaml from attr import attrib, attrs from autobahn.wamp.exception import ApplicationError from labby.console import Console from labby.resource import LabbyResource, NetworkSerialPort, PowerAction, power_resources, power_resource_from_name from .labby_error import (LabbyError, failed, invalid_parameter, not_found) from .labby_types import (ExporterName, GroupName, LabbyPlace, PlaceName, PowerState, Resource, ResourceName, Session, Place) from .labby_util import flatten def _check_not_none(*args, **kwargs) -> Optional[LabbyError]: return next((invalid_parameter(f"Missing required parameter: {name}.") for name, val in vars().items() if val is None), None) @attrs() class RPCDesc(): name: str = attrib(default=None) endpoint: str = attrib(default=None) remote_endpoint: str = attrib(default=None) info: Optional[str] = attrib(default=None) parameter: Optional[List[Dict[str, str]]] = attrib(default=None) return_type: Optional[str] = attrib(default=None) def _localfile(path): return Path(os.path.dirname(os.path.realpath(__file__))).joinpath(path) FUNCTION_INFO = {} with open(_localfile('rpc_desc.yaml'), 'r', encoding='utf-8') as file: FUNCTION_INFO = {key: RPCDesc(**val) for key, val in yaml.load(file, yaml.loader.FullLoader).items() if val is not None} _serializable_primitive: List[Type] = [int, float, str, bool] def invalidates_cache(attribute, *rec_args, reconstitute: Optional[Callable] = None): def decorator(func: Callable): def wrapped(self: Session, *args, **kwargs): setattr(self, attribute, None) return func(self, *args, **kwargs) return wrapped return decorator def cached(attribute: str): assert attribute is not None def decorator(func: Callable): async def wrapped(context: Session, *args, **kwargs): assert context is not None if not hasattr(context, attribute): context.__dict__.update({attribute: 
None}) data = None else: data: Optional[Dict] = context.__getattribute__( attribute) if data is None: data: Optional[Dict] = await func(context, *args, **kwargs) if not isinstance(data, LabbyError): context.__setattr__(attribute, data) return data return wrapped return decorator def labby_serialized(func): async def wrapped(*args, **kwargs) -> Union[None, List, Dict, int, float, str, bool]: ret = await func(*args, **kwargs) if ret is None: return None if isinstance(ret, LabbyError): return ret.to_json() if isinstance(ret, LabbyPlace): return ret.to_json() if isinstance(ret, (dict, list)) or type(ret) in _serializable_primitive: return ret raise NotImplementedError( f"{type(ret)} can currently not be serialized!") return wrapped async def fetch(context: Session, attribute: str, endpoint: str, *args, **kwargs) -> Any: assert context is not None assert attribute is not None assert endpoint is not None data: Optional[Dict] = getattr(context, attribute) if data is None: data: Optional[Dict] = await context.call(endpoint, *args, **kwargs) setattr(context, attribute, data) return data async def fetch_places(context: Session, place: Optional[PlaceName]) -> Union[Dict[PlaceName, Place], LabbyError]: assert context is not None _data = await context.places.get(context) if _data is None: if place is None: return not_found("Could not find any places.") return not_found(f"Could not find place with name {place}.") if place is not None: if place in _data.keys(): return {place: _data[place]} return not_found(f"Could not find place with name {place}.") return _data async def fetch_resources(context: Session, place: Optional[PlaceName], resource_key: Optional[ResourceName]) -> Union[Dict, LabbyError]: assert context is not None data: Optional[Dict] = await context.resources.get(context) if data is None: if place is None: return not_found("Could not find any resources.") return not_found(f"No resources found for place {place}.") if place is not None: data = {exporter: {k: v for k, v 
in exporter_data.items() if k == place and v} for exporter, exporter_data in data.items()} if resource_key is not None: data = {exporter: {place_name: {k: v for k, v in place_res.items() if k == resource_key if v} for place_name, place_res in exporter_data.items() if place_res} for exporter, exporter_data in data.items()} return data @cached("peers") async def fetch_peers(context: Session) -> Union[Dict, LabbyError]: session_ids = await context.call("wamp.session.list") sessions = {} for sess in session_ids: tmp = await context.call("wamp.session.get", sess) if tmp and 'authid' in tmp: sessions[tmp['authid']] = tmp return sessions async def get_exporters(context: Session) -> Union[List[ExporterName], LabbyError]: peers = await fetch_peers(context) if isinstance(peers, LabbyError): return peers assert peers is not None return [x.replace('exporter/', '') for x in peers if x.startswith('exporter')] def _calc_power_for_place(place_name, resources: Iterable[Dict]): pstate = False for res in resources: if isinstance(res['acquired'], Iterable): pstate |= place_name in res['acquired'] else: pstate |= res['acquired'] == place_name return pstate @cached("power_states") async def fetch_power_state(context: Session, place: Optional[PlaceName]) -> Union[PowerState, LabbyError]: _resources = await fetch_resources(context=context, place=place, resource_key=None) if isinstance(_resources, LabbyError): return _resources if len(_resources) > 0: _resources = flatten(_resources) _places = await fetch_places(context, place) if isinstance(_places, LabbyError): return _places power_states = {} assert _places for place_name, place_data in _places.items(): if 'acquired_resources' in place_data: if len(place_data['acquired_resources']) == 0 or place_name not in _resources: power_states[place_name] = {'power_state': False} continue resources_to_check = ((v for k, v in _resources[place_name].items() if any( (k in a for a in place_data['acquired_resources'])))) power_states[place_name] = { 
'power_state': _calc_power_for_place(place_name, resources_to_check)} return power_states @labby_serialized async def places(context: Session, place: Optional[PlaceName] = None) -> Union[List[LabbyPlace], LabbyError]: context.log.info("Fetching places.") data = await fetch_places(context, place) if isinstance(data, LabbyError): return data power_states = await fetch_power_state(context=context, place=place) assert power_states is not None if isinstance(power_states, LabbyError): return power_states await get_reservations(context) def token_from_place(name): return next((token for token, x in context.reservations.items() if x['filters']['main']['name'] == name), None) place_res = [] assert data for place_name, place_data in data.items(): if (place_data and place_data['acquired'] == context.user_name and place_name not in context.acquired_places ): context.acquired_places.add(place_name) if place is not None and place_name != place: continue if len(place_data["matches"]) > 0 and 'exporter' in place_data["matches"]: exporter = place_data["matches"][0]["exporter"] else: exporter = None place_data.update({ "name": place_name, "exporter": exporter, "power_state": power_states.get(place_name, {}).get('power_state', None), "reservation": token_from_place(place_name) }) place_res.append(place_data) return place_res @labby_serialized async def list_places(context: Session) -> List[PlaceName]: await fetch_places(context, None) return list(context.places.get_soft().keys()) if context.places else [] @labby_serialized async def resource(context: Session, place: Optional[PlaceName] = None, resource_key=None ) -> Union[Dict[ResourceName, Resource], LabbyError]: context.log.info(f"Fetching resources for {place}.") resource_data = await fetch_resources(context=context, place=place, resource_key=resource_key) if isinstance(resource_data, LabbyError): return resource_data if place is None: return resource_data if len(flatten(resource_data)) == 0: return not_found(f"Place {place} not 
found.") return resource_data @labby_serialized async def power_state(context: Session, place: PlaceName, ) -> Union[PowerState, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.").to_json() power_data = await fetch_power_state(context=context, place=place) assert power_data is not None if isinstance(power_data, LabbyError): return power_data if place not in power_data.keys(): return not_found(f"Place {place} not found on Coordinator.").to_json() return power_data[place] @labby_serialized async def resource_overview(context: Session, place: Optional[PlaceName] = None, ) -> Union[List[Resource], LabbyError]: context.log.info(f"Fetching resources overview for {place}.") targets = await fetch_resources(context=context, place=place, resource_key=None) if isinstance(targets, LabbyError): return targets ret = [] for exporter, resources in targets.items(): for res_place, res in resources.items(): if place is None or place == res_place: ret.extend({'name': key, 'target': exporter, 'place': res_place, **values} for key, values in res.items()) return ret @labby_serialized async def resource_by_name(context: Session, name: ResourceName, ) -> Union[List[Resource], LabbyError]: if name is None: return invalid_parameter("Missing required parameter: name.") resource_data = await fetch_resources(context, place=None, resource_key=None) if isinstance(resource_data, LabbyError): return resource_data ret = [] for target, resources in resource_data.items(): for place, res in resources.items(): ret.extend( {'name': key, 'target': target, 'place': place, **values} for key, values in res.items() if name == key ) return ret @labby_serialized async def resource_names(context: Session) -> List[Dict[str, str]]: await fetch_resources(context, None, None) data = context.resources or {} def it(x): return x.items() return [ {'exporter': exporter, 'group': grp_name, 'class': x.get('cls'), 'name': name, } for exporter, group in it(data) for grp_name, res in 
it(group) for name, x in it(res) ] @labby_serialized async def acquire(context: Session, place: PlaceName) -> Union[bool, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") if place in context.acquired_places: return failed(f"Already acquired place {place}.") context.log.info(f"Acquiring place {place}.") try: acquire_successful = await context.call("org.labgrid.coordinator.acquire_place", place) except ApplicationError as err: return failed(f"Got exception while trying to call org.labgrid.coordinator.acquire_place. {err}") if acquire_successful: context.acquired_places.add(place) if token := next((token for token, x in context.reservations.items() if x['filters']['main']['name'] == place), None,): ret = await cancel_reservation(context, token) if isinstance(ret, LabbyError): print(f"Could not cancel reservation after acquire: {ret}") del context.reservations[token] return acquire_successful @labby_serialized async def release(context: Session, place: PlaceName) -> Union[bool, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") if place not in context.acquired_places: return failed(f"Place {place} is not acquired") context.log.info(f"Releasing place {place}.") try: release_successful = await context.call('org.labgrid.coordinator.release_place', place) if place in context.acquired_places: context.acquired_places.remove(place) except ApplicationError as err: return failed(f"Got exception while trying to call org.labgrid.coordinator.release_place. 
{err}") return release_successful @labby_serialized async def info(_context=None, func_key: Optional[str] = None) -> Union[List[Dict], LabbyError]: if func_key is None: return [desc.__dict__ for desc in globals()["FUNCTION_INFO"].values()] if func_key not in globals()["FUNCTION_INFO"]: return not_found(f"Function {func_key} not found in registry.") return globals()["FUNCTION_INFO"][func_key].__dict__ async def get_reservations(context: Session) -> Dict: reservation_data: Dict = await context.call("org.labgrid.coordinator.get_reservations") for token, data in reservation_data.items(): if (data['state'] in ('waiting', 'allocated', 'acquired') and data['owner'] == context.user_name): context.to_refresh.add(token) context.reservations.update(**reservation_data) return reservation_data @labby_serialized async def create_reservation(context: Session, place: PlaceName, priority: float = 0.) -> Union[Dict, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") await get_reservations(context) if any((place == x['filters']['main']['name'] for x in context.reservations.values() if 'name' in x['filters']['main'] and x['state'] not in ('expired', 'invalid'))): return failed(f"Place {place} is already reserved.") reservation = await context.call("org.labgrid.coordinator.create_reservation", f"name={place}", prio=priority) if not reservation: return failed("Failed to create reservation") context.reservations.update(reservation) context.to_refresh.add((next(iter(reservation.keys())))) return reservation async def refresh_reservations(context: Session): while True: to_remove = set() context.reservations = await context.call("org.labgrid.coordinator.get_reservations") for token in context.to_refresh: if token in context.reservations: state = context.reservations[token]['state'] place_name = context.reservations[token]['filters']['main']['name'] if state == 'waiting': ret = await context.call("org.labgrid.coordinator.poll_reservation", token) if 
not ret: context.log.error( f"Failed to poll reservation {token}.") context.reservations[token] = ret elif (context.reservations[token]['state'] == 'allocated' or (context.reservations[token]['state'] == 'acquired' and place_name not in context.acquired_places) ): ret = await acquire(context, place_name) await cancel_reservation(context, place_name) if not ret: context.log.error( f"Could not acquire reserved place {token}: {place_name}") to_remove.add(token) else: to_remove.add(token) else: to_remove.add(token) for token in to_remove: context.to_refresh.remove(token) await asyncio.sleep(1.) @labby_serialized async def cancel_reservation(context: Session, place: PlaceName) -> Union[bool, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") await get_reservations(context) token = next((token for token, x in context.reservations.items() if x['filters']['main']['name'] == place), None) if token is None: return failed(f"No reservations available for place {place}.") del context.reservations[token] return await context.call("org.labgrid.coordinator.cancel_reservation", token) @labby_serialized async def poll_reservation(context: Session, place: PlaceName) -> Union[Dict, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") token = next((token for token, x in context.reservations.items() if x['filters']['main']['name'] == place), None) if token is None: return failed(f"No reservations available for place {place}.") if not token: return failed("Failed to poll reservation.") reservation = await context.call("org.labgrid.coordinator.poll_reservation", token) context.reservations[token] = reservation return reservation @labby_serialized async def reset(context: Session, place: PlaceName) -> Union[bool, LabbyError]: check = _check_not_none() if isinstance(check, LabbyError): return check context.log.info(f"Resetting place {place}") release_later = False if place not in context.acquired_places: 
release_later = True acq = await acquire(context, place) if isinstance(acquire, LabbyError): return acq if not acq: return failed(f"Could not acquire place {place}.") res = await fetch_resources(context, place, None) if isinstance(res, LabbyError): return failed(f"Failed to get resources for place {place}.") res = flatten(res, 2) for resname, resdata in res.items(): if resname in power_resources: try: context.log.info(f"Resetting {place}/{resname}.") power_resource = power_resource_from_name(resname, resdata) url = power_resource.power(PowerAction.cycle) assert (ssh_session := context.ssh_session) is not None assert ssh_session.client (_, _, serr) = ssh_session.client.exec_command( command=f"curl -Ss '{url}' > /dev/null" ) if len(msg := serr.read()) > 0: context.log.error( f"Got error while resetting console. {msg}") except ValueError: pass except Exception as e: raise e if release_later: rel = await release(context, place) if isinstance(rel, LabbyError) or not rel: return failed(f"Failed to release place {place} after reset.") return True @labby_serialized async def console(context: Session, place: PlaceName): if place is None: return invalid_parameter("Missing required parameter: place.") if place not in context.acquired_places: ret = await acquire(context, place) if isinstance(ret, LabbyError): return ret if not ret: return failed("Failed to acquire Place (It may already have been acquired).") if place in context.open_consoles: return failed(f"There is already a console open for {place}.") _resources = await fetch_resources(context, place, resource_key=None) if isinstance(_resources, LabbyError): return _resources if len(_resources) == 0: return failed(f"No resources on {place}.") _resources = flatten(_resources, depth=2) _resource: Optional[LabbyResource] = next( ( NetworkSerialPort( cls=data['cls'], port=data['params']['port'], host=data['params']['host'], speed=data['params']['speed'], protocol=data['params'].get('protocol', 'rfc2217'), ) for _, data in 
_resources.items() if 'cls' in data and data['cls'] == 'NetworkSerialPort' ), None, ) if _resource is None: return failed(f"No network serial port on {place}.") assert isinstance(_resource, NetworkSerialPort) assert context.ssh_session.client context.open_consoles[place] = (_con := Console(host=_resource.host or 'localhost', speed=_resource.speed, port=_resource.port, ssh_session=context.ssh_session.client)) async def _read(read_fn,): while place in context.open_consoles: try: data = await read_fn() assert context.frontend context.frontend.publish(f"localhost.consoles.{place}", data) except (OSError, EOFError): print_exception() context.log.error(f"Console closed read on {place}.") _con.close() if place in context.open_consoles: del context.open_consoles[place] print("Closing read.") except: print_exception() context.log.error(f"Console on {place} read failed.") _con.close() if place in context.open_consoles: del context.open_consoles[place] print("Closing read exc.") asyncio.run_coroutine_threadsafe( _read(_con.read_stdout), asyncio.get_event_loop()) asyncio.run_coroutine_threadsafe( _read(_con.read_stderr), asyncio.get_event_loop()) return True @labby_serialized async def console_write(context: Session, place: PlaceName, data: str) -> Union[bool, LabbyError]: if place not in context.acquired_places: return failed(f"Place {place} is not acquired.") if not (_console := context.open_consoles.get(place)): return failed(f"Place {place} has no open consoles.") if not data: return failed(f"Could not write to Console {place}. 
Data was empty") try: _console.write_to_stdin(data) except Exception as e: context.log.exception(e) return failed(f"Failed to write to Console {place}.") context.log.info(f"Console on {place} received: {data}.") return True @labby_serialized async def console_close(context: Session, place: PlaceName) -> Optional[LabbyError]: if place not in context.acquired_places: return failed(f"Place {place} is not acquired.") if not context.open_consoles.get(place): return failed(f"Place {place} has no open consoles.") context.log.info(f"Closing console on {place}.") context.open_consoles[place].close() del context.open_consoles[place] async def video(context: Session, *args): pass @labby_serialized async def forward(context: Session, *args): return await context.call(*args) @labby_serialized async def create_place(context: Session, place: PlaceName) -> Union[bool, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") _places = await fetch_places(context, place=None) if isinstance(_places, LabbyError): return _places assert _places if place in _places: return failed(f"Place {place} already exists.") return await context.call("org.labgrid.coordinator.add_place", place) @labby_serialized async def delete_place(context: Session, place: PlaceName) -> Union[bool, LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") _places = await fetch_places(context, place) assert context.places if isinstance(_places, LabbyError): return _places return await context.call("org.labgrid.coordinator.del_place", place) @labby_serialized async def create_resource(context: Session, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]: if group_name is None: return invalid_parameter("Missing required parameter: group_name.") if resource_name is None: return invalid_parameter("Missing required parameter: resource_name.") ret = await context.call("org.labgrid.coordinator.set_resource", group_name, 
resource_name, {}) return ret @labby_serialized async def delete_resource(context: Session, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]: if group_name is None: return invalid_parameter("Missing required parameter: group_name.") if resource_name is None: return invalid_parameter("Missing required parameter: resource_name.") ret = await context.call("org.labgrid.coordinator.update_resource", group_name, resource_name, None) return ret @labby_serialized async def places_names(context: Session) -> Union[List[PlaceName], LabbyError]: _places = await fetch_places(context, None) if isinstance(_places, LabbyError): return _places assert _places return list(_places.keys()) @labby_serialized async def get_alias(context: Session, place: PlaceName) -> Union[List[str], LabbyError]: if place is None: return invalid_parameter("Missing required parameter: place.") data = await fetch_places(context, place) if isinstance(data, LabbyError): return data assert data if len(data) == 0: return [] return [a for x in data.values() for a in x['aliases']] @labby_serialized async def add_match(context: Session, place: PlaceName, exporter: ExporterName, group: GroupName, cls: ResourceName, name: ResourceName) -> Union[bool, LabbyError]: _check_not_none(**vars()) try: return await context.call("org.labgrid.coordinator.add_place_match", place, f"{exporter}/{group}/{cls}/{name}") except: return failed(f"Failed to add match {exporter}/{group}/{cls}/{name} to place {place}.") @labby_serialized async def del_match(context: Session, place: PlaceName, exporter: ExporterName, group: GroupName, cls: ResourceName, name: ResourceName) -> Union[bool, LabbyError]: _check_not_none(**vars()) try: return await context.call("org.labgrid.coordinator.del_place_match", place, f"{exporter}/{group}/{cls}/{name}") except: return failed(f"Failed to add match {exporter}/{group}/{cls}/{name} to place {place}.") @labby_serialized async def acquire_resource(context: Session, place_name: 
PlaceName, exporter: ExporterName, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]: _check_not_none(**vars()) try: procedure = f"org.labgrid.exporter.{exporter}.acquire" return await context.call(procedure, group_name, resource_name, place_name) except: return failed(f"Failed to acquire resource {exporter}/{place_name}/{resource_name}.") @labby_serialized async def release_resource(context: Session, place_name: PlaceName, exporter: ExporterName, group_name: GroupName, resource_name: ResourceName) -> Union[bool, LabbyError]: _check_not_none(**vars()) try: procedure = f"org.labgrid.exporter.{exporter}.release" return await context.call(procedure, group_name, resource_name, place_name) except Exception: return failed(f"Failed to release resource {exporter}/{place_name}/{resource_name}.") @labby_serialized async def cli_command(context: Session, command: str) -> Union[str, LabbyError]: if command is None or not command: return failed("Command must not be empty.") assert (ssh_session := context.ssh_session).client context.log.info( f"Issuing labgrid-client command: labgrid-client {command}") try: (_, sout, serr) = ssh_session.client.exec_command( command=f"export LG_USERNAME={context.user_name}; labgrid-client {command}") so = str(sout.read(), encoding='utf-8') if se := str(serr.read(), encoding='utf-8'): so += f"\n\n{se}" return so except Exception: return failed("Failed to execute cli command.") @labby_serialized async def username(context: Session) -> Union[str, LabbyError]: return context.user_name or failed("Username has not been set correctly.")
true
true
f70c2f0326e85540f299fe27470653d4c2fb01bf
1,985
py
Python
data_collection/gazette/spiders/es_vila_velha.py
kaiocp/querido-diario
86004049c6eee305e13066cf3607d30849bb099a
[ "MIT" ]
454
2018-04-07T03:32:57.000Z
2020-08-17T19:56:22.000Z
data_collection/gazette/spiders/es_vila_velha.py
kaiocp/querido-diario
86004049c6eee305e13066cf3607d30849bb099a
[ "MIT" ]
165
2018-04-13T00:36:17.000Z
2020-08-17T23:41:45.000Z
data_collection/gazette/spiders/es_vila_velha.py
kaiocp/querido-diario
86004049c6eee305e13066cf3607d30849bb099a
[ "MIT" ]
183
2018-04-11T15:09:37.000Z
2020-08-15T18:55:11.000Z
from datetime import date import dateparser from scrapy import FormRequest, Request from gazette.items import Gazette from gazette.spiders.base import BaseGazetteSpider class VilaVelhaSpider(BaseGazetteSpider): name = "es_vila_velha" allowed_domains = ["www.vilavelha.es.gov.br"] TERRITORY_ID = "3205200" GAZETTE_URL_CSS = "td:last-child a::attr(href)" GAZETTE_DATE_CSS = "td:nth-child(2) span b::text" GAZETTE_ISSUE_CSS = "td:nth-child(3) span b::text" JAVASCRIPT_POSTBACK_REGEX = r"javascript:__doPostBack\('(.*)',''\)" start_date = date(2016, 7, 1) def start_requests(self): start_date = self.start_date.strftime("%d/%m/%Y") end_date = self.end_date.strftime("%d/%m/%Y") base_url = "https://www.vilavelha.es.gov.br/diariooficial/ConsultaDiario.aspx" gazettes_url = f"{base_url}?dataInicial={start_date}&dataFinal={end_date}" yield Request(gazettes_url) def parse(self, response): for element in response.css("#ctl00_cpConteudo_gvDocumentos tr"): is_header = element.css("th").extract() != [] if is_header: continue date = element.css(self.GAZETTE_DATE_CSS).get() date = dateparser.parse(date, languages=["pt"]).date() event_target = element.css(self.GAZETTE_URL_CSS).re_first( self.JAVASCRIPT_POSTBACK_REGEX ) gazette_issue = element.css(self.GAZETTE_ISSUE_CSS).get() is_extra = "EXTRA" in gazette_issue edition_number = gazette_issue.split(" ")[0] document_request = FormRequest.from_response( response, formdata={"__EVENTTARGET": event_target} ) yield Gazette( date=date, file_requests=[document_request], edition_number=edition_number, is_extra_edition=is_extra, power="executive_legislative", )
36.090909
86
0.63073
from datetime import date import dateparser from scrapy import FormRequest, Request from gazette.items import Gazette from gazette.spiders.base import BaseGazetteSpider class VilaVelhaSpider(BaseGazetteSpider): name = "es_vila_velha" allowed_domains = ["www.vilavelha.es.gov.br"] TERRITORY_ID = "3205200" GAZETTE_URL_CSS = "td:last-child a::attr(href)" GAZETTE_DATE_CSS = "td:nth-child(2) span b::text" GAZETTE_ISSUE_CSS = "td:nth-child(3) span b::text" JAVASCRIPT_POSTBACK_REGEX = r"javascript:__doPostBack\('(.*)',''\)" start_date = date(2016, 7, 1) def start_requests(self): start_date = self.start_date.strftime("%d/%m/%Y") end_date = self.end_date.strftime("%d/%m/%Y") base_url = "https://www.vilavelha.es.gov.br/diariooficial/ConsultaDiario.aspx" gazettes_url = f"{base_url}?dataInicial={start_date}&dataFinal={end_date}" yield Request(gazettes_url) def parse(self, response): for element in response.css("#ctl00_cpConteudo_gvDocumentos tr"): is_header = element.css("th").extract() != [] if is_header: continue date = element.css(self.GAZETTE_DATE_CSS).get() date = dateparser.parse(date, languages=["pt"]).date() event_target = element.css(self.GAZETTE_URL_CSS).re_first( self.JAVASCRIPT_POSTBACK_REGEX ) gazette_issue = element.css(self.GAZETTE_ISSUE_CSS).get() is_extra = "EXTRA" in gazette_issue edition_number = gazette_issue.split(" ")[0] document_request = FormRequest.from_response( response, formdata={"__EVENTTARGET": event_target} ) yield Gazette( date=date, file_requests=[document_request], edition_number=edition_number, is_extra_edition=is_extra, power="executive_legislative", )
true
true
f70c2fd4c461e1403893699683a5364d22263b7f
5,630
py
Python
docs/source/conf.py
DouglasWebster/gdscript-to-restructured
e8a6513bbeaf204a9c0d267f86c41c203581af44
[ "MIT" ]
null
null
null
docs/source/conf.py
DouglasWebster/gdscript-to-restructured
e8a6513bbeaf204a9c0d267f86c41c203581af44
[ "MIT" ]
null
null
null
docs/source/conf.py
DouglasWebster/gdscript-to-restructured
e8a6513bbeaf204a9c0d267f86c41c203581af44
[ "MIT" ]
null
null
null
# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import sphinx_rtd_theme import os import sys # -- Project information ----------------------------------------------------- project = 'GDScript to reStructured' copyright = '2021, GDScript' author = 'Nathan Lavato' # The full version, including alpha/beta/rc tags version = '0.1.0' release = version pygments_style = 'sphinx' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path. exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # -- General configuration --------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. needs_sphinx = '3.0' sys.path.append(os.path.abspath("_extensions")) extensions = [ 'sphinx_tabs.tabs', "notfound.extension", "sphinx.ext.extlinks" ] # Warning when the Sphinx Tabs extension is used with unknown # builders (like the dummy builder) - as it doesn't cause errors, # we can ignore this so we still can treat other warnings as errors. sphinx_tabs_nowarn = True extlinks = { 'godot_class' : ('https://docs.godotengine.org/en/stable/classes/class_%s.html', '') } # Custom 4O4 page HTML template. 
# https://github.com/readthedocs/sphinx-notfound-page notfound_context = { "title": "Page not found", "body": """ <h1>Page not found</h1> <p> Sorry, we couldn't find that page. It may have been renamed or removed in the version of the documentation you're currently browsing. </p> <p> If you're currently browsing the <em>latest</em> version of the documentation, try browsing the <a href="/en/stable/"><em>stable</em> version of the documentation</a>. </p> <p> Alternatively, use the <a href="#" onclick="$('#rtd-search-form [name=\\'q\\']').focus()">Search docs</a> box on the left or <a href="/">go to the homepage</a>. </p> """, } # on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org on_rtd = os.environ.get("READTHEDOCS", None) == "True" if not on_rtd: notfound_urls_prefix = '' if not os.getenv("SPHINX_NO_GDSCRIPT"): extensions.append("gdscript") # if not os.getenv("SPHINX_NO_SEARCH"): # extensions.append("sphinx_search.extension") if not os.getenv("SPHINX_NO_DESCRIPTIONS"): extensions.append("godot_descriptions") # Add any paths that contain templates here, relative to this directory. 
templates_path = ['_templates'] # You can specify multiple suffix as a list of string: ['.rst', '.md'] # source_suffix = ['.rst', '.md'] source_suffix = '.rst' source_encoding = 'utf-8-sig' # The master toctree document master_doc = 'index' # Parse Sphinx tags passed from RTD via environment env_tags = os.getenv("SPHINX_TAGS") if env_tags is not None: for tag in env_tags.split(","): print("Adding Sphinx tag: %s" % tag.strip()) tags.add(tag.strip()) # noqa: F82 supported_languages = { "en": "Godot Engine (%s) documentation in English", } language = os.getenv("READTHEDOCS_LANGUAGE", "en") if not language in supported_languages.keys(): print("Unknown language: " + language) print("Supported languages: " + ", ".join(supported_languages.keys())) print( "The configured language is either wrong, or it should be added to supported_languages in conf.py. Falling back to 'en'." ) language = "en" is_i18n = tags.has("i18n") # noqa: F821 exclude_patterns = ["_build"] # fmt: off # These imports should *not* be moved to the start of the file, # they depend on the sys.path.append call registering "_extensions". # GDScript syntax highlighting from gdscript import GDScriptLexer from sphinx.highlighting import lexers lexers["gdscript"] = GDScriptLexer() # fmt: on smartquotes = False # Pygments (syntax highlighting) style to use pygments_style = "sphinx" highlight_language = "gdscript" # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'sphinx_rtd_theme' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # These paths are either relative to html_static_path # or fully qualified paths (eg. https://...) 
html_css_files = [ "css/custom.css", "css/my.css" ] html_js_files = [ "js/custom.js", ] html_theme_options = { 'logo_only': True, 'collapse_navigation': False } html_logo = "docs_logo.png" latex_elements = { 'extraclassoptions': 'openany', 'preamble': r''' \usepackage{subfig} \usepackage{graphicx} ''', 'papersize': 'a4paper' }
29.631579
129
0.668206
import sphinx_rtd_theme import os import sys project = 'GDScript to reStructured' copyright = '2021, GDScript' author = 'Nathan Lavato' version = '0.1.0' release = version pygments_style = 'sphinx' exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] needs_sphinx = '3.0' sys.path.append(os.path.abspath("_extensions")) extensions = [ 'sphinx_tabs.tabs', "notfound.extension", "sphinx.ext.extlinks" ] # we can ignore this so we still can treat other warnings as errors. sphinx_tabs_nowarn = True extlinks = { 'godot_class' : ('https://docs.godotengine.org/en/stable/classes/class_%s.html', '') } # Custom 4O4 page HTML template. # https://github.com/readthedocs/sphinx-notfound-page notfound_context = { "title": "Page not found", "body": """ <h1>Page not found</h1> <p> Sorry, we couldn't find that page. It may have been renamed or removed in the version of the documentation you're currently browsing. </p> <p> If you're currently browsing the <em>latest</em> version of the documentation, try browsing the <a href="/en/stable/"><em>stable</em> version of the documentation</a>. </p> <p> Alternatively, use the <a href="#" onclick="$('#rtd-search-form [name=\\'q\\']').focus()">Search docs</a> box on the left or <a href="/">go to the homepage</a>. 
</p> """, } on_rtd = os.environ.get("READTHEDOCS", None) == "True" if not on_rtd: notfound_urls_prefix = '' if not os.getenv("SPHINX_NO_GDSCRIPT"): extensions.append("gdscript") if not os.getenv("SPHINX_NO_DESCRIPTIONS"): extensions.append("godot_descriptions") templates_path = ['_templates'] source_suffix = '.rst' source_encoding = 'utf-8-sig' master_doc = 'index' env_tags = os.getenv("SPHINX_TAGS") if env_tags is not None: for tag in env_tags.split(","): print("Adding Sphinx tag: %s" % tag.strip()) tags.add(tag.strip()) supported_languages = { "en": "Godot Engine (%s) documentation in English", } language = os.getenv("READTHEDOCS_LANGUAGE", "en") if not language in supported_languages.keys(): print("Unknown language: " + language) print("Supported languages: " + ", ".join(supported_languages.keys())) print( "The configured language is either wrong, or it should be added to supported_languages in conf.py. Falling back to 'en'." ) language = "en" is_i18n = tags.has("i18n") exclude_patterns = ["_build"] from gdscript import GDScriptLexer from sphinx.highlighting import lexers lexers["gdscript"] = GDScriptLexer() smartquotes = False pygments_style = "sphinx" highlight_language = "gdscript" html_theme = 'sphinx_rtd_theme' html_static_path = ['_static'] html_css_files = [ "css/custom.css", "css/my.css" ] html_js_files = [ "js/custom.js", ] html_theme_options = { 'logo_only': True, 'collapse_navigation': False } html_logo = "docs_logo.png" latex_elements = { 'extraclassoptions': 'openany', 'preamble': r''' \usepackage{subfig} \usepackage{graphicx} ''', 'papersize': 'a4paper' }
true
true
f70c3014ecf1637bf94b800311ac7370413a10c1
617
py
Python
tests/plugins/tasks/networking/test_netmiko_file_transfer.py
MatthiasGabriel/nornir
af948f20daee725f9bf257b0e73b688838ebcbc2
[ "Apache-2.0" ]
1
2019-04-10T08:14:59.000Z
2019-04-10T08:14:59.000Z
tests/plugins/tasks/networking/test_netmiko_file_transfer.py
MatthiasGabriel/nornir
af948f20daee725f9bf257b0e73b688838ebcbc2
[ "Apache-2.0" ]
null
null
null
tests/plugins/tasks/networking/test_netmiko_file_transfer.py
MatthiasGabriel/nornir
af948f20daee725f9bf257b0e73b688838ebcbc2
[ "Apache-2.0" ]
null
null
null
import os from nornir.plugins.tasks import networking THIS_DIR = os.path.dirname(os.path.realpath(__file__)) class Test(object): def test_netmiko_file_transfer(self, nornir): source_file = os.path.join(THIS_DIR, "data", "test_file.txt") dest_file = "test_file.txt" result = nornir.filter(name="dev4.group_2").run( networking.netmiko_file_transfer, source_file=source_file, dest_file=dest_file, direction="put", ) assert result for h, r in result.items(): assert r.result assert r.changed
26.826087
69
0.623987
import os from nornir.plugins.tasks import networking THIS_DIR = os.path.dirname(os.path.realpath(__file__)) class Test(object): def test_netmiko_file_transfer(self, nornir): source_file = os.path.join(THIS_DIR, "data", "test_file.txt") dest_file = "test_file.txt" result = nornir.filter(name="dev4.group_2").run( networking.netmiko_file_transfer, source_file=source_file, dest_file=dest_file, direction="put", ) assert result for h, r in result.items(): assert r.result assert r.changed
true
true
f70c321544f9877f102645bed6748a44c12c94c1
8,019
py
Python
pyshapelets/lts_smaller_shap_dicts.py
GillesVandewiele/pyShapelets
d7e91150c17bf0f5fed55dc36d0c4d2d447e80c9
[ "MIT" ]
16
2017-07-12T12:24:21.000Z
2021-01-23T14:11:24.000Z
pyshapelets/lts_smaller_shap_dicts.py
GillesVandewiele/pyShapelets
d7e91150c17bf0f5fed55dc36d0c4d2d447e80c9
[ "MIT" ]
13
2017-07-09T08:06:41.000Z
2017-09-21T14:16:04.000Z
pyshapelets/lts_smaller_shap_dicts.py
GillesVandewiele/pyShapelets
d7e91150c17bf0f5fed55dc36d0c4d2d447e80c9
[ "MIT" ]
4
2017-12-07T16:47:22.000Z
2019-11-08T20:42:40.000Z
import time from collections import Counter, defaultdict import warnings; warnings.filterwarnings('ignore') import glob import re import ast import numpy as np import pandas as pd import matplotlib.pyplot as plt from algorithms import ShapeletTransformer from extractors.extractor import MultiGeneticExtractor from data.load_all_datasets import load_data_train_test from sklearn.metrics import accuracy_score, log_loss from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import LogisticRegression from sklearn.svm import SVC from sklearn.model_selection import GridSearchCV from tslearn.shapelets import ShapeletModel def parse_shapelets(shapelets): shapelets = shapelets.replace(']', '],')[:-2] shapelets = re.sub(r'\s+', ', ', shapelets) shapelets = re.sub(r',+', ',', shapelets) shapelets = shapelets.replace('],[', '], [') shapelets = shapelets.replace('[,', '[') shapelets = '[' + shapelets + ']' shapelets = re.sub(r',\s+]', ']', shapelets) return ast.literal_eval(shapelets) def fit_rf(X_distances_train, y_train, X_distances_test, y_test, out_path): rf = GridSearchCV(RandomForestClassifier(), {'n_estimators': [10, 25, 50, 100, 500], 'max_depth': [None, 3, 7, 15]}) rf.fit(X_distances_train, y_train) hard_preds = rf.predict(X_distances_test) proba_preds = rf.predict_proba(X_distances_test) print("[RF] Accuracy = {}".format(accuracy_score(y_test, hard_preds))) print("[RF] Logloss = {}".format(log_loss(y_test, proba_preds))) hard_preds = pd.DataFrame(hard_preds, columns=['prediction']) proba_preds = pd.DataFrame(proba_preds, columns=['proba_{}'.format(x) for x in set(list(y_train) + list(y_test))]) hard_preds.to_csv(out_path.split('.')[0]+'_rf_hard.csv') proba_preds.to_csv(out_path.split('.')[0]+'_rf_proba.csv') def fit_lr(X_distances_train, y_train, X_distances_test, y_test, out_path): lr = GridSearchCV(LogisticRegression(), {'penalty': ['l1', 'l2'], 'C': [0.001, 0.01, 0.1, 1.0, 10.0]}) lr.fit(X_distances_train, y_train) hard_preds = 
lr.predict(X_distances_test) proba_preds = lr.predict_proba(X_distances_test) print("[LR] Accuracy = {}".format(accuracy_score(y_test, hard_preds))) print("[LR] Logloss = {}".format(log_loss(y_test, proba_preds))) hard_preds = pd.DataFrame(hard_preds, columns=['prediction']) proba_preds = pd.DataFrame(proba_preds, columns=['proba_{}'.format(x) for x in set(list(y_train) + list(y_test))]) hard_preds.to_csv(out_path.split('.')[0]+'_lr_hard.csv') proba_preds.to_csv(out_path.split('.')[0]+'_lr_proba.csv') def fit_svm(X_distances_train, y_train, X_distances_test, y_test, out_path): svc = GridSearchCV(SVC(kernel='linear', probability=True), {'C': [0.001, 0.01, 0.1, 1.0, 10.0]}) svc.fit(X_distances_train, y_train) hard_preds = svc.predict(X_distances_test) proba_preds = svc.predict_proba(X_distances_test) print("[SVM] Accuracy = {}".format(accuracy_score(y_test, hard_preds))) print("[SVM] Logloss = {}".format(log_loss(y_test, proba_preds))) hard_preds = pd.DataFrame(hard_preds, columns=['prediction']) proba_preds = pd.DataFrame(proba_preds, columns=['proba_{}'.format(x) for x in set(list(y_train) + list(y_test))]) hard_preds.to_csv(out_path.split('.')[0]+'_svm_hard.csv') proba_preds.to_csv(out_path.split('.')[0]+'_svm_proba.csv') def fit_lts(X_train, y_train, X_test, y_test, shap_dict, reg, max_it, shap_out_path, pred_out_path, timing_out_path): # Fit LTS model, print metrics on test-set, write away predictions and shapelets clf = ShapeletModel(n_shapelets_per_size=shap_dict, max_iter=max_it, verbose_level=0, batch_size=1, optimizer='sgd', weight_regularizer=reg) start = time.time() clf.fit( np.reshape( X_train, (X_train.shape[0], X_train.shape[1], 1) ), y_train ) learning_time = time.time() - start print('Learning shapelets took {}s'.format(learning_time)) with open(shap_out_path, 'w+') as ofp: for shap in clf.shapelets_: ofp.write(str(np.reshape(shap, (-1))) + '\n') with open(timing_out_path, 'w+') as ofp: ofp.write(str(learning_time)) X_distances_train = 
clf.transform(X_train) X_distances_test = clf.transform(X_test) print('Max distance value = {}'.format(np.max(X_distances_train))) fit_rf(X_distances_train, y_train, X_distances_test, y_test, pred_out_path) fit_lr(X_distances_train, y_train, X_distances_test, y_test, pred_out_path) fit_svm(X_distances_train, y_train, X_distances_test, y_test, pred_out_path) hyper_parameters_lts = { 'Adiac': [0.3, 0.2, 3, 0.01, 10000], 'Beef': [0.15, 0.125, 3, 0.01, 10000], 'BeetleFly': [0.15, 0.125, 1, 0.01, 5000], 'BirdChicken': [0.3, 0.075, 1, 0.1, 10000], 'ChlorineConcentration': [0.3, 0.2, 3, 0.01, 10000], 'Coffee': [0.05, 0.075, 2, 0.01, 5000], 'DiatomSizeReduction': [0.3, 0.175, 2, 0.01, 10000], 'ECGFiveDays': [0.05, 0.125, 2, 0.01, 10000], 'FaceFour': [0.3, 0.175, 3, 1.0, 5000], 'GunPoint': [0.15, 0.2, 3, 0.1, 10000], 'ItalyPowerDemand': [0.3, 0.2, 3, 0.01, 5000], 'Lightning7': [0.05, 0.075, 3, 1, 5000], 'MedicalImages': [0.3, 0.2, 2, 1, 10000], 'MoteStrain': [0.3, 0.2, 3, 1, 10000], #NOT AVAILABLE#'Otoliths': [0.15, 0.125, 3, 0.01, 2000], 'SonyAIBORobotSurface1': [0.3, 0.125, 2, 0.01, 10000], 'SonyAIBORobotSurface2': [0.3, 0.125, 2, 0.01, 10000], 'Symbols': [0.05, 0.175, 1, 0.1, 5000], 'SyntheticControl': [0.15, 0.125, 3, 0.01, 5000], 'Trace': [0.15, 0.125, 2, 0.1, 10000], 'TwoLeadECG': [0.3, 0.075, 1, 0.1, 10000] } datasets = [ 'Adiac', 'Beef', 'BeetleFly', 'BirdChicken', 'ChlorineConcentration', 'Coffee', 'ECGFiveDays', 'FaceFour', 'GunPoint', 'ItalyPowerDemand', 'Lightning7', 'MedicalImages', 'MoteStrain', 'SonyAIBORobotSurface1', 'SonyAIBORobotSurface2', 'Symbols', 'SyntheticControl', 'Trace', 'TwoLeadECG', 'DiatomSizeReduction' ] learning_sizes = defaultdict(list) genetic_sizes = defaultdict(list) metadata = sorted(load_data_train_test(), key=lambda x: x['train']['n_samples']**2*x['train']['n_features']**3) for dataset in metadata: train_df = pd.read_csv(dataset['train']['data_path']) test_df = pd.read_csv(dataset['test']['data_path']) X_train = train_df.drop('target', 
axis=1).values y_train = train_df['target'] X_test = test_df.drop('target', axis=1).values y_test = test_df['target'] map_dict = {} for j, c in enumerate(np.unique(y_train)): map_dict[c] = j y_train = y_train.map(map_dict) y_test = y_test.map(map_dict) y_train = y_train.values y_test = y_test.values nr_shap, l, r, reg, max_it = hyper_parameters_lts[dataset['train']['name']] files = glob.glob('results/lts_vs_genetic/{}_genetic_shapelets*.txt'.format(dataset['train']['name'])) if len(files): sizes = [] for f in files: shaps = parse_shapelets(open(f, 'r').read()) genetic_sizes[dataset['train']['name']].append(len(shaps)) for s in shaps: sizes.append(len(s)) shap_dict_cntr = Counter(np.random.choice(sizes, size=int(np.mean(genetic_sizes[dataset['train']['name']])))) shap_dict = {} for c in shap_dict_cntr: shap_dict[int(c)] = int(shap_dict_cntr[c]) fit_lts(X_train, y_train, X_test, y_test, dict(shap_dict), reg, max_it, 'results/lts_smaller/{}_learned_shapelets_{}.txt'.format(dataset['train']['name'], int(time.time())), 'results/lts_smaller/{}_learned_shapelets_predictions_{}.csv'.format(dataset['train']['name'], int(time.time())), 'results/lts_smaller/{}_learned_runtime_{}.csv'.format(dataset['train']['name'], int(time.time())) )
38.73913
127
0.654695
import time from collections import Counter, defaultdict import warnings; warnings.filterwarnings('ignore') import glob import re import ast import numpy as np import pandas as pd import matplotlib.pyplot as plt from algorithms import ShapeletTransformer from extractors.extractor import MultiGeneticExtractor from data.load_all_datasets import load_data_train_test from sklearn.metrics import accuracy_score, log_loss from sklearn.ensemble import RandomForestClassifier from sklearn.linear_model import LogisticRegression from sklearn.svm import SVC from sklearn.model_selection import GridSearchCV from tslearn.shapelets import ShapeletModel def parse_shapelets(shapelets): shapelets = shapelets.replace(']', '],')[:-2] shapelets = re.sub(r'\s+', ', ', shapelets) shapelets = re.sub(r',+', ',', shapelets) shapelets = shapelets.replace('],[', '], [') shapelets = shapelets.replace('[,', '[') shapelets = '[' + shapelets + ']' shapelets = re.sub(r',\s+]', ']', shapelets) return ast.literal_eval(shapelets) def fit_rf(X_distances_train, y_train, X_distances_test, y_test, out_path): rf = GridSearchCV(RandomForestClassifier(), {'n_estimators': [10, 25, 50, 100, 500], 'max_depth': [None, 3, 7, 15]}) rf.fit(X_distances_train, y_train) hard_preds = rf.predict(X_distances_test) proba_preds = rf.predict_proba(X_distances_test) print("[RF] Accuracy = {}".format(accuracy_score(y_test, hard_preds))) print("[RF] Logloss = {}".format(log_loss(y_test, proba_preds))) hard_preds = pd.DataFrame(hard_preds, columns=['prediction']) proba_preds = pd.DataFrame(proba_preds, columns=['proba_{}'.format(x) for x in set(list(y_train) + list(y_test))]) hard_preds.to_csv(out_path.split('.')[0]+'_rf_hard.csv') proba_preds.to_csv(out_path.split('.')[0]+'_rf_proba.csv') def fit_lr(X_distances_train, y_train, X_distances_test, y_test, out_path): lr = GridSearchCV(LogisticRegression(), {'penalty': ['l1', 'l2'], 'C': [0.001, 0.01, 0.1, 1.0, 10.0]}) lr.fit(X_distances_train, y_train) hard_preds = 
lr.predict(X_distances_test) proba_preds = lr.predict_proba(X_distances_test) print("[LR] Accuracy = {}".format(accuracy_score(y_test, hard_preds))) print("[LR] Logloss = {}".format(log_loss(y_test, proba_preds))) hard_preds = pd.DataFrame(hard_preds, columns=['prediction']) proba_preds = pd.DataFrame(proba_preds, columns=['proba_{}'.format(x) for x in set(list(y_train) + list(y_test))]) hard_preds.to_csv(out_path.split('.')[0]+'_lr_hard.csv') proba_preds.to_csv(out_path.split('.')[0]+'_lr_proba.csv') def fit_svm(X_distances_train, y_train, X_distances_test, y_test, out_path): svc = GridSearchCV(SVC(kernel='linear', probability=True), {'C': [0.001, 0.01, 0.1, 1.0, 10.0]}) svc.fit(X_distances_train, y_train) hard_preds = svc.predict(X_distances_test) proba_preds = svc.predict_proba(X_distances_test) print("[SVM] Accuracy = {}".format(accuracy_score(y_test, hard_preds))) print("[SVM] Logloss = {}".format(log_loss(y_test, proba_preds))) hard_preds = pd.DataFrame(hard_preds, columns=['prediction']) proba_preds = pd.DataFrame(proba_preds, columns=['proba_{}'.format(x) for x in set(list(y_train) + list(y_test))]) hard_preds.to_csv(out_path.split('.')[0]+'_svm_hard.csv') proba_preds.to_csv(out_path.split('.')[0]+'_svm_proba.csv') def fit_lts(X_train, y_train, X_test, y_test, shap_dict, reg, max_it, shap_out_path, pred_out_path, timing_out_path): clf = ShapeletModel(n_shapelets_per_size=shap_dict, max_iter=max_it, verbose_level=0, batch_size=1, optimizer='sgd', weight_regularizer=reg) start = time.time() clf.fit( np.reshape( X_train, (X_train.shape[0], X_train.shape[1], 1) ), y_train ) learning_time = time.time() - start print('Learning shapelets took {}s'.format(learning_time)) with open(shap_out_path, 'w+') as ofp: for shap in clf.shapelets_: ofp.write(str(np.reshape(shap, (-1))) + '\n') with open(timing_out_path, 'w+') as ofp: ofp.write(str(learning_time)) X_distances_train = clf.transform(X_train) X_distances_test = clf.transform(X_test) print('Max distance value = 
{}'.format(np.max(X_distances_train))) fit_rf(X_distances_train, y_train, X_distances_test, y_test, pred_out_path) fit_lr(X_distances_train, y_train, X_distances_test, y_test, pred_out_path) fit_svm(X_distances_train, y_train, X_distances_test, y_test, pred_out_path) hyper_parameters_lts = { 'Adiac': [0.3, 0.2, 3, 0.01, 10000], 'Beef': [0.15, 0.125, 3, 0.01, 10000], 'BeetleFly': [0.15, 0.125, 1, 0.01, 5000], 'BirdChicken': [0.3, 0.075, 1, 0.1, 10000], 'ChlorineConcentration': [0.3, 0.2, 3, 0.01, 10000], 'Coffee': [0.05, 0.075, 2, 0.01, 5000], 'DiatomSizeReduction': [0.3, 0.175, 2, 0.01, 10000], 'ECGFiveDays': [0.05, 0.125, 2, 0.01, 10000], 'FaceFour': [0.3, 0.175, 3, 1.0, 5000], 'GunPoint': [0.15, 0.2, 3, 0.1, 10000], 'ItalyPowerDemand': [0.3, 0.2, 3, 0.01, 5000], 'Lightning7': [0.05, 0.075, 3, 1, 5000], 'MedicalImages': [0.3, 0.2, 2, 1, 10000], 'MoteStrain': [0.3, 0.2, 3, 1, 10000], 'SonyAIBORobotSurface1': [0.3, 0.125, 2, 0.01, 10000], 'SonyAIBORobotSurface2': [0.3, 0.125, 2, 0.01, 10000], 'Symbols': [0.05, 0.175, 1, 0.1, 5000], 'SyntheticControl': [0.15, 0.125, 3, 0.01, 5000], 'Trace': [0.15, 0.125, 2, 0.1, 10000], 'TwoLeadECG': [0.3, 0.075, 1, 0.1, 10000] } datasets = [ 'Adiac', 'Beef', 'BeetleFly', 'BirdChicken', 'ChlorineConcentration', 'Coffee', 'ECGFiveDays', 'FaceFour', 'GunPoint', 'ItalyPowerDemand', 'Lightning7', 'MedicalImages', 'MoteStrain', 'SonyAIBORobotSurface1', 'SonyAIBORobotSurface2', 'Symbols', 'SyntheticControl', 'Trace', 'TwoLeadECG', 'DiatomSizeReduction' ] learning_sizes = defaultdict(list) genetic_sizes = defaultdict(list) metadata = sorted(load_data_train_test(), key=lambda x: x['train']['n_samples']**2*x['train']['n_features']**3) for dataset in metadata: train_df = pd.read_csv(dataset['train']['data_path']) test_df = pd.read_csv(dataset['test']['data_path']) X_train = train_df.drop('target', axis=1).values y_train = train_df['target'] X_test = test_df.drop('target', axis=1).values y_test = test_df['target'] map_dict = {} for j, c in 
enumerate(np.unique(y_train)): map_dict[c] = j y_train = y_train.map(map_dict) y_test = y_test.map(map_dict) y_train = y_train.values y_test = y_test.values nr_shap, l, r, reg, max_it = hyper_parameters_lts[dataset['train']['name']] files = glob.glob('results/lts_vs_genetic/{}_genetic_shapelets*.txt'.format(dataset['train']['name'])) if len(files): sizes = [] for f in files: shaps = parse_shapelets(open(f, 'r').read()) genetic_sizes[dataset['train']['name']].append(len(shaps)) for s in shaps: sizes.append(len(s)) shap_dict_cntr = Counter(np.random.choice(sizes, size=int(np.mean(genetic_sizes[dataset['train']['name']])))) shap_dict = {} for c in shap_dict_cntr: shap_dict[int(c)] = int(shap_dict_cntr[c]) fit_lts(X_train, y_train, X_test, y_test, dict(shap_dict), reg, max_it, 'results/lts_smaller/{}_learned_shapelets_{}.txt'.format(dataset['train']['name'], int(time.time())), 'results/lts_smaller/{}_learned_shapelets_predictions_{}.csv'.format(dataset['train']['name'], int(time.time())), 'results/lts_smaller/{}_learned_runtime_{}.csv'.format(dataset['train']['name'], int(time.time())) )
true
true
f70c3267c713a80d0e3f5d9f83fe64fcabba8b3f
3,941
py
Python
python/317_shortest_distance_from_all_buildings.py
liaison/LeetCode
8b10a1f6bbeb3ebfda99248994f7c325140ee2fd
[ "MIT" ]
17
2016-03-01T22:40:53.000Z
2021-04-19T02:15:03.000Z
python/317_shortest_distance_from_all_buildings.py
liaison/LeetCode
8b10a1f6bbeb3ebfda99248994f7c325140ee2fd
[ "MIT" ]
null
null
null
python/317_shortest_distance_from_all_buildings.py
liaison/LeetCode
8b10a1f6bbeb3ebfda99248994f7c325140ee2fd
[ "MIT" ]
3
2019-03-07T03:48:43.000Z
2020-04-05T01:11:36.000Z
class SolutionTLE: def shortestDistance(self, grid: List[List[int]]) -> int: buildings = [] rows, cols = len(grid), len(grid[0]) for row in range(rows): for col in range(cols): if grid[row][col] == 1: buildings.append((row, col)) def bfs(start): row, col = start visited = set() queue = deque([(row, col, 0)]) distance = {} while queue: curr_row, curr_col, steps = queue.popleft() for offset_row, offset_col in [(0, 1), (1, 0), (0, -1), (-1, 0)]: next_row, next_col = curr_row + offset_row, curr_col + offset_col if next_row < 0 or next_row >= rows \ or next_col < 0 or next_col >= cols: continue if grid[next_row][next_col] == 0: if (next_row, next_col) not in visited: visited.add((next_row, next_col)) distance[(next_row, next_col)] = steps + 1 queue.append((next_row, next_col, steps + 1)) return distance total_distance = {} for start in buildings: distances = bfs(start) for land, min_distance in distances.items(): if land not in total_distance: total_distance[land] = (0, 0) curr_count, curr_distance = total_distance[land] total_distance[land] = (curr_count + 1, curr_distance + min_distance) total_buildings = len(buildings) min_distance_sum = float('inf') for count, min_distance in total_distance.values(): if count == total_buildings: min_distance_sum = min(min_distance_sum, min_distance) return min_distance_sum if min_distance_sum != float('inf') else -1 class SolutionArray: def shortestDistance(self, grid: List[List[int]]) -> int: buildings = [] rows, cols = len(grid), len(grid[0]) for row in range(rows): for col in range(cols): if grid[row][col] == 1: buildings.append((row, col)) def bfs(start): row, col = start visited = [[False]*cols for _ in range(rows)] queue = deque([(row, col, 0)]) distance = {} while queue: curr_row, curr_col, steps = queue.popleft() for offset_row, offset_col in [(0, 1), (1, 0), (0, -1), (-1, 0)]: next_row, next_col = curr_row + offset_row, curr_col + offset_col if next_row < 0 or next_row >= rows \ or next_col < 0 or next_col >= cols: continue 
if grid[next_row][next_col] == 0: if not visited[next_row][next_col]: visited[next_row][next_col] = True distance[(next_row, next_col)] = steps + 1 queue.append((next_row, next_col, steps + 1)) return distance total_distance = {} for start in buildings: distances = bfs(start) for land, min_distance in distances.items(): if land not in total_distance: total_distance[land] = (0, 0) curr_count, curr_distance = total_distance[land] total_distance[land] = (curr_count + 1, curr_distance + min_distance) total_buildings = len(buildings) min_distance_sum = float('inf') for count, min_distance in total_distance.values(): if count == total_buildings: min_distance_sum = min(min_distance_sum, min_distance) return min_distance_sum if min_distance_sum != float('inf') else -1
36.831776
85
0.511038
class SolutionTLE: def shortestDistance(self, grid: List[List[int]]) -> int: buildings = [] rows, cols = len(grid), len(grid[0]) for row in range(rows): for col in range(cols): if grid[row][col] == 1: buildings.append((row, col)) def bfs(start): row, col = start visited = set() queue = deque([(row, col, 0)]) distance = {} while queue: curr_row, curr_col, steps = queue.popleft() for offset_row, offset_col in [(0, 1), (1, 0), (0, -1), (-1, 0)]: next_row, next_col = curr_row + offset_row, curr_col + offset_col if next_row < 0 or next_row >= rows \ or next_col < 0 or next_col >= cols: continue if grid[next_row][next_col] == 0: if (next_row, next_col) not in visited: visited.add((next_row, next_col)) distance[(next_row, next_col)] = steps + 1 queue.append((next_row, next_col, steps + 1)) return distance total_distance = {} for start in buildings: distances = bfs(start) for land, min_distance in distances.items(): if land not in total_distance: total_distance[land] = (0, 0) curr_count, curr_distance = total_distance[land] total_distance[land] = (curr_count + 1, curr_distance + min_distance) total_buildings = len(buildings) min_distance_sum = float('inf') for count, min_distance in total_distance.values(): if count == total_buildings: min_distance_sum = min(min_distance_sum, min_distance) return min_distance_sum if min_distance_sum != float('inf') else -1 class SolutionArray: def shortestDistance(self, grid: List[List[int]]) -> int: buildings = [] rows, cols = len(grid), len(grid[0]) for row in range(rows): for col in range(cols): if grid[row][col] == 1: buildings.append((row, col)) def bfs(start): row, col = start visited = [[False]*cols for _ in range(rows)] queue = deque([(row, col, 0)]) distance = {} while queue: curr_row, curr_col, steps = queue.popleft() for offset_row, offset_col in [(0, 1), (1, 0), (0, -1), (-1, 0)]: next_row, next_col = curr_row + offset_row, curr_col + offset_col if next_row < 0 or next_row >= rows \ or next_col < 0 or next_col >= cols: continue 
if grid[next_row][next_col] == 0: if not visited[next_row][next_col]: visited[next_row][next_col] = True distance[(next_row, next_col)] = steps + 1 queue.append((next_row, next_col, steps + 1)) return distance total_distance = {} for start in buildings: distances = bfs(start) for land, min_distance in distances.items(): if land not in total_distance: total_distance[land] = (0, 0) curr_count, curr_distance = total_distance[land] total_distance[land] = (curr_count + 1, curr_distance + min_distance) total_buildings = len(buildings) min_distance_sum = float('inf') for count, min_distance in total_distance.values(): if count == total_buildings: min_distance_sum = min(min_distance_sum, min_distance) return min_distance_sum if min_distance_sum != float('inf') else -1
true
true
f70c343abe32b3e4f1f802f02e6469b2c98b9f06
1,170
py
Python
hearthstone/training/pytorch/worker/distributed/remote_agent.py
JDBumgardner/stone_ground_hearth_battles
9fe095651fab60e8ddbf563f0b9b7f3e723d5f4f
[ "Apache-2.0" ]
20
2020-08-01T03:14:57.000Z
2021-12-19T11:47:50.000Z
hearthstone/training/pytorch/worker/distributed/remote_agent.py
JDBumgardner/stone_ground_hearth_battles
9fe095651fab60e8ddbf563f0b9b7f3e723d5f4f
[ "Apache-2.0" ]
48
2020-08-01T03:06:43.000Z
2022-02-27T10:03:47.000Z
hearthstone/training/pytorch/worker/distributed/remote_agent.py
JDBumgardner/stone_ground_hearth_battles
9fe095651fab60e8ddbf563f0b9b7f3e723d5f4f
[ "Apache-2.0" ]
3
2020-06-28T01:23:37.000Z
2021-11-11T23:09:36.000Z
from torch.distributed.rpc import RRef from hearthstone.simulator.agent import AnnotatingAgent, Annotation, DiscoverChoiceAction, StandardAction, \ RearrangeCardsAction, HeroChoiceAction class RemoteAgent(AnnotatingAgent): def __init__(self, remote_agent: RRef): self.remote_agent = remote_agent async def hero_choice_action(self, player: 'Player') -> HeroChoiceAction: return self.remote_agent.rpc_sync().hero_choice_action(player) async def annotated_rearrange_cards(self, player: 'Player') -> (RearrangeCardsAction, Annotation): return self.remote_agent.rpc_sync().annotated_rearrange_cards(player) async def annotated_buy_phase_action(self, player: 'Player') -> (StandardAction, Annotation): return self.remote_agent.rpc_sync().annotated_buy_phase_action(player) async def annotated_discover_choice_action(self, player: 'Player') -> (DiscoverChoiceAction, Annotation): return self.remote_agent.rpc_sync().annotated_discover_choice_action(player) async def game_over(self, player: 'Player', ranking: int) -> Annotation: return self.remote_agent.rpc_sync().game_over(player, ranking)
46.8
109
0.769231
from torch.distributed.rpc import RRef from hearthstone.simulator.agent import AnnotatingAgent, Annotation, DiscoverChoiceAction, StandardAction, \ RearrangeCardsAction, HeroChoiceAction class RemoteAgent(AnnotatingAgent): def __init__(self, remote_agent: RRef): self.remote_agent = remote_agent async def hero_choice_action(self, player: 'Player') -> HeroChoiceAction: return self.remote_agent.rpc_sync().hero_choice_action(player) async def annotated_rearrange_cards(self, player: 'Player') -> (RearrangeCardsAction, Annotation): return self.remote_agent.rpc_sync().annotated_rearrange_cards(player) async def annotated_buy_phase_action(self, player: 'Player') -> (StandardAction, Annotation): return self.remote_agent.rpc_sync().annotated_buy_phase_action(player) async def annotated_discover_choice_action(self, player: 'Player') -> (DiscoverChoiceAction, Annotation): return self.remote_agent.rpc_sync().annotated_discover_choice_action(player) async def game_over(self, player: 'Player', ranking: int) -> Annotation: return self.remote_agent.rpc_sync().game_over(player, ranking)
true
true
f70c34cd3bb99fa7462ca40b5ee544919ee9b2e1
279
py
Python
tests/artificial/transf_RelativeDifference/trend_MovingMedian/cycle_30/ar_/test_artificial_1024_RelativeDifference_MovingMedian_30__100.py
shaido987/pyaf
b9afd089557bed6b90b246d3712c481ae26a1957
[ "BSD-3-Clause" ]
377
2016-10-13T20:52:44.000Z
2022-03-29T18:04:14.000Z
tests/artificial/transf_RelativeDifference/trend_MovingMedian/cycle_30/ar_/test_artificial_1024_RelativeDifference_MovingMedian_30__100.py
ysdede/pyaf
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
[ "BSD-3-Clause" ]
160
2016-10-13T16:11:53.000Z
2022-03-28T04:21:34.000Z
tests/artificial/transf_RelativeDifference/trend_MovingMedian/cycle_30/ar_/test_artificial_1024_RelativeDifference_MovingMedian_30__100.py
ysdede/pyaf
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
[ "BSD-3-Clause" ]
63
2017-03-09T14:51:18.000Z
2022-03-27T20:52:57.000Z
import pyaf.Bench.TS_datasets as tsds import tests.artificial.process_artificial_dataset as art art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 30, transform = "RelativeDifference", sigma = 0.0, exog_count = 100, ar_order = 0);
39.857143
179
0.74552
import pyaf.Bench.TS_datasets as tsds import tests.artificial.process_artificial_dataset as art art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 30, transform = "RelativeDifference", sigma = 0.0, exog_count = 100, ar_order = 0);
true
true
f70c34e56b4dd10253f14bf6d684816904c53872
168,083
py
Python
src/sage/modular/abvar/abvar.py
switzel/sage
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
[ "BSL-1.0" ]
null
null
null
src/sage/modular/abvar/abvar.py
switzel/sage
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
[ "BSL-1.0" ]
null
null
null
src/sage/modular/abvar/abvar.py
switzel/sage
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
[ "BSL-1.0" ]
1
2020-07-24T12:20:37.000Z
2020-07-24T12:20:37.000Z
""" Base class for modular abelian varieties AUTHORS: - William Stein (2007-03) TESTS:: sage: A = J0(33) sage: D = A.decomposition(); D [ Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33), Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) ] sage: loads(dumps(D)) == D True sage: loads(dumps(A)) == A True """ ########################################################################### # Copyright (C) 2007 William Stein <wstein@gmail.com> # # Distributed under the terms of the GNU General Public License (GPL) # # http://www.gnu.org/licenses/ # ########################################################################### from sage.categories.all import ModularAbelianVarieties from sage.structure.sequence import Sequence, Sequence_generic from sage.structure.parent_base import ParentWithBase from morphism import HeckeOperator, Morphism, DegeneracyMap from torsion_subgroup import RationalTorsionSubgroup, QQbarTorsionSubgroup from finite_subgroup import (FiniteSubgroup_lattice, FiniteSubgroup, TorsionPoint) from cuspidal_subgroup import CuspidalSubgroup, RationalCuspidalSubgroup, RationalCuspSubgroup from sage.rings.all import (ZZ, QQ, QQbar, LCM, divisors, Integer, prime_range) from sage.rings.ring import is_Ring from sage.modules.free_module import is_FreeModule from sage.modular.arithgroup.all import is_CongruenceSubgroup, is_Gamma0, is_Gamma1, is_GammaH from sage.modular.modsym.all import ModularSymbols from sage.modular.modsym.space import ModularSymbolsSpace from sage.matrix.all import matrix, block_diagonal_matrix, identity_matrix from sage.modules.all import vector from sage.groups.all import AbelianGroup from sage.databases.cremona import cremona_letter_code from sage.misc.all import prod from copy import copy import homology import homspace import lseries def is_ModularAbelianVariety(x): """ Return True if x is a modular abelian variety. 
INPUT: - ``x`` - object EXAMPLES:: sage: from sage.modular.abvar.abvar import is_ModularAbelianVariety sage: is_ModularAbelianVariety(5) False sage: is_ModularAbelianVariety(J0(37)) True Returning True is a statement about the data type not whether or not some abelian variety is modular:: sage: is_ModularAbelianVariety(EllipticCurve('37a')) False """ return isinstance(x, ModularAbelianVariety_abstract) class ModularAbelianVariety_abstract(ParentWithBase): def __init__(self, groups, base_field, is_simple=None, newform_level=None, isogeny_number=None, number=None, check=True): """ Abstract base class for modular abelian varieties. INPUT: - ``groups`` - a tuple of congruence subgroups - ``base_field`` - a field - ``is_simple`` - bool; whether or not self is simple - ``newform_level`` - if self is isogenous to a newform abelian variety, returns the level of that abelian variety - ``isogeny_number`` - which isogeny class the corresponding newform is in; this corresponds to the Cremona letter code - ``number`` - the t number of the degeneracy map that this abelian variety is the image under - ``check`` - whether to do some type checking on the defining data EXAMPLES: One should not create an instance of this class, but we do so anyways here as an example:: sage: A = sage.modular.abvar.abvar.ModularAbelianVariety_abstract((Gamma0(37),), QQ) sage: type(A) <class 'sage.modular.abvar.abvar.ModularAbelianVariety_abstract_with_category'> All hell breaks loose if you try to do anything with `A`:: sage: A <repr(<sage.modular.abvar.abvar.ModularAbelianVariety_abstract_with_category at 0x...>) failed: NotImplementedError: BUG -- lattice method must be defined in derived class> All instances of this class are in the category of modular abelian varieties:: sage: A.category() Category of modular abelian varieties over Rational Field sage: J0(23).category() Category of modular abelian varieties over Rational Field """ if check: if not isinstance(groups, tuple): raise TypeError("groups 
must be a tuple") for G in groups: if not is_CongruenceSubgroup(G): raise TypeError("each element of groups must be a congruence subgroup") self.__groups = groups if is_simple is not None: self.__is_simple = is_simple if newform_level is not None: self.__newform_level = newform_level if number is not None: self.__degen_t = number if isogeny_number is not None: self.__isogeny_number = isogeny_number if check and not is_Ring(base_field) and base_field.is_field(): raise TypeError("base_field must be a field") ParentWithBase.__init__(self, base_field, category = ModularAbelianVarieties(base_field)) def groups(self): r""" Return an ordered tuple of the congruence subgroups that the ambient product Jacobian is attached to. Every modular abelian variety is a finite quotient of an abelian subvariety of a product of modular Jacobians `J_\Gamma`. This function returns a tuple containing the groups `\Gamma`. EXAMPLES:: sage: A = (J0(37) * J1(13))[0]; A Simple abelian subvariety 13aG1(1,13) of dimension 2 of J0(37) x J1(13) sage: A.groups() (Congruence Subgroup Gamma0(37), Congruence Subgroup Gamma1(13)) """ return self.__groups ############################################################################# # lattice() *must* be defined by every derived class!!!! def lattice(self): """ Return lattice in ambient cuspidal modular symbols product that defines this modular abelian variety. This must be defined in each derived class. OUTPUT: a free module over `\ZZ` EXAMPLES:: sage: A = sage.modular.abvar.abvar.ModularAbelianVariety_abstract((Gamma0(37),), QQ) sage: A <repr(<sage.modular.abvar.abvar.ModularAbelianVariety_abstract_with_category at 0x...>) failed: NotImplementedError: BUG -- lattice method must be defined in derived class> """ raise NotImplementedError("BUG -- lattice method must be defined in derived class") ############################################################################# def free_module(self): r""" Synonym for ``self.lattice()``. 
OUTPUT: a free module over `\ZZ` EXAMPLES:: sage: J0(37).free_module() Ambient free module of rank 4 over the principal ideal domain Integer Ring sage: J0(37)[0].free_module() Free module of degree 4 and rank 2 over Integer Ring Echelon basis matrix: [ 1 -1 1 0] [ 0 0 2 -1] """ return self.lattice() def vector_space(self): r""" Return vector space corresponding to the modular abelian variety. This is the lattice tensored with `\QQ`. EXAMPLES:: sage: J0(37).vector_space() Vector space of dimension 4 over Rational Field sage: J0(37)[0].vector_space() Vector space of degree 4 and dimension 2 over Rational Field Basis matrix: [ 1 -1 0 1/2] [ 0 0 1 -1/2] """ try: return self.__vector_space except AttributeError: self.__vector_space = self.lattice().change_ring(QQ) return self.__vector_space def base_field(self): r""" Synonym for ``self.base_ring()``. EXAMPLES:: sage: J0(11).base_field() Rational Field """ return self.base_ring() def base_extend(self, K): """ EXAMPLES:: sage: A = J0(37); A Abelian variety J0(37) of dimension 2 sage: A.base_extend(QQbar) Abelian variety J0(37) over Algebraic Field of dimension 2 sage: A.base_extend(GF(7)) Abelian variety J0(37) over Finite Field of size 7 of dimension 2 """ N = self.__newform_level if hasattr(self, '__newform_level') else None return ModularAbelianVariety(self.groups(), self.lattice(), K, newform_level=N) def __contains__(self, x): """ Determine whether or not self contains x. 
EXAMPLES:: sage: J = J0(67); G = (J[0] + J[1]).intersection(J[1] + J[2]) sage: G[0] Finite subgroup with invariants [5, 10] over QQbar of Abelian subvariety of dimension 3 of J0(67) sage: a = G[0].0; a [(1/10, 1/10, 3/10, 1/2, 1, -2, -3, 33/10, 0, -1/2)] sage: a in J[0] False sage: a in (J[0]+J[1]) True sage: a in (J[1]+J[2]) True sage: C = G[1] # abelian variety in kernel sage: G[0].0 [(1/10, 1/10, 3/10, 1/2, 1, -2, -3, 33/10, 0, -1/2)] sage: 5*G[0].0 [(1/2, 1/2, 3/2, 5/2, 5, -10, -15, 33/2, 0, -5/2)] sage: 5*G[0].0 in C True """ if not isinstance(x, TorsionPoint): return False if x.parent().abelian_variety().groups() != self.groups(): return False v = x.element() n = v.denominator() nLambda = self.ambient_variety().lattice().scale(n) return n*v in self.lattice() + nLambda def __cmp__(self, other): """ Compare two modular abelian varieties. If other is not a modular abelian variety, compares the types of self and other. If other is a modular abelian variety, compares the groups, then if those are the same, compares the newform level and isogeny class number and degeneracy map numbers. If those are not defined or matched up, compare the underlying lattices. EXAMPLES:: sage: cmp(J0(37)[0], J0(37)[1]) -1 sage: cmp(J0(33)[0], J0(33)[1]) -1 sage: cmp(J0(37), 5) #random 1 """ if not isinstance(other, ModularAbelianVariety_abstract): return cmp(type(self), type(other)) if self is other: return 0 c = cmp(self.groups(), other.groups()) if c: return c try: c = cmp(self.__newform_level, other.__newform_level) if c: return c except AttributeError: pass try: c = cmp(self.__isogeny_number, other.__isogeny_number) if c: return c except AttributeError: pass try: c = cmp(self.__degen_t, other.__degen_t) if c: return c except AttributeError: pass # NOTE!! having the same newform level, isogeny class number, # and degen_t does not imply two abelian varieties are equal. # See the docstring for self.label. 
return cmp(self.lattice(), other.lattice()) def __radd__(self,other): """ Return other + self when other is 0. Otherwise raise a TypeError. EXAMPLES:: sage: int(0) + J0(37) Abelian variety J0(37) of dimension 2 """ if other == 0: return self raise TypeError def _repr_(self): """ Return string representation of this modular abelian variety. This is just the generic base class, so it's unlikely to be called in practice. EXAMPLES:: sage: A = J0(23) sage: import sage.modular.abvar.abvar as abvar sage: abvar.ModularAbelianVariety_abstract._repr_(A) 'Abelian variety J0(23) of dimension 2' :: sage: (J0(11) * J0(33))._repr_() 'Abelian variety J0(11) x J0(33) of dimension 4' """ field = '' if self.base_field() == QQ else ' over %s'%self.base_field() #if self.newform_level(none_if_not_known=True) is None: simple = self.is_simple(none_if_not_known=True) if simple and self.dimension() > 0: label = self.label() + ' ' else: label = '' simple = 'Simple a' if simple else 'A' if self.is_ambient(): return '%sbelian variety %s%s of dimension %s'%(simple, self._ambient_repr(), field, self.dimension()) if self.is_subvariety_of_ambient_jacobian(): sub = 'subvariety' else: sub = 'variety factor' return "%sbelian %s %sof dimension %s of %s%s"%( simple, sub, label, self.dimension(), self._ambient_repr(), field) def label(self): r""" Return the label associated to this modular abelian variety. The format of the label is [level][isogeny class][group](t, ambient level) If this abelian variety `B` has the above label, this implies only that `B` is isogenous to the newform abelian variety `A_f` associated to the newform with label [level][isogeny class][group]. The [group] is empty for `\Gamma_0(N)`, is G1 for `\Gamma_1(N)` and is GH[...] for `\Gamma_H(N)`. .. warning:: The sum of `\delta_s(A_f)` for all `s\mid t` contains `A`, but no sum for a proper divisor of `t` contains `A`. It need *not* be the case that `B` is equal to `\delta_t(A_f)`!!! 
OUTPUT: string EXAMPLES:: sage: J0(11).label() '11a(1,11)' sage: J0(11)[0].label() '11a(1,11)' sage: J0(33)[2].label() '33a(1,33)' sage: J0(22).label() Traceback (most recent call last): ... ValueError: self must be simple We illustrate that self need not equal `\delta_t(A_f)`:: sage: J = J0(11); phi = J.degeneracy_map(33, 1) + J.degeneracy_map(33,3) sage: B = phi.image(); B Abelian subvariety of dimension 1 of J0(33) sage: B.decomposition() [ Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33) ] sage: C = J.degeneracy_map(33,3).image(); C Abelian subvariety of dimension 1 of J0(33) sage: C == B False """ degen = str(self.degen_t()).replace(' ','') return '%s%s'%(self.newform_label(), degen) def newform_label(self): """ Return the label [level][isogeny class][group] of the newform `f` such that this abelian variety is isogenous to the newform abelian variety `A_f`. If this abelian variety is not simple, raise a ValueError. OUTPUT: string EXAMPLES:: sage: J0(11).newform_label() '11a' sage: J0(33)[2].newform_label() '33a' The following fails since `J_0(33)` is not simple:: sage: J0(33).newform_label() Traceback (most recent call last): ... ValueError: self must be simple """ N, G = self.newform_level() if is_Gamma0(G): group = '' elif is_Gamma1(G): group = 'G1' elif is_GammaH(G): group = 'GH%s'%(str(G._generators_for_H()).replace(' ','')) return '%s%s%s'%(N, cremona_letter_code(self.isogeny_number()), group) def _isogeny_to_newform_abelian_variety(self): r""" Return an isogeny from self to an abelian variety `A_f` attached to a newform. If self is not simple (so that no such isogeny exists), raise a ValueError. 
EXAMPLES:: sage: J0(22)[0]._isogeny_to_newform_abelian_variety() Abelian variety morphism: From: Simple abelian subvariety 11a(1,22) of dimension 1 of J0(22) To: Newform abelian subvariety 11a of dimension 1 of J0(11) sage: J = J0(11); phi = J.degeneracy_map(33, 1) + J.degeneracy_map(33,3) sage: A = phi.image() sage: A._isogeny_to_newform_abelian_variety().matrix() [-3 3] [ 0 -3] """ try: return self._newform_isogeny except AttributeError: pass if not self.is_simple(): raise ValueError("self is not simple") ls = [] t, N = self.decomposition()[0].degen_t() A = self.ambient_variety() for i in range(len(self.groups())): g = self.groups()[i] if N == g.level(): J = g.modular_abelian_variety() d = J.degeneracy_map(self.newform_level()[0], t) p = A.project_to_factor(i) mat = p.matrix() * d.matrix() if not (self.lattice().matrix() * mat).is_zero(): break from constructor import AbelianVariety Af = AbelianVariety(self.newform_label()) H = A.Hom(Af.ambient_variety()) m = H(Morphism(H, mat)) self._newform_isogeny = m.restrict_domain(self).restrict_codomain(Af) return self._newform_isogeny def _simple_isogeny(self, other): """ Given self and other, if both are simple, and correspond to the same newform with the same congruence subgroup, return an isogeny. Otherwise, raise a ValueError. INPUT: - ``self, other`` - modular abelian varieties OUTPUT: an isogeny EXAMPLES:: sage: J = J0(33); J Abelian variety J0(33) of dimension 3 sage: J[0]._simple_isogeny(J[1]) Abelian variety morphism: From: Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33) To: Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33) The following illustrates how simple isogeny is only implemented when the ambients are the same:: sage: J[0]._simple_isogeny(J1(11)) Traceback (most recent call last): ... 
NotImplementedError: _simple_isogeny only implemented when both abelian variety have the same ambient product Jacobian """ if not is_ModularAbelianVariety(other): raise TypeError("other must be a modular abelian variety") if not self.is_simple(): raise ValueError("self is not simple") if not other.is_simple(): raise ValueError("other is not simple") if self.groups() != other.groups(): # The issue here is that the stuff below probably won't make any sense at all if we don't know # that the two newform abelian varieties $A_f$ are identical. raise NotImplementedError("_simple_isogeny only implemented when both abelian variety have the same ambient product Jacobian") if (self.newform_level() != other.newform_level()) or \ (self.isogeny_number() != other.isogeny_number()): raise ValueError("self and other do not correspond to the same newform") return other._isogeny_to_newform_abelian_variety().complementary_isogeny() * \ self._isogeny_to_newform_abelian_variety() def _Hom_(self, B, cat=None): """ INPUT: - ``B`` - modular abelian varieties - ``cat`` - category EXAMPLES:: sage: J0(37)._Hom_(J1(37)) Space of homomorphisms from Abelian variety J0(37) of dimension 2 to Abelian variety J1(37) of dimension 40 sage: J0(37)._Hom_(J1(37)).homset_category() Category of modular abelian varieties over Rational Field """ if cat is None: K = self.base_field(); L = B.base_field() if K == L: F = K elif K == QQbar or L == QQbar: F = QQbar else: # TODO -- improve this raise ValueError("please specify a category") cat = ModularAbelianVarieties(F) if self is B: return self.endomorphism_ring(cat) else: return homspace.Homspace(self, B, cat) def in_same_ambient_variety(self, other): """ Return True if self and other are abelian subvarieties of the same ambient product Jacobian. 
EXAMPLES:: sage: A,B,C = J0(33) sage: A.in_same_ambient_variety(B) True sage: A.in_same_ambient_variety(J0(11)) False """ if not is_ModularAbelianVariety(other): return False if self.groups() != other.groups(): return False if not self.is_subvariety_of_ambient_jacobian() or not other.is_subvariety_of_ambient_jacobian(): return False return True def modular_kernel(self): """ Return the modular kernel of this abelian variety, which is the kernel of the canonical polarization of self. EXAMPLES:: sage: A = AbelianVariety('33a'); A Newform abelian subvariety 33a of dimension 1 of J0(33) sage: A.modular_kernel() Finite subgroup with invariants [3, 3] over QQ of Newform abelian subvariety 33a of dimension 1 of J0(33) """ try: return self.__modular_kernel except AttributeError: _, f, _ = self.dual() G = f.kernel()[0] self.__modular_kernel = G return G def modular_degree(self): """ Return the modular degree of this abelian variety, which is the square root of the degree of the modular kernel. EXAMPLES:: sage: A = AbelianVariety('37a') sage: A.modular_degree() 2 """ n = self.modular_kernel().order() return ZZ(n.sqrt()) def intersection(self, other): """ Returns the intersection of self and other inside a common ambient Jacobian product. INPUT: - ``other`` - a modular abelian variety or a finite group OUTPUT: If other is a modular abelian variety: - ``G`` - finite subgroup of self - ``A`` - abelian variety (identity component of intersection) If other is a finite group: - ``G`` - a finite group EXAMPLES: We intersect some abelian varieties with finite intersection. 
:: sage: J = J0(37) sage: J[0].intersection(J[1]) (Finite subgroup with invariants [2, 2] over QQ of Simple abelian subvariety 37a(1,37) of dimension 1 of J0(37), Simple abelian subvariety of dimension 0 of J0(37)) :: sage: D = list(J0(65)); D [Simple abelian subvariety 65a(1,65) of dimension 1 of J0(65), Simple abelian subvariety 65b(1,65) of dimension 2 of J0(65), Simple abelian subvariety 65c(1,65) of dimension 2 of J0(65)] sage: D[0].intersection(D[1]) (Finite subgroup with invariants [2] over QQ of Simple abelian subvariety 65a(1,65) of dimension 1 of J0(65), Simple abelian subvariety of dimension 0 of J0(65)) sage: (D[0]+D[1]).intersection(D[1]+D[2]) (Finite subgroup with invariants [2] over QQbar of Abelian subvariety of dimension 3 of J0(65), Abelian subvariety of dimension 2 of J0(65)) :: sage: J = J0(33) sage: J[0].intersection(J[1]) (Finite subgroup with invariants [5] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Simple abelian subvariety of dimension 0 of J0(33)) Next we intersect two abelian varieties with non-finite intersection:: sage: J = J0(67); D = J.decomposition(); D [ Simple abelian subvariety 67a(1,67) of dimension 1 of J0(67), Simple abelian subvariety 67b(1,67) of dimension 2 of J0(67), Simple abelian subvariety 67c(1,67) of dimension 2 of J0(67) ] sage: (D[0] + D[1]).intersection(D[1] + D[2]) (Finite subgroup with invariants [5, 10] over QQbar of Abelian subvariety of dimension 3 of J0(67), Abelian subvariety of dimension 2 of J0(67)) """ # First check whether we are intersecting an abelian variety # with a finite subgroup. If so, call the intersection method # for the finite group, which does know how to intersect with # an abelian variety. if isinstance(other, FiniteSubgroup): return other.intersection(self) # Now both self and other are abelian varieties. We require # at least that the ambient Jacobian product is the same for # them. 
if not self.in_same_ambient_variety(other): raise TypeError("other must be an abelian variety in the same ambient space") # 1. Compute the abelian variety (connected) part of the intersection V = self.vector_space().intersection(other.vector_space()) if V.dimension() > 0: # If there is a nonzero abelian variety, get the actual # lattice that defines it. We intersect (=saturate) in # the sum of the lattices, to ensure that the intersection # is an abelian subvariety of both self and other (even if # they aren't subvarieties of the ambient Jacobian). lattice = V.intersection(self.lattice() + other.lattice()) A = ModularAbelianVariety(self.groups(), lattice, self.base_field(), check=False) else: A = self.zero_subvariety() # 2. Compute the finite intersection group when the # intersection is finite, or a group that maps surjectively # onto the component group in general. # First we get basis matrices for the lattices that define # both abelian varieties. L = self.lattice().basis_matrix() M = other.lattice().basis_matrix() # Then we stack matrices and find a subset that forms a # basis. LM = L.stack(M) P = LM.pivot_rows() V = (ZZ**L.ncols()).span_of_basis([LM.row(p) for p in P]) S = (self.lattice() + other.lattice()).saturation() n = self.lattice().rank() # Finally we project onto the L factor. gens = [L.linear_combination_of_rows(v.list()[:n]) for v in V.coordinate_module(S).basis()] if A.dimension() > 0: finitegroup_base_field = QQbar else: finitegroup_base_field = self.base_field() G = self.finite_subgroup(gens, field_of_definition=finitegroup_base_field) return G, A def __add__(self, other): r""" Returns the sum of the *images* of self and other inside the ambient Jacobian product. self and other must be abelian subvarieties of the ambient Jacobian product. ..warning:: The sum of course only makes sense in some ambient variety, and by definition this function takes the sum of the images of both self and other in the ambient product Jacobian. 
EXAMPLES: We compute the sum of two abelian varieties of `J_0(33)`:: sage: J = J0(33) sage: J[0] + J[1] Abelian subvariety of dimension 2 of J0(33) We sum all three and get the full `J_0(33)`:: sage: (J[0] + J[1]) + (J[1] + J[2]) Abelian variety J0(33) of dimension 3 Adding to zero works:: sage: J[0] + 0 Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33) Hence the sum command works:: sage: sum([J[0], J[2]]) Abelian subvariety of dimension 2 of J0(33) We try to add something in `J_0(33)` to something in `J_0(11)`; this shouldn't and doesn't work. :: sage: J[0] + J0(11) Traceback (most recent call last): ... TypeError: sum not defined since ambient spaces different We compute the diagonal image of `J_0(11)` in `J_0(33)`, then add the result to the new elliptic curve of level `33`. :: sage: A = J0(11) sage: B = (A.degeneracy_map(33,1) + A.degeneracy_map(33,3)).image() sage: B + J0(33)[2] Abelian subvariety of dimension 2 of J0(33) TESTS: This exposed a bug in HNF (see trac #4527):: sage: A = J0(206).new_subvariety().decomposition()[3] ; A # long time Simple abelian subvariety 206d(1,206) of dimension 4 of J0(206) sage: B = J0(206).old_subvariety(2) ; B # long time Abelian subvariety of dimension 16 of J0(206) sage: A+B # long time Abelian subvariety of dimension 20 of J0(206) """ if not is_ModularAbelianVariety(other): if other == 0: return self raise TypeError("other must be a modular abelian variety") if self.groups() != other.groups(): raise ValueError("incompatible ambient Jacobians") L = self.vector_space() + other.vector_space() M = L.intersection(self._ambient_lattice()) return ModularAbelianVariety(self.groups(), M, self.base_field(), check=False) def direct_product(self, other): """ Compute the direct product of self and other. 
INPUT: - ``self, other`` - modular abelian varieties OUTPUT: abelian variety EXAMPLES:: sage: J0(11).direct_product(J1(13)) Abelian variety J0(11) x J1(13) of dimension 3 sage: A = J0(33)[0].direct_product(J0(33)[1]); A Abelian subvariety of dimension 2 of J0(33) x J0(33) sage: A.lattice() Free module of degree 12 and rank 4 over Integer Ring Echelon basis matrix: [ 1 1 -2 0 2 -1 0 0 0 0 0 0] [ 0 3 -2 -1 2 0 0 0 0 0 0 0] [ 0 0 0 0 0 0 1 0 0 0 -1 2] [ 0 0 0 0 0 0 0 1 -1 1 0 -2] """ return self * other def __pow__(self, n): """ Return `n^{th}` power of self. INPUT: - ``n`` - a nonnegative integer OUTPUT: an abelian variety EXAMPLES:: sage: J = J0(37) sage: J^0 Simple abelian subvariety of dimension 0 of J0(37) sage: J^1 Abelian variety J0(37) of dimension 2 sage: J^1 is J True """ n = ZZ(n) if n < 0: raise ValueError("n must be nonnegative") if n == 0: return self.zero_subvariety() if n == 1: return self groups = self.groups() * n L = self.lattice().basis_matrix() lattice = block_diagonal_matrix([L]*n).row_module(ZZ) return ModularAbelianVariety(groups, lattice, self.base_field(), check=False) def __mul__(self, other): """ Compute the direct product of self and other. 
EXAMPLES: Some modular Jacobians:: sage: J0(11) * J0(33) Abelian variety J0(11) x J0(33) of dimension 4 sage: J0(11) * J0(33) * J0(11) Abelian variety J0(11) x J0(33) x J0(11) of dimension 5 We multiply some factors of `J_0(65)`:: sage: d = J0(65).decomposition() sage: d[0] * d[1] * J0(11) Abelian subvariety of dimension 4 of J0(65) x J0(65) x J0(11) """ if not is_ModularAbelianVariety(other): raise TypeError("other must be a modular abelian variety") if other.base_ring() != self.base_ring(): raise TypeError("self and other must have the same base ring") groups = tuple(list(self.groups()) + list(other.groups())) lattice = self.lattice().direct_sum(other.lattice()) base_field = self.base_ring() return ModularAbelianVariety(groups, lattice, base_field, check=False) def quotient(self, other): """ Compute the quotient of self and other, where other is either an abelian subvariety of self or a finite subgroup of self. INPUT: - ``other`` - a finite subgroup or subvariety OUTPUT: a pair (A, phi) with phi the quotient map from self to A EXAMPLES: We quotient `J_0(33)` out by an abelian subvariety:: sage: Q, f = J0(33).quotient(J0(33)[0]) sage: Q Abelian variety factor of dimension 2 of J0(33) sage: f Abelian variety morphism: From: Abelian variety J0(33) of dimension 3 To: Abelian variety factor of dimension 2 of J0(33) We quotient `J_0(33)` by the cuspidal subgroup:: sage: C = J0(33).cuspidal_subgroup() sage: Q, f = J0(33).quotient(C) sage: Q Abelian variety factor of dimension 3 of J0(33) sage: f.kernel()[0] Finite subgroup with invariants [10, 10] over QQ of Abelian variety J0(33) of dimension 3 sage: C Finite subgroup with invariants [10, 10] over QQ of Abelian variety J0(33) of dimension 3 sage: J0(11).direct_product(J1(13)) Abelian variety J0(11) x J1(13) of dimension 3 """ return self.__div__(other) def __div__(self, other): """ Compute the quotient of self and other, where other is either an abelian subvariety of self or a finite subgroup of self. 
INPUT: - ``other`` - a finite subgroup or subvariety EXAMPLES: Quotient out by a finite group:: sage: J = J0(67); G = (J[0] + J[1]).intersection(J[1] + J[2]) sage: Q, _ = J/G[0]; Q Abelian variety factor of dimension 5 of J0(67) over Algebraic Field sage: Q.base_field() Algebraic Field sage: Q.lattice() Free module of degree 10 and rank 10 over Integer Ring Echelon basis matrix: [1/10 1/10 3/10 1/2 0 0 0 3/10 0 1/2] [ 0 1/5 4/5 4/5 0 0 0 0 0 3/5] ... Quotient out by an abelian subvariety:: sage: A, B, C = J0(33) sage: Q, phi = J0(33)/A sage: Q Abelian variety factor of dimension 2 of J0(33) sage: phi.domain() Abelian variety J0(33) of dimension 3 sage: phi.codomain() Abelian variety factor of dimension 2 of J0(33) sage: phi.kernel() (Finite subgroup with invariants [2] over QQbar of Abelian variety J0(33) of dimension 3, Abelian subvariety of dimension 1 of J0(33)) sage: phi.kernel()[1] == A True The abelian variety we quotient out by must be an abelian subvariety. :: sage: Q = (A + B)/C; Q Traceback (most recent call last): ... TypeError: other must be a subgroup or abelian subvariety """ if isinstance(other, FiniteSubgroup): if other.abelian_variety() != self: other = self.finite_subgroup(other) return self._quotient_by_finite_subgroup(other) elif isinstance(other, ModularAbelianVariety_abstract) and other.is_subvariety(self): return self._quotient_by_abelian_subvariety(other) else: raise TypeError("other must be a subgroup or abelian subvariety") def degeneracy_map(self, M_ls, t_ls): """ Return the degeneracy map with domain self and given level/parameter. If self.ambient_variety() is a product of Jacobians (as opposed to a single Jacobian), then one can provide a list of new levels and parameters, corresponding to the ambient Jacobians in order. (See the examples below.) INPUT: - ``M, t`` - integers level and `t`, or - ``Mlist, tlist`` - if self is in a nontrivial product ambient Jacobian, input consists of a list of levels and corresponding list of `t`'s. 
OUTPUT: a degeneracy map EXAMPLES: We make several degeneracy maps related to `J_0(11)` and `J_0(33)` and compute their matrices. :: sage: d1 = J0(11).degeneracy_map(33, 1); d1 Degeneracy map from Abelian variety J0(11) of dimension 1 to Abelian variety J0(33) of dimension 3 defined by [1] sage: d1.matrix() [ 0 -3 2 1 -2 0] [ 1 -2 0 1 0 -1] sage: d2 = J0(11).degeneracy_map(33, 3); d2 Degeneracy map from Abelian variety J0(11) of dimension 1 to Abelian variety J0(33) of dimension 3 defined by [3] sage: d2.matrix() [-1 0 0 0 1 -2] [-1 -1 1 -1 1 0] sage: d3 = J0(33).degeneracy_map(11, 1); d3 Degeneracy map from Abelian variety J0(33) of dimension 3 to Abelian variety J0(11) of dimension 1 defined by [1] He we verify that first mapping from level `11` to level `33`, then back is multiplication by `4`:: sage: d1.matrix() * d3.matrix() [4 0] [0 4] We compute a more complicated degeneracy map involving nontrivial product ambient Jacobians; note that this is just the block direct sum of the two matrices at the beginning of this example:: sage: d = (J0(11)*J0(11)).degeneracy_map([33,33], [1,3]); d Degeneracy map from Abelian variety J0(11) x J0(11) of dimension 2 to Abelian variety J0(33) x J0(33) of dimension 6 defined by [1, 3] sage: d.matrix() [ 0 -3 2 1 -2 0 0 0 0 0 0 0] [ 1 -2 0 1 0 -1 0 0 0 0 0 0] [ 0 0 0 0 0 0 -1 0 0 0 1 -2] [ 0 0 0 0 0 0 -1 -1 1 -1 1 0] """ if not isinstance(M_ls, list): M_ls = [M_ls] if not isinstance(t_ls, list): t_ls = [t_ls] groups = self.groups() length = len(M_ls) if length != len(t_ls): raise ValueError("must have same number of Ms and ts") if length != len(groups): raise ValueError("must have same number of Ms and groups in ambient variety") for i in range(length): N = groups[i].level() if (M_ls[i]%N) and (N%M_ls[i]): raise ValueError("one level must divide the other in %s-th component"%i) if (( max(M_ls[i],N) // min(M_ls[i],N) ) % t_ls[i]): raise ValueError("each t must divide the quotient of the levels") ls = [ 
self.groups()[i].modular_abelian_variety().degeneracy_map(M_ls[i], t_ls[i]).matrix() for i in range(length) ] new_codomain = prod([ self.groups()[i]._new_group_from_level(M_ls[i]).modular_abelian_variety() for i in range(length) ]) M = block_diagonal_matrix(ls, subdivide=False) H = self.Hom(new_codomain) return H(DegeneracyMap(H, M.restrict_domain(self.lattice()), t_ls)) def _quotient_by_finite_subgroup(self, G): """ Return the quotient of self by the finite subgroup `G`. This is used internally by the quotient and __div__ commands. INPUT: - ``G`` - a finite subgroup of self OUTPUT: abelian variety - the quotient `Q` of self by `G` - ``morphism`` - from self to the quotient `Q` EXAMPLES: We quotient the elliptic curve `J_0(11)` out by its cuspidal subgroup. :: sage: A = J0(11) sage: G = A.cuspidal_subgroup(); G Finite subgroup with invariants [5] over QQ of Abelian variety J0(11) of dimension 1 sage: Q, f = A._quotient_by_finite_subgroup(G) sage: Q Abelian variety factor of dimension 1 of J0(11) sage: f Abelian variety morphism: From: Abelian variety J0(11) of dimension 1 To: Abelian variety factor of dimension 1 of J0(11) We compute the finite kernel of `f` (hence the [0]) and note that it equals the subgroup `G` that we quotiented out by:: sage: f.kernel()[0] == G True """ if G.order() == 1: return self L = self.lattice() + G.lattice() A = ModularAbelianVariety(self.groups(), L, G.field_of_definition()) M = L.coordinate_module(self.lattice()).basis_matrix() phi = self.Hom(A)(M) return A, phi def _quotient_by_abelian_subvariety(self, B): """ Return the quotient of self by the abelian variety `B`. This is used internally by the quotient and __div__ commands. INPUT: - ``B`` - an abelian subvariety of self OUTPUT: - ``abelian variety`` - quotient `Q` of self by B - ``morphism`` - from self to the quotient `Q` EXAMPLES: We compute the new quotient of `J_0(33)`. 
:: sage: A = J0(33); B = A.old_subvariety() sage: Q, f = A._quotient_by_abelian_subvariety(B) Note that the quotient happens to also be an abelian subvariety:: sage: Q Abelian subvariety of dimension 1 of J0(33) sage: Q.lattice() Free module of degree 6 and rank 2 over Integer Ring Echelon basis matrix: [ 1 0 0 -1 0 0] [ 0 0 1 0 1 -1] sage: f Abelian variety morphism: From: Abelian variety J0(33) of dimension 3 To: Abelian subvariety of dimension 1 of J0(33) We verify that `B` is equal to the kernel of the quotient map. :: sage: f.kernel()[1] == B True Next we quotient `J_0(33)` out by `Q` itself:: sage: C, g = A._quotient_by_abelian_subvariety(Q) The result is not a subvariety:: sage: C Abelian variety factor of dimension 2 of J0(33) sage: C.lattice() Free module of degree 6 and rank 4 over Integer Ring Echelon basis matrix: [ 1/3 0 0 2/3 -1 0] [ 0 1 0 0 -1 1] [ 0 0 1/3 0 -2/3 2/3] [ 0 0 0 1 -1 -1] """ # We first compute the complement of B in self to get # an abelian variety C also in self such that self/B # is isogenous to C. This is the case because the # projection map pi:self --> C is surjective and has # kernel a finite extension of the abelian variety B. C = B.complement(self) # Now that we have C we need to find some abelian variety Q # isogenous to C and a map self --> Q whose kernel is exactly # B. We do this by computing the kernel of the map pi below, # which is an extension of the abelian variety B by a finite # group Phi of complements. Our strategy is to enlarge the # lattice that defines C so that the map pi below suddenly # has connected kernel. pi = self.projection(C) psi = pi.factor_out_component_group() Q = psi.codomain() return Q, psi def projection(self, A, check=True): """ Given an abelian subvariety A of self, return a projection morphism from self to A. Note that this morphism need not be unique. 
INPUT: - ``A`` - an abelian variety OUTPUT: a morphism EXAMPLES:: sage: a,b,c = J0(33) sage: pi = J0(33).projection(a); pi.matrix() [ 3 -2] [-5 5] [-4 1] [ 3 -2] [ 5 0] [ 1 1] sage: pi = (a+b).projection(a); pi.matrix() [ 0 0] [-3 2] [-4 1] [-1 -1] sage: pi = a.projection(a); pi.matrix() [1 0] [0 1] We project onto a factor in a product of two Jacobians:: sage: A = J0(11)*J0(11); A Abelian variety J0(11) x J0(11) of dimension 2 sage: A[0] Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J0(11) sage: A.projection(A[0]) Abelian variety morphism: From: Abelian variety J0(11) x J0(11) of dimension 2 To: Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J0(11) sage: A.projection(A[0]).matrix() [0 0] [0 0] [1 0] [0 1] sage: A.projection(A[1]).matrix() [1 0] [0 1] [0 0] [0 0] """ if check and not A.is_subvariety(self): raise ValueError("A must be an abelian subvariety of self") W = A.complement(self) mat = A.lattice().basis_matrix().stack(W.lattice().basis_matrix()) # solve X * mat = self, i.e. write each row of self in terms of the # rows of mat. X = mat.solve_left(self.lattice().basis_matrix()) # The projection map is got from the first 2*dim(A) columns of X. X = X.matrix_from_columns(range(2*A.dimension())) X, _ = X._clear_denom() return Morphism(self.Hom(A), X) def project_to_factor(self, n): """ If self is an ambient product of Jacobians, return a projection from self to the nth such Jacobian. 
EXAMPLES:: sage: J = J0(33) sage: J.project_to_factor(0) Abelian variety endomorphism of Abelian variety J0(33) of dimension 3 :: sage: J = J0(33) * J0(37) * J0(11) sage: J.project_to_factor(2) Abelian variety morphism: From: Abelian variety J0(33) x J0(37) x J0(11) of dimension 6 To: Abelian variety J0(11) of dimension 1 sage: J.project_to_factor(2).matrix() [0 0] [0 0] [0 0] [0 0] [0 0] [0 0] [0 0] [0 0] [0 0] [0 0] [1 0] [0 1] """ if not self.is_ambient(): raise ValueError("self is not ambient") if n >= len(self.groups()): raise IndexError("index (=%s) too large (max = %s)"%(n, len(self.groups()))) G = self.groups()[n] A = G.modular_abelian_variety() index = sum([ gp.modular_symbols().cuspidal_subspace().dimension() for gp in self.groups()[0:n] ]) H = self.Hom(A) mat = H.matrix_space()(0) mat.set_block(index, 0, identity_matrix(2*A.dimension())) return H(Morphism(H, mat)) def is_subvariety_of_ambient_jacobian(self): """ Return True if self is (presented as) a subvariety of the ambient product Jacobian. Every abelian variety in Sage is a quotient of a subvariety of an ambient Jacobian product by a finite subgroup. EXAMPLES:: sage: J0(33).is_subvariety_of_ambient_jacobian() True sage: A = J0(33)[0]; A Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33) sage: A.is_subvariety_of_ambient_jacobian() True sage: B, phi = A / A.torsion_subgroup(2) sage: B Abelian variety factor of dimension 1 of J0(33) sage: phi.matrix() [2 0] [0 2] sage: B.is_subvariety_of_ambient_jacobian() False """ try: return self.__is_sub_ambient except AttributeError: self.__is_sub_ambient = (self.lattice().denominator() == 1) return self.__is_sub_ambient def ambient_variety(self): """ Return the ambient modular abelian variety that contains this abelian variety. The ambient variety is always a product of Jacobians of modular curves. 
OUTPUT: abelian variety EXAMPLES:: sage: A = J0(33)[0]; A Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33) sage: A.ambient_variety() Abelian variety J0(33) of dimension 3 """ try: return self.__ambient_variety except AttributeError: A = ModularAbelianVariety(self.groups(), ZZ**(2*self._ambient_dimension()), self.base_field(), check=False) self.__ambient_variety = A return A def ambient_morphism(self): """ Return the morphism from self to the ambient variety. This is injective if self is natural a subvariety of the ambient product Jacobian. OUTPUT: morphism The output is cached. EXAMPLES: We compute the ambient structure morphism for an abelian subvariety of `J_0(33)`:: sage: A,B,C = J0(33) sage: phi = A.ambient_morphism() sage: phi.domain() Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33) sage: phi.codomain() Abelian variety J0(33) of dimension 3 sage: phi.matrix() [ 1 1 -2 0 2 -1] [ 0 3 -2 -1 2 0] phi is of course injective :: sage: phi.kernel() (Finite subgroup with invariants [] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Abelian subvariety of dimension 0 of J0(33)) This is the same as the basis matrix for the lattice corresponding to self:: sage: A.lattice() Free module of degree 6 and rank 2 over Integer Ring Echelon basis matrix: [ 1 1 -2 0 2 -1] [ 0 3 -2 -1 2 0] We compute a non-injective map to an ambient space:: sage: Q,pi = J0(33)/A sage: phi = Q.ambient_morphism() sage: phi.matrix() [ 1 4 1 9 -1 -1] [ 0 15 0 0 30 -75] [ 0 0 5 10 -5 15] [ 0 0 0 15 -15 30] sage: phi.kernel()[0] Finite subgroup with invariants [5, 15, 15] over QQ of Abelian variety factor of dimension 2 of J0(33) """ try: return self.__ambient_morphism except AttributeError: matrix,_ = self.lattice().basis_matrix()._clear_denom() phi = Morphism(self.Hom(self.ambient_variety()), matrix) self.__ambient_morphism = phi return phi def is_ambient(self): """ Return True if self equals the ambient product Jacobian. 
        OUTPUT: bool

        EXAMPLES::

            sage: A,B,C = J0(33)
            sage: A.is_ambient()
            False
            sage: J0(33).is_ambient()
            True
            sage: (A+B).is_ambient()
            False
            sage: (A+B+C).is_ambient()
            True
        """
        try:
            # Cached on first computation.
            return self.__is_ambient
        except AttributeError:
            pass
        L = self.lattice()
        # Ambient if and only if the lattice is all of ZZ^(2*degree).
        self.__is_ambient = (self.lattice() == ZZ**L.degree())
        return self.__is_ambient

    def dimension(self):
        """
        Return the dimension of this abelian variety.

        The underlying lattice has rank twice the dimension, hence the
        division by 2.

        OUTPUT: integer

        EXAMPLES::

            sage: A = J0(23)
            sage: A.dimension()
            2
        """
        return self.lattice().rank() // 2

    def rank(self):
        """
        Return the rank of the underlying lattice of self.

        This is twice the dimension of self.

        OUTPUT: integer

        EXAMPLES::

            sage: J = J0(33)
            sage: J.rank()
            6
            sage: J[1]
            Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33)
            sage: (J[1] * J[1]).rank()
            4
        """
        return self.lattice().rank()

    def degree(self):
        """
        Return the degree of this abelian variety, which is the dimension
        of the ambient Jacobian product.

        OUTPUT: integer

        EXAMPLES::

            sage: A = J0(23)
            sage: A.degree()
            2
        """
        return self._ambient_dimension()

    def endomorphism_ring(self, category=None):
        """
        Return the endomorphism ring of self.

        INPUT:

        - ``category`` - (default: None) category passed through to the
          construction of the endomorphism subring

        OUTPUT: the endomorphism ring of this modular abelian variety

        EXAMPLES: We compute a few endomorphism rings::

            sage: J0(11).endomorphism_ring()
            Endomorphism ring of Abelian variety J0(11) of dimension 1
            sage: J0(37).endomorphism_ring()
            Endomorphism ring of Abelian variety J0(37) of dimension 2
            sage: J0(33)[2].endomorphism_ring()
            Endomorphism ring of Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33)

        No real computation is done::

            sage: J1(123456).endomorphism_ring()
            Endomorphism ring of Abelian variety J1(123456) of dimension 423185857
        """
        try:
            # Cached; note the cache ignores ``category`` — the first call's
            # category wins for subsequent calls.
            return self.__endomorphism_ring
        except AttributeError:
            pass

        self.__endomorphism_ring = homspace.EndomorphismSubring(self, category=category)
        return self.__endomorphism_ring

    def sturm_bound(self):
        r"""
        Return a bound `B` such that all Hecke operators `T_n` for
        `n\leq B` generate the Hecke algebra.
        OUTPUT: integer

        EXAMPLES::

            sage: J0(11).sturm_bound()
            2
            sage: J0(33).sturm_bound()
            8
            sage: J1(17).sturm_bound()
            48
            sage: J1(123456).sturm_bound()
            1693483008
            sage: JH(37,[2,3]).sturm_bound()
            7
            sage: J1(37).sturm_bound()
            228
        """
        try:
            # Cached on first computation.
            return self.__sturm_bound
        except AttributeError:
            # The bound for a product is the max of the weight-2 Sturm
            # bounds of the individual ambient groups.
            B = max([G.sturm_bound(2) for G in self.groups()])
            self.__sturm_bound = B
            return B

    def is_hecke_stable(self):
        """
        Return True if self is stable under the Hecke operators of its
        ambient Jacobian.

        The result is cached in ``self._is_hecke_stable``.

        OUTPUT: bool

        EXAMPLES::

            sage: J0(11).is_hecke_stable()
            True
            sage: J0(33)[2].is_hecke_stable()
            True
            sage: J0(33)[0].is_hecke_stable()
            False
            sage: (J0(33)[0] + J0(33)[1]).is_hecke_stable()
            True
        """
        try:
            return self._is_hecke_stable
        except AttributeError:
            pass

        # Largest Sturm bound among the ambient modular symbols spaces;
        # checking T_p for primes p up to this bound suffices.
        # NOTE(review): relies on the Hecke algebra being generated by the
        # T_p with p prime up to the Sturm bound — confirm that prime
        # indices (rather than all n <= b) are enough here.
        b = max([ m.sturm_bound() for m in self._ambient_modular_symbols_spaces() ])
        J = self.ambient_variety()
        L = self.lattice()
        B = self.lattice().basis()

        for n in prime_range(1,b+1):
            # Matrix of T_n on the homology of the ambient Jacobian; self
            # is stable iff each basis vector of its lattice maps back in.
            Tn_matrix = J.hecke_operator(n).matrix()
            for v in B:
                if not (v*Tn_matrix in L):
                    self._is_hecke_stable = False
                    return False

        self._is_hecke_stable = True
        return True

    def is_subvariety(self, other):
        """
        Return True if self is a subvariety of other as they sit in a
        common ambient modular Jacobian. In particular, this function
        will only return True if self and other have exactly the same
        ambient Jacobians.

        INPUT:

        - ``other`` - an object; anything that is not a modular abelian
          variety yields False

        OUTPUT: bool

        EXAMPLES::

            sage: J = J0(37); J
            Abelian variety J0(37) of dimension 2
            sage: A = J[0]; A
            Simple abelian subvariety 37a(1,37) of dimension 1 of J0(37)
            sage: A.is_subvariety(A)
            True
            sage: A.is_subvariety(J)
            True
        """
        if not is_ModularAbelianVariety(other):
            return False
        if self is other:
            return True
        # Different ambient Jacobian products can never contain one another.
        if self.groups() != other.groups():
            return False
        L = self.lattice()
        M = other.lattice()
        # self is an abelian subvariety of other if and only if
        #   1. L is a subset of M (so the abelian subvarieties of
        #      the ambient J are equal), and
        #   2. L is relatively saturated in M, i.e., M/L is
        #      torsion free.
if not L.is_submodule(M): return False # To determine if L is relatively saturated we compute the # intersection of M with (L tensor Q) and see if that equals # L. return L.change_ring(QQ).intersection(M) == L def change_ring(self, R): """ Change the base ring of this modular abelian variety. EXAMPLES:: sage: A = J0(23) sage: A.change_ring(QQ) Abelian variety J0(23) of dimension 2 """ return ModularAbelianVariety(self.groups(), self.lattice(), R, check=False) def level(self): """ Return the level of this modular abelian variety, which is an integer N (usually minimal) such that this modular abelian variety is a quotient of `J_1(N)`. In the case that the ambient variety of self is a product of Jacobians, return the LCM of their levels. EXAMPLES:: sage: J1(5077).level() 5077 sage: JH(389,[4]).level() 389 sage: (J0(11)*J0(17)).level() 187 """ try: return self.__level except AttributeError: self.__level = LCM([G.level() for G in self.groups()]) return self.__level def newform_level(self, none_if_not_known=False): """ Write self as a product (up to isogeny) of newform abelian varieties `A_f`. Then this function return the least common multiple of the levels of the newforms `f`, along with the corresponding group or list of groups (the groups do not appear with multiplicity). INPUT: - ``none_if_not_known`` - (default: False) if True, return None instead of attempting to compute the newform level, if it isn't already known. This None result is not cached. 
OUTPUT: integer group or list of distinct groups EXAMPLES:: sage: J0(33)[0].newform_level() (11, Congruence Subgroup Gamma0(33)) sage: J0(33)[0].newform_level(none_if_not_known=True) (11, Congruence Subgroup Gamma0(33)) Here there are multiple groups since there are in fact multiple newforms:: sage: (J0(11) * J1(13)).newform_level() (143, [Congruence Subgroup Gamma0(11), Congruence Subgroup Gamma1(13)]) """ try: return self.__newform_level except AttributeError: if none_if_not_known: return None N = [A.newform_level() for A in self.decomposition()] level = LCM([z[0] for z in N]) groups = sorted(set([z[1] for z in N])) if len(groups) == 1: groups = groups[0] self.__newform_level = level, groups return self.__newform_level def zero_subvariety(self): """ Return the zero subvariety of self. EXAMPLES:: sage: J = J0(37) sage: J.zero_subvariety() Simple abelian subvariety of dimension 0 of J0(37) sage: J.zero_subvariety().level() 37 sage: J.zero_subvariety().newform_level() (1, []) """ try: return self.__zero_subvariety except AttributeError: lattice = (ZZ**(2*self.degree())).zero_submodule() A = ModularAbelianVariety(self.groups(), lattice, self.base_field(), is_simple=True, check=False) self.__zero_subvariety = A return A ############################################################################### # Properties of the ambient product of Jacobians ############################################################################### def _ambient_repr(self): """ OUTPUT: string EXAMPLES:: sage: (J0(33)*J1(11))._ambient_repr() 'J0(33) x J1(11)' """ v = [] for G in self.groups(): if is_Gamma0(G): v.append('J0(%s)'%G.level()) elif is_Gamma1(G): v.append('J1(%s)'%G.level()) elif is_GammaH(G): v.append('JH(%s,%s)'%(G.level(), G._generators_for_H())) return ' x '.join(v) def _ambient_latex_repr(self): """ Return Latex representation of the ambient product. 
OUTPUT: string EXAMPLES:: sage: (J0(11) * J0(33))._ambient_latex_repr() 'J_0(11) \\times J_0(33)' """ v = [] for G in self.groups(): if is_Gamma0(G): v.append('J_0(%s)'%G.level()) elif is_Gamma1(G): v.append('J_1(%s)'%G.level()) elif is_GammaH(G): v.append('J_H(%s,%s)'%(G.level(), G._generators_for_H())) return ' \\times '.join(v) def _ambient_lattice(self): """ Return free lattice of rank twice the degree of self. This is the lattice corresponding to the ambient product Jacobian. OUTPUT: lattice EXAMPLES: We compute the ambient lattice of a product:: sage: (J0(33)*J1(11))._ambient_lattice() Ambient free module of rank 8 over the principal ideal domain Integer Ring We compute the ambient lattice of an abelian subvariety `J_0(33)`, which is the same as the lattice for the `J_0(33)` itself:: sage: A = J0(33)[0]; A._ambient_lattice() Ambient free module of rank 6 over the principal ideal domain Integer Ring sage: J0(33)._ambient_lattice() Ambient free module of rank 6 over the principal ideal domain Integer Ring """ try: return self.__ambient_lattice except AttributeError: self.__ambient_lattice = ZZ**(2*self.degree()) return self.__ambient_lattice def _ambient_modular_symbols_spaces(self): """ Return a tuple of the ambient cuspidal modular symbols spaces that make up the Jacobian product that contains self. 
OUTPUT: tuple of cuspidal modular symbols spaces EXAMPLES:: sage: (J0(11) * J0(33))._ambient_modular_symbols_spaces() (Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field, Modular Symbols subspace of dimension 6 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field) sage: (J0(11) * J0(33)[0])._ambient_modular_symbols_spaces() (Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field, Modular Symbols subspace of dimension 6 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field) """ if not self.is_ambient(): return self.ambient_variety()._ambient_modular_symbols_spaces() try: return self.__ambient_modular_symbols_spaces except AttributeError: X = tuple([ModularSymbols(G).cuspidal_subspace() for G in self.groups()]) self.__ambient_modular_symbols_spaces = X return X def _ambient_modular_symbols_abvars(self): """ Return a tuple of the ambient modular symbols abelian varieties that make up the Jacobian product that contains self. OUTPUT: tuple of modular symbols abelian varieties EXAMPLES:: sage: (J0(11) * J0(33))._ambient_modular_symbols_abvars() (Abelian variety J0(11) of dimension 1, Abelian variety J0(33) of dimension 3) """ if not self.is_ambient(): return self.ambient_variety()._ambient_modular_symbols_abvars() try: return self.__ambient_modular_symbols_abvars except AttributeError: X = tuple([ModularAbelianVariety_modsym(M) for M in self._ambient_modular_symbols_spaces()]) self.__ambient_modular_symbols_abvars = X return X def _ambient_dimension(self): """ Return the dimension of the ambient Jacobian product. 
EXAMPLES:: sage: A = J0(37) * J1(13); A Abelian variety J0(37) x J1(13) of dimension 4 sage: A._ambient_dimension() 4 sage: B = A[0]; B Simple abelian subvariety 13aG1(1,13) of dimension 2 of J0(37) x J1(13) sage: B._ambient_dimension() 4 This example is fast because it implicitly calls _ambient_dimension. :: sage: J0(902834082394) Abelian variety J0(902834082394) of dimension 113064825881 """ try: return self.__ambient_dimension except AttributeError: d = sum([G.dimension_cusp_forms(2) for G in self.groups()], Integer(0)) self.__ambient_dimension = d return d def _ambient_hecke_matrix_on_modular_symbols(self, n): r""" Return block direct sum of the matrix of the Hecke operator `T_n` acting on each of the ambient modular symbols spaces. INPUT: - ``n`` - an integer `\geq 1`. OUTPUT: a matrix EXAMPLES:: sage: (J0(11) * J1(13))._ambient_hecke_matrix_on_modular_symbols(2) [-2 0 0 0 0 0] [ 0 -2 0 0 0 0] [ 0 0 -2 0 -1 1] [ 0 0 1 -1 0 -1] [ 0 0 1 1 -2 0] [ 0 0 0 1 -1 -1] """ if not self.is_ambient(): return self.ambient_variety()._ambient_hecke_matrix_on_modular_symbols(n) try: return self.__ambient_hecke_matrix_on_modular_symbols[n] except AttributeError: self.__ambient_hecke_matrix_on_modular_symbols = {} except KeyError: pass M = self._ambient_modular_symbols_spaces() if len(M) == 0: return matrix(QQ,0) T = M[0].hecke_matrix(n) for i in range(1,len(M)): T = T.block_sum(M[i].hecke_matrix(n)) self.__ambient_hecke_matrix_on_modular_symbols[n] = T return T ############################################################################### # Rational and Integral Homology ############################################################################### def _rational_homology_space(self): """ Return the rational homology of this modular abelian variety. 
EXAMPLES:: sage: J = J0(11) sage: J._rational_homology_space() Vector space of dimension 2 over Rational Field The result is cached:: sage: J._rational_homology_space() is J._rational_homology_space() True """ try: return self.__rational_homology_space except AttributeError: HQ = self.rational_homology().free_module() self.__rational_homology_space = HQ return HQ def homology(self, base_ring=ZZ): """ Return the homology of this modular abelian variety. .. warning:: For efficiency reasons the basis of the integral homology need not be the same as the basis for the rational homology. EXAMPLES:: sage: J0(389).homology(GF(7)) Homology with coefficients in Finite Field of size 7 of Abelian variety J0(389) of dimension 32 sage: J0(389).homology(QQ) Rational Homology of Abelian variety J0(389) of dimension 32 sage: J0(389).homology(ZZ) Integral Homology of Abelian variety J0(389) of dimension 32 """ try: return self._homology[base_ring] except AttributeError: self._homology = {} except KeyError: pass if base_ring == ZZ: H = homology.IntegralHomology(self) elif base_ring == QQ: H = homology.RationalHomology(self) else: H = homology.Homology_over_base(self, base_ring) self._homology[base_ring] = H return H def integral_homology(self): """ Return the integral homology of this modular abelian variety. 
EXAMPLES:: sage: H = J0(43).integral_homology(); H Integral Homology of Abelian variety J0(43) of dimension 3 sage: H.rank() 6 sage: H = J1(17).integral_homology(); H Integral Homology of Abelian variety J1(17) of dimension 5 sage: H.rank() 10 If you just ask for the rank of the homology, no serious calculations are done, so the following is fast:: sage: H = J0(50000).integral_homology(); H Integral Homology of Abelian variety J0(50000) of dimension 7351 sage: H.rank() 14702 A product:: sage: H = (J0(11) * J1(13)).integral_homology() sage: H.hecke_operator(2) Hecke operator T_2 on Integral Homology of Abelian variety J0(11) x J1(13) of dimension 3 sage: H.hecke_operator(2).matrix() [-2 0 0 0 0 0] [ 0 -2 0 0 0 0] [ 0 0 -2 0 -1 1] [ 0 0 1 -1 0 -1] [ 0 0 1 1 -2 0] [ 0 0 0 1 -1 -1] """ return self.homology(ZZ) def rational_homology(self): """ Return the rational homology of this modular abelian variety. EXAMPLES:: sage: H = J0(37).rational_homology(); H Rational Homology of Abelian variety J0(37) of dimension 2 sage: H.rank() 4 sage: H.base_ring() Rational Field sage: H = J1(17).rational_homology(); H Rational Homology of Abelian variety J1(17) of dimension 5 sage: H.rank() 10 sage: H.base_ring() Rational Field """ return self.homology(QQ) ############################################################################### # L-series ############################################################################### def lseries(self): """ Return the complex `L`-series of this modular abelian variety. EXAMPLES:: sage: A = J0(37) sage: A.lseries() Complex L-series attached to Abelian variety J0(37) of dimension 2 """ try: return self.__lseries except AttributeError: pass self.__lseries = lseries.Lseries_complex(self) return self.__lseries def padic_lseries(self, p): """ Return the `p`-adic `L`-series of this modular abelian variety. 
EXAMPLES:: sage: A = J0(37) sage: A.padic_lseries(7) 7-adic L-series attached to Abelian variety J0(37) of dimension 2 """ p = int(p) try: return self.__lseries_padic[p] except AttributeError: self.__lseries_padic = {} except KeyError: pass self.__lseries_padic[p] = lseries.Lseries_padic(self, p) return self.__lseries_padic[p] ############################################################################### # Hecke Operators ############################################################################### def hecke_operator(self, n): """ Return the `n^{th}` Hecke operator on the modular abelian variety, if this makes sense [[elaborate]]. Otherwise raise a ValueError. EXAMPLES: We compute `T_2` on `J_0(37)`. :: sage: t2 = J0(37).hecke_operator(2); t2 Hecke operator T_2 on Abelian variety J0(37) of dimension 2 sage: t2.charpoly().factor() x * (x + 2) sage: t2.index() 2 Note that there is no matrix associated to Hecke operators on modular abelian varieties. For a matrix, instead consider, e.g., the Hecke operator on integral or rational homology. :: sage: t2.action_on_homology().matrix() [-1 1 1 -1] [ 1 -1 1 0] [ 0 0 -2 1] [ 0 0 0 0] """ try: return self._hecke_operator[n] except AttributeError: self._hecke_operator = {} except KeyError: pass Tn = HeckeOperator(self, n) self._hecke_operator[n] = Tn return Tn def hecke_polynomial(self, n, var='x'): r""" Return the characteristic polynomial of the `n^{th}` Hecke operator `T_n` acting on self. Raises an ArithmeticError if self is not Hecke equivariant. INPUT: - ``n`` - integer `\geq 1` - ``var`` - string (default: 'x'); valid variable name EXAMPLES:: sage: J0(33).hecke_polynomial(2) x^3 + 3*x^2 - 4 sage: f = J0(33).hecke_polynomial(2, 'y'); f y^3 + 3*y^2 - 4 sage: f.parent() Univariate Polynomial Ring in y over Rational Field sage: J0(33)[2].hecke_polynomial(3) x + 1 sage: J0(33)[0].hecke_polynomial(5) x - 1 sage: J0(33)[0].hecke_polynomial(11) x - 1 sage: J0(33)[0].hecke_polynomial(3) Traceback (most recent call last): ... 
ArithmeticError: subspace is not invariant under matrix """ n = Integer(n) if n <= 0: raise ValueError("n must be a positive integer") key = (n,var) try: return self.__hecke_polynomial[key] except AttributeError: self.__hecke_polynomial = {} except KeyError: pass f = self._compute_hecke_polynomial(n, var=var) self.__hecke_polynomial[key] = f return f def _compute_hecke_polynomial(self, n, var='x'): """ Return the Hecke polynomial of index `n` in terms of the given variable. INPUT: - ``n`` - positive integer - ``var`` - string (default: 'x') EXAMPLES:: sage: A = J0(33)*J0(11) sage: A._compute_hecke_polynomial(2) x^4 + 5*x^3 + 6*x^2 - 4*x - 8 """ return self.hecke_operator(n).charpoly(var=var) def _integral_hecke_matrix(self, n): """ Return the matrix of the Hecke operator `T_n` acting on the integral homology of this modular abelian variety, if the modular abelian variety is stable under `T_n`. Otherwise, raise an ArithmeticError. EXAMPLES:: sage: A = J0(23) sage: t = A._integral_hecke_matrix(2); t [ 0 1 -1 0] [ 0 1 -1 1] [-1 2 -2 1] [-1 1 0 -1] sage: t.parent() Full MatrixSpace of 4 by 4 dense matrices over Integer Ring """ A = self._ambient_hecke_matrix_on_modular_symbols(n) return A.restrict(self.lattice()) def _rational_hecke_matrix(self, n): r""" Return the matrix of the Hecke operator `T_n` acting on the rational homology `H_1(A,\QQ)` of this modular abelian variety, if this action is defined. Otherwise, raise an ArithmeticError. 
EXAMPLES:: sage: A = J0(23) sage: t = A._rational_hecke_matrix(2); t [ 0 1 -1 0] [ 0 1 -1 1] [-1 2 -2 1] [-1 1 0 -1] sage: t.parent() Full MatrixSpace of 4 by 4 dense matrices over Rational Field """ return self._integral_hecke_matrix(n) ############################################################################### # Subgroups ############################################################################### def qbar_torsion_subgroup(self): r""" Return the group of all points of finite order in the algebraic closure of this abelian variety. EXAMPLES:: sage: T = J0(33).qbar_torsion_subgroup(); T Group of all torsion points in QQbar on Abelian variety J0(33) of dimension 3 The field of definition is the same as the base field of the abelian variety. :: sage: T.field_of_definition() Rational Field On the other hand, T is a module over `\ZZ`. :: sage: T.base_ring() Integer Ring """ try: return self.__qbar_torsion_subgroup except AttributeError: G = QQbarTorsionSubgroup(self) self.__qbar_torsion_subgroup = G return G def rational_torsion_subgroup(self): """ Return the maximal torsion subgroup of self defined over QQ. EXAMPLES:: sage: J = J0(33) sage: A = J.new_subvariety() sage: A Abelian subvariety of dimension 1 of J0(33) sage: t = A.rational_torsion_subgroup() sage: t.multiple_of_order() 4 sage: t.divisor_of_order() 4 sage: t.order() 4 sage: t.gens() [[(1/2, 0, 0, -1/2, 0, 0)], [(0, 0, 1/2, 0, 1/2, -1/2)]] sage: t Torsion subgroup of Abelian subvariety of dimension 1 of J0(33) """ try: return self.__rational_torsion_subgroup except AttributeError: T = RationalTorsionSubgroup(self) self.__rational_torsion_subgroup = T return T def cuspidal_subgroup(self): """ Return the cuspidal subgroup of this modular abelian variety. This is the subgroup generated by rational cusps. 
EXAMPLES:: sage: J = J0(54) sage: C = J.cuspidal_subgroup() sage: C.gens() [[(1/3, 0, 0, 0, 0, 1/3, 0, 2/3)], [(0, 1/3, 0, 0, 0, 2/3, 0, 1/3)], [(0, 0, 1/9, 1/9, 1/9, 1/9, 1/9, 2/9)], [(0, 0, 0, 1/3, 0, 1/3, 0, 0)], [(0, 0, 0, 0, 1/3, 1/3, 0, 1/3)], [(0, 0, 0, 0, 0, 0, 1/3, 2/3)]] sage: C.invariants() [3, 3, 3, 3, 3, 9] sage: J1(13).cuspidal_subgroup() Finite subgroup with invariants [19, 19] over QQ of Abelian variety J1(13) of dimension 2 sage: A = J0(33)[0] sage: A.cuspidal_subgroup() Finite subgroup with invariants [5] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33) """ try: return self._cuspidal_subgroup except AttributeError: if not self.is_subvariety_of_ambient_jacobian(): raise ValueError("self must be a subvariety of the ambient variety") if self.is_ambient(): T = self._ambient_cuspidal_subgroup(rational_only=False) else: T = self.ambient_variety().cuspidal_subgroup().intersection(self) self._cuspidal_subgroup = T return T def _ambient_cuspidal_subgroup(self, rational_only=False, rational_subgroup=False): """ EXAMPLES:: sage: (J1(13)*J0(11))._ambient_cuspidal_subgroup() Finite subgroup with invariants [19, 95] over QQ of Abelian variety J1(13) x J0(11) of dimension 3 sage: (J0(33))._ambient_cuspidal_subgroup() Finite subgroup with invariants [10, 10] over QQ of Abelian variety J0(33) of dimension 3 sage: (J0(33)*J0(33))._ambient_cuspidal_subgroup() Finite subgroup with invariants [10, 10, 10, 10] over QQ of Abelian variety J0(33) x J0(33) of dimension 6 """ n = 2 * self.degree() i = 0 lattice = (ZZ**n).zero_submodule() if rational_subgroup: CS = RationalCuspidalSubgroup elif rational_only: CS = RationalCuspSubgroup else: CS = CuspidalSubgroup for J in self._ambient_modular_symbols_abvars(): L = CS(J).lattice().basis_matrix() Z_left = matrix(QQ,L.nrows(),i) Z_right = matrix(QQ,L.nrows(),n-i-L.ncols()) lattice += (Z_left.augment(L).augment(Z_right)).row_module(ZZ) i += L.ncols() return FiniteSubgroup_lattice(self, lattice, 
                                      field_of_definition=self.base_field())

    def shimura_subgroup(self):
        r"""
        Return the Shimura subgroup of this modular abelian variety. This is
        the kernel of `J_0(N) \rightarrow J_1(N)` under the natural map.

        Here we compute the Shimura subgroup as the kernel of
        `J_0(N) \rightarrow J_0(Np)` where the map is the difference between the
        two degeneracy maps.

        OUTPUT: a finite subgroup of self

        EXAMPLES::

            sage: J=J0(11)
            sage: J.shimura_subgroup()
            Finite subgroup with invariants [5] over QQ of Abelian variety J0(11) of dimension 1

            sage: J=J0(17)
            sage: G=J.cuspidal_subgroup(); G
            Finite subgroup with invariants [4] over QQ of Abelian variety J0(17) of dimension 1
            sage: S=J.shimura_subgroup(); S
            Finite subgroup with invariants [4] over QQ of Abelian variety J0(17) of dimension 1
            sage: G.intersection(S)
            Finite subgroup with invariants [2] over QQ of Abelian variety J0(17) of dimension 1

            sage: J=J0(33)
            sage: A=J.decomposition()[0]
            sage: A.shimura_subgroup()
            Finite subgroup with invariants [5] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)
            sage: J.shimura_subgroup()
            Finite subgroup with invariants [10] over QQ of Abelian variety J0(33) of dimension 3
        """
        N=self.level()
        J=self.ambient_variety()
        # Find the smallest prime p not dividing N.
        # NOTE(review): no for/else guard — if N were divisible by every
        # prime below 100, the loop would fall through with p = 97 even
        # though 97 | N; confirm levels that large cannot occur here.
        for p in prime_range(100):
            if N%p!=0:
                break

        # Difference of the two degeneracy maps J_0(N) -> J_0(Np); the
        # Shimura subgroup is the finite part of its kernel.
        phi=J.degeneracy_map(N*p,1)
        phip=J.degeneracy_map(N*p,p)
        SIG = (phi-phip).kernel()

        # The connected component of the kernel must be trivial, so the
        # kernel is purely a finite group.
        assert SIG[1].dimension()==0, "The intersection should have dimension 0"

        return self.intersection(SIG[0])

    def rational_cusp_subgroup(self):
        r"""
        Return the subgroup of this modular abelian variety generated by
        rational cusps. This is a subgroup of the group of rational points
        in the cuspidal subgroup.

        .. warning::

           This is only currently implemented for
           `\Gamma_0(N)`.
EXAMPLES:: sage: J = J0(54) sage: CQ = J.rational_cusp_subgroup(); CQ Finite subgroup with invariants [3, 3, 9] over QQ of Abelian variety J0(54) of dimension 4 sage: CQ.gens() [[(1/3, 0, 0, 1/3, 2/3, 1/3, 0, 1/3)], [(0, 0, 1/9, 1/9, 7/9, 7/9, 1/9, 8/9)], [(0, 0, 0, 0, 0, 0, 1/3, 2/3)]] sage: factor(CQ.order()) 3^4 sage: CQ.invariants() [3, 3, 9] In this example the rational cuspidal subgroup and the cuspidal subgroup differ by a lot. :: sage: J = J0(49) sage: J.cuspidal_subgroup() Finite subgroup with invariants [2, 14] over QQ of Abelian variety J0(49) of dimension 1 sage: J.rational_cusp_subgroup() Finite subgroup with invariants [2] over QQ of Abelian variety J0(49) of dimension 1 Note that computation of the rational cusp subgroup isn't implemented for `\Gamma_1`. :: sage: J = J1(13) sage: J.cuspidal_subgroup() Finite subgroup with invariants [19, 19] over QQ of Abelian variety J1(13) of dimension 2 sage: J.rational_cusp_subgroup() Traceback (most recent call last): ... NotImplementedError: computation of rational cusps only implemented in Gamma0 case. """ try: return self._rational_cusp_subgroup except AttributeError: if not self.is_subvariety_of_ambient_jacobian(): raise ValueError("self must be a subvariety of the ambient variety") if self.is_ambient(): T = self._ambient_cuspidal_subgroup(rational_only=True) else: T = self.ambient_variety().rational_cusp_subgroup().intersection(self) self._rational_cusp_subgroup = T return T def rational_cuspidal_subgroup(self): r""" Return the rational subgroup of the cuspidal subgroup of this modular abelian variety. This is a subgroup of the group of rational points in the cuspidal subgroup. .. warning:: This is only currently implemented for `\Gamma_0(N)`. 
EXAMPLES:: sage: J = J0(54) sage: CQ = J.rational_cuspidal_subgroup(); CQ Finite subgroup with invariants [3, 3, 9] over QQ of Abelian variety J0(54) of dimension 4 sage: CQ.gens() [[(1/3, 0, 0, 1/3, 2/3, 1/3, 0, 1/3)], [(0, 0, 1/9, 1/9, 7/9, 7/9, 1/9, 8/9)], [(0, 0, 0, 0, 0, 0, 1/3, 2/3)]] sage: factor(CQ.order()) 3^4 sage: CQ.invariants() [3, 3, 9] In this example the rational cuspidal subgroup and the cuspidal subgroup differ by a lot. :: sage: J = J0(49) sage: J.cuspidal_subgroup() Finite subgroup with invariants [2, 14] over QQ of Abelian variety J0(49) of dimension 1 sage: J.rational_cuspidal_subgroup() Finite subgroup with invariants [2] over QQ of Abelian variety J0(49) of dimension 1 Note that computation of the rational cusp subgroup isn't implemented for `\Gamma_1`. :: sage: J = J1(13) sage: J.cuspidal_subgroup() Finite subgroup with invariants [19, 19] over QQ of Abelian variety J1(13) of dimension 2 sage: J.rational_cuspidal_subgroup() Traceback (most recent call last): ... NotImplementedError: only implemented when group is Gamma0 """ try: return self._rational_cuspidal_subgroup except AttributeError: if not self.is_subvariety_of_ambient_jacobian(): raise ValueError("self must be a subvariety of the ambient variety") if self.is_ambient(): T = self._ambient_cuspidal_subgroup(rational_subgroup=True) else: T = self.ambient_variety().rational_cuspidal_subgroup().intersection(self) self._rational_cuspidal_subgroup = T return T def zero_subgroup(self): """ Return the zero subgroup of this modular abelian variety, as a finite group. 
EXAMPLES:: sage: A =J0(54); G = A.zero_subgroup(); G Finite subgroup with invariants [] over QQ of Abelian variety J0(54) of dimension 4 sage: G.is_subgroup(A) True """ try: return self.__zero_subgroup except AttributeError: G = FiniteSubgroup_lattice(self, self.lattice(), field_of_definition=QQ) self.__zero_subgroup = G return G def finite_subgroup(self, X, field_of_definition=None, check=True): """ Return a finite subgroup of this modular abelian variety. INPUT: - ``X`` - list of elements of other finite subgroups of this modular abelian variety or elements that coerce into the rational homology (viewed as a rational vector space); also X could be a finite subgroup itself that is contained in this abelian variety. - ``field_of_definition`` - (default: None) field over which this group is defined. If None try to figure out the best base field. OUTPUT: a finite subgroup of a modular abelian variety EXAMPLES:: sage: J = J0(11) sage: J.finite_subgroup([[1/5,0], [0,1/3]]) Finite subgroup with invariants [15] over QQbar of Abelian variety J0(11) of dimension 1 :: sage: J = J0(33); C = J[0].cuspidal_subgroup(); C Finite subgroup with invariants [5] over QQ of Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33) sage: J.finite_subgroup([[0,0,0,0,0,1/6]]) Finite subgroup with invariants [6] over QQbar of Abelian variety J0(33) of dimension 3 sage: J.finite_subgroup(C) Finite subgroup with invariants [5] over QQ of Abelian variety J0(33) of dimension 3 """ if isinstance(X, (list, tuple)): X = self._ambient_lattice().span(X) elif isinstance(X, FiniteSubgroup): if field_of_definition is None: field_of_definition = X.field_of_definition() A = X.abelian_variety() if A.groups() != self.groups(): raise ValueError("ambient product Jacobians must be equal") if A == self: X = X.lattice() else: if X.is_subgroup(self): X = (X.lattice() + self.lattice()).intersection(self.vector_space()) else: raise ValueError("X must be a subgroup of self.") if field_of_definition is None: 
field_of_definition = QQbar else: field_of_definition = field_of_definition return FiniteSubgroup_lattice(self, X, field_of_definition=field_of_definition, check=check) def torsion_subgroup(self, n): """ If n is an integer, return the subgroup of points of order n. Return the `n`-torsion subgroup of elements of order dividing `n` of this modular abelian variety `A`, i.e., the group `A[n]`. EXAMPLES:: sage: J1(13).torsion_subgroup(19) Finite subgroup with invariants [19, 19, 19, 19] over QQ of Abelian variety J1(13) of dimension 2 :: sage: A = J0(23) sage: G = A.torsion_subgroup(5); G Finite subgroup with invariants [5, 5, 5, 5] over QQ of Abelian variety J0(23) of dimension 2 sage: G.order() 625 sage: G.gens() [[(1/5, 0, 0, 0)], [(0, 1/5, 0, 0)], [(0, 0, 1/5, 0)], [(0, 0, 0, 1/5)]] sage: A = J0(23) sage: A.torsion_subgroup(2).order() 16 """ try: return self.__torsion_subgroup[n] except KeyError: pass except AttributeError: self.__torsion_subgroup = {} lattice = self.lattice().scale(1/Integer(n)) H = FiniteSubgroup_lattice(self, lattice, field_of_definition=self.base_field()) self.__torsion_subgroup[n] = H return H ############################################################################### # Decomposition ############################################################################### def degen_t(self, none_if_not_known=False): """ If this abelian variety is obtained via decomposition then it gets labeled with the newform label along with some information about degeneracy maps. In particular, the label ends in a pair `(t,N)`, where `N` is the ambient level and `t` is an integer that divides the quotient of `N` by the newform level. This function returns the tuple `(t,N)`, or raises a ValueError if self isn't simple. .. note:: It need not be the case that self is literally equal to the image of the newform abelian variety under the `t^{th}` degeneracy map. See the documentation for the label method for more details. 
INPUT: - ``none_if_not_known`` - (default: False) - if True, return None instead of attempting to compute the degen map's `t`, if it isn't known. This None result is not cached. OUTPUT: a pair (integer, integer) EXAMPLES:: sage: D = J0(33).decomposition(); D [ Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33), Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) ] sage: D[0].degen_t() (1, 33) sage: D[1].degen_t() (3, 33) sage: D[2].degen_t() (1, 33) sage: J0(33).degen_t() Traceback (most recent call last): ... ValueError: self must be simple """ try: return self.__degen_t except AttributeError: if none_if_not_known: return None elif self.dimension() > 0 and self.is_simple(): self.__degen_t = self.decomposition()[0].degen_t() return self.__degen_t raise ValueError("self must be simple") def isogeny_number(self, none_if_not_known=False): """ Return the number (starting at 0) of the isogeny class of new simple abelian varieties that self is in. If self is not simple, raises a ValueError exception. INPUT: - ``none_if_not_known`` - bool (default: False); if True then this function may return None instead of True of False if we don't already know the isogeny number of self. EXAMPLES: We test the none_if_not_known flag first:: sage: J0(33).isogeny_number(none_if_not_known=True) is None True Of course, `J_0(33)` is not simple, so this function raises a ValueError:: sage: J0(33).isogeny_number() Traceback (most recent call last): ... ValueError: self must be simple Each simple factor has isogeny number 1, since that's the number at which the factor is new. 
        ::

            sage: J0(33)[1].isogeny_number()
            0
            sage: J0(33)[2].isogeny_number()
            0

        Next consider `J_0(37)` where there are two distinct newform
        factors::

            sage: J0(37)[1].isogeny_number()
            1
        """
        try:
            return self.__isogeny_number
        except AttributeError:
            if none_if_not_known:
                return None
            elif self.is_simple():
                # Delegate to the unique simple factor in the decomposition.
                self.__isogeny_number = self.decomposition()[0].isogeny_number()
                return self.__isogeny_number
            else:
                raise ValueError("self must be simple")

    def is_simple(self, none_if_not_known=False):
        """
        Return whether or not this modular abelian variety is simple, i.e.,
        has no proper nonzero abelian subvarieties.

        INPUT:

        -  ``none_if_not_known`` - bool (default: False); if
           True then this function may return None instead of True or False
           if we don't already know whether or not self is simple.

        EXAMPLES::

            sage: J0(5).is_simple(none_if_not_known=True) is None  # this may fail if J0(5) comes up elsewhere...
            True
            sage: J0(33).is_simple()
            False
            sage: J0(33).is_simple(none_if_not_known=True)
            False
            sage: J0(33)[1].is_simple()
            True
            sage: J1(17).is_simple()
            False
        """
        try:
            return self.__is_simple
        except AttributeError:
            if none_if_not_known:
                return None
            # self is simple exactly when its decomposition has at most one
            # factor.
            self.__is_simple = len(self.decomposition()) <= 1
            return self.__is_simple

    def decomposition(self, simple=True, bound=None):
        """
        Return a sequence of abelian subvarieties of self that are all
        simple, have finite intersection and sum to self.

        INPUT:

        -  ``simple`` - bool (default: True) if True, all factors are
           simple. If False, each factor returned is isogenous to a power
           of a simple and the simples in each factor are distinct.

        -  ``bound`` - int (default: None) if given, only use
           Hecke operators up to this bound when decomposing. This can give
           wrong answers, so use with caution!
EXAMPLES:: sage: m = ModularSymbols(11).cuspidal_submodule() sage: d1 = m.degeneracy_map(33,1).matrix(); d3=m.degeneracy_map(33,3).matrix() sage: w = ModularSymbols(33).submodule((d1 + d3).image(), check=False) sage: A = w.abelian_variety(); A Abelian subvariety of dimension 1 of J0(33) sage: D = A.decomposition(); D [ Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33) ] sage: D[0] == A True sage: B = A + J0(33)[0]; B Abelian subvariety of dimension 2 of J0(33) sage: dd = B.decomposition(simple=False); dd [ Abelian subvariety of dimension 2 of J0(33) ] sage: dd[0] == B True sage: dd = B.decomposition(); dd [ Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33) ] sage: sum(dd) == B True We decompose a product of two Jacobians:: sage: (J0(33) * J0(11)).decomposition() [ Simple abelian subvariety 11a(1,11) of dimension 1 of J0(33) x J0(11), Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33) x J0(11), Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33) x J0(11), Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) x J0(11) ] """ try: return self.__decomposition[(simple, bound)] except KeyError: pass except AttributeError: self.__decomposition = {} if self.is_ambient(): # Decompose each piece, then lift if len(self.groups()) == 0: D = [] elif len(self.groups()) == 1: D = ModularAbelianVariety_modsym(ModularSymbols(self.groups()[0], sign=0).cuspidal_submodule()).decomposition(simple=simple, bound=bound) else: # Decompose each ambient modular symbols factor. #X = [ModularAbelianVariety_modsym(ModularSymbols(G,sign=0).cuspidal_submodule()) for G in self.groups()] from abvar_ambient_jacobian import ModAbVar_ambient_jacobian_class X = [ModAbVar_ambient_jacobian_class(G) for G in self.groups()] E = [A.decomposition(simple=simple, bound=bound) for A in X] i = 0 n = 2*self.dimension() # Now lift each factor of the decomposition to self. 
G = self.groups() D = [] K = self.base_field() for C in E: for B in C: L = B.lattice().basis_matrix() if simple: is_simple = True else: is_simple = None lattice = matrix(QQ,L.nrows(),i).augment(L).augment(matrix(QQ,L.nrows(),n-i-L.ncols())).row_module(ZZ) D.append(ModularAbelianVariety(G, lattice, K, is_simple=is_simple, newform_level=B.newform_level(), isogeny_number=B.isogeny_number(none_if_not_known=True), number=B.degen_t(none_if_not_known=True))) if len(C) > 0: i += L.ncols() elif not simple: # In this case decompose the ambient space into powers of # simple abelian varieties (i.e. with # \code{simple=False)}, and then intersect the lattice # corresponding to self with each of these factors. D = [] L = self.lattice() groups = self.groups() K = self.base_ring() for X in self.ambient_variety().decomposition(simple=False): lattice = L.intersection(X.vector_space()) if lattice.rank() > 0: the_factor = ModularAbelianVariety(groups, lattice, K, is_simple=X.is_simple(none_if_not_known=True), newform_level=X.newform_level(), isogeny_number=X.isogeny_number(none_if_not_known=True), number=X.degen_t(none_if_not_known=True)) D.append(the_factor) else: # See the documentation for self._classify_ambient_factors # in order to understand what we're doing here. I_F, I_E, X = self._classify_ambient_factors(simple=simple, bound=bound) Z_E = [X[i] for i in I_E] Z_F = [X[i] for i in I_F] F = sum(Z_F, self.zero_subvariety()) # Now self is isogenous to the sum of the factors in Z. # We use this isogeny to obtain a product decomposition of # self. if F == self: # The easy case -- it is already such a decomposition D = Z_F else: # The hard case -- now we have to pull back the # factorization # Suppose $B$ is an abelian variety and there is a # finite degree map $B\to J$, where $J$ is an ambient # Jacobian. 
Suppose further that we find abelian # subvarieties $E$ and $F$ of $J$ such that $E + F = # J$, $E$ and $F$ have finite intersection, the # composition $B \to J \to J/E$ is an isogeny, and we # know an explicit decomposition of $F$. Then we can # compute a decomposition of $B$ as follows. Let # $L_E$ and $L_F$ be the lattices corresponding to $E$ # and $F$ inside of $L_J$. Compute a matrix $\Phi$ # representing the composition $L_B \to L_J \to L_F # \otimes \QQ$, where the map $L_J$ to $L_F\otimes # \QQ$ is projection onto the second factor in the # decomposition of $L_J$ as $L_E + L_F$ (up to finite # index). Finally, for each factor $A_i$ of $F$ with # lattice $L_{A_i}$, compute the saturation $S_i$ of # $\Phi^{-1}(L_{A_i})$. Then the $S_i$ define a # decomposition of $B$. E = sum(Z_E, self.zero_subvariety()) L_B = self.lattice() L_E = E.lattice() L_F = F.lattice() decomp_matrix = L_E.basis_matrix().stack(L_F.basis_matrix()) # Now we compute explicitly the ZZ-linear map (over # QQ) from L_B that is "projection onto L_F". This # means write each element of a basis for L_B in terms # of decomp_matrix, then take the bottom coordinates. X = decomp_matrix.solve_left(L_B.basis_matrix()) # Now row of X gives each element of L_B as a linear # combination of the rows of decomp_matrix. We # project onto L_F by taking the right-most part of # this matrix. n = X.ncols() proj = X.matrix_from_columns(range(n-L_F.rank(), n)) # Now proj is the matrix of projection that goes from # L_B to L_F, wrt the basis of those spaces. section = proj**(-1) # Now section maps L_F to L_B (tensor QQ). Now we # just take each factor of F, which corresponds to a # submodule of L_F, and map it over to L_B tensor QQ # and saturate. 
D = [] groups = self.groups() K = self.base_field() for A in Z_F: L_A = A.lattice() M = L_F.coordinate_module(L_A).basis_matrix() * section M, _ = M._clear_denom() M = M.saturation() M = M * L_B.basis_matrix() lattice = M.row_module(ZZ) the_factor = ModularAbelianVariety(groups, lattice, K, is_simple=True, newform_level=A.newform_level(), isogeny_number=A.isogeny_number(), number=A.degen_t()) D.append(the_factor) ################ if isinstance(D, Sequence_generic): S = D else: D.sort() S = Sequence(D, immutable=True, cr=True, universe=self.category()) self.__decomposition[(simple, bound)] = S return S def _classify_ambient_factors(self, simple=True, bound=None): r""" This function implements the following algorithm, which produces data useful in finding a decomposition or complement of self. #. Suppose `A_1 + \cdots + A_n` is a simple decomposition of the ambient space. #. For each `i`, let `B_i = A_1 + \cdots + A_i`. #. For each `i`, compute the intersection `C_i` of `B_i` and self. #. For each `i`, if the dimension of `C_i` is bigger than `C_{i-1}` put `i` in the "in" list; otherwise put `i` in the "out" list. Then one can show that self is isogenous to the sum of the `A_i` with `i` in the "in" list. Moreover, the sum of the `A_j` with `i` in the "out" list is a complement of self in the ambient space. 
INPUT: - ``simple`` - bool (default: True) - ``bound`` - integer (default: None); if given, passed onto decomposition function OUTPUT: IN list OUT list simple (or power of simple) factors EXAMPLES:: sage: d1 = J0(11).degeneracy_map(33, 1); d1 Degeneracy map from Abelian variety J0(11) of dimension 1 to Abelian variety J0(33) of dimension 3 defined by [1] sage: d2 = J0(11).degeneracy_map(33, 3); d2 Degeneracy map from Abelian variety J0(11) of dimension 1 to Abelian variety J0(33) of dimension 3 defined by [3] sage: A = (d1 + d2).image(); A Abelian subvariety of dimension 1 of J0(33) sage: A._classify_ambient_factors() ([1], [0, 2], [ Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33), Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) ]) """ # Decompose an arbitrary abelian variety amb = self.ambient_variety() S = self.vector_space() X = amb.decomposition(simple=simple, bound=bound) IN = []; OUT = [] i = 0 V = 0 last_dimension = 0 for j in range(len(X)): V += X[j].vector_space() d = S.intersection(V).dimension() if d > last_dimension: IN.append(j) last_dimension = d else: OUT.append(j) return IN, OUT, X def _isogeny_to_product_of_simples(self): r""" Given an abelian variety `A`, return an isogeny `\phi: A \rightarrow B_1 \times \cdots \times B_n`, where each `B_i` is simple. Note that this isogeny is not unique. 
EXAMPLES:: sage: J = J0(37) ; J.decomposition() [ Simple abelian subvariety 37a(1,37) of dimension 1 of J0(37), Simple abelian subvariety 37b(1,37) of dimension 1 of J0(37) ] sage: phi = J._isogeny_to_product_of_simples() ; phi Abelian variety morphism: From: Abelian variety J0(37) of dimension 2 To: Abelian subvariety of dimension 2 of J0(37) x J0(37) sage: J[0].intersection(J[1]) == phi.kernel() True :: sage: J = J0(22) * J0(37) sage: J._isogeny_to_product_of_simples() Abelian variety morphism: From: Abelian variety J0(22) x J0(37) of dimension 4 To: Abelian subvariety of dimension 4 of J0(11) x J0(11) x J0(37) x J0(37) """ try: return self._simple_product_isogeny except AttributeError: pass D = self.decomposition() dest = prod([d._isogeny_to_newform_abelian_variety().image() for d in D]) A = self.ambient_variety() dim = sum([d.dimension() for d in D]) proj_ls = [ A.projection(factor) for factor in D ] mat = matrix(ZZ, 2*self.dimension(), 2*dim) ind = 0 for i in range(len(D)): factor = D[i] proj = proj_ls[i] mat.set_block(0, ind, proj.restrict_domain(self).matrix()) ind += 2*factor.dimension() H = self.Hom(dest) self._simple_product_isogeny = H(Morphism(H, mat)) return self._simple_product_isogeny def _isogeny_to_product_of_powers(self): r""" Given an abelian variety `A`, return an isogeny `\phi: A \rightarrow B_1 \times \cdots \times B_n`, where each `B_i` is a power of a simple abelian variety. These factors will be exactly those returned by self.decomposition(simple=False).Note that this isogeny is not unique. 
EXAMPLES:: sage: J = J0(33) ; D = J.decomposition(simple=False) ; len(D) 2 sage: phi = J._isogeny_to_product_of_powers() ; phi Abelian variety morphism: From: Abelian variety J0(33) of dimension 3 To: Abelian subvariety of dimension 3 of J0(33) x J0(33) :: sage: J = J0(22) * J0(37) sage: J._isogeny_to_product_of_powers() Abelian variety morphism: From: Abelian variety J0(22) x J0(37) of dimension 4 To: Abelian subvariety of dimension 4 of J0(22) x J0(37) x J0(22) x J0(37) x J0(22) x J0(37) """ try: return self._simple_power_product_isogeny except AttributeError: pass D = self.decomposition(simple=False) A = self.ambient_variety() proj_ls = [ A.projection(factor) for factor in D ] dest = prod([phi.image() for phi in proj_ls]) dim = sum([d.dimension() for d in D]) mat = matrix(ZZ, 2*self.dimension(), 2*dim) ind = 0 for i in range(len(D)): factor = D[i] proj = proj_ls[i] mat.set_block(0, ind, proj.restrict_domain(self).matrix()) ind += 2*factor.dimension() H = self.Hom(dest) self._simple_power_product_isogeny = H(Morphism(H, mat)) return self._simple_power_product_isogeny def complement(self, A=None): """ Return a complement of this abelian variety. INPUT: - ``A`` - (default: None); if given, A must be an abelian variety that contains self, in which case the complement of self is taken inside A. Otherwise the complement is taken in the ambient product Jacobian. 
OUTPUT: abelian variety EXAMPLES:: sage: a,b,c = J0(33) sage: (a+b).complement() Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) sage: (a+b).complement() == c True sage: a.complement(a+b) Abelian subvariety of dimension 1 of J0(33) """ try: C = self.__complement except AttributeError: pass if self.dimension() is 0: if A is None: C = self.ambient_variety() else: C = A elif A is not None and self.dimension() == A.dimension(): if not self.is_subvariety(A): raise ValueError("self must be a subvariety of A") C = self.zero_subvariety() else: _, factors, X = self._classify_ambient_factors() D = [X[i] for i in factors] C = sum(D) if C: self.__complement = C if A is not None: C = C.intersection(A)[1] else: C = self.zero_subvariety() return C def dual(self): r""" Return the dual of this abelian variety. OUTPUT: - dual abelian variety - morphism from self to dual - covering morphism from J to dual .. warning:: This is currently only implemented when self is an abelian subvariety of the ambient Jacobian product, and the complement of self in the ambient product Jacobian share no common factors. A more general implementation will require implementing computation of the intersection pairing on integral homology and the resulting Weil pairing on torsion. EXAMPLES: We compute the dual of the elliptic curve newform abelian variety of level `33`, and find the kernel of the modular map, which has structure `(\ZZ/3)^2`. :: sage: A,B,C = J0(33) sage: C Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) sage: Cd, f, pi = C.dual() sage: f.matrix() [3 0] [0 3] sage: f.kernel()[0] Finite subgroup with invariants [3, 3] over QQ of Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) By a theorem the modular degree must thus be `3`:: sage: E = EllipticCurve('33a') sage: E.modular_degree() 3 Next we compute the dual of a `2`-dimensional new simple abelian subvariety of `J_0(43)`. 
:: sage: A = AbelianVariety('43b'); A Newform abelian subvariety 43b of dimension 2 of J0(43) sage: Ad, f, pi = A.dual() The kernel shows that the modular degree is `2`:: sage: f.kernel()[0] Finite subgroup with invariants [2, 2] over QQ of Newform abelian subvariety 43b of dimension 2 of J0(43) Unfortunately, the dual is not implemented in general:: sage: A = J0(22)[0]; A Simple abelian subvariety 11a(1,22) of dimension 1 of J0(22) sage: A.dual() Traceback (most recent call last): ... NotImplementedError: dual not implemented unless complement shares no simple factors with self. """ try: return self.__dual except AttributeError: if not self.is_subvariety_of_ambient_jacobian(): raise NotImplementedError("dual not implemented unless abelian variety is a subvariety of the ambient Jacobian product") if not self._complement_shares_no_factors_with_same_label(): raise NotImplementedError("dual not implemented unless complement shares no simple factors with self.") C = self.complement() Q, phi = self.ambient_variety().quotient(C) psi = self.ambient_morphism() self.__dual = Q, phi*psi, phi return self.__dual def _factors_with_same_label(self, other): """ Given two modular abelian varieties self and other, this function returns a list of simple abelian subvarieties appearing in the decomposition of self that have the same newform labels. Each simple factor with a given newform label appears at most one. 
INPUT: - ``other`` - abelian variety OUTPUT: list of simple abelian varieties EXAMPLES:: sage: D = J0(33).decomposition(); D [ Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33), Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) ] sage: D[0]._factors_with_same_label(D[1]) [Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)] sage: D[0]._factors_with_same_label(D[2]) [] sage: (D[0]+D[1])._factors_with_same_label(D[1] + D[2]) [Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)] This illustrates that the multiplicities in the returned list are 1:: sage: (D[0]+D[1])._factors_with_same_label(J0(33)) [Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)] This illustrates that the ambient product Jacobians do not have to be the same:: sage: (D[0]+D[1])._factors_with_same_label(J0(22)) [Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33)] This illustrates that the actual factor labels are relevant, not just the isogeny class. :: sage: (D[0]+D[1])._factors_with_same_label(J1(11)) [] sage: J1(11)[0].newform_label() '11aG1' """ if not isinstance(other, ModularAbelianVariety_abstract): raise TypeError("other must be an abelian variety") D = self.decomposition() C = set([A.newform_label() for A in other.decomposition()]) Z = [] for X in D: lbl = X.newform_label() if lbl in C: Z.append(X) C.remove(lbl) Z.sort() return Z def _complement_shares_no_factors_with_same_label(self): """ Return True if no simple factor of self has the same newform_label as any factor in a Poincare complement of self in the ambient product Jacobian. 
EXAMPLES: `J_0(37)` is made up of two non-isogenous elliptic curves:: sage: J0(37)[0]._complement_shares_no_factors_with_same_label() True `J_0(33)` decomposes as a product of two isogenous elliptic curves with a third nonisogenous curve:: sage: D = J0(33).decomposition(); D [ Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33), Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) ] sage: D[0]._complement_shares_no_factors_with_same_label() False sage: (D[0]+D[1])._complement_shares_no_factors_with_same_label() True sage: D[2]._complement_shares_no_factors_with_same_label() True This example illustrates the relevance of the ambient product Jacobian. :: sage: D = (J0(11) * J0(11)).decomposition(); D [ Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J0(11), Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J0(11) ] sage: D[0]._complement_shares_no_factors_with_same_label() False This example illustrates that it is the newform label, not the isogeny, class that matters:: sage: D = (J0(11)*J1(11)).decomposition(); D [ Simple abelian subvariety 11aG1(1,11) of dimension 1 of J0(11) x J1(11), Simple abelian subvariety 11a(1,11) of dimension 1 of J0(11) x J1(11) ] sage: D[0]._complement_shares_no_factors_with_same_label() True sage: D[0].newform_label() '11aG1' sage: D[1].newform_label() '11a' """ try: return self.__complement_shares except AttributeError: t = len(self._factors_with_same_label(self.complement())) == 0 self.__complement_shares = t return t def __getitem__(self, i): """ Returns the `i^{th}` decomposition factor of self or returns the slice `i` of decompositions of self. 
EXAMPLES:: sage: J = J0(389) sage: J.decomposition() [ Simple abelian subvariety 389a(1,389) of dimension 1 of J0(389), Simple abelian subvariety 389b(1,389) of dimension 2 of J0(389), Simple abelian subvariety 389c(1,389) of dimension 3 of J0(389), Simple abelian subvariety 389d(1,389) of dimension 6 of J0(389), Simple abelian subvariety 389e(1,389) of dimension 20 of J0(389) ] sage: J[2] Simple abelian subvariety 389c(1,389) of dimension 3 of J0(389) sage: J[-1] Simple abelian subvariety 389e(1,389) of dimension 20 of J0(389) sage: J = J0(125); J.decomposition() [ Simple abelian subvariety 125a(1,125) of dimension 2 of J0(125), Simple abelian subvariety 125b(1,125) of dimension 2 of J0(125), Simple abelian subvariety 125c(1,125) of dimension 4 of J0(125) ] sage: J[:2] [ Simple abelian subvariety 125a(1,125) of dimension 2 of J0(125), Simple abelian subvariety 125b(1,125) of dimension 2 of J0(125) ] """ return self.decomposition()[i] class ModularAbelianVariety(ModularAbelianVariety_abstract): def __init__(self, groups, lattice=None, base_field=QQ, is_simple=None, newform_level=None, isogeny_number=None, number=None, check=True): r""" Create a modular abelian variety with given level and base field. 
INPUT: - ``groups`` - a tuple of congruence subgroups - ``lattice`` - (default: `\ZZ^n`) a full lattice in `\ZZ^n`, where `n` is the sum of the dimensions of the spaces of cuspidal modular symbols corresponding to each `\Gamma \in` groups - ``base_field`` - a field (default: `\QQ`) EXAMPLES:: sage: J0(23) Abelian variety J0(23) of dimension 2 """ ModularAbelianVariety_abstract.__init__(self, groups, base_field, is_simple=is_simple, newform_level=newform_level, isogeny_number=isogeny_number, number=number, check=check) if lattice is None: lattice = ZZ**(2*self._ambient_dimension()) if check: n = self._ambient_dimension() if not is_FreeModule(lattice): raise TypeError("lattice must be a free module") if lattice.base_ring() != ZZ: raise TypeError("lattice must be over ZZ") if lattice.degree() != 2*n: raise ValueError("lattice must have degree 2*n (=%s)"%(2*n)) if not lattice.saturation().is_submodule(lattice): # potentially expensive raise ValueError("lattice must be full") self.__lattice = lattice def lattice(self): """ Return the lattice that defines this abelian variety. OUTPUT: - ``lattice`` - a lattice embedded in the rational homology of the ambient product Jacobian EXAMPLES:: sage: A = (J0(11) * J0(37))[1]; A Simple abelian subvariety 37a(1,37) of dimension 1 of J0(11) x J0(37) sage: type(A) <class 'sage.modular.abvar.abvar.ModularAbelianVariety_with_category'> sage: A.lattice() Free module of degree 6 and rank 2 over Integer Ring Echelon basis matrix: [ 0 0 1 -1 1 0] [ 0 0 0 0 2 -1] """ return self.__lattice class ModularAbelianVariety_modsym_abstract(ModularAbelianVariety_abstract): # Anything that derives from this class must define the # modular_symbols method, which returns a cuspidal modular symbols # space over QQ. It can have any sign. def _modular_symbols(self): """ Return the space of modular symbols corresponding to this modular symbols abelian variety. 
EXAMPLES: This function is in the abstract base class, so it raises a NotImplementedError:: sage: M = ModularSymbols(37).cuspidal_submodule() sage: A = M.abelian_variety(); A Abelian variety J0(37) of dimension 2 sage: sage.modular.abvar.abvar.ModularAbelianVariety_modsym_abstract._modular_symbols(A) Traceback (most recent call last): ... NotImplementedError: bug -- must define this Of course this function isn't called in practice, so this works:: sage: A._modular_symbols() Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field """ raise NotImplementedError("bug -- must define this") def __add__(self, other): """ Add two modular abelian variety factors. EXAMPLES:: sage: A = J0(42); D = A.decomposition(); D [ Simple abelian subvariety 14a(1,42) of dimension 1 of J0(42), Simple abelian subvariety 14a(3,42) of dimension 1 of J0(42), Simple abelian subvariety 21a(1,42) of dimension 1 of J0(42), Simple abelian subvariety 21a(2,42) of dimension 1 of J0(42), Simple abelian subvariety 42a(1,42) of dimension 1 of J0(42) ] sage: D[0] + D[1] Abelian subvariety of dimension 2 of J0(42) sage: D[1].is_subvariety(D[0] + D[1]) True sage: D[0] + D[1] + D[2] Abelian subvariety of dimension 3 of J0(42) sage: D[0] + D[0] Abelian subvariety of dimension 1 of J0(42) sage: D[0] + D[0] == D[0] True sage: sum(D, D[0]) == A True """ if not is_ModularAbelianVariety(other): if other == 0: return self raise TypeError("sum not defined") if not isinstance(other, ModularAbelianVariety_modsym_abstract): return ModularAbelianVariety_abstract.__add__(self, other) if self.groups() != other.groups(): raise TypeError("sum not defined since ambient spaces different") M = self.modular_symbols() + other.modular_symbols() return ModularAbelianVariety_modsym(M) def groups(self): """ Return the tuple of groups associated to the modular symbols abelian variety. This is always a 1-tuple. 
OUTPUT: tuple EXAMPLES:: sage: A = ModularSymbols(33).cuspidal_submodule().abelian_variety(); A Abelian variety J0(33) of dimension 3 sage: A.groups() (Congruence Subgroup Gamma0(33),) sage: type(A) <class 'sage.modular.abvar.abvar.ModularAbelianVariety_modsym_with_category'> """ return (self._modular_symbols().group(), ) def lattice(self): r""" Return the lattice defining this modular abelian variety. OUTPUT: A free `\ZZ`-module embedded in an ambient `\QQ`-vector space. EXAMPLES:: sage: A = ModularSymbols(33).cuspidal_submodule()[0].abelian_variety(); A Abelian subvariety of dimension 1 of J0(33) sage: A.lattice() Free module of degree 6 and rank 2 over Integer Ring User basis matrix: [ 1 0 0 -1 0 0] [ 0 0 1 0 1 -1] sage: type(A) <class 'sage.modular.abvar.abvar.ModularAbelianVariety_modsym_with_category'> """ try: return self.__lattice except AttributeError: M = self.modular_symbols() S = M.ambient_module().cuspidal_submodule() if M.dimension() == S.dimension(): L = ZZ**M.dimension() else: K0 = M.integral_structure() K1 = S.integral_structure() L = K1.coordinate_module(K0) self.__lattice = L return self.__lattice def _set_lattice(self, lattice): """ Set the lattice of this modular symbols abelian variety. .. warning:: This is only for internal use. Do not use this unless you really really know what you're doing. That's why there is an underscore in this method name. INPUT: - ``lattice`` - a lattice EXAMPLES: We do something evil - there's no type checking since this function is for internal use only:: sage: A = ModularSymbols(33).cuspidal_submodule().abelian_variety() sage: A._set_lattice(5) sage: A.lattice() 5 """ self.__lattice = lattice def modular_symbols(self, sign=0): """ Return space of modular symbols (with given sign) associated to this modular abelian variety, if it can be found by cutting down using Hecke operators. Otherwise raise a RuntimeError exception. 
EXAMPLES:: sage: A = J0(37) sage: A.modular_symbols() Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field sage: A.modular_symbols(1) Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(37) of weight 2 with sign 1 over Rational Field More examples:: sage: J0(11).modular_symbols() Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field sage: J0(11).modular_symbols(sign=1) Modular Symbols subspace of dimension 1 of Modular Symbols space of dimension 2 for Gamma_0(11) of weight 2 with sign 1 over Rational Field sage: J0(11).modular_symbols(sign=0) Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field sage: J0(11).modular_symbols(sign=-1) Modular Symbols space of dimension 1 for Gamma_0(11) of weight 2 with sign -1 over Rational Field Even more examples:: sage: A = J0(33)[1]; A Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33) sage: A.modular_symbols() Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field It is not always possible to determine the sign subspaces:: sage: A.modular_symbols(1) Traceback (most recent call last): ... RuntimeError: unable to determine sign (=1) space of modular symbols :: sage: A.modular_symbols(-1) Traceback (most recent call last): ... 
RuntimeError: unable to determine sign (=-1) space of modular symbols """ M = self._modular_symbols().modular_symbols_of_sign(sign) if (sign != 0 and M.dimension() != self.dimension()) or (sign == 0 and M.dimension() != 2*self.dimension()): raise RuntimeError("unable to determine sign (=%s) space of modular symbols"%sign) return M def _compute_hecke_polynomial(self, n, var='x'): """ Return the characteristic polynomial of the `n^{th}` Hecke operator on self. .. note:: If self has dimension d, then this is a polynomial of degree d. It is not of degree 2\*d, so it is the square root of the characteristic polynomial of the Hecke operator on integral or rational homology (which has degree 2\*d). EXAMPLES:: sage: J0(11).hecke_polynomial(2) x + 2 sage: J0(23)._compute_hecke_polynomial(2) x^2 + x - 1 sage: J1(13).hecke_polynomial(2) x^2 + 3*x + 3 sage: factor(J0(43).hecke_polynomial(2)) (x + 2) * (x^2 - 2) The Hecke polynomial is the square root of the characteristic polynomial:: sage: factor(J0(43).hecke_operator(2).charpoly()) (x + 2) * (x^2 - 2) """ return sqrt_poly(self.modular_symbols().hecke_polynomial(n, var)) def _integral_hecke_matrix(self, n, sign=0): """ Return the action of the Hecke operator `T_n` on the integral homology of self. INPUT: - ``n`` - a positive integer - ``sign`` - 0, +1, or -1; if 1 or -1 act on the +1 or -1 quotient of the integral homology. EXAMPLES:: sage: J1(13)._integral_hecke_matrix(2) # slightly random choice of basis [-2 0 -1 1] [ 1 -1 0 -1] [ 1 1 -2 0] [ 0 1 -1 -1] sage: J1(13)._integral_hecke_matrix(2,sign=1) # slightly random choice of basis [-1 1] [-1 -2] sage: J1(13)._integral_hecke_matrix(2,sign=-1) # slightly random choice of basis [-2 -1] [ 1 -1] """ return self.modular_symbols(sign).integral_hecke_matrix(n) def _rational_hecke_matrix(self, n, sign=0): """ Return the action of the Hecke operator `T_n` on the rational homology of self. 
INPUT: - ``n`` - a positive integer - ``sign`` - 0, +1, or -1; if 1 or -1 act on the +1 or -1 quotient of the rational homology. EXAMPLES:: sage: J1(13)._rational_hecke_matrix(2) # slightly random choice of basis [-2 0 -1 1] [ 1 -1 0 -1] [ 1 1 -2 0] [ 0 1 -1 -1] sage: J0(43)._rational_hecke_matrix(2,sign=1) # slightly random choice of basis [-2 0 1] [-1 -2 2] [-2 0 2] """ return self._integral_hecke_matrix(n, sign=sign).change_ring(QQ) def group(self): """ Return the congruence subgroup associated that this modular abelian variety is associated to. EXAMPLES:: sage: J0(13).group() Congruence Subgroup Gamma0(13) sage: J1(997).group() Congruence Subgroup Gamma1(997) sage: JH(37,[3]).group() Congruence Subgroup Gamma_H(37) with H generated by [3] sage: J0(37)[1].groups() (Congruence Subgroup Gamma0(37),) """ return self.modular_symbols().group() def is_subvariety(self, other): """ Return True if self is a subvariety of other. EXAMPLES:: sage: J = J0(37); J Abelian variety J0(37) of dimension 2 sage: A = J[0]; A Simple abelian subvariety 37a(1,37) of dimension 1 of J0(37) sage: A.is_subvariety(J) True sage: A.is_subvariety(J0(11)) False There may be a way to map `A` into `J_0(74)`, but `A` is not equipped with any special structure of an embedding. 
:: sage: A.is_subvariety(J0(74)) False Some ambient examples:: sage: J = J0(37) sage: J.is_subvariety(J) True sage: J.is_subvariety(25) False More examples:: sage: A = J0(42); D = A.decomposition(); D [ Simple abelian subvariety 14a(1,42) of dimension 1 of J0(42), Simple abelian subvariety 14a(3,42) of dimension 1 of J0(42), Simple abelian subvariety 21a(1,42) of dimension 1 of J0(42), Simple abelian subvariety 21a(2,42) of dimension 1 of J0(42), Simple abelian subvariety 42a(1,42) of dimension 1 of J0(42) ] sage: D[0].is_subvariety(A) True sage: D[1].is_subvariety(D[0] + D[1]) True sage: D[2].is_subvariety(D[0] + D[1]) False """ if not is_ModularAbelianVariety(other): return False if not isinstance(other, ModularAbelianVariety_modsym_abstract): return ModularAbelianVariety_abstract.is_subvariety(self, other) return self.modular_symbols().is_submodule(other.modular_symbols()) def is_ambient(self): """ Return True if this abelian variety attached to a modular symbols space space is attached to the cuspidal subspace of the ambient modular symbols space. OUTPUT: bool EXAMPLES:: sage: A = ModularSymbols(43).cuspidal_subspace().abelian_variety(); A Abelian variety J0(43) of dimension 3 sage: A.is_ambient() True sage: type(A) <class 'sage.modular.abvar.abvar.ModularAbelianVariety_modsym_with_category'> sage: A = ModularSymbols(43).cuspidal_subspace()[1].abelian_variety(); A Abelian subvariety of dimension 2 of J0(43) sage: A.is_ambient() False """ return self.degree() == self.dimension() def dimension(self): """ Return the dimension of this modular abelian variety. EXAMPLES:: sage: J0(37)[0].dimension() 1 sage: J0(43)[1].dimension() 2 sage: J1(17)[1].dimension() 4 """ try: return self._dimension except AttributeError: M = self._modular_symbols() if M.sign() == 0: d = M.dimension() // 2 else: d = M.dimension() self._dimension = d return d def new_subvariety(self, p=None): """ Return the new or `p`-new subvariety of self. 
INPUT: - ``self`` - a modular abelian variety - ``p`` - prime number or None (default); if p is a prime, return the p-new subvariety. Otherwise return the full new subvariety. EXAMPLES:: sage: J0(33).new_subvariety() Abelian subvariety of dimension 1 of J0(33) sage: J0(100).new_subvariety() Abelian subvariety of dimension 1 of J0(100) sage: J1(13).new_subvariety() Abelian variety J1(13) of dimension 2 """ try: return self.__new_subvariety[p] except AttributeError: self.__new_subvariety = {} except KeyError: pass A = self.modular_symbols() N = A.new_submodule(p=p) B = ModularAbelianVariety_modsym(N) self.__new_subvariety[p] = B return B def old_subvariety(self, p=None): """ Return the old or `p`-old abelian variety of self. INPUT: - ``self`` - a modular abelian variety - ``p`` - prime number or None (default); if p is a prime, return the p-old subvariety. Otherwise return the full old subvariety. EXAMPLES:: sage: J0(33).old_subvariety() Abelian subvariety of dimension 2 of J0(33) sage: J0(100).old_subvariety() Abelian subvariety of dimension 6 of J0(100) sage: J1(13).old_subvariety() Abelian subvariety of dimension 0 of J1(13) """ try: return self.__old_subvariety[p] except AttributeError: self.__old_subvariety = {} except KeyError: pass A = self.modular_symbols() N = A.old_submodule(p=p) B = ModularAbelianVariety_modsym(N) self.__old_subvariety[p] = B return B def decomposition(self, simple=True, bound=None): r""" Decompose this modular abelian variety as a product of abelian subvarieties, up to isogeny. INPUT: simple- bool (default: True) if True, all factors are simple. If False, each factor returned is isogenous to a power of a simple and the simples in each factor are distinct. - ``bound`` - int (default: None) if given, only use Hecke operators up to this bound when decomposing. This can give wrong answers, so use with caution! 
EXAMPLES:: sage: J = J0(33) sage: J.decomposition() [ Simple abelian subvariety 11a(1,33) of dimension 1 of J0(33), Simple abelian subvariety 11a(3,33) of dimension 1 of J0(33), Simple abelian subvariety 33a(1,33) of dimension 1 of J0(33) ] sage: J1(17).decomposition() [ Simple abelian subvariety 17aG1(1,17) of dimension 1 of J1(17), Simple abelian subvariety 17bG1(1,17) of dimension 4 of J1(17) ] """ try: return self.__decomposition[(simple, bound)] except KeyError: pass except AttributeError: self.__decomposition = {} if not self.is_ambient(): S = ModularAbelianVariety_abstract.decomposition(self, simple=simple, bound=bound) else: A = self.modular_symbols() amb = A.ambient_module() G = amb.group() S = amb.cuspidal_submodule().integral_structure() if simple: M = A.level() D = [] for N in reversed(divisors(M)): if N > 1: isogeny_number = 0 A = amb.modular_symbols_of_level(N).cuspidal_subspace().new_subspace() if bound is None: X = factor_new_space(A) else: X = A.decomposition(bound = bound) for B in X: for t in divisors(M//N): D.append(ModularAbelianVariety_modsym(B.degeneracy_map(M, t).image(), is_simple=True, newform_level=(N, G), isogeny_number=isogeny_number, number=(t,M))) isogeny_number += 1 elif A == amb.cuspidal_submodule(): D = [ModularAbelianVariety_modsym(B) for B in A.decomposition(bound = bound)] else: D = ModularAbelianVariety_abstract.decomposition(self, simple=simple, bound=bound) D.sort() S = Sequence(D, immutable=True, cr=True, universe=self.category()) self.__decomposition[(simple, bound)] = S return S class ModularAbelianVariety_modsym(ModularAbelianVariety_modsym_abstract): def __init__(self, modsym, lattice=None, newform_level=None, is_simple=None, isogeny_number=None, number=None, check=True): """ Modular abelian variety that corresponds to a Hecke stable space of cuspidal modular symbols. EXAMPLES: We create a modular abelian variety attached to a space of modular symbols. 
:: sage: M = ModularSymbols(23).cuspidal_submodule() sage: A = M.abelian_variety(); A Abelian variety J0(23) of dimension 2 """ if check: if not isinstance(modsym, ModularSymbolsSpace): raise TypeError("modsym must be a modular symbols space") if modsym.sign() != 0: raise TypeError("modular symbols space must have sign 0") if not modsym.is_cuspidal(): raise ValueError("modsym must be cuspidal") ModularAbelianVariety_abstract.__init__(self, (modsym.group(), ), modsym.base_ring(), newform_level=newform_level, is_simple=is_simple, isogeny_number=isogeny_number, number=number, check=check) if lattice is not None: self._set_lattice(lattice) self.__modsym = modsym def _modular_symbols(self): """ Return the modular symbols space that defines this modular abelian variety. OUTPUT: space of modular symbols EXAMPLES:: sage: M = ModularSymbols(37).cuspidal_submodule() sage: A = M.abelian_variety(); A Abelian variety J0(37) of dimension 2 sage: A._modular_symbols() Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field """ return self.__modsym def component_group_order(self, p): """ Return the order of the component group of the special fiber at p of the Neron model of self. NOTE: For bad primes, this is only implemented when the group if Gamma0 and p exactly divides the level. NOTE: the input abelian variety must be simple ALGORITHM: See "Component Groups of Quotients of J0(N)" by Kohel and Stein. That paper is about optimal quotients; however, section 4.1 of Conrad-Stein "Component Groups of Purely Toric Quotients", one sees that the component group of an optimal quotient is the same as the component group of its dual (which is the subvariety). 
INPUT: - p -- a prime number OUTPUT: - Integer EXAMPLES:: sage: A = J0(37)[1] sage: A.component_group_order(37) 3 sage: A = J0(43)[1] sage: A.component_group_order(37) 1 sage: A.component_group_order(43) 7 sage: A = J0(23)[0] sage: A.component_group_order(23) 11 """ if not self.is_simple(): raise ValueError("self must be simple") p = Integer(p) if not p.is_prime(): raise ValueError("p must be a prime integer") try: return self.__component_group[p][0] except AttributeError: self.__component_group = {} except KeyError: pass # Easy special case -- a prime of good reduction if self.level() % p != 0: one = Integer(1) self.__component_group[p] = (one,one,one) return one # Cases that we don't know how to handle yet. if not is_Gamma0(self.group()): raise NotImplementedError("computation of component group not implemented when group isn't Gamma0") if self.level() % (p*p) == 0: raise NotImplementedError("computation of component group not implemented when p^2 divides the level") # Now we're on Gamma0(p*M) with gcd(p,M) = 1. # 1. Compute factor of Brandt module space, and put integral structure on it. # TODO -- in case self.level() is prime, should use # supersingular module instead for massive speedup... Of # course, then one can just use Emertons theorem that the # component group order equals the torsion order, and avoid # all of this! XI = self.brandt_module(p) Y = XI.ambient_module() n = Y.dimension() # X_ZZ is the submodule of degree 0 divisors M = ZZ**n deg_zero = [] for k in range(1,n): v = vector(ZZ, n) v[0] = 1 v[k] = -1 deg_zero.append(v) X_ZZ = M.span(deg_zero, ZZ) XI_ZZ = XI.free_module().intersection(M) # 2. Compute the map alpha: X --> Hom(X[I],Z) over ZZ # todo -- this could be done more quickly with a clever matrix multiply B = [XI(v) for v in XI_ZZ.basis()] mat = [] for v in M.basis(): w = Y(v) mat.append([w.monodromy_pairing(b) for b in B]) monodromy = matrix(ZZ, mat) alpha = X_ZZ.basis_matrix().change_ring(ZZ) * monodromy # 3. 
Compute invariants: # * Phi_X = #coker(alpha) # * m_X = #(alpha(X)/alpha(X[I])) alphaX = alpha.row_module() Phi_X_invariants = alphaX.basis_matrix().change_ring(ZZ).elementary_divisors() Phi_X = prod(Phi_X_invariants + [Integer(1)]) W = alphaX.span([b*monodromy for b in XI_ZZ.basis()], ZZ) m_X = Integer(W.index_in(alphaX)) # 4. Compute the modular degree moddeg = self.modular_degree() # 5. Obtain the component group order using Theorem 1 of [Kohel-Stein] Phi = Phi_X * moddeg / m_X # 6. Record the answer self.__component_group[p] = (Phi, Phi_X_invariants, m_X) return Phi def _invariants_of_image_of_component_group_of_J0(self, p): """ Return the elementary invariants of the image of the component group of J0(N). The API of this function is subject to change, which is why it starts with an underscore. INPUT: - p -- integer OUTPUT: - list -- of elementary invariants EXAMPLES:: sage: A = J0(62).new_subvariety()[1]; A Simple abelian subvariety 62b(1,62) of dimension 2 of J0(62) sage: A._invariants_of_image_of_component_group_of_J0(2) [1, 6] sage: A.component_group_order(2) 66 """ self.component_group_order(p) return list(self.__component_group[p][1]) # make a copy def tamagawa_number(self, p): """ Return the Tamagawa number of this abelian variety at p. NOTE: For bad primes, this is only implemented when the group if Gamma0 and p exactly divides the level and Atkin-Lehner acts diagonally on this abelian variety (e.g., if this variety is new and simple). See the self.component_group command for more information. NOTE: the input abelian variety must be simple In cases where this function doesn't work, consider using the self.tamagawa_number_bounds functions. 
INPUT: - p -- a prime number OUTPUT: - Integer EXAMPLES:: sage: A = J0(37)[1] sage: A.tamagawa_number(37) 3 sage: A = J0(43)[1] sage: A.tamagawa_number(37) 1 sage: A.tamagawa_number(43) 7 sage: A = J0(23)[0] sage: A.tamagawa_number(23) 11 """ try: return self.__tamagawa_number[p] except AttributeError: self.__tamagawa_number = {} except KeyError: pass if not self.is_simple(): raise ValueError("self must be simple") try: g = self.component_group_order(p) except NotImplementedError: raise NotImplementedError("Tamagawa number can't be determined using known algorithms, so consider using the tamagawa_number_bounds function instead") div, mul, mul_primes = self.tamagawa_number_bounds(p) if div == mul: cp = div else: raise NotImplementedError("the Tamagawa number at %s is a power of 2, but the exact power can't be determined using known algorithms. Consider using the tamagawa_number_bounds function instead."%p) self.__tamagawa_number[p] = cp return cp def tamagawa_number_bounds(self, p): """ Return a divisor and multiple of the Tamagawa number of self at p. NOTE: the input abelian variety must be simple INPUT: - p -- a prime number OUTPUT: - div -- integer; divisor of Tamagawa number at p - mul -- integer; multiple of Tamagawa number at p - mul_primes -- tuple; in case mul==0, a list of all primes that can possibly divide the Tamagawa number at p. 
EXAMPLES:: sage: A = J0(63).new_subvariety()[1]; A Simple abelian subvariety 63b(1,63) of dimension 2 of J0(63) sage: A.tamagawa_number_bounds(7) (3, 3, ()) sage: A.tamagawa_number_bounds(3) (1, 0, (2, 3, 5)) """ try: return self.__tamagawa_number_bounds[p] except AttributeError: self.__tamagawa_number_bounds = {} except KeyError: pass if not self.is_simple(): raise ValueError("self must be simple") N = self.level() div = 1; mul = 0; mul_primes = [] if N % p != 0: div = 1; mul = 1 elif N.valuation(p) == 1: M = self.modular_symbols(sign=1) if is_Gamma0(M.group()): g = self.component_group_order(p) W = M.atkin_lehner_operator(p).matrix() cp = None if W == -1: # Frob acts trivially div = g; mul = g elif W == 1: # Frob acts by -1 n = g.valuation(2) if n <= 1: div = 2**n else: phi_X_invs = self._invariants_of_image_of_component_group_of_J0(p) m = max(1, len([z for z in phi_X_invs if z%2==0])) div = 2**m mul = 2**n else: raise NotImplementedError("Atkin-Lehner at p must act as a scalar") else: mul_primes = list(sorted(set([p] + [q for q in prime_range(2,2*self.dimension()+2)]))) div = Integer(div) mul = Integer(mul) mul_primes = tuple(mul_primes) self.__tamagawa_number_bounds[p] = (div, mul, mul_primes) return (div, mul, mul_primes) def brandt_module(self, p): """ Return the Brandt module at p that corresponds to self. This is the factor of the vector space on the ideal class set in an order of level N in the quaternion algebra ramified at p and infinity. INPUT: - p -- prime that exactly divides the level OUTPUT: - Brandt module space that corresponds to self. 
EXAMPLES:: sage: J0(43)[1].brandt_module(43) Subspace of dimension 2 of Brandt module of dimension 4 of level 43 of weight 2 over Rational Field sage: J0(43)[1].brandt_module(43).basis() ((1, 0, -1/2, -1/2), (0, 1, -1/2, -1/2)) sage: J0(43)[0].brandt_module(43).basis() ((0, 0, 1, -1),) sage: J0(35)[0].brandt_module(5).basis() ((1, 0, -1, 0),) sage: J0(35)[0].brandt_module(7).basis() ((1, -1, 1, -1),) """ try: return self.__brandt_module[p] except AttributeError: self.__brandt_module = {} except KeyError: pass p = Integer(p) if not is_Gamma0(self.group()): raise NotImplementedError("Brandt module only defined on Gamma0") if not p.is_prime(): raise ValueError("p must be a prime integer") if self.level().valuation(p) != 1: raise ValueError("p must exactly divide the level") M = self.level() / p from sage.modular.all import BrandtModule V = BrandtModule(p, M) # now cut out version of self in B S = self.modular_symbols(sign=1) B = S.hecke_bound() if self.dimension() <= 3: q = 2 while V.dimension() > self.dimension() and q <= B: f = S.hecke_polynomial(q) V = f(V.hecke_operator(q)).kernel() q = next_prime(q) if V.dimension() > self.dimension(): raise RuntimeError("unable to cut out Brandt module (got dimension %s instead of %s)"%(V.dimension(), self.dimension())) else: D = V.decomposition() D = [A for A in D if A.dimension() == self.dimension()] # now figure out which element of D is isomorphic to self. q = 2 while len(D) > 1 and q <= B: f = S.hecke_polynomial(q) D = [A for A in D if A.hecke_polynomial(q) == f] q = next_prime(q) if len(D) != 1: raise RuntimeError("unable to locate Brandt module (got %s candidates instead of 1)"%(len(D))) V = D[0] self.__brandt_module[p] = V return V def sqrt_poly(f): """ Return the square root of the polynomial `f`. .. note:: At some point something like this should be a member of the polynomial class. For now this is just used internally by some charpoly functions above. 
EXAMPLES:: sage: R.<x> = QQ[] sage: f = (x-1)*(x+2)*(x^2 + 1/3*x + 5) sage: f x^4 + 4/3*x^3 + 10/3*x^2 + 13/3*x - 10 sage: sage.modular.abvar.abvar.sqrt_poly(f^2) x^4 + 4/3*x^3 + 10/3*x^2 + 13/3*x - 10 sage: sage.modular.abvar.abvar.sqrt_poly(f) Traceback (most recent call last): ... ValueError: f must be a perfect square sage: sage.modular.abvar.abvar.sqrt_poly(2*f^2) Traceback (most recent call last): ... ValueError: f must be monic """ if not f.is_monic(): raise ValueError("f must be monic") try: return prod([g**Integer(e/Integer(2)) for g,e in f.factor()]) except TypeError: raise ValueError("f must be a perfect square") #################################################################################################### # Useful for decomposing exactly the sort of modular symbols spaces that come up here. from random import randrange from sage.rings.arith import next_prime def random_hecke_operator(M, t=None, p=2): """ Return a random Hecke operator acting on `M`, got by adding to `t` a random multiple of `T_p` INPUT: - ``M`` - modular symbols space - ``t`` - None or a Hecke operator - ``p`` - a prime OUTPUT: Hecke operator prime EXAMPLES:: sage: M = ModularSymbols(11).cuspidal_subspace() sage: t, p = sage.modular.abvar.abvar.random_hecke_operator(M) sage: p 3 sage: t, p = sage.modular.abvar.abvar.random_hecke_operator(M, t, p) sage: p 5 """ r = 0 while r == 0: r = randrange(1,p//2+1) * ZZ.random_element() t = (0 if t is None else t) + r*M.hecke_operator(p) return t, next_prime(p) def factor_new_space(M): """ Given a new space `M` of modular symbols, return the decomposition into simple of `M` under the Hecke operators. 
INPUT: - ``M`` - modular symbols space OUTPUT: list of factors EXAMPLES:: sage: M = ModularSymbols(37).cuspidal_subspace() sage: sage.modular.abvar.abvar.factor_new_space(M) [ Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 5 for Gamma_0(37) of weight 2 with sign 0 over Rational Field ] """ t = None; p = 2 for i in range(200): t, p = random_hecke_operator(M, t, p) f = t.charpoly() cube_free = True for _, e in f.factor(): if e > 2: cube_free = False break if cube_free: return t.decomposition() t, p = random_hecke_operator(M, t, p) raise RuntimeError("unable to factor new space -- this should not happen") # should never happen def factor_modsym_space_new_factors(M): """ Given an ambient modular symbols space, return complete factorization of it. INPUT: - ``M`` - modular symbols space OUTPUT: list of decompositions corresponding to each new space. EXAMPLES:: sage: M = ModularSymbols(33) sage: sage.modular.abvar.abvar.factor_modsym_space_new_factors(M) [[ Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 3 for Gamma_0(11) of weight 2 with sign 0 over Rational Field ], [ Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field ]] """ eps = M.character() K = eps.conductor() if eps is not None else 1 N = [M.modular_symbols_of_level(d).cuspidal_subspace().new_subspace() \ for d in M.level().divisors() if d%K == 0 and (d == 11 or d >= 13)] return [factor_new_space(A) for A in N] def simple_factorization_of_modsym_space(M, simple=True): """ Return factorization of `M`. If simple is False, return powers of simples. 
INPUT: - ``M`` - modular symbols space - ``simple`` - bool (default: True) OUTPUT: sequence EXAMPLES:: sage: M = ModularSymbols(33) sage: sage.modular.abvar.abvar.simple_factorization_of_modsym_space(M) [ (11, 0, 1, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field), (11, 0, 3, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field), (33, 0, 1, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field) ] sage: sage.modular.abvar.abvar.simple_factorization_of_modsym_space(M, simple=False) [ (11, 0, None, Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field), (33, 0, None, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field) ] """ D = [] N = M.level() for G in factor_modsym_space_new_factors(M): if len(G) > 0: # Compute the matrices of the degeneracy maps up. T = divisors(N//G[0].level()) degen = [G[0].ambient_module().degeneracy_map(N, t).matrix() for t in T] # Construct a matrix with rows the basis for all the factors # stacked on top of each other. We just multiply this by each # degeneracy matrix to get the basis for the images of the # factors at higher level. By doing matrix multiplies, we # save time over taking images of individual factors. matrix = G[0].basis_matrix() for A in G[1:]: matrix = matrix.stack(A.basis_matrix()) # Compute the actual images ims = [matrix * z for z in degen] # Construct the corresponding subspaces at higher level. 
j = 0 for (isog,A) in enumerate(G): d = A.dimension() if simple: for i in range(len(T)): V = ims[i].matrix_from_rows(range(j, j+d)).row_module() W = M.submodule(V, check=False) D.append( (A.level(), isog, T[i], W) ) else: V = sum(ims[i].matrix_from_rows(range(j, j+d)).row_module() for i in range(len(T))) W = M.submodule(V, check=False) D.append( (A.level(), isog, None, W)) j += d return Sequence(D, cr=True) def modsym_lattices(M, factors): """ Append lattice information to the output of simple_factorization_of_modsym_space. INPUT: - ``M`` - modular symbols spaces - ``factors`` - Sequence (simple_factorization_of_modsym_space) OUTPUT: sequence with more information for each factor (the lattice) EXAMPLES:: sage: M = ModularSymbols(33) sage: factors = sage.modular.abvar.abvar.simple_factorization_of_modsym_space(M, simple=False) sage: sage.modular.abvar.abvar.modsym_lattices(M, factors) [ (11, 0, None, Modular Symbols subspace of dimension 4 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field, Free module of degree 6 and rank 4 over Integer Ring Echelon basis matrix: [ 1 0 0 0 -1 2] [ 0 1 0 0 -1 1] [ 0 0 1 0 -2 2] [ 0 0 0 1 -1 -1]), (33, 0, None, Modular Symbols subspace of dimension 2 of Modular Symbols space of dimension 9 for Gamma_0(33) of weight 2 with sign 0 over Rational Field, Free module of degree 6 and rank 2 over Integer Ring Echelon basis matrix: [ 1 0 0 -1 0 0] [ 0 0 1 0 1 -1]) ] """ # 1. Change basis of everything to the ambient integral modular symbols space # 2. Clear denominator. # 3. 
Echelonize/saturate each factor if len(factors) == 0: return factors D = [] I = M.cuspidal_submodule().integral_structure().basis_matrix() A = factors[0][-1].basis_matrix() rows = [range(A.nrows())] for F in factors[1:]: mat = F[-1].basis_matrix() i = rows[-1][-1]+1 rows.append(range(i, i + mat.nrows())) A = A.stack(mat) X = I.solve_left(A) X, _ = X._clear_denom() for i, R in enumerate(rows): A = X.matrix_from_rows(R) A = copy(A.saturation()) A.echelonize() D.append(tuple(list(factors[i]) + [A.row_module()])) return Sequence(D, cr=True)
35.460549
253
0.556243
from sage.categories.all import ModularAbelianVarieties from sage.structure.sequence import Sequence, Sequence_generic from sage.structure.parent_base import ParentWithBase from morphism import HeckeOperator, Morphism, DegeneracyMap from torsion_subgroup import RationalTorsionSubgroup, QQbarTorsionSubgroup from finite_subgroup import (FiniteSubgroup_lattice, FiniteSubgroup, TorsionPoint) from cuspidal_subgroup import CuspidalSubgroup, RationalCuspidalSubgroup, RationalCuspSubgroup from sage.rings.all import (ZZ, QQ, QQbar, LCM, divisors, Integer, prime_range) from sage.rings.ring import is_Ring from sage.modules.free_module import is_FreeModule from sage.modular.arithgroup.all import is_CongruenceSubgroup, is_Gamma0, is_Gamma1, is_GammaH from sage.modular.modsym.all import ModularSymbols from sage.modular.modsym.space import ModularSymbolsSpace from sage.matrix.all import matrix, block_diagonal_matrix, identity_matrix from sage.modules.all import vector from sage.groups.all import AbelianGroup from sage.databases.cremona import cremona_letter_code from sage.misc.all import prod from copy import copy import homology import homspace import lseries def is_ModularAbelianVariety(x): return isinstance(x, ModularAbelianVariety_abstract) class ModularAbelianVariety_abstract(ParentWithBase): def __init__(self, groups, base_field, is_simple=None, newform_level=None, isogeny_number=None, number=None, check=True): if check: if not isinstance(groups, tuple): raise TypeError("groups must be a tuple") for G in groups: if not is_CongruenceSubgroup(G): raise TypeError("each element of groups must be a congruence subgroup") self.__groups = groups if is_simple is not None: self.__is_simple = is_simple if newform_level is not None: self.__newform_level = newform_level if number is not None: self.__degen_t = number if isogeny_number is not None: self.__isogeny_number = isogeny_number if check and not is_Ring(base_field) and base_field.is_field(): raise TypeError("base_field must be a 
field") ParentWithBase.__init__(self, base_field, category = ModularAbelianVarieties(base_field)) def groups(self): return self.__groups def lattice(self): raise NotImplementedError("BUG -- lattice method must be defined in derived class") def free_module(self): return self.lattice() def vector_space(self): try: return self.__vector_space except AttributeError: self.__vector_space = self.lattice().change_ring(QQ) return self.__vector_space def base_field(self): return self.base_ring() def base_extend(self, K): N = self.__newform_level if hasattr(self, '__newform_level') else None return ModularAbelianVariety(self.groups(), self.lattice(), K, newform_level=N) def __contains__(self, x): if not isinstance(x, TorsionPoint): return False if x.parent().abelian_variety().groups() != self.groups(): return False v = x.element() n = v.denominator() nLambda = self.ambient_variety().lattice().scale(n) return n*v in self.lattice() + nLambda def __cmp__(self, other): if not isinstance(other, ModularAbelianVariety_abstract): return cmp(type(self), type(other)) if self is other: return 0 c = cmp(self.groups(), other.groups()) if c: return c try: c = cmp(self.__newform_level, other.__newform_level) if c: return c except AttributeError: pass try: c = cmp(self.__isogeny_number, other.__isogeny_number) if c: return c except AttributeError: pass try: c = cmp(self.__degen_t, other.__degen_t) if c: return c except AttributeError: pass return cmp(self.lattice(), other.lattice()) def __radd__(self,other): if other == 0: return self raise TypeError def _repr_(self): field = '' if self.base_field() == QQ else ' over %s'%self.base_field() simple = self.is_simple(none_if_not_known=True) if simple and self.dimension() > 0: label = self.label() + ' ' else: label = '' simple = 'Simple a' if simple else 'A' if self.is_ambient(): return '%sbelian variety %s%s of dimension %s'%(simple, self._ambient_repr(), field, self.dimension()) if self.is_subvariety_of_ambient_jacobian(): sub = 'subvariety' 
else: sub = 'variety factor' return "%sbelian %s %sof dimension %s of %s%s"%( simple, sub, label, self.dimension(), self._ambient_repr(), field) def label(self): degen = str(self.degen_t()).replace(' ','') return '%s%s'%(self.newform_label(), degen) def newform_label(self): N, G = self.newform_level() if is_Gamma0(G): group = '' elif is_Gamma1(G): group = 'G1' elif is_GammaH(G): group = 'GH%s'%(str(G._generators_for_H()).replace(' ','')) return '%s%s%s'%(N, cremona_letter_code(self.isogeny_number()), group) def _isogeny_to_newform_abelian_variety(self): try: return self._newform_isogeny except AttributeError: pass if not self.is_simple(): raise ValueError("self is not simple") ls = [] t, N = self.decomposition()[0].degen_t() A = self.ambient_variety() for i in range(len(self.groups())): g = self.groups()[i] if N == g.level(): J = g.modular_abelian_variety() d = J.degeneracy_map(self.newform_level()[0], t) p = A.project_to_factor(i) mat = p.matrix() * d.matrix() if not (self.lattice().matrix() * mat).is_zero(): break from constructor import AbelianVariety Af = AbelianVariety(self.newform_label()) H = A.Hom(Af.ambient_variety()) m = H(Morphism(H, mat)) self._newform_isogeny = m.restrict_domain(self).restrict_codomain(Af) return self._newform_isogeny def _simple_isogeny(self, other): if not is_ModularAbelianVariety(other): raise TypeError("other must be a modular abelian variety") if not self.is_simple(): raise ValueError("self is not simple") if not other.is_simple(): raise ValueError("other is not simple") if self.groups() != other.groups(): raise NotImplementedError("_simple_isogeny only implemented when both abelian variety have the same ambient product Jacobian") if (self.newform_level() != other.newform_level()) or \ (self.isogeny_number() != other.isogeny_number()): raise ValueError("self and other do not correspond to the same newform") return other._isogeny_to_newform_abelian_variety().complementary_isogeny() * \ self._isogeny_to_newform_abelian_variety() def 
_Hom_(self, B, cat=None): if cat is None: K = self.base_field(); L = B.base_field() if K == L: F = K elif K == QQbar or L == QQbar: F = QQbar else: raise ValueError("please specify a category") cat = ModularAbelianVarieties(F) if self is B: return self.endomorphism_ring(cat) else: return homspace.Homspace(self, B, cat) def in_same_ambient_variety(self, other): if not is_ModularAbelianVariety(other): return False if self.groups() != other.groups(): return False if not self.is_subvariety_of_ambient_jacobian() or not other.is_subvariety_of_ambient_jacobian(): return False return True def modular_kernel(self): try: return self.__modular_kernel except AttributeError: _, f, _ = self.dual() G = f.kernel()[0] self.__modular_kernel = G return G def modular_degree(self): n = self.modular_kernel().order() return ZZ(n.sqrt()) def intersection(self, other): if isinstance(other, FiniteSubgroup): return other.intersection(self) if not self.in_same_ambient_variety(other): raise TypeError("other must be an abelian variety in the same ambient space") V = self.vector_space().intersection(other.vector_space()) if V.dimension() > 0: lattice = V.intersection(self.lattice() + other.lattice()) A = ModularAbelianVariety(self.groups(), lattice, self.base_field(), check=False) else: A = self.zero_subvariety() # 2. Compute the finite intersection group when the # intersection is finite, or a group that maps surjectively # onto the component group in general. # First we get basis matrices for the lattices that define # both abelian varieties. L = self.lattice().basis_matrix() M = other.lattice().basis_matrix() # Then we stack matrices and find a subset that forms a # basis. LM = L.stack(M) P = LM.pivot_rows() V = (ZZ**L.ncols()).span_of_basis([LM.row(p) for p in P]) S = (self.lattice() + other.lattice()).saturation() n = self.lattice().rank() # Finally we project onto the L factor. 
gens = [L.linear_combination_of_rows(v.list()[:n]) for v in V.coordinate_module(S).basis()] if A.dimension() > 0: finitegroup_base_field = QQbar else: finitegroup_base_field = self.base_field() G = self.finite_subgroup(gens, field_of_definition=finitegroup_base_field) return G, A def __add__(self, other): if not is_ModularAbelianVariety(other): if other == 0: return self raise TypeError("other must be a modular abelian variety") if self.groups() != other.groups(): raise ValueError("incompatible ambient Jacobians") L = self.vector_space() + other.vector_space() M = L.intersection(self._ambient_lattice()) return ModularAbelianVariety(self.groups(), M, self.base_field(), check=False) def direct_product(self, other): return self * other def __pow__(self, n): n = ZZ(n) if n < 0: raise ValueError("n must be nonnegative") if n == 0: return self.zero_subvariety() if n == 1: return self groups = self.groups() * n L = self.lattice().basis_matrix() lattice = block_diagonal_matrix([L]*n).row_module(ZZ) return ModularAbelianVariety(groups, lattice, self.base_field(), check=False) def __mul__(self, other): if not is_ModularAbelianVariety(other): raise TypeError("other must be a modular abelian variety") if other.base_ring() != self.base_ring(): raise TypeError("self and other must have the same base ring") groups = tuple(list(self.groups()) + list(other.groups())) lattice = self.lattice().direct_sum(other.lattice()) base_field = self.base_ring() return ModularAbelianVariety(groups, lattice, base_field, check=False) def quotient(self, other): return self.__div__(other) def __div__(self, other): if isinstance(other, FiniteSubgroup): if other.abelian_variety() != self: other = self.finite_subgroup(other) return self._quotient_by_finite_subgroup(other) elif isinstance(other, ModularAbelianVariety_abstract) and other.is_subvariety(self): return self._quotient_by_abelian_subvariety(other) else: raise TypeError("other must be a subgroup or abelian subvariety") def degeneracy_map(self, 
M_ls, t_ls): if not isinstance(M_ls, list): M_ls = [M_ls] if not isinstance(t_ls, list): t_ls = [t_ls] groups = self.groups() length = len(M_ls) if length != len(t_ls): raise ValueError("must have same number of Ms and ts") if length != len(groups): raise ValueError("must have same number of Ms and groups in ambient variety") for i in range(length): N = groups[i].level() if (M_ls[i]%N) and (N%M_ls[i]): raise ValueError("one level must divide the other in %s-th component"%i) if (( max(M_ls[i],N) // min(M_ls[i],N) ) % t_ls[i]): raise ValueError("each t must divide the quotient of the levels") ls = [ self.groups()[i].modular_abelian_variety().degeneracy_map(M_ls[i], t_ls[i]).matrix() for i in range(length) ] new_codomain = prod([ self.groups()[i]._new_group_from_level(M_ls[i]).modular_abelian_variety() for i in range(length) ]) M = block_diagonal_matrix(ls, subdivide=False) H = self.Hom(new_codomain) return H(DegeneracyMap(H, M.restrict_domain(self.lattice()), t_ls)) def _quotient_by_finite_subgroup(self, G): if G.order() == 1: return self L = self.lattice() + G.lattice() A = ModularAbelianVariety(self.groups(), L, G.field_of_definition()) M = L.coordinate_module(self.lattice()).basis_matrix() phi = self.Hom(A)(M) return A, phi def _quotient_by_abelian_subvariety(self, B): # We first compute the complement of B in self to get # an abelian variety C also in self such that self/B # is isogenous to C. This is the case because the # projection map pi:self --> C is surjective and has # kernel a finite extension of the abelian variety B. C = B.complement(self) # Now that we have C we need to find some abelian variety Q # isogenous to C and a map self --> Q whose kernel is exactly # B. We do this by computing the kernel of the map pi below, # which is an extension of the abelian variety B by a finite # group Phi of complements. Our strategy is to enlarge the # lattice that defines C so that the map pi below suddenly # has connected kernel. 
pi = self.projection(C) psi = pi.factor_out_component_group() Q = psi.codomain() return Q, psi def projection(self, A, check=True): if check and not A.is_subvariety(self): raise ValueError("A must be an abelian subvariety of self") W = A.complement(self) mat = A.lattice().basis_matrix().stack(W.lattice().basis_matrix()) # solve X * mat = self, i.e. write each row of self in terms of the # rows of mat. X = mat.solve_left(self.lattice().basis_matrix()) # The projection map is got from the first 2*dim(A) columns of X. X = X.matrix_from_columns(range(2*A.dimension())) X, _ = X._clear_denom() return Morphism(self.Hom(A), X) def project_to_factor(self, n): if not self.is_ambient(): raise ValueError("self is not ambient") if n >= len(self.groups()): raise IndexError("index (=%s) too large (max = %s)"%(n, len(self.groups()))) G = self.groups()[n] A = G.modular_abelian_variety() index = sum([ gp.modular_symbols().cuspidal_subspace().dimension() for gp in self.groups()[0:n] ]) H = self.Hom(A) mat = H.matrix_space()(0) mat.set_block(index, 0, identity_matrix(2*A.dimension())) return H(Morphism(H, mat)) def is_subvariety_of_ambient_jacobian(self): try: return self.__is_sub_ambient except AttributeError: self.__is_sub_ambient = (self.lattice().denominator() == 1) return self.__is_sub_ambient def ambient_variety(self): try: return self.__ambient_variety except AttributeError: A = ModularAbelianVariety(self.groups(), ZZ**(2*self._ambient_dimension()), self.base_field(), check=False) self.__ambient_variety = A return A def ambient_morphism(self): try: return self.__ambient_morphism except AttributeError: matrix,_ = self.lattice().basis_matrix()._clear_denom() phi = Morphism(self.Hom(self.ambient_variety()), matrix) self.__ambient_morphism = phi return phi def is_ambient(self): try: return self.__is_ambient except AttributeError: pass L = self.lattice() self.__is_ambient = (self.lattice() == ZZ**L.degree()) return self.__is_ambient def dimension(self): return self.lattice().rank() 
// 2 def rank(self): return self.lattice().rank() def degree(self): return self._ambient_dimension() def endomorphism_ring(self, category=None): try: return self.__endomorphism_ring except AttributeError: pass self.__endomorphism_ring = homspace.EndomorphismSubring(self, category=category) return self.__endomorphism_ring def sturm_bound(self): try: return self.__sturm_bound except AttributeError: B = max([G.sturm_bound(2) for G in self.groups()]) self.__sturm_bound = B return B def is_hecke_stable(self): try: return self._is_hecke_stable except AttributeError: pass #b = self.modular_symbols().sturm_bound() b = max([ m.sturm_bound() for m in self._ambient_modular_symbols_spaces() ]) J = self.ambient_variety() L = self.lattice() B = self.lattice().basis() for n in prime_range(1,b+1): Tn_matrix = J.hecke_operator(n).matrix() for v in B: if not (v*Tn_matrix in L): self._is_hecke_stable = False return False self._is_hecke_stable = True return True def is_subvariety(self, other): if not is_ModularAbelianVariety(other): return False if self is other: return True if self.groups() != other.groups(): return False L = self.lattice() M = other.lattice() # self is an abelian subvariety of other if and only if # 1. L is a subset of M (so the abelian subvarieties of # the ambient J are equal), and # 2. L is relatively saturated in M, i.e., M/L is # torsion free. if not L.is_submodule(M): return False # To determine if L is relatively saturated we compute the # intersection of M with (L tensor Q) and see if that equals # L. 
return L.change_ring(QQ).intersection(M) == L def change_ring(self, R): return ModularAbelianVariety(self.groups(), self.lattice(), R, check=False) def level(self): try: return self.__level except AttributeError: self.__level = LCM([G.level() for G in self.groups()]) return self.__level def newform_level(self, none_if_not_known=False): try: return self.__newform_level except AttributeError: if none_if_not_known: return None N = [A.newform_level() for A in self.decomposition()] level = LCM([z[0] for z in N]) groups = sorted(set([z[1] for z in N])) if len(groups) == 1: groups = groups[0] self.__newform_level = level, groups return self.__newform_level def zero_subvariety(self): try: return self.__zero_subvariety except AttributeError: lattice = (ZZ**(2*self.degree())).zero_submodule() A = ModularAbelianVariety(self.groups(), lattice, self.base_field(), is_simple=True, check=False) self.__zero_subvariety = A return A ############################################################################### # Properties of the ambient product of Jacobians ############################################################################### def _ambient_repr(self): v = [] for G in self.groups(): if is_Gamma0(G): v.append('J0(%s)'%G.level()) elif is_Gamma1(G): v.append('J1(%s)'%G.level()) elif is_GammaH(G): v.append('JH(%s,%s)'%(G.level(), G._generators_for_H())) return ' x '.join(v) def _ambient_latex_repr(self): v = [] for G in self.groups(): if is_Gamma0(G): v.append('J_0(%s)'%G.level()) elif is_Gamma1(G): v.append('J_1(%s)'%G.level()) elif is_GammaH(G): v.append('J_H(%s,%s)'%(G.level(), G._generators_for_H())) return ' \\times '.join(v) def _ambient_lattice(self): try: return self.__ambient_lattice except AttributeError: self.__ambient_lattice = ZZ**(2*self.degree()) return self.__ambient_lattice def _ambient_modular_symbols_spaces(self): if not self.is_ambient(): return self.ambient_variety()._ambient_modular_symbols_spaces() try: return self.__ambient_modular_symbols_spaces except 
AttributeError: X = tuple([ModularSymbols(G).cuspidal_subspace() for G in self.groups()]) self.__ambient_modular_symbols_spaces = X return X def _ambient_modular_symbols_abvars(self): if not self.is_ambient(): return self.ambient_variety()._ambient_modular_symbols_abvars() try: return self.__ambient_modular_symbols_abvars except AttributeError: X = tuple([ModularAbelianVariety_modsym(M) for M in self._ambient_modular_symbols_spaces()]) self.__ambient_modular_symbols_abvars = X return X def _ambient_dimension(self): try: return self.__ambient_dimension except AttributeError: d = sum([G.dimension_cusp_forms(2) for G in self.groups()], Integer(0)) self.__ambient_dimension = d return d def _ambient_hecke_matrix_on_modular_symbols(self, n): if not self.is_ambient(): return self.ambient_variety()._ambient_hecke_matrix_on_modular_symbols(n) try: return self.__ambient_hecke_matrix_on_modular_symbols[n] except AttributeError: self.__ambient_hecke_matrix_on_modular_symbols = {} except KeyError: pass M = self._ambient_modular_symbols_spaces() if len(M) == 0: return matrix(QQ,0) T = M[0].hecke_matrix(n) for i in range(1,len(M)): T = T.block_sum(M[i].hecke_matrix(n)) self.__ambient_hecke_matrix_on_modular_symbols[n] = T return T ############################################################################### # Rational and Integral Homology ############################################################################### def _rational_homology_space(self): try: return self.__rational_homology_space except AttributeError: HQ = self.rational_homology().free_module() self.__rational_homology_space = HQ return HQ def homology(self, base_ring=ZZ): try: return self._homology[base_ring] except AttributeError: self._homology = {} except KeyError: pass if base_ring == ZZ: H = homology.IntegralHomology(self) elif base_ring == QQ: H = homology.RationalHomology(self) else: H = homology.Homology_over_base(self, base_ring) self._homology[base_ring] = H return H def integral_homology(self): 
return self.homology(ZZ) def rational_homology(self): return self.homology(QQ) ############################################################################### # L-series ############################################################################### def lseries(self): try: return self.__lseries except AttributeError: pass self.__lseries = lseries.Lseries_complex(self) return self.__lseries def padic_lseries(self, p): p = int(p) try: return self.__lseries_padic[p] except AttributeError: self.__lseries_padic = {} except KeyError: pass self.__lseries_padic[p] = lseries.Lseries_padic(self, p) return self.__lseries_padic[p] ############################################################################### # Hecke Operators ############################################################################### def hecke_operator(self, n): try: return self._hecke_operator[n] except AttributeError: self._hecke_operator = {} except KeyError: pass Tn = HeckeOperator(self, n) self._hecke_operator[n] = Tn return Tn def hecke_polynomial(self, n, var='x'): n = Integer(n) if n <= 0: raise ValueError("n must be a positive integer") key = (n,var) try: return self.__hecke_polynomial[key] except AttributeError: self.__hecke_polynomial = {} except KeyError: pass f = self._compute_hecke_polynomial(n, var=var) self.__hecke_polynomial[key] = f return f def _compute_hecke_polynomial(self, n, var='x'): return self.hecke_operator(n).charpoly(var=var) def _integral_hecke_matrix(self, n): A = self._ambient_hecke_matrix_on_modular_symbols(n) return A.restrict(self.lattice()) def _rational_hecke_matrix(self, n): return self._integral_hecke_matrix(n) ############################################################################### # Subgroups ############################################################################### def qbar_torsion_subgroup(self): try: return self.__qbar_torsion_subgroup except AttributeError: G = QQbarTorsionSubgroup(self) self.__qbar_torsion_subgroup = G return G def 
rational_torsion_subgroup(self): try: return self.__rational_torsion_subgroup except AttributeError: T = RationalTorsionSubgroup(self) self.__rational_torsion_subgroup = T return T def cuspidal_subgroup(self): try: return self._cuspidal_subgroup except AttributeError: if not self.is_subvariety_of_ambient_jacobian(): raise ValueError("self must be a subvariety of the ambient variety") if self.is_ambient(): T = self._ambient_cuspidal_subgroup(rational_only=False) else: T = self.ambient_variety().cuspidal_subgroup().intersection(self) self._cuspidal_subgroup = T return T def _ambient_cuspidal_subgroup(self, rational_only=False, rational_subgroup=False): n = 2 * self.degree() i = 0 lattice = (ZZ**n).zero_submodule() if rational_subgroup: CS = RationalCuspidalSubgroup elif rational_only: CS = RationalCuspSubgroup else: CS = CuspidalSubgroup for J in self._ambient_modular_symbols_abvars(): L = CS(J).lattice().basis_matrix() Z_left = matrix(QQ,L.nrows(),i) Z_right = matrix(QQ,L.nrows(),n-i-L.ncols()) lattice += (Z_left.augment(L).augment(Z_right)).row_module(ZZ) i += L.ncols() return FiniteSubgroup_lattice(self, lattice, field_of_definition=self.base_field()) def shimura_subgroup(self): N=self.level() J=self.ambient_variety() for p in prime_range(100): if N%p!=0: break phi=J.degeneracy_map(N*p,1) phip=J.degeneracy_map(N*p,p) SIG = (phi-phip).kernel() assert SIG[1].dimension()==0, "The intersection should have dimension 0" return self.intersection(SIG[0]) def rational_cusp_subgroup(self): try: return self._rational_cusp_subgroup except AttributeError: if not self.is_subvariety_of_ambient_jacobian(): raise ValueError("self must be a subvariety of the ambient variety") if self.is_ambient(): T = self._ambient_cuspidal_subgroup(rational_only=True) else: T = self.ambient_variety().rational_cusp_subgroup().intersection(self) self._rational_cusp_subgroup = T return T def rational_cuspidal_subgroup(self): try: return self._rational_cuspidal_subgroup except AttributeError: if not 
self.is_subvariety_of_ambient_jacobian(): raise ValueError("self must be a subvariety of the ambient variety") if self.is_ambient(): T = self._ambient_cuspidal_subgroup(rational_subgroup=True) else: T = self.ambient_variety().rational_cuspidal_subgroup().intersection(self) self._rational_cuspidal_subgroup = T return T def zero_subgroup(self): try: return self.__zero_subgroup except AttributeError: G = FiniteSubgroup_lattice(self, self.lattice(), field_of_definition=QQ) self.__zero_subgroup = G return G def finite_subgroup(self, X, field_of_definition=None, check=True): if isinstance(X, (list, tuple)): X = self._ambient_lattice().span(X) elif isinstance(X, FiniteSubgroup): if field_of_definition is None: field_of_definition = X.field_of_definition() A = X.abelian_variety() if A.groups() != self.groups(): raise ValueError("ambient product Jacobians must be equal") if A == self: X = X.lattice() else: if X.is_subgroup(self): X = (X.lattice() + self.lattice()).intersection(self.vector_space()) else: raise ValueError("X must be a subgroup of self.") if field_of_definition is None: field_of_definition = QQbar else: field_of_definition = field_of_definition return FiniteSubgroup_lattice(self, X, field_of_definition=field_of_definition, check=check) def torsion_subgroup(self, n): try: return self.__torsion_subgroup[n] except KeyError: pass except AttributeError: self.__torsion_subgroup = {} lattice = self.lattice().scale(1/Integer(n)) H = FiniteSubgroup_lattice(self, lattice, field_of_definition=self.base_field()) self.__torsion_subgroup[n] = H return H ############################################################################### # Decomposition ############################################################################### def degen_t(self, none_if_not_known=False): try: return self.__degen_t except AttributeError: if none_if_not_known: return None elif self.dimension() > 0 and self.is_simple(): self.__degen_t = self.decomposition()[0].degen_t() return self.__degen_t 
raise ValueError("self must be simple") def isogeny_number(self, none_if_not_known=False): try: return self.__isogeny_number except AttributeError: if none_if_not_known: return None elif self.is_simple(): self.__isogeny_number = self.decomposition()[0].isogeny_number() return self.__isogeny_number else: raise ValueError("self must be simple") def is_simple(self, none_if_not_known=False): try: return self.__is_simple except AttributeError: if none_if_not_known: return None self.__is_simple = len(self.decomposition()) <= 1 return self.__is_simple def decomposition(self, simple=True, bound=None): try: return self.__decomposition[(simple, bound)] except KeyError: pass except AttributeError: self.__decomposition = {} if self.is_ambient(): # Decompose each piece, then lift if len(self.groups()) == 0: D = [] elif len(self.groups()) == 1: D = ModularAbelianVariety_modsym(ModularSymbols(self.groups()[0], sign=0).cuspidal_submodule()).decomposition(simple=simple, bound=bound) else: # Decompose each ambient modular symbols factor. #X = [ModularAbelianVariety_modsym(ModularSymbols(G,sign=0).cuspidal_submodule()) for G in self.groups()] from abvar_ambient_jacobian import ModAbVar_ambient_jacobian_class X = [ModAbVar_ambient_jacobian_class(G) for G in self.groups()] E = [A.decomposition(simple=simple, bound=bound) for A in X] i = 0 n = 2*self.dimension() # Now lift each factor of the decomposition to self. 
G = self.groups() D = [] K = self.base_field() for C in E: for B in C: L = B.lattice().basis_matrix() if simple: is_simple = True else: is_simple = None lattice = matrix(QQ,L.nrows(),i).augment(L).augment(matrix(QQ,L.nrows(),n-i-L.ncols())).row_module(ZZ) D.append(ModularAbelianVariety(G, lattice, K, is_simple=is_simple, newform_level=B.newform_level(), isogeny_number=B.isogeny_number(none_if_not_known=True), number=B.degen_t(none_if_not_known=True))) if len(C) > 0: i += L.ncols() elif not simple: # In this case decompose the ambient space into powers of # simple abelian varieties (i.e. with # \code{simple=False)}, and then intersect the lattice # corresponding to self with each of these factors. D = [] L = self.lattice() groups = self.groups() K = self.base_ring() for X in self.ambient_variety().decomposition(simple=False): lattice = L.intersection(X.vector_space()) if lattice.rank() > 0: the_factor = ModularAbelianVariety(groups, lattice, K, is_simple=X.is_simple(none_if_not_known=True), newform_level=X.newform_level(), isogeny_number=X.isogeny_number(none_if_not_known=True), number=X.degen_t(none_if_not_known=True)) D.append(the_factor) else: # See the documentation for self._classify_ambient_factors # in order to understand what we're doing here. 
I_F, I_E, X = self._classify_ambient_factors(simple=simple, bound=bound) Z_E = [X[i] for i in I_E] Z_F = [X[i] for i in I_F] F = sum(Z_F, self.zero_subvariety()) if F == self: D = Z_F else: E = sum(Z_E, self.zero_subvariety()) L_B = self.lattice() L_E = E.lattice() L_F = F.lattice() decomp_matrix = L_E.basis_matrix().stack(L_F.basis_matrix()) X = decomp_matrix.solve_left(L_B.basis_matrix()) n = X.ncols() proj = X.matrix_from_columns(range(n-L_F.rank(), n)) section = proj**(-1) D = [] groups = self.groups() K = self.base_field() for A in Z_F: L_A = A.lattice() M = L_F.coordinate_module(L_A).basis_matrix() * section M, _ = M._clear_denom() M = M.saturation() M = M * L_B.basis_matrix() lattice = M.row_module(ZZ) the_factor = ModularAbelianVariety(groups, lattice, K, is_simple=True, newform_level=A.newform_level(), isogeny_number=A.isogeny_number(), number=A.degen_t()) D.append(the_factor) if isinstance(D, Sequence_generic): S = D else: D.sort() S = Sequence(D, immutable=True, cr=True, universe=self.category()) self.__decomposition[(simple, bound)] = S return S def _classify_ambient_factors(self, simple=True, bound=None): amb = self.ambient_variety() S = self.vector_space() X = amb.decomposition(simple=simple, bound=bound) IN = []; OUT = [] i = 0 V = 0 last_dimension = 0 for j in range(len(X)): V += X[j].vector_space() d = S.intersection(V).dimension() if d > last_dimension: IN.append(j) last_dimension = d else: OUT.append(j) return IN, OUT, X def _isogeny_to_product_of_simples(self): try: return self._simple_product_isogeny except AttributeError: pass D = self.decomposition() dest = prod([d._isogeny_to_newform_abelian_variety().image() for d in D]) A = self.ambient_variety() dim = sum([d.dimension() for d in D]) proj_ls = [ A.projection(factor) for factor in D ] mat = matrix(ZZ, 2*self.dimension(), 2*dim) ind = 0 for i in range(len(D)): factor = D[i] proj = proj_ls[i] mat.set_block(0, ind, proj.restrict_domain(self).matrix()) ind += 2*factor.dimension() H = 
self.Hom(dest) self._simple_product_isogeny = H(Morphism(H, mat)) return self._simple_product_isogeny def _isogeny_to_product_of_powers(self): try: return self._simple_power_product_isogeny except AttributeError: pass D = self.decomposition(simple=False) A = self.ambient_variety() proj_ls = [ A.projection(factor) for factor in D ] dest = prod([phi.image() for phi in proj_ls]) dim = sum([d.dimension() for d in D]) mat = matrix(ZZ, 2*self.dimension(), 2*dim) ind = 0 for i in range(len(D)): factor = D[i] proj = proj_ls[i] mat.set_block(0, ind, proj.restrict_domain(self).matrix()) ind += 2*factor.dimension() H = self.Hom(dest) self._simple_power_product_isogeny = H(Morphism(H, mat)) return self._simple_power_product_isogeny def complement(self, A=None): try: C = self.__complement except AttributeError: pass if self.dimension() is 0: if A is None: C = self.ambient_variety() else: C = A elif A is not None and self.dimension() == A.dimension(): if not self.is_subvariety(A): raise ValueError("self must be a subvariety of A") C = self.zero_subvariety() else: _, factors, X = self._classify_ambient_factors() D = [X[i] for i in factors] C = sum(D) if C: self.__complement = C if A is not None: C = C.intersection(A)[1] else: C = self.zero_subvariety() return C def dual(self): try: return self.__dual except AttributeError: if not self.is_subvariety_of_ambient_jacobian(): raise NotImplementedError("dual not implemented unless abelian variety is a subvariety of the ambient Jacobian product") if not self._complement_shares_no_factors_with_same_label(): raise NotImplementedError("dual not implemented unless complement shares no simple factors with self.") C = self.complement() Q, phi = self.ambient_variety().quotient(C) psi = self.ambient_morphism() self.__dual = Q, phi*psi, phi return self.__dual def _factors_with_same_label(self, other): if not isinstance(other, ModularAbelianVariety_abstract): raise TypeError("other must be an abelian variety") D = self.decomposition() C = 
set([A.newform_label() for A in other.decomposition()]) Z = [] for X in D: lbl = X.newform_label() if lbl in C: Z.append(X) C.remove(lbl) Z.sort() return Z def _complement_shares_no_factors_with_same_label(self): try: return self.__complement_shares except AttributeError: t = len(self._factors_with_same_label(self.complement())) == 0 self.__complement_shares = t return t def __getitem__(self, i): return self.decomposition()[i] class ModularAbelianVariety(ModularAbelianVariety_abstract): def __init__(self, groups, lattice=None, base_field=QQ, is_simple=None, newform_level=None, isogeny_number=None, number=None, check=True): ModularAbelianVariety_abstract.__init__(self, groups, base_field, is_simple=is_simple, newform_level=newform_level, isogeny_number=isogeny_number, number=number, check=check) if lattice is None: lattice = ZZ**(2*self._ambient_dimension()) if check: n = self._ambient_dimension() if not is_FreeModule(lattice): raise TypeError("lattice must be a free module") if lattice.base_ring() != ZZ: raise TypeError("lattice must be over ZZ") if lattice.degree() != 2*n: raise ValueError("lattice must have degree 2*n (=%s)"%(2*n)) if not lattice.saturation().is_submodule(lattice): raise ValueError("lattice must be full") self.__lattice = lattice def lattice(self): return self.__lattice class ModularAbelianVariety_modsym_abstract(ModularAbelianVariety_abstract): def _modular_symbols(self): raise NotImplementedError("bug -- must define this") def __add__(self, other): if not is_ModularAbelianVariety(other): if other == 0: return self raise TypeError("sum not defined") if not isinstance(other, ModularAbelianVariety_modsym_abstract): return ModularAbelianVariety_abstract.__add__(self, other) if self.groups() != other.groups(): raise TypeError("sum not defined since ambient spaces different") M = self.modular_symbols() + other.modular_symbols() return ModularAbelianVariety_modsym(M) def groups(self): return (self._modular_symbols().group(), ) def lattice(self): try: 
return self.__lattice except AttributeError: M = self.modular_symbols() S = M.ambient_module().cuspidal_submodule() if M.dimension() == S.dimension(): L = ZZ**M.dimension() else: K0 = M.integral_structure() K1 = S.integral_structure() L = K1.coordinate_module(K0) self.__lattice = L return self.__lattice def _set_lattice(self, lattice): self.__lattice = lattice def modular_symbols(self, sign=0): M = self._modular_symbols().modular_symbols_of_sign(sign) if (sign != 0 and M.dimension() != self.dimension()) or (sign == 0 and M.dimension() != 2*self.dimension()): raise RuntimeError("unable to determine sign (=%s) space of modular symbols"%sign) return M def _compute_hecke_polynomial(self, n, var='x'): return sqrt_poly(self.modular_symbols().hecke_polynomial(n, var)) def _integral_hecke_matrix(self, n, sign=0): return self.modular_symbols(sign).integral_hecke_matrix(n) def _rational_hecke_matrix(self, n, sign=0): return self._integral_hecke_matrix(n, sign=sign).change_ring(QQ) def group(self): return self.modular_symbols().group() def is_subvariety(self, other): if not is_ModularAbelianVariety(other): return False if not isinstance(other, ModularAbelianVariety_modsym_abstract): return ModularAbelianVariety_abstract.is_subvariety(self, other) return self.modular_symbols().is_submodule(other.modular_symbols()) def is_ambient(self): return self.degree() == self.dimension() def dimension(self): try: return self._dimension except AttributeError: M = self._modular_symbols() if M.sign() == 0: d = M.dimension() // 2 else: d = M.dimension() self._dimension = d return d def new_subvariety(self, p=None): try: return self.__new_subvariety[p] except AttributeError: self.__new_subvariety = {} except KeyError: pass A = self.modular_symbols() N = A.new_submodule(p=p) B = ModularAbelianVariety_modsym(N) self.__new_subvariety[p] = B return B def old_subvariety(self, p=None): try: return self.__old_subvariety[p] except AttributeError: self.__old_subvariety = {} except KeyError: pass A = 
self.modular_symbols() N = A.old_submodule(p=p) B = ModularAbelianVariety_modsym(N) self.__old_subvariety[p] = B return B def decomposition(self, simple=True, bound=None): try: return self.__decomposition[(simple, bound)] except KeyError: pass except AttributeError: self.__decomposition = {} if not self.is_ambient(): S = ModularAbelianVariety_abstract.decomposition(self, simple=simple, bound=bound) else: A = self.modular_symbols() amb = A.ambient_module() G = amb.group() S = amb.cuspidal_submodule().integral_structure() if simple: M = A.level() D = [] for N in reversed(divisors(M)): if N > 1: isogeny_number = 0 A = amb.modular_symbols_of_level(N).cuspidal_subspace().new_subspace() if bound is None: X = factor_new_space(A) else: X = A.decomposition(bound = bound) for B in X: for t in divisors(M//N): D.append(ModularAbelianVariety_modsym(B.degeneracy_map(M, t).image(), is_simple=True, newform_level=(N, G), isogeny_number=isogeny_number, number=(t,M))) isogeny_number += 1 elif A == amb.cuspidal_submodule(): D = [ModularAbelianVariety_modsym(B) for B in A.decomposition(bound = bound)] else: D = ModularAbelianVariety_abstract.decomposition(self, simple=simple, bound=bound) D.sort() S = Sequence(D, immutable=True, cr=True, universe=self.category()) self.__decomposition[(simple, bound)] = S return S class ModularAbelianVariety_modsym(ModularAbelianVariety_modsym_abstract): def __init__(self, modsym, lattice=None, newform_level=None, is_simple=None, isogeny_number=None, number=None, check=True): if check: if not isinstance(modsym, ModularSymbolsSpace): raise TypeError("modsym must be a modular symbols space") if modsym.sign() != 0: raise TypeError("modular symbols space must have sign 0") if not modsym.is_cuspidal(): raise ValueError("modsym must be cuspidal") ModularAbelianVariety_abstract.__init__(self, (modsym.group(), ), modsym.base_ring(), newform_level=newform_level, is_simple=is_simple, isogeny_number=isogeny_number, number=number, check=check) if lattice is not 
None: self._set_lattice(lattice) self.__modsym = modsym def _modular_symbols(self): return self.__modsym def component_group_order(self, p): if not self.is_simple(): raise ValueError("self must be simple") p = Integer(p) if not p.is_prime(): raise ValueError("p must be a prime integer") try: return self.__component_group[p][0] except AttributeError: self.__component_group = {} except KeyError: pass if self.level() % p != 0: one = Integer(1) self.__component_group[p] = (one,one,one) return one if not is_Gamma0(self.group()): raise NotImplementedError("computation of component group not implemented when group isn't Gamma0") if self.level() % (p*p) == 0: raise NotImplementedError("computation of component group not implemented when p^2 divides the level") # 1. Compute factor of Brandt module space, and put integral structure on it. # TODO -- in case self.level() is prime, should use # supersingular module instead for massive speedup... Of # course, then one can just use Emertons theorem that the # component group order equals the torsion order, and avoid # all of this! XI = self.brandt_module(p) Y = XI.ambient_module() n = Y.dimension() # X_ZZ is the submodule of degree 0 divisors M = ZZ**n deg_zero = [] for k in range(1,n): v = vector(ZZ, n) v[0] = 1 v[k] = -1 deg_zero.append(v) X_ZZ = M.span(deg_zero, ZZ) XI_ZZ = XI.free_module().intersection(M) # 2. Compute the map alpha: X --> Hom(X[I],Z) over ZZ # todo -- this could be done more quickly with a clever matrix multiply B = [XI(v) for v in XI_ZZ.basis()] mat = [] for v in M.basis(): w = Y(v) mat.append([w.monodromy_pairing(b) for b in B]) monodromy = matrix(ZZ, mat) alpha = X_ZZ.basis_matrix().change_ring(ZZ) * monodromy # 3. 
Compute invariants: # * Phi_X = #coker(alpha) # * m_X = #(alpha(X)/alpha(X[I])) alphaX = alpha.row_module() Phi_X_invariants = alphaX.basis_matrix().change_ring(ZZ).elementary_divisors() Phi_X = prod(Phi_X_invariants + [Integer(1)]) W = alphaX.span([b*monodromy for b in XI_ZZ.basis()], ZZ) m_X = Integer(W.index_in(alphaX)) # 4. Compute the modular degree moddeg = self.modular_degree() # 5. Obtain the component group order using Theorem 1 of [Kohel-Stein] Phi = Phi_X * moddeg / m_X # 6. Record the answer self.__component_group[p] = (Phi, Phi_X_invariants, m_X) return Phi def _invariants_of_image_of_component_group_of_J0(self, p): self.component_group_order(p) return list(self.__component_group[p][1]) # make a copy def tamagawa_number(self, p): try: return self.__tamagawa_number[p] except AttributeError: self.__tamagawa_number = {} except KeyError: pass if not self.is_simple(): raise ValueError("self must be simple") try: g = self.component_group_order(p) except NotImplementedError: raise NotImplementedError("Tamagawa number can't be determined using known algorithms, so consider using the tamagawa_number_bounds function instead") div, mul, mul_primes = self.tamagawa_number_bounds(p) if div == mul: cp = div else: raise NotImplementedError("the Tamagawa number at %s is a power of 2, but the exact power can't be determined using known algorithms. 
Consider using the tamagawa_number_bounds function instead."%p) self.__tamagawa_number[p] = cp return cp def tamagawa_number_bounds(self, p): try: return self.__tamagawa_number_bounds[p] except AttributeError: self.__tamagawa_number_bounds = {} except KeyError: pass if not self.is_simple(): raise ValueError("self must be simple") N = self.level() div = 1; mul = 0; mul_primes = [] if N % p != 0: div = 1; mul = 1 elif N.valuation(p) == 1: M = self.modular_symbols(sign=1) if is_Gamma0(M.group()): g = self.component_group_order(p) W = M.atkin_lehner_operator(p).matrix() cp = None if W == -1: # Frob acts trivially div = g; mul = g elif W == 1: # Frob acts by -1 n = g.valuation(2) if n <= 1: div = 2**n else: phi_X_invs = self._invariants_of_image_of_component_group_of_J0(p) m = max(1, len([z for z in phi_X_invs if z%2==0])) div = 2**m mul = 2**n else: raise NotImplementedError("Atkin-Lehner at p must act as a scalar") else: mul_primes = list(sorted(set([p] + [q for q in prime_range(2,2*self.dimension()+2)]))) div = Integer(div) mul = Integer(mul) mul_primes = tuple(mul_primes) self.__tamagawa_number_bounds[p] = (div, mul, mul_primes) return (div, mul, mul_primes) def brandt_module(self, p): try: return self.__brandt_module[p] except AttributeError: self.__brandt_module = {} except KeyError: pass p = Integer(p) if not is_Gamma0(self.group()): raise NotImplementedError("Brandt module only defined on Gamma0") if not p.is_prime(): raise ValueError("p must be a prime integer") if self.level().valuation(p) != 1: raise ValueError("p must exactly divide the level") M = self.level() / p from sage.modular.all import BrandtModule V = BrandtModule(p, M) # now cut out version of self in B S = self.modular_symbols(sign=1) B = S.hecke_bound() if self.dimension() <= 3: q = 2 while V.dimension() > self.dimension() and q <= B: f = S.hecke_polynomial(q) V = f(V.hecke_operator(q)).kernel() q = next_prime(q) if V.dimension() > self.dimension(): raise RuntimeError("unable to cut out Brandt 
module (got dimension %s instead of %s)"%(V.dimension(), self.dimension())) else: D = V.decomposition() D = [A for A in D if A.dimension() == self.dimension()] # now figure out which element of D is isomorphic to self. q = 2 while len(D) > 1 and q <= B: f = S.hecke_polynomial(q) D = [A for A in D if A.hecke_polynomial(q) == f] q = next_prime(q) if len(D) != 1: raise RuntimeError("unable to locate Brandt module (got %s candidates instead of 1)"%(len(D))) V = D[0] self.__brandt_module[p] = V return V def sqrt_poly(f): if not f.is_monic(): raise ValueError("f must be monic") try: return prod([g**Integer(e/Integer(2)) for g,e in f.factor()]) except TypeError: raise ValueError("f must be a perfect square") #################################################################################################### # Useful for decomposing exactly the sort of modular symbols spaces that come up here. from random import randrange from sage.rings.arith import next_prime def random_hecke_operator(M, t=None, p=2): r = 0 while r == 0: r = randrange(1,p//2+1) * ZZ.random_element() t = (0 if t is None else t) + r*M.hecke_operator(p) return t, next_prime(p) def factor_new_space(M): t = None; p = 2 for i in range(200): t, p = random_hecke_operator(M, t, p) f = t.charpoly() cube_free = True for _, e in f.factor(): if e > 2: cube_free = False break if cube_free: return t.decomposition() t, p = random_hecke_operator(M, t, p) raise RuntimeError("unable to factor new space -- this should not happen") # should never happen def factor_modsym_space_new_factors(M): eps = M.character() K = eps.conductor() if eps is not None else 1 N = [M.modular_symbols_of_level(d).cuspidal_subspace().new_subspace() \ for d in M.level().divisors() if d%K == 0 and (d == 11 or d >= 13)] return [factor_new_space(A) for A in N] def simple_factorization_of_modsym_space(M, simple=True): D = [] N = M.level() for G in factor_modsym_space_new_factors(M): if len(G) > 0: # Compute the matrices of the degeneracy maps up. 
T = divisors(N//G[0].level()) degen = [G[0].ambient_module().degeneracy_map(N, t).matrix() for t in T] # Construct a matrix with rows the basis for all the factors # stacked on top of each other. We just multiply this by each # degeneracy matrix to get the basis for the images of the # factors at higher level. By doing matrix multiplies, we # save time over taking images of individual factors. matrix = G[0].basis_matrix() for A in G[1:]: matrix = matrix.stack(A.basis_matrix()) # Compute the actual images ims = [matrix * z for z in degen] # Construct the corresponding subspaces at higher level. j = 0 for (isog,A) in enumerate(G): d = A.dimension() if simple: for i in range(len(T)): V = ims[i].matrix_from_rows(range(j, j+d)).row_module() W = M.submodule(V, check=False) D.append( (A.level(), isog, T[i], W) ) else: V = sum(ims[i].matrix_from_rows(range(j, j+d)).row_module() for i in range(len(T))) W = M.submodule(V, check=False) D.append( (A.level(), isog, None, W)) j += d return Sequence(D, cr=True) def modsym_lattices(M, factors): # 1. Change basis of everything to the ambient integral modular symbols space # 2. Clear denominator. # 3. Echelonize/saturate each factor if len(factors) == 0: return factors D = [] I = M.cuspidal_submodule().integral_structure().basis_matrix() A = factors[0][-1].basis_matrix() rows = [range(A.nrows())] for F in factors[1:]: mat = F[-1].basis_matrix() i = rows[-1][-1]+1 rows.append(range(i, i + mat.nrows())) A = A.stack(mat) X = I.solve_left(A) X, _ = X._clear_denom() for i, R in enumerate(rows): A = X.matrix_from_rows(R) A = copy(A.saturation()) A.echelonize() D.append(tuple(list(factors[i]) + [A.row_module()])) return Sequence(D, cr=True)
true
true
f70c35c3bf5d9fb9d99edee5fbf24e31338a5077
16,625
py
Python
test/run-spec-test.py
losfair/wasm3
7a47041e3af6f69f5e2b469aa9dab301d5878f50
[ "MIT" ]
null
null
null
test/run-spec-test.py
losfair/wasm3
7a47041e3af6f69f5e2b469aa9dab301d5878f50
[ "MIT" ]
null
null
null
test/run-spec-test.py
losfair/wasm3
7a47041e3af6f69f5e2b469aa9dab301d5878f50
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # Author: Volodymyr Shymanskyy # Usage: # ./run-spec-test.py # ./run-spec-test.py ./core/i32.json # ./run-spec-test.py ./core/float_exprs.json --line 2070 # ./run-spec-test.py ./proposals/tail-call/*.json # ./run-spec-test.py --exec ../build-custom/wasm3 # ./run-spec-test.py --engine "wasmer run" --exec ../build-wasi/wasm3.wasm # ./run-spec-test.py --engine "wasmer run --backend=llvm" --exec ../build-wasi/wasm3.wasm # # TODO # - Get more tests from: https://github.com/microsoft/ChakraCore/tree/master/test/WasmSpec # - Fix "Empty Stack" check # - Check Canonical NaN and Arithmetic NaN separately # - Fix imports.wast import argparse import os, sys, glob, time import subprocess import json import re import struct import math import pathlib scriptDir = os.path.dirname(os.path.abspath(sys.argv[0])) sys.path.append(os.path.join(scriptDir, '..', 'extra')) from testutils import * from pprint import pprint # # Args handling # parser = argparse.ArgumentParser() parser.add_argument("--exec", metavar="<interpreter>", default="../build/wasm3") parser.add_argument("--engine", metavar="<engine>") parser.add_argument("--timeout", type=int, default=30) parser.add_argument("--line", metavar="<source line>", type=int) parser.add_argument("--all", action="store_true") parser.add_argument("--show-logs", action="store_true") parser.add_argument("--format", choices=["raw", "hex", "fp"], default="fp") parser.add_argument("-v", "--verbose", action="store_true") parser.add_argument("-s", "--silent", action="store_true") parser.add_argument("file", nargs='*') args = parser.parse_args() if args.line: args.show_logs = True # # Utilities # log = open("spec-test.log","w+") log.write("======================\n") def warning(msg): log.write("Warning: " + msg + "\n") log.flush() if args.verbose: print(f"{ansi.WARNING}Warning:{ansi.ENDC} {msg}") def fatal(msg): log.write("Fatal: " + msg + "\n") log.flush() print(f"{ansi.FAIL}Fatal:{ansi.ENDC} {msg}") sys.exit(1) def 
binaryToFloat(num, t): if t == "f32": return struct.unpack('!f', struct.pack('!L', int(num)))[0] elif t == "f64": return struct.unpack('!d', struct.pack('!Q', int(num)))[0] else: fatal(f"Unknown type '{t}'") def escape(s): c = ord(s) if c < 128 and s.isprintable() and not s in " \n\r\t\\": return s if c <= 0xff: return r'\x{0:02x}'.format(c) elif c <= 0xffff: return r'\u{0:04x}'.format(c) else: return r'\U{0:08x}'.format(c) def escape_str(s): if s == "": return r'\x00' return ''.join(escape(c) for c in s) # # Value format options # def formatValueRaw(num, t): return str(num) def formatValueHex(num, t): if t == "f32" or t == "i32": return "{0:#0{1}x}".format(int(num), 8+2) elif t == "f64" or t == "i64": return "{0:#0{1}x}".format(int(num), 16+2) else: return str(num) def formatValueFloat(num, t): if t == "f32": s = 6 elif t == "f64": s = 10 else: return str(num) result = "{0:.{1}f}".format(binaryToFloat(num, t), s).rstrip('0') if result.endswith('.'): result = result + '0' if len(result) > s*2: result = "{0:.{1}e}".format(binaryToFloat(num, t), s) return result formaters = { 'raw': formatValueRaw, 'hex': formatValueHex, 'fp': formatValueFloat, } formatValue = formaters[args.format] if args.format == "fp": print("When using fp display format, values are compared loosely (some tests may produce false positives)") # # Spec tests preparation # if not (os.path.isdir("./core") and os.path.isdir("./proposals")): from io import BytesIO from zipfile import ZipFile from urllib.request import urlopen officialSpec = "https://github.com/wasm3/wasm-core-testsuite/archive/master.zip" print(f"Downloading {officialSpec}") resp = urlopen(officialSpec) with ZipFile(BytesIO(resp.read())) as zipFile: for zipInfo in zipFile.infolist(): if re.match(r".*-master/.*/.*(\.wasm|\.json)", zipInfo.filename): parts = pathlib.Path(zipInfo.filename).parts newpath = str(pathlib.Path(*parts[1:-1])) newfn = str(pathlib.Path(*parts[-1:])) ensure_path(newpath) newpath = newpath + "/" + newfn 
zipInfo.filename = newpath zipFile.extract(zipInfo) # # Wasm3 REPL # from subprocess import Popen, STDOUT, PIPE from threading import Thread from queue import Queue, Empty import shlex def get_engine_cmd(engine, exe): if engine: cmd = shlex.split(engine) if "wasirun" in engine or "wasm3" in engine: return cmd + [exe, "--repl"] elif "wasmer" in engine: return cmd + ["--dir=.", exe, "--", "--repl"] elif "wasmtime" in engine: return cmd + ["--dir=.", exe, "--", "--repl"] elif "iwasm" in engine: return cmd + ["--dir=.", exe, "--repl"] elif "wavm" in engine: return cmd + ["--mount-root", ".", exe, "--repl"] # TODO, fix path else: fatal(f"Don't know how to run engine {engine}") else: if exe.endswith(".wasm"): fatal(f"Need engine to execute wasm") return shlex.split(exe) + ["--repl"] class Wasm3(): def __init__(self, exe, engine=None): self.exe = exe self.engine = engine self.p = None self.loaded = None self.timeout = args.timeout self.autorestart = True self.run() def run(self): if self.p: self.terminate() cmd = get_engine_cmd(self.engine, self.exe) #print(f"wasm3: Starting {' '.join(cmd)}") self.q = Queue() self.p = Popen(cmd, bufsize=0, stdin=PIPE, stdout=PIPE, stderr=STDOUT) def _read_output(out, queue): for data in iter(lambda: out.read(1024), b''): queue.put(data) queue.put(None) self.t = Thread(target=_read_output, args=(self.p.stdout, self.q)) self.t.daemon = True self.t.start() try: self._read_until("wasm3> ") except Exception as e: print(f"wasm3: Could not start: {e}") def restart(self): print(f"wasm3: Restarting") for i in range(10): try: self.run() try: if self.loaded: self.load(self.loaded) except Exception as e: pass break except Exception as e: print(f"wasm3: {e} => retry") time.sleep(0.1) def init(self): return self._run_cmd(f":init\n") def version(self): return self._run_cmd(f":version\n") def load(self, fn): self.loaded = None res = self._run_cmd(f":load {fn}\n") self.loaded = fn return res def invoke(self, cmd): return self._run_cmd(" ".join(map(str, 
cmd)) + "\n") def _run_cmd(self, cmd): if self.autorestart and not self._is_running(): self.restart() self._flush_input() #print(f"wasm3: {cmd.strip()}") self._write(cmd) return self._read_until("wasm3> ") def _read_until(self, token): buff = "" tout = time.time() + self.timeout error = None while time.time() < tout: try: data = self.q.get(timeout=0.1) if data == None: error = "Crashed" break buff = buff + data.decode("utf-8") idx = buff.rfind(token) if idx >= 0: return buff[0:idx].strip() except Empty: pass else: error = "Timeout" self.terminate() raise Exception(error) def _write(self, data): self.p.stdin.write(data.encode("utf-8")) self.p.stdin.flush() def _is_running(self): return self.p and (self.p.poll() == None) def _flush_input(self): while not self.q.empty(): self.q.get() def terminate(self): self.p.stdin.close() self.p.terminate() self.p.wait(timeout=1.0) self.p = None # # Actual test # wasm3 = Wasm3(args.exec, args.engine) print("Version: " + wasm3.version()) blacklist = Blacklist([ "float_exprs.wast:* f32.nonarithmetic_nan_bitpattern*", "imports.wast:*", "names.wast:630 *", # name that starts with '\0' ]) stats = dotdict(total_run=0, skipped=0, failed=0, crashed=0, timeout=0, success=0, missing=0) # Convert some trap names from the original spec trapmap = { "unreachable": "unreachable executed" } def runInvoke(test): test.cmd = [test.action.field] displayArgs = [] for arg in test.action.args: test.cmd.append(arg['value']) displayArgs.append(formatValue(arg['value'], arg['type'])) test_id = f"{test.source} {test.wasm} {test.cmd[0]}({', '.join(test.cmd[1:])})" if test_id in blacklist and not args.all: warning(f"Skipped {test_id} (blacklisted)") stats.skipped += 1 return if args.verbose: print(f"Running {test_id}") stats.total_run += 1 output = "" actual = None actual_val = None force_fail = False try: output = wasm3.invoke(test.cmd) except Exception as e: actual = f"<{e}>" force_fail = True # Parse the actual output if not actual: result = 
re.findall(r'Result: (.*?)$', "\n" + output + "\n", re.MULTILINE) if len(result) > 0: actual = "result " + result[-1] actual_val = result[0] if not actual: result = re.findall(r'Error: \[trap\] (.*?) \(', "\n" + output + "\n", re.MULTILINE) if len(result) > 0: actual = "trap " + result[-1] if not actual: result = re.findall(r'Error: (.*?)$', "\n" + output + "\n", re.MULTILINE) if len(result) > 0: actual = "error " + result[-1] if not actual: actual = "<No Result>" force_fail = True if actual == "error no operation ()": actual = "<Not Implemented>" stats.missing += 1 force_fail = True elif actual == "<Crashed>": stats.crashed += 1 force_fail = True elif actual == "<Timeout>": stats.timeout += 1 force_fail = True # Prepare the expected result expect = None if "expected" in test: if len(test.expected) == 0: expect = "result <Empty Stack>" elif len(test.expected) == 1: t = test.expected[0]['type'] value = str(test.expected[0]['value']) expect = "result " + value if actual_val != None: if (t == "f32" or t == "f64") and (value == "<Canonical NaN>" or value == "<Arithmetic NaN>"): val = binaryToFloat(actual_val, t) #warning(f"{actual_val} => {val}") if math.isnan(val): actual = "<Some NaN>" expect = "<Some NaN>" else: expect = "result " + formatValue(value, t) actual = "result " + formatValue(actual_val, t) else: warning(f"Test {test.source} specifies multiple results") expect = "result <Multiple>" elif "expected_trap" in test: if test.expected_trap in trapmap: test.expected_trap = trapmap[test.expected_trap] expect = "trap " + str(test.expected_trap) elif "expected_anything" in test: expect = "<Anything>" else: expect = "<Unknown>" def showTestResult(): print(" ----------------------") print(f"Test: {ansi.HEADER}{test_id}{ansi.ENDC}") print(f"Args: {', '.join(displayArgs)}") print(f"Expected: {ansi.OKGREEN}{expect}{ansi.ENDC}") print(f"Actual: {ansi.WARNING}{actual}{ansi.ENDC}") if args.show_logs and len(output): print(f"Log:") print(output) 
log.write(f"{test.source}\t|\t{test.wasm} {test.action.field}({', '.join(displayArgs)})\t=>\t\t") if actual == expect or (expect == "<Anything>" and not force_fail): stats.success += 1 log.write(f"OK: {actual}\n") if args.line: showTestResult() else: stats.failed += 1 log.write(f"FAIL: {actual}, should be: {expect}\n") if args.silent: return showTestResult() #sys.exit(1) if args.file: jsonFiles = args.file else: jsonFiles = glob.glob(os.path.join(".", "core", "*.json")) jsonFiles = list(map(lambda x: os.path.relpath(x, scriptDir), jsonFiles)) jsonFiles.sort() for fn in jsonFiles: with open(fn) as f: data = json.load(f) wast_source = filename(data["source_filename"]) wasm_module = "" print(f"Running {fn}") wasm3.init() for cmd in data["commands"]: test = dotdict() test.line = int(cmd["line"]) test.source = wast_source + ":" + str(test.line) test.wasm = wasm_module test.type = cmd["type"] if test.type == "module": wasm_module = cmd["filename"] if args.verbose: print(f"Loading {wasm_module}") try: wasm_fn = os.path.join(pathname(fn), wasm_module) wasm3.load(wasm_fn) except Exception as e: pass #fatal(str(e)) elif ( test.type == "action" or test.type == "assert_return" or test.type == "assert_trap" or test.type == "assert_exhaustion" or test.type == "assert_return_canonical_nan" or test.type == "assert_return_arithmetic_nan"): if args.line and test.line != args.line: continue if test.type == "action": test.expected_anything = True elif test.type == "assert_return": test.expected = cmd["expected"] elif test.type == "assert_return_canonical_nan": test.expected = cmd["expected"] test.expected[0]["value"] = "<Canonical NaN>" elif test.type == "assert_return_arithmetic_nan": test.expected = cmd["expected"] test.expected[0]["value"] = "<Arithmetic NaN>" elif test.type == "assert_trap": test.expected_trap = cmd["text"] elif test.type == "assert_exhaustion": test.expected_trap = "stack overflow" else: stats.skipped += 1 warning(f"Skipped {test.source} ({test.type} not 
implemented)") continue test.action = dotdict(cmd["action"]) if test.action.type == "invoke": # TODO: invoking in modules not implemented if test.action.module: stats.skipped += 1 warning(f"Skipped {test.source} (invoke in module)") continue test.action.field = escape_str(test.action.field) runInvoke(test) else: stats.skipped += 1 warning(f"Skipped {test.source} (unknown action type '{test.action.type}')") # These are irrelevant elif (test.type == "assert_invalid" or test.type == "assert_malformed" or test.type == "assert_uninstantiable"): pass # Others - report as skipped else: stats.skipped += 1 warning(f"Skipped {test.source} ('{test.type}' not implemented)") if (stats.failed + stats.success) != stats.total_run: warning("Statistics summary invalid") pprint(stats) if stats.failed > 0: failed = (stats.failed*100)/stats.total_run print(f"{ansi.FAIL}=======================") print(f" FAILED: {failed:.2f}%") if stats.crashed > 0: print(f" Crashed: {stats.crashed}") print(f"======================={ansi.ENDC}") sys.exit(1) elif stats.success > 0: print(f"{ansi.OKGREEN}=======================") print(f" {stats.success}/{stats.total_run} tests OK") if stats.skipped > 0: print(f"{ansi.WARNING} ({stats.skipped} tests skipped){ansi.OKGREEN}") print(f"======================={ansi.ENDC}")
29.529307
111
0.546045
import argparse import os, sys, glob, time import subprocess import json import re import struct import math import pathlib scriptDir = os.path.dirname(os.path.abspath(sys.argv[0])) sys.path.append(os.path.join(scriptDir, '..', 'extra')) from testutils import * from pprint import pprint parser = argparse.ArgumentParser() parser.add_argument("--exec", metavar="<interpreter>", default="../build/wasm3") parser.add_argument("--engine", metavar="<engine>") parser.add_argument("--timeout", type=int, default=30) parser.add_argument("--line", metavar="<source line>", type=int) parser.add_argument("--all", action="store_true") parser.add_argument("--show-logs", action="store_true") parser.add_argument("--format", choices=["raw", "hex", "fp"], default="fp") parser.add_argument("-v", "--verbose", action="store_true") parser.add_argument("-s", "--silent", action="store_true") parser.add_argument("file", nargs='*') args = parser.parse_args() if args.line: args.show_logs = True log = open("spec-test.log","w+") log.write("======================\n") def warning(msg): log.write("Warning: " + msg + "\n") log.flush() if args.verbose: print(f"{ansi.WARNING}Warning:{ansi.ENDC} {msg}") def fatal(msg): log.write("Fatal: " + msg + "\n") log.flush() print(f"{ansi.FAIL}Fatal:{ansi.ENDC} {msg}") sys.exit(1) def binaryToFloat(num, t): if t == "f32": return struct.unpack('!f', struct.pack('!L', int(num)))[0] elif t == "f64": return struct.unpack('!d', struct.pack('!Q', int(num)))[0] else: fatal(f"Unknown type '{t}'") def escape(s): c = ord(s) if c < 128 and s.isprintable() and not s in " \n\r\t\\": return s if c <= 0xff: return r'\x{0:02x}'.format(c) elif c <= 0xffff: return r'\u{0:04x}'.format(c) else: return r'\U{0:08x}'.format(c) def escape_str(s): if s == "": return r'\x00' return ''.join(escape(c) for c in s) def formatValueRaw(num, t): return str(num) def formatValueHex(num, t): if t == "f32" or t == "i32": return "{0:#0{1}x}".format(int(num), 8+2) elif t == "f64" or t == "i64": return 
"{0:#0{1}x}".format(int(num), 16+2) else: return str(num) def formatValueFloat(num, t): if t == "f32": s = 6 elif t == "f64": s = 10 else: return str(num) result = "{0:.{1}f}".format(binaryToFloat(num, t), s).rstrip('0') if result.endswith('.'): result = result + '0' if len(result) > s*2: result = "{0:.{1}e}".format(binaryToFloat(num, t), s) return result formaters = { 'raw': formatValueRaw, 'hex': formatValueHex, 'fp': formatValueFloat, } formatValue = formaters[args.format] if args.format == "fp": print("When using fp display format, values are compared loosely (some tests may produce false positives)") if not (os.path.isdir("./core") and os.path.isdir("./proposals")): from io import BytesIO from zipfile import ZipFile from urllib.request import urlopen officialSpec = "https://github.com/wasm3/wasm-core-testsuite/archive/master.zip" print(f"Downloading {officialSpec}") resp = urlopen(officialSpec) with ZipFile(BytesIO(resp.read())) as zipFile: for zipInfo in zipFile.infolist(): if re.match(r".*-master/.*/.*(\.wasm|\.json)", zipInfo.filename): parts = pathlib.Path(zipInfo.filename).parts newpath = str(pathlib.Path(*parts[1:-1])) newfn = str(pathlib.Path(*parts[-1:])) ensure_path(newpath) newpath = newpath + "/" + newfn zipInfo.filename = newpath zipFile.extract(zipInfo) from subprocess import Popen, STDOUT, PIPE from threading import Thread from queue import Queue, Empty import shlex def get_engine_cmd(engine, exe): if engine: cmd = shlex.split(engine) if "wasirun" in engine or "wasm3" in engine: return cmd + [exe, "--repl"] elif "wasmer" in engine: return cmd + ["--dir=.", exe, "--", "--repl"] elif "wasmtime" in engine: return cmd + ["--dir=.", exe, "--", "--repl"] elif "iwasm" in engine: return cmd + ["--dir=.", exe, "--repl"] elif "wavm" in engine: return cmd + ["--mount-root", ".", exe, "--repl"] else: fatal(f"Don't know how to run engine {engine}") else: if exe.endswith(".wasm"): fatal(f"Need engine to execute wasm") return shlex.split(exe) + ["--repl"] class 
Wasm3(): def __init__(self, exe, engine=None): self.exe = exe self.engine = engine self.p = None self.loaded = None self.timeout = args.timeout self.autorestart = True self.run() def run(self): if self.p: self.terminate() cmd = get_engine_cmd(self.engine, self.exe) #print(f"wasm3: Starting {' '.join(cmd)}") self.q = Queue() self.p = Popen(cmd, bufsize=0, stdin=PIPE, stdout=PIPE, stderr=STDOUT) def _read_output(out, queue): for data in iter(lambda: out.read(1024), b''): queue.put(data) queue.put(None) self.t = Thread(target=_read_output, args=(self.p.stdout, self.q)) self.t.daemon = True self.t.start() try: self._read_until("wasm3> ") except Exception as e: print(f"wasm3: Could not start: {e}") def restart(self): print(f"wasm3: Restarting") for i in range(10): try: self.run() try: if self.loaded: self.load(self.loaded) except Exception as e: pass break except Exception as e: print(f"wasm3: {e} => retry") time.sleep(0.1) def init(self): return self._run_cmd(f":init\n") def version(self): return self._run_cmd(f":version\n") def load(self, fn): self.loaded = None res = self._run_cmd(f":load {fn}\n") self.loaded = fn return res def invoke(self, cmd): return self._run_cmd(" ".join(map(str, cmd)) + "\n") def _run_cmd(self, cmd): if self.autorestart and not self._is_running(): self.restart() self._flush_input() #print(f"wasm3: {cmd.strip()}") self._write(cmd) return self._read_until("wasm3> ") def _read_until(self, token): buff = "" tout = time.time() + self.timeout error = None while time.time() < tout: try: data = self.q.get(timeout=0.1) if data == None: error = "Crashed" break buff = buff + data.decode("utf-8") idx = buff.rfind(token) if idx >= 0: return buff[0:idx].strip() except Empty: pass else: error = "Timeout" self.terminate() raise Exception(error) def _write(self, data): self.p.stdin.write(data.encode("utf-8")) self.p.stdin.flush() def _is_running(self): return self.p and (self.p.poll() == None) def _flush_input(self): while not self.q.empty(): self.q.get() def 
terminate(self): self.p.stdin.close() self.p.terminate() self.p.wait(timeout=1.0) self.p = None # # Actual test # wasm3 = Wasm3(args.exec, args.engine) print("Version: " + wasm3.version()) blacklist = Blacklist([ "float_exprs.wast:* f32.nonarithmetic_nan_bitpattern*", "imports.wast:*", "names.wast:630 *", # name that starts with '\0' ]) stats = dotdict(total_run=0, skipped=0, failed=0, crashed=0, timeout=0, success=0, missing=0) # Convert some trap names from the original spec trapmap = { "unreachable": "unreachable executed" } def runInvoke(test): test.cmd = [test.action.field] displayArgs = [] for arg in test.action.args: test.cmd.append(arg['value']) displayArgs.append(formatValue(arg['value'], arg['type'])) test_id = f"{test.source} {test.wasm} {test.cmd[0]}({', '.join(test.cmd[1:])})" if test_id in blacklist and not args.all: warning(f"Skipped {test_id} (blacklisted)") stats.skipped += 1 return if args.verbose: print(f"Running {test_id}") stats.total_run += 1 output = "" actual = None actual_val = None force_fail = False try: output = wasm3.invoke(test.cmd) except Exception as e: actual = f"<{e}>" force_fail = True # Parse the actual output if not actual: result = re.findall(r'Result: (.*?)$', "\n" + output + "\n", re.MULTILINE) if len(result) > 0: actual = "result " + result[-1] actual_val = result[0] if not actual: result = re.findall(r'Error: \[trap\] (.*?) 
\(', "\n" + output + "\n", re.MULTILINE) if len(result) > 0: actual = "trap " + result[-1] if not actual: result = re.findall(r'Error: (.*?)$', "\n" + output + "\n", re.MULTILINE) if len(result) > 0: actual = "error " + result[-1] if not actual: actual = "<No Result>" force_fail = True if actual == "error no operation ()": actual = "<Not Implemented>" stats.missing += 1 force_fail = True elif actual == "<Crashed>": stats.crashed += 1 force_fail = True elif actual == "<Timeout>": stats.timeout += 1 force_fail = True # Prepare the expected result expect = None if "expected" in test: if len(test.expected) == 0: expect = "result <Empty Stack>" elif len(test.expected) == 1: t = test.expected[0]['type'] value = str(test.expected[0]['value']) expect = "result " + value if actual_val != None: if (t == "f32" or t == "f64") and (value == "<Canonical NaN>" or value == "<Arithmetic NaN>"): val = binaryToFloat(actual_val, t) #warning(f"{actual_val} => {val}") if math.isnan(val): actual = "<Some NaN>" expect = "<Some NaN>" else: expect = "result " + formatValue(value, t) actual = "result " + formatValue(actual_val, t) else: warning(f"Test {test.source} specifies multiple results") expect = "result <Multiple>" elif "expected_trap" in test: if test.expected_trap in trapmap: test.expected_trap = trapmap[test.expected_trap] expect = "trap " + str(test.expected_trap) elif "expected_anything" in test: expect = "<Anything>" else: expect = "<Unknown>" def showTestResult(): print(" ----------------------") print(f"Test: {ansi.HEADER}{test_id}{ansi.ENDC}") print(f"Args: {', '.join(displayArgs)}") print(f"Expected: {ansi.OKGREEN}{expect}{ansi.ENDC}") print(f"Actual: {ansi.WARNING}{actual}{ansi.ENDC}") if args.show_logs and len(output): print(f"Log:") print(output) log.write(f"{test.source}\t|\t{test.wasm} {test.action.field}({', '.join(displayArgs)})\t=>\t\t") if actual == expect or (expect == "<Anything>" and not force_fail): stats.success += 1 log.write(f"OK: {actual}\n") if args.line: 
showTestResult() else: stats.failed += 1 log.write(f"FAIL: {actual}, should be: {expect}\n") if args.silent: return showTestResult() #sys.exit(1) if args.file: jsonFiles = args.file else: jsonFiles = glob.glob(os.path.join(".", "core", "*.json")) jsonFiles = list(map(lambda x: os.path.relpath(x, scriptDir), jsonFiles)) jsonFiles.sort() for fn in jsonFiles: with open(fn) as f: data = json.load(f) wast_source = filename(data["source_filename"]) wasm_module = "" print(f"Running {fn}") wasm3.init() for cmd in data["commands"]: test = dotdict() test.line = int(cmd["line"]) test.source = wast_source + ":" + str(test.line) test.wasm = wasm_module test.type = cmd["type"] if test.type == "module": wasm_module = cmd["filename"] if args.verbose: print(f"Loading {wasm_module}") try: wasm_fn = os.path.join(pathname(fn), wasm_module) wasm3.load(wasm_fn) except Exception as e: pass #fatal(str(e)) elif ( test.type == "action" or test.type == "assert_return" or test.type == "assert_trap" or test.type == "assert_exhaustion" or test.type == "assert_return_canonical_nan" or test.type == "assert_return_arithmetic_nan"): if args.line and test.line != args.line: continue if test.type == "action": test.expected_anything = True elif test.type == "assert_return": test.expected = cmd["expected"] elif test.type == "assert_return_canonical_nan": test.expected = cmd["expected"] test.expected[0]["value"] = "<Canonical NaN>" elif test.type == "assert_return_arithmetic_nan": test.expected = cmd["expected"] test.expected[0]["value"] = "<Arithmetic NaN>" elif test.type == "assert_trap": test.expected_trap = cmd["text"] elif test.type == "assert_exhaustion": test.expected_trap = "stack overflow" else: stats.skipped += 1 warning(f"Skipped {test.source} ({test.type} not implemented)") continue test.action = dotdict(cmd["action"]) if test.action.type == "invoke": # TODO: invoking in modules not implemented if test.action.module: stats.skipped += 1 warning(f"Skipped {test.source} (invoke in module)") 
continue test.action.field = escape_str(test.action.field) runInvoke(test) else: stats.skipped += 1 warning(f"Skipped {test.source} (unknown action type '{test.action.type}')") # These are irrelevant elif (test.type == "assert_invalid" or test.type == "assert_malformed" or test.type == "assert_uninstantiable"): pass # Others - report as skipped else: stats.skipped += 1 warning(f"Skipped {test.source} ('{test.type}' not implemented)") if (stats.failed + stats.success) != stats.total_run: warning("Statistics summary invalid") pprint(stats) if stats.failed > 0: failed = (stats.failed*100)/stats.total_run print(f"{ansi.FAIL}=======================") print(f" FAILED: {failed:.2f}%") if stats.crashed > 0: print(f" Crashed: {stats.crashed}") print(f"======================={ansi.ENDC}") sys.exit(1) elif stats.success > 0: print(f"{ansi.OKGREEN}=======================") print(f" {stats.success}/{stats.total_run} tests OK") if stats.skipped > 0: print(f"{ansi.WARNING} ({stats.skipped} tests skipped){ansi.OKGREEN}") print(f"======================={ansi.ENDC}")
true
true
f70c35edfcb73c5024264ba6523c1067d54f29ec
2,147
py
Python
src/python/tests/appengine/handlers/testcase_detail/delete_test.py
mi-ac/clusterfuzz
0b5c023eca9e3aac41faba17da8f341c0ca2ddc7
[ "Apache-2.0" ]
1
2021-12-20T14:48:42.000Z
2021-12-20T14:48:42.000Z
src/python/tests/appengine/handlers/testcase_detail/delete_test.py
mi-ac/clusterfuzz
0b5c023eca9e3aac41faba17da8f341c0ca2ddc7
[ "Apache-2.0" ]
2
2021-09-28T05:36:03.000Z
2021-12-13T20:48:34.000Z
src/python/tests/appengine/handlers/testcase_detail/delete_test.py
mi-ac/clusterfuzz
0b5c023eca9e3aac41faba17da8f341c0ca2ddc7
[ "Apache-2.0" ]
1
2021-11-06T06:22:00.000Z
2021-11-06T06:22:00.000Z
# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Delete tests.""" import flask import unittest import webtest from datastore import data_types from handlers.testcase_detail import delete from libs import form from tests.test_libs import helpers as test_helpers from tests.test_libs import test_utils @test_utils.with_cloud_emulators('datastore') class HandlerTest(unittest.TestCase): """Test HandlerTest.""" def setUp(self): test_helpers.patch(self, [ 'libs.auth.get_current_user', 'libs.auth.is_current_user_admin', ]) self.mock.is_current_user_admin.return_value = True self.mock.get_current_user().email = 'test@user.com' flaskapp = flask.Flask('testflask') flaskapp.add_url_rule('/', view_func=delete.Handler.as_view('/')) self.app = webtest.TestApp(flaskapp) def test_assigned_issue(self): """The testcase is assigned an issue.""" testcase = data_types.Testcase() testcase.bug_information = '1234' testcase.put() resp = self.app.post_json( '/', { 'testcaseId': testcase.key.id(), 'csrf_token': form.generate_csrf_token() }, expect_errors=True) self.assertEqual(400, resp.status_int) self.assertIsNotNone(testcase.key.get()) def test_succeed(self): """Delete.""" testcase = data_types.Testcase() testcase.bug_information = None testcase.put() resp = self.app.post_json('/', { 'testcaseId': testcase.key.id(), 'csrf_token': form.generate_csrf_token() }) self.assertEqual(200, resp.status_int) self.assertIsNone(testcase.key.get())
31.573529
74
0.706567
import flask import unittest import webtest from datastore import data_types from handlers.testcase_detail import delete from libs import form from tests.test_libs import helpers as test_helpers from tests.test_libs import test_utils @test_utils.with_cloud_emulators('datastore') class HandlerTest(unittest.TestCase): def setUp(self): test_helpers.patch(self, [ 'libs.auth.get_current_user', 'libs.auth.is_current_user_admin', ]) self.mock.is_current_user_admin.return_value = True self.mock.get_current_user().email = 'test@user.com' flaskapp = flask.Flask('testflask') flaskapp.add_url_rule('/', view_func=delete.Handler.as_view('/')) self.app = webtest.TestApp(flaskapp) def test_assigned_issue(self): testcase = data_types.Testcase() testcase.bug_information = '1234' testcase.put() resp = self.app.post_json( '/', { 'testcaseId': testcase.key.id(), 'csrf_token': form.generate_csrf_token() }, expect_errors=True) self.assertEqual(400, resp.status_int) self.assertIsNotNone(testcase.key.get()) def test_succeed(self): testcase = data_types.Testcase() testcase.bug_information = None testcase.put() resp = self.app.post_json('/', { 'testcaseId': testcase.key.id(), 'csrf_token': form.generate_csrf_token() }) self.assertEqual(200, resp.status_int) self.assertIsNone(testcase.key.get())
true
true
f70c37294d6f69da394b7f0744c44c5134a50d0e
767
py
Python
lektor/types/fake.py
yagebu/lektor
a31d8e57a5b2f4b090072527269c26a65202c736
[ "BSD-3-Clause" ]
4,104
2015-11-16T18:37:51.000Z
2022-03-28T14:28:24.000Z
lektor/types/fake.py
yagebu/lektor
a31d8e57a5b2f4b090072527269c26a65202c736
[ "BSD-3-Clause" ]
854
2015-12-05T12:19:02.000Z
2022-03-31T16:47:27.000Z
lektor/types/fake.py
yagebu/lektor
a31d8e57a5b2f4b090072527269c26a65202c736
[ "BSD-3-Clause" ]
442
2015-11-21T10:18:02.000Z
2022-03-29T19:55:17.000Z
from lektor.constants import PRIMARY_ALT from lektor.i18n import get_i18n_block from lektor.types.base import Type class FakeType(Type): def value_from_raw(self, raw): return None def to_json(self, pad, record=None, alt=PRIMARY_ALT): rv = Type.to_json(self, pad, record, alt) rv["is_fake_type"] = True return rv class LineType(FakeType): widget = "f-line" class SpacingType(FakeType): widget = "f-spacing" class InfoType(FakeType): widget = "f-info" class HeadingType(FakeType): widget = "f-heading" def to_json(self, pad, record=None, alt=PRIMARY_ALT): rv = FakeType.to_json(self, pad, record, alt) rv["heading_i18n"] = get_i18n_block(self.options, "heading") return rv
21.914286
68
0.670143
from lektor.constants import PRIMARY_ALT from lektor.i18n import get_i18n_block from lektor.types.base import Type class FakeType(Type): def value_from_raw(self, raw): return None def to_json(self, pad, record=None, alt=PRIMARY_ALT): rv = Type.to_json(self, pad, record, alt) rv["is_fake_type"] = True return rv class LineType(FakeType): widget = "f-line" class SpacingType(FakeType): widget = "f-spacing" class InfoType(FakeType): widget = "f-info" class HeadingType(FakeType): widget = "f-heading" def to_json(self, pad, record=None, alt=PRIMARY_ALT): rv = FakeType.to_json(self, pad, record, alt) rv["heading_i18n"] = get_i18n_block(self.options, "heading") return rv
true
true
f70c37c40d7c1c21c618b91fdf97259c369995d3
3,246
py
Python
trafficlights/trafficlight.py
jean-charles-gibier/TrafficLights
a513e28c3c7349abefb39553ec46d3487af60a3d
[ "CC0-1.0" ]
null
null
null
trafficlights/trafficlight.py
jean-charles-gibier/TrafficLights
a513e28c3c7349abefb39553ec46d3487af60a3d
[ "CC0-1.0" ]
null
null
null
trafficlights/trafficlight.py
jean-charles-gibier/TrafficLights
a513e28c3c7349abefb39553ec46d3487af60a3d
[ "CC0-1.0" ]
null
null
null
"""Module implémentant des classes en relations avec le menu.""" from typing import Callable, Dict, List, Tuple, AnyStr class Trafficlight: """Modélise un feu de circulation présentant un état lumineux donné. wrarn : la couleur affectée par défaut n'est pas validée par l'init autrement dit on peut initialiser avec une couleur inexistante dans le choix final """ def __init__(self, name: str = "None", current: str = "rouge") -> None: """Initialise une instance de feu. Args: name: identifiant du feu courant current: etat par defaut """ self._name: str = name self._current: str = current self._triggers: Dict = {} def add(self, trigger: AnyStr, source: AnyStr, dest: AnyStr) -> "Trafficlight": """Ajoute une transition entre 2 etats. Args: trigger: action déclenchant la transition, source: etat de départ, dest: etat d'arivee """ trigger = trigger.lower().strip() source = source.lower().strip() self._triggers[trigger] = self._triggers.get(trigger, {}) self._triggers[trigger][source] = dest return self def next(self, **args): """Recupere le prochain etat et appelle le renderer.""" # vérification du choix de la couleur if "color" in args and self._triggers["next"][self._current] != args["color"]: print( "La couleur {} ne peut suivre la couleur {} ".format( args["color"], self._current ) ) return getattr(self, "render"), args self._current = self._triggers["next"][self._current] return getattr(self, "render"), args def quitter(self, **args): """Quitte le parcours.""" return None, None def __str__(self) -> str: """Formate le contenu de l'objet en vue de sa présentation à l'utilisateur.""" lines: List[str] = [f"{self._name.title()}\t({self._current})\n"] for key, value in enumerate(self._triggers.keys()): lines.append(f"{key} - {value}") lines.append("Ou entrez la valeur en toutes lettres.") lines.append("") lines.append(">>> ") return "\n".join(lines) def render(self, **args) -> Tuple[Callable, Dict]: """Affiche le choix des triggers à l'utilisateur et attend la réponse de ce 
dernier. """ entries: Dict = { str(key): value for (key, value) in enumerate(self._triggers.keys()) } while True: choice = input(self).lower().strip() if choice in entries: return getattr(self, entries[choice]), {} elif choice in self._triggers["next"]: return getattr(self, "next"), {"color": choice} else: print("Erreur de saisie.") def get_current(self) -> str: """renvoie l'etat courant du feu. """ return self._current def set_current(self, value: AnyStr): """ affecte l'etat courant du feu. """ self.next(color=value)
37.744186
93
0.558534
from typing import Callable, Dict, List, Tuple, AnyStr class Trafficlight: def __init__(self, name: str = "None", current: str = "rouge") -> None: self._name: str = name self._current: str = current self._triggers: Dict = {} def add(self, trigger: AnyStr, source: AnyStr, dest: AnyStr) -> "Trafficlight": trigger = trigger.lower().strip() source = source.lower().strip() self._triggers[trigger] = self._triggers.get(trigger, {}) self._triggers[trigger][source] = dest return self def next(self, **args): if "color" in args and self._triggers["next"][self._current] != args["color"]: print( "La couleur {} ne peut suivre la couleur {} ".format( args["color"], self._current ) ) return getattr(self, "render"), args self._current = self._triggers["next"][self._current] return getattr(self, "render"), args def quitter(self, **args): return None, None def __str__(self) -> str: lines: List[str] = [f"{self._name.title()}\t({self._current})\n"] for key, value in enumerate(self._triggers.keys()): lines.append(f"{key} - {value}") lines.append("Ou entrez la valeur en toutes lettres.") lines.append("") lines.append(">>> ") return "\n".join(lines) def render(self, **args) -> Tuple[Callable, Dict]: entries: Dict = { str(key): value for (key, value) in enumerate(self._triggers.keys()) } while True: choice = input(self).lower().strip() if choice in entries: return getattr(self, entries[choice]), {} elif choice in self._triggers["next"]: return getattr(self, "next"), {"color": choice} else: print("Erreur de saisie.") def get_current(self) -> str: return self._current def set_current(self, value: AnyStr): self.next(color=value)
true
true
f70c38682f59465a9fb9eb7311497596f5bc838a
1,201
py
Python
operators/clip.py
ngiambla/nnflex
7c8bf46218ea70c6dad1efedf9e2069e41c4c3fa
[ "MIT" ]
null
null
null
operators/clip.py
ngiambla/nnflex
7c8bf46218ea70c6dad1efedf9e2069e41c4c3fa
[ "MIT" ]
null
null
null
operators/clip.py
ngiambla/nnflex
7c8bf46218ea70c6dad1efedf9e2069e41c4c3fa
[ "MIT" ]
null
null
null
''' clip.py: Implement's the clip ONNX node as a flexnode (for use with any accelerator) ''' import uuid import numpy as np from operators.flexnode import FlexNode from core.defines import Operator from core.messaging import Message class Clip(FlexNode): def __init__(self, onnx_node, inputs, outputs): FlexNode.__init__(self, onnx_node, inputs, outputs) self._min = -3.402823466e+38 self._max = 3.402823466e+38 if len(inputs) != 1 and len(inputs) != 3: raise ValueError("Clip can only have 1 or 3 inputs.") self._input = inputs[0] if len(inputs) == 3: self._min = inputs[1] self._max = inputs[2] def map(self, memory_mapper): pass def unmap(self, memory_mapper): pass def _inputs2mem(self, memory_xfer_engine): pass def _mem2output(self, memory_xfer_engine): pass def compile(self, source, destinations): tile_commands = list() # Here, we are NOT generating tile_commands, (although, this is not difficult.) np.copyto(self._outputs[0], np.clip(self._input, self._min, self._max)) return tile_commands
24.02
87
0.636969
import uuid import numpy as np from operators.flexnode import FlexNode from core.defines import Operator from core.messaging import Message class Clip(FlexNode): def __init__(self, onnx_node, inputs, outputs): FlexNode.__init__(self, onnx_node, inputs, outputs) self._min = -3.402823466e+38 self._max = 3.402823466e+38 if len(inputs) != 1 and len(inputs) != 3: raise ValueError("Clip can only have 1 or 3 inputs.") self._input = inputs[0] if len(inputs) == 3: self._min = inputs[1] self._max = inputs[2] def map(self, memory_mapper): pass def unmap(self, memory_mapper): pass def _inputs2mem(self, memory_xfer_engine): pass def _mem2output(self, memory_xfer_engine): pass def compile(self, source, destinations): tile_commands = list() np.copyto(self._outputs[0], np.clip(self._input, self._min, self._max)) return tile_commands
true
true
f70c3872a89eb986046522f1dedbedb36f6db349
22,020
py
Python
django/db/backends/sqlite3/base.py
krallin/django
c94db53eaa9b344f9227fa4dff2b1a5e9c7dce9d
[ "BSD-3-Clause" ]
null
null
null
django/db/backends/sqlite3/base.py
krallin/django
c94db53eaa9b344f9227fa4dff2b1a5e9c7dce9d
[ "BSD-3-Clause" ]
null
null
null
django/db/backends/sqlite3/base.py
krallin/django
c94db53eaa9b344f9227fa4dff2b1a5e9c7dce9d
[ "BSD-3-Clause" ]
null
null
null
""" SQLite3 backend for django. Works with either the pysqlite2 module or the sqlite3 module in the standard library. """ from __future__ import unicode_literals import datetime import decimal import warnings import re from django.db import utils from django.db.backends import * from django.db.backends.sqlite3.client import DatabaseClient from django.db.backends.sqlite3.creation import DatabaseCreation from django.db.backends.sqlite3.introspection import DatabaseIntrospection from django.db.models import fields from django.db.models.sql import aggregates from django.utils.dateparse import parse_date, parse_datetime, parse_time from django.utils.functional import cached_property from django.utils.safestring import SafeBytes from django.utils import six from django.utils import timezone try: try: from pysqlite2 import dbapi2 as Database except ImportError: from sqlite3 import dbapi2 as Database except ImportError as exc: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("Error loading either pysqlite2 or sqlite3 modules (tried in that order): %s" % exc) try: import pytz except ImportError: pytz = None DatabaseError = Database.DatabaseError IntegrityError = Database.IntegrityError def parse_datetime_with_timezone_support(value): dt = parse_datetime(value) # Confirm that dt is naive before overwriting its tzinfo. if dt is not None and settings.USE_TZ and timezone.is_naive(dt): dt = dt.replace(tzinfo=timezone.utc) return dt def adapt_datetime_with_timezone_support(value): # Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL. if settings.USE_TZ: if timezone.is_naive(value): warnings.warn("SQLite received a naive datetime (%s)" " while time zone support is active." 
% value, RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) value = value.astimezone(timezone.utc).replace(tzinfo=None) return value.isoformat(str(" ")) def decoder(conv_func): """ The Python sqlite3 interface returns always byte strings. This function converts the received value to a regular string before passing it to the receiver function. """ return lambda s: conv_func(s.decode('utf-8')) Database.register_converter(str("bool"), decoder(lambda s: s == '1')) Database.register_converter(str("time"), decoder(parse_time)) Database.register_converter(str("date"), decoder(parse_date)) Database.register_converter(str("datetime"), decoder(parse_datetime_with_timezone_support)) Database.register_converter(str("timestamp"), decoder(parse_datetime_with_timezone_support)) Database.register_converter(str("TIMESTAMP"), decoder(parse_datetime_with_timezone_support)) Database.register_converter(str("decimal"), decoder(util.typecast_decimal)) Database.register_adapter(datetime.datetime, adapt_datetime_with_timezone_support) Database.register_adapter(decimal.Decimal, util.rev_typecast_decimal) if Database.version_info >= (2, 4, 1): # Starting in 2.4.1, the str type is not accepted anymore, therefore, # we convert all str objects to Unicode # As registering a adapter for a primitive type causes a small # slow-down, this adapter is only registered for sqlite3 versions # needing it (Python 2.6 and up). Database.register_adapter(str, lambda s: s.decode('utf-8')) Database.register_adapter(SafeBytes, lambda s: s.decode('utf-8')) class DatabaseFeatures(BaseDatabaseFeatures): # SQLite cannot handle us only partially reading from a cursor's result set # and then writing the same rows to the database in another cursor. This # setting ensures we always read result sets fully into memory all in one # go. 
can_use_chunked_reads = False test_db_allows_multiple_connections = False supports_unspecified_pk = True supports_timezones = False supports_1000_query_parameters = False supports_mixed_date_datetime_comparisons = False has_bulk_insert = True can_combine_inserts_with_and_without_auto_increment_pk = False autocommits_when_autocommit_is_off = True @cached_property def uses_savepoints(self): return Database.sqlite_version_info >= (3, 6, 8) @cached_property def supports_stddev(self): """Confirm support for STDDEV and related stats functions SQLite supports STDDEV as an extension package; so connection.ops.check_aggregate_support() can't unilaterally rule out support for STDDEV. We need to manually check whether the call works. """ cursor = self.connection.cursor() cursor.execute('CREATE TABLE STDDEV_TEST (X INT)') try: cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST') has_support = True except utils.DatabaseError: has_support = False cursor.execute('DROP TABLE STDDEV_TEST') return has_support @cached_property def has_zoneinfo_database(self): return pytz is not None class DatabaseOperations(BaseDatabaseOperations): def bulk_batch_size(self, fields, objs): """ SQLite has a compile-time default (SQLITE_LIMIT_VARIABLE_NUMBER) of 999 variables per query. If there is just single field to insert, then we can hit another limit, SQLITE_MAX_COMPOUND_SELECT which defaults to 500. 
""" limit = 999 if len(fields) > 1 else 500 return (limit // len(fields)) if len(fields) > 0 else len(objs) def check_aggregate_support(self, aggregate): bad_fields = (fields.DateField, fields.DateTimeField, fields.TimeField) bad_aggregates = (aggregates.Sum, aggregates.Avg, aggregates.Variance, aggregates.StdDev) if (isinstance(aggregate.source, bad_fields) and isinstance(aggregate, bad_aggregates)): raise NotImplementedError( 'You cannot use Sum, Avg, StdDev and Variance aggregations ' 'on date/time fields in sqlite3 ' 'since date/time is saved as text.') def date_extract_sql(self, lookup_type, field_name): # sqlite doesn't support extract, so we fake it with the user-defined # function django_date_extract that's registered in connect(). Note that # single quotes are used because this is a string (and could otherwise # cause a collision with a field name). return "django_date_extract('%s', %s)" % (lookup_type.lower(), field_name) def date_interval_sql(self, sql, connector, timedelta): # It would be more straightforward if we could use the sqlite strftime # function, but it does not allow for keeping six digits of fractional # second information, nor does it allow for formatting date and datetime # values differently. So instead we register our own function that # formats the datetime combined with the delta in a manner suitable # for comparisons. return 'django_format_dtdelta(%s, "%s", "%d", "%d", "%d")' % (sql, connector, timedelta.days, timedelta.seconds, timedelta.microseconds) def date_trunc_sql(self, lookup_type, field_name): # sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined # function django_date_trunc that's registered in connect(). Note that # single quotes are used because this is a string (and could otherwise # cause a collision with a field name). return "django_date_trunc('%s', %s)" % (lookup_type.lower(), field_name) def datetime_extract_sql(self, lookup_type, field_name, tzname): # Same comment as in date_extract_sql. 
if settings.USE_TZ: if pytz is None: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("This query requires pytz, " "but it isn't installed.") return "django_datetime_extract('%s', %s, %%s)" % ( lookup_type.lower(), field_name), [tzname] def datetime_trunc_sql(self, lookup_type, field_name, tzname): # Same comment as in date_trunc_sql. if settings.USE_TZ: if pytz is None: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("This query requires pytz, " "but it isn't installed.") return "django_datetime_trunc('%s', %s, %%s)" % ( lookup_type.lower(), field_name), [tzname] def drop_foreignkey_sql(self): return "" def pk_default_value(self): return "NULL" def quote_name(self, name): if name.startswith('"') and name.endswith('"'): return name # Quoting once is enough. return '"%s"' % name def no_limit_value(self): return -1 def sql_flush(self, style, tables, sequences): # NB: The generated SQL below is specific to SQLite # Note: The DELETE FROM... SQL generated below works for SQLite databases # because constraints don't exist sql = ['%s %s %s;' % \ (style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'), style.SQL_FIELD(self.quote_name(table)) ) for table in tables] # Note: No requirement for reset of auto-incremented indices (cf. other # sql_flush() implementations). 
Just return SQL at this point return sql def value_to_db_datetime(self, value): if value is None: return None # SQLite doesn't support tz-aware datetimes if timezone.is_aware(value): if settings.USE_TZ: value = value.astimezone(timezone.utc).replace(tzinfo=None) else: raise ValueError("SQLite backend does not support timezone-aware datetimes when USE_TZ is False.") return six.text_type(value) def value_to_db_time(self, value): if value is None: return None # SQLite doesn't support tz-aware datetimes if timezone.is_aware(value): raise ValueError("SQLite backend does not support timezone-aware times.") return six.text_type(value) def convert_values(self, value, field): """SQLite returns floats when it should be returning decimals, and gets dates and datetimes wrong. For consistency with other backends, coerce when required. """ internal_type = field.get_internal_type() if internal_type == 'DecimalField': return util.typecast_decimal(field.format_number(value)) elif internal_type and internal_type.endswith('IntegerField') or internal_type == 'AutoField': return int(value) elif internal_type == 'DateField': return parse_date(value) elif internal_type == 'DateTimeField': return parse_datetime_with_timezone_support(value) elif internal_type == 'TimeField': return parse_time(value) # No field, or the field isn't known to be a decimal or integer return value def bulk_insert_sql(self, fields, num_values): res = [] res.append("SELECT %s" % ", ".join( "%%s AS %s" % self.quote_name(f.column) for f in fields )) res.extend(["UNION ALL SELECT %s" % ", ".join(["%s"] * len(fields))] * (num_values - 1)) return " ".join(res) class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'sqlite' # SQLite requires LIKE statements to include an ESCAPE clause if the value # being escaped has a percent or underscore in it. # See http://www.sqlite.org/lang_expr.html for an explanation. 
operators = { 'exact': '= %s', 'iexact': "LIKE %s ESCAPE '\\'", 'contains': "LIKE %s ESCAPE '\\'", 'icontains': "LIKE %s ESCAPE '\\'", 'regex': 'REGEXP %s', 'iregex': "REGEXP '(?i)' || %s", 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': "LIKE %s ESCAPE '\\'", 'endswith': "LIKE %s ESCAPE '\\'", 'istartswith': "LIKE %s ESCAPE '\\'", 'iendswith': "LIKE %s ESCAPE '\\'", } Database = Database def __init__(self, *args, **kwargs): super(DatabaseWrapper, self).__init__(*args, **kwargs) self.features = DatabaseFeatures(self) self.ops = DatabaseOperations(self) self.client = DatabaseClient(self) self.creation = DatabaseCreation(self) self.introspection = DatabaseIntrospection(self) self.validation = BaseDatabaseValidation(self) def get_connection_params(self): settings_dict = self.settings_dict if not settings_dict['NAME']: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured( "settings.DATABASES is improperly configured. " "Please supply the NAME value.") kwargs = { 'database': settings_dict['NAME'], 'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES, } kwargs.update(settings_dict['OPTIONS']) # Always allow the underlying SQLite connection to be shareable # between multiple threads. The safe-guarding will be handled at a # higher level by the `BaseDatabaseWrapper.allow_thread_sharing` # property. This is necessary as the shareability is disabled by # default in pysqlite and it cannot be changed once a connection is # opened. if 'check_same_thread' in kwargs and kwargs['check_same_thread']: warnings.warn( 'The `check_same_thread` option was provided and set to ' 'True. It will be overriden with False. 
Use the ' '`DatabaseWrapper.allow_thread_sharing` property instead ' 'for controlling thread shareability.', RuntimeWarning ) kwargs.update({'check_same_thread': False}) return kwargs def get_new_connection(self, conn_params): conn = Database.connect(**conn_params) conn.create_function("django_date_extract", 2, _sqlite_date_extract) conn.create_function("django_date_trunc", 2, _sqlite_date_trunc) conn.create_function("django_datetime_extract", 3, _sqlite_datetime_extract) conn.create_function("django_datetime_trunc", 3, _sqlite_datetime_trunc) conn.create_function("regexp", 2, _sqlite_regexp) conn.create_function("django_format_dtdelta", 5, _sqlite_format_dtdelta) return conn def init_connection_state(self): pass def create_cursor(self): return self.connection.cursor(factory=SQLiteCursorWrapper) def close(self): self.validate_thread_sharing() # If database is in memory, closing the connection destroys the # database. To prevent accidental data loss, ignore close requests on # an in-memory db. if self.settings_dict['NAME'] != ":memory:": BaseDatabaseWrapper.close(self) def _savepoint_allowed(self): # When 'isolation_level' is not None, sqlite3 commits before each # savepoint; it's a bug. When it is None, savepoints don't make sense # because autocommit is enabled. The only exception is inside atomic # blocks. To work around that bug, on SQLite, atomic starts a # transaction explicitly rather than simply disable autocommit. return self.in_atomic_block def _set_autocommit(self, autocommit): if autocommit: level = None else: # sqlite3's internal default is ''. It's different from None. # See Modules/_sqlite/connection.c. level = '' # 'isolation_level' is a misleading API. # SQLite always runs at the SERIALIZABLE isolation level. self.connection.isolation_level = level def check_constraints(self, table_names=None): """ Checks each table name in `table_names` for rows with invalid foreign key references. 
This method is intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to determine if rows with invalid references were entered while constraint checks were off. Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides detailed information about the invalid reference in the error message. Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE") """ cursor = self.cursor() if table_names is None: table_names = self.introspection.table_names(cursor) for table_name in table_names: primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) if not primary_key_column_name: continue key_columns = self.introspection.get_key_columns(cursor, table_name) for column_name, referenced_table_name, referenced_column_name in key_columns: cursor.execute(""" SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING LEFT JOIN `%s` as REFERRED ON (REFERRING.`%s` = REFERRED.`%s`) WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL""" % (primary_key_column_name, column_name, table_name, referenced_table_name, column_name, referenced_column_name, column_name, referenced_column_name)) for bad_row in cursor.fetchall(): raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid " "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s." % (table_name, bad_row[0], table_name, column_name, bad_row[1], referenced_table_name, referenced_column_name)) def is_usable(self): return True def _start_transaction_under_autocommit(self): """ Start a transaction explicitly in autocommit mode. Staying in autocommit mode works around a bug of sqlite3 that breaks savepoints when autocommit is disabled. 
""" self.cursor().execute("BEGIN") FORMAT_QMARK_REGEX = re.compile(r'(?<!%)%s') class SQLiteCursorWrapper(Database.Cursor): """ Django uses "format" style placeholders, but pysqlite2 uses "qmark" style. This fixes it -- but note that if you want to use a literal "%s" in a query, you'll need to use "%%s". """ def execute(self, query, params=()): query = self.convert_query(query) return Database.Cursor.execute(self, query, params) def executemany(self, query, param_list): query = self.convert_query(query) return Database.Cursor.executemany(self, query, param_list) def convert_query(self, query): return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%') def _sqlite_date_extract(lookup_type, dt): if dt is None: return None try: dt = util.typecast_timestamp(dt) except (ValueError, TypeError): return None if lookup_type == 'week_day': return (dt.isoweekday() % 7) + 1 else: return getattr(dt, lookup_type) def _sqlite_date_trunc(lookup_type, dt): try: dt = util.typecast_timestamp(dt) except (ValueError, TypeError): return None if lookup_type == 'year': return "%i-01-01" % dt.year elif lookup_type == 'month': return "%i-%02i-01" % (dt.year, dt.month) elif lookup_type == 'day': return "%i-%02i-%02i" % (dt.year, dt.month, dt.day) def _sqlite_datetime_extract(lookup_type, dt, tzname): if dt is None: return None try: dt = util.typecast_timestamp(dt) except (ValueError, TypeError): return None if tzname is not None: dt = timezone.localtime(dt, pytz.timezone(tzname)) if lookup_type == 'week_day': return (dt.isoweekday() % 7) + 1 else: return getattr(dt, lookup_type) def _sqlite_datetime_trunc(lookup_type, dt, tzname): try: dt = util.typecast_timestamp(dt) except (ValueError, TypeError): return None if tzname is not None: dt = timezone.localtime(dt, pytz.timezone(tzname)) if lookup_type == 'year': return "%i-01-01 00:00:00" % dt.year elif lookup_type == 'month': return "%i-%02i-01 00:00:00" % (dt.year, dt.month) elif lookup_type == 'day': return "%i-%02i-%02i 00:00:00" % 
(dt.year, dt.month, dt.day) elif lookup_type == 'hour': return "%i-%02i-%02i %02i:00:00" % (dt.year, dt.month, dt.day, dt.hour) elif lookup_type == 'minute': return "%i-%02i-%02i %02i:%02i:00" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) elif lookup_type == 'second': return "%i-%02i-%02i %02i:%02i:%02i" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) def _sqlite_format_dtdelta(dt, conn, days, secs, usecs): try: dt = util.typecast_timestamp(dt) delta = datetime.timedelta(int(days), int(secs), int(usecs)) if conn.strip() == '+': dt = dt + delta else: dt = dt - delta except (ValueError, TypeError): return None # typecast_timestamp returns a date or a datetime without timezone. # It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]" return str(dt) def _sqlite_regexp(re_pattern, re_string): return bool(re.search(re_pattern, re_string))
42.264875
120
0.655767
from __future__ import unicode_literals import datetime import decimal import warnings import re from django.db import utils from django.db.backends import * from django.db.backends.sqlite3.client import DatabaseClient from django.db.backends.sqlite3.creation import DatabaseCreation from django.db.backends.sqlite3.introspection import DatabaseIntrospection from django.db.models import fields from django.db.models.sql import aggregates from django.utils.dateparse import parse_date, parse_datetime, parse_time from django.utils.functional import cached_property from django.utils.safestring import SafeBytes from django.utils import six from django.utils import timezone try: try: from pysqlite2 import dbapi2 as Database except ImportError: from sqlite3 import dbapi2 as Database except ImportError as exc: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("Error loading either pysqlite2 or sqlite3 modules (tried in that order): %s" % exc) try: import pytz except ImportError: pytz = None DatabaseError = Database.DatabaseError IntegrityError = Database.IntegrityError def parse_datetime_with_timezone_support(value): dt = parse_datetime(value) if dt is not None and settings.USE_TZ and timezone.is_naive(dt): dt = dt.replace(tzinfo=timezone.utc) return dt def adapt_datetime_with_timezone_support(value): if settings.USE_TZ: if timezone.is_naive(value): warnings.warn("SQLite received a naive datetime (%s)" " while time zone support is active." 
% value, RuntimeWarning) default_timezone = timezone.get_default_timezone() value = timezone.make_aware(value, default_timezone) value = value.astimezone(timezone.utc).replace(tzinfo=None) return value.isoformat(str(" ")) def decoder(conv_func): return lambda s: conv_func(s.decode('utf-8')) Database.register_converter(str("bool"), decoder(lambda s: s == '1')) Database.register_converter(str("time"), decoder(parse_time)) Database.register_converter(str("date"), decoder(parse_date)) Database.register_converter(str("datetime"), decoder(parse_datetime_with_timezone_support)) Database.register_converter(str("timestamp"), decoder(parse_datetime_with_timezone_support)) Database.register_converter(str("TIMESTAMP"), decoder(parse_datetime_with_timezone_support)) Database.register_converter(str("decimal"), decoder(util.typecast_decimal)) Database.register_adapter(datetime.datetime, adapt_datetime_with_timezone_support) Database.register_adapter(decimal.Decimal, util.rev_typecast_decimal) if Database.version_info >= (2, 4, 1): Database.register_adapter(str, lambda s: s.decode('utf-8')) Database.register_adapter(SafeBytes, lambda s: s.decode('utf-8')) class DatabaseFeatures(BaseDatabaseFeatures): # and then writing the same rows to the database in another cursor. This # setting ensures we always read result sets fully into memory all in one # go. 
can_use_chunked_reads = False test_db_allows_multiple_connections = False supports_unspecified_pk = True supports_timezones = False supports_1000_query_parameters = False supports_mixed_date_datetime_comparisons = False has_bulk_insert = True can_combine_inserts_with_and_without_auto_increment_pk = False autocommits_when_autocommit_is_off = True @cached_property def uses_savepoints(self): return Database.sqlite_version_info >= (3, 6, 8) @cached_property def supports_stddev(self): cursor = self.connection.cursor() cursor.execute('CREATE TABLE STDDEV_TEST (X INT)') try: cursor.execute('SELECT STDDEV(*) FROM STDDEV_TEST') has_support = True except utils.DatabaseError: has_support = False cursor.execute('DROP TABLE STDDEV_TEST') return has_support @cached_property def has_zoneinfo_database(self): return pytz is not None class DatabaseOperations(BaseDatabaseOperations): def bulk_batch_size(self, fields, objs): limit = 999 if len(fields) > 1 else 500 return (limit // len(fields)) if len(fields) > 0 else len(objs) def check_aggregate_support(self, aggregate): bad_fields = (fields.DateField, fields.DateTimeField, fields.TimeField) bad_aggregates = (aggregates.Sum, aggregates.Avg, aggregates.Variance, aggregates.StdDev) if (isinstance(aggregate.source, bad_fields) and isinstance(aggregate, bad_aggregates)): raise NotImplementedError( 'You cannot use Sum, Avg, StdDev and Variance aggregations ' 'on date/time fields in sqlite3 ' 'since date/time is saved as text.') def date_extract_sql(self, lookup_type, field_name): # sqlite doesn't support extract, so we fake it with the user-defined # single quotes are used because this is a string (and could otherwise # cause a collision with a field name). 
return "django_date_extract('%s', %s)" % (lookup_type.lower(), field_name) def date_interval_sql(self, sql, connector, timedelta): # It would be more straightforward if we could use the sqlite strftime # function, but it does not allow for keeping six digits of fractional # second information, nor does it allow for formatting date and datetime # values differently. So instead we register our own function that # formats the datetime combined with the delta in a manner suitable # for comparisons. return 'django_format_dtdelta(%s, "%s", "%d", "%d", "%d")' % (sql, connector, timedelta.days, timedelta.seconds, timedelta.microseconds) def date_trunc_sql(self, lookup_type, field_name): # sqlite doesn't support DATE_TRUNC, so we fake it with a user-defined # single quotes are used because this is a string (and could otherwise # cause a collision with a field name). return "django_date_trunc('%s', %s)" % (lookup_type.lower(), field_name) def datetime_extract_sql(self, lookup_type, field_name, tzname): # Same comment as in date_extract_sql. if settings.USE_TZ: if pytz is None: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("This query requires pytz, " "but it isn't installed.") return "django_datetime_extract('%s', %s, %%s)" % ( lookup_type.lower(), field_name), [tzname] def datetime_trunc_sql(self, lookup_type, field_name, tzname): if settings.USE_TZ: if pytz is None: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured("This query requires pytz, " "but it isn't installed.") return "django_datetime_trunc('%s', %s, %%s)" % ( lookup_type.lower(), field_name), [tzname] def drop_foreignkey_sql(self): return "" def pk_default_value(self): return "NULL" def quote_name(self, name): if name.startswith('"') and name.endswith('"'): return name # Quoting once is enough. 
return '"%s"' % name def no_limit_value(self): return -1 def sql_flush(self, style, tables, sequences): # NB: The generated SQL below is specific to SQLite # Note: The DELETE FROM... SQL generated below works for SQLite databases # because constraints don't exist sql = ['%s %s %s;' % \ (style.SQL_KEYWORD('DELETE'), style.SQL_KEYWORD('FROM'), style.SQL_FIELD(self.quote_name(table)) ) for table in tables] return sql def value_to_db_datetime(self, value): if value is None: return None if timezone.is_aware(value): if settings.USE_TZ: value = value.astimezone(timezone.utc).replace(tzinfo=None) else: raise ValueError("SQLite backend does not support timezone-aware datetimes when USE_TZ is False.") return six.text_type(value) def value_to_db_time(self, value): if value is None: return None # SQLite doesn't support tz-aware datetimes if timezone.is_aware(value): raise ValueError("SQLite backend does not support timezone-aware times.") return six.text_type(value) def convert_values(self, value, field): internal_type = field.get_internal_type() if internal_type == 'DecimalField': return util.typecast_decimal(field.format_number(value)) elif internal_type and internal_type.endswith('IntegerField') or internal_type == 'AutoField': return int(value) elif internal_type == 'DateField': return parse_date(value) elif internal_type == 'DateTimeField': return parse_datetime_with_timezone_support(value) elif internal_type == 'TimeField': return parse_time(value) return value def bulk_insert_sql(self, fields, num_values): res = [] res.append("SELECT %s" % ", ".join( "%%s AS %s" % self.quote_name(f.column) for f in fields )) res.extend(["UNION ALL SELECT %s" % ", ".join(["%s"] * len(fields))] * (num_values - 1)) return " ".join(res) class DatabaseWrapper(BaseDatabaseWrapper): vendor = 'sqlite' # SQLite requires LIKE statements to include an ESCAPE clause if the value # being escaped has a percent or underscore in it. # See http://www.sqlite.org/lang_expr.html for an explanation. 
operators = { 'exact': '= %s', 'iexact': "LIKE %s ESCAPE '\\'", 'contains': "LIKE %s ESCAPE '\\'", 'icontains': "LIKE %s ESCAPE '\\'", 'regex': 'REGEXP %s', 'iregex': "REGEXP '(?i)' || %s", 'gt': '> %s', 'gte': '>= %s', 'lt': '< %s', 'lte': '<= %s', 'startswith': "LIKE %s ESCAPE '\\'", 'endswith': "LIKE %s ESCAPE '\\'", 'istartswith': "LIKE %s ESCAPE '\\'", 'iendswith': "LIKE %s ESCAPE '\\'", } Database = Database def __init__(self, *args, **kwargs): super(DatabaseWrapper, self).__init__(*args, **kwargs) self.features = DatabaseFeatures(self) self.ops = DatabaseOperations(self) self.client = DatabaseClient(self) self.creation = DatabaseCreation(self) self.introspection = DatabaseIntrospection(self) self.validation = BaseDatabaseValidation(self) def get_connection_params(self): settings_dict = self.settings_dict if not settings_dict['NAME']: from django.core.exceptions import ImproperlyConfigured raise ImproperlyConfigured( "settings.DATABASES is improperly configured. " "Please supply the NAME value.") kwargs = { 'database': settings_dict['NAME'], 'detect_types': Database.PARSE_DECLTYPES | Database.PARSE_COLNAMES, } kwargs.update(settings_dict['OPTIONS']) # Always allow the underlying SQLite connection to be shareable # between multiple threads. The safe-guarding will be handled at a # higher level by the `BaseDatabaseWrapper.allow_thread_sharing` # property. This is necessary as the shareability is disabled by # default in pysqlite and it cannot be changed once a connection is # opened. if 'check_same_thread' in kwargs and kwargs['check_same_thread']: warnings.warn( 'The `check_same_thread` option was provided and set to ' 'True. It will be overriden with False. 
Use the ' '`DatabaseWrapper.allow_thread_sharing` property instead ' 'for controlling thread shareability.', RuntimeWarning ) kwargs.update({'check_same_thread': False}) return kwargs def get_new_connection(self, conn_params): conn = Database.connect(**conn_params) conn.create_function("django_date_extract", 2, _sqlite_date_extract) conn.create_function("django_date_trunc", 2, _sqlite_date_trunc) conn.create_function("django_datetime_extract", 3, _sqlite_datetime_extract) conn.create_function("django_datetime_trunc", 3, _sqlite_datetime_trunc) conn.create_function("regexp", 2, _sqlite_regexp) conn.create_function("django_format_dtdelta", 5, _sqlite_format_dtdelta) return conn def init_connection_state(self): pass def create_cursor(self): return self.connection.cursor(factory=SQLiteCursorWrapper) def close(self): self.validate_thread_sharing() # If database is in memory, closing the connection destroys the # database. To prevent accidental data loss, ignore close requests on # an in-memory db. if self.settings_dict['NAME'] != ":memory:": BaseDatabaseWrapper.close(self) def _savepoint_allowed(self): # When 'isolation_level' is not None, sqlite3 commits before each # savepoint; it's a bug. When it is None, savepoints don't make sense # because autocommit is enabled. The only exception is inside atomic # blocks. To work around that bug, on SQLite, atomic starts a # transaction explicitly rather than simply disable autocommit. return self.in_atomic_block def _set_autocommit(self, autocommit): if autocommit: level = None else: # sqlite3's internal default is ''. It's different from None. # See Modules/_sqlite/connection.c. level = '' # 'isolation_level' is a misleading API. # SQLite always runs at the SERIALIZABLE isolation level. 
self.connection.isolation_level = level def check_constraints(self, table_names=None): cursor = self.cursor() if table_names is None: table_names = self.introspection.table_names(cursor) for table_name in table_names: primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name) if not primary_key_column_name: continue key_columns = self.introspection.get_key_columns(cursor, table_name) for column_name, referenced_table_name, referenced_column_name in key_columns: cursor.execute(""" SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING LEFT JOIN `%s` as REFERRED ON (REFERRING.`%s` = REFERRED.`%s`) WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL""" % (primary_key_column_name, column_name, table_name, referenced_table_name, column_name, referenced_column_name, column_name, referenced_column_name)) for bad_row in cursor.fetchall(): raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid " "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s." 
% (table_name, bad_row[0], table_name, column_name, bad_row[1], referenced_table_name, referenced_column_name)) def is_usable(self): return True def _start_transaction_under_autocommit(self): self.cursor().execute("BEGIN") FORMAT_QMARK_REGEX = re.compile(r'(?<!%)%s') class SQLiteCursorWrapper(Database.Cursor): def execute(self, query, params=()): query = self.convert_query(query) return Database.Cursor.execute(self, query, params) def executemany(self, query, param_list): query = self.convert_query(query) return Database.Cursor.executemany(self, query, param_list) def convert_query(self, query): return FORMAT_QMARK_REGEX.sub('?', query).replace('%%', '%') def _sqlite_date_extract(lookup_type, dt): if dt is None: return None try: dt = util.typecast_timestamp(dt) except (ValueError, TypeError): return None if lookup_type == 'week_day': return (dt.isoweekday() % 7) + 1 else: return getattr(dt, lookup_type) def _sqlite_date_trunc(lookup_type, dt): try: dt = util.typecast_timestamp(dt) except (ValueError, TypeError): return None if lookup_type == 'year': return "%i-01-01" % dt.year elif lookup_type == 'month': return "%i-%02i-01" % (dt.year, dt.month) elif lookup_type == 'day': return "%i-%02i-%02i" % (dt.year, dt.month, dt.day) def _sqlite_datetime_extract(lookup_type, dt, tzname): if dt is None: return None try: dt = util.typecast_timestamp(dt) except (ValueError, TypeError): return None if tzname is not None: dt = timezone.localtime(dt, pytz.timezone(tzname)) if lookup_type == 'week_day': return (dt.isoweekday() % 7) + 1 else: return getattr(dt, lookup_type) def _sqlite_datetime_trunc(lookup_type, dt, tzname): try: dt = util.typecast_timestamp(dt) except (ValueError, TypeError): return None if tzname is not None: dt = timezone.localtime(dt, pytz.timezone(tzname)) if lookup_type == 'year': return "%i-01-01 00:00:00" % dt.year elif lookup_type == 'month': return "%i-%02i-01 00:00:00" % (dt.year, dt.month) elif lookup_type == 'day': return "%i-%02i-%02i 00:00:00" % 
(dt.year, dt.month, dt.day) elif lookup_type == 'hour': return "%i-%02i-%02i %02i:00:00" % (dt.year, dt.month, dt.day, dt.hour) elif lookup_type == 'minute': return "%i-%02i-%02i %02i:%02i:00" % (dt.year, dt.month, dt.day, dt.hour, dt.minute) elif lookup_type == 'second': return "%i-%02i-%02i %02i:%02i:%02i" % (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second) def _sqlite_format_dtdelta(dt, conn, days, secs, usecs): try: dt = util.typecast_timestamp(dt) delta = datetime.timedelta(int(days), int(secs), int(usecs)) if conn.strip() == '+': dt = dt + delta else: dt = dt - delta except (ValueError, TypeError): return None # typecast_timestamp returns a date or a datetime without timezone. # It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]" return str(dt) def _sqlite_regexp(re_pattern, re_string): return bool(re.search(re_pattern, re_string))
true
true
f70c38f2abdfbf52893c3f87e11b6b3559539623
817
py
Python
Desafio069 - Cadastro de Pessoas.py
tmoura1981/Python_Exercicios
c873e2758dfd9058d2c2d83b5b38b522c6264029
[ "MIT" ]
1
2021-11-25T11:19:59.000Z
2021-11-25T11:19:59.000Z
Desafio069 - Cadastro de Pessoas.py
tmoura1981/Python_Exercicios
c873e2758dfd9058d2c2d83b5b38b522c6264029
[ "MIT" ]
null
null
null
Desafio069 - Cadastro de Pessoas.py
tmoura1981/Python_Exercicios
c873e2758dfd9058d2c2d83b5b38b522c6264029
[ "MIT" ]
null
null
null
titulo = 'Cadastro de Pessoas' print(titulo.center(50, '=')) print('') idade = total = homens = mulheres = 0 sexo = '' while True: idade = int(input('Idade: ')) sexo = input('Sexo: [M] ou [F]? ').upper().strip()[0] while sexo not in 'MF': sexo = input('Sexo: [M] ou [F]? ').upper().strip()[0] resposta = input('Continuar? [S] ou [N] ').upper().strip()[0] while resposta not in 'SN': resposta = input('Continuar? [S] ou [N] ').upper().strip()[0] print('-' * 50) if idade >= 18: total += 1 if sexo == 'M': homens += 1 if sexo == 'F' and idade < 20: mulheres += 1 if resposta == 'N': break print(f'Total de maiores de 18 anos: {total}') print(f'Total de homens: {homens}') print(f'Total de mulheres menores de 20 anos: {mulheres}')
31.423077
69
0.549572
titulo = 'Cadastro de Pessoas' print(titulo.center(50, '=')) print('') idade = total = homens = mulheres = 0 sexo = '' while True: idade = int(input('Idade: ')) sexo = input('Sexo: [M] ou [F]? ').upper().strip()[0] while sexo not in 'MF': sexo = input('Sexo: [M] ou [F]? ').upper().strip()[0] resposta = input('Continuar? [S] ou [N] ').upper().strip()[0] while resposta not in 'SN': resposta = input('Continuar? [S] ou [N] ').upper().strip()[0] print('-' * 50) if idade >= 18: total += 1 if sexo == 'M': homens += 1 if sexo == 'F' and idade < 20: mulheres += 1 if resposta == 'N': break print(f'Total de maiores de 18 anos: {total}') print(f'Total de homens: {homens}') print(f'Total de mulheres menores de 20 anos: {mulheres}')
true
true
f70c39a91009422b412e87c735b11f70e09938c7
743
py
Python
paths.py
Leather128/Pygame-Pong
33f6d76049c24edb56532314e1b29f4536269733
[ "Apache-2.0" ]
null
null
null
paths.py
Leather128/Pygame-Pong
33f6d76049c24edb56532314e1b29f4536269733
[ "Apache-2.0" ]
null
null
null
paths.py
Leather128/Pygame-Pong
33f6d76049c24edb56532314e1b29f4536269733
[ "Apache-2.0" ]
null
null
null
import os # Finds path of any file in the assets folder # def findPath(folders, file, extension): # Checks if it's an array (or other type of list idk, basically this should do the job) # if(isinstance(folders, list)): # Default folder path, being nothing # folderPath = "" # Loops through all the folders # for x in folders: # Joins them together # folderPath = os.path.join(folderPath, x) return os.path.join("assets", folderPath, file + "." + extension) else: # Error handling, so the game doesn't just tell you that file doesn't exist # raise ValueError('The folder path you inputted, is not an array! Your folder path: "' + str(folders) + '"')
41.277778
115
0.631225
import os def findPath(folders, file, extension): if(isinstance(folders, list)): # Default folder path, being nothing # folderPath = "" # Loops through all the folders # for x in folders: # Joins them together # folderPath = os.path.join(folderPath, x) return os.path.join("assets", folderPath, file + "." + extension) else: # Error handling, so the game doesn't just tell you that file doesn't exist # raise ValueError('The folder path you inputted, is not an array! Your folder path: "' + str(folders) + '"')
true
true
f70c3a0de9e404646df16597de37a8fc1a1bd4bf
3,818
py
Python
setup.py
jimfulton/python-bigquery-sqlalchemy
0443d7267a9330cba095193b4f9635574c8f7b05
[ "MIT" ]
null
null
null
setup.py
jimfulton/python-bigquery-sqlalchemy
0443d7267a9330cba095193b4f9635574c8f7b05
[ "MIT" ]
null
null
null
setup.py
jimfulton/python-bigquery-sqlalchemy
0443d7267a9330cba095193b4f9635574c8f7b05
[ "MIT" ]
null
null
null
#!/usr/bin/env python # Copyright (c) 2017 The sqlalchemy-bigquery Authors # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in # the Software without restriction, including without limitation the rights to # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of # the Software, and to permit persons to whom the Software is furnished to do so, # subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. import io import itertools import os import re from setuptools import setup # Package metadata. 
name = "sqlalchemy-bigquery" description = "SQLAlchemy dialect for BigQuery" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" package_root = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(package_root, "sqlalchemy_bigquery", "version.py")) as f: version = re.search('__version__ = "([^"]+)"', f.read()).group(1) def readme(): with io.open("README.rst", "r", encoding="utf8") as f: return f.read() extras = dict( geography=["GeoAlchemy2", "shapely"], alembic=["alembic"], tests=["packaging", "pytz"], ) extras["all"] = set(itertools.chain.from_iterable(extras.values())) setup( name=name, version=version, description=description, long_description=readme(), long_description_content_type="text/x-rst", author="The Sqlalchemy-Bigquery Authors", author_email="googleapis-packages@google.com", packages=["sqlalchemy_bigquery"], url="https://github.com/googleapis/python-bigquery-sqlalchemy", keywords=["bigquery", "sqlalchemy"], classifiers=[ release_status, "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Operating System :: OS Independent", "Topic :: Database :: Front-Ends", ], platforms="Posix; MacOS X; Windows", install_requires=[ "google-api-core>=1.30.0", # Work-around bug in cloud core deps. # NOTE: Maintainers, please do not require google-auth>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. 
"google-cloud-bigquery>=2.24.1", "sqlalchemy>=1.2.0,<1.5.0dev", "future", ], extras_require=extras, python_requires=">=3.6, <3.10", tests_require=["packaging", "pytz"], entry_points={ "sqlalchemy.dialects": ["bigquery = sqlalchemy_bigquery:BigQueryDialect"] }, # Document that this replaces pybigquery, however, this isn't # enforced by pip, because doing so would allow rogue packages to # obsolete legitimate ones. obsoletes=["pybigquery"], )
37.80198
82
0.688318
import io import itertools import os import re from setuptools import setup name = "sqlalchemy-bigquery" description = "SQLAlchemy dialect for BigQuery" release_status = "Development Status :: 5 - Production/Stable" package_root = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(package_root, "sqlalchemy_bigquery", "version.py")) as f: version = re.search('__version__ = "([^"]+)"', f.read()).group(1) def readme(): with io.open("README.rst", "r", encoding="utf8") as f: return f.read() extras = dict( geography=["GeoAlchemy2", "shapely"], alembic=["alembic"], tests=["packaging", "pytz"], ) extras["all"] = set(itertools.chain.from_iterable(extras.values())) setup( name=name, version=version, description=description, long_description=readme(), long_description_content_type="text/x-rst", author="The Sqlalchemy-Bigquery Authors", author_email="googleapis-packages@google.com", packages=["sqlalchemy_bigquery"], url="https://github.com/googleapis/python-bigquery-sqlalchemy", keywords=["bigquery", "sqlalchemy"], classifiers=[ release_status, "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Operating System :: OS Independent", "Topic :: Database :: Front-Ends", ], platforms="Posix; MacOS X; Windows", install_requires=[ "google-api-core>=1.30.0", # Work-around bug in cloud core deps. # NOTE: Maintainers, please do not require google-auth>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-auth>=1.25.0,<3.0.0dev", # Work around pip wack. 
"google-cloud-bigquery>=2.24.1", "sqlalchemy>=1.2.0,<1.5.0dev", "future", ], extras_require=extras, python_requires=">=3.6, <3.10", tests_require=["packaging", "pytz"], entry_points={ "sqlalchemy.dialects": ["bigquery = sqlalchemy_bigquery:BigQueryDialect"] }, # Document that this replaces pybigquery, however, this isn't # enforced by pip, because doing so would allow rogue packages to # obsolete legitimate ones. obsoletes=["pybigquery"], )
true
true
f70c3a66baf4cad25be1fb1673d7a08bb0a51d03
11,737
py
Python
TUI/Inst/APOGEE/ShutterWdgSet.py
StarkillerX42/stui
668628cf7539e7d2be12846033141e4eb8616fe1
[ "BSD-3-Clause" ]
null
null
null
TUI/Inst/APOGEE/ShutterWdgSet.py
StarkillerX42/stui
668628cf7539e7d2be12846033141e4eb8616fe1
[ "BSD-3-Clause" ]
null
null
null
TUI/Inst/APOGEE/ShutterWdgSet.py
StarkillerX42/stui
668628cf7539e7d2be12846033141e4eb8616fe1
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python """APOGEE cold shutter control and status History: 2011-08-30 ROwen 2011-09-01 ROwen Added support for cancelling commands. 2012-11-14 ROwen Stop using Checkbutton indicatoron=False; it is no longer supported on MacOS X. 2015-11-03 ROwen Replace "== None" with "is None" and "!= None" with "is not None" to modernize the code. """ import Tkinter import RO.Constants import RO.Wdg import RO.TkUtil import RO.StringUtil import TUI.Models import TUI.Base.Wdg from . import BaseDeviceWdg class ShutterWdgSet(object): """Widgets to control APOGEE's cold shutter and the associated LEDs """ _ShutterCat = "shutter" _NumLEDs = 4 def __init__(self, gridder, statusBar, colSpan=3, helpURL=None): """Create a ShutterWdgSet for the APOGEE cold shutter and calibration LEDs Inputs: - gridder: an instance of RO.Wdg.Gridder; the widgets are gridded starting at the next row and default column - statusBar: status bar (to send commands) - colSpan: the number of columns to span - helpURL: path to an HTML help file or None Note: you may wish to call master.columnconfigure(n, weight=1) where n is the last column of this widget set so that the environment widget panel can fill available space without resizing the columns of other widgets. 
""" self.statusBar = statusBar self.helpURL = helpURL self.gridder = gridder master = self.gridder._master self.model = TUI.Models.getModel("apogee") self.showHideWdg = RO.Wdg.Checkbutton( master = master, text = "Shutter", callFunc = self._doShowHide, helpText = "Show cold shutter controls?", helpURL = helpURL, ) self.summaryWdg = RO.Wdg.StrLabel( master = master, anchor = "w", helpText = "Shutter status", helpURL = helpURL, ) gridder.gridWdg(self.showHideWdg, self.summaryWdg, sticky="w", colSpan=colSpan-1) # hidable frame showing the controls self.detailWdg = Tkinter.Frame( master = master, borderwidth = 1, relief = "solid", ) self.gridder.gridWdg(False, self.detailWdg, colSpan=colSpan, sticky="w", cat=self._ShutterCat) detailGridder = RO.Wdg.Gridder(self.detailWdg, sticky="w") self.shutterWdg = _ShutterWdg( master = self.detailWdg, statusBar = self.statusBar, helpURL = helpURL, ) detailGridder.gridWdg("Shutter", self.shutterWdg) self.ledWdg = _LEDWdg( master = self.detailWdg, statusBar = self.statusBar, numLEDs = self._NumLEDs, helpURL = helpURL, ) detailGridder.gridWdg("LEDs", self.ledWdg) self.model = TUI.Models.getModel("apogee") self.model.shutterIndexer.addCallback(self._updSummary) self.model.shutterLimitSwitch.addCallback(self._updSummary) self.model.shutterLED.addCallback(self._updSummary) self.showHideWdg.addCallback(self._doShowHide, callNow = True) def _doShowHide(self, wdg=None): argDict = { self._ShutterCat: self.showHideWdg.getBool(), } self.gridder.showHideWdg(**argDict) def _updSummary(self, *dumArgs): """Update collimator summary label """ severity = RO.Constants.sevNormal sumStr = "OK" isCurrent = self.model.shutterIndexer.isCurrent if self.model.shutterIndexer[0] == False: sumStr = "Off" severity = RO.Constants.sevError else: shutterStr, shutterSeverity = self.shutterWdg.getSummary() ledStr, ledSeverity = self.ledWdg.getSummary() sumStr = "%s; %s" % (shutterStr, ledStr) severity = max(shutterSeverity, ledSeverity) self.summaryWdg.set(sumStr, 
isCurrent=isCurrent, severity=severity) class _ShutterWdg(BaseDeviceWdg.BaseDeviceWdg): """A widget to open or close the cold shutter """ actor = "apogee" def __init__(self, master, statusBar, helpURL=None): BaseDeviceWdg.BaseDeviceWdg.__init__(self, master = master, actor = "apogee", statusBar = statusBar, helpURL = helpURL, ) self.shutterWdg = RO.Wdg.Checkbutton( master = self, onvalue = "Open", offvalue = "Closed", autoIsCurrent = True, showValue = True, callFunc = self.doShutter, helpText = "Open or close cold shutter", helpURL = helpURL, ) self.shutterWdg.pack(side="left") self.cancelBtn.pack(side="left") self.model = TUI.Models.getModel(self.actor) self.model.shutterLimitSwitch.addCallback(self.updateStatus) def doShutter(self, wdg=None): """Send a command to open or close the shutter """ doOpen = self.shutterWdg.getBool() if doOpen: cmdStr = "shutter open" else: cmdStr = "shutter close" self.doCmd(cmdStr) def enableButtons(self, dumCmd=None): """Enable or disable widgets, as appropriate """ isRunning = self.isRunning self.shutterWdg.setEnable(not isRunning) self.cancelBtn.setEnable(isRunning) def getSummary(self): """Return a string and severity summarizing the current state """ shutterLimits = tuple(self.model.shutterLimitSwitch[0:2]) return { (False, False): ("?", RO.Constants.sevWarning), (False, True): ("Closed", RO.Constants.sevNormal), (True, False): ("Open", RO.Constants.sevNormal), (True, True): ("Bad", RO.Constants.sevError), }.get(shutterLimits, ("?", RO.Constants.sevError)) def updateStatus(self, keyVar=None): """shutterLimitSwitch keyword callback """ keyVar = self.model.shutterLimitSwitch isCurrent = keyVar.isCurrent isOpen, isClosed = keyVar[0:2] with self.updateLock(): if None in (isOpen, isClosed): self.shutterWdg.setIsCurrent(isCurrent) return if isOpen and not isClosed: self.shutterWdg.setDefault(True) self.shutterWdg.set(True, isCurrent=isCurrent) elif isClosed and not isOpen: self.shutterWdg.setDefault(False) self.shutterWdg.set(False, 
isCurrent=isCurrent) else: self.shutterWdg.setIsCurrent(False) class _LEDWdg(BaseDeviceWdg.BaseDeviceWdg): def __init__(self, master, statusBar, numLEDs, helpURL=None): BaseDeviceWdg.BaseDeviceWdg.__init__(self, master = master, actor = "apogee", statusBar = statusBar, helpURL = helpURL, ) self.numLEDs = int(numLEDs) self.allOnMask = (1 << self.numLEDs) - 1 self.ledWdgSet = [] for ledInd in range(self.numLEDs): ledName = str(ledInd + 1) ledWdg = RO.Wdg.Checkbutton( master = self, text = "", callFunc = self.toggleOne, autoIsCurrent = True, helpText = "Turn LED %s on or off" % (ledName,), helpURL = helpURL, ) ledWdg.pack(side="left") self.ledWdgSet.append(ledWdg) self.allOffBtn = RO.Wdg.Button( master = self, text = "All Off", callFunc = self.turnAllOff, helpText = "Turn all LEDs off", helpURL = helpURL, ) self.allOffBtn.pack(side="left") self.allOnBtn = RO.Wdg.Button( master = self, text = "All On", callFunc = self.turnAllOn, helpText = "Turn all LEDs on", helpURL = helpURL, ) self.allOnBtn.pack(side="left") self.cancelBtn.pack(side="left") self.model = TUI.Models.getModel(self.actor) self.model.shutterLED.addCallback(self.updateStatus) def setLEDs(self, ledMask, setWdg): """Send a command to turn on and off the specified LEDs Inputs: - ledMask: a bit mask of which LEDs should be on (0=all off) - setWdg: it True then set the widgets; set True for buttons that control multiple widgets; set False if the user has toggled the button manually """ cmdStr = "shutter ledControl=%d" % (ledMask,) self.doCmd(cmdStr) if setWdg and not self.updatingStatus: self._setLEDWdg(ledMask) def enableButtons(self, dumCmd=None): """Enable or disable widgets, as appropriate """ isRunning = self.isRunning self.cancelBtn.setEnable(isRunning) for wdg in self.ledWdgSet: wdg.setEnable(not isRunning) self.allOffBtn.setEnable(not isRunning) self.allOnBtn.setEnable(not isRunning) def turnAllOff(self, wdg=None): """Turn all LEDs off """ self.setLEDs(0, setWdg=True) def turnAllOn(self, wdg=None): 
"""Turn all LEDs on """ self.setLEDs(self.allOnMask, setWdg=True) def toggleOne(self, wdg=None): """Toggle one LED on or off """ ledMask = 0 for ind, wdg in enumerate(self.ledWdgSet): if wdg.getBool(): ledMask += 1 << ind self.setLEDs(ledMask, setWdg=False) def getSummary(self): """Return a summary string and associated severity """ severity = RO.Constants.sevWarning ledMask = self.model.shutterLED[0] if ledMask is None: sumStr = "LEDs ?" elif ledMask == 0: sumStr = "LEDs all off" severity = RO.Constants.sevNormal elif ledMask == self.allOnMask: sumStr = "LEDs ALL ON" else: onList = [str(ind+1) for ind in range(self.numLEDs) if ledMask & (1 << ind) != 0] if len(onList) == 1: pfx = "LED" else: pfx = "LEDs" sumStr = "%s %s ON" % (pfx, " ".join(onList)) return sumStr, severity def updateStatus(self, keyVar=None): """shutterLED keyword callback """ keyVar = self.model.shutterLED isCurrent = keyVar.isCurrent ledMask = keyVar[0] if ledMask is None: for wdg in self.ledWdgSet: wdg.setIsCurrent(isCurrent) return with self.updateLock(): for ind, wdg in enumerate(self.ledWdgSet): isOn = ledMask & (1 << ind) != 0 wdg.setDefault(isOn) wdg.setBool(isOn, isCurrent=isCurrent) def _setLEDWdg(self, ledMask): """Set LED widgets to a particular state """ with self.updateLock(): for ind, wdg in enumerate(self.ledWdgSet): isOn = ledMask & (1 << ind) != 0 wdg.setBool(isOn) if __name__ == "__main__": from . import TestData tuiModel = TestData.tuiModel root = tuiModel.tkRoot statusBar = TUI.Base.Wdg.StatusBar(root) testFrame = Tkinter.Frame(root) gridder = RO.Wdg.Gridder(testFrame) shutterWdgSet = ShutterWdgSet(gridder, statusBar) testFrame.pack(side="top", expand=True) testFrame.columnconfigure(2, weight=1) statusBar.pack(side="top", expand=True, fill="x") TestData.start() tuiModel.reactor.run()
32.693593
108
0.583113
import Tkinter import RO.Constants import RO.Wdg import RO.TkUtil import RO.StringUtil import TUI.Models import TUI.Base.Wdg from . import BaseDeviceWdg class ShutterWdgSet(object): _ShutterCat = "shutter" _NumLEDs = 4 def __init__(self, gridder, statusBar, colSpan=3, helpURL=None): self.statusBar = statusBar self.helpURL = helpURL self.gridder = gridder master = self.gridder._master self.model = TUI.Models.getModel("apogee") self.showHideWdg = RO.Wdg.Checkbutton( master = master, text = "Shutter", callFunc = self._doShowHide, helpText = "Show cold shutter controls?", helpURL = helpURL, ) self.summaryWdg = RO.Wdg.StrLabel( master = master, anchor = "w", helpText = "Shutter status", helpURL = helpURL, ) gridder.gridWdg(self.showHideWdg, self.summaryWdg, sticky="w", colSpan=colSpan-1) self.detailWdg = Tkinter.Frame( master = master, borderwidth = 1, relief = "solid", ) self.gridder.gridWdg(False, self.detailWdg, colSpan=colSpan, sticky="w", cat=self._ShutterCat) detailGridder = RO.Wdg.Gridder(self.detailWdg, sticky="w") self.shutterWdg = _ShutterWdg( master = self.detailWdg, statusBar = self.statusBar, helpURL = helpURL, ) detailGridder.gridWdg("Shutter", self.shutterWdg) self.ledWdg = _LEDWdg( master = self.detailWdg, statusBar = self.statusBar, numLEDs = self._NumLEDs, helpURL = helpURL, ) detailGridder.gridWdg("LEDs", self.ledWdg) self.model = TUI.Models.getModel("apogee") self.model.shutterIndexer.addCallback(self._updSummary) self.model.shutterLimitSwitch.addCallback(self._updSummary) self.model.shutterLED.addCallback(self._updSummary) self.showHideWdg.addCallback(self._doShowHide, callNow = True) def _doShowHide(self, wdg=None): argDict = { self._ShutterCat: self.showHideWdg.getBool(), } self.gridder.showHideWdg(**argDict) def _updSummary(self, *dumArgs): severity = RO.Constants.sevNormal sumStr = "OK" isCurrent = self.model.shutterIndexer.isCurrent if self.model.shutterIndexer[0] == False: sumStr = "Off" severity = RO.Constants.sevError else: shutterStr, 
shutterSeverity = self.shutterWdg.getSummary() ledStr, ledSeverity = self.ledWdg.getSummary() sumStr = "%s; %s" % (shutterStr, ledStr) severity = max(shutterSeverity, ledSeverity) self.summaryWdg.set(sumStr, isCurrent=isCurrent, severity=severity) class _ShutterWdg(BaseDeviceWdg.BaseDeviceWdg): actor = "apogee" def __init__(self, master, statusBar, helpURL=None): BaseDeviceWdg.BaseDeviceWdg.__init__(self, master = master, actor = "apogee", statusBar = statusBar, helpURL = helpURL, ) self.shutterWdg = RO.Wdg.Checkbutton( master = self, onvalue = "Open", offvalue = "Closed", autoIsCurrent = True, showValue = True, callFunc = self.doShutter, helpText = "Open or close cold shutter", helpURL = helpURL, ) self.shutterWdg.pack(side="left") self.cancelBtn.pack(side="left") self.model = TUI.Models.getModel(self.actor) self.model.shutterLimitSwitch.addCallback(self.updateStatus) def doShutter(self, wdg=None): doOpen = self.shutterWdg.getBool() if doOpen: cmdStr = "shutter open" else: cmdStr = "shutter close" self.doCmd(cmdStr) def enableButtons(self, dumCmd=None): isRunning = self.isRunning self.shutterWdg.setEnable(not isRunning) self.cancelBtn.setEnable(isRunning) def getSummary(self): shutterLimits = tuple(self.model.shutterLimitSwitch[0:2]) return { (False, False): ("?", RO.Constants.sevWarning), (False, True): ("Closed", RO.Constants.sevNormal), (True, False): ("Open", RO.Constants.sevNormal), (True, True): ("Bad", RO.Constants.sevError), }.get(shutterLimits, ("?", RO.Constants.sevError)) def updateStatus(self, keyVar=None): keyVar = self.model.shutterLimitSwitch isCurrent = keyVar.isCurrent isOpen, isClosed = keyVar[0:2] with self.updateLock(): if None in (isOpen, isClosed): self.shutterWdg.setIsCurrent(isCurrent) return if isOpen and not isClosed: self.shutterWdg.setDefault(True) self.shutterWdg.set(True, isCurrent=isCurrent) elif isClosed and not isOpen: self.shutterWdg.setDefault(False) self.shutterWdg.set(False, isCurrent=isCurrent) else: 
self.shutterWdg.setIsCurrent(False) class _LEDWdg(BaseDeviceWdg.BaseDeviceWdg): def __init__(self, master, statusBar, numLEDs, helpURL=None): BaseDeviceWdg.BaseDeviceWdg.__init__(self, master = master, actor = "apogee", statusBar = statusBar, helpURL = helpURL, ) self.numLEDs = int(numLEDs) self.allOnMask = (1 << self.numLEDs) - 1 self.ledWdgSet = [] for ledInd in range(self.numLEDs): ledName = str(ledInd + 1) ledWdg = RO.Wdg.Checkbutton( master = self, text = "", callFunc = self.toggleOne, autoIsCurrent = True, helpText = "Turn LED %s on or off" % (ledName,), helpURL = helpURL, ) ledWdg.pack(side="left") self.ledWdgSet.append(ledWdg) self.allOffBtn = RO.Wdg.Button( master = self, text = "All Off", callFunc = self.turnAllOff, helpText = "Turn all LEDs off", helpURL = helpURL, ) self.allOffBtn.pack(side="left") self.allOnBtn = RO.Wdg.Button( master = self, text = "All On", callFunc = self.turnAllOn, helpText = "Turn all LEDs on", helpURL = helpURL, ) self.allOnBtn.pack(side="left") self.cancelBtn.pack(side="left") self.model = TUI.Models.getModel(self.actor) self.model.shutterLED.addCallback(self.updateStatus) def setLEDs(self, ledMask, setWdg): cmdStr = "shutter ledControl=%d" % (ledMask,) self.doCmd(cmdStr) if setWdg and not self.updatingStatus: self._setLEDWdg(ledMask) def enableButtons(self, dumCmd=None): isRunning = self.isRunning self.cancelBtn.setEnable(isRunning) for wdg in self.ledWdgSet: wdg.setEnable(not isRunning) self.allOffBtn.setEnable(not isRunning) self.allOnBtn.setEnable(not isRunning) def turnAllOff(self, wdg=None): self.setLEDs(0, setWdg=True) def turnAllOn(self, wdg=None): self.setLEDs(self.allOnMask, setWdg=True) def toggleOne(self, wdg=None): ledMask = 0 for ind, wdg in enumerate(self.ledWdgSet): if wdg.getBool(): ledMask += 1 << ind self.setLEDs(ledMask, setWdg=False) def getSummary(self): severity = RO.Constants.sevWarning ledMask = self.model.shutterLED[0] if ledMask is None: sumStr = "LEDs ?" 
elif ledMask == 0: sumStr = "LEDs all off" severity = RO.Constants.sevNormal elif ledMask == self.allOnMask: sumStr = "LEDs ALL ON" else: onList = [str(ind+1) for ind in range(self.numLEDs) if ledMask & (1 << ind) != 0] if len(onList) == 1: pfx = "LED" else: pfx = "LEDs" sumStr = "%s %s ON" % (pfx, " ".join(onList)) return sumStr, severity def updateStatus(self, keyVar=None): keyVar = self.model.shutterLED isCurrent = keyVar.isCurrent ledMask = keyVar[0] if ledMask is None: for wdg in self.ledWdgSet: wdg.setIsCurrent(isCurrent) return with self.updateLock(): for ind, wdg in enumerate(self.ledWdgSet): isOn = ledMask & (1 << ind) != 0 wdg.setDefault(isOn) wdg.setBool(isOn, isCurrent=isCurrent) def _setLEDWdg(self, ledMask): with self.updateLock(): for ind, wdg in enumerate(self.ledWdgSet): isOn = ledMask & (1 << ind) != 0 wdg.setBool(isOn) if __name__ == "__main__": from . import TestData tuiModel = TestData.tuiModel root = tuiModel.tkRoot statusBar = TUI.Base.Wdg.StatusBar(root) testFrame = Tkinter.Frame(root) gridder = RO.Wdg.Gridder(testFrame) shutterWdgSet = ShutterWdgSet(gridder, statusBar) testFrame.pack(side="top", expand=True) testFrame.columnconfigure(2, weight=1) statusBar.pack(side="top", expand=True, fill="x") TestData.start() tuiModel.reactor.run()
true
true
f70c3b84214c735184f59a3de3b8b210a112ae56
2,142
py
Python
nnutil2/layers/segment.py
aroig/nnutil2
1fc77df351d4eee1166688e25a94287a5cfa27c4
[ "BSD-3-Clause" ]
null
null
null
nnutil2/layers/segment.py
aroig/nnutil2
1fc77df351d4eee1166688e25a94287a5cfa27c4
[ "BSD-3-Clause" ]
3
2020-11-13T18:33:29.000Z
2021-08-25T15:55:57.000Z
nnutil2/layers/segment.py
aroig/nnutil2
1fc77df351d4eee1166688e25a94287a5cfa27c4
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # # nnutil2 - Tensorflow utilities for training neural networks # Copyright (c) 2019, Abdó Roig-Maranges <abdo.roig@gmail.com> # # This file is part of 'nnutil2'. # # This file may be modified and distributed under the terms of the 3-clause BSD # license. See the LICENSE file for details. from typing import List import tensorflow as tf from ..util import kwargs_for from .layer import Layer class Segment(Layer): """A sequential collection of layers""" def __init__(self, layers: List[Layer] = [], activation=None, **kwargs): super(Segment, self).__init__(**kwargs) self._segment_layers = layers self._segment_activation = tf.keras.activations.get(activation) self._segment_states = [] def get_config(self): config = { 'layers': [ly.get_config() for ly in self._layers], 'activation': self._segment_activation } base_config = super(Segment, self).get_config() return dict(list(base_config.items()) + list(config.items())) def call(self, inputs, **kwargs): x = inputs self._segment_states.append(x) for l in self._segment_layers: layer_kwargs = kwargs_for(kwargs, l.call) x = l(x, **layer_kwargs) self._segment_states.append(x) if self._segment_activation is not None: x = self._segment_activation(x) self._segment_states.append(x) return x def compute_output_shape(self, input_shape): shape = input_shape for l in self._segment_layers: shape = l.compute_output_shape(shape) return shape @property def flat_layers(self): layers = [] def add_layers(ly): if isinstance(ly, Segment): for ly2 in ly.layers: add_layers(ly2) else: layers.append(ly) add_layers(self) return layers @property def layers(self): return self._segment_layers @property def states(self): return self._segment_states
27.113924
79
0.618581
from typing import List import tensorflow as tf from ..util import kwargs_for from .layer import Layer class Segment(Layer): def __init__(self, layers: List[Layer] = [], activation=None, **kwargs): super(Segment, self).__init__(**kwargs) self._segment_layers = layers self._segment_activation = tf.keras.activations.get(activation) self._segment_states = [] def get_config(self): config = { 'layers': [ly.get_config() for ly in self._layers], 'activation': self._segment_activation } base_config = super(Segment, self).get_config() return dict(list(base_config.items()) + list(config.items())) def call(self, inputs, **kwargs): x = inputs self._segment_states.append(x) for l in self._segment_layers: layer_kwargs = kwargs_for(kwargs, l.call) x = l(x, **layer_kwargs) self._segment_states.append(x) if self._segment_activation is not None: x = self._segment_activation(x) self._segment_states.append(x) return x def compute_output_shape(self, input_shape): shape = input_shape for l in self._segment_layers: shape = l.compute_output_shape(shape) return shape @property def flat_layers(self): layers = [] def add_layers(ly): if isinstance(ly, Segment): for ly2 in ly.layers: add_layers(ly2) else: layers.append(ly) add_layers(self) return layers @property def layers(self): return self._segment_layers @property def states(self): return self._segment_states
true
true
f70c3bc48d5319be629fe2d74c0f6f5b9d4ad98a
1,556
py
Python
setup.py
KarstenSchulz/twodolib
b9b2f0f23e5aeaf5edf5f5591319b6b3aea824d8
[ "ISC" ]
10
2015-09-24T02:23:24.000Z
2020-03-08T09:21:20.000Z
setup.py
KarstenSchulz/twodolib
b9b2f0f23e5aeaf5edf5f5591319b6b3aea824d8
[ "ISC" ]
7
2015-09-15T15:45:00.000Z
2020-08-21T07:04:44.000Z
setup.py
KarstenSchulz/twodolib
b9b2f0f23e5aeaf5edf5f5591319b6b3aea824d8
[ "ISC" ]
2
2017-07-03T14:15:50.000Z
2019-08-30T13:58:39.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- """Setup script for twodolib.""" try: from setuptools import setup except ImportError: from distutils.core import setup with open('README.rst') as readme_file: readme = readme_file.read() with open('HISTORY.rst') as history_file: history = history_file.read().replace('.. :changelog:', '') test_requirements = ['tox', ] setup( name='twodolib', version='0.5.4', description="Functions to manage the 2DoApp from the command line.", long_description=readme + '\n\n' + history, author="Karsten Schulz", author_email='github@karstenschulz.biz', url='https://github.com/KarstenSchulz/twodolib', packages=[ 'twodolib', ], package_dir={'twodolib': 'twodolib'}, include_package_data=True, entry_points={ 'console_scripts': [ 'task2do = twodolib.cli:main' ] }, # install_requires=requirements, license="ISCL", zip_safe=False, keywords='twodolib tool task2do task-management', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: MacOS X', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: ISC License (ISCL)', 'Natural Language :: English', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Utilities', ], test_suite='tests', tests_require=test_requirements )
28.290909
72
0.621465
try: from setuptools import setup except ImportError: from distutils.core import setup with open('README.rst') as readme_file: readme = readme_file.read() with open('HISTORY.rst') as history_file: history = history_file.read().replace('.. :changelog:', '') test_requirements = ['tox', ] setup( name='twodolib', version='0.5.4', description="Functions to manage the 2DoApp from the command line.", long_description=readme + '\n\n' + history, author="Karsten Schulz", author_email='github@karstenschulz.biz', url='https://github.com/KarstenSchulz/twodolib', packages=[ 'twodolib', ], package_dir={'twodolib': 'twodolib'}, include_package_data=True, entry_points={ 'console_scripts': [ 'task2do = twodolib.cli:main' ] }, license="ISCL", zip_safe=False, keywords='twodolib tool task2do task-management', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Environment :: MacOS X', 'Intended Audience :: End Users/Desktop', 'License :: OSI Approved :: ISC License (ISCL)', 'Natural Language :: English', 'Operating System :: MacOS :: MacOS X', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Topic :: Utilities', ], test_suite='tests', tests_require=test_requirements )
true
true
f70c3bf1f81136316a9893d94d4bb86d634c3446
13,790
py
Python
old_version_python/window.py
eweca-d/DysonSphereProgramCalculator
935e3f15aeba7b10f716bee54d98b36d6fca771c
[ "MIT" ]
32
2021-02-07T04:35:52.000Z
2021-12-14T07:02:27.000Z
old_version_python/window.py
ladeca-d/DysonSphereProgramCalculator
935e3f15aeba7b10f716bee54d98b36d6fca771c
[ "MIT" ]
null
null
null
old_version_python/window.py
ladeca-d/DysonSphereProgramCalculator
935e3f15aeba7b10f716bee54d98b36d6fca771c
[ "MIT" ]
11
2021-02-08T11:14:17.000Z
2021-12-17T10:13:54.000Z
from PySide2.QtWidgets import QPushButton, QMainWindow, QLabel, QLineEdit, QGroupBox from math import ceil import source class MainWindow(QMainWindow): def __init__(self, screen_width, screen_height): self.screen_width = screen_width self.screen_height = screen_height self.screen_ratio = screen_width / 3840 self.half_screen_ratio = 0.45 + self.screen_ratio / 2 self.production_speed_ratio = 1 self.window = QMainWindow() self.window.resize(self.screen_width, self.screen_height) self.window.setWindowTitle('戴森球计划产量计算器 ver.0.1') self.grid_width = 75 * self.screen_ratio self.grid_height = 50 * self.screen_ratio self.init_bias = 50 * self.screen_ratio self.interval = 0 * self.screen_ratio self.box_width = self.grid_width * 4 + self.interval + 5 * self.screen_ratio self.box_height = self.grid_height * 2 + self.init_bias + 5 * self.screen_ratio # Subtitle: app name - author self.subtitle_font_size = 50 * self.screen_ratio if self.screen_ratio > 0.7: self.subtitle_font_size = 50 * self.screen_ratio / 1.5 subtitle = QLabel(self.window) subtitle.setText('戴森球计划 材料生产计算器 -- by 魂月') subtitle.setStyleSheet('QLabel {font: 75 ' + str(int(self.subtitle_font_size)) + 'pt "宋体";}') subtitle.move(1000 * self.screen_ratio, int(25 * self.screen_ratio)) subtitle.resize(1840 * self.screen_ratio, self.box_height * self.screen_ratio) # Bottom: 取整机器数量 self.button = QPushButton('取整机器数量', self.window) self.button.move(2840 * self.screen_ratio, int(25 * self.screen_ratio) + int(self.box_height / 3)) self.button.resize(400 * self.screen_ratio, int(self.box_height / 3)) self.button.setStyleSheet('QPushButton {font: ' + str(int(self.subtitle_font_size / 2)) + 'pt "宋体";}') self.button.clicked.connect(self.ceil_machine_number) self.ox = (self.screen_width - 12 * self.box_width) / 2 self.oy = self.box_height + 50 * self.screen_ratio self.font_size = 14 * self.half_screen_ratio self.line_edit_font_size = self.font_size * 0.9 * 0.75 self.element = source.element self.production = source.production 
self.supporter = source.support self.bi_material = source.bi_material self.sorted_element = source.sorted_element self.element_box = [[[None, None, None, None] for _ in range(len(self.element[0]))] for _ in range(len(self.element))] self.element_amount = [[[0, 0, 0, 0] for _ in range(len(self.element[0]))] for _ in range(len(self.element))] self.table_gen() for resource in self.sorted_element: i, j = self.get_idx(resource) for k in range(4): self.element_box[i][j][k].editingFinished.connect(self.update_element_amount) def table_gen(self): nrows = len(self.element) ncols = len(self.element[0]) for i in range(nrows): for j in range(ncols): foo = self.box_gen(self.ox + j * self.box_width, self.oy + i * self.box_height, self.element[i][j]) if len(foo) == 4: for k in range(4): self.element_box[i][j][k] = foo[k] def box_gen(self, x, y, resource=''): group_box = QGroupBox(self.window) group_box.move(x, y) group_box.resize(self.box_width, self.box_height) if resource == '': return [] group_box.setTitle('') group_box.setStyleSheet('QGroupBox { background-color: \ rgb(255, 255, 255); border: 3px solid rgb(122, 255, 100); } \ QGroupBox::title{font: 75 ' + str(100 * self.screen_ratio) + 'pt "宋体"; color: rgb(255, 0, 0)}') label_again = QLabel(group_box) label_again.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"; color: rgb(255, 0, 0)}') label_again.setText(resource) label_again.move(int(self.grid_width * 0.7), 5 * self.screen_ratio) label_again.resize(int(self.grid_width * 3.3), self.init_bias - 5) product_label00 = QLabel(group_box) product_label00.setText('产量') product_label00.move(3, self.init_bias) product_label00.resize(self.grid_width, self.grid_height) product_label00.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"}') product00 = QLineEdit(group_box) product00.setText('0') product00.move(self.grid_width, self.init_bias) product00.resize(self.grid_width, self.grid_height) product00.setEnabled(False) product00.setStyleSheet('QLineEdit 
{font: ' + str(self.line_edit_font_size) + 'pt "宋体"}') product_label10 = QLabel(group_box) product_label10.setText('额外') product_label10.move(3, self.grid_height + self.init_bias) product_label10.resize(self.grid_width, self.grid_height) product_label10.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"}') product10 = QLineEdit(group_box) product10.setText('0') product10.move(self.grid_width, self.grid_height + self.init_bias) product10.resize(self.grid_width, self.grid_height) product10.setStyleSheet('QLineEdit {font: ' + str(self.line_edit_font_size) + 'pt "宋体"}') product_label01 = QLabel(group_box) product_label01.setText('机器') product_label01.move(self.grid_width * 2 + self.interval, self.init_bias) product_label01.resize(self.grid_width, self.grid_height) product_label01.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"}') product01 = QLineEdit(group_box) product01.setText('0.0') product01.move(self.grid_width * 3 + self.interval, self.init_bias) product01.resize(self.grid_width, self.grid_height) product01.setStyleSheet('QLineEdit {font: ' + str(self.line_edit_font_size) + 'pt "宋体"}') product01.setEnabled(False) product_label11 = QLabel(group_box) product_label11.setText('已有') product_label11.move(self.grid_width * 2 + self.interval, self.grid_height + self.init_bias) product_label11.resize(self.grid_width, self.grid_height) product_label11.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"}') product11 = QLineEdit(group_box) product11.setText('0') product11.move(self.grid_width * 3 + self.interval, self.grid_height + self.init_bias) product11.resize(self.grid_width, self.grid_height) product11.setStyleSheet('QLineEdit {font: ' + str(self.line_edit_font_size) + 'pt "宋体"}') if resource in self.supporter: product11.setEnabled(True) else: product11.setEnabled(False) return [product00, product01, product10, product11] # update the window by the values of the self.element_amount. 
def update_view(self, is_int=[True, False, True, True]): for resource in self.sorted_element: i, j = self.get_idx(resource) for k in range(4): amount = round(self.element_amount[i][j][k], 1) if is_int[k]: amount = int(self.element_amount[i][j][k]) self.element_box[i][j][k].setText(str(amount)) def get_idx(self, resource): idx = None if resource != '': for i in range(len(self.element)): for j in range(len(self.element[0])): if resource == self.element[i][j]: idx = [i, j] return idx def produce_resource(self, resource, increase_production_number): # Add resource amount in self.element_amount. idx = self.get_idx(resource) if not idx: exit(1) else: i, j = idx self.element_amount[i][j][0] += increase_production_number production_speed = self.production[resource][0][0] self.element_amount[i][j][1] += increase_production_number / production_speed # Start to product required amount of the resource. component = self.production[resource][1:] if not component: return for obj_resource in component: production_name = obj_resource[0] production_number = increase_production_number * obj_resource[1] self.produce_resource(production_name, production_number) def calculate_supporter(self): for supporter, properties in self.supporter.items(): i, j = self.get_idx(supporter) amount = self.element_amount[i][j][3] for production in properties: i, j = self.get_idx(production[0]) production_amount = self.element_amount[i][j][0] convert_amount_to_production_amount = amount * production[1] need_negative_production = convert_amount_to_production_amount - production_amount if need_negative_production > 0: self.produce_resource(production[0], -1 * production_amount) else: self.produce_resource(production[0], -1 * convert_amount_to_production_amount) def calculate_bi_raw_material(self): # Calculate the need of the bi_raw_materials. 
for material, properties in self.bi_material.items(): # production1 production1 = properties[0][0] i, j = self.get_idx(production1) production1_amount = properties[0][1] need_production1_amount = self.element_amount[i][j][0] need_material_amount1 = need_production1_amount / production1_amount # production2 production2 = properties[1][0] i, j = self.get_idx(production2) production2_amount = properties[1][1] need_production2_amount = self.element_amount[i][j][0] need_material_amount2 = need_production2_amount / production2_amount # Calculate the need of the material need_material_amount = max(need_material_amount1, need_material_amount2) i, j = self.get_idx(material) self.element_amount[i][j][0] = need_material_amount material_production_speed = self.production[material][0][0] self.element_amount[i][j][1] = need_material_amount / material_production_speed def update_element_amount(self, has_supporter=True): # Read all LineEdit boxes. for resource in self.sorted_element: i, j = self.get_idx(resource) for k in range(4): input_value = self.element_box[i][j][k].text() if k == 0 or k == 1 or input_value == '': self.element_amount[i][j][k] = 0.0 else: self.element_amount[i][j][k] = float(input_value) # Produce the required amount of all resources. for resource in self.sorted_element: i, j = self.get_idx(resource) production_amount = self.element_amount[i][j][2] - self.element_amount[i][j][3] if production_amount < 0: self.produce_resource(resource, 0) else: self.produce_resource(resource, production_amount) # Calculate the second product of the special supporter. if has_supporter: self.calculate_supporter() # Calculate the need of the bi_raw_material. self.calculate_bi_raw_material() # Update the view of the app. self.update_view() def ceil_machine_number(self): # Re-update element amount without considering supporter. self.update_element_amount(False) # Calculate supporter. 
supporter_stack = dict() for support, products in self.supporter.items(): i, j = self.get_idx(support) support_amount = self.element_amount[i][j][3] for product in products: product_name = product[0] product_amount = product[1] supporter_stack[product_name] = support_amount * product_amount # Ceil machine amount and produce the required amount of the resources. for resource in self.sorted_element: if resource not in self.supporter: i, j = self.get_idx(resource) production_speed = self.production[resource][0][0] if resource in supporter_stack: cur_resource_amount = self.element_amount[i][j][0] real_resource_amount = cur_resource_amount - supporter_stack[resource] if real_resource_amount > 0: cur_machine_amount = real_resource_amount / production_speed new_machine_amount = ceil(cur_machine_amount) else: new_machine_amount = 0 else: cur_machine_amount = self.element_amount[i][j][1] new_machine_amount = ceil(cur_machine_amount) cur_resource_amount = self.element_amount[i][j][0] incre_resource_amount = new_machine_amount * production_speed - cur_resource_amount self.produce_resource(resource, incre_resource_amount) self.element_amount[i][j][1] = new_machine_amount # Calculate the need of the bi_raw_material. self.calculate_bi_raw_material() # Update the view of the app. # Production amount is allowed to be float since its unit is piece/min. self.update_view([False, True, True, True]) def show(self): self.window.show()
48.385965
126
0.626613
from PySide2.QtWidgets import QPushButton, QMainWindow, QLabel, QLineEdit, QGroupBox from math import ceil import source class MainWindow(QMainWindow): def __init__(self, screen_width, screen_height): self.screen_width = screen_width self.screen_height = screen_height self.screen_ratio = screen_width / 3840 self.half_screen_ratio = 0.45 + self.screen_ratio / 2 self.production_speed_ratio = 1 self.window = QMainWindow() self.window.resize(self.screen_width, self.screen_height) self.window.setWindowTitle('戴森球计划产量计算器 ver.0.1') self.grid_width = 75 * self.screen_ratio self.grid_height = 50 * self.screen_ratio self.init_bias = 50 * self.screen_ratio self.interval = 0 * self.screen_ratio self.box_width = self.grid_width * 4 + self.interval + 5 * self.screen_ratio self.box_height = self.grid_height * 2 + self.init_bias + 5 * self.screen_ratio self.subtitle_font_size = 50 * self.screen_ratio if self.screen_ratio > 0.7: self.subtitle_font_size = 50 * self.screen_ratio / 1.5 subtitle = QLabel(self.window) subtitle.setText('戴森球计划 材料生产计算器 -- by 魂月') subtitle.setStyleSheet('QLabel {font: 75 ' + str(int(self.subtitle_font_size)) + 'pt "宋体";}') subtitle.move(1000 * self.screen_ratio, int(25 * self.screen_ratio)) subtitle.resize(1840 * self.screen_ratio, self.box_height * self.screen_ratio) self.button = QPushButton('取整机器数量', self.window) self.button.move(2840 * self.screen_ratio, int(25 * self.screen_ratio) + int(self.box_height / 3)) self.button.resize(400 * self.screen_ratio, int(self.box_height / 3)) self.button.setStyleSheet('QPushButton {font: ' + str(int(self.subtitle_font_size / 2)) + 'pt "宋体";}') self.button.clicked.connect(self.ceil_machine_number) self.ox = (self.screen_width - 12 * self.box_width) / 2 self.oy = self.box_height + 50 * self.screen_ratio self.font_size = 14 * self.half_screen_ratio self.line_edit_font_size = self.font_size * 0.9 * 0.75 self.element = source.element self.production = source.production self.supporter = source.support self.bi_material = 
source.bi_material self.sorted_element = source.sorted_element self.element_box = [[[None, None, None, None] for _ in range(len(self.element[0]))] for _ in range(len(self.element))] self.element_amount = [[[0, 0, 0, 0] for _ in range(len(self.element[0]))] for _ in range(len(self.element))] self.table_gen() for resource in self.sorted_element: i, j = self.get_idx(resource) for k in range(4): self.element_box[i][j][k].editingFinished.connect(self.update_element_amount) def table_gen(self): nrows = len(self.element) ncols = len(self.element[0]) for i in range(nrows): for j in range(ncols): foo = self.box_gen(self.ox + j * self.box_width, self.oy + i * self.box_height, self.element[i][j]) if len(foo) == 4: for k in range(4): self.element_box[i][j][k] = foo[k] def box_gen(self, x, y, resource=''): group_box = QGroupBox(self.window) group_box.move(x, y) group_box.resize(self.box_width, self.box_height) if resource == '': return [] group_box.setTitle('') group_box.setStyleSheet('QGroupBox { background-color: \ rgb(255, 255, 255); border: 3px solid rgb(122, 255, 100); } \ QGroupBox::title{font: 75 ' + str(100 * self.screen_ratio) + 'pt "宋体"; color: rgb(255, 0, 0)}') label_again = QLabel(group_box) label_again.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"; color: rgb(255, 0, 0)}') label_again.setText(resource) label_again.move(int(self.grid_width * 0.7), 5 * self.screen_ratio) label_again.resize(int(self.grid_width * 3.3), self.init_bias - 5) product_label00 = QLabel(group_box) product_label00.setText('产量') product_label00.move(3, self.init_bias) product_label00.resize(self.grid_width, self.grid_height) product_label00.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"}') product00 = QLineEdit(group_box) product00.setText('0') product00.move(self.grid_width, self.init_bias) product00.resize(self.grid_width, self.grid_height) product00.setEnabled(False) product00.setStyleSheet('QLineEdit {font: ' + str(self.line_edit_font_size) + 'pt 
"宋体"}') product_label10 = QLabel(group_box) product_label10.setText('额外') product_label10.move(3, self.grid_height + self.init_bias) product_label10.resize(self.grid_width, self.grid_height) product_label10.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"}') product10 = QLineEdit(group_box) product10.setText('0') product10.move(self.grid_width, self.grid_height + self.init_bias) product10.resize(self.grid_width, self.grid_height) product10.setStyleSheet('QLineEdit {font: ' + str(self.line_edit_font_size) + 'pt "宋体"}') product_label01 = QLabel(group_box) product_label01.setText('机器') product_label01.move(self.grid_width * 2 + self.interval, self.init_bias) product_label01.resize(self.grid_width, self.grid_height) product_label01.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"}') product01 = QLineEdit(group_box) product01.setText('0.0') product01.move(self.grid_width * 3 + self.interval, self.init_bias) product01.resize(self.grid_width, self.grid_height) product01.setStyleSheet('QLineEdit {font: ' + str(self.line_edit_font_size) + 'pt "宋体"}') product01.setEnabled(False) product_label11 = QLabel(group_box) product_label11.setText('已有') product_label11.move(self.grid_width * 2 + self.interval, self.grid_height + self.init_bias) product_label11.resize(self.grid_width, self.grid_height) product_label11.setStyleSheet('QLabel {font: 75 ' + str(self.font_size) + 'pt "宋体"}') product11 = QLineEdit(group_box) product11.setText('0') product11.move(self.grid_width * 3 + self.interval, self.grid_height + self.init_bias) product11.resize(self.grid_width, self.grid_height) product11.setStyleSheet('QLineEdit {font: ' + str(self.line_edit_font_size) + 'pt "宋体"}') if resource in self.supporter: product11.setEnabled(True) else: product11.setEnabled(False) return [product00, product01, product10, product11] def update_view(self, is_int=[True, False, True, True]): for resource in self.sorted_element: i, j = self.get_idx(resource) for k in range(4): 
amount = round(self.element_amount[i][j][k], 1) if is_int[k]: amount = int(self.element_amount[i][j][k]) self.element_box[i][j][k].setText(str(amount)) def get_idx(self, resource): idx = None if resource != '': for i in range(len(self.element)): for j in range(len(self.element[0])): if resource == self.element[i][j]: idx = [i, j] return idx def produce_resource(self, resource, increase_production_number): idx = self.get_idx(resource) if not idx: exit(1) else: i, j = idx self.element_amount[i][j][0] += increase_production_number production_speed = self.production[resource][0][0] self.element_amount[i][j][1] += increase_production_number / production_speed component = self.production[resource][1:] if not component: return for obj_resource in component: production_name = obj_resource[0] production_number = increase_production_number * obj_resource[1] self.produce_resource(production_name, production_number) def calculate_supporter(self): for supporter, properties in self.supporter.items(): i, j = self.get_idx(supporter) amount = self.element_amount[i][j][3] for production in properties: i, j = self.get_idx(production[0]) production_amount = self.element_amount[i][j][0] convert_amount_to_production_amount = amount * production[1] need_negative_production = convert_amount_to_production_amount - production_amount if need_negative_production > 0: self.produce_resource(production[0], -1 * production_amount) else: self.produce_resource(production[0], -1 * convert_amount_to_production_amount) def calculate_bi_raw_material(self): for material, properties in self.bi_material.items(): production1 = properties[0][0] i, j = self.get_idx(production1) production1_amount = properties[0][1] need_production1_amount = self.element_amount[i][j][0] need_material_amount1 = need_production1_amount / production1_amount production2 = properties[1][0] i, j = self.get_idx(production2) production2_amount = properties[1][1] need_production2_amount = self.element_amount[i][j][0] 
need_material_amount2 = need_production2_amount / production2_amount need_material_amount = max(need_material_amount1, need_material_amount2) i, j = self.get_idx(material) self.element_amount[i][j][0] = need_material_amount material_production_speed = self.production[material][0][0] self.element_amount[i][j][1] = need_material_amount / material_production_speed def update_element_amount(self, has_supporter=True): for resource in self.sorted_element: i, j = self.get_idx(resource) for k in range(4): input_value = self.element_box[i][j][k].text() if k == 0 or k == 1 or input_value == '': self.element_amount[i][j][k] = 0.0 else: self.element_amount[i][j][k] = float(input_value) for resource in self.sorted_element: i, j = self.get_idx(resource) production_amount = self.element_amount[i][j][2] - self.element_amount[i][j][3] if production_amount < 0: self.produce_resource(resource, 0) else: self.produce_resource(resource, production_amount) if has_supporter: self.calculate_supporter() self.calculate_bi_raw_material() self.update_view() def ceil_machine_number(self): self.update_element_amount(False) supporter_stack = dict() for support, products in self.supporter.items(): i, j = self.get_idx(support) support_amount = self.element_amount[i][j][3] for product in products: product_name = product[0] product_amount = product[1] supporter_stack[product_name] = support_amount * product_amount for resource in self.sorted_element: if resource not in self.supporter: i, j = self.get_idx(resource) production_speed = self.production[resource][0][0] if resource in supporter_stack: cur_resource_amount = self.element_amount[i][j][0] real_resource_amount = cur_resource_amount - supporter_stack[resource] if real_resource_amount > 0: cur_machine_amount = real_resource_amount / production_speed new_machine_amount = ceil(cur_machine_amount) else: new_machine_amount = 0 else: cur_machine_amount = self.element_amount[i][j][1] new_machine_amount = ceil(cur_machine_amount) cur_resource_amount = 
self.element_amount[i][j][0] incre_resource_amount = new_machine_amount * production_speed - cur_resource_amount self.produce_resource(resource, incre_resource_amount) self.element_amount[i][j][1] = new_machine_amount self.calculate_bi_raw_material() self.update_view([False, True, True, True]) def show(self): self.window.show()
true
true
f70c3cfc48871815483645f3e9d8c0ea781f9872
1,352
py
Python
saleor/graphql/product/types/digital_contents.py
shannenye/saleor
f6a2d35fd8ae8d614edd952cd5c58adb82e56ab2
[ "CC-BY-4.0" ]
2
2021-01-31T00:28:42.000Z
2021-01-31T12:30:46.000Z
saleor/graphql/product/types/digital_contents.py
shannenye/saleor
f6a2d35fd8ae8d614edd952cd5c58adb82e56ab2
[ "CC-BY-4.0" ]
19
2022-02-21T04:34:12.000Z
2022-03-28T04:55:08.000Z
saleor/graphql/product/types/digital_contents.py
shannenye/saleor
f6a2d35fd8ae8d614edd952cd5c58adb82e56ab2
[ "CC-BY-4.0" ]
3
2021-02-22T16:37:18.000Z
2021-02-22T18:56:28.000Z
import graphene from graphene import relay from ....product import models from ...core.connection import CountableDjangoObjectType from ...core.scalars import UUID from ...meta.types import ObjectWithMetadata class DigitalContentUrl(CountableDjangoObjectType): url = graphene.String(description="URL for digital content.") token = graphene.Field( UUID, description=("UUID of digital content."), required=True ) class Meta: model = models.DigitalContentUrl only_fields = ["content", "created", "download_num"] interfaces = (relay.Node,) @staticmethod def resolve_url(root: models.DigitalContentUrl, *_args): return root.get_absolute_url() class DigitalContent(CountableDjangoObjectType): urls = graphene.List( lambda: DigitalContentUrl, description="List of URLs for the digital variant.", ) class Meta: model = models.DigitalContent only_fields = [ "automatic_fulfillment", "content_file", "max_downloads", "product_variant", "url_valid_days", "urls", "use_default_settings", ] interfaces = (relay.Node, ObjectWithMetadata) @staticmethod def resolve_urls(root: models.DigitalContent, **_kwargs): return root.urls.all()
28.166667
69
0.656805
import graphene from graphene import relay from ....product import models from ...core.connection import CountableDjangoObjectType from ...core.scalars import UUID from ...meta.types import ObjectWithMetadata class DigitalContentUrl(CountableDjangoObjectType): url = graphene.String(description="URL for digital content.") token = graphene.Field( UUID, description=("UUID of digital content."), required=True ) class Meta: model = models.DigitalContentUrl only_fields = ["content", "created", "download_num"] interfaces = (relay.Node,) @staticmethod def resolve_url(root: models.DigitalContentUrl, *_args): return root.get_absolute_url() class DigitalContent(CountableDjangoObjectType): urls = graphene.List( lambda: DigitalContentUrl, description="List of URLs for the digital variant.", ) class Meta: model = models.DigitalContent only_fields = [ "automatic_fulfillment", "content_file", "max_downloads", "product_variant", "url_valid_days", "urls", "use_default_settings", ] interfaces = (relay.Node, ObjectWithMetadata) @staticmethod def resolve_urls(root: models.DigitalContent, **_kwargs): return root.urls.all()
true
true
f70c3d27090122cbd9fceb2fd2fbf70a2213269b
15,418
py
Python
python/yb/download_and_extract_archive.py
deepld/yugabyte-db
3a4464deef220f5bbc58cce22ad6bfa8c57c98a8
[ "Apache-2.0", "CC0-1.0" ]
null
null
null
python/yb/download_and_extract_archive.py
deepld/yugabyte-db
3a4464deef220f5bbc58cce22ad6bfa8c57c98a8
[ "Apache-2.0", "CC0-1.0" ]
null
null
null
python/yb/download_and_extract_archive.py
deepld/yugabyte-db
3a4464deef220f5bbc58cce22ad6bfa8c57c98a8
[ "Apache-2.0", "CC0-1.0" ]
null
null
null
#!/usr/bin/env python # Copyright (c) YugaByte, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except # in compliance with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the License for the specific language governing permissions and limitations # under the License. """ Downloads and extracts an archive with pre-built third-party dependencies. """ # This script should not use any non-standard modules and should run with Python 2 and Python 3. # It could be run before the main Python interpreter we'll be using for most of our scripts is # even installed. import os import sys import re import logging import socket import random import atexit import subprocess import argparse import tempfile import time import getpass import platform import fcntl import errno g_verbose = False EXPECTED_ARCHIVE_EXTENSION = '.tar.gz' CHECKSUM_EXTENSION = '.sha256' def remove_ignore_errors(file_path): file_path = os.path.abspath(file_path) if os.path.isfile(file_path): try: os.remove(file_path) except Exception as e: logging.warning("Error removing %s: %s, ignoring", file_path, e) def run_cmd(args): if g_verbose: logging.info("Running command: %s", args) try: subprocess.check_call(args) except: # noqa logging.error("Error trying to run command: %s", args) raise def validate_sha256sum(checksum_str): if not re.match(r'^[0-9a-f]{64}$', checksum_str): raise ValueError("Invalid SHA256 checksum: '%s', expected 64 hex characters", checksum_str) def read_file_and_strip(file_path): with open(file_path) as f: return f.read().strip() def compute_sha256sum(file_path): cmd_line = None if sys.platform.startswith('linux'): cmd_line = ['sha256sum', file_path] elif 
sys.platform.startswith('darwin'): cmd_line = ['shasum', '--algorithm', '256', file_path] else: raise ValueError("Don't know how to compute SHA256 checksum on platform %s" % sys.platform) checksum_str = subprocess.check_output(cmd_line).strip().split()[0].decode('utf-8') validate_sha256sum(checksum_str) return checksum_str def verify_sha256sum(checksum_file_path, data_file_path): if not os.path.exists(checksum_file_path): raise IOError("Checksum file does not exist: %s" % checksum_file_path) if not os.path.exists(data_file_path): raise IOError("Data file does not exist: %s", data_file_path) if not checksum_file_path.endswith(CHECKSUM_EXTENSION): raise ValueError("Checksum file path must end with '%s', got: %s" % ( CHECKSUM_EXTENSION, checksum_file_path)) # Guard against someone passing in the actual data file instead of the checksum file. checksum_file_size = os.stat(checksum_file_path).st_size if checksum_file_size > 4096: raise IOError("Checksum file size is too big: %d bytes (file path: %s)" % ( checksum_file_size, checksum_file_path)) expected_checksum = read_file_and_strip(checksum_file_path).split()[0] actual_checksum = compute_sha256sum(data_file_path) if actual_checksum == expected_checksum: return True err_msg = "Invalid checksum for file %s: got %s, expected %s" % ( data_file_path, actual_checksum, expected_checksum) logging.warning(err_msg) return False def download_url(url, dest_path): start_time_sec = time.time() logging.info("Downloading %s to %s", url, dest_path) dest_dir = os.path.dirname(dest_path) if not os.path.isdir(dest_dir): raise IOError("Destination directory %s does not exist" % dest_dir) run_cmd(['curl', '-LsS', url, '-o', dest_path]) if not os.path.exists(dest_path): raise IOError("Failed to download %s: file %s does not exist" % (url, dest_path)) elapsed_sec = time.time() - start_time_sec logging.info("Downloaded %s to %s in %.1fs" % (url, dest_path, elapsed_sec)) def move_file(src_path, dest_path): if g_verbose: logging.info("Trying to 
move file %s to %s", src_path, dest_path) if not os.path.exists(src_path): raise IOError("Does not exist: %s" % src_path) if not os.path.isfile(src_path): raise IOError("Not a file: %s" % src_path) if os.path.isdir(dest_path): raise IOError("Destination path can't be a directory: %s" % dest_path) if os.path.exists(dest_path): logging.warning("Destination path already exists: %s, moving %s there anyway" % ( dest_path, src_path)) dest_parent_dir = os.path.dirname(dest_path) if not os.path.isdir(dest_parent_dir): raise IOError("Destination directory %s does not exist" % dest_parent_dir) os.rename(src_path, dest_path) def check_dir_exists_and_is_writable(dir_path, description): if not os.path.isdir(dir_path): raise IOError("%s directory %s does not exist" % (description, dir_path)) if not os.access(dir_path, os.W_OK): raise IOError("%s directory %s is not writable by current user (%s)" % ( description, dir_path, getpass.getuser())) # From https://github.com/ianlini/mkdir-p/blob/master/mkdir_p/mkdir_p.py def mkdir_p(path, mode=0o777): try: os.makedirs(path, mode=mode) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def exists_or_is_link(dest): """ A file could be a link to a non-existent directory, or to a directory owned by a different user in a directory with sticky bit set. In such cases os.path.exists might return false, but islink will return true. """ return os.path.exists(dest) or os.path.islink(dest) def download_and_extract(url, dest_dir_parent, local_cache_dir, nfs_cache_dir): tar_gz_name = os.path.basename(url) checksum_file_name = tar_gz_name + CHECKSUM_EXTENSION install_dir_name = tar_gz_name[:-len(EXPECTED_ARCHIVE_EXTENSION)] dest_dir = os.path.join(dest_dir_parent, install_dir_name) if os.path.isdir(dest_dir): logging.info("Directory %s already exists, no need to install." 
% dest_dir) return if not os.path.isdir(local_cache_dir): logging.info("Directory %s does not exist, trying to create", local_cache_dir) try: mkdir_p(local_cache_dir) except Exception as ex: logging.info("Failed creating directory '%s': %s", local_cache_dir, ex) check_dir_exists_and_is_writable(local_cache_dir, "Local cache") if not url.endswith(EXPECTED_ARCHIVE_EXTENSION): raise ValueError("Archive download URL is expected to end with %s, got: %s" % ( url, EXPECTED_ARCHIVE_EXTENSION)) if os.path.isdir(dest_dir): logging.info("Directory %s already exists, someone must have created it concurrently.", dest_dir) return start_time_sec = time.time() logging.info("Installing %s into directory %s", url, dest_dir) tmp_dir_prefix = os.path.abspath(os.path.join(dest_dir_parent, install_dir_name + '.tmp.')) mkdir_p(dest_dir_parent) tmp_dir = tempfile.mkdtemp(prefix=tmp_dir_prefix) def cleanup(): if os.path.isdir(tmp_dir): run_cmd(['rm', '-rf', tmp_dir]) atexit.register(cleanup) for cache_dir in [local_cache_dir, nfs_cache_dir]: cached_tar_gz_path = os.path.join(cache_dir, tar_gz_name) cached_checksum_path = cached_tar_gz_path + CHECKSUM_EXTENSION tar_gz_path = None if os.path.exists(cached_tar_gz_path) and os.path.exists(cached_checksum_path): logging.info("Verifying the checksum of %s", cached_tar_gz_path) if verify_sha256sum(cached_checksum_path, cached_tar_gz_path): tar_gz_path = os.path.join(cache_dir, tar_gz_name) break else: remove_ignore_errors(cached_tar_gz_path) remove_ignore_errors(cached_checksum_path) if tar_gz_path is None: tmp_tar_gz_path = os.path.join(tmp_dir, tar_gz_name) tmp_checksum_path = os.path.join(tmp_dir, checksum_file_name) download_url(url + CHECKSUM_EXTENSION, tmp_checksum_path) download_url(url, tmp_tar_gz_path) if not verify_sha256sum(tmp_checksum_path, tmp_tar_gz_path): raise ValueError("Checksum verification failed for the download of %s" % url) file_names = [tar_gz_name, checksum_file_name] for file_name in file_names: 
move_file(os.path.join(tmp_dir, file_name), os.path.join(local_cache_dir, file_name)) tar_gz_path = os.path.join(local_cache_dir, tar_gz_name) nfs_tar_gz_path = os.path.join(nfs_cache_dir, tar_gz_name) nfs_checksum_file_path = os.path.join(nfs_cache_dir, checksum_file_name) if (os.path.isdir(nfs_cache_dir) and os.access(nfs_cache_dir, os.W_OK) and (not os.path.exists(nfs_tar_gz_path) or not os.path.exists(nfs_checksum_file_path))): for file_name in file_names: run_cmd(['cp', os.path.join(local_cache_dir, file_name), os.path.join(nfs_cache_dir, file_name)]) logging.info("Extracting %s in %s", tar_gz_path, tmp_dir) run_cmd(['tar', 'xf', tar_gz_path, '-C', tmp_dir]) tmp_extracted_dir = os.path.join(tmp_dir, install_dir_name) if not os.path.exists(tmp_extracted_dir): raise IOError( "Extracted '%s' in '%s' but a directory named '%s' did not appear" % ( tar_gz_path, os.getcwd(), tmp_extracted_dir)) if exists_or_is_link(dest_dir): logging.info("Looks like %s was created concurrently", dest_dir) return if install_dir_name.startswith('linuxbrew'): orig_brew_home_file = os.path.join(tmp_extracted_dir, 'ORIG_BREW_HOME') if not os.path.exists(orig_brew_home_file): raise IOError("File '%s' not found after extracting '%s'" % ( orig_brew_home_file, tar_gz_name)) orig_brew_home = read_file_and_strip(orig_brew_home_file) if not orig_brew_home.startswith(dest_dir): raise ValueError( "Original Homebrew/Linuxbrew install home directory is '%s'" " but we are trying to install it in '%s', and that is not a prefix of" " the former." % (orig_brew_home, dest_dir)) already_installed_msg = ( "'%s' already exists, cannot move '%s' to it. Someone else must have " "installed it concurrently. This is OK." % ( orig_brew_home, dest_dir)) def create_brew_symlink_if_needed(): brew_link_src = os.path.basename(orig_brew_home) # dest_dir will now be a symlink pointing to brew_link_src. We are NOT creating a # symlink inside dest_dir. 
if not exists_or_is_link(dest_dir): logging.info("Creating a symlink '%s' -> '%s'", dest_dir, brew_link_src) try: os.symlink(brew_link_src, dest_dir) except OSError as os_error: if os_error.errno == errno.EEXIST: if exists_or_is_link(dest_dir): logging.info( "Symlink '%s' was created concurrently. This is probably OK.", dest_dir) else: err_msg = ( "Failed creating symlink '%s' -> '%s' with error: %s, but the " "symlink does not actually exist!" % ( dest_dir, brew_link_src, os_error)) logging.error(err_msg) raise IOError(err_msg) else: logging.error("Unexpected error when creating symlink '%s' -> '%s': %s", dest_dir, brew_link_src, os_error) raise os_error assert exists_or_is_link(dest_dir) if not os.path.islink(dest_dir): # A defensive sanity check. err_msg = "%s exists but is not a symbolic link" % dest_dir logging.error(err_msg) raise IOError(err_msg) else: actual_link_src = os.readlink(dest_dir) if actual_link_src != brew_link_src: err_msg = "Symlink %s is not pointing to %s but instead points to %s" % ( dest_dir, brew_link_src, actual_link_src) logging.error(err_msg) raise IOError(err_msg) if os.path.exists(orig_brew_home): logging.info(already_installed_msg) create_brew_symlink_if_needed() return logging.info("Moving '%s' to '%s'" % (tmp_extracted_dir, orig_brew_home)) try: os.rename(tmp_extracted_dir, orig_brew_home) except IOError as io_error: # A defensive sanity check in case locking is not working properly. if io_error == errno.ENOTEMPTY: # For whatever reason, this is what we get when the destination directory # already exists. logging.info(already_installed_msg) create_brew_symlink_if_needed() return create_brew_symlink_if_needed() else: if g_verbose: logging.info("Moving %s to %s", tmp_extracted_dir, dest_dir) os.rename(tmp_extracted_dir, dest_dir) logging.info("Installation of %s took %.1f sec", dest_dir, time.time() - start_time_sec) def main(): # Created files/directories should be writable by the group. 
os.umask(2) logging.basicConfig( level=logging.INFO, format="%(filename)s:%(lineno)d " + socket.gethostname() + " pid " + str(os.getpid()) + " %(asctime)s %(levelname)s: %(message)s") parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( '--url', help='URL to download. Must end with .tar.gz.', required=True) parser.add_argument( '--dest-dir-parent', help='Parent directory in which to extract the archive', required=True) parser.add_argument( '--local-cache-dir', default='/opt/yb-build/download_cache', help='Download cache on the local disk') parser.add_argument( '--nfs-cache-dir', default='/Volumes/n/jenkins/download_cache', help='Download cache on NFS') parser.add_argument('--verbose', action='store_true', help='Verbose logging') args = parser.parse_args() if args.verbose or os.getenv('YB_VERBOSE') == '1': global g_verbose g_verbose = True download_and_extract( url=args.url, dest_dir_parent=args.dest_dir_parent, local_cache_dir=args.local_cache_dir, nfs_cache_dir=args.nfs_cache_dir) if __name__ == '__main__': main()
40.046753
99
0.644247
# even installed. import os import sys import re import logging import socket import random import atexit import subprocess import argparse import tempfile import time import getpass import platform import fcntl import errno g_verbose = False EXPECTED_ARCHIVE_EXTENSION = '.tar.gz' CHECKSUM_EXTENSION = '.sha256' def remove_ignore_errors(file_path): file_path = os.path.abspath(file_path) if os.path.isfile(file_path): try: os.remove(file_path) except Exception as e: logging.warning("Error removing %s: %s, ignoring", file_path, e) def run_cmd(args): if g_verbose: logging.info("Running command: %s", args) try: subprocess.check_call(args) except: # noqa logging.error("Error trying to run command: %s", args) raise def validate_sha256sum(checksum_str): if not re.match(r'^[0-9a-f]{64}$', checksum_str): raise ValueError("Invalid SHA256 checksum: '%s', expected 64 hex characters", checksum_str) def read_file_and_strip(file_path): with open(file_path) as f: return f.read().strip() def compute_sha256sum(file_path): cmd_line = None if sys.platform.startswith('linux'): cmd_line = ['sha256sum', file_path] elif sys.platform.startswith('darwin'): cmd_line = ['shasum', '--algorithm', '256', file_path] else: raise ValueError("Don't know how to compute SHA256 checksum on platform %s" % sys.platform) checksum_str = subprocess.check_output(cmd_line).strip().split()[0].decode('utf-8') validate_sha256sum(checksum_str) return checksum_str def verify_sha256sum(checksum_file_path, data_file_path): if not os.path.exists(checksum_file_path): raise IOError("Checksum file does not exist: %s" % checksum_file_path) if not os.path.exists(data_file_path): raise IOError("Data file does not exist: %s", data_file_path) if not checksum_file_path.endswith(CHECKSUM_EXTENSION): raise ValueError("Checksum file path must end with '%s', got: %s" % ( CHECKSUM_EXTENSION, checksum_file_path)) checksum_file_size = os.stat(checksum_file_path).st_size if checksum_file_size > 4096: raise IOError("Checksum file size 
is too big: %d bytes (file path: %s)" % ( checksum_file_size, checksum_file_path)) expected_checksum = read_file_and_strip(checksum_file_path).split()[0] actual_checksum = compute_sha256sum(data_file_path) if actual_checksum == expected_checksum: return True err_msg = "Invalid checksum for file %s: got %s, expected %s" % ( data_file_path, actual_checksum, expected_checksum) logging.warning(err_msg) return False def download_url(url, dest_path): start_time_sec = time.time() logging.info("Downloading %s to %s", url, dest_path) dest_dir = os.path.dirname(dest_path) if not os.path.isdir(dest_dir): raise IOError("Destination directory %s does not exist" % dest_dir) run_cmd(['curl', '-LsS', url, '-o', dest_path]) if not os.path.exists(dest_path): raise IOError("Failed to download %s: file %s does not exist" % (url, dest_path)) elapsed_sec = time.time() - start_time_sec logging.info("Downloaded %s to %s in %.1fs" % (url, dest_path, elapsed_sec)) def move_file(src_path, dest_path): if g_verbose: logging.info("Trying to move file %s to %s", src_path, dest_path) if not os.path.exists(src_path): raise IOError("Does not exist: %s" % src_path) if not os.path.isfile(src_path): raise IOError("Not a file: %s" % src_path) if os.path.isdir(dest_path): raise IOError("Destination path can't be a directory: %s" % dest_path) if os.path.exists(dest_path): logging.warning("Destination path already exists: %s, moving %s there anyway" % ( dest_path, src_path)) dest_parent_dir = os.path.dirname(dest_path) if not os.path.isdir(dest_parent_dir): raise IOError("Destination directory %s does not exist" % dest_parent_dir) os.rename(src_path, dest_path) def check_dir_exists_and_is_writable(dir_path, description): if not os.path.isdir(dir_path): raise IOError("%s directory %s does not exist" % (description, dir_path)) if not os.access(dir_path, os.W_OK): raise IOError("%s directory %s is not writable by current user (%s)" % ( description, dir_path, getpass.getuser())) # From 
https://github.com/ianlini/mkdir-p/blob/master/mkdir_p/mkdir_p.py def mkdir_p(path, mode=0o777): try: os.makedirs(path, mode=mode) except OSError as exc: if exc.errno == errno.EEXIST and os.path.isdir(path): pass else: raise def exists_or_is_link(dest): return os.path.exists(dest) or os.path.islink(dest) def download_and_extract(url, dest_dir_parent, local_cache_dir, nfs_cache_dir): tar_gz_name = os.path.basename(url) checksum_file_name = tar_gz_name + CHECKSUM_EXTENSION install_dir_name = tar_gz_name[:-len(EXPECTED_ARCHIVE_EXTENSION)] dest_dir = os.path.join(dest_dir_parent, install_dir_name) if os.path.isdir(dest_dir): logging.info("Directory %s already exists, no need to install." % dest_dir) return if not os.path.isdir(local_cache_dir): logging.info("Directory %s does not exist, trying to create", local_cache_dir) try: mkdir_p(local_cache_dir) except Exception as ex: logging.info("Failed creating directory '%s': %s", local_cache_dir, ex) check_dir_exists_and_is_writable(local_cache_dir, "Local cache") if not url.endswith(EXPECTED_ARCHIVE_EXTENSION): raise ValueError("Archive download URL is expected to end with %s, got: %s" % ( url, EXPECTED_ARCHIVE_EXTENSION)) if os.path.isdir(dest_dir): logging.info("Directory %s already exists, someone must have created it concurrently.", dest_dir) return start_time_sec = time.time() logging.info("Installing %s into directory %s", url, dest_dir) tmp_dir_prefix = os.path.abspath(os.path.join(dest_dir_parent, install_dir_name + '.tmp.')) mkdir_p(dest_dir_parent) tmp_dir = tempfile.mkdtemp(prefix=tmp_dir_prefix) def cleanup(): if os.path.isdir(tmp_dir): run_cmd(['rm', '-rf', tmp_dir]) atexit.register(cleanup) for cache_dir in [local_cache_dir, nfs_cache_dir]: cached_tar_gz_path = os.path.join(cache_dir, tar_gz_name) cached_checksum_path = cached_tar_gz_path + CHECKSUM_EXTENSION tar_gz_path = None if os.path.exists(cached_tar_gz_path) and os.path.exists(cached_checksum_path): logging.info("Verifying the checksum of %s", 
cached_tar_gz_path) if verify_sha256sum(cached_checksum_path, cached_tar_gz_path): tar_gz_path = os.path.join(cache_dir, tar_gz_name) break else: remove_ignore_errors(cached_tar_gz_path) remove_ignore_errors(cached_checksum_path) if tar_gz_path is None: tmp_tar_gz_path = os.path.join(tmp_dir, tar_gz_name) tmp_checksum_path = os.path.join(tmp_dir, checksum_file_name) download_url(url + CHECKSUM_EXTENSION, tmp_checksum_path) download_url(url, tmp_tar_gz_path) if not verify_sha256sum(tmp_checksum_path, tmp_tar_gz_path): raise ValueError("Checksum verification failed for the download of %s" % url) file_names = [tar_gz_name, checksum_file_name] for file_name in file_names: move_file(os.path.join(tmp_dir, file_name), os.path.join(local_cache_dir, file_name)) tar_gz_path = os.path.join(local_cache_dir, tar_gz_name) nfs_tar_gz_path = os.path.join(nfs_cache_dir, tar_gz_name) nfs_checksum_file_path = os.path.join(nfs_cache_dir, checksum_file_name) if (os.path.isdir(nfs_cache_dir) and os.access(nfs_cache_dir, os.W_OK) and (not os.path.exists(nfs_tar_gz_path) or not os.path.exists(nfs_checksum_file_path))): for file_name in file_names: run_cmd(['cp', os.path.join(local_cache_dir, file_name), os.path.join(nfs_cache_dir, file_name)]) logging.info("Extracting %s in %s", tar_gz_path, tmp_dir) run_cmd(['tar', 'xf', tar_gz_path, '-C', tmp_dir]) tmp_extracted_dir = os.path.join(tmp_dir, install_dir_name) if not os.path.exists(tmp_extracted_dir): raise IOError( "Extracted '%s' in '%s' but a directory named '%s' did not appear" % ( tar_gz_path, os.getcwd(), tmp_extracted_dir)) if exists_or_is_link(dest_dir): logging.info("Looks like %s was created concurrently", dest_dir) return if install_dir_name.startswith('linuxbrew'): orig_brew_home_file = os.path.join(tmp_extracted_dir, 'ORIG_BREW_HOME') if not os.path.exists(orig_brew_home_file): raise IOError("File '%s' not found after extracting '%s'" % ( orig_brew_home_file, tar_gz_name)) orig_brew_home = 
read_file_and_strip(orig_brew_home_file) if not orig_brew_home.startswith(dest_dir): raise ValueError( "Original Homebrew/Linuxbrew install home directory is '%s'" " but we are trying to install it in '%s', and that is not a prefix of" " the former." % (orig_brew_home, dest_dir)) already_installed_msg = ( "'%s' already exists, cannot move '%s' to it. Someone else must have " "installed it concurrently. This is OK." % ( orig_brew_home, dest_dir)) def create_brew_symlink_if_needed(): brew_link_src = os.path.basename(orig_brew_home) # dest_dir will now be a symlink pointing to brew_link_src. We are NOT creating a # symlink inside dest_dir. if not exists_or_is_link(dest_dir): logging.info("Creating a symlink '%s' -> '%s'", dest_dir, brew_link_src) try: os.symlink(brew_link_src, dest_dir) except OSError as os_error: if os_error.errno == errno.EEXIST: if exists_or_is_link(dest_dir): logging.info( "Symlink '%s' was created concurrently. This is probably OK.", dest_dir) else: err_msg = ( "Failed creating symlink '%s' -> '%s' with error: %s, but the " "symlink does not actually exist!" % ( dest_dir, brew_link_src, os_error)) logging.error(err_msg) raise IOError(err_msg) else: logging.error("Unexpected error when creating symlink '%s' -> '%s': %s", dest_dir, brew_link_src, os_error) raise os_error assert exists_or_is_link(dest_dir) if not os.path.islink(dest_dir): # A defensive sanity check. 
err_msg = "%s exists but is not a symbolic link" % dest_dir logging.error(err_msg) raise IOError(err_msg) else: actual_link_src = os.readlink(dest_dir) if actual_link_src != brew_link_src: err_msg = "Symlink %s is not pointing to %s but instead points to %s" % ( dest_dir, brew_link_src, actual_link_src) logging.error(err_msg) raise IOError(err_msg) if os.path.exists(orig_brew_home): logging.info(already_installed_msg) create_brew_symlink_if_needed() return logging.info("Moving '%s' to '%s'" % (tmp_extracted_dir, orig_brew_home)) try: os.rename(tmp_extracted_dir, orig_brew_home) except IOError as io_error: # A defensive sanity check in case locking is not working properly. if io_error == errno.ENOTEMPTY: # For whatever reason, this is what we get when the destination directory # already exists. logging.info(already_installed_msg) create_brew_symlink_if_needed() return create_brew_symlink_if_needed() else: if g_verbose: logging.info("Moving %s to %s", tmp_extracted_dir, dest_dir) os.rename(tmp_extracted_dir, dest_dir) logging.info("Installation of %s took %.1f sec", dest_dir, time.time() - start_time_sec) def main(): # Created files/directories should be writable by the group. os.umask(2) logging.basicConfig( level=logging.INFO, format="%(filename)s:%(lineno)d " + socket.gethostname() + " pid " + str(os.getpid()) + " %(asctime)s %(levelname)s: %(message)s") parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( '--url', help='URL to download. 
Must end with .tar.gz.', required=True) parser.add_argument( '--dest-dir-parent', help='Parent directory in which to extract the archive', required=True) parser.add_argument( '--local-cache-dir', default='/opt/yb-build/download_cache', help='Download cache on the local disk') parser.add_argument( '--nfs-cache-dir', default='/Volumes/n/jenkins/download_cache', help='Download cache on NFS') parser.add_argument('--verbose', action='store_true', help='Verbose logging') args = parser.parse_args() if args.verbose or os.getenv('YB_VERBOSE') == '1': global g_verbose g_verbose = True download_and_extract( url=args.url, dest_dir_parent=args.dest_dir_parent, local_cache_dir=args.local_cache_dir, nfs_cache_dir=args.nfs_cache_dir) if __name__ == '__main__': main()
true
true
f70c3dfba447061d7e08725b7b0184cabf89a717
162
py
Python
python-port/speedclue/cards.py
sadakatsu/SpeedClueContest
f670e4e594b35e4a5111492dde31414429865ade
[ "MIT" ]
1
2017-10-20T14:24:06.000Z
2017-10-20T14:24:06.000Z
python-port/speedclue/cards.py
sadakatsu/SpeedClueContest
f670e4e594b35e4a5111492dde31414429865ade
[ "MIT" ]
null
null
null
python-port/speedclue/cards.py
sadakatsu/SpeedClueContest
f670e4e594b35e4a5111492dde31414429865ade
[ "MIT" ]
null
null
null
CARDS = ( (('Gr', 'Mu', 'Pe', 'Pl', 'Sc', 'Wh')), (('Ca', 'Kn', 'Pi', 'Re', 'Ro', 'Wr')), (('Ba', 'Bi', 'Co', 'Di', 'Ha', 'Ki', 'Li', 'Lo', 'St')), )
27
61
0.290123
CARDS = ( (('Gr', 'Mu', 'Pe', 'Pl', 'Sc', 'Wh')), (('Ca', 'Kn', 'Pi', 'Re', 'Ro', 'Wr')), (('Ba', 'Bi', 'Co', 'Di', 'Ha', 'Ki', 'Li', 'Lo', 'St')), )
true
true
f70c3e29979e263ef86f35ed84b4216ab4557a46
933
py
Python
emcee/__init__.py
eteq/emcee
b410abe67936fb816c38470b3043532d2e2e6d0b
[ "MIT" ]
14
2020-07-20T07:09:50.000Z
2022-03-11T03:26:13.000Z
emcee/__init__.py
eteq/emcee
b410abe67936fb816c38470b3043532d2e2e6d0b
[ "MIT" ]
38
2020-05-21T19:03:45.000Z
2022-01-17T03:44:25.000Z
emcee/__init__.py
eteq/emcee
b410abe67936fb816c38470b3043532d2e2e6d0b
[ "MIT" ]
8
2020-06-01T14:23:07.000Z
2022-01-28T19:05:52.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import (division, print_function, absolute_import, unicode_literals) from .sampler import * from .mh import * from .ensemble import * from .ptsampler import * from . import utils from . import autocorr __version__ = "2.1.0" def test(): from inspect import getmembers, ismethod from .tests import Tests print("Starting tests...") failures = 0 tests = Tests() for o in getmembers(tests): tests.setUp() if ismethod(o[1]) and o[0].startswith("test"): print("{0} ...".format(o[0])) try: o[1]() except Exception as e: print("Failed with:\n {0.__class__.__name__}: {0}" .format(e)) failures += 1 else: print(" Passed.") print("{0} tests failed".format(failures))
24.552632
69
0.542337
from __future__ import (division, print_function, absolute_import, unicode_literals) from .sampler import * from .mh import * from .ensemble import * from .ptsampler import * from . import utils from . import autocorr __version__ = "2.1.0" def test(): from inspect import getmembers, ismethod from .tests import Tests print("Starting tests...") failures = 0 tests = Tests() for o in getmembers(tests): tests.setUp() if ismethod(o[1]) and o[0].startswith("test"): print("{0} ...".format(o[0])) try: o[1]() except Exception as e: print("Failed with:\n {0.__class__.__name__}: {0}" .format(e)) failures += 1 else: print(" Passed.") print("{0} tests failed".format(failures))
true
true
f70c3ec3e56c2cdd2299164142b1f0f2606d10e9
3,763
py
Python
byceps/services/seating/area_service.py
homeworkprod/byceps
cd0f9f37f7b5eb517106ec761acc7e0bdf75e22e
[ "BSD-3-Clause" ]
23
2015-08-03T23:28:54.000Z
2018-12-12T20:11:45.000Z
byceps/services/seating/area_service.py
homeworkprod/byceps
cd0f9f37f7b5eb517106ec761acc7e0bdf75e22e
[ "BSD-3-Clause" ]
1
2018-09-30T18:18:24.000Z
2018-09-30T18:18:24.000Z
byceps/services/seating/area_service.py
homeworkprod/byceps
cd0f9f37f7b5eb517106ec761acc7e0bdf75e22e
[ "BSD-3-Clause" ]
9
2015-08-06T16:41:36.000Z
2018-09-25T11:17:31.000Z
""" byceps.services.seating.area_service ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :Copyright: 2014-2022 Jochen Kupperschmidt :License: Revised BSD (see `LICENSE` file for details) """ from __future__ import annotations from typing import Optional from sqlalchemy import select from sqlalchemy.sql import Select from ...database import db, paginate, Pagination from ...typing import PartyID from ..ticketing.dbmodels.ticket import Ticket as DbTicket from .dbmodels.area import Area as DbArea from .dbmodels.seat import Seat as DbSeat from .transfer.models import Area, SeatUtilization def create_area(party_id: PartyID, slug: str, title: str) -> Area: """Create an area.""" area = DbArea(party_id, slug, title) db.session.add(area) db.session.commit() return _db_entity_to_area(area) def delete_area(area_id: str) -> None: """Delete an area.""" db.session.query(DbArea) \ .filter_by(id=area_id) \ .delete() db.session.commit() def count_areas_for_party(party_id: PartyID) -> int: """Return the number of seating areas for that party.""" return db.session \ .query(DbArea) \ .filter_by(party_id=party_id) \ .count() def find_area_for_party_by_slug(party_id: PartyID, slug: str) -> Optional[Area]: """Return the area for that party with that slug, or `None` if not found.""" area = db.session \ .query(DbArea) \ .filter_by(party_id=party_id) \ .filter_by(slug=slug) \ .first() if area is None: return None return _db_entity_to_area(area) def get_areas_with_seat_utilization( party_id: PartyID, ) -> list[Area, SeatUtilization]: """Return all areas and their seat utilization for that party.""" query = _get_areas_with_seat_utilization_query(party_id) rows = db.session.execute(query).all() return [_map_areas_with_seat_utilization_row(row) for row in rows] def get_areas_with_seat_utilization_paginated( party_id: PartyID, page: int, per_page: int ) -> Pagination: """Return areas and their seat utilization for that party, paginated.""" items_query = _get_areas_with_seat_utilization_query(party_id) 
count_query = select(db.func.count(DbArea.id)) \ .filter(DbArea.party_id == party_id) return paginate( items_query, count_query, page, per_page, item_mapper=_map_areas_with_seat_utilization_row, ) def _get_areas_with_seat_utilization_query(party_id: PartyID) -> Select: area = db.aliased(DbArea) subquery_occupied_seat_count = select(db.func.count(DbTicket.id)) \ .filter(DbTicket.revoked == False) \ .filter(DbTicket.occupied_seat_id != None) \ .join(DbSeat) \ .filter(DbSeat.area_id == area.id) \ .scalar_subquery() subquery_total_seat_count = select(db.func.count(DbSeat.id)) \ .filter_by(area_id=area.id) \ .scalar_subquery() return select( area, subquery_occupied_seat_count, subquery_total_seat_count, ) \ .filter(area.party_id == party_id) \ .group_by(area.id) def _map_areas_with_seat_utilization_row( row: tuple[DbArea, int, int] ) -> tuple[Area, SeatUtilization]: area, occupied_seat_count, total_seat_count = row utilization = SeatUtilization( occupied=occupied_seat_count, total=total_seat_count ) return _db_entity_to_area(area), utilization def _db_entity_to_area(area: DbArea) -> Area: return Area( id=area.id, party_id=area.party_id, slug=area.slug, title=area.title, image_filename=area.image_filename, image_width=area.image_width, image_height=area.image_height, )
27.874074
80
0.675791
from __future__ import annotations from typing import Optional from sqlalchemy import select from sqlalchemy.sql import Select from ...database import db, paginate, Pagination from ...typing import PartyID from ..ticketing.dbmodels.ticket import Ticket as DbTicket from .dbmodels.area import Area as DbArea from .dbmodels.seat import Seat as DbSeat from .transfer.models import Area, SeatUtilization def create_area(party_id: PartyID, slug: str, title: str) -> Area: area = DbArea(party_id, slug, title) db.session.add(area) db.session.commit() return _db_entity_to_area(area) def delete_area(area_id: str) -> None: db.session.query(DbArea) \ .filter_by(id=area_id) \ .delete() db.session.commit() def count_areas_for_party(party_id: PartyID) -> int: return db.session \ .query(DbArea) \ .filter_by(party_id=party_id) \ .count() def find_area_for_party_by_slug(party_id: PartyID, slug: str) -> Optional[Area]: area = db.session \ .query(DbArea) \ .filter_by(party_id=party_id) \ .filter_by(slug=slug) \ .first() if area is None: return None return _db_entity_to_area(area) def get_areas_with_seat_utilization( party_id: PartyID, ) -> list[Area, SeatUtilization]: query = _get_areas_with_seat_utilization_query(party_id) rows = db.session.execute(query).all() return [_map_areas_with_seat_utilization_row(row) for row in rows] def get_areas_with_seat_utilization_paginated( party_id: PartyID, page: int, per_page: int ) -> Pagination: items_query = _get_areas_with_seat_utilization_query(party_id) count_query = select(db.func.count(DbArea.id)) \ .filter(DbArea.party_id == party_id) return paginate( items_query, count_query, page, per_page, item_mapper=_map_areas_with_seat_utilization_row, ) def _get_areas_with_seat_utilization_query(party_id: PartyID) -> Select: area = db.aliased(DbArea) subquery_occupied_seat_count = select(db.func.count(DbTicket.id)) \ .filter(DbTicket.revoked == False) \ .filter(DbTicket.occupied_seat_id != None) \ .join(DbSeat) \ .filter(DbSeat.area_id == area.id) \ 
.scalar_subquery() subquery_total_seat_count = select(db.func.count(DbSeat.id)) \ .filter_by(area_id=area.id) \ .scalar_subquery() return select( area, subquery_occupied_seat_count, subquery_total_seat_count, ) \ .filter(area.party_id == party_id) \ .group_by(area.id) def _map_areas_with_seat_utilization_row( row: tuple[DbArea, int, int] ) -> tuple[Area, SeatUtilization]: area, occupied_seat_count, total_seat_count = row utilization = SeatUtilization( occupied=occupied_seat_count, total=total_seat_count ) return _db_entity_to_area(area), utilization def _db_entity_to_area(area: DbArea) -> Area: return Area( id=area.id, party_id=area.party_id, slug=area.slug, title=area.title, image_filename=area.image_filename, image_width=area.image_width, image_height=area.image_height, )
true
true
f70c3f84510bef0cd92d2e436087e79ae700c7a7
1,617
py
Python
docs/cornell CS class/lesson 10. Algorithm Design/demos/test_helpers.py
LizzieDeng/kalman_fliter_analysis
50e728f32c496c3fcbb8ca3ee00857b999b88d99
[ "MIT" ]
null
null
null
docs/cornell CS class/lesson 10. Algorithm Design/demos/test_helpers.py
LizzieDeng/kalman_fliter_analysis
50e728f32c496c3fcbb8ca3ee00857b999b88d99
[ "MIT" ]
null
null
null
docs/cornell CS class/lesson 10. Algorithm Design/demos/test_helpers.py
LizzieDeng/kalman_fliter_analysis
50e728f32c496c3fcbb8ca3ee00857b999b88d99
[ "MIT" ]
null
null
null
""" Unit test for multiple modules This module illustrates what a proper unit test should look like. Each function being tested has its own test procedure. It also has a segment of "script code" that invokes the test procedure when this module is run as an script. Author: Walker M. White Date: February 14, 2019 """ import introcs # introcs assert functions import helpers # function to be tested def test_first_name(): """ Test procedure for first_name(n) """ print('Testing first_name') # Test case 1 result = helpers.first_name('Walker White') introcs.assert_equals('Walker',result) # Test case 2 result = helpers.first_name('Walker White') introcs.assert_equals('Walker',result) def test_last_name(): """ Test procedure for last_name(n) """ print('Testing last_name') # Test case 1 result = helpers.last_name('Walker White') introcs.assert_equals('White',result) # Test case 2 result = helpers.last_name('Walker White') introcs.assert_equals('White',result) def test_last_name_first(): """ Test procedure for last_name_first(n) """ print('Testing last_name_first') # Test case 1 result = helpers.last_name_first('Walker White') introcs.assert_equals('White, Walker',result) # Test case 2 result = helpers.last_name_first('Walker White') introcs.assert_equals('White, Walker',result) # Script code if __name__ == '__main__': test_first_name() test_last_name() test_last_name_first() print('The module helpers passed all tests')
23.779412
65
0.682127
import introcs import helpers def test_first_name(): print('Testing first_name') result = helpers.first_name('Walker White') introcs.assert_equals('Walker',result) result = helpers.first_name('Walker White') introcs.assert_equals('Walker',result) def test_last_name(): print('Testing last_name') result = helpers.last_name('Walker White') introcs.assert_equals('White',result) result = helpers.last_name('Walker White') introcs.assert_equals('White',result) def test_last_name_first(): print('Testing last_name_first') result = helpers.last_name_first('Walker White') introcs.assert_equals('White, Walker',result) result = helpers.last_name_first('Walker White') introcs.assert_equals('White, Walker',result) if __name__ == '__main__': test_first_name() test_last_name() test_last_name_first() print('The module helpers passed all tests')
true
true
f70c4348b188e43d79cf8b756f4fb1b4466cb021
2,025
py
Python
indy-tests/utils/utils.py
NgoAnhKhoi/indy-testcase
1f85d2b7e77a5bb9637379286d7f7f142c2c626e
[ "MIT" ]
null
null
null
indy-tests/utils/utils.py
NgoAnhKhoi/indy-testcase
1f85d2b7e77a5bb9637379286d7f7f142c2c626e
[ "MIT" ]
null
null
null
indy-tests/utils/utils.py
NgoAnhKhoi/indy-testcase
1f85d2b7e77a5bb9637379286d7f7f142c2c626e
[ "MIT" ]
null
null
null
''' Created on Nov 9, 2017 @author: khoi.ngo ''' def generate_random_string(prefix="", suffix="", size=20): """ Generate random string . :param prefix: (optional) Prefix of a string. :param suffix: (optional) Suffix of a string. :param length: (optional) Max length of a string (include prefix and suffix) :return: The random string. """ import random import string left_size = size - len(prefix) - len(suffix) random_str = "" if left_size > 0: random_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(left_size)) else: print("Warning: Length of prefix and suffix more than %s chars" % str(size)) result = str(prefix) + random_str + str(suffix) return result def create_step(size): from utils.step import Step lst_step = [] for i in range(0, size): step = Step(i, "") lst_step.append(step) return lst_step def handle_exception(code): if isinstance(code, IndexError or Exception): raise code else: return code async def perform(step, func, *agrs): from indy.error import IndyError from utils.report import Status result = None try: result = await func(*agrs) step.set_status(Status.PASSED) except IndyError as E: print("Indy error" + str(E)) step.set_message(str(E)) return E except Exception as Ex: print("Exception" + str(Ex)) step.set_message(str(E)) return Ex return result async def perform_with_expected_code(step, func, *agrs, expected_code=0): from indy.error import IndyError from utils.report import Status try: await func(*agrs) except IndyError as E: if E == expected_code: step.set_status(Status.PASSED) else: print("Indy error" + str(E)) step.set_message(str(E)) return E except Exception as Ex: print("Exception" + str(Ex)) return Ex
25.961538
109
0.620247
def generate_random_string(prefix="", suffix="", size=20): import random import string left_size = size - len(prefix) - len(suffix) random_str = "" if left_size > 0: random_str = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(left_size)) else: print("Warning: Length of prefix and suffix more than %s chars" % str(size)) result = str(prefix) + random_str + str(suffix) return result def create_step(size): from utils.step import Step lst_step = [] for i in range(0, size): step = Step(i, "") lst_step.append(step) return lst_step def handle_exception(code): if isinstance(code, IndexError or Exception): raise code else: return code async def perform(step, func, *agrs): from indy.error import IndyError from utils.report import Status result = None try: result = await func(*agrs) step.set_status(Status.PASSED) except IndyError as E: print("Indy error" + str(E)) step.set_message(str(E)) return E except Exception as Ex: print("Exception" + str(Ex)) step.set_message(str(E)) return Ex return result async def perform_with_expected_code(step, func, *agrs, expected_code=0): from indy.error import IndyError from utils.report import Status try: await func(*agrs) except IndyError as E: if E == expected_code: step.set_status(Status.PASSED) else: print("Indy error" + str(E)) step.set_message(str(E)) return E except Exception as Ex: print("Exception" + str(Ex)) return Ex
true
true
f70c43bdfff63a45fb5bec5cb16598d64f9c9443
2,990
py
Python
Alexa.py
Rashid7520/alexa
7b5ed9df32222ce1c402f230ade22c7a69b33677
[ "Apache-2.0" ]
1
2022-02-28T17:03:13.000Z
2022-02-28T17:03:13.000Z
Alexa.py
Rashid7520/alexa
7b5ed9df32222ce1c402f230ade22c7a69b33677
[ "Apache-2.0" ]
null
null
null
Alexa.py
Rashid7520/alexa
7b5ed9df32222ce1c402f230ade22c7a69b33677
[ "Apache-2.0" ]
null
null
null
import speech_recognition as sr import pyttsx3 import pywhatkit import datetime import wikipedia import pyjokes import webbrowser import os #import pyaudio listenner = sr.Recognizer() engine = pyttsx3.init() voices = engine.getProperty("voices") engine.setProperty('voice', voices[1].id) def talk(text): engine.say(text) engine.runAndWait() engine.say('Hello Rashid, I am your virtual assistant. how can i help you?') engine.runAndWait() def take_command(): # (<-- !!!) try: with sr.Microphone() as source: print("listening....") voice = listenner.listen(source) command = listenner.recognize_google(voice) command = command.lower() if 'alexa' in command: command = command.replace('alexa', '') print(command) except: pass return command def run_alexa(): command = take_command() print(command) if 'song' in command: song = command.replace('song', '') talk('playing' + song) pywhatkit.playonyt(song) elif 'time' in command: time = datetime.datetime.now().strftime('%I:%M %p') print(time) talk("current time is" + time) elif 'who ' in command: person = command.replace('who ', '') info = wikipedia.summary(person, 1) print(info) talk(info) elif 'are you single' in command: talk('No, i am in a relationship with your wifi') elif 'joke' in command: talk(pyjokes.get_joke()) elif 'wikipedia' in command: ansu = command.replace('wikipedia', '') answer = wikipedia.summary(ansu, sentences=2) print(answer) talk(answer) elif 'open youtube' in command: print('opening you tube.....') talk('opening you tube.') webbrowser.open('youtube.com') elif 'open whatsapp' in command: webbrowser.open('web.whatsapp.com') print('opening whatsapp.....') talk('opening whatsapp.') elif 'open stackoverflow' in command: webbrowser.open('stackoverflow.com') print('opening Stackoverfolw.....') talk('opening Stack overflow .') elif 'music' in command: music_dir = 'C:\\Music' music= os.listdir(music_dir) #print(music) os.startfile(os.path.join(music_dir, music[5])) elif 'open Vs code' in command: code_path = 
"C:\\Users\\Rashid khan\\AppData\\Local\\Programs\\Microsoft VS Code\\Code.exe" os.startfile(code_path) elif 'how are you' in command: talk('I am feeling awesome and ready for your command') elif 'hear me' in command: talk('yes, I am getting you Rashid') elif 'exit' in command: exit() else: talk('Please say the command again.') while True: run_alexa()
27.181818
100
0.576254
import speech_recognition as sr import pyttsx3 import pywhatkit import datetime import wikipedia import pyjokes import webbrowser import os listenner = sr.Recognizer() engine = pyttsx3.init() voices = engine.getProperty("voices") engine.setProperty('voice', voices[1].id) def talk(text): engine.say(text) engine.runAndWait() engine.say('Hello Rashid, I am your virtual assistant. how can i help you?') engine.runAndWait() def take_command(): try: with sr.Microphone() as source: print("listening....") voice = listenner.listen(source) command = listenner.recognize_google(voice) command = command.lower() if 'alexa' in command: command = command.replace('alexa', '') print(command) except: pass return command def run_alexa(): command = take_command() print(command) if 'song' in command: song = command.replace('song', '') talk('playing' + song) pywhatkit.playonyt(song) elif 'time' in command: time = datetime.datetime.now().strftime('%I:%M %p') print(time) talk("current time is" + time) elif 'who ' in command: person = command.replace('who ', '') info = wikipedia.summary(person, 1) print(info) talk(info) elif 'are you single' in command: talk('No, i am in a relationship with your wifi') elif 'joke' in command: talk(pyjokes.get_joke()) elif 'wikipedia' in command: ansu = command.replace('wikipedia', '') answer = wikipedia.summary(ansu, sentences=2) print(answer) talk(answer) elif 'open youtube' in command: print('opening you tube.....') talk('opening you tube.') webbrowser.open('youtube.com') elif 'open whatsapp' in command: webbrowser.open('web.whatsapp.com') print('opening whatsapp.....') talk('opening whatsapp.') elif 'open stackoverflow' in command: webbrowser.open('stackoverflow.com') print('opening Stackoverfolw.....') talk('opening Stack overflow .') elif 'music' in command: music_dir = 'C:\\Music' music= os.listdir(music_dir) os.startfile(os.path.join(music_dir, music[5])) elif 'open Vs code' in command: code_path = "C:\\Users\\Rashid 
khan\\AppData\\Local\\Programs\\Microsoft VS Code\\Code.exe" os.startfile(code_path) elif 'how are you' in command: talk('I am feeling awesome and ready for your command') elif 'hear me' in command: talk('yes, I am getting you Rashid') elif 'exit' in command: exit() else: talk('Please say the command again.') while True: run_alexa()
true
true
f70c43e5dc3ab182467438f8d6eb12b710ec1190
615
py
Python
backend/restaurants/migrations/0007_restaurantlocation_owner.py
mahmoud-batman/resturants-club
095bf9b9b5d82a86d6e610517770f1223bb1ef59
[ "MIT" ]
null
null
null
backend/restaurants/migrations/0007_restaurantlocation_owner.py
mahmoud-batman/resturants-club
095bf9b9b5d82a86d6e610517770f1223bb1ef59
[ "MIT" ]
null
null
null
backend/restaurants/migrations/0007_restaurantlocation_owner.py
mahmoud-batman/resturants-club
095bf9b9b5d82a86d6e610517770f1223bb1ef59
[ "MIT" ]
null
null
null
# Generated by Django 3.1.1 on 2020-09-27 18:26 from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('restaurants', '0006_auto_20200927_0920'), ] operations = [ migrations.AddField( model_name='restaurantlocation', name='owner', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ]
27.954545
133
0.686179
from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('restaurants', '0006_auto_20200927_0920'), ] operations = [ migrations.AddField( model_name='restaurantlocation', name='owner', field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ]
true
true
f70c44b187ccf305b57f7409d5e44844eb21cf70
629
py
Python
test_bdmodels/testmigs/migrations/0001_initial.py
Matific/broken-down-models
abc665917928102052069ed07f6f396b5a603373
[ "MIT" ]
4
2021-08-18T14:34:17.000Z
2021-11-23T12:55:05.000Z
test_bdmodels/testmigs/migrations/0001_initial.py
Matific/broken-down-models
abc665917928102052069ed07f6f396b5a603373
[ "MIT" ]
13
2021-08-18T10:17:58.000Z
2022-02-13T15:11:21.000Z
test_bdmodels/testmigs/migrations/0001_initial.py
Matific/broken-down-models
abc665917928102052069ed07f6f396b5a603373
[ "MIT" ]
1
2021-09-09T16:08:04.000Z
2021-09-09T16:08:04.000Z
# Generated by Django 2.2.23 on 2021-07-07 13:18 from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='BigModel', fields=[ ('id', models.AutoField(primary_key=True, serialize=False)), ('a', models.BooleanField(default=True)), ('b', models.BooleanField(null=True)), ('c', models.IntegerField(default=3)), ('d', models.CharField(default='hi', max_length=10)), ], ), ]
25.16
76
0.54213
from django.db import migrations, models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='BigModel', fields=[ ('id', models.AutoField(primary_key=True, serialize=False)), ('a', models.BooleanField(default=True)), ('b', models.BooleanField(null=True)), ('c', models.IntegerField(default=3)), ('d', models.CharField(default='hi', max_length=10)), ], ), ]
true
true
f70c4510e1c769c5ddba6263ecda35d921c80b8e
4,335
py
Python
lightly/openapi_generated/swagger_client/models/job_status_data_result.py
Tekrific/lightly
75a1d56b4cee77f68e0f3166e3a412711d0dbb2d
[ "MIT" ]
1,515
2020-10-05T13:04:17.000Z
2022-03-31T16:14:55.000Z
lightly/openapi_generated/swagger_client/models/job_status_data_result.py
Tekrific/lightly
75a1d56b4cee77f68e0f3166e3a412711d0dbb2d
[ "MIT" ]
628
2020-10-14T11:38:51.000Z
2022-03-31T14:40:54.000Z
lightly/openapi_generated/swagger_client/models/job_status_data_result.py
Tekrific/lightly
75a1d56b4cee77f68e0f3166e3a412711d0dbb2d
[ "MIT" ]
108
2020-10-17T08:31:06.000Z
2022-03-20T16:44:22.000Z
# coding: utf-8 """ Lightly API Lightly.ai enables you to do self-supervised learning in an easy and intuitive way. The lightly.ai OpenAPI spec defines how one can interact with our REST API to unleash the full potential of lightly.ai # noqa: E501 OpenAPI spec version: 1.0.0 Contact: support@lightly.ai Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six from lightly.openapi_generated.swagger_client.configuration import Configuration class JobStatusDataResult(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'type': 'JobResultType', 'data': 'GeneralJobResult' } attribute_map = { 'type': 'type', 'data': 'data' } def __init__(self, type=None, data=None, _configuration=None): # noqa: E501 """JobStatusDataResult - a model defined in Swagger""" # noqa: E501 if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._type = None self._data = None self.discriminator = None self.type = type if data is not None: self.data = data @property def type(self): """Gets the type of this JobStatusDataResult. # noqa: E501 :return: The type of this JobStatusDataResult. # noqa: E501 :rtype: JobResultType """ return self._type @type.setter def type(self, type): """Sets the type of this JobStatusDataResult. :param type: The type of this JobStatusDataResult. # noqa: E501 :type: JobResultType """ if self._configuration.client_side_validation and type is None: raise ValueError("Invalid value for `type`, must not be `None`") # noqa: E501 self._type = type @property def data(self): """Gets the data of this JobStatusDataResult. # noqa: E501 :return: The data of this JobStatusDataResult. 
# noqa: E501 :rtype: GeneralJobResult """ return self._data @data.setter def data(self, data): """Sets the data of this JobStatusDataResult. :param data: The data of this JobStatusDataResult. # noqa: E501 :type: GeneralJobResult """ self._data = data def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(JobStatusDataResult, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, JobStatusDataResult): return False return self.to_dict() == other.to_dict() def __ne__(self, other): """Returns true if both objects are not equal""" if not isinstance(other, JobStatusDataResult): return True return self.to_dict() != other.to_dict()
28.708609
220
0.582699
import pprint import re import six from lightly.openapi_generated.swagger_client.configuration import Configuration class JobStatusDataResult(object): swagger_types = { 'type': 'JobResultType', 'data': 'GeneralJobResult' } attribute_map = { 'type': 'type', 'data': 'data' } def __init__(self, type=None, data=None, _configuration=None): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._type = None self._data = None self.discriminator = None self.type = type if data is not None: self.data = data @property def type(self): return self._type @type.setter def type(self, type): if self._configuration.client_side_validation and type is None: raise ValueError("Invalid value for `type`, must not be `None`") self._type = type @property def data(self): return self._data @data.setter def data(self, data): self._data = data def to_dict(self): result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(JobStatusDataResult, dict): for key, value in self.items(): result[key] = value return result def to_str(self): return pprint.pformat(self.to_dict()) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, JobStatusDataResult): return False return self.to_dict() == other.to_dict() def __ne__(self, other): if not isinstance(other, JobStatusDataResult): return True return self.to_dict() != other.to_dict()
true
true
f70c45334caae225348d1f4da8df8a9711e75ae9
349
py
Python
Flask/apps/main/errors.py
bkjml/online-bus-tickting-system
8b3728238417532b34b90e2af98d445b10c1c3be
[ "MIT" ]
null
null
null
Flask/apps/main/errors.py
bkjml/online-bus-tickting-system
8b3728238417532b34b90e2af98d445b10c1c3be
[ "MIT" ]
null
null
null
Flask/apps/main/errors.py
bkjml/online-bus-tickting-system
8b3728238417532b34b90e2af98d445b10c1c3be
[ "MIT" ]
null
null
null
from flask import render_template from .import main @main.app_errorhandler(404) #if error handlers used instead app_errorhandler the instnace is available only for errors originate in blueprint def page_not_found(e): return render_template(''), 404 @main.app_errorhandler(500) def internal_server_error(e): return render_template(''), 500
34.9
142
0.796562
from flask import render_template from .import main @main.app_errorhandler(404) def page_not_found(e): return render_template(''), 404 @main.app_errorhandler(500) def internal_server_error(e): return render_template(''), 500
true
true
f70c45d1c780fe80cae7e22e7cf26e9752f6ec9c
341
py
Python
lib/setup.py
quartictech/orinoco
d0f2dec19a687f5140ae5d1b61975be617065690
[ "BSD-3-Clause" ]
null
null
null
lib/setup.py
quartictech/orinoco
d0f2dec19a687f5140ae5d1b61975be617065690
[ "BSD-3-Clause" ]
null
null
null
lib/setup.py
quartictech/orinoco
d0f2dec19a687f5140ae5d1b61975be617065690
[ "BSD-3-Clause" ]
null
null
null
from setuptools import setup setup(name='orinoco', version='0.1', description='Sweet data integration', author='Quartic Technologies', author_email='alex@quartic.io', license='MIT', packages=['orinoco'], install_requires=[ 'aiohttp', 'pyformance' ], zip_safe=False)
22.733333
43
0.592375
from setuptools import setup setup(name='orinoco', version='0.1', description='Sweet data integration', author='Quartic Technologies', author_email='alex@quartic.io', license='MIT', packages=['orinoco'], install_requires=[ 'aiohttp', 'pyformance' ], zip_safe=False)
true
true
f70c4887921a9859df7647540889e198cf25bbcd
3,362
py
Python
src/utils/output_inference_images.py
stasys-hub/Collembola_AI
373266d5edde0ad04d898de82c2a1eac89961e30
[ "BSD-3-Clause" ]
1
2022-03-31T12:49:12.000Z
2022-03-31T12:49:12.000Z
src/utils/output_inference_images.py
stasys-hub/Collembola_AI
373266d5edde0ad04d898de82c2a1eac89961e30
[ "BSD-3-Clause" ]
null
null
null
src/utils/output_inference_images.py
stasys-hub/Collembola_AI
373266d5edde0ad04d898de82c2a1eac89961e30
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3 """ Project title: CollembolAI Authors: Stephan Weißbach, Stanislav Sys, Clément Schneider Original repository: https://github.com/stasys-hub/Collembola_AI.git Module title: output_inference_images .py Purpose: draws bounding boxes from annotation on pictures. If provided with groundtruth, it will also specifiy correctness of predictions Dependencies: See ReadMe Last Update: 18.02.2022 """ from PIL import Image, ImageFont, ImageDraw import os from utils.cocoutils import coco2df def draw_coco_bbox( coco, out_dir, coco_dir, eval_mode=False, prefix="annotated", line_width=10, fontsize=80, fontYshift=-70, ): """ Detectron2 module for writing annotated pictures was not so explicit to me, and default output not so pretty. This function will draw the annotation on the pictures of a coco dataset. The dataset can be provided as a coco instance, or as a dataframe resulting from coco2df. Modified pictures are written to the out_dir, with a name prefix. To adjust display, simply change line_width (= box line), font_size (= label font). Labels text can be shifted vertically with fontYshift. 
""" # define some colors for bounding boxes with open( os.path.join(os.path.dirname(os.path.realpath(__file__)), "colors.txt"), "r" ) as colorfile: colors = [color.replace("\n", "") for color in colorfile] Image.MAX_IMAGE_PIXELS = None fnt = ImageFont.truetype( os.path.join(os.path.dirname(os.path.realpath(__file__)), "FreeMono.ttf"), fontsize, ) # convert result dataframe to coco try: coco_df = coco2df(coco) except: coco_df = coco # create label for bounding box if eval_mode: coco_df["label"] = [ f"{' '.join(row['category_name'].split('__')[0].split('_'))} {round(row['score'], 2)} {'true detection' if not row['is_false_positive'] else 'false detection'}" for _, row in coco_df.iterrows() ] else: coco_df["label"] = [ f"{' '.join(row['category_name'].split('__')[0].split('_'))} {round(row['score'], 2)}" for _, row in coco_df.iterrows() ] resh = lambda x: ((x[0], x[1]), (x[0] + x[2], x[1] + x[3])) coco_df["coordinates"] = coco_df["bbox"].apply(resh) # sample colors randomly # create dictionary so that every class maps to one color colormap = {} for idx, classlabel in enumerate(coco_df["category_name"].unique()): colormap[classlabel] = colors[idx % len(colors)] # add a color column for idx, row in coco_df.iterrows(): coco_df.loc[idx, "color"] = colormap[row["category_name"]] for img_name in coco_df.file_name.unique(): source_img = Image.open(f"{coco_dir}/{img_name}") draw = ImageDraw.Draw(source_img) for row in coco_df[coco_df["file_name"] == img_name][ ["label", "coordinates", "color"] ].values: draw.rectangle(row[1], outline=row[2], width=line_width) draw.text( (row[1][0][0], row[1][0][1] + fontYshift), row[0], font=fnt, fill=row[2] ) print(f"Writing {out_dir}/{prefix}_{img_name}") source_img.save(f"{out_dir}/{prefix}_{img_name}", "JPEG")
39.552941
172
0.629982
from PIL import Image, ImageFont, ImageDraw import os from utils.cocoutils import coco2df def draw_coco_bbox( coco, out_dir, coco_dir, eval_mode=False, prefix="annotated", line_width=10, fontsize=80, fontYshift=-70, ): with open( os.path.join(os.path.dirname(os.path.realpath(__file__)), "colors.txt"), "r" ) as colorfile: colors = [color.replace("\n", "") for color in colorfile] Image.MAX_IMAGE_PIXELS = None fnt = ImageFont.truetype( os.path.join(os.path.dirname(os.path.realpath(__file__)), "FreeMono.ttf"), fontsize, ) try: coco_df = coco2df(coco) except: coco_df = coco if eval_mode: coco_df["label"] = [ f"{' '.join(row['category_name'].split('__')[0].split('_'))} {round(row['score'], 2)} {'true detection' if not row['is_false_positive'] else 'false detection'}" for _, row in coco_df.iterrows() ] else: coco_df["label"] = [ f"{' '.join(row['category_name'].split('__')[0].split('_'))} {round(row['score'], 2)}" for _, row in coco_df.iterrows() ] resh = lambda x: ((x[0], x[1]), (x[0] + x[2], x[1] + x[3])) coco_df["coordinates"] = coco_df["bbox"].apply(resh) colormap = {} for idx, classlabel in enumerate(coco_df["category_name"].unique()): colormap[classlabel] = colors[idx % len(colors)] for idx, row in coco_df.iterrows(): coco_df.loc[idx, "color"] = colormap[row["category_name"]] for img_name in coco_df.file_name.unique(): source_img = Image.open(f"{coco_dir}/{img_name}") draw = ImageDraw.Draw(source_img) for row in coco_df[coco_df["file_name"] == img_name][ ["label", "coordinates", "color"] ].values: draw.rectangle(row[1], outline=row[2], width=line_width) draw.text( (row[1][0][0], row[1][0][1] + fontYshift), row[0], font=fnt, fill=row[2] ) print(f"Writing {out_dir}/{prefix}_{img_name}") source_img.save(f"{out_dir}/{prefix}_{img_name}", "JPEG")
true
true
f70c488c62af1145be4b18caddcaa7a6a066aabc
60
py
Python
tutorials/unittests/python/src/evenodd_finder.py
nithyarajendiran/softwaretesting
0fdaaca51804529acf5578e59b2afb0119524dbd
[ "MIT" ]
13
2016-02-08T04:32:22.000Z
2021-11-15T16:19:10.000Z
tutorials/unittests/python/src/evenodd_finder.py
nithyarajendiran/softwaretesting
0fdaaca51804529acf5578e59b2afb0119524dbd
[ "MIT" ]
3
2016-04-18T19:04:27.000Z
2017-01-21T19:07:01.000Z
tutorials/unittests/python/src/evenodd_finder.py
nithyarajendiran/softwaretesting
0fdaaca51804529acf5578e59b2afb0119524dbd
[ "MIT" ]
67
2015-07-27T23:44:47.000Z
2022-02-20T14:38:49.000Z
def is_even(n): if n%2 is 0: return True return False
12
15
0.65
def is_even(n): if n%2 is 0: return True return False
true
true
f70c48d4ef5485c7e9a680e396b7437268194f99
924
bzl
Python
third_party/llvm/workspace.bzl
Jonathan-Atrey/tensorflow
7b508870c2d19dc9772745aa55d953517fcb406d
[ "Apache-2.0" ]
null
null
null
third_party/llvm/workspace.bzl
Jonathan-Atrey/tensorflow
7b508870c2d19dc9772745aa55d953517fcb406d
[ "Apache-2.0" ]
null
null
null
third_party/llvm/workspace.bzl
Jonathan-Atrey/tensorflow
7b508870c2d19dc9772745aa55d953517fcb406d
[ "Apache-2.0" ]
null
null
null
"""Provides the repository macro to import LLVM.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(name): """Imports LLVM.""" LLVM_COMMIT = "5020e104a1349a0ae6532b007b48c68b8f64c049" LLVM_SHA256 = "3113dbc5f7b3e6405375eedfe95e220268bcc4818c8d8453a23ef00f82d4b172" tf_http_archive( name = name, sha256 = LLVM_SHA256, strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT), urls = [ "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), ], build_file = "//third_party/llvm:BUILD.bazel", patch_file = "//third_party/llvm:macos_build_fix.patch", link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"}, )
42
149
0.676407
load("//third_party:repo.bzl", "tf_http_archive") def repo(name): LLVM_COMMIT = "5020e104a1349a0ae6532b007b48c68b8f64c049" LLVM_SHA256 = "3113dbc5f7b3e6405375eedfe95e220268bcc4818c8d8453a23ef00f82d4b172" tf_http_archive( name = name, sha256 = LLVM_SHA256, strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT), urls = [ "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), ], build_file = "//third_party/llvm:BUILD.bazel", patch_file = "//third_party/llvm:macos_build_fix.patch", link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"}, )
true
true
f70c49fdef97e4475c713c043941d0fc73f98979
638
py
Python
example/tests/orders/test_orders_signals.py
icvntechstudio/django-salesman
017dd31713e37a445500c18e0c7034608f4f62a7
[ "BSD-3-Clause" ]
222
2020-02-03T16:58:56.000Z
2022-03-30T16:35:35.000Z
example/tests/orders/test_orders_signals.py
icvntechstudio/django-salesman
017dd31713e37a445500c18e0c7034608f4f62a7
[ "BSD-3-Clause" ]
16
2020-03-17T12:38:27.000Z
2022-03-16T13:14:55.000Z
example/tests/orders/test_orders_signals.py
icvntechstudio/django-salesman
017dd31713e37a445500c18e0c7034608f4f62a7
[ "BSD-3-Clause" ]
23
2020-08-28T04:46:33.000Z
2022-01-12T21:57:39.000Z
import pytest from salesman.orders.models import Order from salesman.orders.signals import status_changed _signal_called = False def on_status_changed(sender, order, new_status, old_status, **kwargs): global _signal_called _signal_called = True @pytest.mark.django_db def test_order_changed_signal(rf): status_changed.connect(on_status_changed, dispatch_uid="test_status_changed") order = Order.objects.create(ref="1", subtotal=100, total=100) order.status = order.statuses.COMPLETED order.save() assert _signal_called status_changed.disconnect(on_status_changed, dispatch_uid="test_status_changed")
29
84
0.789969
import pytest from salesman.orders.models import Order from salesman.orders.signals import status_changed _signal_called = False def on_status_changed(sender, order, new_status, old_status, **kwargs): global _signal_called _signal_called = True @pytest.mark.django_db def test_order_changed_signal(rf): status_changed.connect(on_status_changed, dispatch_uid="test_status_changed") order = Order.objects.create(ref="1", subtotal=100, total=100) order.status = order.statuses.COMPLETED order.save() assert _signal_called status_changed.disconnect(on_status_changed, dispatch_uid="test_status_changed")
true
true
f70c4b0fbc3e1bc9072c62f3aaa47973f8478299
37
py
Python
archive/model/__init__.py
winstonwzhang/osumapper
e773b45650f8a013de48ff169a93ea1745c6f931
[ "Apache-2.0" ]
null
null
null
archive/model/__init__.py
winstonwzhang/osumapper
e773b45650f8a013de48ff169a93ea1745c6f931
[ "Apache-2.0" ]
null
null
null
archive/model/__init__.py
winstonwzhang/osumapper
e773b45650f8a013de48ff169a93ea1745c6f931
[ "Apache-2.0" ]
null
null
null
import sys sys.path.append('model/')
12.333333
25
0.72973
import sys sys.path.append('model/')
true
true
f70c4c0aca8a0ff7a8de0e6c05ec2770c0771307
555
py
Python
pyvo/vomas/utils/file_utils.py
lmichel/pyvo
8296bee2e799843909805fb6ae528a9b23776e8d
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
null
null
null
pyvo/vomas/utils/file_utils.py
lmichel/pyvo
8296bee2e799843909805fb6ae528a9b23776e8d
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
null
null
null
pyvo/vomas/utils/file_utils.py
lmichel/pyvo
8296bee2e799843909805fb6ae528a9b23776e8d
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
null
null
null
''' Created on Feb 26, 2021 @author: laurentmichel ''' import os class FileUtils(object): file_path = os.path.dirname(os.path.realpath(__file__)) @staticmethod def get_datadir(): return os.path.realpath(os.path.join(FileUtils.file_path, "../client/tests/", "data")) @staticmethod def get_projectdir(): return os.path.realpath(os.path.join(FileUtils.file_path, "../../")) @staticmethod def get_schemadir(): return os.path.realpath(os.path.join(FileUtils.file_path, "../../", "schema"))
25.227273
94
0.643243
import os class FileUtils(object): file_path = os.path.dirname(os.path.realpath(__file__)) @staticmethod def get_datadir(): return os.path.realpath(os.path.join(FileUtils.file_path, "../client/tests/", "data")) @staticmethod def get_projectdir(): return os.path.realpath(os.path.join(FileUtils.file_path, "../../")) @staticmethod def get_schemadir(): return os.path.realpath(os.path.join(FileUtils.file_path, "../../", "schema"))
true
true
f70c4cd98e14e53f4160fd2649739493408ee4e7
5,578
py
Python
train.py
briancylui/ALOCC-CVPR2018
78b6a1e8f3fcde8a46a88294926074a65ff0726a
[ "MIT" ]
208
2018-03-16T11:48:59.000Z
2022-03-22T14:34:06.000Z
train.py
briancylui/ALOCC-CVPR2018
78b6a1e8f3fcde8a46a88294926074a65ff0726a
[ "MIT" ]
24
2018-05-10T09:07:08.000Z
2022-02-15T10:03:46.000Z
train.py
briancylui/ALOCC-CVPR2018
78b6a1e8f3fcde8a46a88294926074a65ff0726a
[ "MIT" ]
80
2018-03-21T08:16:18.000Z
2022-03-13T12:52:29.000Z
import os import numpy as np from models import ALOCC_Model from utils import pp, visualize, to_json, show_all_variables import tensorflow as tf flags = tf.app.flags flags.DEFINE_integer("epoch", 40, "Epoch to train [25]") flags.DEFINE_float("learning_rate", 0.002, "Learning rate of for adam [0.0002]") flags.DEFINE_float("beta1", 0.5, "Momentum term of adam [0.5]") flags.DEFINE_integer("attention_label", 1, "Conditioned label that growth attention of training label [1]") flags.DEFINE_float("r_alpha", 0.2, "Refinement parameter [0.2]") flags.DEFINE_integer("train_size", np.inf, "The size of train images [np.inf]") flags.DEFINE_integer("batch_size",128, "The size of batch images [64]") flags.DEFINE_integer("input_height", 45, "The size of image to use. [45]") flags.DEFINE_integer("input_width", None, "The size of image to use. If None, same value as input_height [None]") flags.DEFINE_integer("output_height", 45, "The size of the output images to produce [45]") flags.DEFINE_integer("output_width", None, "The size of the output images to produce. If None, same value as output_height [None]") flags.DEFINE_string("dataset", "UCSD", "The name of dataset [UCSD, mnist]") flags.DEFINE_string("dataset_address", "./dataset/UCSD_Anomaly_Dataset.v1p2/UCSDped2/Train", "The path of dataset") flags.DEFINE_string("input_fname_pattern", "*", "Glob pattern of filename of input images [*]") flags.DEFINE_string("checkpoint_dir", "checkpoint", "Directory name to save the checkpoints [checkpoint]") flags.DEFINE_string("log_dir", "log", "Directory name to save the log [log]") flags.DEFINE_string("sample_dir", "samples", "Directory name to save the image samples [samples]") flags.DEFINE_boolean("train", True, "True for training, False for testing [False]") FLAGS = flags.FLAGS def check_some_assertions(): """ to check some assertions in inputs and also check sth else. 
""" if FLAGS.input_width is None: FLAGS.input_width = FLAGS.input_height if FLAGS.output_width is None: FLAGS.output_width = FLAGS.output_height if not os.path.exists(FLAGS.checkpoint_dir): os.makedirs(FLAGS.checkpoint_dir) if not os.path.exists(FLAGS.log_dir): os.makedirs(FLAGS.log_dir) if not os.path.exists(FLAGS.sample_dir): os.makedirs(FLAGS.sample_dir) def main(_): """ The main function for training steps """ pp.pprint(flags.FLAGS.__flags) n_per_itr_print_results = 100 kb_work_on_patch = True # --------------------------------------------------------------------------------------------- # --------------------------------------------------------------------------------------------- # Manual Switchs ------------------------------------------------------------------------------ # --------------------------------------------------------------------------------------------- # DATASET PARAMETER : UCSD #FLAGS.dataset = 'UCSD' #FLAGS.dataset_address = './dataset/UCSD_Anomaly_Dataset.v1p2/UCSDped2/Train' nd_input_frame_size = (240, 360) nd_slice_size = (45, 45) n_stride = 25 n_fetch_data = 600 # --------------------------------------------------------------------------------------------- # # DATASET PARAMETER : MNIST # FLAGS.dataset = 'mnist' # FLAGS.dataset_address = './dataset/mnist' # nd_input_frame_size = (28, 28) # nd_slice_size = (28, 28) FLAGS.train = True FLAGS.input_width = nd_slice_size[0] FLAGS.input_height = nd_slice_size[1] FLAGS.output_width = nd_slice_size[0] FLAGS.output_height = nd_slice_size[1] FLAGS.sample_dir = 'export/'+FLAGS.dataset +'_%d.%d'%(nd_slice_size[0],nd_slice_size[1]) FLAGS.input_fname_pattern = '*' check_some_assertions() # manual handling of GPU gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.4) run_config = tf.ConfigProto(gpu_options=gpu_options) run_config.gpu_options.allow_growth=True with tf.Session(config=run_config) as sess: tmp_model = ALOCC_Model( sess, input_width=FLAGS.input_width, 
input_height=FLAGS.input_height, output_width=FLAGS.output_width, output_height=FLAGS.output_height, batch_size=FLAGS.batch_size, sample_num=FLAGS.batch_size, attention_label=FLAGS.attention_label, r_alpha=FLAGS.r_alpha, dataset_name=FLAGS.dataset, dataset_address=FLAGS.dataset_address, input_fname_pattern=FLAGS.input_fname_pattern, checkpoint_dir=FLAGS.checkpoint_dir, is_training = FLAGS.train, log_dir=FLAGS.log_dir, sample_dir=FLAGS.sample_dir, nd_patch_size=nd_slice_size, n_stride=n_stride, n_per_itr_print_results=n_per_itr_print_results, kb_work_on_patch=kb_work_on_patch, nd_input_frame_size = nd_input_frame_size, n_fetch_data=n_fetch_data) #show_all_variables() if FLAGS.train: print('Program is on Train Mode') tmp_model.train(FLAGS) else: if not tmp_model.load(FLAGS.checkpoint_dir)[0]: print('Program is on Test Mode') raise Exception("[!] Train a model first, then run test mode from file test.py") if __name__ == '__main__': tf.app.run()
44.269841
131
0.609896
import os import numpy as np from models import ALOCC_Model from utils import pp, visualize, to_json, show_all_variables import tensorflow as tf flags = tf.app.flags flags.DEFINE_integer("epoch", 40, "Epoch to train [25]") flags.DEFINE_float("learning_rate", 0.002, "Learning rate of for adam [0.0002]") flags.DEFINE_float("beta1", 0.5, "Momentum term of adam [0.5]") flags.DEFINE_integer("attention_label", 1, "Conditioned label that growth attention of training label [1]") flags.DEFINE_float("r_alpha", 0.2, "Refinement parameter [0.2]") flags.DEFINE_integer("train_size", np.inf, "The size of train images [np.inf]") flags.DEFINE_integer("batch_size",128, "The size of batch images [64]") flags.DEFINE_integer("input_height", 45, "The size of image to use. [45]") flags.DEFINE_integer("input_width", None, "The size of image to use. If None, same value as input_height [None]") flags.DEFINE_integer("output_height", 45, "The size of the output images to produce [45]") flags.DEFINE_integer("output_width", None, "The size of the output images to produce. 
If None, same value as output_height [None]") flags.DEFINE_string("dataset", "UCSD", "The name of dataset [UCSD, mnist]") flags.DEFINE_string("dataset_address", "./dataset/UCSD_Anomaly_Dataset.v1p2/UCSDped2/Train", "The path of dataset") flags.DEFINE_string("input_fname_pattern", "*", "Glob pattern of filename of input images [*]") flags.DEFINE_string("checkpoint_dir", "checkpoint", "Directory name to save the checkpoints [checkpoint]") flags.DEFINE_string("log_dir", "log", "Directory name to save the log [log]") flags.DEFINE_string("sample_dir", "samples", "Directory name to save the image samples [samples]") flags.DEFINE_boolean("train", True, "True for training, False for testing [False]") FLAGS = flags.FLAGS def check_some_assertions(): if FLAGS.input_width is None: FLAGS.input_width = FLAGS.input_height if FLAGS.output_width is None: FLAGS.output_width = FLAGS.output_height if not os.path.exists(FLAGS.checkpoint_dir): os.makedirs(FLAGS.checkpoint_dir) if not os.path.exists(FLAGS.log_dir): os.makedirs(FLAGS.log_dir) if not os.path.exists(FLAGS.sample_dir): os.makedirs(FLAGS.sample_dir) def main(_): pp.pprint(flags.FLAGS.__flags) n_per_itr_print_results = 100 kb_work_on_patch = True nd_input_frame_size = (240, 360) nd_slice_size = (45, 45) n_stride = 25 n_fetch_data = 600 FLAGS.train = True FLAGS.input_width = nd_slice_size[0] FLAGS.input_height = nd_slice_size[1] FLAGS.output_width = nd_slice_size[0] FLAGS.output_height = nd_slice_size[1] FLAGS.sample_dir = 'export/'+FLAGS.dataset +'_%d.%d'%(nd_slice_size[0],nd_slice_size[1]) FLAGS.input_fname_pattern = '*' check_some_assertions() gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.4) run_config = tf.ConfigProto(gpu_options=gpu_options) run_config.gpu_options.allow_growth=True with tf.Session(config=run_config) as sess: tmp_model = ALOCC_Model( sess, input_width=FLAGS.input_width, input_height=FLAGS.input_height, output_width=FLAGS.output_width, output_height=FLAGS.output_height, 
batch_size=FLAGS.batch_size, sample_num=FLAGS.batch_size, attention_label=FLAGS.attention_label, r_alpha=FLAGS.r_alpha, dataset_name=FLAGS.dataset, dataset_address=FLAGS.dataset_address, input_fname_pattern=FLAGS.input_fname_pattern, checkpoint_dir=FLAGS.checkpoint_dir, is_training = FLAGS.train, log_dir=FLAGS.log_dir, sample_dir=FLAGS.sample_dir, nd_patch_size=nd_slice_size, n_stride=n_stride, n_per_itr_print_results=n_per_itr_print_results, kb_work_on_patch=kb_work_on_patch, nd_input_frame_size = nd_input_frame_size, n_fetch_data=n_fetch_data) if FLAGS.train: print('Program is on Train Mode') tmp_model.train(FLAGS) else: if not tmp_model.load(FLAGS.checkpoint_dir)[0]: print('Program is on Test Mode') raise Exception("[!] Train a model first, then run test mode from file test.py") if __name__ == '__main__': tf.app.run()
true
true
f70c4d3b5c86a34651890132efccc338f4586820
918
py
Python
pipeline/pipeline_import_paths.py
jkabalar/kapture-localization
647ef7cfdfbdac37297682baca1bf13608b6d6e8
[ "BSD-3-Clause" ]
118
2020-11-04T16:48:12.000Z
2022-03-28T13:15:37.000Z
pipeline/pipeline_import_paths.py
jkabalar/kapture-localization
647ef7cfdfbdac37297682baca1bf13608b6d6e8
[ "BSD-3-Clause" ]
23
2020-10-19T09:01:37.000Z
2022-03-25T09:12:31.000Z
pipeline/pipeline_import_paths.py
jkabalar/kapture-localization
647ef7cfdfbdac37297682baca1bf13608b6d6e8
[ "BSD-3-Clause" ]
29
2020-11-25T05:28:32.000Z
2022-03-23T07:20:23.000Z
# Copyright 2020-present NAVER Corp. Under BSD 3-clause license import sys import os.path as path # when developing, prefer local kapture to the one installed on the system HERE_PATH = path.abspath(path.normpath(path.dirname(__file__))) KATURE_LOCALIZATION_REPO_PATH = path.normpath(path.join(HERE_PATH, '../')) # check the presence of kapture directory in repo to be sure its not the installed version if path.isdir(path.join(KATURE_LOCALIZATION_REPO_PATH, 'kapture_localization')): # workaround for sibling import sys.path.insert(0, KATURE_LOCALIZATION_REPO_PATH) # KATURE_LOCALIZATION_TOOLS_PATH = path.normpath(path.join(HERE_PATH, '../')) # # check the presence of pipeline directory in repo to be sure its not the installed version # if path.isdir(path.join(KATURE_LOCALIZATION_TOOLS_PATH, 'pipeline')): # # workaround for sibling import # sys.path.insert(0, KATURE_LOCALIZATION_TOOLS_PATH)
43.714286
93
0.781046
import sys import os.path as path HERE_PATH = path.abspath(path.normpath(path.dirname(__file__))) KATURE_LOCALIZATION_REPO_PATH = path.normpath(path.join(HERE_PATH, '../')) if path.isdir(path.join(KATURE_LOCALIZATION_REPO_PATH, 'kapture_localization')): sys.path.insert(0, KATURE_LOCALIZATION_REPO_PATH)
true
true
f70c505c88c54964fea3593bedb0769499834a3c
257
py
Python
examples/exclude_defaults_none.py
wyfo/apimodel
99e6b23504e8ca775fda9dfa93c4e350211c1b8a
[ "MIT" ]
118
2020-05-24T20:22:27.000Z
2022-03-23T23:04:50.000Z
examples/exclude_defaults_none.py
wyfo/apimodel
99e6b23504e8ca775fda9dfa93c4e350211c1b8a
[ "MIT" ]
89
2021-02-26T14:22:41.000Z
2022-03-30T22:17:06.000Z
examples/exclude_defaults_none.py
wyfo/apimodel
99e6b23504e8ca775fda9dfa93c4e350211c1b8a
[ "MIT" ]
13
2020-12-24T22:43:50.000Z
2022-03-13T15:03:44.000Z
from dataclasses import dataclass from apischema import serialize @dataclass class Foo: bar: int = 0 baz: str | None = None assert serialize(Foo, Foo(), exclude_defaults=True) == {} assert serialize(Foo, Foo(), exclude_none=True) == {"bar": 0}
18.357143
61
0.692607
from dataclasses import dataclass from apischema import serialize @dataclass class Foo: bar: int = 0 baz: str | None = None assert serialize(Foo, Foo(), exclude_defaults=True) == {} assert serialize(Foo, Foo(), exclude_none=True) == {"bar": 0}
true
true
f70c51db8a5f81dccf5c9556e871a85a69eb8c42
2,267
py
Python
Code/Molweni/MolweniConfig.py
Aopolin-Lv/NLP_MRC
62507e1fa0d024e2c6ca3cab37f94a015c071cf9
[ "Apache-2.0" ]
null
null
null
Code/Molweni/MolweniConfig.py
Aopolin-Lv/NLP_MRC
62507e1fa0d024e2c6ca3cab37f94a015c071cf9
[ "Apache-2.0" ]
null
null
null
Code/Molweni/MolweniConfig.py
Aopolin-Lv/NLP_MRC
62507e1fa0d024e2c6ca3cab37f94a015c071cf9
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 """ @Time: 2020/11/14 2:15 下午 @Author: Aopolin @File: MolweniConfig.py @Contact: aopolin.ii@gmail.com @Description: """ class Config(object): def __init__(self): self.SQUAD_DIR = "../../Dataset/squad2.0" self.MOLWENI_DIR = "../../Dataset/Molweni" self.model_type = "bert" # ["distilbert", "albert", "bert", "xlnet", ...] self.model_name_or_path = "bert-base-uncased" self.output_dir = "/tmp/debug_squad/" # 输出目录路径 self.data_dir = "" self.train_file = self.MOLWENI_DIR + "/train_small.json" self.predict_file = self.MOLWENI_DIR + "/dev_small.json" self.config_name = "" self.tokenizer_name = "" self.cache_dir = "" self.version_2_with_negative = True self.null_score_diff_threshold = 0.0 self.n_gpu = 0 self.max_seq_length = 384 self.doc_stride = 128 self.max_query_length = 64 self.do_train = True self.do_eval = True self.evaluate_during_training = False self.do_lower_case = True self.per_gpu_train_batch_size = 12 self.per_gpu_eval_batch_size = 8 self.learning_rate = 3e-5 self.gradient_accumulation_steps = 1 # Number of updates steps to accumulate before performing a backward/update pass self.weight_decay = 0.0 self.adam_epsilon = 1e-8 self.max_grad_norm = 1.0 self.num_train_epochs = 1.0 # 训练的epoch数 self.max_steps = -1 # 最多运行多少步,若设置>0, 将会覆盖num_train_epochs self.warmup_steps = 0 self.n_best_size = 20 self.max_answer_length = 30 self.verbose_logging = False self.lang_id = 0 self.logging_steps = 500 # 打log的步长 self.save_steps = 2000 # 保存模型及其参数的步长 self.eval_all_checkpoints = False self.no_cuda = True self.overwrite_cache = False # 重写缓存文件 self.seed = 42 # 随机种子 self.local_rank = -1 # 分布式计算用到的进程编号,-1表示不使用分布式 self.fp16 = False self.fp16_opt_level = "01" self.server_ip = "" self.server_port = "" self.threads = 1 self.bert_dir = "../../Model_files/bert-base-uncased/" self.device = "cpu"
31.929577
128
0.602117
class Config(object): def __init__(self): self.SQUAD_DIR = "../../Dataset/squad2.0" self.MOLWENI_DIR = "../../Dataset/Molweni" self.model_type = "bert" self.model_name_or_path = "bert-base-uncased" self.output_dir = "/tmp/debug_squad/" self.data_dir = "" self.train_file = self.MOLWENI_DIR + "/train_small.json" self.predict_file = self.MOLWENI_DIR + "/dev_small.json" self.config_name = "" self.tokenizer_name = "" self.cache_dir = "" self.version_2_with_negative = True self.null_score_diff_threshold = 0.0 self.n_gpu = 0 self.max_seq_length = 384 self.doc_stride = 128 self.max_query_length = 64 self.do_train = True self.do_eval = True self.evaluate_during_training = False self.do_lower_case = True self.per_gpu_train_batch_size = 12 self.per_gpu_eval_batch_size = 8 self.learning_rate = 3e-5 self.gradient_accumulation_steps = 1 self.weight_decay = 0.0 self.adam_epsilon = 1e-8 self.max_grad_norm = 1.0 self.num_train_epochs = 1.0 self.max_steps = -1 self.warmup_steps = 0 self.n_best_size = 20 self.max_answer_length = 30 self.verbose_logging = False self.lang_id = 0 self.logging_steps = 500 self.save_steps = 2000 self.eval_all_checkpoints = False self.no_cuda = True self.overwrite_cache = False self.seed = 42 self.local_rank = -1 self.fp16 = False self.fp16_opt_level = "01" self.server_ip = "" self.server_port = "" self.threads = 1 self.bert_dir = "../../Model_files/bert-base-uncased/" self.device = "cpu"
true
true
f70c531988d553f61ca2d9e8c0976401be0015f1
8,831
py
Python
google/cloud/aiplatform_v1/types/dataset.py
connor-mccarthy/python-aiplatform
184f7f327aa00b4c8d1acc24dcb1c4c4be6c5bcc
[ "Apache-2.0" ]
null
null
null
google/cloud/aiplatform_v1/types/dataset.py
connor-mccarthy/python-aiplatform
184f7f327aa00b4c8d1acc24dcb1c4c4be6c5bcc
[ "Apache-2.0" ]
null
null
null
google/cloud/aiplatform_v1/types/dataset.py
connor-mccarthy/python-aiplatform
184f7f327aa00b4c8d1acc24dcb1c4c4be6c5bcc
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import proto # type: ignore from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec from google.cloud.aiplatform_v1.types import io from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore __protobuf__ = proto.module( package="google.cloud.aiplatform.v1", manifest={ "Dataset", "ImportDataConfig", "ExportDataConfig", }, ) class Dataset(proto.Message): r"""A collection of DataItems and Annotations on them. Attributes: name (str): Output only. The resource name of the Dataset. display_name (str): Required. The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. description (str): Optional. The description of the Dataset. metadata_schema_uri (str): Required. Points to a YAML file stored on Google Cloud Storage describing additional information about the Dataset. The schema is defined as an OpenAPI 3.0.2 Schema Object. The schema files that can be used here are found in gs://google-cloud-aiplatform/schema/dataset/metadata/. metadata (google.protobuf.struct_pb2.Value): Required. Additional information about the Dataset. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp when this Dataset was created. update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Timestamp when this Dataset was last updated. 
etag (str): Used to perform consistent read-modify-write updates. If not set, a blind "overwrite" update happens. labels (Sequence[google.cloud.aiplatform_v1.types.Dataset.LabelsEntry]): The labels with user-defined metadata to organize your Datasets. Label keys and values can be no longer than 64 characters (Unicode codepoints), can only contain lowercase letters, numeric characters, underscores and dashes. International characters are allowed. No more than 64 user labels can be associated with one Dataset (System labels are excluded). See https://goo.gl/xmQnxf for more information and examples of labels. System reserved label keys are prefixed with "aiplatform.googleapis.com/" and are immutable. Following system labels exist for each Dataset: - "aiplatform.googleapis.com/dataset_metadata_schema": output only, its value is the [metadata_schema's][google.cloud.aiplatform.v1.Dataset.metadata_schema_uri] title. encryption_spec (google.cloud.aiplatform_v1.types.EncryptionSpec): Customer-managed encryption key spec for a Dataset. If set, this Dataset and all sub-resources of this Dataset will be secured by this key. 
""" name = proto.Field( proto.STRING, number=1, ) display_name = proto.Field( proto.STRING, number=2, ) description = proto.Field( proto.STRING, number=16, ) metadata_schema_uri = proto.Field( proto.STRING, number=3, ) metadata = proto.Field( proto.MESSAGE, number=8, message=struct_pb2.Value, ) create_time = proto.Field( proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) update_time = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) etag = proto.Field( proto.STRING, number=6, ) labels = proto.MapField( proto.STRING, proto.STRING, number=7, ) encryption_spec = proto.Field( proto.MESSAGE, number=11, message=gca_encryption_spec.EncryptionSpec, ) class ImportDataConfig(proto.Message): r"""Describes the location from where we import data into a Dataset, together with the labels that will be applied to the DataItems and the Annotations. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: gcs_source (google.cloud.aiplatform_v1.types.GcsSource): The Google Cloud Storage location for the input content. This field is a member of `oneof`_ ``source``. data_item_labels (Sequence[google.cloud.aiplatform_v1.types.ImportDataConfig.DataItemLabelsEntry]): Labels that will be applied to newly imported DataItems. If an identical DataItem as one being imported already exists in the Dataset, then these labels will be appended to these of the already existing one, and if labels with identical key is imported before, the old label value will be overwritten. If two DataItems are identical in the same import data operation, the labels will be combined and if key collision happens in this case, one of the values will be picked randomly. Two DataItems are considered identical if their content bytes are identical (e.g. image bytes or pdf bytes). 
These labels will be overridden by Annotation labels specified inside index file referenced by [import_schema_uri][google.cloud.aiplatform.v1.ImportDataConfig.import_schema_uri], e.g. jsonl file. import_schema_uri (str): Required. Points to a YAML file stored on Google Cloud Storage describing the import format. Validation will be done against the schema. The schema is defined as an `OpenAPI 3.0.2 Schema Object <https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#schemaObject>`__. """ gcs_source = proto.Field( proto.MESSAGE, number=1, oneof="source", message=io.GcsSource, ) data_item_labels = proto.MapField( proto.STRING, proto.STRING, number=2, ) import_schema_uri = proto.Field( proto.STRING, number=4, ) class ExportDataConfig(proto.Message): r"""Describes what part of the Dataset is to be exported, the destination of the export and how to export. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: gcs_destination (google.cloud.aiplatform_v1.types.GcsDestination): The Google Cloud Storage location where the output is to be written to. In the given directory a new directory will be created with name: ``export-data-<dataset-display-name>-<timestamp-of-export-call>`` where timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All export output will be written into that directory. Inside that directory, annotations with the same schema will be grouped into sub directories which are named with the corresponding annotations' schema title. Inside these sub directories, a schema.yaml will be created to describe the output format. This field is a member of `oneof`_ ``destination``. annotations_filter (str): A filter on Annotations of the Dataset. Only Annotations on to-be-exported DataItems(specified by [data_items_filter][]) that match this filter will be exported. The filter syntax is the same as in [ListAnnotations][google.cloud.aiplatform.v1.DatasetService.ListAnnotations]. 
""" gcs_destination = proto.Field( proto.MESSAGE, number=1, oneof="destination", message=io.GcsDestination, ) annotations_filter = proto.Field( proto.STRING, number=2, ) __all__ = tuple(sorted(__protobuf__.manifest))
37.261603
110
0.649417
import proto from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec from google.cloud.aiplatform_v1.types import io from google.protobuf import struct_pb2 from google.protobuf import timestamp_pb2 __protobuf__ = proto.module( package="google.cloud.aiplatform.v1", manifest={ "Dataset", "ImportDataConfig", "ExportDataConfig", }, ) class Dataset(proto.Message): name = proto.Field( proto.STRING, number=1, ) display_name = proto.Field( proto.STRING, number=2, ) description = proto.Field( proto.STRING, number=16, ) metadata_schema_uri = proto.Field( proto.STRING, number=3, ) metadata = proto.Field( proto.MESSAGE, number=8, message=struct_pb2.Value, ) create_time = proto.Field( proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) update_time = proto.Field( proto.MESSAGE, number=5, message=timestamp_pb2.Timestamp, ) etag = proto.Field( proto.STRING, number=6, ) labels = proto.MapField( proto.STRING, proto.STRING, number=7, ) encryption_spec = proto.Field( proto.MESSAGE, number=11, message=gca_encryption_spec.EncryptionSpec, ) class ImportDataConfig(proto.Message): gcs_source = proto.Field( proto.MESSAGE, number=1, oneof="source", message=io.GcsSource, ) data_item_labels = proto.MapField( proto.STRING, proto.STRING, number=2, ) import_schema_uri = proto.Field( proto.STRING, number=4, ) class ExportDataConfig(proto.Message): gcs_destination = proto.Field( proto.MESSAGE, number=1, oneof="destination", message=io.GcsDestination, ) annotations_filter = proto.Field( proto.STRING, number=2, ) __all__ = tuple(sorted(__protobuf__.manifest))
true
true
f70c53fffb048f9fbd54384d990e79e47914bdb4
590
py
Python
pyformlang/regular_expression/__init__.py
YaccConstructor/pyformlang
df640e13524c5d835ddcdedf25d8246fc73d7b88
[ "MIT" ]
null
null
null
pyformlang/regular_expression/__init__.py
YaccConstructor/pyformlang
df640e13524c5d835ddcdedf25d8246fc73d7b88
[ "MIT" ]
1
2020-07-22T11:40:30.000Z
2020-07-22T11:40:30.000Z
pyformlang/regular_expression/__init__.py
YaccConstructor/pyformlang
df640e13524c5d835ddcdedf25d8246fc73d7b88
[ "MIT" ]
null
null
null
""" :mod:`pyformlang.regular_expression` ==================================== This module deals with regular expression. By default, this module does not use the standard way to write regular expressions. Please read the documentation of Regex for more information. Available Classes ----------------- Regex A regular expression MisformedRegexError An error occurring when the input regex is incorrect """ from .regex import Regex from .regex_objects import MisformedRegexError from .python_regex import PythonRegex __all__ = ["Regex", "PythonRegex", "MisformedRegexError"]
22.692308
73
0.720339
from .regex import Regex from .regex_objects import MisformedRegexError from .python_regex import PythonRegex __all__ = ["Regex", "PythonRegex", "MisformedRegexError"]
true
true
f70c54a7ba4a717f4018e33e0949863ac4c6ac3f
1,947
py
Python
setup.py
mzy2240/openSE
b6ab59b49efe46ac60e7e993a6096d8a69e3219b
[ "Apache-2.0" ]
null
null
null
setup.py
mzy2240/openSE
b6ab59b49efe46ac60e7e993a6096d8a69e3219b
[ "Apache-2.0" ]
null
null
null
setup.py
mzy2240/openSE
b6ab59b49efe46ac60e7e993a6096d8a69e3219b
[ "Apache-2.0" ]
null
null
null
from pkg_resources import parse_version from configparser import ConfigParser import setuptools assert parse_version(setuptools.__version__)>=parse_version('36.2') # note: all settings are in settings.ini; edit there, not here config = ConfigParser(delimiters=['=']) config.read('settings.ini') cfg = config['DEFAULT'] cfg_keys = 'version description keywords author author_email'.split() expected = cfg_keys + "lib_name user branch license status min_python audience language".split() for o in expected: assert o in cfg, "missing expected setting: {}".format(o) setup_cfg = {o:cfg[o] for o in cfg_keys} licenses = { 'apache2': ('Apache Software License 2.0','OSI Approved :: Apache Software License'), } statuses = [ '1 - Planning', '2 - Pre-Alpha', '3 - Alpha', '4 - Beta', '5 - Production/Stable', '6 - Mature', '7 - Inactive' ] py_versions = '2.0 2.1 2.2 2.3 2.4 2.5 2.6 2.7 3.0 3.1 3.2 3.3 3.4 3.5 3.6 3.7 3.8 3.9 3.10'.split() requirements = cfg.get('requirements','').split() lic = licenses[cfg['license']] min_python = cfg['min_python'] setuptools.setup( name = cfg['lib_name'], license = lic[0], classifiers = [ 'Development Status :: ' + statuses[int(cfg['status'])], 'Intended Audience :: ' + cfg['audience'].title(), 'License :: ' + lic[1], 'Natural Language :: ' + cfg['language'].title(), ] + ['Programming Language :: Python :: '+o for o in py_versions[py_versions.index(min_python):]], url = cfg['git_url'], packages = setuptools.find_packages(), include_package_data = True, install_requires = requirements, dependency_links = cfg.get('dep_links','').split(), python_requires = '>=' + cfg['min_python'], long_description = open('README.md', encoding='utf8').read(), long_description_content_type = 'text/markdown', zip_safe = False, entry_points = { 'console_scripts': cfg.get('console_scripts','').split() }, **setup_cfg)
40.5625
102
0.666667
from pkg_resources import parse_version from configparser import ConfigParser import setuptools assert parse_version(setuptools.__version__)>=parse_version('36.2') config = ConfigParser(delimiters=['=']) config.read('settings.ini') cfg = config['DEFAULT'] cfg_keys = 'version description keywords author author_email'.split() expected = cfg_keys + "lib_name user branch license status min_python audience language".split() for o in expected: assert o in cfg, "missing expected setting: {}".format(o) setup_cfg = {o:cfg[o] for o in cfg_keys} licenses = { 'apache2': ('Apache Software License 2.0','OSI Approved :: Apache Software License'), } statuses = [ '1 - Planning', '2 - Pre-Alpha', '3 - Alpha', '4 - Beta', '5 - Production/Stable', '6 - Mature', '7 - Inactive' ] py_versions = '2.0 2.1 2.2 2.3 2.4 2.5 2.6 2.7 3.0 3.1 3.2 3.3 3.4 3.5 3.6 3.7 3.8 3.9 3.10'.split() requirements = cfg.get('requirements','').split() lic = licenses[cfg['license']] min_python = cfg['min_python'] setuptools.setup( name = cfg['lib_name'], license = lic[0], classifiers = [ 'Development Status :: ' + statuses[int(cfg['status'])], 'Intended Audience :: ' + cfg['audience'].title(), 'License :: ' + lic[1], 'Natural Language :: ' + cfg['language'].title(), ] + ['Programming Language :: Python :: '+o for o in py_versions[py_versions.index(min_python):]], url = cfg['git_url'], packages = setuptools.find_packages(), include_package_data = True, install_requires = requirements, dependency_links = cfg.get('dep_links','').split(), python_requires = '>=' + cfg['min_python'], long_description = open('README.md', encoding='utf8').read(), long_description_content_type = 'text/markdown', zip_safe = False, entry_points = { 'console_scripts': cfg.get('console_scripts','').split() }, **setup_cfg)
true
true
f70c54ace8d3b1e3d7e5d360387aff531bf3b870
4,368
py
Python
src/tagging/train.py
grahamwhiteuk/neutralizing-bias
a6ef764046fcc68ac0daa612c160ec23a79d3e73
[ "MIT" ]
169
2019-11-20T05:20:05.000Z
2022-03-25T11:56:26.000Z
src/tagging/train.py
grahamwhiteuk/neutralizing-bias
a6ef764046fcc68ac0daa612c160ec23a79d3e73
[ "MIT" ]
10
2020-02-10T21:41:10.000Z
2022-02-05T10:37:52.000Z
src/tagging/train.py
grahamwhiteuk/neutralizing-bias
a6ef764046fcc68ac0daa612c160ec23a79d3e73
[ "MIT" ]
44
2019-11-26T11:54:45.000Z
2022-03-03T23:16:04.000Z
# -*- coding: utf-8 -*- """ train bert python tagging/train.py --train ../../data/v6/corpus.wordbiased.tag.train --test ../../data/v6/corpus.wordbiased.tag.test --working_dir TEST --train_batch_size 3 --test_batch_size 10 --hidden_size 32 --debug_skip """ from pytorch_pretrained_bert.tokenization import BertTokenizer from pytorch_pretrained_bert.optimization import BertAdam from collections import defaultdict from torch.utils.data import TensorDataset, DataLoader, RandomSampler from tqdm import tqdm import torch import torch.nn as nn import pickle import sys import os import numpy as np from pytorch_pretrained_bert.modeling import BertForTokenClassification from torch.nn import CrossEntropyLoss from tensorboardX import SummaryWriter import argparse import sklearn.metrics as metrics import model as tagging_model import utils as tagging_utils import sys; sys.path.append('.') from shared.data import get_dataloader from shared.args import ARGS from shared.constants import CUDA if not os.path.exists(ARGS.working_dir): os.makedirs(ARGS.working_dir) with open(ARGS.working_dir + '/command.sh', 'w') as f: f.write('python' + ' '.join(sys.argv) + '\n') # # # # # # # # ## # # # ## # # DATA # # # # # # # # ## # # # ## # # print('LOADING DATA...') tokenizer = BertTokenizer.from_pretrained(ARGS.bert_model, cache_dir=ARGS.working_dir + '/cache') tok2id = tokenizer.vocab tok2id['<del>'] = len(tok2id) train_dataloader, num_train_examples = get_dataloader( ARGS.train, tok2id, ARGS.train_batch_size, ARGS.working_dir + '/train_data.pkl', categories_path=ARGS.categories_file) eval_dataloader, num_eval_examples = get_dataloader( ARGS.test, tok2id, ARGS.test_batch_size, ARGS.working_dir + '/test_data.pkl', test=True, categories_path=ARGS.categories_file) # # # # # # # # ## # # # ## # # MODEL # # # # # # # # ## # # # ## # # print('BUILDING MODEL...') if ARGS.tagger_from_debiaser: model = tagging_model.TaggerFromDebiaser( cls_num_labels=ARGS.num_categories, 
tok_num_labels=ARGS.num_tok_labels, tok2id=tok2id) elif ARGS.extra_features_top: model = tagging_model.BertForMultitaskWithFeaturesOnTop.from_pretrained( ARGS.bert_model, cls_num_labels=ARGS.num_categories, tok_num_labels=ARGS.num_tok_labels, cache_dir=ARGS.working_dir + '/cache', tok2id=tok2id) elif ARGS.extra_features_bottom: model = tagging_model.BertForMultitaskWithFeaturesOnBottom.from_pretrained( ARGS.bert_model, cls_num_labels=ARGS.num_categories, tok_num_labels=ARGS.num_tok_labels, cache_dir=ARGS.working_dir + '/cache', tok2id=tok2id) else: model = tagging_model.BertForMultitask.from_pretrained( ARGS.bert_model, cls_num_labels=ARGS.num_categories, tok_num_labels=ARGS.num_tok_labels, cache_dir=ARGS.working_dir + '/cache', tok2id=tok2id) if CUDA: model = model.cuda() print('PREPPING RUN...') # # # # # # # # ## # # # ## # # OPTIMIZER, LOSS # # # # # # # # ## # # # ## # # optimizer = tagging_utils.build_optimizer( model, int((num_train_examples * ARGS.epochs) / ARGS.train_batch_size), ARGS.learning_rate) loss_fn = tagging_utils.build_loss_fn() # # # # # # # # ## # # # ## # # TRAIN # # # # # # # # ## # # # ## # # writer = SummaryWriter(ARGS.working_dir) print('INITIAL EVAL...') model.eval() results = tagging_utils.run_inference(model, eval_dataloader, loss_fn, tokenizer) writer.add_scalar('eval/tok_loss', np.mean(results['tok_loss']), 0) writer.add_scalar('eval/tok_acc', np.mean(results['labeling_hits']), 0) print('TRAINING...') model.train() for epoch in range(ARGS.epochs): print('STARTING EPOCH ', epoch) losses = tagging_utils.train_for_epoch(model, train_dataloader, loss_fn, optimizer) writer.add_scalar('train/loss', np.mean(losses), epoch + 1) # eval print('EVAL...') model.eval() results = tagging_utils.run_inference(model, eval_dataloader, loss_fn, tokenizer) writer.add_scalar('eval/tok_loss', np.mean(results['tok_loss']), epoch + 1) writer.add_scalar('eval/tok_acc', np.mean(results['labeling_hits']), epoch + 1) model.train() print('SAVING...') 
torch.save(model.state_dict(), ARGS.working_dir + '/model_%d.ckpt' % epoch)
31.883212
213
0.690247
from pytorch_pretrained_bert.tokenization import BertTokenizer from pytorch_pretrained_bert.optimization import BertAdam from collections import defaultdict from torch.utils.data import TensorDataset, DataLoader, RandomSampler from tqdm import tqdm import torch import torch.nn as nn import pickle import sys import os import numpy as np from pytorch_pretrained_bert.modeling import BertForTokenClassification from torch.nn import CrossEntropyLoss from tensorboardX import SummaryWriter import argparse import sklearn.metrics as metrics import model as tagging_model import utils as tagging_utils import sys; sys.path.append('.') from shared.data import get_dataloader from shared.args import ARGS from shared.constants import CUDA if not os.path.exists(ARGS.working_dir): os.makedirs(ARGS.working_dir) with open(ARGS.working_dir + '/command.sh', 'w') as f: f.write('python' + ' '.join(sys.argv) + '\n') print('LOADING DATA...') tokenizer = BertTokenizer.from_pretrained(ARGS.bert_model, cache_dir=ARGS.working_dir + '/cache') tok2id = tokenizer.vocab tok2id['<del>'] = len(tok2id) train_dataloader, num_train_examples = get_dataloader( ARGS.train, tok2id, ARGS.train_batch_size, ARGS.working_dir + '/train_data.pkl', categories_path=ARGS.categories_file) eval_dataloader, num_eval_examples = get_dataloader( ARGS.test, tok2id, ARGS.test_batch_size, ARGS.working_dir + '/test_data.pkl', test=True, categories_path=ARGS.categories_file) print('BUILDING MODEL...') if ARGS.tagger_from_debiaser: model = tagging_model.TaggerFromDebiaser( cls_num_labels=ARGS.num_categories, tok_num_labels=ARGS.num_tok_labels, tok2id=tok2id) elif ARGS.extra_features_top: model = tagging_model.BertForMultitaskWithFeaturesOnTop.from_pretrained( ARGS.bert_model, cls_num_labels=ARGS.num_categories, tok_num_labels=ARGS.num_tok_labels, cache_dir=ARGS.working_dir + '/cache', tok2id=tok2id) elif ARGS.extra_features_bottom: model = tagging_model.BertForMultitaskWithFeaturesOnBottom.from_pretrained( ARGS.bert_model, 
cls_num_labels=ARGS.num_categories, tok_num_labels=ARGS.num_tok_labels, cache_dir=ARGS.working_dir + '/cache', tok2id=tok2id) else: model = tagging_model.BertForMultitask.from_pretrained( ARGS.bert_model, cls_num_labels=ARGS.num_categories, tok_num_labels=ARGS.num_tok_labels, cache_dir=ARGS.working_dir + '/cache', tok2id=tok2id) if CUDA: model = model.cuda() print('PREPPING RUN...') optimizer = tagging_utils.build_optimizer( model, int((num_train_examples * ARGS.epochs) / ARGS.train_batch_size), ARGS.learning_rate) loss_fn = tagging_utils.build_loss_fn() writer = SummaryWriter(ARGS.working_dir) print('INITIAL EVAL...') model.eval() results = tagging_utils.run_inference(model, eval_dataloader, loss_fn, tokenizer) writer.add_scalar('eval/tok_loss', np.mean(results['tok_loss']), 0) writer.add_scalar('eval/tok_acc', np.mean(results['labeling_hits']), 0) print('TRAINING...') model.train() for epoch in range(ARGS.epochs): print('STARTING EPOCH ', epoch) losses = tagging_utils.train_for_epoch(model, train_dataloader, loss_fn, optimizer) writer.add_scalar('train/loss', np.mean(losses), epoch + 1) print('EVAL...') model.eval() results = tagging_utils.run_inference(model, eval_dataloader, loss_fn, tokenizer) writer.add_scalar('eval/tok_loss', np.mean(results['tok_loss']), epoch + 1) writer.add_scalar('eval/tok_acc', np.mean(results['labeling_hits']), epoch + 1) model.train() print('SAVING...') torch.save(model.state_dict(), ARGS.working_dir + '/model_%d.ckpt' % epoch)
true
true
f70c558e8e110dc473b09a410155f7b271c28ec7
480
py
Python
Pupil3dDetectorOSC/SetROI.py
SummerSigh/VrEyeToolbox
8e7c21e4a5ee95db9aea4b1bce0a3966156b9e5c
[ "MIT" ]
9
2022-03-17T02:00:45.000Z
2022-03-21T00:20:46.000Z
Pupil3dDetectorOSC/SetROI.py
SummerSigh/VrEyeToolbox
8e7c21e4a5ee95db9aea4b1bce0a3966156b9e5c
[ "MIT" ]
null
null
null
Pupil3dDetectorOSC/SetROI.py
SummerSigh/VrEyeToolbox
8e7c21e4a5ee95db9aea4b1bce0a3966156b9e5c
[ "MIT" ]
null
null
null
from re import X from tkinter import Y import cv2 cap = cv2.VideoCapture("demo2.mp4") ret, img = cap.read() roibb = cv2.selectROI("image", img, fromCenter=False, showCrosshair=True) print('X', roibb[0]) print('Y', roibb[1]) print('Width', roibb[2]) print('Height', roibb[3]) with open('roi.cfg', 'w+') as rf: rf.write(str(roibb[0])) rf.write('\n') rf.write(str(roibb[1])) rf.write('\n') rf.write(str(roibb[2])) rf.write('\n') rf.write(str(roibb[3]))
22.857143
73
0.625
from re import X from tkinter import Y import cv2 cap = cv2.VideoCapture("demo2.mp4") ret, img = cap.read() roibb = cv2.selectROI("image", img, fromCenter=False, showCrosshair=True) print('X', roibb[0]) print('Y', roibb[1]) print('Width', roibb[2]) print('Height', roibb[3]) with open('roi.cfg', 'w+') as rf: rf.write(str(roibb[0])) rf.write('\n') rf.write(str(roibb[1])) rf.write('\n') rf.write(str(roibb[2])) rf.write('\n') rf.write(str(roibb[3]))
true
true
f70c5661944cec2cc6f4ba9452219086980e1ea2
655
py
Python
venv/Lib/site-packages/nipype/interfaces/dipy/registration.py
richung99/digitizePlots
6b408c820660a415a289726e3223e8f558d3e18b
[ "MIT" ]
585
2015-01-12T16:06:47.000Z
2022-03-26T14:51:08.000Z
nipype/interfaces/dipy/registration.py
tamires-consulting/nipype
b7879d75a63b6500b2e7d2c3eba5aa7670339274
[ "Apache-2.0" ]
2,329
2015-01-01T09:56:41.000Z
2022-03-30T14:24:49.000Z
nipype/interfaces/dipy/registration.py
tamires-consulting/nipype
b7879d75a63b6500b2e7d2c3eba5aa7670339274
[ "Apache-2.0" ]
487
2015-01-20T01:04:52.000Z
2022-03-21T21:22:47.000Z
from distutils.version import LooseVersion from ... import logging from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows IFLOGGER = logging.getLogger("nipype.interface") if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): from dipy.workflows import align l_wkflw = get_dipy_workflows(align) for name, obj in l_wkflw: new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: IFLOGGER.info( "We advise you to upgrade DIPY version. This upgrade will" " open access to more function" )
29.772727
87
0.717557
from distutils.version import LooseVersion from ... import logging from .base import HAVE_DIPY, dipy_version, dipy_to_nipype_interface, get_dipy_workflows IFLOGGER = logging.getLogger("nipype.interface") if HAVE_DIPY and LooseVersion(dipy_version()) >= LooseVersion("0.15"): from dipy.workflows import align l_wkflw = get_dipy_workflows(align) for name, obj in l_wkflw: new_name = name.replace("Flow", "") globals()[new_name] = dipy_to_nipype_interface(new_name, obj) del l_wkflw else: IFLOGGER.info( "We advise you to upgrade DIPY version. This upgrade will" " open access to more function" )
true
true
f70c56dae054122f2ba8494eb90f9965b9b545a2
203
py
Python
approaches/__init__.py
JetBrains-Research/docs-fine-tuning
a56e1f1e4183432994a40ac796a36095a747e052
[ "MIT" ]
null
null
null
approaches/__init__.py
JetBrains-Research/docs-fine-tuning
a56e1f1e4183432994a40ac796a36095a747e052
[ "MIT" ]
null
null
null
approaches/__init__.py
JetBrains-Research/docs-fine-tuning
a56e1f1e4183432994a40ac796a36095a747e052
[ "MIT" ]
null
null
null
from approaches.abstract_approach import AbstractApproach from approaches.simple import SimpleApproach from approaches.tfidf import TfIdfApproach from approaches.intersection import IntersectionApproach
40.6
57
0.901478
from approaches.abstract_approach import AbstractApproach from approaches.simple import SimpleApproach from approaches.tfidf import TfIdfApproach from approaches.intersection import IntersectionApproach
true
true
f70c56e55155862a4b331fc2e4813966d327374e
6,619
py
Python
sagemaker-dash/tutorials/app15.py
philippe-heitzmann/python-apps
1cc6e5e9b9ac81c81a3d4f0e420ff488fe6b2f0a
[ "MIT" ]
13
2021-05-23T15:47:24.000Z
2022-03-24T16:22:14.000Z
sagemaker-dash/tutorials/app15.py
philippe-heitzmann/python-apps
1cc6e5e9b9ac81c81a3d4f0e420ff488fe6b2f0a
[ "MIT" ]
4
2021-11-16T20:44:55.000Z
2022-01-13T19:13:38.000Z
sagemaker-dash/tutorials/app15.py
philippe-heitzmann/python-apps
1cc6e5e9b9ac81c81a3d4f0e420ff488fe6b2f0a
[ "MIT" ]
11
2021-01-31T06:18:10.000Z
2021-11-21T00:02:05.000Z
import dash import dash_core_components as dcc import dash_html_components as html import pandas as pd import plotly.express as px external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css'] app = dash.Dash(__name__, external_stylesheets=external_stylesheets) df = pd.read_csv('https://plotly.github.io/datasets/country_indicators.csv') available_indicators = df['Indicator Name'].unique() app.layout = html.Div([ html.Div([ html.Div([ dcc.Dropdown(id='crossfilter-xaxis-column', options=[{ 'label': i, 'value': i } for i in available_indicators], value='Fertility rate, total (births per woman)'), dcc.RadioItems(id='crossfilter-xaxis-type', options=[{ 'label': i, 'value': i } for i in ['Linear', 'Log']], value='Linear', labelStyle={ 'display': 'inline-block', 'marginTop': '5px' }) ], style={ 'width': '49%', 'isplay': 'inline-block' }), html.Div([ dcc.Dropdown(id='crossfilter-yaxis-column', options=[{ 'label': i, 'value': i } for i in available_indicators], value='Life expectancy at birth, total (years)'), dcc.RadioItems(id='crossfilter-yaxis-type', options=[{ 'label': i, 'value': i } for i in ['Linear', 'Log']], value='Linear', labelStyle={ 'display': 'inline-block', 'marginTop': '5px' }) ], style={ 'width': '49%', 'float': 'right', 'display': 'inline-block' }) ], style={'padding': '10px 5px'}), html.Div([ dcc.Graph(id='crossfilter-indicator-scatter', hoverData={'points': [{ 'customdata': 'Japan' }]}) ], style={ 'width': '49%', 'display': 'inline-block', 'padding': '0 20' }), html.Div([ dcc.Graph(id='x-time-series'), dcc.Graph(id='y-time-series'), ], style={ 'display': 'inline-block', 'width': '49%' }), html.Div(dcc.Slider( id='crossfilter-year--slider', min=df['Year'].min(), max=df['Year'].max(), value=df['Year'].max(), marks={str(year): str(year) for year in df['Year'].unique()}, step=None), style={ 'width': '49%', 'padding': '0px 20px 20px 20px' }) ]) @app.callback( dash.dependencies.Output('crossfilter-indicator-scatter', 'figure'), [ 
dash.dependencies.Input('crossfilter-xaxis-column', 'value'), dash.dependencies.Input('crossfilter-yaxis-column', 'value'), dash.dependencies.Input('crossfilter-xaxis-type', 'value'), dash.dependencies.Input('crossfilter-yaxis-type', 'value'), dash.dependencies.Input('crossfilter-year--slider', 'value') ]) def update_graph(xaxis_column_name, yaxis_column_name, xaxis_type, yaxis_type, year_value): dff = df[df['Year'] == year_value] fig = px.scatter( x=dff[dff['Indicator Name'] == xaxis_column_name]['Value'], y=dff[dff['Indicator Name'] == yaxis_column_name]['Value'], hover_name=dff[dff['Indicator Name'] == yaxis_column_name]['Country Name']) fig.update_traces(customdata=dff[dff['Indicator Name'] == yaxis_column_name]['Country Name']) fig.update_xaxes(title=xaxis_column_name, type='linear' if xaxis_type == 'Linear' else 'log') fig.update_yaxes(title=yaxis_column_name, type='linear' if yaxis_type == 'Linear' else 'log') fig.update_layout(margin={ 'l': 40, 'b': 40, 't': 10, 'r': 0 }, hovermode='closest') return fig def create_time_series(dff, axis_type, title): fig = px.scatter(dff, x='Year', y='Value') fig.update_traces(mode='lines+markers') fig.update_xaxes(showgrid=False) fig.update_yaxes(type='linear' if axis_type == 'Linear' else 'log') fig.add_annotation(x=0, y=0.85, xanchor='left', yanchor='bottom', xref='paper', yref='paper', showarrow=False, align='left', text=title) fig.update_layout(height=225, margin={'l': 20, 'b': 30, 'r': 10, 't': 10}) return fig @app.callback(dash.dependencies.Output('x-time-series', 'figure'), [ dash.dependencies.Input('crossfilter-indicator-scatter', 'hoverData'), dash.dependencies.Input('crossfilter-xaxis-column', 'value'), dash.dependencies.Input('crossfilter-xaxis-type', 'value') ]) def update_y_timeseries(hoverData, xaxis_column_name, axis_type): country_name = hoverData['points'][0]['customdata'] dff = df[df['Country Name'] == country_name] dff = dff[dff['Indicator Name'] == xaxis_column_name] title = 
'<b>{}</b><br>{}'.format(country_name, xaxis_column_name) return create_time_series(dff, axis_type, title) @app.callback(dash.dependencies.Output('y-time-series', 'figure'), [ dash.dependencies.Input('crossfilter-indicator-scatter', 'hoverData'), dash.dependencies.Input('crossfilter-yaxis-column', 'value'), dash.dependencies.Input('crossfilter-yaxis-type', 'value') ]) def update_x_timeseries(hoverData, yaxis_column_name, axis_type): dff = df[df['Country Name'] == hoverData['points'][0]['customdata']] dff = dff[dff['Indicator Name'] == yaxis_column_name] return create_time_series(dff, axis_type, yaxis_column_name) if __name__ == '__main__': app.run_server(debug=True)
35.395722
78
0.500227
import dash import dash_core_components as dcc import dash_html_components as html import pandas as pd import plotly.express as px external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css'] app = dash.Dash(__name__, external_stylesheets=external_stylesheets) df = pd.read_csv('https://plotly.github.io/datasets/country_indicators.csv') available_indicators = df['Indicator Name'].unique() app.layout = html.Div([ html.Div([ html.Div([ dcc.Dropdown(id='crossfilter-xaxis-column', options=[{ 'label': i, 'value': i } for i in available_indicators], value='Fertility rate, total (births per woman)'), dcc.RadioItems(id='crossfilter-xaxis-type', options=[{ 'label': i, 'value': i } for i in ['Linear', 'Log']], value='Linear', labelStyle={ 'display': 'inline-block', 'marginTop': '5px' }) ], style={ 'width': '49%', 'isplay': 'inline-block' }), html.Div([ dcc.Dropdown(id='crossfilter-yaxis-column', options=[{ 'label': i, 'value': i } for i in available_indicators], value='Life expectancy at birth, total (years)'), dcc.RadioItems(id='crossfilter-yaxis-type', options=[{ 'label': i, 'value': i } for i in ['Linear', 'Log']], value='Linear', labelStyle={ 'display': 'inline-block', 'marginTop': '5px' }) ], style={ 'width': '49%', 'float': 'right', 'display': 'inline-block' }) ], style={'padding': '10px 5px'}), html.Div([ dcc.Graph(id='crossfilter-indicator-scatter', hoverData={'points': [{ 'customdata': 'Japan' }]}) ], style={ 'width': '49%', 'display': 'inline-block', 'padding': '0 20' }), html.Div([ dcc.Graph(id='x-time-series'), dcc.Graph(id='y-time-series'), ], style={ 'display': 'inline-block', 'width': '49%' }), html.Div(dcc.Slider( id='crossfilter-year--slider', min=df['Year'].min(), max=df['Year'].max(), value=df['Year'].max(), marks={str(year): str(year) for year in df['Year'].unique()}, step=None), style={ 'width': '49%', 'padding': '0px 20px 20px 20px' }) ]) @app.callback( dash.dependencies.Output('crossfilter-indicator-scatter', 'figure'), [ 
dash.dependencies.Input('crossfilter-xaxis-column', 'value'), dash.dependencies.Input('crossfilter-yaxis-column', 'value'), dash.dependencies.Input('crossfilter-xaxis-type', 'value'), dash.dependencies.Input('crossfilter-yaxis-type', 'value'), dash.dependencies.Input('crossfilter-year--slider', 'value') ]) def update_graph(xaxis_column_name, yaxis_column_name, xaxis_type, yaxis_type, year_value): dff = df[df['Year'] == year_value] fig = px.scatter( x=dff[dff['Indicator Name'] == xaxis_column_name]['Value'], y=dff[dff['Indicator Name'] == yaxis_column_name]['Value'], hover_name=dff[dff['Indicator Name'] == yaxis_column_name]['Country Name']) fig.update_traces(customdata=dff[dff['Indicator Name'] == yaxis_column_name]['Country Name']) fig.update_xaxes(title=xaxis_column_name, type='linear' if xaxis_type == 'Linear' else 'log') fig.update_yaxes(title=yaxis_column_name, type='linear' if yaxis_type == 'Linear' else 'log') fig.update_layout(margin={ 'l': 40, 'b': 40, 't': 10, 'r': 0 }, hovermode='closest') return fig def create_time_series(dff, axis_type, title): fig = px.scatter(dff, x='Year', y='Value') fig.update_traces(mode='lines+markers') fig.update_xaxes(showgrid=False) fig.update_yaxes(type='linear' if axis_type == 'Linear' else 'log') fig.add_annotation(x=0, y=0.85, xanchor='left', yanchor='bottom', xref='paper', yref='paper', showarrow=False, align='left', text=title) fig.update_layout(height=225, margin={'l': 20, 'b': 30, 'r': 10, 't': 10}) return fig @app.callback(dash.dependencies.Output('x-time-series', 'figure'), [ dash.dependencies.Input('crossfilter-indicator-scatter', 'hoverData'), dash.dependencies.Input('crossfilter-xaxis-column', 'value'), dash.dependencies.Input('crossfilter-xaxis-type', 'value') ]) def update_y_timeseries(hoverData, xaxis_column_name, axis_type): country_name = hoverData['points'][0]['customdata'] dff = df[df['Country Name'] == country_name] dff = dff[dff['Indicator Name'] == xaxis_column_name] title = 
'<b>{}</b><br>{}'.format(country_name, xaxis_column_name) return create_time_series(dff, axis_type, title) @app.callback(dash.dependencies.Output('y-time-series', 'figure'), [ dash.dependencies.Input('crossfilter-indicator-scatter', 'hoverData'), dash.dependencies.Input('crossfilter-yaxis-column', 'value'), dash.dependencies.Input('crossfilter-yaxis-type', 'value') ]) def update_x_timeseries(hoverData, yaxis_column_name, axis_type): dff = df[df['Country Name'] == hoverData['points'][0]['customdata']] dff = dff[dff['Indicator Name'] == yaxis_column_name] return create_time_series(dff, axis_type, yaxis_column_name) if __name__ == '__main__': app.run_server(debug=True)
true
true
f70c5741e0bdc69a1654286b32fcf96209f03e6b
3,111
py
Python
tests/semver/test_parse_constraint.py
henryiii/poetry-core
c0d7e52f49b12092ba6829ecb02db7701cbff019
[ "MIT" ]
null
null
null
tests/semver/test_parse_constraint.py
henryiii/poetry-core
c0d7e52f49b12092ba6829ecb02db7701cbff019
[ "MIT" ]
null
null
null
tests/semver/test_parse_constraint.py
henryiii/poetry-core
c0d7e52f49b12092ba6829ecb02db7701cbff019
[ "MIT" ]
1
2021-04-08T03:26:23.000Z
2021-04-08T03:26:23.000Z
import pytest from poetry.core.semver import Version from poetry.core.semver import VersionRange from poetry.core.semver import VersionUnion from poetry.core.semver import parse_constraint @pytest.mark.parametrize( "constraint,version", [ ("~=3.8", VersionRange(min=Version(3, 8), max=Version(4, 0), include_min=True)), ( "== 3.8.*", VersionRange(min=Version(3, 8), max=Version(3, 9, 0), include_min=True), ), ( "~= 3.8", VersionRange(min=Version(3, 8), max=Version(4, 0), include_min=True), ), ("~3.8", VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True)), ("~ 3.8", VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True)), (">3.8", VersionRange(min=Version(3, 8))), (">=3.8", VersionRange(min=Version(3, 8), include_min=True)), (">= 3.8", VersionRange(min=Version(3, 8), include_min=True)), ( ">3.8,<=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( ">3.8,<= 6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( "> 3.8,<= 6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( "> 3.8,<=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( ">3.8 ,<=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( ">3.8, <=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( ">3.8 , <=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( "==3.8", VersionRange( min=Version(3, 8), max=Version(3, 8), include_min=True, include_max=True ), ), ( "== 3.8", VersionRange( min=Version(3, 8), max=Version(3, 8), include_min=True, include_max=True ), ), ( "~2.7 || ~3.8", VersionUnion( VersionRange(min=Version(2, 7), max=Version(2, 8), include_min=True), VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True), ), ), ( "~2.7||~3.8", VersionUnion( VersionRange(min=Version(2, 7), max=Version(2, 8), include_min=True), VersionRange(min=Version(3, 8), max=Version(3, 9), 
include_min=True), ), ), ( "~ 2.7||~ 3.8", VersionUnion( VersionRange(min=Version(2, 7), max=Version(2, 8), include_min=True), VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True), ), ), ], ) def test_parse_constraint(constraint, version): assert parse_constraint(constraint) == version
34.186813
88
0.491803
import pytest from poetry.core.semver import Version from poetry.core.semver import VersionRange from poetry.core.semver import VersionUnion from poetry.core.semver import parse_constraint @pytest.mark.parametrize( "constraint,version", [ ("~=3.8", VersionRange(min=Version(3, 8), max=Version(4, 0), include_min=True)), ( "== 3.8.*", VersionRange(min=Version(3, 8), max=Version(3, 9, 0), include_min=True), ), ( "~= 3.8", VersionRange(min=Version(3, 8), max=Version(4, 0), include_min=True), ), ("~3.8", VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True)), ("~ 3.8", VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True)), (">3.8", VersionRange(min=Version(3, 8))), (">=3.8", VersionRange(min=Version(3, 8), include_min=True)), (">= 3.8", VersionRange(min=Version(3, 8), include_min=True)), ( ">3.8,<=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( ">3.8,<= 6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( "> 3.8,<= 6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( "> 3.8,<=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( ">3.8 ,<=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( ">3.8, <=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( ">3.8 , <=6.5", VersionRange(min=Version(3, 8), max=Version(6, 5), include_max=True), ), ( "==3.8", VersionRange( min=Version(3, 8), max=Version(3, 8), include_min=True, include_max=True ), ), ( "== 3.8", VersionRange( min=Version(3, 8), max=Version(3, 8), include_min=True, include_max=True ), ), ( "~2.7 || ~3.8", VersionUnion( VersionRange(min=Version(2, 7), max=Version(2, 8), include_min=True), VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True), ), ), ( "~2.7||~3.8", VersionUnion( VersionRange(min=Version(2, 7), max=Version(2, 8), include_min=True), VersionRange(min=Version(3, 8), max=Version(3, 9), 
include_min=True), ), ), ( "~ 2.7||~ 3.8", VersionUnion( VersionRange(min=Version(2, 7), max=Version(2, 8), include_min=True), VersionRange(min=Version(3, 8), max=Version(3, 9), include_min=True), ), ), ], ) def test_parse_constraint(constraint, version): assert parse_constraint(constraint) == version
true
true
f70c577d5a00a0cb8f77a0f4b89135a1f7157793
206
py
Python
PyMOTW/source/itertools/itertools_repeat_map.py
axetang/AxePython
3b517fa3123ce2e939680ad1ae14f7e602d446a6
[ "Apache-2.0" ]
1
2019-01-04T05:47:50.000Z
2019-01-04T05:47:50.000Z
PyMOTW/source/itertools/itertools_repeat_map.py
axetang/AxePython
3b517fa3123ce2e939680ad1ae14f7e602d446a6
[ "Apache-2.0" ]
1
2020-07-18T03:52:03.000Z
2020-07-18T04:18:01.000Z
PyMOTW/source/itertools/itertools_repeat_map.py
axetang/AxePython
3b517fa3123ce2e939680ad1ae14f7e602d446a6
[ "Apache-2.0" ]
2
2021-03-06T04:28:32.000Z
2021-03-06T04:59:17.000Z
#!/usr/bin/env python3 """Using repeat() and map() """ #end_pymotw_header from itertools import * for i in map(lambda x, y: (x, y, x * y), repeat(2), range(5)): print('{:d} * {:d} = {:d}'.format(*i))
20.6
62
0.572816
from itertools import * for i in map(lambda x, y: (x, y, x * y), repeat(2), range(5)): print('{:d} * {:d} = {:d}'.format(*i))
true
true
f70c5874fbd5bf53254b675479e8a23d62b45722
8,237
py
Python
Projects/Sugarscape/Model.py
hunterluepke/Learn-Python-for-Stats-and-Econ
d580a8e27ba937fc8401ac6d0714b6488ac8bbb6
[ "MIT" ]
16
2019-01-10T18:54:13.000Z
2022-01-28T20:07:20.000Z
Projects/Sugarscape/Model.py
hunterluepke/Learn-Python-for-Stats-and-Econ
d580a8e27ba937fc8401ac6d0714b6488ac8bbb6
[ "MIT" ]
null
null
null
Projects/Sugarscape/Model.py
hunterluepke/Learn-Python-for-Stats-and-Econ
d580a8e27ba937fc8401ac6d0714b6488ac8bbb6
[ "MIT" ]
15
2019-01-24T17:11:20.000Z
2021-12-11T01:53:57.000Z
import numpy as np import pandas as pd from scipy.stats.mstats import gmean import random import math from randomdict import RandomDict # from chest import * import shelve from Patch import * from AgentBranch import * import gc from memory_profiler import memory_usage #Model.py class Model(): def __init__(self, gui, num_agents, mutate, genetic, live_visual, agent_attributes, model_attributes): if live_visual: self.GUI = gui self.live_visual = live_visual self.name = gui.name self.run = gui.run self.initial_population = num_agents self.mutate = mutate self.genetic = genetic self.agent_attributes = agent_attributes self.model_attributes = model_attributes self.attributes = agent_attributes + model_attributes # attributes that are not copied during mutation or herding self.drop_attr = ["col", "row", "dx", "dy", "id", "wealth", "top_wealth", "sugar", "water","target", "not_target", "exchange_target", "not_exchange_target", "parent", "image"] # if self.GUI.live_visual: # self.drop_attr.append("image") if self.mutate: self.max_mutate_rate = 0.5 if mutate else 0 #.5 if self.genetic: self.cross_over_rate = .5 ############ set model parameters ############ self.total_agents_created = 0 self.goods = ["sugar", "water"] self.goods_params = {good:{"min":5, "max":25} for good in self.goods} self.max_init_demand_vals = {"price":{"min": 1/2, "max": 2}, "quantity":{"min":10, "max":25}} self.consumption_rate = {"sugar":.5, "water":.5} self.primary_breeds = ["basic", "switcher", "arbitrageur"] self.secondary_breeds = ["herder"] self.breeds = self.primary_breeds + self.secondary_breeds # all agents start as basic, only mutation can create other agents basic = 1 self.breed_probabilities = {"basic":basic, # if you are not a basic, you are a switcher "herder":0, "arbitrageur":0} self.max_vision = 1 # record price of every transaction # then take average at end of period self.transaction_prices = [] self.average_price = np.nan self.total_exchanges = 0 ############ import map and build 
nav_dict ############ # hash table that identifies possible moves relative to agent position self.nav_dict = { v:{ i:{ j: True for j in range(-v, v + 1) if 0 < (i ** 2 + j ** 2) <= (v ** 2)} for i in range(-v, v + 1)} for v in range(1, self.max_vision + 1)} #sugarMap.shape calls the a tuple with dimensions #of the dataframe self.sugarMap = pd.read_csv('sugar-map.txt', header = None, sep = ' ') # add 1 to each max_Val for key in self.sugarMap: self.sugarMap[key] = self.sugarMap[key].add(1) self.rows, self.cols = self.sugarMap.shape ############ Initialization ############ self.initializePatches() self.initializeAgents() self.data_dict = shelve.open("shelves\\masterShelve", writeback = True) for attribute in self.attributes: self.data_dict[attribute] = shelve.open("shelves\\subshelve-"+attribute, writeback = True) def initializePatches(self): #Instantiate Patches #Create a dictionary to hold the patches, organize as grid. #We first fill these with zeros as placeh holders self.patch_dict = {row:{col:0} for row in range(self.rows) for col in range(self.cols)} for row in range(self.rows): for col in range(self.cols): # replace zeros with actual Patch objects good = "sugar" if row + col < self.cols else "water" self.patch_dict[row][col] = Patch(self, row , col, self.sugarMap[row][col], good) # use RandomDict - O(n) time complexity - for choosing random empty patch self.empty_patches = RandomDict({ (row,col):self.patch_dict[row][col] for row in range(self.rows) for col in range(self.cols)}) def initializeAgents(self): # agents stored in a dict by ID self.agent_dict = {} #if self.live_visual else Chest(path = data_aggregator.folder) #shelve.open("agent_dict") # dead agents will be removed from agent_dict for i in range(self.initial_population): self.total_agents_created += 1 ID = self.total_agents_created row, col = self.chooseRandomEmptyPatch() self.agent_dict[ID] = Agent(self, row, col, ID) self.patch_dict[row][col].agent = self.agent_dict[ID] self.population = 
self.total_agents_created # def recordAgentLocationInDict(self, agent): # patchIndex = self.convert2dTo1d(agent.row, agent.col) # self.agentLocationDict[patchIndex] = agent def chooseRandomEmptyPatch(self): row, col = self.empty_patches.random_key() del self.empty_patches[row, col] return row, col def runModel(self, periods): def updateModelVariables(): self.population = len(agent_list) self.average_price = gmean(self.transaction_prices) self.transaction_prices = [] for period in range(1, periods + 1): self.growPatches() agent_list = list(self.agent_dict.values()) random.shuffle(agent_list) for agent in agent_list: agent.move() agent.harvest() agent.trade() agent.consume() agent.checkAlive() agent.reproduce() agent.updateParams() # data_aggregator.collectData(self, self.name, # self.run, period) updateModelVariables() self.collectData(str(period)) if self.live_visual: if period % self.GUI.every_t_frames == 0: print("period", period, "population", self.population, sep = "\t") self.GUI.parent.title("Sugarscape: " + str(period)) self.GUI.updatePatches() self.GUI.moveAgents() self.GUI.canvas.update() if period == periods: mem_usage = memory_usage(-1, interval=1)#, timeout=1) print(period, "end memory usage before sync//collect:", mem_usage[0], sep = "\t") self.data_dict.sync() gc.collect() mem_usage = memory_usage(-1, interval=1)#, timeout=1) print(period, "end memory usage after sync//collect:", mem_usage[0], sep = "\t") def growPatches(self): for i in self.patch_dict: for patch in self.patch_dict[i].values(): if patch.Q < patch.maxQ: patch.Q += 1 def collectData(self, period): def collectAgentAttributes(): temp_dict={} for attribute in self.agent_attributes: temp_dict[attribute] = [] for ID, agent in self.agent_dict.items(): for attribute in self.agent_attributes: temp_dict[attribute].append(getattr(agent, attribute)) for attribute, val in temp_dict.items(): self.data_dict[attribute][period] = np.mean(val) def collectModelAttributes(): for attribute in 
self.model_attributes: self.data_dict[attribute][period] = getattr(self, attribute) collectAgentAttributes() collectModelAttributes()
42.901042
119
0.552264
import numpy as np import pandas as pd from scipy.stats.mstats import gmean import random import math from randomdict import RandomDict import shelve from Patch import * from AgentBranch import * import gc from memory_profiler import memory_usage class Model(): def __init__(self, gui, num_agents, mutate, genetic, live_visual, agent_attributes, model_attributes): if live_visual: self.GUI = gui self.live_visual = live_visual self.name = gui.name self.run = gui.run self.initial_population = num_agents self.mutate = mutate self.genetic = genetic self.agent_attributes = agent_attributes self.model_attributes = model_attributes self.attributes = agent_attributes + model_attributes self.drop_attr = ["col", "row", "dx", "dy", "id", "wealth", "top_wealth", "sugar", "water","target", "not_target", "exchange_target", "not_exchange_target", "parent", "image"] if self.mutate: self.max_mutate_rate = 0.5 if mutate else 0 if self.genetic: self.cross_over_rate = .5 self.total_agents_created = 0 self.goods = ["sugar", "water"] self.goods_params = {good:{"min":5, "max":25} for good in self.goods} self.max_init_demand_vals = {"price":{"min": 1/2, "max": 2}, "quantity":{"min":10, "max":25}} self.consumption_rate = {"sugar":.5, "water":.5} self.primary_breeds = ["basic", "switcher", "arbitrageur"] self.secondary_breeds = ["herder"] self.breeds = self.primary_breeds + self.secondary_breeds basic = 1 self.breed_probabilities = {"basic":basic, "herder":0, "arbitrageur":0} self.max_vision = 1 self.transaction_prices = [] self.average_price = np.nan self.total_exchanges = 0 self.nav_dict = { v:{ i:{ j: True for j in range(-v, v + 1) if 0 < (i ** 2 + j ** 2) <= (v ** 2)} for i in range(-v, v + 1)} for v in range(1, self.max_vision + 1)} self.sugarMap = pd.read_csv('sugar-map.txt', header = None, sep = ' ') for key in self.sugarMap: self.sugarMap[key] = self.sugarMap[key].add(1) self.rows, self.cols = self.sugarMap.shape self.initializePatches() self.initializeAgents() self.data_dict = 
shelve.open("shelves\\masterShelve", writeback = True) for attribute in self.attributes: self.data_dict[attribute] = shelve.open("shelves\\subshelve-"+attribute, writeback = True) def initializePatches(self): self.patch_dict = {row:{col:0} for row in range(self.rows) for col in range(self.cols)} for row in range(self.rows): for col in range(self.cols): good = "sugar" if row + col < self.cols else "water" self.patch_dict[row][col] = Patch(self, row , col, self.sugarMap[row][col], good) self.empty_patches = RandomDict({ (row,col):self.patch_dict[row][col] for row in range(self.rows) for col in range(self.cols)}) def initializeAgents(self): self.agent_dict = {} for i in range(self.initial_population): self.total_agents_created += 1 ID = self.total_agents_created row, col = self.chooseRandomEmptyPatch() self.agent_dict[ID] = Agent(self, row, col, ID) self.patch_dict[row][col].agent = self.agent_dict[ID] self.population = self.total_agents_created def chooseRandomEmptyPatch(self): row, col = self.empty_patches.random_key() del self.empty_patches[row, col] return row, col def runModel(self, periods): def updateModelVariables(): self.population = len(agent_list) self.average_price = gmean(self.transaction_prices) self.transaction_prices = [] for period in range(1, periods + 1): self.growPatches() agent_list = list(self.agent_dict.values()) random.shuffle(agent_list) for agent in agent_list: agent.move() agent.harvest() agent.trade() agent.consume() agent.checkAlive() agent.reproduce() agent.updateParams() updateModelVariables() self.collectData(str(period)) if self.live_visual: if period % self.GUI.every_t_frames == 0: print("period", period, "population", self.population, sep = "\t") self.GUI.parent.title("Sugarscape: " + str(period)) self.GUI.updatePatches() self.GUI.moveAgents() self.GUI.canvas.update() if period == periods: mem_usage = memory_usage(-1, interval=1) print(period, "end memory usage before sync//collect:", mem_usage[0], sep = "\t") self.data_dict.sync() 
gc.collect() mem_usage = memory_usage(-1, interval=1) print(period, "end memory usage after sync//collect:", mem_usage[0], sep = "\t") def growPatches(self): for i in self.patch_dict: for patch in self.patch_dict[i].values(): if patch.Q < patch.maxQ: patch.Q += 1 def collectData(self, period): def collectAgentAttributes(): temp_dict={} for attribute in self.agent_attributes: temp_dict[attribute] = [] for ID, agent in self.agent_dict.items(): for attribute in self.agent_attributes: temp_dict[attribute].append(getattr(agent, attribute)) for attribute, val in temp_dict.items(): self.data_dict[attribute][period] = np.mean(val) def collectModelAttributes(): for attribute in self.model_attributes: self.data_dict[attribute][period] = getattr(self, attribute) collectAgentAttributes() collectModelAttributes()
true
true
f70c58be42fabc014fde7ab38fe60f065cf15661
1,583
py
Python
fixture/create_identity_docs.py
bancone/test
b3ffd796e19ab9e7c2317ce0397717e6102f7cca
[ "Apache-2.0" ]
null
null
null
fixture/create_identity_docs.py
bancone/test
b3ffd796e19ab9e7c2317ce0397717e6102f7cca
[ "Apache-2.0" ]
null
null
null
fixture/create_identity_docs.py
bancone/test
b3ffd796e19ab9e7c2317ce0397717e6102f7cca
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 import time import os class CreateID: def __init__(self, rpapp): self.rpapp = rpapp def create_docs(self): driver = self.rpapp.driver driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='ti'])[1]/following::button[6]").click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='RUS'])[1]/following::td[1]").click() time.sleep(1) driver.find_element_by_xpath('//button[text()="Create Identity Document"]').click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='Document Type'])[1]/following::div[3]").click() driver.implicitly_wait(20) driver.find_element_by_id("react-select-7-option-2").click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='File'])[1]/following::div[1]").click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='File'])[1]/preceding::input[1]").\ send_keys(os.getcwd() + "/111.png") driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='File'])[1]/following::button[1]").click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='Create Identity Document'])[1]/preceding::a[1]").\ click() driver.find_element_by_xpath( "//a[contains(@href, '/client')]").click()
45.228571
117
0.600758
import time import os class CreateID: def __init__(self, rpapp): self.rpapp = rpapp def create_docs(self): driver = self.rpapp.driver driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='ti'])[1]/following::button[6]").click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='RUS'])[1]/following::td[1]").click() time.sleep(1) driver.find_element_by_xpath('//button[text()="Create Identity Document"]').click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='Document Type'])[1]/following::div[3]").click() driver.implicitly_wait(20) driver.find_element_by_id("react-select-7-option-2").click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='File'])[1]/following::div[1]").click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='File'])[1]/preceding::input[1]").\ send_keys(os.getcwd() + "/111.png") driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='File'])[1]/following::button[1]").click() driver.find_element_by_xpath( "(.//*[normalize-space(text()) and normalize-space(.)='Create Identity Document'])[1]/preceding::a[1]").\ click() driver.find_element_by_xpath( "//a[contains(@href, '/client')]").click()
true
true
f70c591798e6640713a38f3ccae3131033c4b307
1,306
py
Python
moto/sts/models.py
andrewgross/moto
912c3ceb396e1e460c75d6a0a65f2431800a1583
[ "Apache-2.0" ]
null
null
null
moto/sts/models.py
andrewgross/moto
912c3ceb396e1e460c75d6a0a65f2431800a1583
[ "Apache-2.0" ]
null
null
null
moto/sts/models.py
andrewgross/moto
912c3ceb396e1e460c75d6a0a65f2431800a1583
[ "Apache-2.0" ]
null
null
null
import datetime from moto.core import BaseBackend from moto.core.utils import iso_8601_datetime class Token(object): def __init__(self, duration, name=None, policy=None): now = datetime.datetime.now() self.expiration = now + datetime.timedelta(seconds=duration) self.name = name self.policy = None @property def expiration_ISO8601(self): return iso_8601_datetime(self.expiration) class AssumedRole(object): def __init__(self, role_session_name, role_arn, policy, duration, external_id): self.session_name = role_session_name self.arn = role_arn self.policy = policy now = datetime.datetime.now() self.expiration = now + datetime.timedelta(seconds=duration) self.external_id = external_id @property def expiration_ISO8601(self): return iso_8601_datetime(self.expiration) class STSBackend(BaseBackend): def get_session_token(self, duration): token = Token(duration=duration) return token def get_federation_token(self, name, duration, policy): token = Token(duration=duration, name=name, policy=policy) return token def assume_role(self, **kwargs): role = AssumedRole(**kwargs) return role sts_backend = STSBackend()
28.391304
83
0.687596
import datetime from moto.core import BaseBackend from moto.core.utils import iso_8601_datetime class Token(object): def __init__(self, duration, name=None, policy=None): now = datetime.datetime.now() self.expiration = now + datetime.timedelta(seconds=duration) self.name = name self.policy = None @property def expiration_ISO8601(self): return iso_8601_datetime(self.expiration) class AssumedRole(object): def __init__(self, role_session_name, role_arn, policy, duration, external_id): self.session_name = role_session_name self.arn = role_arn self.policy = policy now = datetime.datetime.now() self.expiration = now + datetime.timedelta(seconds=duration) self.external_id = external_id @property def expiration_ISO8601(self): return iso_8601_datetime(self.expiration) class STSBackend(BaseBackend): def get_session_token(self, duration): token = Token(duration=duration) return token def get_federation_token(self, name, duration, policy): token = Token(duration=duration, name=name, policy=policy) return token def assume_role(self, **kwargs): role = AssumedRole(**kwargs) return role sts_backend = STSBackend()
true
true
f70c5ab8d21eb367285ecde00b1cc823ba363c25
41,663
py
Python
utils/gen_onnx_mlir.py
eashdown/onnx-mlir
2662d5530a01ddb11056ae7958118e82487a9eb8
[ "Apache-2.0" ]
null
null
null
utils/gen_onnx_mlir.py
eashdown/onnx-mlir
2662d5530a01ddb11056ae7958118e82487a9eb8
[ "Apache-2.0" ]
null
null
null
utils/gen_onnx_mlir.py
eashdown/onnx-mlir
2662d5530a01ddb11056ae7958118e82487a9eb8
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from collections import defaultdict, OrderedDict from io import StringIO import io import os import sys import datetime import argparse import numpy as np # type: ignore from onnx import defs, FunctionProto, helper, OperatorStatus from onnx.defs import OpSchema, ONNX_DOMAIN, ONNX_ML_DOMAIN from onnx.backend.test.case import collect_snippets from onnx.backend.sample.ops import collect_sample_implementations from typing import Any, Text, Sequence, Dict, List, Type, Set, Tuple import pprint parser = argparse.ArgumentParser() parser.add_argument("--dry-run-onnx-ops", help="Output ONNXOps.td.inc content to stdout.", action="store_true", default=False) parser.add_argument("--dry-run-op-build-table", help="Output OpBuildTable.inc content to stdout.", action="store_true", default=False) parser.add_argument("--check-operation-version", help="check whether the imported onnx package has new operation or " " newer version of operation compared with version stored in version_dicts", action="store_true", default=False) args = parser.parse_args() check_operation_version = args.check_operation_version # Record the version of each operation that is treated as the current version. # To check whether the onnx package being used has newer version operation, # run this script with --check-operation-version flag. 
# Update this dictionary when a newer version is implemented # TODO: how to keep the old version version_dict = {'Abs': 13, 'Acos': 7, 'Acosh': 9, 'Adagrad': 1, 'Adam': 1, 'Add': 13, 'And': 7, 'ArgMax': 13, 'ArgMin': 13, 'ArrayFeatureExtractor': 1, 'Asin': 7, 'Asinh': 9, 'Atan': 7, 'Atanh': 9, 'AveragePool': 11, 'BatchNormalization': 9, 'Binarizer': 1, 'BitShift': 11, 'Cast': 13, 'CastMap': 1, 'CategoryMapper': 1, 'Ceil': 13, 'Celu': 12, 'Clip': 13, 'Compress': 11, 'Concat': 13, 'ConcatFromSequence': 11, 'Constant': 13, 'ConstantOfShape': 9, 'Conv': 11, 'ConvInteger': 10, 'ConvTranspose': 11, 'Cos': 7, 'Cosh': 9, 'CumSum': 11, 'DepthToSpace': 13, 'DequantizeLinear': 13, 'Det': 11, 'DictVectorizer': 1, 'Div': 13, 'Dropout': 13, 'DynamicQuantizeLinear': 11, 'Einsum': 12, 'Elu': 6, 'Equal': 13, 'Erf': 13, 'Exp': 13, 'Expand': 13, 'EyeLike': 9, 'FeatureVectorizer': 1, 'Flatten': 13, 'Floor': 13, 'GRU': 7, 'Gather': 13, 'GatherElements': 13, 'GatherND': 13, 'Gemm': 13, 'GlobalAveragePool': 1, 'GlobalLpPool': 2, 'GlobalMaxPool': 1, 'Gradient': 1, 'Greater': 13, 'GreaterOrEqual': 12, 'HardSigmoid': 6, 'Hardmax': 13, 'Identity': 13, 'If': 13, 'Imputer': 1, 'InstanceNormalization': 6, 'IsInf': 10, 'IsNaN': 13, 'LRN': 13, 'LSTM': 7, 'LabelEncoder': 2, 'LeakyRelu': 6, 'Less': 13, 'LessOrEqual': 12, 'LinearClassifier': 1, 'LinearRegressor': 1, 'Log': 13, 'LogSoftmax': 13, 'Loop': 13, 'LpNormalization': 1, 'LpPool': 11, 'MatMul': 13, 'MatMulInteger': 10, 'Max': 13, 'MaxPool': 12, 'MaxRoiPool': 1, 'MaxUnpool': 11, 'Mean': 13, 'MeanVarianceNormalization': 13, 'Min': 13, 'Mod': 13, 'Momentum': 1, 'Mul': 13, 'Multinomial': 7, 'Neg': 13, 'NegativeLogLikelihoodLoss': 13, 'NonMaxSuppression': 11, 'NonZero': 13, 'Normalizer': 1, 'Not': 1, 'OneHot': 11, 'OneHotEncoder': 1, 'Or': 7, 'PRelu': 9, 'Pad': 13, 'Pow': 13, 'QLinearConv': 10, 'QLinearMatMul': 10, 'QuantizeLinear': 13, 'RNN': 7, 'RandomNormal': 1, 'RandomNormalLike': 1, 'RandomUniform': 1, 'RandomUniformLike': 1, 'Range': 11, 
'Reciprocal': 13, 'ReduceL1': 13, 'ReduceL2': 13, 'ReduceLogSum': 13, 'ReduceLogSumExp': 13, 'ReduceMax': 13, 'ReduceMean': 13, 'ReduceMin': 13, 'ReduceProd': 13, 'ReduceSum': 13, 'ReduceSumSquare': 13, 'Relu': 13, 'Reshape': 13, 'Resize': 13, 'ReverseSequence': 10, 'RoiAlign': 10, 'Round': 11, 'SVMClassifier': 1, 'SVMRegressor': 1, 'Scaler': 1, 'Scan': 11, 'Scatter': 11, 'ScatterElements': 13, 'ScatterND': 13, 'Selu': 6, 'SequenceAt': 11, 'SequenceConstruct': 11, 'SequenceEmpty': 11, 'SequenceErase': 11, 'SequenceInsert': 11, 'SequenceLength': 11, 'Shape': 13, 'Shrink': 9, 'Sigmoid': 13, 'Sign': 13, 'Sin': 7, 'Sinh': 9, 'Size': 13, 'Slice': 13, 'Softmax': 13, 'SoftmaxCrossEntropyLoss': 13, 'Softplus': 1, 'Softsign': 1, 'SpaceToDepth': 13, #'Split': 13, 'Split': 11, 'SplitToSequence': 11, 'Sqrt': 13, #'Squeeze': 13, 'Squeeze': 11, 'StringNormalizer': 10, 'Sub': 13, 'Sum': 13, 'Tan': 7, 'Tanh': 13, 'TfIdfVectorizer': 9, 'ThresholdedRelu': 10, 'Tile': 13, 'TopK': 11, 'Transpose': 13, 'TreeEnsembleClassifier': 1, 'TreeEnsembleRegressor': 1, 'Unique': 11, #'Unsqueeze': 13, 'Unsqueeze': 11, 'Upsample': 10, 'Where': 9, 'Xor': 7, 'ZipMap': 1} # Manual specification of attribute defaults. special_attr_defaults = dict([ # ("AveragePool.kernel_shape", ('ints', '{}')), # ("MaxPool.kernel_shape", ('ints', '{}')), # ("Cast.to", ('int', '0')), # ("Concat.axis", ('int', '0')), # ("Conv.group", ('int', '1')), # ("Unsqueeze.axes", ('ints', '{}')), # ("RNN.activation_alpha", ('floats', '{}')), # ("RNN.activation_beta", ('floats', '{}')), ]) # Manual specification of attribute type. special_attr_types = dict([("Cast.to", 'type')]) # Special operation importing handlers. special_op_handler = dict([ ("BatchNormalization", "ImportNodeBatchNormalization"), ("Dropout", "ImportNodeDropout"), ("Cast", "ImportNodeCast"), ("MaxPool", "ImportNodeMaxPool"), ("Pad", "ImportNodePad"), ("Slice", "ImportNodeSlice"), #("Transpose", "ImportNodeTranspose") ]) # Operations supporting shape inference. 
OpsWithShapeInference=[ 'Abs', 'Add', 'And', 'Atan', 'AveragePool', 'Cast', 'Concat', 'Constant', 'ConstantOfShape', 'Conv', 'ConvInteger', 'ConvTranspose', 'Cos', 'Cosh', 'DequantizeLinear', 'Div', 'Dropout', 'DynamicQuantizeLinear', 'Elu', 'Erf', 'Exp', 'Expand', 'Flatten', 'GRU', 'Gather', 'Gemm', 'GlobalAveragePool', 'GlobalLpPool', 'GlobalMaxPool', 'HardSigmoid', 'Identity', 'LSTM', 'LeakyRelu', 'Less', 'Log', 'MatMul', 'Max', 'Min', 'Mul', 'Neg', 'OneHotEncoder', 'Or', 'Pad', 'Pow', 'PRelu', 'QLinearConv', 'QuantizeLinear', 'QLinearMatMul', 'RNN', 'Reciprocal', 'ReduceMax', 'ReduceMean', 'ReduceMin', 'ReduceProd', 'ReduceSum', 'Relu', 'Reshape', 'Scaler', 'Selu', 'Shape', 'Sigmoid', 'Sign', 'Sin', 'Sinh', 'Size', 'Slice', 'Softmax', 'Softplus', 'Softsign', 'Split', 'Sqrt', 'Squeeze', 'Sub', 'Sum', 'Tan', 'Tanh', 'Tile', 'Transpose', 'Unsqueeze', 'Xor', 'Loop', ] # Operations supporting canonicalization. OpsWithCanonicalizer = ['Add', 'Constant', 'Identity', 'Gemm', 'Cast', 'Transpose', 'Dropout', 'Shape', 'Size', 'GlobalAveragePool', 'GlobalMaxPool', 'Squeeze', 'Unsqueeze'] OpsWithHelpers = { "Loop": """ mlir::Operation::result_range v_final(); mlir::Operation::result_range scan_outputs(); """, "Scan": """ mlir::Operation::operand_range v_initial(); mlir::Operation::result_range v_final(); mlir::Operation::operand_range scan_inputs(); mlir::Operation::result_range scan_outputs(); """ } # Interface for special handling of type inference # The common code are put into get_type_inference_func OpsWithResultTypeInference = { "Constant": '''if (auto attr = valueAttr()) { resultTypes.push_back(attr.getType()); } else if (auto attr = sparse_valueAttr()) { resultTypes.push_back(attr.getType()); }''', "Cast": '''auto builder = mlir::OpBuilder(getContext()); resultTypes.push_back(mlir::UnrankedTensorType::get(to()));''', "ConstantOfShape": '''if (auto attr = valueAttr()) { resultTypes.push_back(mlir::UnrankedTensorType::get( 
attr.getType().cast<ShapedType>().getElementType())); } else { resultTypes.push_back(mlir::UnrankedTensorType::get( FloatType::getF32(getContext()))); }''' } # Add an Op in this list if the Op needs result type deduction which is required # when writing declarative rewriting rules. Deduced type is always # an UnrankedTensorType whose element type is the same as the first operand's # element type. # # Currenlty, there are only two build methods generated: # - one with operands and attributes having a separate parameter, and # - one with operands and attributes having aggregated parameters. custom_builder_unranked_ops_list = ['Abs', 'Exp', 'ReduceSum', 'ReduceSumSquare', 'Pad', 'Sqrt', 'Neg', 'Unsqueeze', 'Softmax', 'ReduceMax', 'ReduceLogSum', 'Squeeze', 'Identity', 'Split'] # Custom builder op list for operations with broadcast; we can deduce the right # output type, no need to leave it undef as in the above list. # Ops must have two operands, not one, not three... And there shall be two. # TODO: handle variadic ops omitted here: Max, Min, Min, Sum. 
custom_builder_broadcast_ops_list = ['Add', 'And', 'Div', 'Equal', 'Greater', 'Less', 'Mul', 'Or', 'Pow', 'Sub', 'Xor'] # union of both custom_builder_ops_list = custom_builder_unranked_ops_list + custom_builder_broadcast_ops_list #a dictionary to add any special definition for an operation custom_definition_misc = dict([ ('Constant', ''' let builders = [ OpBuilder<(ins "Attribute":$sparse_value, "Attribute":$value), [{ if (value) { auto tensorType = value.getType(); build($_builder, $_state, tensorType, sparse_value, value, FloatAttr(), ArrayAttr(), IntegerAttr(), ArrayAttr(), StringAttr(), ArrayAttr()); } else { auto tensorType = sparse_value.getType(); build($_builder, $_state, tensorType, sparse_value, value, FloatAttr(), ArrayAttr(), IntegerAttr(), ArrayAttr(), StringAttr(), ArrayAttr()); } }]> ];'''), ('Cast', ''' let builders = [ OpBuilder<(ins "Value":$input, "TypeAttr":$to), [{ auto resultType = mlir::UnrankedTensorType::get(to.getValue()); build($_builder, $_state, resultType, input, to); }] > ];''' )]) onnx_types = ( 'bool', 'int8', 'int16', 'int32', 'int64', 'unkown', 'float16', 'float', 'double', 'complex64', 'complex128', 'string' ) tblgen_types = ('AnyI1', 'AnyI8', 'AnyI16', 'AnyI32', 'AnyI64', 'BF16', 'F16', 'F32', 'F64', 'Complex<F32>', 'Complex<F64>', 'StringType' ) MAX_NUM_TYPES=20 def should_render_domain(domain): # type: (Text) -> bool return True def display_attr_type(v): # type: (OpSchema.AttrType) -> Text assert isinstance(v, OpSchema.AttrType) s = Text(v) s = s[s.rfind('.') + 1:].lower() if s[-1] == 's': s = 'list of ' + s return s def get_unique_output_name(schema, name): for input in schema.inputs: if input.name == name: return 'out_' + name return name def onnx_attr_type_to_mlir_attr_type(t): onnx_attr_type = Text(t) onnx_attr_type = onnx_attr_type[onnx_attr_type.rfind('.') + 1:].lower() if onnx_attr_type == 'int': mlir_attr_type = 'SI64Attr' elif onnx_attr_type == 'float': mlir_attr_type = 'F32Attr' elif onnx_attr_type == 'ints': 
mlir_attr_type = 'I64ArrayAttr' elif onnx_attr_type == 'floats': mlir_attr_type = 'F32ArrayAttr' elif onnx_attr_type == "string": mlir_attr_type = 'StrAttr' elif onnx_attr_type == "strings": mlir_attr_type = 'StrArrayAttr' elif onnx_attr_type == 'type': mlir_attr_type = 'TypeAttr' else: mlir_attr_type = 'AnyAttr' #TODO: tensor and sparse tensor return mlir_attr_type #TODO: any better way to do this. def tblgen_attr_type_to_cpp_type(t): if 'I64Attr' in t: cpp_type = 'IntegerAttr' elif 'F32Attr' in t: cpp_type = 'FloatAttr' elif 'I64ArrayAttr' in t or 'F32ArrayAttr' in t: cpp_type = 'ArrayAttr' elif 'StrAttr' in t: cpp_type = 'StringAttr' elif 'strings' in t: cpp_type = 'ArrayAttr' else: cpp_type = 'Attribute' return cpp_type def tblgen_operand_type_to_cpp_type(op_type): if op_type.startswith('Variadic'): mytype = 'ValueRange' else: mytype = 'Value' return mytype def np_type_to_tblgen_attr_type(tstr): index = -1 for i in range(len(onnx_types)): if onnx_types[i] in tstr: index = i break if index == -1: return None else: return tblgen_types[i] def get_tblgen_type_index(type_str): return tblgen_types.index(type_str) #the possible data structures are tensor, map and seq(tensor()) def get_data_structure_element(allowed_type_str): structure_list = ['tensor', 'seq', 'map'] for structure in structure_list: if allowed_type_str.startswith(structure) : element = allowed_type_str.replace( structure+'(', '', 1).replace(')', '', 1) return (structure, element) return (None, None) def get_allowed_elem_types(schema, input): #allowed_types_str = None # return allowed_types_str # TODO: enable type constraints. 
if input.typeStr : tstr = input.typeStr structure, element = get_data_structure_element(tstr); # In case the type is directly specified if structure and element : t = np_type_to_tblgen_attr_type(element) if t == None : return allowed_structure, None else : return structure, [t] else : return None if schema.type_constraints: for type_constraint in schema.type_constraints: if type_constraint.type_param_str != tstr : continue allowed_type_list=[] allowedTypes = type_constraint.allowed_type_strs allowed_structure = None for allowedType in allowedTypes: structure, element = get_data_structure_element(allowedType); if structure == None or element == None: return None, None if allowed_structure != None and allowed_structure != structure : return None, None allowed_structure = structure t = np_type_to_tblgen_attr_type(element) if t == None : return allowed_structure, None if not t in allowed_type_list : allowed_tyoe_list = allowed_type_list.append(t) return allowed_structure,allowed_type_list return None, None def inc_indent(indent=None): return "" if indent is None else indent + ' ' * 2 def dec_indent(indent): return indent[:-2] def join_args(args): return ", ".join(args) def get_operands_or_results(schema, type_str_dict, is_input): value_list = schema.inputs if is_input else schema.outputs if not value_list: return OrderedDict() def any_type_of(types): assert isinstance(types, list) if len(types) == 1: return types[0] else: return "AnyTypeOf<[{}]>".format(", ".join(types)) name_to_types = OrderedDict() for i, value in enumerate(value_list): types = get_onnx_mlir_types(schema, type_str_dict, value) ''' structure, elem_types = get_allowed_elem_types(schema, type_str_dict, value) if structure == 'tensor' : if elem_types is None: types = ["AnyMemRef", "AnyTensor"] else: elem_types_str = ','.join(elem_types) types = ["TensorOf<[{}]>", "MemRefOf<[{}]>"] types = list(map(lambda x: x.format(elem_types_str), types)) elif structure == 'seq' : # Seq is not supported yet. 
# Use of TensorOf<[AnyTensor]> as a placeholder for tablegen. # When the Operation is used, warning/error will be generated at runtime. if elem_types is None: types = ["AnyMemRef", "TensorOf<[AnyTensor]>"] else: elem_types_str = ','.join(elem_types) types = ["TensorOf<[TensorOf<[{}]>]>", "MemRefOf<[{}]>"] types = list(map(lambda x: x.format(elem_types_str), types)) elif structure == 'map' : # Map is not supported yet. # Use of TupleOf as a placeholder for tablegen. # When the Operation is used, warning/error will be generated at runtime. if elem_types is None: types = ["AnyMemRef", "TupleOf<[AnyTensor]>"] else: elem_types_str = ','.join(elem_types) types = ["TupleOf<[TensorOf<[{}]>]>", "MemRefOf<[{}]>"] types = list(map(lambda x: x.format(elem_types_str), types)) else: types = ["AnyMemRef", "AnyTensor"] ''' if OpSchema.FormalParameterOption.Optional == value.option: types.append("NoneType") elif OpSchema.FormalParameterOption.Variadic == value.option: if value.isHomogeneous: types = ["Variadic<{}>".format(any_type_of(types))] else: #TODO handle(variadic, heterogeneous) " types = ["Variadic<{}>".format(any_type_of(types))] sys.stderr.write("warning: (variadic, heterogeneous) for " + schema.name + ' ' + value.name + "\n") # Since output name can coincide with that of an input, we explicitly # append a suffix "_out" to such names for disambiguation. 
if is_input: value_name = value.name else: value_name = get_unique_output_name(schema, value.name) name_to_types[value_name] = any_type_of(types) return name_to_types def get_attrs(schema): def get_attr_type_optional(attr_type): return 'OptionalAttr<{}>'.format( onnx_attr_type_to_mlir_attr_type(attr_type)) def get_attr_type_with_default(attr_type, attr_default): return 'DefaultValuedAttr<{}, "{}">'.format( onnx_attr_type_to_mlir_attr_type(attr_type), attr_default) if not schema.attributes: return OrderedDict() name_to_type = OrderedDict() for _, attr in sorted(schema.attributes.items()): if attr.type == OpSchema.AttrType.GRAPH: continue qualified_attr_name = "{}.{}".format(schema.name, attr.name) if qualified_attr_name in special_attr_defaults: name_to_type[attr.name] = get_attr_type_with_default( *special_attr_defaults[qualified_attr_name]) if qualified_attr_name in special_attr_types: name_to_type[attr.name] = onnx_attr_type_to_mlir_attr_type( special_attr_types[qualified_attr_name]) # option holds either required or default value elif attr.required: name_to_type[attr.name] = onnx_attr_type_to_mlir_attr_type( attr.type) elif attr.default_value.name: def format_value(value): # type: (Any) -> Text if isinstance(value, float): formatted = str(np.round(value, 5)) # use default formatting, unless too long. 
if (len(formatted) > 10): formatted = str("({:e})".format(value)) return formatted elif isinstance( value, (bytes, bytearray)) and sys.version_info[0] == 3: return str(value.decode('utf-8')) return str(value) default_value = helper.get_attribute_value(attr.default_value) if isinstance(default_value, list): default_value = [format_value(val) for val in default_value] default_value_str = '{}'.format(default_value) default_value_str = default_value_str.replace('[', '{', 1) default_value_str = default_value_str.replace(']', '}', 1) if Text(attr.type) == "AttrType.STRINGS": default_value_str = default_value_str.replace("'", '\\"') else: default_value_str = default_value_str.replace("'", '') else: default_value = format_value(default_value) default_value_str = default_value name_to_type[attr.name] = get_attr_type_with_default( attr.type, default_value_str) else: name_to_type[attr.name] = get_attr_type_optional(attr.type) return name_to_type def get_numberof_list(mylist): expected_num = len(mylist) for element in mylist : if OpSchema.FormalParameterOption.Variadic == element.option: expected_num = -1 return expected_num def get_output_type_mapping(schema): mapping=[] for output in schema.outputs : #if only one type is allowed, just set that structure, allowed_elem_types = get_allowed_elem_types(schema, output) if allowed_elem_types != None and len(allowed_elem_types) == 1 : mapping.append(str(get_tblgen_type_index(allowed_elem_types[0]))) continue #map the type string if output.typeStr : tstr = output.typeStr found = False for i, input in enumerate(schema.inputs): if input.typeStr and input.typeStr == tstr: mapping.append(str(i+MAX_NUM_TYPES)) found = True break if found: continue #unknown output type mapping.append(str(-1)) return mapping def get_numberof_inout(s, indent, schema): expected_num_operands = get_numberof_list(schema.inputs) indent = inc_indent(indent) s += indent + "static int getNumberOfOperands() {\n" indent = inc_indent(indent) s += indent + "return 
{};\n".format(expected_num_operands) indent = dec_indent(indent) s += indent + "}\n" expected_num_results = get_numberof_list(schema.outputs) s += indent + "static int getNumberOfResults() {\n" indent = inc_indent(indent) s += indent + "return {};\n".format(expected_num_results) indent = dec_indent(indent) s += indent + "}\n" s += indent + "static std::vector<int> getTypeMap() {\n" mapping = get_output_type_mapping(schema) indent = inc_indent(indent) s += indent + "return {" + ",".join(mapping) + "};\n" indent = dec_indent(indent) s += indent + "}\n" return s def get_promotable_const_operands_func(s, indent, const_operands_name_to_idx): cpp_name_to_idx_literal = "{" + ", ".join([ "{{\"{}\", {}}}".format(*name_to_idx) for name_to_idx in const_operands_name_to_idx ]) + "}" #s += indent + "let extraClassDeclaration = [{\n" indent = inc_indent(indent) s += indent + "std::map<std::string, size_t> promotableConstOperands() {\n" indent = inc_indent(indent) s += indent + "return {};\n".format(cpp_name_to_idx_literal) indent = dec_indent(indent) s += indent + "}\n" #indent = dec_indent(indent) #s += indent + "}];\n" return s def get_type_inference_func(s, indent, type_inference_code): indent = inc_indent(indent) s += indent + "std::vector<mlir::Type> resultTypeInference() {" + "\n" indent = inc_indent(indent) s += indent + "std::vector<mlir::Type> resultTypes;" + "\n" s += indent + type_inference_code + '\n' s += indent + "return resultTypes;" + "\n" indent = dec_indent(indent) s += indent + "}" + "\n" indent = dec_indent(indent) return s def parse_type_str(allowedType): # AnyI may be used for uint because the onnx_mlir is not generating uint output # This will be fixed later and UI will be replace AnyI onnx_to_mlir_type_dict = { '(': '<[', ')': ']>', 'tensor' : 'TensorOf', 'seq' : 'SeqOf', 'map' : 'TupleOf', 'bool': 'I1', #'uint8' : 'AnyI8', #uint16' : 'AnyI16', #uint32' : 'AnyI32', #uint64' : 'AnyI64', 'uint8' : 'UI8', 'uint16' : 'UI16', 'uint32' : 'UI32', 'uint64' : 
'UI64', 'int8' : 'I8', 'int16' : 'I16', 'int32' : 'I32', 'int64' : 'I64', 'float16' : 'F16', 'bfloat16' : 'BF16', 'float' : 'F32', 'double' : 'F64', 'unkown' : 'BF16', 'complex64' : 'Complex<F32>', 'complex128' : 'Complex<F64>', 'string' : 'StringType'} # Apply substitutions in decreasing order of key-length, so that float16 is replaced # before float, and uint16 is replaced before int16, etc. mapping = list(onnx_to_mlir_type_dict.items()) mapping.sort(key=lambda pair:len(pair[0]), reverse=True) for key, item in mapping: allowedType = allowedType.replace(key, item) return allowedType def parse_a_type_constraint(constraint): allowedTypes = constraint.allowed_type_strs mlirTypes = [] for allowedType in allowedTypes: mlirType = parse_type_str(allowedType) mlirTypes.append(mlirType) # Remove redundant and sort. # However onnx keeps a consitently meaningful order # There is no redundancy as long as each onnx type is mapped uniquely # mlirTypes = sorted(list(set(mlirTypes))) # MemRef is always needed mlirTypes.append("AnyMemRef") return mlirTypes def parse_type_constraints(schema): type_str_dict = dict() for type_constraint in schema.type_constraints: type_str_dict[type_constraint.type_param_str] = parse_a_type_constraint(type_constraint) return type_str_dict def get_onnx_mlir_types(schema, type_str_dict, input): if input.typeStr : if not input.typeStr in type_str_dict : # some arguments use type description directly # instead of constraint return [parse_type_str(input.typeStr), "AnyMemRef"] else : return type_str_dict[input.typeStr] else : print('No typeStr ', schema.name) return [] def gen_op_def(schema): indent = inc_indent() s = 'def ONNX{0}Op:ONNX_Op<"{0}",\n'.format(schema.name) regions = OrderedDict() for _, attr in sorted(schema.attributes.items()): if attr.type == OpSchema.AttrType.GRAPH: if attr.required: regions[attr.name] = "SizedRegion<1>" else: regions[attr.name] = "AnyRegion" # Generate decl for op traits. 
traits = ["NoSideEffect"] # OpsWithShapeInference: # Now the ShapeInference traits are added to all operation # Dummy implementations are added to ONNXOps.cpp # Error will be report if these operations are encountered at runtime traits.append("DeclareOpInterfaceMethods<ShapeInferenceOpInterface>") if schema.name in OpsWithResultTypeInference.keys(): traits.append("OpInterface<\"ResultTypeInferenceOpInterface\">") if len(regions): traits.append("OpInterface<\"HasOnnxSubgraphOpInterface\">") s += inc_indent(indent) + '[{}]> {{\n'.format(join_args(traits)) # Generate decl for canonicalizer. indent = inc_indent(indent) if schema.name in OpsWithCanonicalizer: s += indent + 'let hasCanonicalizer = 1;\n' # Generate decl for summary. s += indent + 'let summary = "ONNX {} operation";\n'.format(schema.name) # Generate description. s += indent + 'let description = [{\n' if schema.doc: lines = schema.doc.lstrip().splitlines() for line in lines: escaped_line = line.replace('"', '\\"')\ .replace('}]', '\\}\\]') s += indent + '"{}"\n'.format(escaped_line) s += indent + '}];\n' # handle the type constraint for input and output # parse type constraint into onnx-mlir type string list type_str_dict = parse_type_constraints(schema) # Generate ins (consisting of operands and attributes). ins = get_operands_or_results(schema, type_str_dict, is_input=True) ins.update(get_attrs(schema)) ins_strs = ["{1}:${0}".format(*i) for i in ins.items()] s += indent + 'let arguments = (ins {});\n'.format( (',\n' + inc_indent(indent)).join(ins_strs)) # Generate outs (operation results). 
outs = get_operands_or_results(schema, type_str_dict, is_input=False) outs_strs = ["{1}:${0}".format(*i) for i in outs.items()] s += indent + 'let results = (outs {});\n'.format( (',\n' + inc_indent(indent)).join(outs_strs)) regions_strs = ["{1}:${0}".format(*i) for i in regions.items()] if len(regions): s += indent + 'let regions = (region {});\n'.format( (',\n' + inc_indent(indent)).join(regions_strs)) # custom_builder_broadcast_ops_list # add custom builders # use element type of the first operand to construct an UnrankedTensorType for the output. if schema.name in custom_builder_ops_list: if len(ins) == 0: raise RuntimeWarning( "warning: not generate custom build methods for " + schema.name + " since it does not have operands.") else: s += indent + 'let builders = [\n' # Custom builders with operands and attributes having a separate parameter. # E.g. OpBuilder<(ins "Value":$X, "Value":$Y, "Attribute":$A), [{}]> indent = inc_indent(indent) s += indent + 'OpBuilder<(ins ' operands_dict = get_operands_or_results(schema, type_str_dict, is_input=True) attrs_dict = get_attrs(schema) s += ', '.join('"{}":${}'.format(tblgen_operand_type_to_cpp_type(ty), name) for name, ty in operands_dict.items()) if operands_dict and attrs_dict: s += ', ' s += ', '.join('"{}":${}'.format(tblgen_attr_type_to_cpp_type(ty), name) for name, ty in attrs_dict.items()) s += '), [{\n' indent = inc_indent(indent) # Get output type from first operand's type. first_operand_name = list(ins.items())[0][0] build_type_name = '' if schema.name in custom_builder_broadcast_ops_list: second_operand_name = list(ins.items())[1][0] s += indent + 'auto lhsTy = {}.getType();\n'. \ format(first_operand_name) s += indent + 'auto rhsTy = {}.getType();\n'. 
\ format(second_operand_name) s += indent + 'auto elementType = getBroadcastedRankedType(lhsTy, rhsTy);\n' s += indent + 'auto shapedType = elementType.dyn_cast_or_null<ShapedType>();\n'; s += indent + 'if (!shapedType || !shapedType.hasStaticShape()) {\n'; s += indent + indent + 'elementType = {}'.format(first_operand_name) + \ '.getType().cast<ShapedType>().getElementType();\n'; s += indent + indent + 'elementType = UnrankedTensorType::get(elementType);\n' s += indent + '}\n'; build_type_name = 'elementType' else: s += indent + 'auto elementType = {}'.format(first_operand_name) + \ '.getType().cast<ShapedType>().getElementType();\n' build_type_name = 'UnrankedTensorType::get(elementType)' s += indent + 'build($_builder, $_state, {}'.format(build_type_name) for name, _ in ins.items(): s += ', ' + name s += ');\n' indent = dec_indent(indent) s += indent + '}]>,\n' # Custom builders with all operands and attributes having aggregate parameters. # E.g. OpBuilder<(ins "ValueRange operands, # ArrayRef<NamedAttribute> attributes", [{}]>' s += indent + 'OpBuilder<(ins ' + \ '"ValueRange":$operands, "ArrayRef<NamedAttribute>":$attributes), [{\n' indent = inc_indent(indent) if schema.name in custom_builder_broadcast_ops_list: s += indent + 'auto lhsTy = operands[0].getType();\n' s += indent + 'auto rhsTy = operands[1].getType();\n' s += indent + 'auto elementType = getBroadcastedRankedType(lhsTy, rhsTy);\n' s += indent + 'auto shapedType = elementType.dyn_cast_or_null<ShapedType>();\n'; s += indent + 'if (!shapedType || !shapedType.hasStaticShape()) {\n'; s += indent + indent + 'elementType = operands[0]' + \ '.getType().cast<ShapedType>().getElementType();\n'; s += indent + indent + 'elementType = UnrankedTensorType::get(elementType);\n' s += indent + '}\n'; else: s += indent + 'auto elementType = operands[0].getType().' 
+ \ 'cast<ShapedType>().getElementType();\n' s += indent + 'std::vector<mlir::Type> outputTypes;\n' s += indent + 'outputTypes.emplace_back({});\n'.format(build_type_name) s += indent + 'build($_builder, $_state, outputTypes, operands, attributes);\n' indent = dec_indent(indent) s += indent + '}]>' s += '\n' + indent + '];\n' # Generate extracClassDeclaration. s += indent + "let extraClassDeclaration = [{\n" #indent = inc_indent(indent) # Generate input/output number. s = get_numberof_inout(s, indent, schema) if schema.name in OpsWithResultTypeInference: s = get_type_inference_func( s, indent, OpsWithResultTypeInference[schema.name]) if schema.name in OpsWithHelpers: s += OpsWithHelpers[schema.name] if len(regions): s += indent + "int64_t getSubgraphRegionIdx(const std::string& name) {\n" indent = inc_indent(indent) for idx, region_name in enumerate(regions.keys()): s += indent + "if (name == \"{}\") return {};\n".format(region_name, idx) s += indent + "llvm_unreachable(\"region with the specified name does not exist\");\n" indent = dec_indent(indent) s += indent + "}\n" s += indent + '}];\n' if ( schema.name in custom_definition_misc) : s += custom_definition_misc[schema.name] + '\n' s += '}\n\n' return s """ special cases: * Split: attr split default value: sizeof(output1) namely 1 * Conv: attr dilations default value is {num_dim of first input - 2, 1} * Conv: attr kernel_shape type is ints * Transpose: attr perm default value is {} empty int list """ def gen_op_importer(schema, file): indent = inc_indent() s = indent + 'import_handler_map_["' + schema.name +'"] = \n ' expected_num_operands = len(schema.inputs) expected_num_results = len(schema.outputs) for input in schema.inputs: if OpSchema.FormalParameterOption.Variadic == input.option: expected_num_operands = -1 for output in schema.outputs: if OpSchema.FormalParameterOption.Variadic == output.option: expected_num_results = -1 handler_func = special_op_handler.get( schema.name, 
"buildOperation<mlir::ONNX{}Op>".format(schema.name)) # Special handlers currently require expected num operands/results to be specified. # TODO: remove special handlers. args = ["node"] """ if expected_num_operands != -1 or expected_num_results != -1 or "buildOperation" not in handler_func: args.append( "/* expected_num_operands = */ {}".format(expected_num_operands)) args.append( '/* expected_num_results = */ {}'.format(expected_num_results)) """ s += inc_indent(indent) + '&onnx_mlir::detail::FrontendGenImpl::' s += handler_func+';\n' file.write(s) def build_operator_schemas(): # domain -> support level -> name -> [schema] index = defaultdict(lambda: defaultdict(lambda: defaultdict( list))) # type: Dict[Text, Dict[int, Dict[Text, List[OpSchema]]]] for schema in defs.get_all_schemas_with_history(): index[schema.domain][int( schema.support_level)][schema.name].append(schema) # Preprocess the Operator Schemas # [(domain, [(support_level, [(schema name, current schema, all versions schemas)])])] operator_schemas = list( ) # type: List[Tuple[Text, List[Tuple[int, List[Tuple[Text, OpSchema, List[OpSchema]]]]]]] exsting_ops = set() # type: Set[Text] for domain, _supportmap in sorted(index.items()): if not should_render_domain(domain): continue processed_supportmap = list() for _support, _namemap in sorted(_supportmap.items()): processed_namemap = list() for n, unsorted_versions in sorted(_namemap.items()): versions = sorted(unsorted_versions, key=lambda s: s.since_version) schema = versions[-1] if schema.name in exsting_ops: continue if check_operation_version : # Generate operation of the latest version of your onnx. 
exsting_ops.add(schema.name) processed_namemap.append((n, schema, versions)) # Add checks against version_dict if schema.name not in version_dict : print("Check-operation-version: Operation {} is new with version {}" .format(schema.name, schema.since_version)) elif schema.since_version > version_dict[schema.name]: print("Check-operation-version: Operation {}" .format(schema.name)+ " has a newer version {} over old version {}" .format(schema.since_version, version_dict[schema.name])) else: # Generate operation according to the version in version_dict. if schema.name not in version_dict : continue found = False for schema in reversed(versions): # Check the version number against the version_dict if schema.since_version == version_dict[schema.name]: exsting_ops.add(schema.name) processed_namemap.append((n, schema, versions)) found = True break if not found: print("Your onnx installation may be too old. " "The desired version for operation {} is not found.".format( schema.name)) sys.exit() processed_supportmap.append((_support, processed_namemap)) operator_schemas.append((domain, processed_supportmap)) return operator_schemas def main(args): # type: (Type[Args]) -> None curr_utc_time = datetime.datetime.now( datetime.timezone.utc).strftime("%m/%d/%Y, %H:%M:%S") autogen_warning = ( '//********************************************************\n' '// Do not modify this file directly.\n' '// This file is automatically generated via script.\n' '// Details can be found in docs/ImportONNXDefs.md .\n' '//********************************************************\n\n') autogen_warning = autogen_warning.format(curr_utc_time) op_def = args.op_def op_def.write(autogen_warning) op_importer = args.op_importer op_importer.write(autogen_warning) version_dict = dict() for domain, supportmap in build_operator_schemas(): for _, namemap in supportmap: for op_type, schema, versions in namemap: if check_operation_version: version_dict[schema.name] = schema.since_version else: 
gen_op_importer(schema, op_importer) r = gen_op_def(schema) op_def.write(r) if check_operation_version : pprint.pprint(version_dict) if __name__ == '__main__': curr_dir = os.path.dirname(os.path.realpath(__file__)) class Args(object): if args.dry_run_onnx_ops: op_def = StringIO() else: op_def_file_path = os.path.join(curr_dir, 'ONNXOps.td.inc') op_def = io.open(op_def_file_path, 'w', newline='') if args.dry_run_op_build_table: op_importer = StringIO() else: op_importer_file_path = os.path.join(curr_dir, 'OpBuildTable.inc') op_importer = io.open(op_importer_file_path, 'w', newline='') main(Args) if args.dry_run_onnx_ops: sys.stdout.write(Args.op_def.getvalue()) if args.dry_run_op_build_table: sys.stdout.write(Args.op_importer.getvalue())
34.15
105
0.595612
from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from collections import defaultdict, OrderedDict from io import StringIO import io import os import sys import datetime import argparse import numpy as np from onnx import defs, FunctionProto, helper, OperatorStatus from onnx.defs import OpSchema, ONNX_DOMAIN, ONNX_ML_DOMAIN from onnx.backend.test.case import collect_snippets from onnx.backend.sample.ops import collect_sample_implementations from typing import Any, Text, Sequence, Dict, List, Type, Set, Tuple import pprint parser = argparse.ArgumentParser() parser.add_argument("--dry-run-onnx-ops", help="Output ONNXOps.td.inc content to stdout.", action="store_true", default=False) parser.add_argument("--dry-run-op-build-table", help="Output OpBuildTable.inc content to stdout.", action="store_true", default=False) parser.add_argument("--check-operation-version", help="check whether the imported onnx package has new operation or " " newer version of operation compared with version stored in version_dicts", action="store_true", default=False) args = parser.parse_args() check_operation_version = args.check_operation_version version_dict = {'Abs': 13, 'Acos': 7, 'Acosh': 9, 'Adagrad': 1, 'Adam': 1, 'Add': 13, 'And': 7, 'ArgMax': 13, 'ArgMin': 13, 'ArrayFeatureExtractor': 1, 'Asin': 7, 'Asinh': 9, 'Atan': 7, 'Atanh': 9, 'AveragePool': 11, 'BatchNormalization': 9, 'Binarizer': 1, 'BitShift': 11, 'Cast': 13, 'CastMap': 1, 'CategoryMapper': 1, 'Ceil': 13, 'Celu': 12, 'Clip': 13, 'Compress': 11, 'Concat': 13, 'ConcatFromSequence': 11, 'Constant': 13, 'ConstantOfShape': 9, 'Conv': 11, 'ConvInteger': 10, 'ConvTranspose': 11, 'Cos': 7, 'Cosh': 9, 'CumSum': 11, 'DepthToSpace': 13, 'DequantizeLinear': 13, 'Det': 11, 'DictVectorizer': 1, 'Div': 13, 'Dropout': 13, 'DynamicQuantizeLinear': 11, 'Einsum': 12, 'Elu': 6, 'Equal': 13, 'Erf': 13, 'Exp': 13, 'Expand': 13, 'EyeLike': 9, 
'FeatureVectorizer': 1, 'Flatten': 13, 'Floor': 13, 'GRU': 7, 'Gather': 13, 'GatherElements': 13, 'GatherND': 13, 'Gemm': 13, 'GlobalAveragePool': 1, 'GlobalLpPool': 2, 'GlobalMaxPool': 1, 'Gradient': 1, 'Greater': 13, 'GreaterOrEqual': 12, 'HardSigmoid': 6, 'Hardmax': 13, 'Identity': 13, 'If': 13, 'Imputer': 1, 'InstanceNormalization': 6, 'IsInf': 10, 'IsNaN': 13, 'LRN': 13, 'LSTM': 7, 'LabelEncoder': 2, 'LeakyRelu': 6, 'Less': 13, 'LessOrEqual': 12, 'LinearClassifier': 1, 'LinearRegressor': 1, 'Log': 13, 'LogSoftmax': 13, 'Loop': 13, 'LpNormalization': 1, 'LpPool': 11, 'MatMul': 13, 'MatMulInteger': 10, 'Max': 13, 'MaxPool': 12, 'MaxRoiPool': 1, 'MaxUnpool': 11, 'Mean': 13, 'MeanVarianceNormalization': 13, 'Min': 13, 'Mod': 13, 'Momentum': 1, 'Mul': 13, 'Multinomial': 7, 'Neg': 13, 'NegativeLogLikelihoodLoss': 13, 'NonMaxSuppression': 11, 'NonZero': 13, 'Normalizer': 1, 'Not': 1, 'OneHot': 11, 'OneHotEncoder': 1, 'Or': 7, 'PRelu': 9, 'Pad': 13, 'Pow': 13, 'QLinearConv': 10, 'QLinearMatMul': 10, 'QuantizeLinear': 13, 'RNN': 7, 'RandomNormal': 1, 'RandomNormalLike': 1, 'RandomUniform': 1, 'RandomUniformLike': 1, 'Range': 11, 'Reciprocal': 13, 'ReduceL1': 13, 'ReduceL2': 13, 'ReduceLogSum': 13, 'ReduceLogSumExp': 13, 'ReduceMax': 13, 'ReduceMean': 13, 'ReduceMin': 13, 'ReduceProd': 13, 'ReduceSum': 13, 'ReduceSumSquare': 13, 'Relu': 13, 'Reshape': 13, 'Resize': 13, 'ReverseSequence': 10, 'RoiAlign': 10, 'Round': 11, 'SVMClassifier': 1, 'SVMRegressor': 1, 'Scaler': 1, 'Scan': 11, 'Scatter': 11, 'ScatterElements': 13, 'ScatterND': 13, 'Selu': 6, 'SequenceAt': 11, 'SequenceConstruct': 11, 'SequenceEmpty': 11, 'SequenceErase': 11, 'SequenceInsert': 11, 'SequenceLength': 11, 'Shape': 13, 'Shrink': 9, 'Sigmoid': 13, 'Sign': 13, 'Sin': 7, 'Sinh': 9, 'Size': 13, 'Slice': 13, 'Softmax': 13, 'SoftmaxCrossEntropyLoss': 13, 'Softplus': 1, 'Softsign': 1, 'SpaceToDepth': 13, 'Split': 11, 'SplitToSequence': 11, 'Sqrt': 13, 'Squeeze': 11, 'StringNormalizer': 10, 'Sub': 13, 'Sum': 
13, 'Tan': 7, 'Tanh': 13, 'TfIdfVectorizer': 9, 'ThresholdedRelu': 10, 'Tile': 13, 'TopK': 11, 'Transpose': 13, 'TreeEnsembleClassifier': 1, 'TreeEnsembleRegressor': 1, 'Unique': 11, 'Unsqueeze': 11, 'Upsample': 10, 'Where': 9, 'Xor': 7, 'ZipMap': 1} special_attr_defaults = dict([ ]) special_attr_types = dict([("Cast.to", 'type')]) special_op_handler = dict([ ("BatchNormalization", "ImportNodeBatchNormalization"), ("Dropout", "ImportNodeDropout"), ("Cast", "ImportNodeCast"), ("MaxPool", "ImportNodeMaxPool"), ("Pad", "ImportNodePad"), ("Slice", "ImportNodeSlice"), ]) OpsWithShapeInference=[ 'Abs', 'Add', 'And', 'Atan', 'AveragePool', 'Cast', 'Concat', 'Constant', 'ConstantOfShape', 'Conv', 'ConvInteger', 'ConvTranspose', 'Cos', 'Cosh', 'DequantizeLinear', 'Div', 'Dropout', 'DynamicQuantizeLinear', 'Elu', 'Erf', 'Exp', 'Expand', 'Flatten', 'GRU', 'Gather', 'Gemm', 'GlobalAveragePool', 'GlobalLpPool', 'GlobalMaxPool', 'HardSigmoid', 'Identity', 'LSTM', 'LeakyRelu', 'Less', 'Log', 'MatMul', 'Max', 'Min', 'Mul', 'Neg', 'OneHotEncoder', 'Or', 'Pad', 'Pow', 'PRelu', 'QLinearConv', 'QuantizeLinear', 'QLinearMatMul', 'RNN', 'Reciprocal', 'ReduceMax', 'ReduceMean', 'ReduceMin', 'ReduceProd', 'ReduceSum', 'Relu', 'Reshape', 'Scaler', 'Selu', 'Shape', 'Sigmoid', 'Sign', 'Sin', 'Sinh', 'Size', 'Slice', 'Softmax', 'Softplus', 'Softsign', 'Split', 'Sqrt', 'Squeeze', 'Sub', 'Sum', 'Tan', 'Tanh', 'Tile', 'Transpose', 'Unsqueeze', 'Xor', 'Loop', ] OpsWithCanonicalizer = ['Add', 'Constant', 'Identity', 'Gemm', 'Cast', 'Transpose', 'Dropout', 'Shape', 'Size', 'GlobalAveragePool', 'GlobalMaxPool', 'Squeeze', 'Unsqueeze'] OpsWithHelpers = { "Loop": """ mlir::Operation::result_range v_final(); mlir::Operation::result_range scan_outputs(); """, "Scan": """ mlir::Operation::operand_range v_initial(); mlir::Operation::result_range v_final(); mlir::Operation::operand_range scan_inputs(); mlir::Operation::result_range scan_outputs(); """ } OpsWithResultTypeInference = { "Constant": '''if 
(auto attr = valueAttr()) { resultTypes.push_back(attr.getType()); } else if (auto attr = sparse_valueAttr()) { resultTypes.push_back(attr.getType()); }''', "Cast": '''auto builder = mlir::OpBuilder(getContext()); resultTypes.push_back(mlir::UnrankedTensorType::get(to()));''', "ConstantOfShape": '''if (auto attr = valueAttr()) { resultTypes.push_back(mlir::UnrankedTensorType::get( attr.getType().cast<ShapedType>().getElementType())); } else { resultTypes.push_back(mlir::UnrankedTensorType::get( FloatType::getF32(getContext()))); }''' } # element type. # # Currenlty, there are only two build methods generated: # - one with operands and attributes having a separate parameter, and # - one with operands and attributes having aggregated parameters. custom_builder_unranked_ops_list = ['Abs', 'Exp', 'ReduceSum', 'ReduceSumSquare', 'Pad', 'Sqrt', 'Neg', 'Unsqueeze', 'Softmax', 'ReduceMax', 'ReduceLogSum', 'Squeeze', 'Identity', 'Split'] # Custom builder op list for operations with broadcast; we can deduce the right # output type, no need to leave it undef as in the above list. # Ops must have two operands, not one, not three... And there shall be two. # TODO: handle variadic ops omitted here: Max, Min, Min, Sum. 
custom_builder_broadcast_ops_list = ['Add', 'And', 'Div', 'Equal', 'Greater', 'Less', 'Mul', 'Or', 'Pow', 'Sub', 'Xor'] # union of both custom_builder_ops_list = custom_builder_unranked_ops_list + custom_builder_broadcast_ops_list #a dictionary to add any special definition for an operation custom_definition_misc = dict([ ('Constant', ''' let builders = [ OpBuilder<(ins "Attribute":$sparse_value, "Attribute":$value), [{ if (value) { auto tensorType = value.getType(); build($_builder, $_state, tensorType, sparse_value, value, FloatAttr(), ArrayAttr(), IntegerAttr(), ArrayAttr(), StringAttr(), ArrayAttr()); } else { auto tensorType = sparse_value.getType(); build($_builder, $_state, tensorType, sparse_value, value, FloatAttr(), ArrayAttr(), IntegerAttr(), ArrayAttr(), StringAttr(), ArrayAttr()); } }]> ];'''), ('Cast', ''' let builders = [ OpBuilder<(ins "Value":$input, "TypeAttr":$to), [{ auto resultType = mlir::UnrankedTensorType::get(to.getValue()); build($_builder, $_state, resultType, input, to); }] > ];''' )]) onnx_types = ( 'bool', 'int8', 'int16', 'int32', 'int64', 'unkown', 'float16', 'float', 'double', 'complex64', 'complex128', 'string' ) tblgen_types = ('AnyI1', 'AnyI8', 'AnyI16', 'AnyI32', 'AnyI64', 'BF16', 'F16', 'F32', 'F64', 'Complex<F32>', 'Complex<F64>', 'StringType' ) MAX_NUM_TYPES=20 def should_render_domain(domain): # type: (Text) -> bool return True def display_attr_type(v): # type: (OpSchema.AttrType) -> Text assert isinstance(v, OpSchema.AttrType) s = Text(v) s = s[s.rfind('.') + 1:].lower() if s[-1] == 's': s = 'list of ' + s return s def get_unique_output_name(schema, name): for input in schema.inputs: if input.name == name: return 'out_' + name return name def onnx_attr_type_to_mlir_attr_type(t): onnx_attr_type = Text(t) onnx_attr_type = onnx_attr_type[onnx_attr_type.rfind('.') + 1:].lower() if onnx_attr_type == 'int': mlir_attr_type = 'SI64Attr' elif onnx_attr_type == 'float': mlir_attr_type = 'F32Attr' elif onnx_attr_type == 'ints': 
mlir_attr_type = 'I64ArrayAttr' elif onnx_attr_type == 'floats': mlir_attr_type = 'F32ArrayAttr' elif onnx_attr_type == "string": mlir_attr_type = 'StrAttr' elif onnx_attr_type == "strings": mlir_attr_type = 'StrArrayAttr' elif onnx_attr_type == 'type': mlir_attr_type = 'TypeAttr' else: mlir_attr_type = 'AnyAttr' #TODO: tensor and sparse tensor return mlir_attr_type #TODO: any better way to do this. def tblgen_attr_type_to_cpp_type(t): if 'I64Attr' in t: cpp_type = 'IntegerAttr' elif 'F32Attr' in t: cpp_type = 'FloatAttr' elif 'I64ArrayAttr' in t or 'F32ArrayAttr' in t: cpp_type = 'ArrayAttr' elif 'StrAttr' in t: cpp_type = 'StringAttr' elif 'strings' in t: cpp_type = 'ArrayAttr' else: cpp_type = 'Attribute' return cpp_type def tblgen_operand_type_to_cpp_type(op_type): if op_type.startswith('Variadic'): mytype = 'ValueRange' else: mytype = 'Value' return mytype def np_type_to_tblgen_attr_type(tstr): index = -1 for i in range(len(onnx_types)): if onnx_types[i] in tstr: index = i break if index == -1: return None else: return tblgen_types[i] def get_tblgen_type_index(type_str): return tblgen_types.index(type_str) #the possible data structures are tensor, map and seq(tensor()) def get_data_structure_element(allowed_type_str): structure_list = ['tensor', 'seq', 'map'] for structure in structure_list: if allowed_type_str.startswith(structure) : element = allowed_type_str.replace( structure+'(', '', 1).replace(')', '', 1) return (structure, element) return (None, None) def get_allowed_elem_types(schema, input): #allowed_types_str = None # return allowed_types_str # TODO: enable type constraints. 
if input.typeStr : tstr = input.typeStr structure, element = get_data_structure_element(tstr); # In case the type is directly specified if structure and element : t = np_type_to_tblgen_attr_type(element) if t == None : return allowed_structure, None else : return structure, [t] else : return None if schema.type_constraints: for type_constraint in schema.type_constraints: if type_constraint.type_param_str != tstr : continue allowed_type_list=[] allowedTypes = type_constraint.allowed_type_strs allowed_structure = None for allowedType in allowedTypes: structure, element = get_data_structure_element(allowedType); if structure == None or element == None: return None, None if allowed_structure != None and allowed_structure != structure : return None, None allowed_structure = structure t = np_type_to_tblgen_attr_type(element) if t == None : return allowed_structure, None if not t in allowed_type_list : allowed_tyoe_list = allowed_type_list.append(t) return allowed_structure,allowed_type_list return None, None def inc_indent(indent=None): return "" if indent is None else indent + ' ' * 2 def dec_indent(indent): return indent[:-2] def join_args(args): return ", ".join(args) def get_operands_or_results(schema, type_str_dict, is_input): value_list = schema.inputs if is_input else schema.outputs if not value_list: return OrderedDict() def any_type_of(types): assert isinstance(types, list) if len(types) == 1: return types[0] else: return "AnyTypeOf<[{}]>".format(", ".join(types)) name_to_types = OrderedDict() for i, value in enumerate(value_list): types = get_onnx_mlir_types(schema, type_str_dict, value) if OpSchema.FormalParameterOption.Optional == value.option: types.append("NoneType") elif OpSchema.FormalParameterOption.Variadic == value.option: if value.isHomogeneous: types = ["Variadic<{}>".format(any_type_of(types))] else: #TODO handle(variadic, heterogeneous) " types = ["Variadic<{}>".format(any_type_of(types))] sys.stderr.write("warning: (variadic, heterogeneous) for " 
+ schema.name + ' ' + value.name + "\n") # Since output name can coincide with that of an input, we explicitly # append a suffix "_out" to such names for disambiguation. if is_input: value_name = value.name else: value_name = get_unique_output_name(schema, value.name) name_to_types[value_name] = any_type_of(types) return name_to_types def get_attrs(schema): def get_attr_type_optional(attr_type): return 'OptionalAttr<{}>'.format( onnx_attr_type_to_mlir_attr_type(attr_type)) def get_attr_type_with_default(attr_type, attr_default): return 'DefaultValuedAttr<{}, "{}">'.format( onnx_attr_type_to_mlir_attr_type(attr_type), attr_default) if not schema.attributes: return OrderedDict() name_to_type = OrderedDict() for _, attr in sorted(schema.attributes.items()): if attr.type == OpSchema.AttrType.GRAPH: continue qualified_attr_name = "{}.{}".format(schema.name, attr.name) if qualified_attr_name in special_attr_defaults: name_to_type[attr.name] = get_attr_type_with_default( *special_attr_defaults[qualified_attr_name]) if qualified_attr_name in special_attr_types: name_to_type[attr.name] = onnx_attr_type_to_mlir_attr_type( special_attr_types[qualified_attr_name]) # option holds either required or default value elif attr.required: name_to_type[attr.name] = onnx_attr_type_to_mlir_attr_type( attr.type) elif attr.default_value.name: def format_value(value): # type: (Any) -> Text if isinstance(value, float): formatted = str(np.round(value, 5)) # use default formatting, unless too long. 
if (len(formatted) > 10): formatted = str("({:e})".format(value)) return formatted elif isinstance( value, (bytes, bytearray)) and sys.version_info[0] == 3: return str(value.decode('utf-8')) return str(value) default_value = helper.get_attribute_value(attr.default_value) if isinstance(default_value, list): default_value = [format_value(val) for val in default_value] default_value_str = '{}'.format(default_value) default_value_str = default_value_str.replace('[', '{', 1) default_value_str = default_value_str.replace(']', '}', 1) if Text(attr.type) == "AttrType.STRINGS": default_value_str = default_value_str.replace("'", '\\"') else: default_value_str = default_value_str.replace("'", '') else: default_value = format_value(default_value) default_value_str = default_value name_to_type[attr.name] = get_attr_type_with_default( attr.type, default_value_str) else: name_to_type[attr.name] = get_attr_type_optional(attr.type) return name_to_type def get_numberof_list(mylist): expected_num = len(mylist) for element in mylist : if OpSchema.FormalParameterOption.Variadic == element.option: expected_num = -1 return expected_num def get_output_type_mapping(schema): mapping=[] for output in schema.outputs : #if only one type is allowed, just set that structure, allowed_elem_types = get_allowed_elem_types(schema, output) if allowed_elem_types != None and len(allowed_elem_types) == 1 : mapping.append(str(get_tblgen_type_index(allowed_elem_types[0]))) continue #map the type string if output.typeStr : tstr = output.typeStr found = False for i, input in enumerate(schema.inputs): if input.typeStr and input.typeStr == tstr: mapping.append(str(i+MAX_NUM_TYPES)) found = True break if found: continue #unknown output type mapping.append(str(-1)) return mapping def get_numberof_inout(s, indent, schema): expected_num_operands = get_numberof_list(schema.inputs) indent = inc_indent(indent) s += indent + "static int getNumberOfOperands() {\n" indent = inc_indent(indent) s += indent + "return 
{};\n".format(expected_num_operands) indent = dec_indent(indent) s += indent + "}\n" expected_num_results = get_numberof_list(schema.outputs) s += indent + "static int getNumberOfResults() {\n" indent = inc_indent(indent) s += indent + "return {};\n".format(expected_num_results) indent = dec_indent(indent) s += indent + "}\n" s += indent + "static std::vector<int> getTypeMap() {\n" mapping = get_output_type_mapping(schema) indent = inc_indent(indent) s += indent + "return {" + ",".join(mapping) + "};\n" indent = dec_indent(indent) s += indent + "}\n" return s def get_promotable_const_operands_func(s, indent, const_operands_name_to_idx): cpp_name_to_idx_literal = "{" + ", ".join([ "{{\"{}\", {}}}".format(*name_to_idx) for name_to_idx in const_operands_name_to_idx ]) + "}" #s += indent + "let extraClassDeclaration = [{\n" indent = inc_indent(indent) s += indent + "std::map<std::string, size_t> promotableConstOperands() {\n" indent = inc_indent(indent) s += indent + "return {};\n".format(cpp_name_to_idx_literal) indent = dec_indent(indent) s += indent + "}\n" #indent = dec_indent(indent) #s += indent + "}];\n" return s def get_type_inference_func(s, indent, type_inference_code): indent = inc_indent(indent) s += indent + "std::vector<mlir::Type> resultTypeInference() {" + "\n" indent = inc_indent(indent) s += indent + "std::vector<mlir::Type> resultTypes;" + "\n" s += indent + type_inference_code + '\n' s += indent + "return resultTypes;" + "\n" indent = dec_indent(indent) s += indent + "}" + "\n" indent = dec_indent(indent) return s def parse_type_str(allowedType): # AnyI may be used for uint because the onnx_mlir is not generating uint output # This will be fixed later and UI will be replace AnyI onnx_to_mlir_type_dict = { '(': '<[', ')': ']>', 'tensor' : 'TensorOf', 'seq' : 'SeqOf', 'map' : 'TupleOf', 'bool': 'I1', #'uint8' : 'AnyI8', #uint16' : 'AnyI16', #uint64' : 'AnyI64', 'uint8' : 'UI8', 'uint16' : 'UI16', 'uint32' : 'UI32', 'uint64' : 'UI64', 'int8' : 'I8', 
'int16' : 'I16', 'int32' : 'I32', 'int64' : 'I64', 'float16' : 'F16', 'bfloat16' : 'BF16', 'float' : 'F32', 'double' : 'F64', 'unkown' : 'BF16', 'complex64' : 'Complex<F32>', 'complex128' : 'Complex<F64>', 'string' : 'StringType'} mapping = list(onnx_to_mlir_type_dict.items()) mapping.sort(key=lambda pair:len(pair[0]), reverse=True) for key, item in mapping: allowedType = allowedType.replace(key, item) return allowedType def parse_a_type_constraint(constraint): allowedTypes = constraint.allowed_type_strs mlirTypes = [] for allowedType in allowedTypes: mlirType = parse_type_str(allowedType) mlirTypes.append(mlirType) mlirTypes.append("AnyMemRef") return mlirTypes def parse_type_constraints(schema): type_str_dict = dict() for type_constraint in schema.type_constraints: type_str_dict[type_constraint.type_param_str] = parse_a_type_constraint(type_constraint) return type_str_dict def get_onnx_mlir_types(schema, type_str_dict, input): if input.typeStr : if not input.typeStr in type_str_dict : return [parse_type_str(input.typeStr), "AnyMemRef"] else : return type_str_dict[input.typeStr] else : print('No typeStr ', schema.name) return [] def gen_op_def(schema): indent = inc_indent() s = 'def ONNX{0}Op:ONNX_Op<"{0}",\n'.format(schema.name) regions = OrderedDict() for _, attr in sorted(schema.attributes.items()): if attr.type == OpSchema.AttrType.GRAPH: if attr.required: regions[attr.name] = "SizedRegion<1>" else: regions[attr.name] = "AnyRegion" traits = ["NoSideEffect"] traits.append("DeclareOpInterfaceMethods<ShapeInferenceOpInterface>") if schema.name in OpsWithResultTypeInference.keys(): traits.append("OpInterface<\"ResultTypeInferenceOpInterface\">") if len(regions): traits.append("OpInterface<\"HasOnnxSubgraphOpInterface\">") s += inc_indent(indent) + '[{}]> {{\n'.format(join_args(traits)) indent = inc_indent(indent) if schema.name in OpsWithCanonicalizer: s += indent + 'let hasCanonicalizer = 1;\n' s += indent + 'let summary = "ONNX {} 
operation";\n'.format(schema.name) s += indent + 'let description = [{\n' if schema.doc: lines = schema.doc.lstrip().splitlines() for line in lines: escaped_line = line.replace('"', '\\"')\ .replace('}]', '\\}\\]') s += indent + '"{}"\n'.format(escaped_line) s += indent + '}];\n' type_str_dict = parse_type_constraints(schema) ins = get_operands_or_results(schema, type_str_dict, is_input=True) ins.update(get_attrs(schema)) ins_strs = ["{1}:${0}".format(*i) for i in ins.items()] s += indent + 'let arguments = (ins {});\n'.format( (',\n' + inc_indent(indent)).join(ins_strs)) outs = get_operands_or_results(schema, type_str_dict, is_input=False) outs_strs = ["{1}:${0}".format(*i) for i in outs.items()] s += indent + 'let results = (outs {});\n'.format( (',\n' + inc_indent(indent)).join(outs_strs)) regions_strs = ["{1}:${0}".format(*i) for i in regions.items()] if len(regions): s += indent + 'let regions = (region {});\n'.format( (',\n' + inc_indent(indent)).join(regions_strs)) if schema.name in custom_builder_ops_list: if len(ins) == 0: raise RuntimeWarning( "warning: not generate custom build methods for " + schema.name + " since it does not have operands.") else: s += indent + 'let builders = [\n' indent = inc_indent(indent) s += indent + 'OpBuilder<(ins ' operands_dict = get_operands_or_results(schema, type_str_dict, is_input=True) attrs_dict = get_attrs(schema) s += ', '.join('"{}":${}'.format(tblgen_operand_type_to_cpp_type(ty), name) for name, ty in operands_dict.items()) if operands_dict and attrs_dict: s += ', ' s += ', '.join('"{}":${}'.format(tblgen_attr_type_to_cpp_type(ty), name) for name, ty in attrs_dict.items()) s += '), [{\n' indent = inc_indent(indent) first_operand_name = list(ins.items())[0][0] build_type_name = '' if schema.name in custom_builder_broadcast_ops_list: second_operand_name = list(ins.items())[1][0] s += indent + 'auto lhsTy = {}.getType();\n'. \ format(first_operand_name) s += indent + 'auto rhsTy = {}.getType();\n'. 
\ format(second_operand_name) s += indent + 'auto elementType = getBroadcastedRankedType(lhsTy, rhsTy);\n' s += indent + 'auto shapedType = elementType.dyn_cast_or_null<ShapedType>();\n'; s += indent + 'if (!shapedType || !shapedType.hasStaticShape()) {\n'; s += indent + indent + 'elementType = {}'.format(first_operand_name) + \ '.getType().cast<ShapedType>().getElementType();\n'; s += indent + indent + 'elementType = UnrankedTensorType::get(elementType);\n' s += indent + '}\n'; build_type_name = 'elementType' else: s += indent + 'auto elementType = {}'.format(first_operand_name) + \ '.getType().cast<ShapedType>().getElementType();\n' build_type_name = 'UnrankedTensorType::get(elementType)' s += indent + 'build($_builder, $_state, {}'.format(build_type_name) for name, _ in ins.items(): s += ', ' + name s += ');\n' indent = dec_indent(indent) s += indent + '}]>,\n' # Custom builders with all operands and attributes having aggregate parameters. # E.g. OpBuilder<(ins "ValueRange operands, # ArrayRef<NamedAttribute> attributes", [{}]>' s += indent + 'OpBuilder<(ins ' + \ '"ValueRange":$operands, "ArrayRef<NamedAttribute>":$attributes), [{\n' indent = inc_indent(indent) if schema.name in custom_builder_broadcast_ops_list: s += indent + 'auto lhsTy = operands[0].getType();\n' s += indent + 'auto rhsTy = operands[1].getType();\n' s += indent + 'auto elementType = getBroadcastedRankedType(lhsTy, rhsTy);\n' s += indent + 'auto shapedType = elementType.dyn_cast_or_null<ShapedType>();\n'; s += indent + 'if (!shapedType || !shapedType.hasStaticShape()) {\n'; s += indent + indent + 'elementType = operands[0]' + \ '.getType().cast<ShapedType>().getElementType();\n'; s += indent + indent + 'elementType = UnrankedTensorType::get(elementType);\n' s += indent + '}\n'; else: s += indent + 'auto elementType = operands[0].getType().' 
+ \ 'cast<ShapedType>().getElementType();\n' s += indent + 'std::vector<mlir::Type> outputTypes;\n' s += indent + 'outputTypes.emplace_back({});\n'.format(build_type_name) s += indent + 'build($_builder, $_state, outputTypes, operands, attributes);\n' indent = dec_indent(indent) s += indent + '}]>' s += '\n' + indent + '];\n' s += indent + "let extraClassDeclaration = [{\n" s = get_numberof_inout(s, indent, schema) if schema.name in OpsWithResultTypeInference: s = get_type_inference_func( s, indent, OpsWithResultTypeInference[schema.name]) if schema.name in OpsWithHelpers: s += OpsWithHelpers[schema.name] if len(regions): s += indent + "int64_t getSubgraphRegionIdx(const std::string& name) {\n" indent = inc_indent(indent) for idx, region_name in enumerate(regions.keys()): s += indent + "if (name == \"{}\") return {};\n".format(region_name, idx) s += indent + "llvm_unreachable(\"region with the specified name does not exist\");\n" indent = dec_indent(indent) s += indent + "}\n" s += indent + '}];\n' if ( schema.name in custom_definition_misc) : s += custom_definition_misc[schema.name] + '\n' s += '}\n\n' return s def gen_op_importer(schema, file): indent = inc_indent() s = indent + 'import_handler_map_["' + schema.name +'"] = \n ' expected_num_operands = len(schema.inputs) expected_num_results = len(schema.outputs) for input in schema.inputs: if OpSchema.FormalParameterOption.Variadic == input.option: expected_num_operands = -1 for output in schema.outputs: if OpSchema.FormalParameterOption.Variadic == output.option: expected_num_results = -1 handler_func = special_op_handler.get( schema.name, "buildOperation<mlir::ONNX{}Op>".format(schema.name)) args = ["node"] s += inc_indent(indent) + '&onnx_mlir::detail::FrontendGenImpl::' s += handler_func+';\n' file.write(s) def build_operator_schemas(): index = defaultdict(lambda: defaultdict(lambda: defaultdict( list))) for schema in defs.get_all_schemas_with_history(): index[schema.domain][int( 
schema.support_level)][schema.name].append(schema) operator_schemas = list( ) exsting_ops = set() for domain, _supportmap in sorted(index.items()): if not should_render_domain(domain): continue processed_supportmap = list() for _support, _namemap in sorted(_supportmap.items()): processed_namemap = list() for n, unsorted_versions in sorted(_namemap.items()): versions = sorted(unsorted_versions, key=lambda s: s.since_version) schema = versions[-1] if schema.name in exsting_ops: continue if check_operation_version : exsting_ops.add(schema.name) processed_namemap.append((n, schema, versions)) if schema.name not in version_dict : print("Check-operation-version: Operation {} is new with version {}" .format(schema.name, schema.since_version)) elif schema.since_version > version_dict[schema.name]: print("Check-operation-version: Operation {}" .format(schema.name)+ " has a newer version {} over old version {}" .format(schema.since_version, version_dict[schema.name])) else: if schema.name not in version_dict : continue found = False for schema in reversed(versions): if schema.since_version == version_dict[schema.name]: exsting_ops.add(schema.name) processed_namemap.append((n, schema, versions)) found = True break if not found: print("Your onnx installation may be too old. 
" "The desired version for operation {} is not found.".format( schema.name)) sys.exit() processed_supportmap.append((_support, processed_namemap)) operator_schemas.append((domain, processed_supportmap)) return operator_schemas def main(args): curr_utc_time = datetime.datetime.now( datetime.timezone.utc).strftime("%m/%d/%Y, %H:%M:%S") autogen_warning = ( '//********************************************************\n' '// Do not modify this file directly.\n' '// This file is automatically generated via script.\n' '// Details can be found in docs/ImportONNXDefs.md .\n' '//********************************************************\n\n') autogen_warning = autogen_warning.format(curr_utc_time) op_def = args.op_def op_def.write(autogen_warning) op_importer = args.op_importer op_importer.write(autogen_warning) version_dict = dict() for domain, supportmap in build_operator_schemas(): for _, namemap in supportmap: for op_type, schema, versions in namemap: if check_operation_version: version_dict[schema.name] = schema.since_version else: gen_op_importer(schema, op_importer) r = gen_op_def(schema) op_def.write(r) if check_operation_version : pprint.pprint(version_dict) if __name__ == '__main__': curr_dir = os.path.dirname(os.path.realpath(__file__)) class Args(object): if args.dry_run_onnx_ops: op_def = StringIO() else: op_def_file_path = os.path.join(curr_dir, 'ONNXOps.td.inc') op_def = io.open(op_def_file_path, 'w', newline='') if args.dry_run_op_build_table: op_importer = StringIO() else: op_importer_file_path = os.path.join(curr_dir, 'OpBuildTable.inc') op_importer = io.open(op_importer_file_path, 'w', newline='') main(Args) if args.dry_run_onnx_ops: sys.stdout.write(Args.op_def.getvalue()) if args.dry_run_op_build_table: sys.stdout.write(Args.op_importer.getvalue())
true
true
f70c5bac57d302bc5a45ac3b4b947aa42b326dfb
5,320
py
Python
tests/components/smartthings/test_sensor.py
itewk/home-assistant
769cf19052f8c9ef374d8ba8ae7705ccc7bf4cf4
[ "Apache-2.0" ]
23
2017-11-15T21:03:53.000Z
2021-03-29T21:33:48.000Z
tests/components/smartthings/test_sensor.py
itewk/home-assistant
769cf19052f8c9ef374d8ba8ae7705ccc7bf4cf4
[ "Apache-2.0" ]
39
2016-12-16T12:40:34.000Z
2017-02-13T17:53:42.000Z
tests/components/smartthings/test_sensor.py
itewk/home-assistant
769cf19052f8c9ef374d8ba8ae7705ccc7bf4cf4
[ "Apache-2.0" ]
10
2018-01-01T00:12:51.000Z
2021-12-21T23:08:05.000Z
""" Test for the SmartThings sensors platform. The only mocking required is of the underlying SmartThings API object so real HTTP calls are not initiated during testing. """ from pysmartthings import ATTRIBUTES, CAPABILITIES, Attribute, Capability from homeassistant.components.sensor import DEVICE_CLASSES, DOMAIN as SENSOR_DOMAIN from homeassistant.components.smartthings import sensor from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, ) from homeassistant.helpers.dispatcher import async_dispatcher_send from .conftest import setup_platform async def test_mapping_integrity(): """Test ensures the map dicts have proper integrity.""" for capability, maps in sensor.CAPABILITY_TO_SENSORS.items(): assert capability in CAPABILITIES, capability for sensor_map in maps: assert sensor_map.attribute in ATTRIBUTES, sensor_map.attribute if sensor_map.device_class: assert ( sensor_map.device_class in DEVICE_CLASSES ), sensor_map.device_class async def test_async_setup_platform(): """Test setup platform does nothing (it uses config entries).""" await sensor.async_setup_platform(None, None, None) async def test_entity_state(hass, device_factory): """Tests the state attributes properly match the sensor types.""" device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100}) await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) state = hass.states.get("sensor.sensor_1_battery") assert state.state == "100" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == "%" assert state.attributes[ATTR_FRIENDLY_NAME] == device.label + " Battery" async def test_entity_three_axis_state(hass, device_factory): """Tests the state attributes properly match the three axis types.""" device = device_factory( "Three Axis", [Capability.three_axis], {Attribute.three_axis: [100, 75, 25]} ) await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) state 
= hass.states.get("sensor.three_axis_x_coordinate") assert state.state == "100" assert state.attributes[ATTR_FRIENDLY_NAME] == device.label + " X Coordinate" state = hass.states.get("sensor.three_axis_y_coordinate") assert state.state == "75" assert state.attributes[ATTR_FRIENDLY_NAME] == device.label + " Y Coordinate" state = hass.states.get("sensor.three_axis_z_coordinate") assert state.state == "25" assert state.attributes[ATTR_FRIENDLY_NAME] == device.label + " Z Coordinate" async def test_entity_three_axis_invalid_state(hass, device_factory): """Tests the state attributes properly match the three axis types.""" device = device_factory( "Three Axis", [Capability.three_axis], {Attribute.three_axis: []} ) await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) state = hass.states.get("sensor.three_axis_x_coordinate") assert state.state == STATE_UNKNOWN state = hass.states.get("sensor.three_axis_y_coordinate") assert state.state == STATE_UNKNOWN state = hass.states.get("sensor.three_axis_z_coordinate") assert state.state == STATE_UNKNOWN async def test_entity_and_device_attributes(hass, device_factory): """Test the attributes of the entity are correct.""" # Arrange device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100}) entity_registry = await hass.helpers.entity_registry.async_get_registry() device_registry = await hass.helpers.device_registry.async_get_registry() # Act await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) # Assert entry = entity_registry.async_get("sensor.sensor_1_battery") assert entry assert entry.unique_id == device.device_id + "." 
+ Attribute.battery entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, []) assert entry assert entry.name == device.label assert entry.model == device.device_type_name assert entry.manufacturer == "Unavailable" async def test_update_from_signal(hass, device_factory): """Test the binary_sensor updates when receiving a signal.""" # Arrange device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100}) await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) device.status.apply_attribute_update( "main", Capability.battery, Attribute.battery, 75 ) # Act async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id]) # Assert await hass.async_block_till_done() state = hass.states.get("sensor.sensor_1_battery") assert state is not None assert state.state == "75" async def test_unload_config_entry(hass, device_factory): """Test the binary_sensor is removed when the config entry is unloaded.""" # Arrange device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100}) config_entry = await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) # Act await hass.config_entries.async_forward_entry_unload(config_entry, "sensor") # Assert assert not hass.states.get("sensor.sensor_1_battery")
42.56
88
0.738346
from pysmartthings import ATTRIBUTES, CAPABILITIES, Attribute, Capability from homeassistant.components.sensor import DEVICE_CLASSES, DOMAIN as SENSOR_DOMAIN from homeassistant.components.smartthings import sensor from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE from homeassistant.const import ( ATTR_FRIENDLY_NAME, ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, ) from homeassistant.helpers.dispatcher import async_dispatcher_send from .conftest import setup_platform async def test_mapping_integrity(): for capability, maps in sensor.CAPABILITY_TO_SENSORS.items(): assert capability in CAPABILITIES, capability for sensor_map in maps: assert sensor_map.attribute in ATTRIBUTES, sensor_map.attribute if sensor_map.device_class: assert ( sensor_map.device_class in DEVICE_CLASSES ), sensor_map.device_class async def test_async_setup_platform(): await sensor.async_setup_platform(None, None, None) async def test_entity_state(hass, device_factory): device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100}) await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) state = hass.states.get("sensor.sensor_1_battery") assert state.state == "100" assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == "%" assert state.attributes[ATTR_FRIENDLY_NAME] == device.label + " Battery" async def test_entity_three_axis_state(hass, device_factory): device = device_factory( "Three Axis", [Capability.three_axis], {Attribute.three_axis: [100, 75, 25]} ) await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) state = hass.states.get("sensor.three_axis_x_coordinate") assert state.state == "100" assert state.attributes[ATTR_FRIENDLY_NAME] == device.label + " X Coordinate" state = hass.states.get("sensor.three_axis_y_coordinate") assert state.state == "75" assert state.attributes[ATTR_FRIENDLY_NAME] == device.label + " Y Coordinate" state = hass.states.get("sensor.three_axis_z_coordinate") assert state.state == "25" assert 
state.attributes[ATTR_FRIENDLY_NAME] == device.label + " Z Coordinate" async def test_entity_three_axis_invalid_state(hass, device_factory): device = device_factory( "Three Axis", [Capability.three_axis], {Attribute.three_axis: []} ) await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) state = hass.states.get("sensor.three_axis_x_coordinate") assert state.state == STATE_UNKNOWN state = hass.states.get("sensor.three_axis_y_coordinate") assert state.state == STATE_UNKNOWN state = hass.states.get("sensor.three_axis_z_coordinate") assert state.state == STATE_UNKNOWN async def test_entity_and_device_attributes(hass, device_factory): device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100}) entity_registry = await hass.helpers.entity_registry.async_get_registry() device_registry = await hass.helpers.device_registry.async_get_registry() await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) entry = entity_registry.async_get("sensor.sensor_1_battery") assert entry assert entry.unique_id == device.device_id + "." 
+ Attribute.battery entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, []) assert entry assert entry.name == device.label assert entry.model == device.device_type_name assert entry.manufacturer == "Unavailable" async def test_update_from_signal(hass, device_factory): device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100}) await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) device.status.apply_attribute_update( "main", Capability.battery, Attribute.battery, 75 ) async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id]) await hass.async_block_till_done() state = hass.states.get("sensor.sensor_1_battery") assert state is not None assert state.state == "75" async def test_unload_config_entry(hass, device_factory): device = device_factory("Sensor 1", [Capability.battery], {Attribute.battery: 100}) config_entry = await setup_platform(hass, SENSOR_DOMAIN, devices=[device]) await hass.config_entries.async_forward_entry_unload(config_entry, "sensor") assert not hass.states.get("sensor.sensor_1_battery")
true
true
f70c5e0ef140c99c7cfd6c015d53084e18fd2d1c
2,676
py
Python
pvoutput.py
houthacker/solar
fd7b3d36ce27a8287eee4a5cd7a2c383d1bb4379
[ "MIT" ]
6
2017-12-02T00:31:11.000Z
2020-04-16T18:48:37.000Z
pvoutput.py
semonet/solar
fd7b3d36ce27a8287eee4a5cd7a2c383d1bb4379
[ "MIT" ]
null
null
null
pvoutput.py
semonet/solar
fd7b3d36ce27a8287eee4a5cd7a2c383d1bb4379
[ "MIT" ]
2
2019-04-11T08:46:46.000Z
2020-09-14T18:57:51.000Z
#!/usr/bin/env python3 # pvoutput.py # # Simple library for uploading data to PVOutput. import urllib.request import urllib.parse import urllib.error import logging import sys logger = logging.getLogger(__name__) class System: """Provides methods for direct uploading to PVOutput for set system.""" def __init__(self, api_key, system_id): self.api_key = api_key self.system_id = system_id def add_output(self, data): """Add end of day output information. Data should be a dictionary with parameters as described here: http://pvoutput.org/help.html#api-addoutput .""" url = "http://pvoutput.org/service/r2/addoutput.jsp" self.__make_request(url, data) def add_status(self, data): """Add live output data. Data should contain the parameters as described here: http://pvoutput.org/help.html#api-addstatus .""" url = "http://pvoutput.org/service/r2/addstatus.jsp" self.__make_request(url, data) # Could add methods like 'get_status' def __make_request(self, url, data): logger.debug('Making request: %s, %s', url, data) data = urllib.parse.urlencode(data).encode('ascii') req = urllib.request.Request(url, data) req.add_header('X-Pvoutput-Apikey', self.api_key) req.add_header('X-Pvoutput-SystemId', self.system_id) try: f = urllib.request.urlopen(req) except urllib.error.HTTPError as e: logger.error('Upload failed: %s', e.read().decode()) except urllib.error.URLError as e: logger.error('Upload failed: %s', e) else: with f: logger.debug('Response: %s', f.read().decode()) def __str__(self): return self.system_id.__str__() def __repr__(self): return self.system_id.__repr__() def __hash__(self): return self.system_id.__hash__() def __eq__(self, other): return self.system_id == other.system_id # Test code if __name__ == '__main__': import time import configparser logging.basicConfig(level=logging.DEBUG, stream=sys.stdout) data = { 'd': time.strftime('%Y%m%d'), 't': time.strftime('%H:%M'), 'v1': 0, # Energy today 'v2': 0, # Output power 'v5': 20.0, # Temperature 'v6': 230.0 # Grid voltage } 
config = configparser.ConfigParser() config.read_file(open('samil_upload.ini')) # Assumes a default API key and system ID api_key = config['DEFAULT']['API key'] system_id = config['DEFAULT']['System ID'] pv = System(api_key, system_id) pv.add_status(data)
31.116279
80
0.628176
import urllib.request import urllib.parse import urllib.error import logging import sys logger = logging.getLogger(__name__) class System: def __init__(self, api_key, system_id): self.api_key = api_key self.system_id = system_id def add_output(self, data): url = "http://pvoutput.org/service/r2/addoutput.jsp" self.__make_request(url, data) def add_status(self, data): url = "http://pvoutput.org/service/r2/addstatus.jsp" self.__make_request(url, data) def __make_request(self, url, data): logger.debug('Making request: %s, %s', url, data) data = urllib.parse.urlencode(data).encode('ascii') req = urllib.request.Request(url, data) req.add_header('X-Pvoutput-Apikey', self.api_key) req.add_header('X-Pvoutput-SystemId', self.system_id) try: f = urllib.request.urlopen(req) except urllib.error.HTTPError as e: logger.error('Upload failed: %s', e.read().decode()) except urllib.error.URLError as e: logger.error('Upload failed: %s', e) else: with f: logger.debug('Response: %s', f.read().decode()) def __str__(self): return self.system_id.__str__() def __repr__(self): return self.system_id.__repr__() def __hash__(self): return self.system_id.__hash__() def __eq__(self, other): return self.system_id == other.system_id if __name__ == '__main__': import time import configparser logging.basicConfig(level=logging.DEBUG, stream=sys.stdout) data = { 'd': time.strftime('%Y%m%d'), 't': time.strftime('%H:%M'), 'v1': 0, 'v2': 0, 'v5': 20.0, 'v6': 230.0 } config = configparser.ConfigParser() config.read_file(open('samil_upload.ini')) api_key = config['DEFAULT']['API key'] system_id = config['DEFAULT']['System ID'] pv = System(api_key, system_id) pv.add_status(data)
true
true
f70c5e1a08b6c30cd22ab82097aa427cc601e770
758
py
Python
weddingPlanner/urls.py
mithran77/online-wedding-planner
9212d163de5ec75c30f33e1733a3d863049f1a59
[ "BSD-2-Clause" ]
null
null
null
weddingPlanner/urls.py
mithran77/online-wedding-planner
9212d163de5ec75c30f33e1733a3d863049f1a59
[ "BSD-2-Clause" ]
4
2020-02-11T21:37:44.000Z
2021-06-10T17:26:16.000Z
weddingPlanner/urls.py
mithran77/online-wedding-planner
9212d163de5ec75c30f33e1733a3d863049f1a59
[ "BSD-2-Clause" ]
null
null
null
from django.conf.urls import include, url # pragma: no cover from django.contrib import admin # pragma: no cover from weddingServices import views as ws_views # pragma: no cover from django.contrib.auth import views as auth_views # pragma: no cover urlpatterns = [ # pragma: no cover # Examples: # url(r'^$', 'weddingPlanner.views.home', name='home'), # url(r'^blog/', include('blog.urls')), url(r'^admin/', include(admin.site.urls)), url(r'^login/$', auth_views.login, {'template_name': 'weddingServices/login.html'}, name='login'), url(r'^logout/$', auth_views.logout, {'next_page': '/weddingServices'}, name='logout'), url(r'^signup/$', ws_views.signup, name='signup'), url(r'^weddingServices/', include('weddingServices.urls')), ]
42.111111
99
0.693931
from django.conf.urls import include, url from django.contrib import admin from weddingServices import views as ws_views from django.contrib.auth import views as auth_views urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^login/$', auth_views.login, {'template_name': 'weddingServices/login.html'}, name='login'), url(r'^logout/$', auth_views.logout, {'next_page': '/weddingServices'}, name='logout'), url(r'^signup/$', ws_views.signup, name='signup'), url(r'^weddingServices/', include('weddingServices.urls')), ]
true
true
f70c5e61b4dd32058f4477ae50c3d199891c6439
19,730
py
Python
modules/s3db/tour.py
nck0405/ChennaiEden
a2381e1b557927fe211ca6329ba64f85dd743fd1
[ "MIT" ]
1
2015-01-24T04:31:51.000Z
2015-01-24T04:31:51.000Z
modules/s3db/tour.py
nck0405/ChennaiEden
a2381e1b557927fe211ca6329ba64f85dd743fd1
[ "MIT" ]
null
null
null
modules/s3db/tour.py
nck0405/ChennaiEden
a2381e1b557927fe211ca6329ba64f85dd743fd1
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Sahana Eden Guided Tour Model @copyright: 2009-2015 (c) Sahana Software Foundation @license: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
@todo: update for new template path modules/template """ __all__ = ("S3GuidedTourModel", "tour_rheader", "tour_builder", ) from gluon import * from gluon.storage import Storage from ..s3 import * # ============================================================================= class S3GuidedTourModel(S3Model): """ Details about which guided tours this Person has completed """ names = ("tour_config", "tour_details", "tour_user", ) def model(self): T = current.T db = current.db NONE = current.messages["NONE"] s3 = current.response.s3 add_components = self.add_components configure = self.configure crud_strings = s3.crud_strings define_table = self.define_table person_id = self.pr_person_id # --------------------------------------------------------------------- # Guided tours that are available # tablename = "tour_config" define_table(tablename, Field("name", represent=lambda v: v or NONE, label=T("Display name"), requires = IS_NOT_EMPTY(), ), Field("code", length=255, notnull=True, unique=True, represent=lambda v: v or NONE, label=T("Unique code")), Field("controller", represent=lambda v: v or NONE, label=T("Controller tour is activated")), Field("function", represent=lambda v: v or NONE, label=T("Function tour is activated")), Field("autostart", "boolean", default=False, represent=lambda v: \ T("Yes") if v else T("No"), label=T("Auto start")), Field("role", "string", represent=lambda v: v or NONE, label=T("User's role")), * s3_meta_fields() ) # CRUD strings ADD_TOUR = T("Create Tour") crud_strings[tablename] = Storage( label_create = ADD_TOUR, title_display = T("Tour Configuration"), title_list = T("Tours"), title_update = T("Edit Tour"), label_list_button = T("List Tours"), label_delete_button = T("Delete Tour"), msg_record_created = T("Tour added"), msg_record_modified = T("Tour updated"), msg_record_deleted = T("Tour deleted"), msg_list_empty = T("No Tours currently registered")) represent = S3Represent(lookup=tablename, translate=True) tour_config_id = 
S3ReusableField("tour_config_id", "reference %s" % tablename, requires = IS_EMPTY_OR( IS_ONE_OF(db, "tour_config.id", represent, sort=True)), represent=represent, label=T("Tour Name"), ondelete="SET NULL") # Components add_components(tablename, # Details tour_details="tour_config_id", # Users tour_user="tour_config_id", ) # --------------------------------------------------------------------- # Details of the tour. # tablename = "tour_details" define_table(tablename, tour_config_id(empty = False), Field("posn", "integer", default=0, label=T("Position in tour")), Field("controller", represent=lambda v: v or NONE, label=T("Controller name")), Field("function", represent=lambda v: v or NONE, label=T("Function name")), Field("args", represent=lambda v: v or NONE, label=T("Arguments")), Field("tip_title", represent=lambda v: v or NONE, label=T("Title")), Field("tip_details", represent=lambda v: v or NONE, label=T("Details")), Field("html_id", represent=lambda v: v or NONE, label=T("HTML ID")), Field("html_class", represent=lambda v: v or NONE, label=T("HTML class")), Field("button", represent=lambda v: v or NONE, label=T("Button name")), Field("tip_location", represent=lambda v: v or NONE, label=T("Loctaion of tip")), Field("datatable_id", represent=lambda v: v or NONE, label=T("DataTable ID")), Field("datatable_row", represent=lambda v: v or NONE, label=T("DataTable row")), Field("redirect", represent=lambda v: v or NONE, label=T("Redirect URL")), ) # CRUD strings ADD_DETAILS = T("Create Details") crud_strings[tablename] = Storage( label_create = ADD_DETAILS, title_display = T("Tour Details"), title_list = T("Details"), title_update = T("Edit Details"), label_list_button = T("List Details"), label_delete_button = T("Delete Detail"), msg_record_created = T("Detail added"), msg_record_modified = T("Detail updated"), msg_record_deleted = T("Detail deleted"), msg_list_empty = T("No Details currently registered")) configure(tablename, orderby = 
"tour_details.tour_config_id,tour_details.posn" ) # --------------------------------------------------------------------- # Details of the tours that the user has taken. # tablename = "tour_user" define_table(tablename, person_id(label = T("Person"), ondelete="CASCADE", empty = False, ), tour_config_id(), Field("place", represent=lambda v: v or NONE, label=T("Where reached")), Field("resume", represent=lambda v: v or NONE, label=T("URL to resume tour")), Field("completed", "boolean", default=False, represent=lambda v: \ T("Yes") if v else T("No"), label=T("Completed tour?")), Field("trip_counter", "integer", default=0, label=T("Times Completed")), ) # CRUD strings ADD_USER = T("Create User") crud_strings[tablename] = Storage( label_create = ADD_USER, title_display = T("Tour User"), title_list = T("Users"), title_update = T("Edit User"), label_list_button = T("List Users"), label_delete_button = T("Delete User"), msg_record_created = T("User added"), msg_record_modified = T("User updated"), msg_record_deleted = T("User deleted"), msg_list_empty = T("No users have taken a tour")) # --------------------------------------------------------------------- # Pass names back to global scope (s3.*) # return dict(tour_config_id = tour_config_id, ) # ============================================================================= def tour_rheader(r): """ Resource Header for Guided Tour """ if r.representation == "html": tour = r.record if tour: T = current.T tabs = [(T("Edit Details"), None), (T("Details"), "details"), (T("People"), "user"), ] rheader_tabs = s3_rheader_tabs(r, tabs) table = r.table rheader = DIV(TABLE(TR(TH("%s: " % table.name.label), tour.name, ), TR(TH("%s: " % table.code.label), tour.code, ), ), rheader_tabs ) return rheader return None # ============================================================================= def tour_builder(output): """ Helper function to attach a guided tour (if required) to the output """ auth = current.auth db = current.db s3db = 
current.s3db request = current.request s3 = current.response.s3 T = current.T req_vars = request.vars tour_id = req_vars.tour # Now see if the details are on the database for this user tour = None user_id = None if auth.is_logged_in(): user_id = auth.s3_logged_in_person() # Find out if the user has done this tour before utable = s3db.tour_user uquery = (utable.person_id == user_id) & \ (utable.tour_config_id == tour_id) tour = db(uquery).select(utable.id, utable.completed, utable.place, utable.resume, limitby=(0, 1)).first() # If the tour has just been started (from the menu) then # it may be necessary to redirect to a different controller # @todo: does place need to be changed to controller and function? if not req_vars.tour_running: if (tour and not tour.completed and tour.place != request.controller): redirect("%s?tour=%s" %(tour.resume, tour_id)) # get the details from the database dtable = s3db.tour_details dquery = (dtable.tour_config_id == tour_id) &\ (dtable.controller == request.controller) &\ (dtable.function == request.function) details = db(dquery).select(dtable.args, dtable.tip_title, dtable.tip_details, dtable.button, dtable.tip_location, dtable.html_id, dtable.html_class, dtable.datatable_id, dtable.datatable_row, dtable.redirect, orderby=(dtable.posn) ) # tour_filename = os.path.join(request.folder, # "private", # "tour", # tour_name) # tour_file = open (tour_filename, "rb") # # now open the details of the guided_tour into a dictionary # import csv # tour_details = csv.DictReader(tour_file, skipinitialspace=True) # load the list of tour items in the html joyride_OL = OL(_id="joyrideID_1") pre_step_data = [] post_step_data = [] post_ride_data = [] last_row = None last_used = None req_args = request.args cnt = -1 for row in details: if row.args: args = row.args.split(",") else: args = [] # if the page has a nested login form then "login" will be added to # the req_args list so it needs to be added to the args list as well if "login" in req_args: if 
"login" not in args: args.append("login") # The following will capture the actual id used for the req_arg # Example org/organisation/10, where 10 is the id from the database posn = 0 for arg in args: if arg == "dt_id": args[posn] = req_args[posn] posn += 1 # Now check that the tour url matches the current url if (args == req_args): cnt += 1 # number of records used in this part of the tour if row.datatable_id: dt_id = row.datatable_id # cols = [] # if "DataTable_columns" in row: # cols = row["DataTable_columns"].split(",") row_num = 0 if row.datatable_row: row_num = row.datatable_row # Now set this up for the pre-processor hook in joyride pre_step_data.append([cnt, dt_id, row_num]) if row.redirect: redirect_row = row.redirect.split(",") if len(redirect_row) >= 3: url = URL(c=redirect_row[0], f=redirect_row[1], args=redirect_row[2:], vars={"tour_running":True, "tour":tour_id} ) if "dt_id" in redirect_row[2]: post_step_data.append([cnt, url, dt_id, row_num]) elif len(redirect_row) == 2: url = URL(c=redirect_row[0], f=redirect_row[1], vars={"tour_running":True, "tour":tour_id} ) post_step_data.append([cnt, url]) else: url = URL(c=redirect_row[0],vars={"tour_running":True, "tour":tour_id}) post_step_data.append([cnt, url]) extra = {} if row.html_id: extra["_data-id"] = row.html_id elif row.html_class: extra["_data-class"] = row.html_class if row.button: extra["_data-button"] = row.button else: extra["_data-button"] = "Next" if row.tip_location: extra["_data-options"] = "tipLocation:%s" % row.tip_location.lower() else: extra["_data-options"] = "tipLocation:right" joyride_OL.append(LI(H2(T(row.tip_title)), P(T(row.tip_details)), **extra ) ) last_used = row last_row = row # The following redirect will be triggered if the user has moved away # from the tour, such as by clicking on a tab. However if a tab # is part of the tour we are unable to determine if they have moved # away or just visiting as part of the tour and so it will continue. 
if len(joyride_OL) == 0: del request.vars.tour redirect(URL(args=req_args, vars=request.vars)) if (user_id != None) and (last_row == last_used): # set up an AJAX call to record that the tour has been completed post_ride_data = [cnt, tour_id] joyride_div = DIV(joyride_OL, _class="hidden") # Add the javascript configuration data from gluon.serializers import json as jsons if pre_step_data: joyride_div.append(INPUT(_type="hidden", _id="prestep_data", _name="prestep_data", _value=jsons(pre_step_data)) ) if post_step_data: joyride_div.append(INPUT(_type="hidden", _id="poststep_data", _name="poststep_data", _value=jsons(post_step_data)) ) if post_ride_data: joyride_div.append(INPUT(_type="hidden", _id="postride_data", _name="postride_data", _value=jsons(post_ride_data)) ) # Now add the details to the tour_user table if user_id != None: if tour == None: # this user has never done this tour before so create a new record utable.insert(person_id = user_id, tour_config_id = tour_id, place = request.controller, resume = request.url) else: # the user has done some of this tour so update the record db(uquery).update(place = request.controller, resume = request.url, completed = False) output["joyride_div"] = joyride_div if s3.debug: appname = request.application s3.scripts.append("/%s/static/scripts/jquery.joyride.js" % appname) s3.scripts.append("/%s/static/scripts/S3/s3.guidedtour.js" % appname) s3.stylesheets.append("plugins/joyride.min.css") else: s3.scripts.append("/%s/static/scripts/S3/s3.guidedtour.min.js" % request.application) s3.stylesheets.append("plugins/joyride.css") return output # END =========================================================================
42.15812
93
0.456918
__all__ = ("S3GuidedTourModel", "tour_rheader", "tour_builder", ) from gluon import * from gluon.storage import Storage from ..s3 import * class S3GuidedTourModel(S3Model): names = ("tour_config", "tour_details", "tour_user", ) def model(self): T = current.T db = current.db NONE = current.messages["NONE"] s3 = current.response.s3 add_components = self.add_components configure = self.configure crud_strings = s3.crud_strings define_table = self.define_table person_id = self.pr_person_id tablename = "tour_config" define_table(tablename, Field("name", represent=lambda v: v or NONE, label=T("Display name"), requires = IS_NOT_EMPTY(), ), Field("code", length=255, notnull=True, unique=True, represent=lambda v: v or NONE, label=T("Unique code")), Field("controller", represent=lambda v: v or NONE, label=T("Controller tour is activated")), Field("function", represent=lambda v: v or NONE, label=T("Function tour is activated")), Field("autostart", "boolean", default=False, represent=lambda v: \ T("Yes") if v else T("No"), label=T("Auto start")), Field("role", "string", represent=lambda v: v or NONE, label=T("User's role")), * s3_meta_fields() ) # CRUD strings ADD_TOUR = T("Create Tour") crud_strings[tablename] = Storage( label_create = ADD_TOUR, title_display = T("Tour Configuration"), title_list = T("Tours"), title_update = T("Edit Tour"), label_list_button = T("List Tours"), label_delete_button = T("Delete Tour"), msg_record_created = T("Tour added"), msg_record_modified = T("Tour updated"), msg_record_deleted = T("Tour deleted"), msg_list_empty = T("No Tours currently registered")) represent = S3Represent(lookup=tablename, translate=True) tour_config_id = S3ReusableField("tour_config_id", "reference %s" % tablename, requires = IS_EMPTY_OR( IS_ONE_OF(db, "tour_config.id", represent, sort=True)), represent=represent, label=T("Tour Name"), ondelete="SET NULL") # Components add_components(tablename, # Details tour_details="tour_config_id", # Users tour_user="tour_config_id", ) 
# --------------------------------------------------------------------- # Details of the tour. # tablename = "tour_details" define_table(tablename, tour_config_id(empty = False), Field("posn", "integer", default=0, label=T("Position in tour")), Field("controller", represent=lambda v: v or NONE, label=T("Controller name")), Field("function", represent=lambda v: v or NONE, label=T("Function name")), Field("args", represent=lambda v: v or NONE, label=T("Arguments")), Field("tip_title", represent=lambda v: v or NONE, label=T("Title")), Field("tip_details", represent=lambda v: v or NONE, label=T("Details")), Field("html_id", represent=lambda v: v or NONE, label=T("HTML ID")), Field("html_class", represent=lambda v: v or NONE, label=T("HTML class")), Field("button", represent=lambda v: v or NONE, label=T("Button name")), Field("tip_location", represent=lambda v: v or NONE, label=T("Loctaion of tip")), Field("datatable_id", represent=lambda v: v or NONE, label=T("DataTable ID")), Field("datatable_row", represent=lambda v: v or NONE, label=T("DataTable row")), Field("redirect", represent=lambda v: v or NONE, label=T("Redirect URL")), ) # CRUD strings ADD_DETAILS = T("Create Details") crud_strings[tablename] = Storage( label_create = ADD_DETAILS, title_display = T("Tour Details"), title_list = T("Details"), title_update = T("Edit Details"), label_list_button = T("List Details"), label_delete_button = T("Delete Detail"), msg_record_created = T("Detail added"), msg_record_modified = T("Detail updated"), msg_record_deleted = T("Detail deleted"), msg_list_empty = T("No Details currently registered")) configure(tablename, orderby = "tour_details.tour_config_id,tour_details.posn" ) # --------------------------------------------------------------------- # Details of the tours that the user has taken. 
# tablename = "tour_user" define_table(tablename, person_id(label = T("Person"), ondelete="CASCADE", empty = False, ), tour_config_id(), Field("place", represent=lambda v: v or NONE, label=T("Where reached")), Field("resume", represent=lambda v: v or NONE, label=T("URL to resume tour")), Field("completed", "boolean", default=False, represent=lambda v: \ T("Yes") if v else T("No"), label=T("Completed tour?")), Field("trip_counter", "integer", default=0, label=T("Times Completed")), ) # CRUD strings ADD_USER = T("Create User") crud_strings[tablename] = Storage( label_create = ADD_USER, title_display = T("Tour User"), title_list = T("Users"), title_update = T("Edit User"), label_list_button = T("List Users"), label_delete_button = T("Delete User"), msg_record_created = T("User added"), msg_record_modified = T("User updated"), msg_record_deleted = T("User deleted"), msg_list_empty = T("No users have taken a tour")) # --------------------------------------------------------------------- # Pass names back to global scope (s3.*) # return dict(tour_config_id = tour_config_id, ) # ============================================================================= def tour_rheader(r): if r.representation == "html": tour = r.record if tour: T = current.T tabs = [(T("Edit Details"), None), (T("Details"), "details"), (T("People"), "user"), ] rheader_tabs = s3_rheader_tabs(r, tabs) table = r.table rheader = DIV(TABLE(TR(TH("%s: " % table.name.label), tour.name, ), TR(TH("%s: " % table.code.label), tour.code, ), ), rheader_tabs ) return rheader return None # ============================================================================= def tour_builder(output): auth = current.auth db = current.db s3db = current.s3db request = current.request s3 = current.response.s3 T = current.T req_vars = request.vars tour_id = req_vars.tour # Now see if the details are on the database for this user tour = None user_id = None if auth.is_logged_in(): user_id = auth.s3_logged_in_person() # Find out if 
the user has done this tour before utable = s3db.tour_user uquery = (utable.person_id == user_id) & \ (utable.tour_config_id == tour_id) tour = db(uquery).select(utable.id, utable.completed, utable.place, utable.resume, limitby=(0, 1)).first() # If the tour has just been started (from the menu) then # it may be necessary to redirect to a different controller # @todo: does place need to be changed to controller and function? if not req_vars.tour_running: if (tour and not tour.completed and tour.place != request.controller): redirect("%s?tour=%s" %(tour.resume, tour_id)) # get the details from the database dtable = s3db.tour_details dquery = (dtable.tour_config_id == tour_id) &\ (dtable.controller == request.controller) &\ (dtable.function == request.function) details = db(dquery).select(dtable.args, dtable.tip_title, dtable.tip_details, dtable.button, dtable.tip_location, dtable.html_id, dtable.html_class, dtable.datatable_id, dtable.datatable_row, dtable.redirect, orderby=(dtable.posn) ) # tour_filename = os.path.join(request.folder, # "private", # "tour", # tour_name) # tour_file = open (tour_filename, "rb") # # now open the details of the guided_tour into a dictionary # import csv # tour_details = csv.DictReader(tour_file, skipinitialspace=True) # load the list of tour items in the html joyride_OL = OL(_id="joyrideID_1") pre_step_data = [] post_step_data = [] post_ride_data = [] last_row = None last_used = None req_args = request.args cnt = -1 for row in details: if row.args: args = row.args.split(",") else: args = [] # if the page has a nested login form then "login" will be added to # the req_args list so it needs to be added to the args list as well if "login" in req_args: if "login" not in args: args.append("login") # The following will capture the actual id used for the req_arg # Example org/organisation/10, where 10 is the id from the database posn = 0 for arg in args: if arg == "dt_id": args[posn] = req_args[posn] posn += 1 # Now check that the tour url 
matches the current url if (args == req_args): cnt += 1 # number of records used in this part of the tour if row.datatable_id: dt_id = row.datatable_id # cols = [] # if "DataTable_columns" in row: # cols = row["DataTable_columns"].split(",") row_num = 0 if row.datatable_row: row_num = row.datatable_row # Now set this up for the pre-processor hook in joyride pre_step_data.append([cnt, dt_id, row_num]) if row.redirect: redirect_row = row.redirect.split(",") if len(redirect_row) >= 3: url = URL(c=redirect_row[0], f=redirect_row[1], args=redirect_row[2:], vars={"tour_running":True, "tour":tour_id} ) if "dt_id" in redirect_row[2]: post_step_data.append([cnt, url, dt_id, row_num]) elif len(redirect_row) == 2: url = URL(c=redirect_row[0], f=redirect_row[1], vars={"tour_running":True, "tour":tour_id} ) post_step_data.append([cnt, url]) else: url = URL(c=redirect_row[0],vars={"tour_running":True, "tour":tour_id}) post_step_data.append([cnt, url]) extra = {} if row.html_id: extra["_data-id"] = row.html_id elif row.html_class: extra["_data-class"] = row.html_class if row.button: extra["_data-button"] = row.button else: extra["_data-button"] = "Next" if row.tip_location: extra["_data-options"] = "tipLocation:%s" % row.tip_location.lower() else: extra["_data-options"] = "tipLocation:right" joyride_OL.append(LI(H2(T(row.tip_title)), P(T(row.tip_details)), **extra ) ) last_used = row last_row = row # The following redirect will be triggered if the user has moved away # from the tour, such as by clicking on a tab. However if a tab # is part of the tour we are unable to determine if they have moved # away or just visiting as part of the tour and so it will continue. 
if len(joyride_OL) == 0: del request.vars.tour redirect(URL(args=req_args, vars=request.vars)) if (user_id != None) and (last_row == last_used): # set up an AJAX call to record that the tour has been completed post_ride_data = [cnt, tour_id] joyride_div = DIV(joyride_OL, _class="hidden") # Add the javascript configuration data from gluon.serializers import json as jsons if pre_step_data: joyride_div.append(INPUT(_type="hidden", _id="prestep_data", _name="prestep_data", _value=jsons(pre_step_data)) ) if post_step_data: joyride_div.append(INPUT(_type="hidden", _id="poststep_data", _name="poststep_data", _value=jsons(post_step_data)) ) if post_ride_data: joyride_div.append(INPUT(_type="hidden", _id="postride_data", _name="postride_data", _value=jsons(post_ride_data)) ) # Now add the details to the tour_user table if user_id != None: if tour == None: # this user has never done this tour before so create a new record utable.insert(person_id = user_id, tour_config_id = tour_id, place = request.controller, resume = request.url) else: # the user has done some of this tour so update the record db(uquery).update(place = request.controller, resume = request.url, completed = False) output["joyride_div"] = joyride_div if s3.debug: appname = request.application s3.scripts.append("/%s/static/scripts/jquery.joyride.js" % appname) s3.scripts.append("/%s/static/scripts/S3/s3.guidedtour.js" % appname) s3.stylesheets.append("plugins/joyride.min.css") else: s3.scripts.append("/%s/static/scripts/S3/s3.guidedtour.min.js" % request.application) s3.stylesheets.append("plugins/joyride.css") return output # END =========================================================================
true
true
f70c5edb71a501cc0a19546d7d5381683b20c5ec
5,403
py
Python
rasa/core/channels/rest.py
mukulbalodi/rasa
3126ef1148c165f2402f3c7203138d429e46c68c
[ "Apache-2.0" ]
null
null
null
rasa/core/channels/rest.py
mukulbalodi/rasa
3126ef1148c165f2402f3c7203138d429e46c68c
[ "Apache-2.0" ]
null
null
null
rasa/core/channels/rest.py
mukulbalodi/rasa
3126ef1148c165f2402f3c7203138d429e46c68c
[ "Apache-2.0" ]
1
2022-02-22T12:35:19.000Z
2022-02-22T12:35:19.000Z
import asyncio import inspect import json import logging from asyncio import Queue, CancelledError from sanic import Blueprint, response from sanic.request import Request from sanic.response import HTTPResponse, ResponseStream from typing import Text, Dict, Any, Optional, Callable, Awaitable, NoReturn, Union import rasa.utils.endpoints from rasa.core.channels.channel import ( InputChannel, CollectingOutputChannel, UserMessage, ) logger = logging.getLogger(__name__) class RestInput(InputChannel): """A custom http input channel. This implementation is the basis for a custom implementation of a chat frontend. You can customize this to send messages to Rasa and retrieve responses from the assistant.""" @classmethod def name(cls) -> Text: return "rest" @staticmethod async def on_message_wrapper( on_new_message: Callable[[UserMessage], Awaitable[Any]], text: Text, queue: Queue, sender_id: Text, input_channel: Text, metadata: Optional[Dict[Text, Any]], ) -> None: collector = QueueOutputChannel(queue) message = UserMessage( text, collector, sender_id, input_channel=input_channel, metadata=metadata ) await on_new_message(message) await queue.put("DONE") async def _extract_sender(self, req: Request) -> Optional[Text]: return req.json.get("sender", None) # noinspection PyMethodMayBeStatic def _extract_message(self, req: Request) -> Optional[Text]: return req.json.get("message", None) def _extract_input_channel(self, req: Request) -> Text: return req.json.get("input_channel") or self.name() def stream_response( self, on_new_message: Callable[[UserMessage], Awaitable[None]], text: Text, sender_id: Text, input_channel: Text, metadata: Optional[Dict[Text, Any]], ) -> Callable[[Any], Awaitable[None]]: async def stream(resp: Any) -> None: q = Queue() task = asyncio.ensure_future( self.on_message_wrapper( on_new_message, text, q, sender_id, input_channel, metadata ) ) while True: result = await q.get() if result == "DONE": break else: await resp.write(json.dumps(result) + "\n") 
await task return stream def blueprint( self, on_new_message: Callable[[UserMessage], Awaitable[None]] ) -> Blueprint: custom_webhook = Blueprint( "custom_webhook_{}".format(type(self).__name__), inspect.getmodule(self).__name__, ) # noinspection PyUnusedLocal @custom_webhook.route("/", methods=["GET"]) async def health(request: Request) -> HTTPResponse: return response.json({"status": "ok"}) @custom_webhook.route("/webhook", methods=["POST"]) async def receive(request: Request) -> Union[ResponseStream, HTTPResponse]: sender_id = await self._extract_sender(request) text = self._extract_message(request) should_use_stream = rasa.utils.endpoints.bool_arg( request, "stream", default=False ) input_channel = self._extract_input_channel(request) metadata = self.get_metadata(request) if should_use_stream: return response.stream( self.stream_response( on_new_message, text, sender_id, input_channel, metadata ), content_type="text/event-stream", ) else: collector = CollectingOutputChannel() # noinspection PyBroadException try: await on_new_message( UserMessage( text, collector, sender_id, input_channel=input_channel, metadata=metadata, ) ) except CancelledError: logger.error( f"Message handling timed out for " f"user message '{text}'." ) except Exception: logger.exception( f"An exception occured while handling " f"user message '{text}'." 
) return response.json(collector.messages) return custom_webhook class QueueOutputChannel(CollectingOutputChannel): """Output channel that collects send messages in a list (doesn't send them anywhere, just collects them).""" @classmethod def name(cls) -> Text: return "queue" # noinspection PyMissingConstructor def __init__(self, message_queue: Optional[Queue] = None) -> None: super().__init__() self.messages = Queue() if not message_queue else message_queue def latest_output(self) -> NoReturn: raise NotImplementedError("A queue doesn't allow to peek at messages.") async def _persist_message(self, message: Dict[Text, Any]) -> None: await self.messages.put(message)
33.351852
86
0.581529
import asyncio import inspect import json import logging from asyncio import Queue, CancelledError from sanic import Blueprint, response from sanic.request import Request from sanic.response import HTTPResponse, ResponseStream from typing import Text, Dict, Any, Optional, Callable, Awaitable, NoReturn, Union import rasa.utils.endpoints from rasa.core.channels.channel import ( InputChannel, CollectingOutputChannel, UserMessage, ) logger = logging.getLogger(__name__) class RestInput(InputChannel): @classmethod def name(cls) -> Text: return "rest" @staticmethod async def on_message_wrapper( on_new_message: Callable[[UserMessage], Awaitable[Any]], text: Text, queue: Queue, sender_id: Text, input_channel: Text, metadata: Optional[Dict[Text, Any]], ) -> None: collector = QueueOutputChannel(queue) message = UserMessage( text, collector, sender_id, input_channel=input_channel, metadata=metadata ) await on_new_message(message) await queue.put("DONE") async def _extract_sender(self, req: Request) -> Optional[Text]: return req.json.get("sender", None) def _extract_message(self, req: Request) -> Optional[Text]: return req.json.get("message", None) def _extract_input_channel(self, req: Request) -> Text: return req.json.get("input_channel") or self.name() def stream_response( self, on_new_message: Callable[[UserMessage], Awaitable[None]], text: Text, sender_id: Text, input_channel: Text, metadata: Optional[Dict[Text, Any]], ) -> Callable[[Any], Awaitable[None]]: async def stream(resp: Any) -> None: q = Queue() task = asyncio.ensure_future( self.on_message_wrapper( on_new_message, text, q, sender_id, input_channel, metadata ) ) while True: result = await q.get() if result == "DONE": break else: await resp.write(json.dumps(result) + "\n") await task return stream def blueprint( self, on_new_message: Callable[[UserMessage], Awaitable[None]] ) -> Blueprint: custom_webhook = Blueprint( "custom_webhook_{}".format(type(self).__name__), inspect.getmodule(self).__name__, ) 
@custom_webhook.route("/", methods=["GET"]) async def health(request: Request) -> HTTPResponse: return response.json({"status": "ok"}) @custom_webhook.route("/webhook", methods=["POST"]) async def receive(request: Request) -> Union[ResponseStream, HTTPResponse]: sender_id = await self._extract_sender(request) text = self._extract_message(request) should_use_stream = rasa.utils.endpoints.bool_arg( request, "stream", default=False ) input_channel = self._extract_input_channel(request) metadata = self.get_metadata(request) if should_use_stream: return response.stream( self.stream_response( on_new_message, text, sender_id, input_channel, metadata ), content_type="text/event-stream", ) else: collector = CollectingOutputChannel() try: await on_new_message( UserMessage( text, collector, sender_id, input_channel=input_channel, metadata=metadata, ) ) except CancelledError: logger.error( f"Message handling timed out for " f"user message '{text}'." ) except Exception: logger.exception( f"An exception occured while handling " f"user message '{text}'." ) return response.json(collector.messages) return custom_webhook class QueueOutputChannel(CollectingOutputChannel): @classmethod def name(cls) -> Text: return "queue" def __init__(self, message_queue: Optional[Queue] = None) -> None: super().__init__() self.messages = Queue() if not message_queue else message_queue def latest_output(self) -> NoReturn: raise NotImplementedError("A queue doesn't allow to peek at messages.") async def _persist_message(self, message: Dict[Text, Any]) -> None: await self.messages.put(message)
true
true
f70c5fcd09a063102113a02480cf3a39f1d26fcb
7,804
py
Python
Support/Fuego/Pythia/pythia-0.5/packages/fuego/fuego/serialization/chemkin/pickle/ChemkinPickler.py
balos1/PelePhysics
d01190cc7b0eaad4ec96fac573034ccb485f0e9f
[ "BSD-3-Clause-LBNL" ]
31
2018-11-21T01:49:06.000Z
2022-03-30T03:41:43.000Z
Support/Fuego/Pythia/pythia-0.5/packages/fuego/fuego/serialization/chemkin/pickle/ChemkinPickler.py
balos1/PelePhysics
d01190cc7b0eaad4ec96fac573034ccb485f0e9f
[ "BSD-3-Clause-LBNL" ]
123
2019-03-12T22:27:29.000Z
2022-03-29T17:00:04.000Z
Support/Fuego/Pythia/pythia-0.5/packages/fuego/fuego/serialization/chemkin/pickle/ChemkinPickler.py
sundials-codes/PelePhysics
5624f83a04f43aa95288be9d8a7bb372a4adefe6
[ "BSD-3-Clause-LBNL" ]
32
2018-11-05T11:51:59.000Z
2022-03-29T13:09:32.000Z
#!/usr/bin/env python # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # # Michael A.G. Aivazis # California Institute of Technology # (C) 1998-2003 All Rights Reserved # # <LicenseText> # # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # from weaver.mills.LineMill import LineMill class ChemkinPickler(LineMill): names = ["chemkin"] def _renderDocument(self, mechanism, options=None): self.pickleElementSection(mechanism) self.pickleSpeciesSection(mechanism) self.pickleThermoSection(mechanism) self.pickleReactionSection(mechanism) return def pickleElementSection(self, mechanism): self._rep += ["", "! Element section", "", "Elements"] line = " " * 4 for element in mechanism.element(): symbol = element.symbol if len(line) + len(symbol) > 75: self._rep.append(line) line = " " * 4 line += " " + symbol self._rep.append(line) self._rep.append("End") return def pickleSpeciesSection(self, mechanism): self._rep += ["", "! Species section", "", "Species"] line = " " * 4 for species in mechanism.species(): symbol = species.symbol if len(line) + len(symbol) > 75: self._rep.append(line) line = " " * 4 line += " " + symbol self._rep.append(line) self._rep.append("End") return def pickleThermoSection(self, mechanism): self._rep += ["", "! 
Thermo section", ""] line = "Thermo" if mechanism.thermoAll(): line += " All" self._rep.append(line) if mechanism.thermoRange(): line = "%15.8g " * 3 % mechanism.thermoRange() self._rep.append(line) format = "%15.8e" * 5 + "%5d" for species in mechanism.species(): if not species.thermo: continue self._rep.append("!") # compute line 1 line_1 = "%-18s" % species.symbol + " " * 6 composition = [ "%-2s%3d" % (element, factor) for element, factor in species.composition ] line_1 += "".join(composition[: min(len(composition), 4)]) line_1 += (" " * 5) * (max(0, 4 - len(composition))) line_1 += species.phase.upper() line_1 += "%10.3f" % species.thermo[1].lowT line_1 += "%10.3f" % species.thermo[0].highT if species.thermo[1].highT != species.thermo[0].lowT: import journal journal.firewall("fuego").hit("bad mechanism") continue if species.thermo[1].lowT: line_1 += "%10.3f" % species.thermo[1].lowT else: line_1 += " " * 10 if len(composition) >= 5: line_1 += "%-2s%2d" % composition[4] else: line_1 += " " * 4 line_1 += "1" self._rep.append(line_1) # get the thermo parametrization highParameters = species.thermo[0].parameters lowParameters = species.thermo[1].parameters # compute line 2 line_2 = "" line_2 += "%15.8e" % highParameters[0] line_2 += "%15.8e" % highParameters[1] line_2 += "%15.8e" % highParameters[2] line_2 += "%15.8e" % highParameters[3] line_2 += "%15.8e" % highParameters[4] line_2 += " " * 4 + "2" self._rep.append(line_2) # compute line 3 line_3 = "" line_3 += "%15.8e" % highParameters[5] line_3 += "%15.8e" % highParameters[6] line_3 += "%15.8e" % lowParameters[0] line_3 += "%15.8e" % lowParameters[1] line_3 += "%15.8e" % lowParameters[2] line_3 += " " * 4 + "3" self._rep.append(line_3) # compute line 4 line_4 = "" line_4 += "%15.8e" % lowParameters[3] line_4 += "%15.8e" % lowParameters[4] line_4 += "%15.8e" % lowParameters[5] line_4 += "%15.8e" % lowParameters[6] line_4 += " " * 15 line_4 += " " * 4 + "4" self._rep.append(line_4) self._rep.append("") 
self._rep.append("End") return def pickleReactionSection(self, mechanism): self._rep.append("") self._rep.append("! Reaction section") self._rep.append("") self._rep.append("Reactions") i = 0 for reaction in mechanism.reaction(): i += 1 self.pickleReaction(reaction, i) self._rep.append("") self._rep.append("End") return def pickleReaction(self, reaction, i): lines = [] form = _printReagents(reaction, reaction.reactants) if reaction.reversible: form += " <=> " else: form += " => " form += _printReagents(reaction, reaction.products) line = "%-40s" % form line += "%10.3g" % reaction.arrhenius[0] line += "%10.3g" % reaction.arrhenius[1] line += "%10.3g" % reaction.arrhenius[2] line += " " * 5 + "! %5d" % i lines.append(line) if reaction.efficiencies: efficiencies = " " for species, coefficient in reaction.efficiencies: efficiencies += "%s / %4.2f / " % ( species, coefficient + 1, ) # remember adjustment lines.append(efficiencies) if reaction.low: low = " LOW /%s/" % _printParameters(reaction.low) lines.append(low) if reaction.troe: troe = " TROE /%s/" % _printParameters(reaction.troe) lines.append(troe) if reaction.sri: sri = " SRI /%s/" % _printParameters(reaction.sri) lines.append(sri) if reaction.rev: rev = " REV /%s/" % _printParameters(reaction.rev) lines.append(rev) if reaction.lt: lt = " LT /%s/" % _printParameters(reaction.lt) lines.append(lt) if reaction.rlt: rlt = " RLT /%s/" % _printParameters(reaction.rlt) lines.append(rlt) if reaction.radiation: radiation = " HV / %g /" % reaction.radiation lines.append(radiation) if reaction.duplicate: duplicate = " DUPLICATE" lines.append(duplicate) self._rep += lines return lines def __init__(self, options=None): LineMill.__init__(self, "!", _FIRSTLINE) return # helpers _FIRSTLINE = "! 
-*- chemkin -*-" def _printReagents(reaction, composition): terms = [] for species, factor in composition: str = "" if factor != 1: str += "%d " % factor str += species terms.append(str) line = " + ".join(terms) if reaction.thirdBody: species, factor = reaction.thirdBody if species == "<mixture>": species = "M" if reaction.falloff: line += " (+" else: line += " + " if factor != 1: line += "%d" % factor line += species if reaction.falloff: line += ")" return line def _printParameters(ptuple): format = "%10.3e " * len(ptuple) return format % ptuple # version __id__ = "$Id$" # End of file
25.92691
82
0.481164
from weaver.mills.LineMill import LineMill class ChemkinPickler(LineMill): names = ["chemkin"] def _renderDocument(self, mechanism, options=None): self.pickleElementSection(mechanism) self.pickleSpeciesSection(mechanism) self.pickleThermoSection(mechanism) self.pickleReactionSection(mechanism) return def pickleElementSection(self, mechanism): self._rep += ["", "! Element section", "", "Elements"] line = " " * 4 for element in mechanism.element(): symbol = element.symbol if len(line) + len(symbol) > 75: self._rep.append(line) line = " " * 4 line += " " + symbol self._rep.append(line) self._rep.append("End") return def pickleSpeciesSection(self, mechanism): self._rep += ["", "! Species section", "", "Species"] line = " " * 4 for species in mechanism.species(): symbol = species.symbol if len(line) + len(symbol) > 75: self._rep.append(line) line = " " * 4 line += " " + symbol self._rep.append(line) self._rep.append("End") return def pickleThermoSection(self, mechanism): self._rep += ["", "! 
Thermo section", ""] line = "Thermo" if mechanism.thermoAll(): line += " All" self._rep.append(line) if mechanism.thermoRange(): line = "%15.8g " * 3 % mechanism.thermoRange() self._rep.append(line) format = "%15.8e" * 5 + "%5d" for species in mechanism.species(): if not species.thermo: continue self._rep.append("!") line_1 = "%-18s" % species.symbol + " " * 6 composition = [ "%-2s%3d" % (element, factor) for element, factor in species.composition ] line_1 += "".join(composition[: min(len(composition), 4)]) line_1 += (" " * 5) * (max(0, 4 - len(composition))) line_1 += species.phase.upper() line_1 += "%10.3f" % species.thermo[1].lowT line_1 += "%10.3f" % species.thermo[0].highT if species.thermo[1].highT != species.thermo[0].lowT: import journal journal.firewall("fuego").hit("bad mechanism") continue if species.thermo[1].lowT: line_1 += "%10.3f" % species.thermo[1].lowT else: line_1 += " " * 10 if len(composition) >= 5: line_1 += "%-2s%2d" % composition[4] else: line_1 += " " * 4 line_1 += "1" self._rep.append(line_1) highParameters = species.thermo[0].parameters lowParameters = species.thermo[1].parameters line_2 = "" line_2 += "%15.8e" % highParameters[0] line_2 += "%15.8e" % highParameters[1] line_2 += "%15.8e" % highParameters[2] line_2 += "%15.8e" % highParameters[3] line_2 += "%15.8e" % highParameters[4] line_2 += " " * 4 + "2" self._rep.append(line_2) line_3 = "" line_3 += "%15.8e" % highParameters[5] line_3 += "%15.8e" % highParameters[6] line_3 += "%15.8e" % lowParameters[0] line_3 += "%15.8e" % lowParameters[1] line_3 += "%15.8e" % lowParameters[2] line_3 += " " * 4 + "3" self._rep.append(line_3) line_4 = "" line_4 += "%15.8e" % lowParameters[3] line_4 += "%15.8e" % lowParameters[4] line_4 += "%15.8e" % lowParameters[5] line_4 += "%15.8e" % lowParameters[6] line_4 += " " * 15 line_4 += " " * 4 + "4" self._rep.append(line_4) self._rep.append("") self._rep.append("End") return def pickleReactionSection(self, mechanism): self._rep.append("") 
self._rep.append("! Reaction section") self._rep.append("") self._rep.append("Reactions") i = 0 for reaction in mechanism.reaction(): i += 1 self.pickleReaction(reaction, i) self._rep.append("") self._rep.append("End") return def pickleReaction(self, reaction, i): lines = [] form = _printReagents(reaction, reaction.reactants) if reaction.reversible: form += " <=> " else: form += " => " form += _printReagents(reaction, reaction.products) line = "%-40s" % form line += "%10.3g" % reaction.arrhenius[0] line += "%10.3g" % reaction.arrhenius[1] line += "%10.3g" % reaction.arrhenius[2] line += " " * 5 + "! %5d" % i lines.append(line) if reaction.efficiencies: efficiencies = " " for species, coefficient in reaction.efficiencies: efficiencies += "%s / %4.2f / " % ( species, coefficient + 1, ) lines.append(efficiencies) if reaction.low: low = " LOW /%s/" % _printParameters(reaction.low) lines.append(low) if reaction.troe: troe = " TROE /%s/" % _printParameters(reaction.troe) lines.append(troe) if reaction.sri: sri = " SRI /%s/" % _printParameters(reaction.sri) lines.append(sri) if reaction.rev: rev = " REV /%s/" % _printParameters(reaction.rev) lines.append(rev) if reaction.lt: lt = " LT /%s/" % _printParameters(reaction.lt) lines.append(lt) if reaction.rlt: rlt = " RLT /%s/" % _printParameters(reaction.rlt) lines.append(rlt) if reaction.radiation: radiation = " HV / %g /" % reaction.radiation lines.append(radiation) if reaction.duplicate: duplicate = " DUPLICATE" lines.append(duplicate) self._rep += lines return lines def __init__(self, options=None): LineMill.__init__(self, "!", _FIRSTLINE) return _FIRSTLINE = "! 
-*- chemkin -*-" def _printReagents(reaction, composition): terms = [] for species, factor in composition: str = "" if factor != 1: str += "%d " % factor str += species terms.append(str) line = " + ".join(terms) if reaction.thirdBody: species, factor = reaction.thirdBody if species == "<mixture>": species = "M" if reaction.falloff: line += " (+" else: line += " + " if factor != 1: line += "%d" % factor line += species if reaction.falloff: line += ")" return line def _printParameters(ptuple): format = "%10.3e " * len(ptuple) return format % ptuple __id__ = "$Id$"
true
true
f70c60815fe4aa5aa2583779ef34546134b65f1c
4,527
py
Python
main.py
Mattstacey321/ulauncher-vscodium-recent
19d0ddbcfbc384ae85c878f2f2683df57fb040f4
[ "MIT" ]
null
null
null
main.py
Mattstacey321/ulauncher-vscodium-recent
19d0ddbcfbc384ae85c878f2f2683df57fb040f4
[ "MIT" ]
null
null
null
main.py
Mattstacey321/ulauncher-vscodium-recent
19d0ddbcfbc384ae85c878f2f2683df57fb040f4
[ "MIT" ]
null
null
null
import os import os.path import json import pathlib from types import prepare_class from ulauncher.api.client.Extension import Extension from ulauncher.api.client.EventListener import EventListener from ulauncher.api.shared.event import ( KeywordQueryEvent, ItemEnterEvent, PreferencesEvent, PreferencesUpdateEvent, ) from ulauncher.api.shared.item.ExtensionResultItem import ExtensionResultItem from ulauncher.api.shared.item.ExtensionSmallResultItem import ExtensionSmallResultItem from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction from ulauncher.api.shared.action.HideWindowAction import HideWindowAction from ulauncher.api.shared.action.ExtensionCustomAction import ExtensionCustomAction from fuzzywuzzy import process, fuzz class Utils: @staticmethod def get_path(filename, from_home=False): base_dir = pathlib.Path.home() if from_home else pathlib.Path( __file__).parent.absolute() return os.path.join(base_dir, filename) class Code: open_command_paths = ["/opt/vscodium-bin"] def get_installed_path(self): for path in self.open_command_paths: if os.path.exists(path): return path return False def is_installed(self): return bool(self.installed_path) def get_recents(self): recents = [] storage = json.load( open(Utils.get_path(".config/VSCodium/storage.json", True), "r")) openedPaths = storage["openedPathsList"]["entries"] for path in openedPaths: folder = "folderUri" in path uri = path["folderUri"] if folder else path["fileUri"] label = path["label"] if "label" in path else uri.split("/")[-1] recents.append({ "folder": folder, "uri": uri, "label": label }) return recents def open_vscode(self, recent): if not self.is_installed(): return option = "--folder-uri" if recent["folder"] else "--file-uri" os.system(f"{self.installed_path} {option} {recent['uri']}") def __init__(self): self.installed_path = self.get_installed_path() class CodeExtension(Extension): keyword = None code = None def __init__(self): super(CodeExtension, self).__init__() 
self.subscribe(KeywordQueryEvent, KeywordQueryEventListener()) self.subscribe(ItemEnterEvent, ItemEnterEventListener()) self.subscribe(PreferencesEvent, PreferencesEventListener()) self.subscribe(PreferencesUpdateEvent, PreferencesUpdateEventListener()) self.code = Code() def get_ext_result_items(self, query): query = query.lower() if query else "" recents = self.code.get_recents() items = [] data = [] label_matches = process.extract(query, choices=map(lambda c: c["label"], recents), limit=20, scorer=fuzz.partial_ratio) uri_matches = process.extract(query, choices=map(lambda c: c["uri"], recents), limit=20, scorer=fuzz.partial_ratio) for match in label_matches: recent = next((c for c in recents if c["label"] == match[0]), None) if (recent is not None and match[1] > 95): data.append(recent) for match in uri_matches: recent = next((c for c in recents if c["uri"] == match[0]), None) existing = next((c for c in data if c["uri"] == recent["uri"]), None) if (recent is not None and existing is None): data.append(recent) for recent in data[:20]: items.append( ExtensionSmallResultItem( icon=Utils.get_path( f"images/{'folder' if recent['folder'] else 'file'}.svg"), name=recent["label"], on_enter=ExtensionCustomAction(recent), ) ) return items class KeywordQueryEventListener(EventListener): def on_event(self, event, extension): items = [] if not extension.code.is_installed(): items.append( ExtensionResultItem( icon=Utils.get_path("images/icon.svg"), name="No VSCodium?", description="Can't find the VSCodium's `codium` command in your system :(", highlightable=False, on_enter=HideWindowAction(), ) ) return RenderResultListAction(items) argument = event.get_argument() or "" items.extend(extension.get_ext_result_items(argument)) return RenderResultListAction(items) class ItemEnterEventListener(EventListener): def on_event(self, event, extension): recent = event.get_data() extension.code.open_vscode(recent) class PreferencesEventListener(EventListener): def on_event(self, event, 
extension): extension.keyword = event.preferences["code_kw"] class PreferencesUpdateEventListener(EventListener): def on_event(self, event, extension): if event.id == "code_kw": extension.keyword = event.new_value if __name__ == "__main__": CodeExtension().run()
30.587838
121
0.737133
import os import os.path import json import pathlib from types import prepare_class from ulauncher.api.client.Extension import Extension from ulauncher.api.client.EventListener import EventListener from ulauncher.api.shared.event import ( KeywordQueryEvent, ItemEnterEvent, PreferencesEvent, PreferencesUpdateEvent, ) from ulauncher.api.shared.item.ExtensionResultItem import ExtensionResultItem from ulauncher.api.shared.item.ExtensionSmallResultItem import ExtensionSmallResultItem from ulauncher.api.shared.action.RenderResultListAction import RenderResultListAction from ulauncher.api.shared.action.HideWindowAction import HideWindowAction from ulauncher.api.shared.action.ExtensionCustomAction import ExtensionCustomAction from fuzzywuzzy import process, fuzz class Utils: @staticmethod def get_path(filename, from_home=False): base_dir = pathlib.Path.home() if from_home else pathlib.Path( __file__).parent.absolute() return os.path.join(base_dir, filename) class Code: open_command_paths = ["/opt/vscodium-bin"] def get_installed_path(self): for path in self.open_command_paths: if os.path.exists(path): return path return False def is_installed(self): return bool(self.installed_path) def get_recents(self): recents = [] storage = json.load( open(Utils.get_path(".config/VSCodium/storage.json", True), "r")) openedPaths = storage["openedPathsList"]["entries"] for path in openedPaths: folder = "folderUri" in path uri = path["folderUri"] if folder else path["fileUri"] label = path["label"] if "label" in path else uri.split("/")[-1] recents.append({ "folder": folder, "uri": uri, "label": label }) return recents def open_vscode(self, recent): if not self.is_installed(): return option = "--folder-uri" if recent["folder"] else "--file-uri" os.system(f"{self.installed_path} {option} {recent['uri']}") def __init__(self): self.installed_path = self.get_installed_path() class CodeExtension(Extension): keyword = None code = None def __init__(self): super(CodeExtension, self).__init__() 
self.subscribe(KeywordQueryEvent, KeywordQueryEventListener()) self.subscribe(ItemEnterEvent, ItemEnterEventListener()) self.subscribe(PreferencesEvent, PreferencesEventListener()) self.subscribe(PreferencesUpdateEvent, PreferencesUpdateEventListener()) self.code = Code() def get_ext_result_items(self, query): query = query.lower() if query else "" recents = self.code.get_recents() items = [] data = [] label_matches = process.extract(query, choices=map(lambda c: c["label"], recents), limit=20, scorer=fuzz.partial_ratio) uri_matches = process.extract(query, choices=map(lambda c: c["uri"], recents), limit=20, scorer=fuzz.partial_ratio) for match in label_matches: recent = next((c for c in recents if c["label"] == match[0]), None) if (recent is not None and match[1] > 95): data.append(recent) for match in uri_matches: recent = next((c for c in recents if c["uri"] == match[0]), None) existing = next((c for c in data if c["uri"] == recent["uri"]), None) if (recent is not None and existing is None): data.append(recent) for recent in data[:20]: items.append( ExtensionSmallResultItem( icon=Utils.get_path( f"images/{'folder' if recent['folder'] else 'file'}.svg"), name=recent["label"], on_enter=ExtensionCustomAction(recent), ) ) return items class KeywordQueryEventListener(EventListener): def on_event(self, event, extension): items = [] if not extension.code.is_installed(): items.append( ExtensionResultItem( icon=Utils.get_path("images/icon.svg"), name="No VSCodium?", description="Can't find the VSCodium's `codium` command in your system :(", highlightable=False, on_enter=HideWindowAction(), ) ) return RenderResultListAction(items) argument = event.get_argument() or "" items.extend(extension.get_ext_result_items(argument)) return RenderResultListAction(items) class ItemEnterEventListener(EventListener): def on_event(self, event, extension): recent = event.get_data() extension.code.open_vscode(recent) class PreferencesEventListener(EventListener): def on_event(self, event, 
extension): extension.keyword = event.preferences["code_kw"] class PreferencesUpdateEventListener(EventListener): def on_event(self, event, extension): if event.id == "code_kw": extension.keyword = event.new_value if __name__ == "__main__": CodeExtension().run()
true
true
f70c60bcf9f085a2a93faecbc3574b70bb4fe4af
1,457
py
Python
tools/accuracy_checker/accuracy_checker/postprocessor/translate_3d_poses.py
AnthonyQuantum/open_model_zoo
7d235755e2d17f6186b11243a169966e4f05385a
[ "Apache-2.0" ]
4
2021-04-21T02:38:04.000Z
2021-10-13T12:15:33.000Z
tools/accuracy_checker/accuracy_checker/postprocessor/translate_3d_poses.py
AnthonyQuantum/open_model_zoo
7d235755e2d17f6186b11243a169966e4f05385a
[ "Apache-2.0" ]
6
2020-11-13T19:02:47.000Z
2022-03-12T00:43:24.000Z
tools/accuracy_checker/accuracy_checker/postprocessor/translate_3d_poses.py
AnthonyQuantum/open_model_zoo
7d235755e2d17f6186b11243a169966e4f05385a
[ "Apache-2.0" ]
null
null
null
""" Copyright (c) 2019 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from ..representation import PoseEstimation3dAnnotation, PoseEstimation3dPrediction from .postprocessor import Postprocessor class Translate3dPoses(Postprocessor): __provider__ = 'translate_3d_poses' annotation_types = (PoseEstimation3dAnnotation,) prediction_types = (PoseEstimation3dPrediction,) def process_image(self, annotations, predictions): for batch_id, prediction in enumerate(predictions): for pose_id in range(prediction.size): translation = prediction.translations[pose_id] translation[2] *= annotations[batch_id].fx if annotations[batch_id] is not None else 1 prediction.x_3d_values[pose_id] += translation[0] prediction.y_3d_values[pose_id] += translation[1] prediction.z_3d_values[pose_id] += translation[2] return annotations, predictions
40.472222
102
0.739876
from ..representation import PoseEstimation3dAnnotation, PoseEstimation3dPrediction from .postprocessor import Postprocessor class Translate3dPoses(Postprocessor): __provider__ = 'translate_3d_poses' annotation_types = (PoseEstimation3dAnnotation,) prediction_types = (PoseEstimation3dPrediction,) def process_image(self, annotations, predictions): for batch_id, prediction in enumerate(predictions): for pose_id in range(prediction.size): translation = prediction.translations[pose_id] translation[2] *= annotations[batch_id].fx if annotations[batch_id] is not None else 1 prediction.x_3d_values[pose_id] += translation[0] prediction.y_3d_values[pose_id] += translation[1] prediction.z_3d_values[pose_id] += translation[2] return annotations, predictions
true
true
f70c60debfd9c884bf61bf9fe74a7c883dea56e0
1,078
py
Python
get_similar_words_triples.py
t2hk/scdv_glove_elasticsearch
41cd336decf1e14e77439caaa26f64edf28ce42b
[ "Apache-2.0" ]
2
2020-01-07T15:44:04.000Z
2020-02-28T08:03:15.000Z
get_similar_words_triples.py
t2hk/scdv_glove_elasticsearch
41cd336decf1e14e77439caaa26f64edf28ce42b
[ "Apache-2.0" ]
null
null
null
get_similar_words_triples.py
t2hk/scdv_glove_elasticsearch
41cd336decf1e14e77439caaa26f64edf28ce42b
[ "Apache-2.0" ]
null
null
null
from gensim.models import KeyedVectors import numpy as np import matplotlib.pyplot as plt import matplotlib as mpl import matplotlib.font_manager as fm import pandas as pd glove_vector_file = "vectors.txt" gensim_glove_vector_file = "gensim_glove_vectors.txt" top_k = 10 words_triple_file = 'similarity_words.ttl' # GloVeの単語ベクトルファイルを読み込み、単語数とベクトルサイズを付与した処理用のファイルを作成する。 vectors = pd.read_csv(glove_vector_file, delimiter=' ', index_col=0, header=None) vocab_count = vectors.shape[0] # 単語数 num_features = vectors.shape[1] # 次元数 print("単語数:{} 次元数:{}".format(vocab_count, num_features)) glove_vectors = KeyedVectors.load_word2vec_format(gensim_glove_vector_file, binary=False) words = list(glove_vectors.vocab.keys()) sim_words_list = [] with open(words_triple_file, 'w') as f: for word in words: sim_words = glove_vectors.most_similar(word, [], top_k) for sim_word in sim_words: triple = '"{}" owl:equivalentClass "{}"'.format(word, sim_word[0]) sim_words_list.append(triple) f.writelines(triple + '\n') len(sim_words_list)
27.641026
89
0.748609
from gensim.models import KeyedVectors import numpy as np import matplotlib.pyplot as plt import matplotlib as mpl import matplotlib.font_manager as fm import pandas as pd glove_vector_file = "vectors.txt" gensim_glove_vector_file = "gensim_glove_vectors.txt" top_k = 10 words_triple_file = 'similarity_words.ttl' vectors = pd.read_csv(glove_vector_file, delimiter=' ', index_col=0, header=None) vocab_count = vectors.shape[0] num_features = vectors.shape[1] print("単語数:{} 次元数:{}".format(vocab_count, num_features)) glove_vectors = KeyedVectors.load_word2vec_format(gensim_glove_vector_file, binary=False) words = list(glove_vectors.vocab.keys()) sim_words_list = [] with open(words_triple_file, 'w') as f: for word in words: sim_words = glove_vectors.most_similar(word, [], top_k) for sim_word in sim_words: triple = '"{}" owl:equivalentClass "{}"'.format(word, sim_word[0]) sim_words_list.append(triple) f.writelines(triple + '\n') len(sim_words_list)
true
true
f70c614d16242661bb9c6d13d42fa1394aa4231b
17,585
py
Python
extra/osmprovider.py
bebora/LeafRoute
c80ed77789c73f0173c986c5482de4f2503f7cc1
[ "BSD-2-Clause" ]
3
2019-05-26T12:32:20.000Z
2019-10-07T11:29:28.000Z
extra/osmprovider.py
bebora/LeafRoute
c80ed77789c73f0173c986c5482de4f2503f7cc1
[ "BSD-2-Clause" ]
null
null
null
extra/osmprovider.py
bebora/LeafRoute
c80ed77789c73f0173c986c5482de4f2503f7cc1
[ "BSD-2-Clause" ]
6
2019-06-20T10:41:43.000Z
2019-10-10T00:17:09.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- __author__ = "Leonardo Arcari @leonardoarcari" from networkx.relabel import convert_node_labels_to_integers import networkx as nx from pathlib import Path from osgeo import ogr from osgeo import osr from math import cos, sin, asin, sqrt, radians import random ## Modules # Elementary modules from math import radians, cos, sin, asin, sqrt import argparse import copy import json import os speed_limits = {"motorway" :130, "trunk" :110, "primary" :90, "secondary" :70, "tertiary" : 70, "unclassified" : 30, "residential" : 50, "service" : 10, "motorway_link" : 60, "trunk_link" : 60, "primary_link" : 60, "secondary_link": 60, "tertiary_link": 35, "living_street": 5, "pedestrian" : 5, "track" : 5, "road" : 5, "footway" : 5, "steps" : 5, "path" : 5, "cycleway" : 5, "it:urban": 50, "it:rural": 90, "it:motorway": 130, "it:trunk": 110} # Specific modules import xml.sax # parse osm file from pathlib import Path # manage cached tiles banned_tags = [ "footway", "bridleway", "steps", "path", "cycleway", "construction" ] def street_filter(way): ht = way.tags["highway"] if ht in banned_tags: return False return True def haversine(lon1, lat1, lon2, lat2, unit_m = True): """ Calculate the great circle distance between two points on the earth (specified in decimal degrees) default unit : km """ # convert decimal degrees to radians lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2]) # haversine formula dlon = lon2 - lon1 dlat = lat2 - lat1 a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2 c = 2 * asin(sqrt(a)) r = 6371 # Radius of earth in kilometers. 
Use 3956 for miles if (unit_m): r *= 1000 return c * r def download_osm(left, bottom, right, top, proxy = False, proxyHost = "10.0.4.2", proxyPort = "3128", cache = False, cacheTempDir = "/tmp/tmpOSM/", verbose = True): """ Return a filehandle to the downloaded data from osm api.""" import urllib.request # To request the web if 'map' in os.listdir('.'): print("Assuming ./map is the right file") return open('map', 'r') if (cache): ## cached tile filename cachedTileFilename = "osm_map_{:.8f}_{:.8f}_{:.8f}_{:.8f}.map".format(left, bottom, right, top) if (verbose): print("Cached tile filename :", cachedTileFilename) Path(cacheTempDir).mkdir(parents = True, exist_ok = True) ## Create cache path if not exists osmFile = Path(cacheTempDir + cachedTileFilename).resolve() ## Replace the relative cache folder path to absolute path if osmFile.is_file(): # download from the cache folder if (verbose): print("Tile loaded from the cache folder.") fp = urllib.request.urlopen("file://"+str(osmFile)) return fp if (proxy): # configure the urllib request with the proxy proxy_handler = urllib.request.ProxyHandler({'https': 'https://' + proxyHost + ":" + proxyPort, 'http': 'http://' + proxyHost + ":" + proxyPort}) opener = urllib.request.build_opener(proxy_handler) urllib.request.install_opener(opener) # request = "http://api.openstreetmap.org/api/0.6/map?bbox=%f,%f,%f,%f"%(left,bottom,right,top) # request = "http://overpass.osm.rambler.ru/cgi/xapi_meta?*[bbox=%f,%f,%f,%f]"%(left,bottom,right,top) request = "http://www.overpass-api.de/api/xapi_meta?*[bbox=%f,%f,%f,%f]"%(left,bottom,right,top) if (verbose): print("Download the tile from osm web api ... 
in progress") print("Request :", request) fp = urllib.request.urlopen(request) if (verbose): print("OSM Tile downloaded") if (cache): if (verbose): print("Write osm tile in the cache" ) content = fp.read() with open(osmFile, 'wb') as f: f.write(content) if osmFile.is_file(): if (verbose): print("OSM tile written in the cache") fp = urllib.request.urlopen("file://"+str(osmFile)) ## Reload the osm tile from the cache (because fp.read moved the cursor) return fp return fp def read_osm(filename_or_stream, only_roads=True): """Read graph in OSM format from file specified by name or by stream object. Parameters ---------- filename_or_stream : filename or stream object Returns ------- G : Graph Examples -------- >>> G=nx.read_osm(nx.download_osm(-122.33,47.60,-122.31,47.61)) >>> import matplotlib.pyplot as plt >>> plt.plot([G.node[n]['lat']for n in G], [G.node[n]['lon'] for n in G], 'o', color='k') >>> plt.show() """ osm = OSM(filename_or_stream) G = nx.DiGraph() ## Add ways for w in osm.ways.values(): if only_roads and 'highway' not in w.tags: continue if not street_filter(w): continue speed = 50 if 'maxspeed' in w.tags: speed = w.tags['maxspeed'] elif w.tags['highway'] in speed_limits.keys(): speed = speed_limits[w.tags['highway']] if ('oneway' in w.tags): if (w.tags['oneway'] == 'yes'): # ONLY ONE DIRECTION G.add_path(w.nds, id=w.id, speed = speed) else: # BOTH DIRECTION G.add_path(w.nds, id=w.id, speed = speed) G.add_path(w.nds[::-1], id=w.id, speed = speed) else: # BOTH DIRECTION G.add_path(w.nds, id=w.id, speed = speed) G.add_path(w.nds[::-1], id=w.id, speed = speed) ## Complete the used nodes' information for n_id in G.nodes(): n = osm.nodes[n_id] G.node[n_id]['lat'] = n.lat G.node[n_id]['lon'] = n.lon G.node[n_id]['id'] = n.id ## Estimate the length of each way for u,v,d in G.edges(data=True): distance = haversine(G.node[u]['lon'], G.node[u]['lat'], G.node[v]['lon'], G.node[v]['lat'], unit_m = True) # Give a realistic distance estimation (neither EPSG nor 
projection nor reference system are specified) speed = d['speed'] try: time_seconds = distance / (float(speed)*1000) *3600 except ValueError: speed = speed.lower() if 'none' in speed: speed = 50 elif 'mph' in speed or 'mp/h' in speed: speed = ''.join(c for c in speed if c.isdigit()) speed = int(float(speed) * 1.609344) elif 'kmh' in speed or 'km/h' in speed or 'kph' in speed or 'kp/h' in speed: speed = ''.join(c for c in speed if c.isdigit()) elif speed in speed_limits.keys(): speed = speed_limits[speed] else: speed = 50 speed = int(speed) time_seconds = distance / (speed*1000) *3600 G.add_weighted_edges_from([( u, v, time_seconds)], weight='time') return G class Node: def __init__(self, id, lon, lat): self.id = id self.lon = lon self.lat = lat self.tags = {} def __str__(self): return "Node (id : %s) lon : %s, lat : %s "%(self.id, self.lon, self.lat) class Way: def __init__(self, id, osm): self.osm = osm self.id = id self.nds = [] self.tags = {} def split(self, dividers): # slice the node-array using this nifty recursive function def slice_array(ar, dividers): for i in range(1,len(ar)-1): if dividers[ar[i]]>1: left = ar[:i+1] right = ar[i:] rightsliced = slice_array(right, dividers) return [left]+rightsliced return [ar] slices = slice_array(self.nds, dividers) # create a way object for each node-array slice ret = [] i=0 for slice in slices: littleway = copy.copy( self ) littleway.id += "-%d"%i littleway.nds = slice ret.append( littleway ) i += 1 return ret class OSM: def __init__(self, filename_or_stream): """ File can be either a filename or stream/file object.""" nodes = {} ways = {} superself = self class OSMHandler(xml.sax.ContentHandler): @classmethod def setDocumentLocator(self,loc): pass @classmethod def startDocument(self): pass @classmethod def endDocument(self): pass @classmethod def startElement(self, name, attrs): if name=='node': self.currElem = Node(attrs['id'], float(attrs['lon']), float(attrs['lat'])) elif name=='way': self.currElem = 
Way(attrs['id'], superself) elif name=='tag': self.currElem.tags[attrs['k']] = attrs['v'] elif name=='nd': self.currElem.nds.append( attrs['ref'] ) @classmethod def endElement(self,name): if name=='node': nodes[self.currElem.id] = self.currElem elif name=='way': ways[self.currElem.id] = self.currElem @classmethod def characters(self, chars): pass xml.sax.parse(filename_or_stream, OSMHandler) self.nodes = nodes self.ways = ways #count times each node is used node_histogram = dict.fromkeys( self.nodes.keys(), 0 ) for way in self.ways.values(): if len(way.nds) < 2: #if a way has only one node, delete it out of the osm collection del self.ways[way.id] else: for node in way.nds: node_histogram[node] += 1 #use that histogram to split all ways, replacing the member set of ways new_ways = {} for id, way in self.ways.items(): split_ways = way.split(node_histogram) for split_way in split_ways: new_ways[split_way.id] = split_way self.ways = new_ways class MapProvider: """ This is an interface for classes providing data about a geographical map. A MapProvider offers general-information about the map, conversion between source IDs and normalized IDs (i.e. starting from 0). Moreover, it enables map serialization to .gr file format. 
""" def getName(self): raise NotImplementedError() def getNumVertices(self): raise NotImplementedError() def getNumEdges(self): raise NotImplementedError() def getXRange(self): raise NotImplementedError() def getYRange(self): raise NotImplementedError() def getPoint(self, id, targetEPSG): raise NotImplementedError() def getDistanceKm(self, id1, id2): raise NotImplementedError() def toID(self, normalized_id): raise NotImplementedError() def toNormalizedID(self, id): raise NotImplementedError() def getNormalizedVertices(self): raise NotImplementedError() def getNormalizedEdges(self): raise NotImplementedError() def generateRandomP2P(self, n, seed): raise NotImplementedError() def writeP2P(self, path, p2p_seq): raise NotImplementedError() def write(self, path): raise NotImplementedError() def write_coo(self, path): raise NotImplementedError() def writeWkt(self, out_path, alt_paths, targetEPSG): raise NotImplementedError() class OSMProvider(MapProvider): def __init__(self, name, left, bottom, right, top): super().__init__() self.name = name self.left = left self.bottom = bottom self.right = right self.top = top self.srcEPSG = 4326 # WGS84 self.G = read_osm(download_osm(left, bottom, right, top, cache=True)) self.G = convert_node_labels_to_integers(self.G, label_attribute='id') def getName(self): return self.name def getNumVertices(self): return self.G.number_of_nodes() def getNumEdges(self): return self.G.number_of_edges() def getXRange(self): raise NotImplementedError() def getYRange(self): raise NotImplementedError() def getPoint(self, id, targetEPSG): lat = self.G.node[id]['lat'] lon = self.G.node[id]['lon'] # Load source EPSG reference system source = osr.SpatialReference() source.ImportFromEPSG(self.srcEPSG) # Load target EPSG target = osr.SpatialReference() target.ImportFromEPSG(targetEPSG) # Transform coordinates transform = osr.CoordinateTransformation(source, target) point = ogr.CreateGeometryFromWkt('POINT ({} {})'.format(lon, lat)) point.Transform(transform) 
x_prime, y_prime = (point.GetX(), point.GetY()) return (x_prime, y_prime) def getDistanceKm(self, id1, id2): WGS84 = 4326 lon1, lat1 = self.getPoint(id1, WGS84) lon2, lat2 = self.getPoint(id2, WGS84) return self._calc_distance(lat1, lon1, lat2, lon2) def toID(self, normalized_id): return normalized_id def toNormalizedID(self, id): return id def getNormalizedVertices(self): return self.G.nodes() def getNormalizedEdges(self): return self.G.edges() def generateRandomP2P(self, n=1000, seed=None): if seed is not None: random.seed(seed) def sample_node(): return random.randrange(self.getNumVertices()) p2p = [(sample_node(), sample_node()) for _ in range(n)] return p2p def writeP2P(self, path, p2p_seq): p = Path(path) print('[OSMProvider] Writing P2P in ARLib-format to {}...'.format(p)) with open(p, mode='w') as f: for s, t in p2p_seq: f.write('{} {}\n'.format(s, t)) def write(self, path): p = Path(path) print('[OSMProvider] Writing graph in ARLib-format to {}...'.format(p)) with open(p, mode='w') as f: for u, v, w in self.G.edges(data='time'): f.write('{} {} {}\n'.format(u, v, w)) def write_coo(self, path): p = Path(path) WGS84 = 4326 print('[OSMProvider] Writing coordinates in ARLib-format to {}...'.format(p)) with open(p, mode='w') as f: for v in sorted(nx.nodes(self.G)): lon, lat = self.getPoint(v, WGS84) f.write('{} {}\n'.format(lon, lat)) def writeWkt(self, out_path, alt_paths, targetEPSG): p = Path(out_path) print('[OSMProvider] Writing alternative paths in WKT to {}...'.format(p)) # Fill Multiline lines = [] for alt_path in alt_paths: # Fill path Line line = ogr.Geometry(ogr.wkbLineString) for v in alt_path: v = self.toID(v) x, y = self.getPoint(v, targetEPSG) line.AddPoint(x, y) lines.append(line) # Write to out_path with open(p, mode='w') as f: f.write('K;Line\n') for k, line in enumerate(lines): f.write('{};{}\n'.format(k, line.ExportToWkt())) def writeDIMACS(self, path): p = Path(path) print('[OSMProvider] Writing graph in DIMACS-format to {}...'.format(p)) 
with open(p, mode='w') as f: header = ['p sp ' '{} {}\n'.format(self.getNumVertices(), self.getNumEdges())] f.writelines(header) for u, v, w in self.G.edges(data='time'): f.write('a {} {} {}\n'.format(u, v, w)) @staticmethod def _calc_distance(lat1, lon1, lat2, lon2): """ Calculate the great circle distance between two points on the earth (specified in decimal degrees) """ # convert decimal degrees to radians lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2]) # haversine formula dlon = lon2 - lon1 dlat = lat2 - lat1 a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2 c = 2 * asin(sqrt(a)) km = 6371 * c return km if __name__ == "__main__": milan = { "top": 45.555946, "bottom": 45.366724, "left": 9.020613, "right": 9.2979979 } parser = argparse.ArgumentParser() parser.add_argument('-f', action='store', dest='bboxfile', help='File path of the JSON bounding box', default=None) args = parser.parse_args() if args.bboxfile is None: bbox = milan else: with open(args.bboxfile, 'r') as fp: bbox = json.load(fp) maps = OSMProvider('Milan', **bbox) maps.write('/tmp/weights') # maps.writeDIMACS('/tmp/milan_map.gr') maps.write_coo('/tmp/ids') # maps.writeP2P('/tmp/milan.p2p', map.generateRandomP2P(seed=1234))
32.20696
219
0.557805
__author__ = "Leonardo Arcari @leonardoarcari" from networkx.relabel import convert_node_labels_to_integers import networkx as nx from pathlib import Path from osgeo import ogr from osgeo import osr from math import cos, sin, asin, sqrt, radians import random from math import radians, cos, sin, asin, sqrt import argparse import copy import json import os speed_limits = {"motorway" :130, "trunk" :110, "primary" :90, "secondary" :70, "tertiary" : 70, "unclassified" : 30, "residential" : 50, "service" : 10, "motorway_link" : 60, "trunk_link" : 60, "primary_link" : 60, "secondary_link": 60, "tertiary_link": 35, "living_street": 5, "pedestrian" : 5, "track" : 5, "road" : 5, "footway" : 5, "steps" : 5, "path" : 5, "cycleway" : 5, "it:urban": 50, "it:rural": 90, "it:motorway": 130, "it:trunk": 110} import xml.sax from pathlib import Path banned_tags = [ "footway", "bridleway", "steps", "path", "cycleway", "construction" ] def street_filter(way): ht = way.tags["highway"] if ht in banned_tags: return False return True def haversine(lon1, lat1, lon2, lat2, unit_m = True): lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2]) dlon = lon2 - lon1 dlat = lat2 - lat1 a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2 c = 2 * asin(sqrt(a)) r = 6371 if (unit_m): r *= 1000 return c * r def download_osm(left, bottom, right, top, proxy = False, proxyHost = "10.0.4.2", proxyPort = "3128", cache = False, cacheTempDir = "/tmp/tmpOSM/", verbose = True): import urllib.request if 'map' in os.listdir('.'): print("Assuming ./map is the right file") return open('map', 'r') if (cache): cachedTileFilename = "osm_map_{:.8f}_{:.8f}_{:.8f}_{:.8f}.map".format(left, bottom, right, top) if (verbose): print("Cached tile filename :", cachedTileFilename) Path(cacheTempDir).mkdir(parents = True, exist_ok = True) osmFile = Path(cacheTempDir + cachedTileFilename).resolve() if osmFile.is_file(): if (verbose): print("Tile loaded from the cache folder.") fp = 
urllib.request.urlopen("file://"+str(osmFile)) return fp if (proxy): proxy_handler = urllib.request.ProxyHandler({'https': 'https://' + proxyHost + ":" + proxyPort, 'http': 'http://' + proxyHost + ":" + proxyPort}) opener = urllib.request.build_opener(proxy_handler) urllib.request.install_opener(opener) request = "http://www.overpass-api.de/api/xapi_meta?*[bbox=%f,%f,%f,%f]"%(left,bottom,right,top) if (verbose): print("Download the tile from osm web api ... in progress") print("Request :", request) fp = urllib.request.urlopen(request) if (verbose): print("OSM Tile downloaded") if (cache): if (verbose): print("Write osm tile in the cache" ) content = fp.read() with open(osmFile, 'wb') as f: f.write(content) if osmFile.is_file(): if (verbose): print("OSM tile written in the cache") fp = urllib.request.urlopen("file://"+str(osmFile)) return fp return fp def read_osm(filename_or_stream, only_roads=True): osm = OSM(filename_or_stream) G = nx.DiGraph() for w in osm.ways.values(): if only_roads and 'highway' not in w.tags: continue if not street_filter(w): continue speed = 50 if 'maxspeed' in w.tags: speed = w.tags['maxspeed'] elif w.tags['highway'] in speed_limits.keys(): speed = speed_limits[w.tags['highway']] if ('oneway' in w.tags): if (w.tags['oneway'] == 'yes'): G.add_path(w.nds, id=w.id, speed = speed) else: G.add_path(w.nds, id=w.id, speed = speed) G.add_path(w.nds[::-1], id=w.id, speed = speed) else: G.add_path(w.nds, id=w.id, speed = speed) G.add_path(w.nds[::-1], id=w.id, speed = speed) for n_id in G.nodes(): n = osm.nodes[n_id] G.node[n_id]['lat'] = n.lat G.node[n_id]['lon'] = n.lon G.node[n_id]['id'] = n.id ## Estimate the length of each way for u,v,d in G.edges(data=True): distance = haversine(G.node[u]['lon'], G.node[u]['lat'], G.node[v]['lon'], G.node[v]['lat'], unit_m = True) # Give a realistic distance estimation (neither EPSG nor projection nor reference system are specified) speed = d['speed'] try: time_seconds = distance / (float(speed)*1000) *3600 
except ValueError: speed = speed.lower() if 'none' in speed: speed = 50 elif 'mph' in speed or 'mp/h' in speed: speed = ''.join(c for c in speed if c.isdigit()) speed = int(float(speed) * 1.609344) elif 'kmh' in speed or 'km/h' in speed or 'kph' in speed or 'kp/h' in speed: speed = ''.join(c for c in speed if c.isdigit()) elif speed in speed_limits.keys(): speed = speed_limits[speed] else: speed = 50 speed = int(speed) time_seconds = distance / (speed*1000) *3600 G.add_weighted_edges_from([( u, v, time_seconds)], weight='time') return G class Node: def __init__(self, id, lon, lat): self.id = id self.lon = lon self.lat = lat self.tags = {} def __str__(self): return "Node (id : %s) lon : %s, lat : %s "%(self.id, self.lon, self.lat) class Way: def __init__(self, id, osm): self.osm = osm self.id = id self.nds = [] self.tags = {} def split(self, dividers): # slice the node-array using this nifty recursive function def slice_array(ar, dividers): for i in range(1,len(ar)-1): if dividers[ar[i]]>1: left = ar[:i+1] right = ar[i:] rightsliced = slice_array(right, dividers) return [left]+rightsliced return [ar] slices = slice_array(self.nds, dividers) # create a way object for each node-array slice ret = [] i=0 for slice in slices: littleway = copy.copy( self ) littleway.id += "-%d"%i littleway.nds = slice ret.append( littleway ) i += 1 return ret class OSM: def __init__(self, filename_or_stream): nodes = {} ways = {} superself = self class OSMHandler(xml.sax.ContentHandler): @classmethod def setDocumentLocator(self,loc): pass @classmethod def startDocument(self): pass @classmethod def endDocument(self): pass @classmethod def startElement(self, name, attrs): if name=='node': self.currElem = Node(attrs['id'], float(attrs['lon']), float(attrs['lat'])) elif name=='way': self.currElem = Way(attrs['id'], superself) elif name=='tag': self.currElem.tags[attrs['k']] = attrs['v'] elif name=='nd': self.currElem.nds.append( attrs['ref'] ) @classmethod def endElement(self,name): if 
name=='node': nodes[self.currElem.id] = self.currElem elif name=='way': ways[self.currElem.id] = self.currElem @classmethod def characters(self, chars): pass xml.sax.parse(filename_or_stream, OSMHandler) self.nodes = nodes self.ways = ways #count times each node is used node_histogram = dict.fromkeys( self.nodes.keys(), 0 ) for way in self.ways.values(): if len(way.nds) < 2: #if a way has only one node, delete it out of the osm collection del self.ways[way.id] else: for node in way.nds: node_histogram[node] += 1 #use that histogram to split all ways, replacing the member set of ways new_ways = {} for id, way in self.ways.items(): split_ways = way.split(node_histogram) for split_way in split_ways: new_ways[split_way.id] = split_way self.ways = new_ways class MapProvider: def getName(self): raise NotImplementedError() def getNumVertices(self): raise NotImplementedError() def getNumEdges(self): raise NotImplementedError() def getXRange(self): raise NotImplementedError() def getYRange(self): raise NotImplementedError() def getPoint(self, id, targetEPSG): raise NotImplementedError() def getDistanceKm(self, id1, id2): raise NotImplementedError() def toID(self, normalized_id): raise NotImplementedError() def toNormalizedID(self, id): raise NotImplementedError() def getNormalizedVertices(self): raise NotImplementedError() def getNormalizedEdges(self): raise NotImplementedError() def generateRandomP2P(self, n, seed): raise NotImplementedError() def writeP2P(self, path, p2p_seq): raise NotImplementedError() def write(self, path): raise NotImplementedError() def write_coo(self, path): raise NotImplementedError() def writeWkt(self, out_path, alt_paths, targetEPSG): raise NotImplementedError() class OSMProvider(MapProvider): def __init__(self, name, left, bottom, right, top): super().__init__() self.name = name self.left = left self.bottom = bottom self.right = right self.top = top self.srcEPSG = 4326 # WGS84 self.G = read_osm(download_osm(left, bottom, right, top, cache=True)) 
self.G = convert_node_labels_to_integers(self.G, label_attribute='id') def getName(self): return self.name def getNumVertices(self): return self.G.number_of_nodes() def getNumEdges(self): return self.G.number_of_edges() def getXRange(self): raise NotImplementedError() def getYRange(self): raise NotImplementedError() def getPoint(self, id, targetEPSG): lat = self.G.node[id]['lat'] lon = self.G.node[id]['lon'] # Load source EPSG reference system source = osr.SpatialReference() source.ImportFromEPSG(self.srcEPSG) # Load target EPSG target = osr.SpatialReference() target.ImportFromEPSG(targetEPSG) # Transform coordinates transform = osr.CoordinateTransformation(source, target) point = ogr.CreateGeometryFromWkt('POINT ({} {})'.format(lon, lat)) point.Transform(transform) x_prime, y_prime = (point.GetX(), point.GetY()) return (x_prime, y_prime) def getDistanceKm(self, id1, id2): WGS84 = 4326 lon1, lat1 = self.getPoint(id1, WGS84) lon2, lat2 = self.getPoint(id2, WGS84) return self._calc_distance(lat1, lon1, lat2, lon2) def toID(self, normalized_id): return normalized_id def toNormalizedID(self, id): return id def getNormalizedVertices(self): return self.G.nodes() def getNormalizedEdges(self): return self.G.edges() def generateRandomP2P(self, n=1000, seed=None): if seed is not None: random.seed(seed) def sample_node(): return random.randrange(self.getNumVertices()) p2p = [(sample_node(), sample_node()) for _ in range(n)] return p2p def writeP2P(self, path, p2p_seq): p = Path(path) print('[OSMProvider] Writing P2P in ARLib-format to {}...'.format(p)) with open(p, mode='w') as f: for s, t in p2p_seq: f.write('{} {}\n'.format(s, t)) def write(self, path): p = Path(path) print('[OSMProvider] Writing graph in ARLib-format to {}...'.format(p)) with open(p, mode='w') as f: for u, v, w in self.G.edges(data='time'): f.write('{} {} {}\n'.format(u, v, w)) def write_coo(self, path): p = Path(path) WGS84 = 4326 print('[OSMProvider] Writing coordinates in ARLib-format to 
{}...'.format(p)) with open(p, mode='w') as f: for v in sorted(nx.nodes(self.G)): lon, lat = self.getPoint(v, WGS84) f.write('{} {}\n'.format(lon, lat)) def writeWkt(self, out_path, alt_paths, targetEPSG): p = Path(out_path) print('[OSMProvider] Writing alternative paths in WKT to {}...'.format(p)) # Fill Multiline lines = [] for alt_path in alt_paths: # Fill path Line line = ogr.Geometry(ogr.wkbLineString) for v in alt_path: v = self.toID(v) x, y = self.getPoint(v, targetEPSG) line.AddPoint(x, y) lines.append(line) # Write to out_path with open(p, mode='w') as f: f.write('K;Line\n') for k, line in enumerate(lines): f.write('{};{}\n'.format(k, line.ExportToWkt())) def writeDIMACS(self, path): p = Path(path) print('[OSMProvider] Writing graph in DIMACS-format to {}...'.format(p)) with open(p, mode='w') as f: header = ['p sp ' '{} {}\n'.format(self.getNumVertices(), self.getNumEdges())] f.writelines(header) for u, v, w in self.G.edges(data='time'): f.write('a {} {} {}\n'.format(u, v, w)) @staticmethod def _calc_distance(lat1, lon1, lat2, lon2): # convert decimal degrees to radians lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2]) # haversine formula dlon = lon2 - lon1 dlat = lat2 - lat1 a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2 c = 2 * asin(sqrt(a)) km = 6371 * c return km if __name__ == "__main__": milan = { "top": 45.555946, "bottom": 45.366724, "left": 9.020613, "right": 9.2979979 } parser = argparse.ArgumentParser() parser.add_argument('-f', action='store', dest='bboxfile', help='File path of the JSON bounding box', default=None) args = parser.parse_args() if args.bboxfile is None: bbox = milan else: with open(args.bboxfile, 'r') as fp: bbox = json.load(fp) maps = OSMProvider('Milan', **bbox) maps.write('/tmp/weights') # maps.writeDIMACS('/tmp/milan_map.gr') maps.write_coo('/tmp/ids') # maps.writeP2P('/tmp/milan.p2p', map.generateRandomP2P(seed=1234))
true
true
f70c628036311a35fe236d5b94ccde4248cfc56c
269
py
Python
1-moderate/robo-and-robitta/main.py
mpillar/codeeval
ad1fc5aea277575dcce6ad5db230d7a2bfe41eed
[ "Unlicense" ]
21
2015-02-09T18:41:15.000Z
2021-07-31T02:43:28.000Z
1-moderate/robo-and-robitta/main.py
mpillar/codeeval
ad1fc5aea277575dcce6ad5db230d7a2bfe41eed
[ "Unlicense" ]
null
null
null
1-moderate/robo-and-robitta/main.py
mpillar/codeeval
ad1fc5aea277575dcce6ad5db230d7a2bfe41eed
[ "Unlicense" ]
37
2015-01-06T06:20:17.000Z
2021-06-21T18:22:13.000Z
import re import sys def walk(m, n, x, y): c = 0 while n > y: c, m, n, x, y = c + m, n - 1, m, n - y, x return c + x with open(sys.argv[1], 'r') as test_cases: for test in test_cases: print(walk(*map(int, re.findall(r'\d+', test))))
17.933333
56
0.505576
import re import sys def walk(m, n, x, y): c = 0 while n > y: c, m, n, x, y = c + m, n - 1, m, n - y, x return c + x with open(sys.argv[1], 'r') as test_cases: for test in test_cases: print(walk(*map(int, re.findall(r'\d+', test))))
true
true
f70c62a3ac4a06f3848d9bf75d7cfce91bf451dc
3,889
py
Python
Python/Module/JoyStick.py
Hajime-Saitou/Games
f7ec0807855481cd1c6b135cd7c6d0f3898d3c59
[ "MIT" ]
1
2021-02-27T17:58:14.000Z
2021-02-27T17:58:14.000Z
Python/Module/JoyStick.py
Hajime-Saitou/Games
f7ec0807855481cd1c6b135cd7c6d0f3898d3c59
[ "MIT" ]
null
null
null
Python/Module/JoyStick.py
Hajime-Saitou/Games
f7ec0807855481cd1c6b135cd7c6d0f3898d3c59
[ "MIT" ]
null
null
null
# JoyStick # # Copyright (c) 2021 Hajime Saito # # Released under the MIT license. # see https://opensource.org/licenses/MIT import pygame from pygame.locals import * import Repeater JOY_MAX_TRIGGER = 16 JOY_NOINPUT = 0 JOY_UP = 0x1 << JOY_MAX_TRIGGER JOY_RIGHT = 0x2 << JOY_MAX_TRIGGER JOY_DOWN = 0x4 << JOY_MAX_TRIGGER JOY_LEFT = 0x8 << JOY_MAX_TRIGGER JOY_TRIGGER1 = 0x1 << 0 JOY_TRIGGER2 = 0x1 << 1 JOY_TRIGGER3 = 0x1 << 2 JOY_TRIGGER4 = 0x1 << 3 JOY_TRIGGER5 = 0x1 << 4 JOY_TRIGGER6 = 0x1 << 5 JOY_TRIGGER7 = 0x1 << 6 JOY_TRIGGER8 = 0x1 << 7 JOY_TRIGGER9 = 0x1 << 8 JOY_TRIGGER10 = 0x1 << 9 JOY_TRIGGER11 = 0x1 << 10 JOY_TRIGGER12 = 0x1 << 11 JOY_TRIGGER13 = 0x1 << 12 JOY_TRIGGER14 = 0x1 << 13 JOY_TRIGGER15 = 0x1 << 14 JOY_TRIGGER16 = 0x1 << 15 JOY_MASK_STICK = (JOY_UP | JOY_RIGHT | JOY_DOWN | JOY_LEFT) JOY_MASK_BUTTON = ~JOY_MASK_STICK class JoyStickBase(object): def __init__(self): self.data = JOY_NOINPUT self.prevData = JOY_NOINPUT self.xorData = JOY_NOINPUT self.latestButtonDown = JOY_NOINPUT self.latestButtonUp = JOY_NOINPUT self.repeater = Repeater.XorRepeater() self.repeater.setDefaultValue(JOY_NOINPUT) self.repeatedData = JOY_NOINPUT def update(self): # update self.data at subclass before call this. 
self.repeatedData = self.repeater.update(self.data) self.xorData = self.data ^ self.prevData self.latestButtonDown = self.xorData & self.data self.latestButtonUp = self.xorData & ~self.data self.prevData = self.data class JoyKey(JoyStickBase): def __init__(self): super().__init__() self.vk_up = K_UP self.vk_right = K_RIGHT self.vk_down = K_DOWN self.vk_left = K_LEFT self.vk_button = [ 0 ] * JOY_MAX_TRIGGER self.vk_button[0] = K_z self.vk_button[1] = K_x self.vk_button[2] = K_c def update(self): key = pygame.key.get_pressed() self.data = JOY_NOINPUT if key[self.vk_up] == 1: self.data |= JOY_UP if key[self.vk_right] == 1: self.data |= JOY_RIGHT if key[self.vk_down] == 1: self.data |= JOY_DOWN if key[self.vk_left] == 1: self.data |= JOY_LEFT for i in range(JOY_MAX_TRIGGER): if key[self.vk_button[i]] == 1: self.data |= 1 << i super().update() class JoyStick(JoyStickBase): def __init__(self, joyStickId=0): super().__init__() if joyStickId >= pygame.joystick.get_count(): raise ValueError("Invalid JoyStick ID {}".format(joyStickId)) self.joyStick = pygame.joystick.Joystick(joyStickId) self.joyStick.init() self.hasHat = True if self.joyStick.get_numhats() > 0 else False def update(self): self.data = JOY_NOINPUT stickDatas = [] if self.hasHat: for i in range(self.joyStick.get_numhats()): x, y = self.joyStick.get_hat(i) stickDatas.extend([ x, -y ]) else: for i in range(self.joyStick.get_numaxes()): stickDatas.append(self.joyStick.get_axis(i)) if stickDatas[1] < -0.5: self.data |= JOY_UP if stickDatas[1] > 0.5: self.data |= JOY_DOWN if stickDatas[0] > 0.5: self.data |= JOY_RIGHT if stickDatas[0] < -0.5: self.data |= JOY_LEFT for i in range(self.joyStick.get_numbuttons()): if self.joyStick.get_button(i) == True: self.data |= 1 << i super().update() class JoyStickIntegrator(JoyStickBase): def __init__(self): super().__init__() self.joySticks = [] def append(self, joyStick): self.joySticks.append(joyStick) def remove(self, joyStick): self.joySticks.remove(joyStick) def 
update(self): self.data = JOY_NOINPUT self.repeatedData = JOY_NOINPUT self.xorData = JOY_NOINPUT self.latestButtonDown = JOY_NOINPUT self.latestButtonUp = JOY_NOINPUT for joyStick in self.joySticks: joyStick.update() self.data |= joyStick.data self.repeatedData |= joyStick.repeatedData self.xorData |= joyStick.xorData self.latestButtonDown |= joyStick.latestButtonDown self.latestButtonUp |= joyStick.latestButtonUp
24.770701
66
0.701723
import pygame from pygame.locals import * import Repeater JOY_MAX_TRIGGER = 16 JOY_NOINPUT = 0 JOY_UP = 0x1 << JOY_MAX_TRIGGER JOY_RIGHT = 0x2 << JOY_MAX_TRIGGER JOY_DOWN = 0x4 << JOY_MAX_TRIGGER JOY_LEFT = 0x8 << JOY_MAX_TRIGGER JOY_TRIGGER1 = 0x1 << 0 JOY_TRIGGER2 = 0x1 << 1 JOY_TRIGGER3 = 0x1 << 2 JOY_TRIGGER4 = 0x1 << 3 JOY_TRIGGER5 = 0x1 << 4 JOY_TRIGGER6 = 0x1 << 5 JOY_TRIGGER7 = 0x1 << 6 JOY_TRIGGER8 = 0x1 << 7 JOY_TRIGGER9 = 0x1 << 8 JOY_TRIGGER10 = 0x1 << 9 JOY_TRIGGER11 = 0x1 << 10 JOY_TRIGGER12 = 0x1 << 11 JOY_TRIGGER13 = 0x1 << 12 JOY_TRIGGER14 = 0x1 << 13 JOY_TRIGGER15 = 0x1 << 14 JOY_TRIGGER16 = 0x1 << 15 JOY_MASK_STICK = (JOY_UP | JOY_RIGHT | JOY_DOWN | JOY_LEFT) JOY_MASK_BUTTON = ~JOY_MASK_STICK class JoyStickBase(object): def __init__(self): self.data = JOY_NOINPUT self.prevData = JOY_NOINPUT self.xorData = JOY_NOINPUT self.latestButtonDown = JOY_NOINPUT self.latestButtonUp = JOY_NOINPUT self.repeater = Repeater.XorRepeater() self.repeater.setDefaultValue(JOY_NOINPUT) self.repeatedData = JOY_NOINPUT def update(self): self.repeatedData = self.repeater.update(self.data) self.xorData = self.data ^ self.prevData self.latestButtonDown = self.xorData & self.data self.latestButtonUp = self.xorData & ~self.data self.prevData = self.data class JoyKey(JoyStickBase): def __init__(self): super().__init__() self.vk_up = K_UP self.vk_right = K_RIGHT self.vk_down = K_DOWN self.vk_left = K_LEFT self.vk_button = [ 0 ] * JOY_MAX_TRIGGER self.vk_button[0] = K_z self.vk_button[1] = K_x self.vk_button[2] = K_c def update(self): key = pygame.key.get_pressed() self.data = JOY_NOINPUT if key[self.vk_up] == 1: self.data |= JOY_UP if key[self.vk_right] == 1: self.data |= JOY_RIGHT if key[self.vk_down] == 1: self.data |= JOY_DOWN if key[self.vk_left] == 1: self.data |= JOY_LEFT for i in range(JOY_MAX_TRIGGER): if key[self.vk_button[i]] == 1: self.data |= 1 << i super().update() class JoyStick(JoyStickBase): def __init__(self, joyStickId=0): super().__init__() if joyStickId 
>= pygame.joystick.get_count(): raise ValueError("Invalid JoyStick ID {}".format(joyStickId)) self.joyStick = pygame.joystick.Joystick(joyStickId) self.joyStick.init() self.hasHat = True if self.joyStick.get_numhats() > 0 else False def update(self): self.data = JOY_NOINPUT stickDatas = [] if self.hasHat: for i in range(self.joyStick.get_numhats()): x, y = self.joyStick.get_hat(i) stickDatas.extend([ x, -y ]) else: for i in range(self.joyStick.get_numaxes()): stickDatas.append(self.joyStick.get_axis(i)) if stickDatas[1] < -0.5: self.data |= JOY_UP if stickDatas[1] > 0.5: self.data |= JOY_DOWN if stickDatas[0] > 0.5: self.data |= JOY_RIGHT if stickDatas[0] < -0.5: self.data |= JOY_LEFT for i in range(self.joyStick.get_numbuttons()): if self.joyStick.get_button(i) == True: self.data |= 1 << i super().update() class JoyStickIntegrator(JoyStickBase): def __init__(self): super().__init__() self.joySticks = [] def append(self, joyStick): self.joySticks.append(joyStick) def remove(self, joyStick): self.joySticks.remove(joyStick) def update(self): self.data = JOY_NOINPUT self.repeatedData = JOY_NOINPUT self.xorData = JOY_NOINPUT self.latestButtonDown = JOY_NOINPUT self.latestButtonUp = JOY_NOINPUT for joyStick in self.joySticks: joyStick.update() self.data |= joyStick.data self.repeatedData |= joyStick.repeatedData self.xorData |= joyStick.xorData self.latestButtonDown |= joyStick.latestButtonDown self.latestButtonUp |= joyStick.latestButtonUp
true
true
f70c62fba7c4424468351fa0d5cf2e162e2be301
1,399
py
Python
auth_api/views.py
sodascourse/note-storage
2e7decf968c6939b30ca52573e1a33d12ce32e0e
[ "Apache-2.0" ]
null
null
null
auth_api/views.py
sodascourse/note-storage
2e7decf968c6939b30ca52573e1a33d12ce32e0e
[ "Apache-2.0" ]
null
null
null
auth_api/views.py
sodascourse/note-storage
2e7decf968c6939b30ca52573e1a33d12ce32e0e
[ "Apache-2.0" ]
null
null
null
from django.contrib.auth import get_user_model from rest_framework import serializers from rest_framework.generics import CreateAPIView User = get_user_model() class SignupSerializer(serializers.Serializer): error_message = "'{value}' is a registered {field}. Contact admin if you forgets password." username = serializers.CharField() password = serializers.CharField(write_only=True) email = serializers.EmailField() def validate_username(self, username): if User.objects.filter(username=username).exists(): error_message = self.error_message.format(value=username, field="username") raise serializers.ValidationError(error_message) return username def validate_email(self, email): if User.objects.filter(email=email).exists(): error_message = self.error_message.format(value=email, field="email") raise serializers.ValidationError(error_message) return email def create(self, validated_data): data = validated_data.copy() password = data.pop("password") user = User(**data) user.set_password(password) user.save() return user def update(self, instance, validated_data): raise RuntimeError("Update is disallowed.") class SignupView(CreateAPIView): serializer_class = SignupSerializer authentication_classes = ()
33.309524
95
0.706219
from django.contrib.auth import get_user_model from rest_framework import serializers from rest_framework.generics import CreateAPIView User = get_user_model() class SignupSerializer(serializers.Serializer): error_message = "'{value}' is a registered {field}. Contact admin if you forgets password." username = serializers.CharField() password = serializers.CharField(write_only=True) email = serializers.EmailField() def validate_username(self, username): if User.objects.filter(username=username).exists(): error_message = self.error_message.format(value=username, field="username") raise serializers.ValidationError(error_message) return username def validate_email(self, email): if User.objects.filter(email=email).exists(): error_message = self.error_message.format(value=email, field="email") raise serializers.ValidationError(error_message) return email def create(self, validated_data): data = validated_data.copy() password = data.pop("password") user = User(**data) user.set_password(password) user.save() return user def update(self, instance, validated_data): raise RuntimeError("Update is disallowed.") class SignupView(CreateAPIView): serializer_class = SignupSerializer authentication_classes = ()
true
true
f70c64f401fdeb5c96c28f7172af6fe7f9010dbf
49,435
py
Python
qiskit/pulse/schedule.py
SooluThomas/qiskit-terra
25b47af83f14afb3441d7b2c1bd31bda93e3549d
[ "Apache-2.0" ]
null
null
null
qiskit/pulse/schedule.py
SooluThomas/qiskit-terra
25b47af83f14afb3441d7b2c1bd31bda93e3549d
[ "Apache-2.0" ]
null
null
null
qiskit/pulse/schedule.py
SooluThomas/qiskit-terra
25b47af83f14afb3441d7b2c1bd31bda93e3549d
[ "Apache-2.0" ]
null
null
null
# This code is part of Qiskit. # # (C) Copyright IBM 2019. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """The Schedule is one of the most fundamental objects to this pulse-level programming module. A ``Schedule`` is a representation of a *program* in Pulse. Each schedule tracks the time of each instruction occuring in parallel over multiple signal *channels*. """ import abc import copy import itertools import multiprocessing as mp import sys import warnings from collections import defaultdict from typing import List, Tuple, Iterable, Union, Dict, Callable, Set, Optional, Any import numpy as np from qiskit.circuit.parameter import Parameter from qiskit.circuit.parameterexpression import ParameterExpression, ParameterValueType from qiskit.pulse.channels import Channel from qiskit.pulse.exceptions import PulseError # pylint: disable=cyclic-import from qiskit.pulse.instructions import Instruction from qiskit.pulse.utils import instruction_duration_validation from qiskit.utils.multiprocessing import is_main_process # pylint: disable=missing-return-doc Interval = Tuple[int, int] """An interval type is a tuple of a start time (inclusive) and an end time (exclusive).""" TimeSlots = Dict[Channel, List[Tuple[int, int]]] """List of timeslots occupied by instructions for each channel.""" class Schedule(abc.ABC): """A quantum program *schedule* with exact time constraints for its instructions, operating over all input signal *channels* and supporting special syntaxes for building. """ # Counter for the number of instances in this class. instances_counter = itertools.count() # Prefix to use for auto naming. 
prefix = 'sched' def __init__(self, *schedules: Union[Union['Schedule', Instruction], Tuple[int, Union['Schedule', Instruction]]], name: Optional[str] = None, metadata: Optional[dict] = None): """Create an empty schedule. Args: *schedules: Child Schedules of this parent Schedule. May either be passed as the list of schedules, or a list of ``(start_time, schedule)`` pairs. name: Name of this schedule. Defaults to an autogenerated string if not provided. metadata: Arbitrary key value metadata to associate with the schedule. This gets stored as free-form data in a dict in the :attr:`~qiskit.pulse.Schedule.metadata` attribute. It will not be directly used in the schedule. Raises: TypeError: if metadata is not a dict. """ if name is None: name = self.prefix + str(next(self.instances_counter)) if sys.platform != "win32" and not is_main_process(): name += '-{}'.format(mp.current_process().pid) self._name = name self._duration = 0 # These attributes are populated by ``_mutable_insert`` self._timeslots = {} self.__children = [] self._parameter_table = defaultdict(list) for sched_pair in schedules: try: time, sched = sched_pair except TypeError: # recreate as sequence starting at 0. 
time, sched = 0, sched_pair self._mutable_insert(time, sched) if not isinstance(metadata, dict) and metadata is not None: raise TypeError("Only a dictionary or None is accepted for schedule metadata") self._metadata = metadata @property def name(self) -> str: """Name of this Schedule""" return self._name @property def timeslots(self) -> TimeSlots: """Time keeping attribute.""" return self._timeslots @property def duration(self) -> int: """Duration of this schedule.""" return self._duration @property def start_time(self) -> int: """Starting time of this schedule.""" return self.ch_start_time(*self.channels) @property def stop_time(self) -> int: """Stopping time of this schedule.""" return self.duration @property def channels(self) -> Tuple[Channel]: """Returns channels that this schedule uses.""" return tuple(self._timeslots.keys()) @property def _children(self) -> Tuple[Tuple[int, Union['Schedule', Instruction]], ...]: """Return the child``NamedValues``s of this ``Schedule`` in the order they were added to the schedule. Returns: A tuple, where each element is a two-tuple containing the initial scheduled time of each ``NamedValue`` and the component itself. """ return tuple(self.__children) @property def instructions(self): """Get the time-ordered instructions from self. ReturnType: Tuple[Tuple[int, Instruction], ...] """ def key(time_inst_pair): inst = time_inst_pair[1] return (time_inst_pair[0], inst.duration, sorted(chan.name for chan in inst.channels)) return tuple(sorted(self._instructions(), key=key)) @property def metadata(self): """The user provided metadata associated with the schedule The metadata for the schedule is a user provided ``dict`` of metadata for the schedule. It will not be used to influence the execution or operation of the schedule, but it is expected to be passed between all transforms of the schedule and that providers will associate any schedule metadata with the results it returns from execution of that schedule. 
""" return self._metadata @metadata.setter def metadata(self, metadata): """Update the schedule metadata""" if not isinstance(metadata, dict) and metadata is not None: raise TypeError("Only a dictionary or None is accepted for schedule metadata") self._metadata = metadata def ch_duration(self, *channels: List[Channel]) -> int: """Return the time of the end of the last instruction over the supplied channels. Args: *channels: Channels within ``self`` to include. """ return self.ch_stop_time(*channels) def ch_start_time(self, *channels: List[Channel]) -> int: """Return the time of the start of the first instruction over the supplied channels. Args: *channels: Channels within ``self`` to include. """ try: chan_intervals = (self._timeslots[chan] for chan in channels if chan in self._timeslots) return min(intervals[0][0] for intervals in chan_intervals) except ValueError: # If there are no instructions over channels return 0 def ch_stop_time(self, *channels: List[Channel]) -> int: """Return maximum start time over supplied channels. Args: *channels: Channels within ``self`` to include. """ try: chan_intervals = (self._timeslots[chan] for chan in channels if chan in self._timeslots) return max(intervals[-1][1] for intervals in chan_intervals) except ValueError: # If there are no instructions over channels return 0 def _instructions(self, time: int = 0): """Iterable for flattening Schedule tree. Args: time: Shifted time due to parent. Yields: Iterable[Tuple[int, Instruction]]: Tuple containing the time each :class:`~qiskit.pulse.Instruction` starts at and the flattened :class:`~qiskit.pulse.Instruction` s. """ for insert_time, child_sched in self._children: yield from child_sched._instructions(time + insert_time) def shift(self, time: int, name: Optional[str] = None, inplace: bool = False ) -> 'Schedule': """Return a schedule shifted forward by ``time``. Args: time: Time to shift by. name: Name of the new schedule. Defaults to the name of self. 
inplace: Perform operation inplace on this schedule. Otherwise return a new ``Schedule``. """ if inplace: return self._mutable_shift(time) return self._immutable_shift(time, name=name) def _immutable_shift(self, time: int, name: Optional[str] = None ) -> 'Schedule': """Return a new schedule shifted forward by `time`. Args: time: Time to shift by name: Name of the new schedule if call was mutable. Defaults to name of self """ if name is None: name = self.name return Schedule((time, self), name=name) def _mutable_shift(self, time: int ) -> 'Schedule': """Return this schedule shifted forward by `time`. Args: time: Time to shift by Raises: PulseError: if ``time`` is not an integer. """ if not isinstance(time, int): raise PulseError( "Schedule start time must be an integer.") timeslots = {} for chan, ch_timeslots in self._timeslots.items(): timeslots[chan] = [(ts[0] + time, ts[1] + time) for ts in ch_timeslots] _check_nonnegative_timeslot(timeslots) self._duration = self._duration + time self._timeslots = timeslots self.__children = [(orig_time + time, child) for orig_time, child in self._children] return self def insert(self, start_time: int, schedule: Union['Schedule', Instruction], name: Optional[str] = None, inplace: bool = False ) -> 'Schedule': """Return a new schedule with ``schedule`` inserted into ``self`` at ``start_time``. Args: start_time: Time to insert the schedule. schedule: Schedule to insert. name: Name of the new schedule. Defaults to the name of self. inplace: Perform operation inplace on this schedule. Otherwise return a new ``Schedule``. """ if inplace: return self._mutable_insert(start_time, schedule) return self._immutable_insert(start_time, schedule, name=name) def _mutable_insert(self, start_time: int, schedule: Union['Schedule', Instruction] ) -> 'Schedule': """Mutably insert `schedule` into `self` at `start_time`. Args: start_time: Time to insert the second schedule. schedule: Schedule to mutably insert. 
""" self._add_timeslots(start_time, schedule) self.__children.append((start_time, schedule)) self._update_parameter_table(schedule) return self def _immutable_insert(self, start_time: int, schedule: Union['Schedule', Instruction], name: Optional[str] = None, ) -> 'Schedule': """Return a new schedule with ``schedule`` inserted into ``self`` at ``start_time``. Args: start_time: Time to insert the schedule. schedule: Schedule to insert. name: Name of the new ``Schedule``. Defaults to name of ``self``. """ if name is None: name = self.name new_sched = Schedule(name=name) new_sched._mutable_insert(0, self) new_sched._mutable_insert(start_time, schedule) return new_sched def append(self, schedule: Union['Schedule', Instruction], name: Optional[str] = None, inplace: bool = False) -> 'Schedule': r"""Return a new schedule with ``schedule`` inserted at the maximum time over all channels shared between ``self`` and ``schedule``. .. math:: t = \textrm{max}(\texttt{x.stop_time} |\texttt{x} \in \texttt{self.channels} \cap \texttt{schedule.channels}) Args: schedule: Schedule to be appended. name: Name of the new ``Schedule``. Defaults to name of ``self``. inplace: Perform operation inplace on this schedule. Otherwise return a new ``Schedule``. """ common_channels = set(self.channels) & set(schedule.channels) time = self.ch_stop_time(*common_channels) return self.insert(time, schedule, name=name, inplace=inplace) def flatten(self) -> 'Schedule': """Return a new schedule which is the flattened schedule contained all ``instructions``.""" return Schedule(*self.instructions, name=self.name) def filter(self, *filter_funcs: List[Callable], channels: Optional[Iterable[Channel]] = None, instruction_types=None, time_ranges: Optional[Iterable[Tuple[int, int]]] = None, intervals: Optional[Iterable[Interval]] = None) -> 'Schedule': """Return a new ``Schedule`` with only the instructions from this ``Schedule`` which pass though the provided filters; i.e. 
an instruction will be retained iff every function in ``filter_funcs`` returns ``True``, the instruction occurs on a channel type contained in ``channels``, the instruction type is contained in ``instruction_types``, and the period over which the instruction operates is *fully* contained in one specified in ``time_ranges`` or ``intervals``. If no arguments are provided, ``self`` is returned. Args: filter_funcs: A list of Callables which take a (int, Union['Schedule', Instruction]) tuple and return a bool. channels: For example, ``[DriveChannel(0), AcquireChannel(0)]``. instruction_types (Optional[Iterable[Type[qiskit.pulse.Instruction]]]): For example, ``[PulseInstruction, AcquireInstruction]``. time_ranges: For example, ``[(0, 5), (6, 10)]``. intervals: For example, ``[(0, 5), (6, 10)]``. """ composed_filter = self._construct_filter(*filter_funcs, channels=channels, instruction_types=instruction_types, time_ranges=time_ranges, intervals=intervals) return self._apply_filter(composed_filter, new_sched_name="{name}".format(name=self.name)) def exclude(self, *filter_funcs: List[Callable], channels: Optional[Iterable[Channel]] = None, instruction_types=None, time_ranges: Optional[Iterable[Tuple[int, int]]] = None, intervals: Optional[Iterable[Interval]] = None) -> 'Schedule': """Return a Schedule with only the instructions from this Schedule *failing* at least one of the provided filters. This method is the complement of ``self.filter``, so that:: self.filter(args) | self.exclude(args) == self Args: filter_funcs: A list of Callables which take a (int, Union['Schedule', Instruction]) tuple and return a bool. channels: For example, ``[DriveChannel(0), AcquireChannel(0)]``. instruction_types (Optional[Iterable[Type[qiskit.pulse.Instruction]]]): For example, ``[PulseInstruction, AcquireInstruction]``. time_ranges: For example, ``[(0, 5), (6, 10)]``. intervals: For example, ``[(0, 5), (6, 10)]``. 
""" composed_filter = self._construct_filter(*filter_funcs, channels=channels, instruction_types=instruction_types, time_ranges=time_ranges, intervals=intervals) return self._apply_filter(lambda x: not composed_filter(x), new_sched_name="{name}".format(name=self.name)) def _apply_filter(self, filter_func: Callable, new_sched_name: str) -> 'Schedule': """Return a Schedule containing only the instructions from this Schedule for which ``filter_func`` returns ``True``. Args: filter_func: Function of the form (int, Union['Schedule', Instruction]) -> bool. new_sched_name: Name of the returned ``Schedule``. """ subschedules = self.flatten()._children valid_subschedules = [sched for sched in subschedules if filter_func(sched)] return Schedule(*valid_subschedules, name=new_sched_name) def _construct_filter(self, *filter_funcs: List[Callable], channels: Optional[Iterable[Channel]] = None, instruction_types: Optional[Iterable[Instruction]] = None, time_ranges: Optional[Iterable[Tuple[int, int]]] = None, intervals: Optional[Iterable[Interval]] = None) -> Callable: """Returns a boolean-valued function with input type ``(int, ScheduleComponent)`` that returns ``True`` iff the input satisfies all of the criteria specified by the arguments; i.e. iff every function in ``filter_funcs`` returns ``True``, the instruction occurs on a channel type contained in ``channels``, the instruction type is contained in ``instruction_types``, and the period over which the instruction operates is fully contained in one specified in ``time_ranges`` or ``intervals``. 
Args: filter_funcs: A list of Callables which take a (int, ScheduleComponent) tuple and return a bool channels: For example, ``[DriveChannel(0), AcquireChannel(0)]`` or ``DriveChannel(0)`` instruction_types: For example, ``[PulseInstruction, AcquireInstruction]`` or ``DelayInstruction`` time_ranges: For example, ``[(0, 5), (6, 10)]`` or ``(0, 5)`` intervals: For example, ``[Interval(0, 5), Interval(6, 10)]`` or ``Interval(0, 5)`` """ def if_scalar_cast_to_list(to_list): try: iter(to_list) except TypeError: to_list = [to_list] return to_list def only_channels(channels: Union[Set[Channel], Channel]) -> Callable: channels = if_scalar_cast_to_list(channels) def channel_filter(time_inst) -> bool: """Filter channel. Args: time_inst (Tuple[int, Instruction]): Time """ return any([chan in channels for chan in time_inst[1].channels]) return channel_filter def only_instruction_types(types: Union[Iterable[abc.ABCMeta], abc.ABCMeta]) -> Callable: types = if_scalar_cast_to_list(types) def instruction_filter(time_inst) -> bool: """Filter instruction. Args: time_inst (Tuple[int, Instruction]): Time """ return isinstance(time_inst[1], tuple(types)) return instruction_filter def only_intervals(ranges: Union[Iterable[Interval], Interval]) -> Callable: ranges = if_scalar_cast_to_list(ranges) def interval_filter(time_inst) -> bool: """Filter interval. 
Args: time_inst (Tuple[int, Instruction]): Time """ for i in ranges: inst_start = time_inst[0] inst_stop = inst_start + time_inst[1].duration if i[0] <= inst_start and inst_stop <= i[1]: return True return False return interval_filter filter_func_list = list(filter_funcs) if channels is not None: filter_func_list.append(only_channels(channels)) if instruction_types is not None: filter_func_list.append(only_instruction_types(instruction_types)) if time_ranges is not None: filter_func_list.append(only_intervals(time_ranges)) if intervals is not None: filter_func_list.append(only_intervals(intervals)) # return function returning true iff all filters are passed return lambda x: all([filter_func(x) for filter_func in filter_func_list]) def _add_timeslots(self, time: int, schedule: Union['Schedule', Instruction]) -> None: """Update all time tracking within this schedule based on the given schedule. Args: time: The time to insert the schedule into self. schedule: The schedule to insert into self. Raises: PulseError: If timeslots overlap or an invalid start time is provided. 
""" if not np.issubdtype(type(time), np.integer): raise PulseError("Schedule start time must be an integer.") other_timeslots = _get_timeslots(schedule) self._duration = max(self._duration, time + schedule.duration) for channel in schedule.channels: if channel not in self._timeslots: if time == 0: self._timeslots[channel] = copy.copy(other_timeslots[channel]) else: self._timeslots[channel] = [(i[0] + time, i[1] + time) for i in other_timeslots[channel]] continue for idx, interval in enumerate(other_timeslots[channel]): if interval[0] + time >= self._timeslots[channel][-1][1]: # Can append the remaining intervals self._timeslots[channel].extend( [(i[0] + time, i[1] + time) for i in other_timeslots[channel][idx:]]) break try: interval = (interval[0] + time, interval[1] + time) index = _find_insertion_index(self._timeslots[channel], interval) self._timeslots[channel].insert(index, interval) except PulseError: raise PulseError( "Schedule(name='{new}') cannot be inserted into Schedule(name='{old}') at " "time {time} because its instruction on channel {ch} scheduled from time " "{t0} to {tf} overlaps with an existing instruction." "".format(new=schedule.name or '', old=self.name or '', time=time, ch=channel, t0=interval[0], tf=interval[1])) _check_nonnegative_timeslot(self._timeslots) def _remove_timeslots(self, time: int, schedule: Union['Schedule', Instruction]): """Delete the timeslots if present for the respective schedule component. Args: time: The time to remove the timeslots for the ``schedule`` component. schedule: The schedule to insert into self. Raises: PulseError: If timeslots overlap or an invalid start time is provided. 
""" if not isinstance(time, int): raise PulseError("Schedule start time must be an integer.") for channel in schedule.channels: if channel not in self._timeslots: raise PulseError( 'The channel {} is not present in the schedule'.format(channel)) channel_timeslots = self._timeslots[channel] other_timeslots = _get_timeslots(schedule) for interval in other_timeslots[channel]: if channel_timeslots: interval = (interval[0] + time, interval[1] + time) index = _interval_index(channel_timeslots, interval) if channel_timeslots[index] == interval: channel_timeslots.pop(index) continue raise PulseError( "Cannot find interval ({t0}, {tf}) to remove from " "channel {ch} in Schedule(name='{name}').".format( ch=channel, t0=interval[0], tf=interval[1], name=schedule.name)) if not channel_timeslots: self._timeslots.pop(channel) def _replace_timeslots(self, time: int, old: Union['Schedule', Instruction], new: Union['Schedule', Instruction]): """Replace the timeslots of ``old`` if present with the timeslots of ``new``. Args: time: The time to remove the timeslots for the ``schedule`` component. old: Instruction to replace. new: Instruction to replace with. """ self._remove_timeslots(time, old) self._add_timeslots(time, new) def replace(self, old: Union['Schedule', Instruction], new: Union['Schedule', Instruction], inplace: bool = False, ) -> 'Schedule': """Return a schedule with the ``old`` instruction replaced with a ``new`` instruction. The replacement matching is based on an instruction equality check. .. jupyter-kernel:: python3 :id: replace .. jupyter-execute:: from qiskit import pulse d0 = pulse.DriveChannel(0) sched = pulse.Schedule() old = pulse.Play(pulse.Constant(100, 1.0), d0) new = pulse.Play(pulse.Constant(100, 0.1), d0) sched += old sched = sched.replace(old, new) assert sched == pulse.Schedule(new) Only matches at the top-level of the schedule tree. If you wish to perform this replacement over all instructions in the schedule tree. 
Flatten the schedule prior to running:: .. jupyter-execute:: sched = pulse.Schedule() sched += pulse.Schedule(old) sched = sched.flatten() sched = sched.replace(old, new) assert sched == pulse.Schedule(new) Args: old: Instruction to replace. new: Instruction to replace with. inplace: Replace instruction by mutably modifying this ``Schedule``. Returns: The modified schedule with ``old`` replaced by ``new``. Raises: PulseError: If the ``Schedule`` after replacements will has a timing overlap. """ new_children = [] for time, child in self._children: if child == old: new_children.append((time, new)) if inplace: self._replace_timeslots(time, old, new) else: new_children.append((time, child)) if inplace: self.__children = new_children self._parameter_table.clear() for _, child in new_children: self._update_parameter_table(child) return self else: try: return Schedule(*new_children) except PulseError as err: raise PulseError( 'Replacement of {old} with {new} results in ' 'overlapping instructions.'.format( old=old, new=new)) from err @property def parameters(self) -> Set: """Parameters which determine the schedule behavior.""" return set(self._parameter_table.keys()) def is_parameterized(self) -> bool: """Return True iff the instruction is parameterized.""" return bool(self.parameters) def assign_parameters(self, value_dict: Dict[ParameterExpression, ParameterValueType], ) -> 'Schedule': """Assign the parameters in this schedule according to the input. Args: value_dict: A mapping from Parameters to either numeric values or another Parameter expression. Returns: Schedule with updated parameters (a new one if not inplace, otherwise self). 
""" for parameter in self.parameters: if parameter not in value_dict: continue value = value_dict[parameter] for inst in self._parameter_table[parameter]: inst.assign_parameters({parameter: value}) entry = self._parameter_table.pop(parameter) if isinstance(value, ParameterExpression): for new_parameter in value.parameters: if new_parameter in self._parameter_table: new_entry = set(entry + self._parameter_table[new_parameter]) self._parameter_table[new_parameter] = list(new_entry) else: self._parameter_table[new_parameter] = entry # Update timeslots according to new channel keys for chan in copy.copy(self._timeslots): if isinstance(chan.index, ParameterExpression): chan_timeslots = self._timeslots.pop(chan) # Find the channel's new assignment new_channel = chan for param, value in value_dict.items(): if param in new_channel.parameters: new_channel = new_channel.assign(param, value) # Merge with existing channel if new_channel in self._timeslots: sched = Schedule() sched._timeslots = {new_channel: chan_timeslots} self._add_timeslots(0, sched) # Or add back under the new name else: self._timeslots[new_channel] = chan_timeslots return self def get_parameters(self, parameter_name: str) -> List[Parameter]: """Get parameter object bound to this schedule by string name. Because different ``Parameter`` objects can have the same name, this method returns a list of ``Parameter`` s for the provided name. Args: parameter_name: Name of parameter. Returns: Parameter objects that have corresponding name. 
""" return [param for param in self.parameters if param.name == parameter_name] def _update_parameter_table(self, schedule: 'Schedule'): """ Args: schedule: """ schedule = schedule.flatten() for _, inst in schedule.instructions: for param in inst.parameters: self._parameter_table[param].append(inst) def draw(self, dt: Any = None, # deprecated style: Optional[Dict[str, Any]] = None, filename: Any = None, # deprecated interp_method: Any = None, # deprecated scale: Any = None, # deprecated channel_scales: Any = None, # deprecated plot_all: Any = None, # deprecated plot_range: Any = None, # deprecated interactive: Any = None, # deprecated table: Any = None, # deprecated label: Any = None, # deprecated framechange: Any = None, # deprecated channels: Any = None, # deprecated show_framechange_channels: Any = None, # deprecated draw_title: Any = None, # deprecated backend=None, # importing backend causes cyclic import time_range: Optional[Tuple[int, int]] = None, time_unit: str = 'dt', disable_channels: Optional[List[Channel]] = None, show_snapshot: bool = True, show_framechange: bool = True, show_waveform_info: bool = True, show_barrier: bool = True, plotter: str = 'mpl2d', axis: Optional[Any] = None): """Plot the schedule. Args: style: Stylesheet options. This can be dictionary or preset stylesheet classes. See :py:class:~`qiskit.visualization.pulse_v2.stylesheets.IQXStandard`, :py:class:~`qiskit.visualization.pulse_v2.stylesheets.IQXSimple`, and :py:class:~`qiskit.visualization.pulse_v2.stylesheets.IQXDebugging` for details of preset stylesheets. backend (Optional[BaseBackend]): Backend object to play the input pulse program. If provided, the plotter may use to make the visualization hardware aware. time_range: Set horizontal axis limit. Tuple `(tmin, tmax)`. time_unit: The unit of specified time range either `dt` or `ns`. The unit of `ns` is available only when `backend` object is provided. disable_channels: A control property to show specific pulse channel. 
Pulse channel instances provided as a list are not shown in the output image. show_snapshot: Show snapshot instructions. show_framechange: Show frame change instructions. The frame change represents instructions that modulate phase or frequency of pulse channels. show_waveform_info: Show additional information about waveforms such as their name. show_barrier: Show barrier lines. plotter: Name of plotter API to generate an output image. One of following APIs should be specified:: mpl2d: Matplotlib API for 2D image generation. Matplotlib API to generate 2D image. Charts are placed along y axis with vertical offset. This API takes matplotlib.axes.Axes as ``axis`` input. ``axis`` and ``style`` kwargs may depend on the plotter. axis: Arbitrary object passed to the plotter. If this object is provided, the plotters use a given ``axis`` instead of internally initializing a figure object. This object format depends on the plotter. See plotter argument for details. dt: Deprecated. This argument is used by the legacy pulse drawer. filename: Deprecated. This argument is used by the legacy pulse drawer. To save output image, you can call ``.savefig`` method with returned Matplotlib Figure object. interp_method: Deprecated. This argument is used by the legacy pulse drawer. scale: Deprecated. This argument is used by the legacy pulse drawer. channel_scales: Deprecated. This argument is used by the legacy pulse drawer. plot_all: Deprecated. This argument is used by the legacy pulse drawer. plot_range: Deprecated. This argument is used by the legacy pulse drawer. interactive: Deprecated. This argument is used by the legacy pulse drawer. table: Deprecated. This argument is used by the legacy pulse drawer. label: Deprecated. This argument is used by the legacy pulse drawer. framechange: Deprecated. This argument is used by the legacy pulse drawer. channels: Deprecated. This argument is used by the legacy pulse drawer. show_framechange_channels: Deprecated. 
This argument is used by the legacy pulse drawer. draw_title: Deprecated. This argument is used by the legacy pulse drawer. Returns: Visualization output data. The returned data type depends on the ``plotter``. If matplotlib family is specified, this will be a ``matplotlib.pyplot.Figure`` data. """ # pylint: disable=cyclic-import, missing-return-type-doc from qiskit.visualization import pulse_drawer_v2, SchedStyle legacy_args = {'dt': dt, 'filename': filename, 'interp_method': interp_method, 'scale': scale, 'channel_scales': channel_scales, 'plot_all': plot_all, 'plot_range': plot_range, 'interactive': interactive, 'table': table, 'label': label, 'framechange': framechange, 'channels': channels, 'show_framechange_channels': show_framechange_channels, 'draw_title': draw_title} active_legacy_args = [] for name, legacy_arg in legacy_args.items(): if legacy_arg is not None: active_legacy_args.append(name) if active_legacy_args: warnings.warn('Legacy pulse drawer is deprecated. ' 'Specified arguments {dep_args} are deprecated. ' 'Please check the API document of new pulse drawer ' '`qiskit.visualization.pulse_drawer_v2`.' ''.format(dep_args=', '.join(active_legacy_args)), DeprecationWarning) if filename: warnings.warn('File saving is delegated to the plotter software in new drawer. ' 'If you specify matplotlib plotter family to `plotter` argument, ' 'you can call `savefig` method with the returned Figure object.', DeprecationWarning) if isinstance(style, SchedStyle): style = None warnings.warn('Legacy stylesheet is specified. This is ignored in the new drawer. 
' 'Please check the API documentation for this method.') return pulse_drawer_v2(program=self, style=style, backend=backend, time_range=time_range, time_unit=time_unit, disable_channels=disable_channels, show_snapshot=show_snapshot, show_framechange=show_framechange, show_waveform_info=show_waveform_info, show_barrier=show_barrier, plotter=plotter, axis=axis) def __eq__(self, other: Union['Schedule', Instruction]) -> bool: """Test if two ScheduleComponents are equal. Equality is checked by verifying there is an equal instruction at every time in ``other`` for every instruction in this ``Schedule``. .. warning:: This does not check for logical equivalency. Ie., ```python >>> (Delay(10)(DriveChannel(0)) + Delay(10)(DriveChannel(0)) == Delay(20)(DriveChannel(0))) False ``` """ channels = set(self.channels) other_channels = set(other.channels) # first check channels are the same if channels != other_channels: return False # then verify same number of instructions in each instructions = self.instructions other_instructions = other.instructions if len(instructions) != len(other_instructions): return False # finally check each instruction in `other` is in this schedule for idx, inst in enumerate(other_instructions): # check assumes `Schedule.instructions` is sorted consistently if instructions[idx] != inst: return False return True def __add__(self, other: Union['Schedule', Instruction]) -> 'Schedule': """Return a new schedule with ``other`` inserted within ``self`` at ``start_time``.""" return self.append(other) def __or__(self, other: Union['Schedule', Instruction]) -> 'Schedule': """Return a new schedule which is the union of `self` and `other`.""" return self.insert(0, other) def __lshift__(self, time: int) -> 'Schedule': """Return a new schedule which is shifted forward by ``time``.""" return self.shift(time) def __len__(self) -> int: """Return number of instructions in the schedule.""" return len(self.instructions) def __repr__(self): name = format(self._name) if 
self._name else "" instructions = ", ".join([repr(instr) for instr in self.instructions[:50]]) if len(self.instructions) > 25: instructions += ", ..." return 'Schedule({}, name="{}")'.format(instructions, name) class ParameterizedSchedule: """Temporary parameterized schedule class. This should not be returned to users as it is currently only a helper class. This class is takes an input command definition that accepts a set of parameters. Calling ``bind`` on the class will return a ``Schedule``. """ def __init__(self, *schedules, parameters: Optional[Dict[str, Union[float, complex]]] = None, name: Optional[str] = None): warnings.warn('ParameterizedSchedule is deprecated. Use Schedule with ' 'circuit.Parameter objects.', DeprecationWarning) full_schedules = [] parameterized = [] parameters = parameters or [] self.name = name or '' # partition schedules into callable and schedules for schedule in schedules: if isinstance(schedule, ParameterizedSchedule): parameterized.append(schedule) parameters += schedule.parameters elif callable(schedule): parameterized.append(schedule) elif isinstance(schedule, Schedule): full_schedules.append(schedule) else: raise PulseError('Input type: {} not supported'.format(type(schedule))) self._parameterized = tuple(parameterized) self._schedules = tuple(full_schedules) self._parameters = tuple(sorted(set(parameters))) @property def parameters(self) -> Tuple[str]: """Schedule parameters.""" return self._parameters def bind_parameters(self, *args: Union[int, float, complex, ParameterExpression], **kwargs: Union[int, float, complex, ParameterExpression]) -> Schedule: """Generate the Schedule from params to evaluate command expressions""" bound_schedule = Schedule(name=self.name) schedules = list(self._schedules) named_parameters = {} if args: for key, val in zip(self.parameters, args): named_parameters[key] = val if kwargs: for key, val in kwargs.items(): if key in self.parameters: if key not in named_parameters.keys(): named_parameters[key] 
= val else: raise PulseError("%s got multiple values for argument '%s'" % (self.__class__.__name__, key)) else: raise PulseError("%s got an unexpected keyword argument '%s'" % (self.__class__.__name__, key)) for param_sched in self._parameterized: # recursively call until based callable is reached if isinstance(param_sched, type(self)): predefined = param_sched.parameters else: # assuming no other parameterized instructions predefined = self.parameters sub_params = {k: v for k, v in named_parameters.items() if k in predefined} schedules.append(param_sched(**sub_params)) # construct evaluated schedules for sched in schedules: if isinstance(sched, tuple): bound_schedule.insert(sched[0], sched[1]) else: bound_schedule |= sched return bound_schedule def __call__(self, *args: Union[int, float, complex, ParameterExpression], **kwargs: Union[int, float, complex, ParameterExpression]) -> Schedule: return self.bind_parameters(*args, **kwargs) def _interval_index(intervals: List[Interval], interval: Interval) -> int: """Find the index of an interval. Args: intervals: A sorted list of non-overlapping Intervals. interval: The interval for which the index into intervals will be found. Returns: The index of the interval. Raises: PulseError: If the interval does not exist. """ index = _locate_interval_index(intervals, interval) found_interval = intervals[index] if found_interval != interval: raise PulseError('The interval: {} does not exist in intervals: {}'.format( interval, intervals )) return index def _locate_interval_index(intervals: List[Interval], interval: Interval, index: int = 0) -> int: """Using binary search on start times, find an interval. Args: intervals: A sorted list of non-overlapping Intervals. interval: The interval for which the index into intervals will be found. index: A running tally of the index, for recursion. The user should not pass a value. Returns: The index into intervals that new_interval would be inserted to maintain a sorted list of intervals. 
""" if not intervals or len(intervals) == 1: return index mid_idx = len(intervals) // 2 mid = intervals[mid_idx] if interval[1] <= mid[0] and (interval != mid): return _locate_interval_index(intervals[:mid_idx], interval, index=index) else: return _locate_interval_index(intervals[mid_idx:], interval, index=index + mid_idx) def _find_insertion_index(intervals: List[Interval], new_interval: Interval) -> int: """Using binary search on start times, return the index into `intervals` where the new interval belongs, or raise an error if the new interval overlaps with any existing ones. Args: intervals: A sorted list of non-overlapping Intervals. new_interval: The interval for which the index into intervals will be found. Returns: The index into intervals that new_interval should be inserted to maintain a sorted list of intervals. Raises: PulseError: If new_interval overlaps with the given intervals. """ index = _locate_interval_index(intervals, new_interval) if index < len(intervals): if _overlaps(intervals[index], new_interval): raise PulseError("New interval overlaps with existing.") return index if new_interval[1] <= intervals[index][0] else index + 1 return index def _overlaps(first: Interval, second: Interval) -> bool: """Return True iff first and second overlap. Note: first.stop may equal second.start, since Interval stop times are exclusive. """ if first[0] == second[0] == second[1]: # They fail to overlap if one of the intervals has duration 0 return False if first[0] > second[0]: first, second = second, first return second[0] < first[1] def _check_nonnegative_timeslot(timeslots: TimeSlots): """Test that a channel has no negative timeslots. Raises: PulseError: If a channel timeslot is negative. 
""" for chan, chan_timeslots in timeslots.items(): if chan_timeslots: if chan_timeslots[0][0] < 0: raise PulseError( "An instruction on {} has a negative " " starting time.".format(chan)) def _get_timeslots(schedule: Union[Instruction, Schedule]) -> TimeSlots: """Generate timeslots from given schedule component. Args: schedule: Input schedule component. Raises: PulseError: When invalid schedule type is specified. """ if isinstance(schedule, Instruction): duration = schedule.duration instruction_duration_validation(duration) timeslots = {channel: [(0, duration)] for channel in schedule.channels} elif isinstance(schedule, Schedule): timeslots = schedule.timeslots else: raise PulseError('Invalid schedule type {} is specified.'.format(type(schedule))) return timeslots
42.07234
100
0.588834
import abc import copy import itertools import multiprocessing as mp import sys import warnings from collections import defaultdict from typing import List, Tuple, Iterable, Union, Dict, Callable, Set, Optional, Any import numpy as np from qiskit.circuit.parameter import Parameter from qiskit.circuit.parameterexpression import ParameterExpression, ParameterValueType from qiskit.pulse.channels import Channel from qiskit.pulse.exceptions import PulseError from qiskit.pulse.instructions import Instruction from qiskit.pulse.utils import instruction_duration_validation from qiskit.utils.multiprocessing import is_main_process Interval = Tuple[int, int] TimeSlots = Dict[Channel, List[Tuple[int, int]]] class Schedule(abc.ABC): instances_counter = itertools.count() prefix = 'sched' def __init__(self, *schedules: Union[Union['Schedule', Instruction], Tuple[int, Union['Schedule', Instruction]]], name: Optional[str] = None, metadata: Optional[dict] = None): if name is None: name = self.prefix + str(next(self.instances_counter)) if sys.platform != "win32" and not is_main_process(): name += '-{}'.format(mp.current_process().pid) self._name = name self._duration = 0 self._timeslots = {} self.__children = [] self._parameter_table = defaultdict(list) for sched_pair in schedules: try: time, sched = sched_pair except TypeError: time, sched = 0, sched_pair self._mutable_insert(time, sched) if not isinstance(metadata, dict) and metadata is not None: raise TypeError("Only a dictionary or None is accepted for schedule metadata") self._metadata = metadata @property def name(self) -> str: return self._name @property def timeslots(self) -> TimeSlots: return self._timeslots @property def duration(self) -> int: return self._duration @property def start_time(self) -> int: return self.ch_start_time(*self.channels) @property def stop_time(self) -> int: return self.duration @property def channels(self) -> Tuple[Channel]: return tuple(self._timeslots.keys()) @property def _children(self) -> 
Tuple[Tuple[int, Union['Schedule', Instruction]], ...]: return tuple(self.__children) @property def instructions(self): def key(time_inst_pair): inst = time_inst_pair[1] return (time_inst_pair[0], inst.duration, sorted(chan.name for chan in inst.channels)) return tuple(sorted(self._instructions(), key=key)) @property def metadata(self): return self._metadata @metadata.setter def metadata(self, metadata): if not isinstance(metadata, dict) and metadata is not None: raise TypeError("Only a dictionary or None is accepted for schedule metadata") self._metadata = metadata def ch_duration(self, *channels: List[Channel]) -> int: return self.ch_stop_time(*channels) def ch_start_time(self, *channels: List[Channel]) -> int: try: chan_intervals = (self._timeslots[chan] for chan in channels if chan in self._timeslots) return min(intervals[0][0] for intervals in chan_intervals) except ValueError: return 0 def ch_stop_time(self, *channels: List[Channel]) -> int: try: chan_intervals = (self._timeslots[chan] for chan in channels if chan in self._timeslots) return max(intervals[-1][1] for intervals in chan_intervals) except ValueError: return 0 def _instructions(self, time: int = 0): for insert_time, child_sched in self._children: yield from child_sched._instructions(time + insert_time) def shift(self, time: int, name: Optional[str] = None, inplace: bool = False ) -> 'Schedule': if inplace: return self._mutable_shift(time) return self._immutable_shift(time, name=name) def _immutable_shift(self, time: int, name: Optional[str] = None ) -> 'Schedule': if name is None: name = self.name return Schedule((time, self), name=name) def _mutable_shift(self, time: int ) -> 'Schedule': if not isinstance(time, int): raise PulseError( "Schedule start time must be an integer.") timeslots = {} for chan, ch_timeslots in self._timeslots.items(): timeslots[chan] = [(ts[0] + time, ts[1] + time) for ts in ch_timeslots] _check_nonnegative_timeslot(timeslots) self._duration = self._duration + time 
self._timeslots = timeslots self.__children = [(orig_time + time, child) for orig_time, child in self._children] return self def insert(self, start_time: int, schedule: Union['Schedule', Instruction], name: Optional[str] = None, inplace: bool = False ) -> 'Schedule': if inplace: return self._mutable_insert(start_time, schedule) return self._immutable_insert(start_time, schedule, name=name) def _mutable_insert(self, start_time: int, schedule: Union['Schedule', Instruction] ) -> 'Schedule': self._add_timeslots(start_time, schedule) self.__children.append((start_time, schedule)) self._update_parameter_table(schedule) return self def _immutable_insert(self, start_time: int, schedule: Union['Schedule', Instruction], name: Optional[str] = None, ) -> 'Schedule': if name is None: name = self.name new_sched = Schedule(name=name) new_sched._mutable_insert(0, self) new_sched._mutable_insert(start_time, schedule) return new_sched def append(self, schedule: Union['Schedule', Instruction], name: Optional[str] = None, inplace: bool = False) -> 'Schedule': common_channels = set(self.channels) & set(schedule.channels) time = self.ch_stop_time(*common_channels) return self.insert(time, schedule, name=name, inplace=inplace) def flatten(self) -> 'Schedule': return Schedule(*self.instructions, name=self.name) def filter(self, *filter_funcs: List[Callable], channels: Optional[Iterable[Channel]] = None, instruction_types=None, time_ranges: Optional[Iterable[Tuple[int, int]]] = None, intervals: Optional[Iterable[Interval]] = None) -> 'Schedule': composed_filter = self._construct_filter(*filter_funcs, channels=channels, instruction_types=instruction_types, time_ranges=time_ranges, intervals=intervals) return self._apply_filter(composed_filter, new_sched_name="{name}".format(name=self.name)) def exclude(self, *filter_funcs: List[Callable], channels: Optional[Iterable[Channel]] = None, instruction_types=None, time_ranges: Optional[Iterable[Tuple[int, int]]] = None, intervals: 
Optional[Iterable[Interval]] = None) -> 'Schedule': composed_filter = self._construct_filter(*filter_funcs, channels=channels, instruction_types=instruction_types, time_ranges=time_ranges, intervals=intervals) return self._apply_filter(lambda x: not composed_filter(x), new_sched_name="{name}".format(name=self.name)) def _apply_filter(self, filter_func: Callable, new_sched_name: str) -> 'Schedule': subschedules = self.flatten()._children valid_subschedules = [sched for sched in subschedules if filter_func(sched)] return Schedule(*valid_subschedules, name=new_sched_name) def _construct_filter(self, *filter_funcs: List[Callable], channels: Optional[Iterable[Channel]] = None, instruction_types: Optional[Iterable[Instruction]] = None, time_ranges: Optional[Iterable[Tuple[int, int]]] = None, intervals: Optional[Iterable[Interval]] = None) -> Callable: def if_scalar_cast_to_list(to_list): try: iter(to_list) except TypeError: to_list = [to_list] return to_list def only_channels(channels: Union[Set[Channel], Channel]) -> Callable: channels = if_scalar_cast_to_list(channels) def channel_filter(time_inst) -> bool: return any([chan in channels for chan in time_inst[1].channels]) return channel_filter def only_instruction_types(types: Union[Iterable[abc.ABCMeta], abc.ABCMeta]) -> Callable: types = if_scalar_cast_to_list(types) def instruction_filter(time_inst) -> bool: return isinstance(time_inst[1], tuple(types)) return instruction_filter def only_intervals(ranges: Union[Iterable[Interval], Interval]) -> Callable: ranges = if_scalar_cast_to_list(ranges) def interval_filter(time_inst) -> bool: for i in ranges: inst_start = time_inst[0] inst_stop = inst_start + time_inst[1].duration if i[0] <= inst_start and inst_stop <= i[1]: return True return False return interval_filter filter_func_list = list(filter_funcs) if channels is not None: filter_func_list.append(only_channels(channels)) if instruction_types is not None: 
filter_func_list.append(only_instruction_types(instruction_types)) if time_ranges is not None: filter_func_list.append(only_intervals(time_ranges)) if intervals is not None: filter_func_list.append(only_intervals(intervals)) return lambda x: all([filter_func(x) for filter_func in filter_func_list]) def _add_timeslots(self, time: int, schedule: Union['Schedule', Instruction]) -> None: if not np.issubdtype(type(time), np.integer): raise PulseError("Schedule start time must be an integer.") other_timeslots = _get_timeslots(schedule) self._duration = max(self._duration, time + schedule.duration) for channel in schedule.channels: if channel not in self._timeslots: if time == 0: self._timeslots[channel] = copy.copy(other_timeslots[channel]) else: self._timeslots[channel] = [(i[0] + time, i[1] + time) for i in other_timeslots[channel]] continue for idx, interval in enumerate(other_timeslots[channel]): if interval[0] + time >= self._timeslots[channel][-1][1]: self._timeslots[channel].extend( [(i[0] + time, i[1] + time) for i in other_timeslots[channel][idx:]]) break try: interval = (interval[0] + time, interval[1] + time) index = _find_insertion_index(self._timeslots[channel], interval) self._timeslots[channel].insert(index, interval) except PulseError: raise PulseError( "Schedule(name='{new}') cannot be inserted into Schedule(name='{old}') at " "time {time} because its instruction on channel {ch} scheduled from time " "{t0} to {tf} overlaps with an existing instruction." 
"".format(new=schedule.name or '', old=self.name or '', time=time, ch=channel, t0=interval[0], tf=interval[1])) _check_nonnegative_timeslot(self._timeslots) def _remove_timeslots(self, time: int, schedule: Union['Schedule', Instruction]): if not isinstance(time, int): raise PulseError("Schedule start time must be an integer.") for channel in schedule.channels: if channel not in self._timeslots: raise PulseError( 'The channel {} is not present in the schedule'.format(channel)) channel_timeslots = self._timeslots[channel] other_timeslots = _get_timeslots(schedule) for interval in other_timeslots[channel]: if channel_timeslots: interval = (interval[0] + time, interval[1] + time) index = _interval_index(channel_timeslots, interval) if channel_timeslots[index] == interval: channel_timeslots.pop(index) continue raise PulseError( "Cannot find interval ({t0}, {tf}) to remove from " "channel {ch} in Schedule(name='{name}').".format( ch=channel, t0=interval[0], tf=interval[1], name=schedule.name)) if not channel_timeslots: self._timeslots.pop(channel) def _replace_timeslots(self, time: int, old: Union['Schedule', Instruction], new: Union['Schedule', Instruction]): self._remove_timeslots(time, old) self._add_timeslots(time, new) def replace(self, old: Union['Schedule', Instruction], new: Union['Schedule', Instruction], inplace: bool = False, ) -> 'Schedule': new_children = [] for time, child in self._children: if child == old: new_children.append((time, new)) if inplace: self._replace_timeslots(time, old, new) else: new_children.append((time, child)) if inplace: self.__children = new_children self._parameter_table.clear() for _, child in new_children: self._update_parameter_table(child) return self else: try: return Schedule(*new_children) except PulseError as err: raise PulseError( 'Replacement of {old} with {new} results in ' 'overlapping instructions.'.format( old=old, new=new)) from err @property def parameters(self) -> Set: return set(self._parameter_table.keys()) def 
is_parameterized(self) -> bool: return bool(self.parameters) def assign_parameters(self, value_dict: Dict[ParameterExpression, ParameterValueType], ) -> 'Schedule': for parameter in self.parameters: if parameter not in value_dict: continue value = value_dict[parameter] for inst in self._parameter_table[parameter]: inst.assign_parameters({parameter: value}) entry = self._parameter_table.pop(parameter) if isinstance(value, ParameterExpression): for new_parameter in value.parameters: if new_parameter in self._parameter_table: new_entry = set(entry + self._parameter_table[new_parameter]) self._parameter_table[new_parameter] = list(new_entry) else: self._parameter_table[new_parameter] = entry for chan in copy.copy(self._timeslots): if isinstance(chan.index, ParameterExpression): chan_timeslots = self._timeslots.pop(chan) new_channel = chan for param, value in value_dict.items(): if param in new_channel.parameters: new_channel = new_channel.assign(param, value) # Merge with existing channel if new_channel in self._timeslots: sched = Schedule() sched._timeslots = {new_channel: chan_timeslots} self._add_timeslots(0, sched) # Or add back under the new name else: self._timeslots[new_channel] = chan_timeslots return self def get_parameters(self, parameter_name: str) -> List[Parameter]: return [param for param in self.parameters if param.name == parameter_name] def _update_parameter_table(self, schedule: 'Schedule'): schedule = schedule.flatten() for _, inst in schedule.instructions: for param in inst.parameters: self._parameter_table[param].append(inst) def draw(self, dt: Any = None, # deprecated style: Optional[Dict[str, Any]] = None, filename: Any = None, # deprecated interp_method: Any = None, # deprecated scale: Any = None, # deprecated channel_scales: Any = None, # deprecated plot_all: Any = None, # deprecated plot_range: Any = None, # deprecated interactive: Any = None, # deprecated table: Any = None, # deprecated label: Any = None, # deprecated framechange: Any = None, 
# deprecated channels: Any = None, # deprecated show_framechange_channels: Any = None, # deprecated draw_title: Any = None, # deprecated backend=None, # importing backend causes cyclic import time_range: Optional[Tuple[int, int]] = None, time_unit: str = 'dt', disable_channels: Optional[List[Channel]] = None, show_snapshot: bool = True, show_framechange: bool = True, show_waveform_info: bool = True, show_barrier: bool = True, plotter: str = 'mpl2d', axis: Optional[Any] = None): # pylint: disable=cyclic-import, missing-return-type-doc from qiskit.visualization import pulse_drawer_v2, SchedStyle legacy_args = {'dt': dt, 'filename': filename, 'interp_method': interp_method, 'scale': scale, 'channel_scales': channel_scales, 'plot_all': plot_all, 'plot_range': plot_range, 'interactive': interactive, 'table': table, 'label': label, 'framechange': framechange, 'channels': channels, 'show_framechange_channels': show_framechange_channels, 'draw_title': draw_title} active_legacy_args = [] for name, legacy_arg in legacy_args.items(): if legacy_arg is not None: active_legacy_args.append(name) if active_legacy_args: warnings.warn('Legacy pulse drawer is deprecated. ' 'Specified arguments {dep_args} are deprecated. ' 'Please check the API document of new pulse drawer ' '`qiskit.visualization.pulse_drawer_v2`.' ''.format(dep_args=', '.join(active_legacy_args)), DeprecationWarning) if filename: warnings.warn('File saving is delegated to the plotter software in new drawer. ' 'If you specify matplotlib plotter family to `plotter` argument, ' 'you can call `savefig` method with the returned Figure object.', DeprecationWarning) if isinstance(style, SchedStyle): style = None warnings.warn('Legacy stylesheet is specified. This is ignored in the new drawer. 
' 'Please check the API documentation for this method.') return pulse_drawer_v2(program=self, style=style, backend=backend, time_range=time_range, time_unit=time_unit, disable_channels=disable_channels, show_snapshot=show_snapshot, show_framechange=show_framechange, show_waveform_info=show_waveform_info, show_barrier=show_barrier, plotter=plotter, axis=axis) def __eq__(self, other: Union['Schedule', Instruction]) -> bool: channels = set(self.channels) other_channels = set(other.channels) # first check channels are the same if channels != other_channels: return False # then verify same number of instructions in each instructions = self.instructions other_instructions = other.instructions if len(instructions) != len(other_instructions): return False # finally check each instruction in `other` is in this schedule for idx, inst in enumerate(other_instructions): # check assumes `Schedule.instructions` is sorted consistently if instructions[idx] != inst: return False return True def __add__(self, other: Union['Schedule', Instruction]) -> 'Schedule': return self.append(other) def __or__(self, other: Union['Schedule', Instruction]) -> 'Schedule': return self.insert(0, other) def __lshift__(self, time: int) -> 'Schedule': return self.shift(time) def __len__(self) -> int: return len(self.instructions) def __repr__(self): name = format(self._name) if self._name else "" instructions = ", ".join([repr(instr) for instr in self.instructions[:50]]) if len(self.instructions) > 25: instructions += ", ..." return 'Schedule({}, name="{}")'.format(instructions, name) class ParameterizedSchedule: def __init__(self, *schedules, parameters: Optional[Dict[str, Union[float, complex]]] = None, name: Optional[str] = None): warnings.warn('ParameterizedSchedule is deprecated. 
Use Schedule with ' 'circuit.Parameter objects.', DeprecationWarning) full_schedules = [] parameterized = [] parameters = parameters or [] self.name = name or '' # partition schedules into callable and schedules for schedule in schedules: if isinstance(schedule, ParameterizedSchedule): parameterized.append(schedule) parameters += schedule.parameters elif callable(schedule): parameterized.append(schedule) elif isinstance(schedule, Schedule): full_schedules.append(schedule) else: raise PulseError('Input type: {} not supported'.format(type(schedule))) self._parameterized = tuple(parameterized) self._schedules = tuple(full_schedules) self._parameters = tuple(sorted(set(parameters))) @property def parameters(self) -> Tuple[str]: return self._parameters def bind_parameters(self, *args: Union[int, float, complex, ParameterExpression], **kwargs: Union[int, float, complex, ParameterExpression]) -> Schedule: bound_schedule = Schedule(name=self.name) schedules = list(self._schedules) named_parameters = {} if args: for key, val in zip(self.parameters, args): named_parameters[key] = val if kwargs: for key, val in kwargs.items(): if key in self.parameters: if key not in named_parameters.keys(): named_parameters[key] = val else: raise PulseError("%s got multiple values for argument '%s'" % (self.__class__.__name__, key)) else: raise PulseError("%s got an unexpected keyword argument '%s'" % (self.__class__.__name__, key)) for param_sched in self._parameterized: # recursively call until based callable is reached if isinstance(param_sched, type(self)): predefined = param_sched.parameters else: # assuming no other parameterized instructions predefined = self.parameters sub_params = {k: v for k, v in named_parameters.items() if k in predefined} schedules.append(param_sched(**sub_params)) # construct evaluated schedules for sched in schedules: if isinstance(sched, tuple): bound_schedule.insert(sched[0], sched[1]) else: bound_schedule |= sched return bound_schedule def __call__(self, 
*args: Union[int, float, complex, ParameterExpression], **kwargs: Union[int, float, complex, ParameterExpression]) -> Schedule: return self.bind_parameters(*args, **kwargs) def _interval_index(intervals: List[Interval], interval: Interval) -> int: index = _locate_interval_index(intervals, interval) found_interval = intervals[index] if found_interval != interval: raise PulseError('The interval: {} does not exist in intervals: {}'.format( interval, intervals )) return index def _locate_interval_index(intervals: List[Interval], interval: Interval, index: int = 0) -> int: if not intervals or len(intervals) == 1: return index mid_idx = len(intervals) // 2 mid = intervals[mid_idx] if interval[1] <= mid[0] and (interval != mid): return _locate_interval_index(intervals[:mid_idx], interval, index=index) else: return _locate_interval_index(intervals[mid_idx:], interval, index=index + mid_idx) def _find_insertion_index(intervals: List[Interval], new_interval: Interval) -> int: index = _locate_interval_index(intervals, new_interval) if index < len(intervals): if _overlaps(intervals[index], new_interval): raise PulseError("New interval overlaps with existing.") return index if new_interval[1] <= intervals[index][0] else index + 1 return index def _overlaps(first: Interval, second: Interval) -> bool: if first[0] == second[0] == second[1]: # They fail to overlap if one of the intervals has duration 0 return False if first[0] > second[0]: first, second = second, first return second[0] < first[1] def _check_nonnegative_timeslot(timeslots: TimeSlots): for chan, chan_timeslots in timeslots.items(): if chan_timeslots: if chan_timeslots[0][0] < 0: raise PulseError( "An instruction on {} has a negative " " starting time.".format(chan)) def _get_timeslots(schedule: Union[Instruction, Schedule]) -> TimeSlots: if isinstance(schedule, Instruction): duration = schedule.duration instruction_duration_validation(duration) timeslots = {channel: [(0, duration)] for channel in schedule.channels} 
elif isinstance(schedule, Schedule): timeslots = schedule.timeslots else: raise PulseError('Invalid schedule type {} is specified.'.format(type(schedule))) return timeslots
true
true
f70c650c83c204939534be015e5d5136526af01e
38,121
py
Python
microsetta_admin/server.py
microsetta/microsetta-admin
1ba6787c0315a74d50cafd722dbbe044d507c07f
[ "BSD-3-Clause" ]
null
null
null
microsetta_admin/server.py
microsetta/microsetta-admin
1ba6787c0315a74d50cafd722dbbe044d507c07f
[ "BSD-3-Clause" ]
null
null
null
microsetta_admin/server.py
microsetta/microsetta-admin
1ba6787c0315a74d50cafd722dbbe044d507c07f
[ "BSD-3-Clause" ]
null
null
null
import jwt from flask import render_template, Flask, request, session, send_file import secrets from datetime import datetime import io from jwt import PyJWTError from werkzeug.exceptions import BadRequest from werkzeug.utils import redirect import pandas as pd from microsetta_admin import metadata_util, upload_util from microsetta_admin.config_manager import SERVER_CONFIG from microsetta_admin._api import APIRequest import importlib.resources as pkg_resources TOKEN_KEY_NAME = 'token' SEND_EMAIL_CHECKBOX_DEFAULT_NAME = 'send_email' PUB_KEY = pkg_resources.read_text( 'microsetta_admin', "authrocket.pubkey") DUMMY_SELECT_TEXT = '-------' RECEIVED_TYPE_DROPDOWN = \ [DUMMY_SELECT_TEXT, "Blood (skin prick)", "Saliva", "Stool", "Sample Type Unclear (Swabs Included)"] VALID_STATUS = "sample-is-valid" NO_SOURCE_STATUS = "no-associated-source" NO_ACCOUNT_STATUS = "no-registered-account" NO_COLLECTION_INFO_STATUS = "no-collection-info" INCONSISTENT_SAMPLE_STATUS = "sample-has-inconsistencies" UNKNOWN_VALIDITY_STATUS = "received-unknown-validity" STATUS_OPTIONS = [DUMMY_SELECT_TEXT, VALID_STATUS, NO_SOURCE_STATUS, NO_ACCOUNT_STATUS, NO_COLLECTION_INFO_STATUS, INCONSISTENT_SAMPLE_STATUS, UNKNOWN_VALIDITY_STATUS] API_PROJECTS_URL = '/api/admin/projects' def handle_pyjwt(pyjwt_error): # PyJWTError (Aka, anything wrong with token) will force user to log out # and log in again return redirect('/logout') def parse_jwt(token): """ Raises ------ jwt.PyJWTError If the token is invalid """ decoded = jwt.decode(token, PUB_KEY, algorithms=['RS256'], verify=True) return decoded def build_login_variables(): # Anything that renders sitebase.html must pass down these variables to # jinja2 token_info = None if TOKEN_KEY_NAME in session: # If user leaves the page open, the token can expire before the # session, so if our token goes back we need to force them to login # again. 
token_info = parse_jwt(session[TOKEN_KEY_NAME]) vars = { 'endpoint': SERVER_CONFIG["endpoint"], 'ui_endpoint': SERVER_CONFIG["ui_endpoint"], 'authrocket_url': SERVER_CONFIG["authrocket_url"] } if token_info is not None: vars['email'] = token_info['email'] return vars def build_app(): # Create the application instance app = Flask(__name__) flask_secret = SERVER_CONFIG["FLASK_SECRET_KEY"] if flask_secret is None: print("WARNING: FLASK_SECRET_KEY must be set to run with gUnicorn") flask_secret = secrets.token_urlsafe(16) app.secret_key = flask_secret app.config['SESSION_TYPE'] = 'memcached' app.config['SESSION_COOKIE_NAME'] = 'session-microsetta-admin' # Set mapping from exception type to response code app.register_error_handler(PyJWTError, handle_pyjwt) return app app = build_app() @app.context_processor def utility_processor(): def format_timestamp(timestamp_str): if not timestamp_str: return "None" datetime_obj = datetime.fromisoformat(timestamp_str) return datetime_obj.strftime("%Y %B %d %H:%M:%S") return dict(format_timestamp=format_timestamp) @app.route('/') def home(): return render_template('sitebase.html', **build_login_variables()) @app.route('/search', methods=['GET']) def search(): return _search() @app.route('/search/sample', methods=['GET', 'POST']) def search_sample(): return _search('samples') @app.route('/search/kit', methods=['GET', 'POST']) def search_kit(): return _search('kit') @app.route('/search/email', methods=['GET', 'POST']) def search_email(): return _search('account') def _search(resource=None): if request.method == 'GET': return render_template('search.html', **build_login_variables()) elif request.method == 'POST': query = request.form['search_%s' % resource] status, result = APIRequest.get( '/api/admin/search/%s/%s' % (resource, query)) if status == 404: result = {'error_message': "Query not found"} return render_template('search_result.html', **build_login_variables(), result=result), 200 elif status == 200: return 
render_template('search_result.html', **build_login_variables(), resource=resource, result=result), 200 else: return result def _translate_nones(a_dict, do_none_to_str): # Note: this ISN'T a deep copy. This function is NOT set up # for recursing through a multi-layer dictionary result = a_dict.copy() for k, v in result.items(): if do_none_to_str and v is None: result[k] = "" elif not do_none_to_str and v == '': result[k] = None return result def _get_projects(include_stats, is_active): projects_uri = API_PROJECTS_URL + f"?include_stats={include_stats}" if is_active is not None: projects_uri += f"&is_active={is_active}" status, projects_output = APIRequest.get(projects_uri) if status >= 400: result = {'error_message': f"Unable to load project list: " f"{projects_uri}"} else: cleaned_projects = [_translate_nones(x, True) for x in projects_output] # if we're not using full project stats, sort # alphabetically by project name if not include_stats: cleaned_projects = sorted(cleaned_projects, key=lambda k: k['project_name']) result = {'projects': cleaned_projects} return status, result @app.route('/manage_projects', methods=['GET', 'POST']) def manage_projects(): result = None is_active = request.args.get('is_active', None) if request.method == 'POST': model = {x: request.form[x] for x in request.form} project_id = model.pop('project_id') model['is_microsetta'] = model.get('is_microsetta', '') == 'true' model['bank_samples'] = model.get('bank_samples', '') == 'true' model = _translate_nones(model, False) if project_id.isdigit(): # update (put) an existing project action = "update" status, api_output = APIRequest.put( '{}/{}'.format(API_PROJECTS_URL, project_id), json=model) else: # create (post) a new project action = "create" status, api_output = APIRequest.post( API_PROJECTS_URL, json=model) # if api post or put failed if status >= 400: result = {'error_message': f'Unable to {action} project.'} # end if post # if the above work (if any) didn't produce an error message, 
return # the projects list if result is None: _, result = _get_projects(include_stats=True, is_active=is_active) return render_template('manage_projects.html', **build_login_variables(), result=result), 200 @app.route('/email_stats', methods=['GET', 'POST']) def email_stats(): _, result = _get_projects(include_stats=False, is_active=True) projects = result.get('projects') if request.method == 'GET': project = request.args.get('project', None) email = request.args.get('email') if email is None: # They want to search for emails, show them the search dialog return render_template("email_stats_pulldown.html", **build_login_variables(), resource=None, search_error=None, projects=projects) emails = [email, ] elif request.method == 'POST': project = request.form.get('project', None) emails, upload_err = upload_util.parse_request_csv_col( request, 'file', 'email' ) if upload_err is not None: return render_template('email_stats_pulldown.html', **build_login_variables(), resource=None, search_error=[{'error': upload_err}], projects=projects) else: raise BadRequest() if project == "": project = None # de-duplicate emails = list({e.lower() for e in emails}) status, result = APIRequest.post( '/api/admin/account_email_summary', json={ "emails": emails, "project": project }) if status != 200: return render_template('email_stats_pulldown.html', search_error=[{'error': result}], resource=None, **build_login_variables(), projects=projects) # At a minimum, our table will display these columns. 
# We may show additional info depending on what comes back from the request base_data_template = { 'email': 'XXX', 'summary': 'XXX', 'account_id': 'XXX', 'creation_time': 'XXX', 'kit_name': 'XXX', 'project': 'XXX', 'unclaimed-samples-in-kit': 0, 'never-scanned': 0, 'sample-is-valid': 0, 'no-associated-source': 0, 'no-registered-account': 0, 'no-collection-info': 0, 'sample-has-inconsistencies': 0, 'received-unknown-validity': 0 } df = pd.DataFrame([base_data_template] + result) df = df.drop(0) # remove the template row numeric_cols = [ "unclaimed-samples-in-kit", "never-scanned", "sample-is-valid", "no-associated-source", "no-registered-account", "no-collection-info", "sample-has-inconsistencies", "received-unknown-validity" ] df[numeric_cols] = df[numeric_cols].apply(pd.to_numeric) df[numeric_cols] = df[numeric_cols].fillna(0) def urlify_account_id(id_): if pd.isnull(id_): return "No associated account" else: ui_endpoint = SERVER_CONFIG['ui_endpoint'] account_url = f"{ui_endpoint}/accounts/{id_}" return f'<a target="_blank" href="{account_url}">{id_}</a>' # see https://stackoverflow.com/questions/20035518/insert-a-link-inside-a-pandas-table # noqa df['account_id'] = df["account_id"].apply(urlify_account_id) return render_template("email_stats_pulldown.html", search_error=None, resource=df, **build_login_variables(), projects=projects) @app.route('/per_sample_summary', methods=['GET', 'POST']) def per_sample_summary(): # get a list of all projects in the system _, result = _get_projects(include_stats=False, is_active=True) projects = result.get('projects') # filter out any projects that don't belong to Microsetta projects = [x for x in projects if x['is_microsetta'] is True] # build a list of dictionaries with just the project id and the project # name. 
projects = [{'project_name': x['project_name'], 'project_id': x['project_id']} for x in projects] # determine if user wants sample ids stripped strip_sampleid = request.form.get('strip_sampleid', 'off') strip_sampleid = strip_sampleid.lower() == 'on' if request.method == 'GET': # If user arrived via GET then they are either here w/out # querying and they simply need the default webpage, or they are # querying with either a list of barcodes, or with a project id. # look for both parameters to determine which state we are in. sample_barcode = request.args.get('sample_barcode') project_id = request.args.get('project_id') if sample_barcode is None and project_id is None: # user just wants the default page. return render_template('per_sample_summary.html', resource=None, projects=projects, **build_login_variables()) if project_id is not None: # user wants to get summaries on all samples in a project. payload = {'project_id': project_id} status, result = APIRequest.post('/api/admin/account_barcode_summa' 'ry?strip_sampleid=False', json=payload) if status == 200: if result['partial_result'] is True: unprocessed_barcodes = result['unprocessed_barcodes'] else: unprocessed_barcodes = None resource = pd.DataFrame(result['samples']) order = ['sampleid', 'project', 'account-email', 'source-email', 'source-type', 'site-sampled', 'sample-status', 'sample-received', 'ffq-taken', 'ffq-complete', 'vioscreen_username'] order.extend(sorted(set(resource.columns) - set(order))) resource = resource[order] if unprocessed_barcodes: return render_template('per_sample_summary.html', resource=resource, projects=projects, error_message="Too many barcodes. 
S" "erver processed only" " the first 1000.", **build_login_variables()) else: return render_template('per_sample_summary.html', resource=resource, projects=projects, **build_login_variables()) else: return render_template('per_sample_summary.html', resource=None, projects=projects, error_message=result, **build_login_variables()) # if we are here then the user is querying using barcodes and we # simply need to set up the query below to perform. sample_barcodes = [sample_barcode, ] else: # assume POST, since there are only two methods defined in route. # if we are here, it is because the user is querying using an uploaded # file containing sample names. sample_barcodes, err = upload_util.parse_request_csv_col(request, 'file', 'sample_name') if err is not None: # there was an error. abort early. return render_template('per_sample_summary.html', resource=None, projects=projects, **build_login_variables(), search_error=[{'error': err}]) # perform the main query. payload = {'sample_barcodes': sample_barcodes} status, result = APIRequest.post('/api/admin/account_barcode_summary?stri' 'p_sampleid=%s' % str(strip_sampleid), json=payload) if status == 200: if result['partial_result'] is True: unprocessed_barcodes = result['unprocessed_barcodes'] else: unprocessed_barcodes = None resource = pd.DataFrame(result['samples']) order = ['sampleid', 'project', 'account-email', 'source-email', 'source-type', 'site-sampled', 'sample-status', 'sample-received', 'ffq-taken', 'ffq-complete', 'vioscreen_username'] order.extend(sorted(set(resource.columns) - set(order))) resource = resource[order] if unprocessed_barcodes: return render_template('per_sample_summary.html', resource=resource, projects=projects, error_message="Too many barcodes. 
S" "erver processed only" " the first 1000.", **build_login_variables()) else: return render_template('per_sample_summary.html', resource=resource, projects=projects, **build_login_variables()) else: return render_template('per_sample_summary.html', resource=None, projects=projects, error_message=result, **build_login_variables()) def _get_by_sample_barcode(sample_barcodes, strip_sampleid, projects): payload = {'sample_barcodes': sample_barcodes} status, result = APIRequest.post('/api/admin/account_barcode_summary?' 'strip_sampleid=%s' % str(strip_sampleid), json=payload) if status == 200: if result['partial_result'] is True: unprocessed_barcodes = result['unprocessed_barcodes'] else: unprocessed_barcodes = None resource = pd.DataFrame(result['samples']) order = ['sampleid', 'project', 'account-email', 'source-email', 'source-type', 'site-sampled', 'sample-status', 'sample-received', 'ffq-taken', 'ffq-complete', 'vioscreen_username'] order.extend(sorted(set(resource.columns) - set(order))) resource = resource[order] if unprocessed_barcodes: return render_template('per_sample_summary.html', resource=resource, projects=projects, error_message="Too many barcodes. 
S" "erver processed only" " the first 1000.", **build_login_variables()) else: return render_template('per_sample_summary.html', resource=resource, projects=projects, **build_login_variables()) else: return render_template('per_sample_summary.html', resource=None, projects=projects, error_message=result, **build_login_variables()) @app.route('/create_kits', methods=['GET', 'POST']) def new_kits(): _, result = _get_projects(include_stats=False, is_active=True) projects = result.get('projects') if request.method == 'GET': return render_template('create_kits.html', error_message=result.get('error_message'), projects=projects, **build_login_variables()) elif request.method == 'POST': num_kits = int(request.form['num_kits']) num_samples = int(request.form['num_samples']) prefix = request.form['prefix'] selected_project_ids = request.form.getlist('project_ids') payload = {'number_of_kits': num_kits, 'number_of_samples': num_samples, 'project_ids': selected_project_ids} if prefix: payload['kit_id_prefix'] = prefix status, result = APIRequest.post( '/api/admin/create/kits', json=payload) if status != 201: return render_template('create_kits.html', error_message='Failed to create kits', projects=projects, **build_login_variables()) # StringIO/BytesIO based off https://stackoverflow.com/a/45111660 buf = io.StringIO() payload = io.BytesIO() # explicitly expand out the barcode detail kits = pd.DataFrame(result['created']) for i in range(num_samples): kits['barcode_%d' % (i+1)] = [r['sample_barcodes'][i] for _, r in kits.iterrows()] kits.drop(columns='sample_barcodes', inplace=True) kits.to_csv(buf, sep=',', index=False, header=True) payload.write(buf.getvalue().encode('utf-8')) payload.seek(0) buf.close() stamp = datetime.now().strftime('%d%b%Y-%H%M') fname = f'kits-{stamp}.csv' return send_file(payload, as_attachment=True, attachment_filename=fname, mimetype='text/csv') def _check_sample_status(extended_barcode_info): warning = None in_microsetta_project = any( 
[x['is_microsetta'] for x in extended_barcode_info['projects_info']]) # one warning to rule them all; check in order of precendence if not in_microsetta_project: warning = UNKNOWN_VALIDITY_STATUS elif extended_barcode_info['account'] is None: warning = NO_ACCOUNT_STATUS elif extended_barcode_info['source'] is None: warning = NO_SOURCE_STATUS # collection datetime is used as the bellwether for the whole # set of sample collection info because it is relevant to all # kinds of samples (whereas previously used field, sample site, is not # filled when environmental samples are returned). elif extended_barcode_info['sample'].get('datetime_collected') is None: warning = NO_COLLECTION_INFO_STATUS return warning # Set up handlers for the cases, # GET to view the page, # POST to update info for a barcode -AND (possibly)- # email end user about the change in sample status, def _scan_get(sample_barcode, update_error): # If there is no sample_barcode in the GET # they still need to enter one in the box, so show empty page if sample_barcode is None: return render_template('scan.html', **build_login_variables()) # Assuming there is a sample barcode, grab that sample's information status, result = APIRequest.get( '/api/admin/search/samples/%s' % sample_barcode) # If we successfully grab it, show the page to the user if status == 200: # Process result in python because its easier than jinja2. 
status_warning = _check_sample_status(result) # check the latest scan to find the default sample_status for form latest_status = DUMMY_SELECT_TEXT if result['latest_scan']: latest_status = result['latest_scan']['sample_status'] account = result.get('account') events = [] if account: event_status, event_result = APIRequest.get( '/api/admin/events/accounts/%s' % account['id'] ) if event_status != 200: raise Exception("Couldn't pull event history") events = event_result return render_template( 'scan.html', **build_login_variables(), barcode_info=result["barcode_info"], projects_info=result['projects_info'], scans_info=result['scans_info'], latest_status=latest_status, dummy_status=DUMMY_SELECT_TEXT, status_options=STATUS_OPTIONS, send_email=session.get(SEND_EMAIL_CHECKBOX_DEFAULT_NAME, True), sample_info=result['sample'], extended_info=result, status_warning=status_warning, update_error=update_error, received_type_dropdown=RECEIVED_TYPE_DROPDOWN, source=result['source'], events=events ) elif status == 401: # If we fail due to unauthorized, need the user to log in again return redirect('/logout') elif status == 404: # If we fail due to not found, need to tell the user to pick a diff # barcode return render_template( 'scan.html', **build_login_variables(), search_error="Barcode %s Not Found" % sample_barcode, update_error=update_error, received_type_dropdown=RECEIVED_TYPE_DROPDOWN ) else: raise BadRequest() def _scan_post_update_info(sample_barcode, technician_notes, sample_status, action, issue_type, template, received_type, recorded_type): ### # Bugfix Part 1 for duplicate emails being sent. Theory is that client is # out of sync due to hitting back button after a scan has changed # state. # Can't test if client is up to date without ETags, so for right now, # we just validate whether or not they should send an email, duplicating # the client log. (This can still break with multiple admin clients, # but that is unlikely at the moment.) 
latest_status = None # TODO: Replace this with ETags! status, result = APIRequest.get( '/api/admin/search/samples/%s' % sample_barcode) if result['latest_scan']: latest_status = result['latest_scan']['sample_status'] ### # Do the actual update status, response = APIRequest.post( '/api/admin/scan/%s' % sample_barcode, json={ "sample_status": sample_status, "technician_notes": technician_notes } ) # if the update failed, keep track of the error so it can be displayed if status != 201: update_error = response return _scan_get(sample_barcode, update_error) else: update_error = None # If we're not supposed to send an email, go back to GET if action != "send_email": return _scan_get(sample_barcode, update_error) ### # Bugfix Part 2 for duplicate emails being sent. if sample_status == latest_status: # This is what we'll hit if javascript thinks it's updating status # but is out of sync with the database. update_error = "Ignoring Send Email, sample_status would " \ "not have been updated (Displayed page was out of " \ "sync)" return _scan_get(sample_barcode, update_error) ### # This is what we'll hit if there are no email templates to send for # the new sample status (or if we screw up javascript side :D ) if template is None: update_error = "Cannot Send Email: No Issue Type Specified " \ "(or no issue types available)" return _scan_get(sample_barcode, update_error) # Otherwise, send out an email to the end user status, response = APIRequest.post( '/api/admin/email', json={ "issue_type": issue_type, "template": template, "template_args": { "sample_barcode": sample_barcode, "recorded_type": recorded_type, "received_type": received_type } } ) # if the email failed to send, keep track of the error # so it can be displayed if status != 200: update_error = response else: update_error = None return _scan_get(sample_barcode, update_error) @app.route('/scan', methods=['GET', 'POST']) def scan(): # Now that the handlers are set up, parse the request to determine what # to do. 
# If its a get, grab the sample_barcode from the query string rather than # form parameters if request.method == 'GET': sample_barcode = request.args.get('sample_barcode') return _scan_get(sample_barcode, None) # If its a post, make the changes, then refresh the page if request.method == 'POST': # Without some extra ajax, we can't persist the send_email checkbox # until they actually post the form send_email = request.form.get('send_email', False) session[SEND_EMAIL_CHECKBOX_DEFAULT_NAME] = send_email sample_barcode = request.form['sample_barcode'] technician_notes = request.form['technician_notes'] sample_status = request.form['sample_status'] action = request.form.get('action') issue_type = request.form.get('issue_type') template = request.form.get('template') received_type = request.form.get('received_type') recorded_type = request.form.get('recorded_type') return _scan_post_update_info(sample_barcode, technician_notes, sample_status, action, issue_type, template, received_type, recorded_type) @app.route('/metadata_pulldown', methods=['GET', 'POST']) def metadata_pulldown(): allow_missing = request.form.get('allow_missing_samples', False) if request.method == 'GET': sample_barcode = request.args.get('sample_barcode') # If there is no sample_barcode in the GET # they still need to enter one in the box, so show empty page if sample_barcode is None: return render_template('metadata_pulldown.html', **build_login_variables()) sample_barcodes = [sample_barcode] elif request.method == 'POST': sample_barcodes, upload_err = upload_util.parse_request_csv_col( request, 'file', 'sample_name' ) if upload_err is not None: return render_template('metadata_pulldown.html', **build_login_variables(), search_error=[{'error': upload_err}]) else: raise BadRequest() df, errors = metadata_util.retrieve_metadata(sample_barcodes) # Strangely, these api requests are returning an html error page rather # than a machine parseable json error response object with message. 
# This is almost certainly due to error handling for the cohosted minimal # client. In future, we should just pass down whatever the api says here. if len(errors) == 0 or allow_missing: df = metadata_util.drop_private_columns(df) # TODO: Streaming direct from pandas is a pain. Need to search for # better ways to iterate and chunk this file as we generate it strstream = io.StringIO() df.to_csv(strstream, sep='\t', index=True, header=True) # TODO: utf-8 or utf-16 encoding?? bytestream = io.BytesIO() bytestream.write(strstream.getvalue().encode('utf-8')) bytestream.seek(0) strstream.close() return send_file(bytestream, mimetype="text/tab-separated-values", as_attachment=True, attachment_filename="metadata_pulldown.tsv", add_etags=False, cache_timeout=None, conditional=False, last_modified=None, ) else: return render_template('metadata_pulldown.html', **build_login_variables(), info={'barcodes': sample_barcodes}, search_error=errors) @app.route('/submit_daklapack_order', methods=['GET']) def submit_daklapack_order(): error_msg_key = "error_message" def return_error(msg): return render_template('submit_daklapack_order.html', **build_login_variables(), error_message=msg) status, dak_articles_output = APIRequest.get( '/api/admin/daklapack_articles') if status >= 400: return return_error("Unable to load daklapack articles list.") status, projects_output = _get_projects(include_stats=False, is_active=True) if status >= 400: return return_error(projects_output[error_msg_key]) return render_template('submit_daklapack_order.html', **build_login_variables(), error_message=None, dummy_status=DUMMY_SELECT_TEXT, dak_articles=dak_articles_output, contact_phone_number=SERVER_CONFIG[ "order_contact_phone"], projects=projects_output['projects']) @app.route('/submit_daklapack_order', methods=['POST']) def post_submit_daklapack_order(): def return_error(msg): return render_template('submit_daklapack_order.html', **build_login_variables(), error_message=msg) error_message = 
success_submissions = failure_submissions = headers = None expected_headers = ["firstName", "lastName", "address1", "insertion", "address2", "postalCode", "city", "state", "country", "countryCode"] # get required fields; cast where expected by api phone_number = request.form['contact_phone_number'] project_ids_list = list(map(int, request.form.getlist('projects'))) dak_article_code = request.form['dak_article_code'] article_quantity = int(request.form['quantity']) file = request.files['addresses_file'] # get optional fields or defaults planned_send_str = request.form.get('planned_send_date') planned_send_date = planned_send_str if planned_send_str else None description = request.form.get('description') fedex_ref_1 = request.form.get('fedex_ref_1') fedex_ref_2 = request.form.get('fedex_ref_2') fedex_ref_3 = request.form.get('fedex_ref_3') try: # NB: import everything as a string so that zip codes beginning with # zero (e.g., 06710) don't get silently cast to numbers if file.filename.endswith('xls'): addresses_df = pd.read_excel(file, dtype=str) elif file.filename.endswith('xlsx'): addresses_df = pd.read_excel(file, engine='openpyxl', dtype=str) else: raise ValueError(f"Unrecognized extension on putative excel " f"filename: {file.filename}") headers = list(addresses_df.columns) except Exception as e: # noqa return return_error('Could not parse addresses file') if headers != expected_headers: return return_error(f"Received column names {headers} do " f"not match expected column names" f" {expected_headers}") # add (same) contact phone number to every address addresses_df['phone'] = phone_number addresses_df = addresses_df.fillna("") temp_dict = addresses_df.to_dict(orient='index') addresses_list = [temp_dict[n] for n in range(len(temp_dict))] status, post_output = APIRequest.post( '/api/admin/daklapack_orders', json={ "project_ids": project_ids_list, "article_code": dak_article_code, "quantity": article_quantity, "addresses": addresses_list, "planned_send_date": 
planned_send_date, "description": description, "fedex_ref_1": fedex_ref_1, "fedex_ref_2": fedex_ref_2, "fedex_ref_3": fedex_ref_3 } ) # if the post failed, keep track of the error so it can be displayed if status != 200: error_message = post_output else: order_submissions = post_output["order_submissions"] success_submissions = [x for x in order_submissions if x["order_success"]] failure_submissions = [x for x in order_submissions if not x["order_success"]] return render_template('submit_daklapack_order.html', **build_login_variables(), error_message=error_message, success_submissions=success_submissions, failure_submissions=failure_submissions) @app.route('/authrocket_callback') def authrocket_callback(): token = request.args.get('token') session[TOKEN_KEY_NAME] = token return redirect("/") @app.route('/logout') def logout(): if TOKEN_KEY_NAME in session: del session[TOKEN_KEY_NAME] return redirect("/") # If we're running in stand alone mode, run the application if __name__ == '__main__': if SERVER_CONFIG["ssl_cert_path"] and SERVER_CONFIG["ssl_key_path"]: ssl_context = ( SERVER_CONFIG["ssl_cert_path"], SERVER_CONFIG["ssl_key_path"] ) else: ssl_context = None app.run( port=SERVER_CONFIG['port'], debug=SERVER_CONFIG['debug'], ssl_context=ssl_context )
38.780264
98
0.574906
import jwt from flask import render_template, Flask, request, session, send_file import secrets from datetime import datetime import io from jwt import PyJWTError from werkzeug.exceptions import BadRequest from werkzeug.utils import redirect import pandas as pd from microsetta_admin import metadata_util, upload_util from microsetta_admin.config_manager import SERVER_CONFIG from microsetta_admin._api import APIRequest import importlib.resources as pkg_resources TOKEN_KEY_NAME = 'token' SEND_EMAIL_CHECKBOX_DEFAULT_NAME = 'send_email' PUB_KEY = pkg_resources.read_text( 'microsetta_admin', "authrocket.pubkey") DUMMY_SELECT_TEXT = '-------' RECEIVED_TYPE_DROPDOWN = \ [DUMMY_SELECT_TEXT, "Blood (skin prick)", "Saliva", "Stool", "Sample Type Unclear (Swabs Included)"] VALID_STATUS = "sample-is-valid" NO_SOURCE_STATUS = "no-associated-source" NO_ACCOUNT_STATUS = "no-registered-account" NO_COLLECTION_INFO_STATUS = "no-collection-info" INCONSISTENT_SAMPLE_STATUS = "sample-has-inconsistencies" UNKNOWN_VALIDITY_STATUS = "received-unknown-validity" STATUS_OPTIONS = [DUMMY_SELECT_TEXT, VALID_STATUS, NO_SOURCE_STATUS, NO_ACCOUNT_STATUS, NO_COLLECTION_INFO_STATUS, INCONSISTENT_SAMPLE_STATUS, UNKNOWN_VALIDITY_STATUS] API_PROJECTS_URL = '/api/admin/projects' def handle_pyjwt(pyjwt_error): return redirect('/logout') def parse_jwt(token): decoded = jwt.decode(token, PUB_KEY, algorithms=['RS256'], verify=True) return decoded def build_login_variables(): token_info = None if TOKEN_KEY_NAME in session: token_info = parse_jwt(session[TOKEN_KEY_NAME]) vars = { 'endpoint': SERVER_CONFIG["endpoint"], 'ui_endpoint': SERVER_CONFIG["ui_endpoint"], 'authrocket_url': SERVER_CONFIG["authrocket_url"] } if token_info is not None: vars['email'] = token_info['email'] return vars def build_app(): app = Flask(__name__) flask_secret = SERVER_CONFIG["FLASK_SECRET_KEY"] if flask_secret is None: print("WARNING: FLASK_SECRET_KEY must be set to run with gUnicorn") flask_secret = secrets.token_urlsafe(16) 
app.secret_key = flask_secret app.config['SESSION_TYPE'] = 'memcached' app.config['SESSION_COOKIE_NAME'] = 'session-microsetta-admin' app.register_error_handler(PyJWTError, handle_pyjwt) return app app = build_app() @app.context_processor def utility_processor(): def format_timestamp(timestamp_str): if not timestamp_str: return "None" datetime_obj = datetime.fromisoformat(timestamp_str) return datetime_obj.strftime("%Y %B %d %H:%M:%S") return dict(format_timestamp=format_timestamp) @app.route('/') def home(): return render_template('sitebase.html', **build_login_variables()) @app.route('/search', methods=['GET']) def search(): return _search() @app.route('/search/sample', methods=['GET', 'POST']) def search_sample(): return _search('samples') @app.route('/search/kit', methods=['GET', 'POST']) def search_kit(): return _search('kit') @app.route('/search/email', methods=['GET', 'POST']) def search_email(): return _search('account') def _search(resource=None): if request.method == 'GET': return render_template('search.html', **build_login_variables()) elif request.method == 'POST': query = request.form['search_%s' % resource] status, result = APIRequest.get( '/api/admin/search/%s/%s' % (resource, query)) if status == 404: result = {'error_message': "Query not found"} return render_template('search_result.html', **build_login_variables(), result=result), 200 elif status == 200: return render_template('search_result.html', **build_login_variables(), resource=resource, result=result), 200 else: return result def _translate_nones(a_dict, do_none_to_str): # for recursing through a multi-layer dictionary result = a_dict.copy() for k, v in result.items(): if do_none_to_str and v is None: result[k] = "" elif not do_none_to_str and v == '': result[k] = None return result def _get_projects(include_stats, is_active): projects_uri = API_PROJECTS_URL + f"?include_stats={include_stats}" if is_active is not None: projects_uri += f"&is_active={is_active}" status, projects_output = 
APIRequest.get(projects_uri) if status >= 400: result = {'error_message': f"Unable to load project list: " f"{projects_uri}"} else: cleaned_projects = [_translate_nones(x, True) for x in projects_output] # if we're not using full project stats, sort if not include_stats: cleaned_projects = sorted(cleaned_projects, key=lambda k: k['project_name']) result = {'projects': cleaned_projects} return status, result @app.route('/manage_projects', methods=['GET', 'POST']) def manage_projects(): result = None is_active = request.args.get('is_active', None) if request.method == 'POST': model = {x: request.form[x] for x in request.form} project_id = model.pop('project_id') model['is_microsetta'] = model.get('is_microsetta', '') == 'true' model['bank_samples'] = model.get('bank_samples', '') == 'true' model = _translate_nones(model, False) if project_id.isdigit(): action = "update" status, api_output = APIRequest.put( '{}/{}'.format(API_PROJECTS_URL, project_id), json=model) else: action = "create" status, api_output = APIRequest.post( API_PROJECTS_URL, json=model) if status >= 400: result = {'error_message': f'Unable to {action} project.'} # the projects list if result is None: _, result = _get_projects(include_stats=True, is_active=is_active) return render_template('manage_projects.html', **build_login_variables(), result=result), 200 @app.route('/email_stats', methods=['GET', 'POST']) def email_stats(): _, result = _get_projects(include_stats=False, is_active=True) projects = result.get('projects') if request.method == 'GET': project = request.args.get('project', None) email = request.args.get('email') if email is None: # They want to search for emails, show them the search dialog return render_template("email_stats_pulldown.html", **build_login_variables(), resource=None, search_error=None, projects=projects) emails = [email, ] elif request.method == 'POST': project = request.form.get('project', None) emails, upload_err = upload_util.parse_request_csv_col( request, 'file', 
'email' ) if upload_err is not None: return render_template('email_stats_pulldown.html', **build_login_variables(), resource=None, search_error=[{'error': upload_err}], projects=projects) else: raise BadRequest() if project == "": project = None # de-duplicate emails = list({e.lower() for e in emails}) status, result = APIRequest.post( '/api/admin/account_email_summary', json={ "emails": emails, "project": project }) if status != 200: return render_template('email_stats_pulldown.html', search_error=[{'error': result}], resource=None, **build_login_variables(), projects=projects) # At a minimum, our table will display these columns. # We may show additional info depending on what comes back from the request base_data_template = { 'email': 'XXX', 'summary': 'XXX', 'account_id': 'XXX', 'creation_time': 'XXX', 'kit_name': 'XXX', 'project': 'XXX', 'unclaimed-samples-in-kit': 0, 'never-scanned': 0, 'sample-is-valid': 0, 'no-associated-source': 0, 'no-registered-account': 0, 'no-collection-info': 0, 'sample-has-inconsistencies': 0, 'received-unknown-validity': 0 } df = pd.DataFrame([base_data_template] + result) df = df.drop(0) # remove the template row numeric_cols = [ "unclaimed-samples-in-kit", "never-scanned", "sample-is-valid", "no-associated-source", "no-registered-account", "no-collection-info", "sample-has-inconsistencies", "received-unknown-validity" ] df[numeric_cols] = df[numeric_cols].apply(pd.to_numeric) df[numeric_cols] = df[numeric_cols].fillna(0) def urlify_account_id(id_): if pd.isnull(id_): return "No associated account" else: ui_endpoint = SERVER_CONFIG['ui_endpoint'] account_url = f"{ui_endpoint}/accounts/{id_}" return f'<a target="_blank" href="{account_url}">{id_}</a>' # see https://stackoverflow.com/questions/20035518/insert-a-link-inside-a-pandas-table # noqa df['account_id'] = df["account_id"].apply(urlify_account_id) return render_template("email_stats_pulldown.html", search_error=None, resource=df, **build_login_variables(), projects=projects) 
@app.route('/per_sample_summary', methods=['GET', 'POST']) def per_sample_summary(): # get a list of all projects in the system _, result = _get_projects(include_stats=False, is_active=True) projects = result.get('projects') # filter out any projects that don't belong to Microsetta projects = [x for x in projects if x['is_microsetta'] is True] projects = [{'project_name': x['project_name'], 'project_id': x['project_id']} for x in projects] strip_sampleid = request.form.get('strip_sampleid', 'off') strip_sampleid = strip_sampleid.lower() == 'on' if request.method == 'GET': sample_barcode = request.args.get('sample_barcode') project_id = request.args.get('project_id') if sample_barcode is None and project_id is None: return render_template('per_sample_summary.html', resource=None, projects=projects, **build_login_variables()) if project_id is not None: payload = {'project_id': project_id} status, result = APIRequest.post('/api/admin/account_barcode_summa' 'ry?strip_sampleid=False', json=payload) if status == 200: if result['partial_result'] is True: unprocessed_barcodes = result['unprocessed_barcodes'] else: unprocessed_barcodes = None resource = pd.DataFrame(result['samples']) order = ['sampleid', 'project', 'account-email', 'source-email', 'source-type', 'site-sampled', 'sample-status', 'sample-received', 'ffq-taken', 'ffq-complete', 'vioscreen_username'] order.extend(sorted(set(resource.columns) - set(order))) resource = resource[order] if unprocessed_barcodes: return render_template('per_sample_summary.html', resource=resource, projects=projects, error_message="Too many barcodes. 
S" "erver processed only" " the first 1000.", **build_login_variables()) else: return render_template('per_sample_summary.html', resource=resource, projects=projects, **build_login_variables()) else: return render_template('per_sample_summary.html', resource=None, projects=projects, error_message=result, **build_login_variables()) sample_barcodes = [sample_barcode, ] else: sample_barcodes, err = upload_util.parse_request_csv_col(request, 'file', 'sample_name') if err is not None: return render_template('per_sample_summary.html', resource=None, projects=projects, **build_login_variables(), search_error=[{'error': err}]) payload = {'sample_barcodes': sample_barcodes} status, result = APIRequest.post('/api/admin/account_barcode_summary?stri' 'p_sampleid=%s' % str(strip_sampleid), json=payload) if status == 200: if result['partial_result'] is True: unprocessed_barcodes = result['unprocessed_barcodes'] else: unprocessed_barcodes = None resource = pd.DataFrame(result['samples']) order = ['sampleid', 'project', 'account-email', 'source-email', 'source-type', 'site-sampled', 'sample-status', 'sample-received', 'ffq-taken', 'ffq-complete', 'vioscreen_username'] order.extend(sorted(set(resource.columns) - set(order))) resource = resource[order] if unprocessed_barcodes: return render_template('per_sample_summary.html', resource=resource, projects=projects, error_message="Too many barcodes. S" "erver processed only" " the first 1000.", **build_login_variables()) else: return render_template('per_sample_summary.html', resource=resource, projects=projects, **build_login_variables()) else: return render_template('per_sample_summary.html', resource=None, projects=projects, error_message=result, **build_login_variables()) def _get_by_sample_barcode(sample_barcodes, strip_sampleid, projects): payload = {'sample_barcodes': sample_barcodes} status, result = APIRequest.post('/api/admin/account_barcode_summary?' 
'strip_sampleid=%s' % str(strip_sampleid), json=payload) if status == 200: if result['partial_result'] is True: unprocessed_barcodes = result['unprocessed_barcodes'] else: unprocessed_barcodes = None resource = pd.DataFrame(result['samples']) order = ['sampleid', 'project', 'account-email', 'source-email', 'source-type', 'site-sampled', 'sample-status', 'sample-received', 'ffq-taken', 'ffq-complete', 'vioscreen_username'] order.extend(sorted(set(resource.columns) - set(order))) resource = resource[order] if unprocessed_barcodes: return render_template('per_sample_summary.html', resource=resource, projects=projects, error_message="Too many barcodes. S" "erver processed only" " the first 1000.", **build_login_variables()) else: return render_template('per_sample_summary.html', resource=resource, projects=projects, **build_login_variables()) else: return render_template('per_sample_summary.html', resource=None, projects=projects, error_message=result, **build_login_variables()) @app.route('/create_kits', methods=['GET', 'POST']) def new_kits(): _, result = _get_projects(include_stats=False, is_active=True) projects = result.get('projects') if request.method == 'GET': return render_template('create_kits.html', error_message=result.get('error_message'), projects=projects, **build_login_variables()) elif request.method == 'POST': num_kits = int(request.form['num_kits']) num_samples = int(request.form['num_samples']) prefix = request.form['prefix'] selected_project_ids = request.form.getlist('project_ids') payload = {'number_of_kits': num_kits, 'number_of_samples': num_samples, 'project_ids': selected_project_ids} if prefix: payload['kit_id_prefix'] = prefix status, result = APIRequest.post( '/api/admin/create/kits', json=payload) if status != 201: return render_template('create_kits.html', error_message='Failed to create kits', projects=projects, **build_login_variables()) buf = io.StringIO() payload = io.BytesIO() kits = pd.DataFrame(result['created']) for i in 
range(num_samples): kits['barcode_%d' % (i+1)] = [r['sample_barcodes'][i] for _, r in kits.iterrows()] kits.drop(columns='sample_barcodes', inplace=True) kits.to_csv(buf, sep=',', index=False, header=True) payload.write(buf.getvalue().encode('utf-8')) payload.seek(0) buf.close() stamp = datetime.now().strftime('%d%b%Y-%H%M') fname = f'kits-{stamp}.csv' return send_file(payload, as_attachment=True, attachment_filename=fname, mimetype='text/csv') def _check_sample_status(extended_barcode_info): warning = None in_microsetta_project = any( [x['is_microsetta'] for x in extended_barcode_info['projects_info']]) if not in_microsetta_project: warning = UNKNOWN_VALIDITY_STATUS elif extended_barcode_info['account'] is None: warning = NO_ACCOUNT_STATUS elif extended_barcode_info['source'] is None: warning = NO_SOURCE_STATUS elif extended_barcode_info['sample'].get('datetime_collected') is None: warning = NO_COLLECTION_INFO_STATUS return warning def _scan_get(sample_barcode, update_error): if sample_barcode is None: return render_template('scan.html', **build_login_variables()) status, result = APIRequest.get( '/api/admin/search/samples/%s' % sample_barcode) # If we successfully grab it, show the page to the user if status == 200: # Process result in python because its easier than jinja2. 
status_warning = _check_sample_status(result) # check the latest scan to find the default sample_status for form latest_status = DUMMY_SELECT_TEXT if result['latest_scan']: latest_status = result['latest_scan']['sample_status'] account = result.get('account') events = [] if account: event_status, event_result = APIRequest.get( '/api/admin/events/accounts/%s' % account['id'] ) if event_status != 200: raise Exception("Couldn't pull event history") events = event_result return render_template( 'scan.html', **build_login_variables(), barcode_info=result["barcode_info"], projects_info=result['projects_info'], scans_info=result['scans_info'], latest_status=latest_status, dummy_status=DUMMY_SELECT_TEXT, status_options=STATUS_OPTIONS, send_email=session.get(SEND_EMAIL_CHECKBOX_DEFAULT_NAME, True), sample_info=result['sample'], extended_info=result, status_warning=status_warning, update_error=update_error, received_type_dropdown=RECEIVED_TYPE_DROPDOWN, source=result['source'], events=events ) elif status == 401: return redirect('/logout') elif status == 404: return render_template( 'scan.html', **build_login_variables(), search_error="Barcode %s Not Found" % sample_barcode, update_error=update_error, received_type_dropdown=RECEIVED_TYPE_DROPDOWN ) else: raise BadRequest() def _scan_post_update_info(sample_barcode, technician_notes, sample_status, action, issue_type, template, received_type, recorded_type): # we just validate whether or not they should send an email, duplicating # the client log. (This can still break with multiple admin clients, # but that is unlikely at the moment.) latest_status = None # TODO: Replace this with ETags! 
status, result = APIRequest.get( '/api/admin/search/samples/%s' % sample_barcode) if result['latest_scan']: latest_status = result['latest_scan']['sample_status'] ### # Do the actual update status, response = APIRequest.post( '/api/admin/scan/%s' % sample_barcode, json={ "sample_status": sample_status, "technician_notes": technician_notes } ) # if the update failed, keep track of the error so it can be displayed if status != 201: update_error = response return _scan_get(sample_barcode, update_error) else: update_error = None # If we're not supposed to send an email, go back to GET if action != "send_email": return _scan_get(sample_barcode, update_error) if sample_status == latest_status: update_error = "Ignoring Send Email, sample_status would " \ "not have been updated (Displayed page was out of " \ "sync)" return _scan_get(sample_barcode, update_error) # the new sample status (or if we screw up javascript side :D ) if template is None: update_error = "Cannot Send Email: No Issue Type Specified " \ "(or no issue types available)" return _scan_get(sample_barcode, update_error) # Otherwise, send out an email to the end user status, response = APIRequest.post( '/api/admin/email', json={ "issue_type": issue_type, "template": template, "template_args": { "sample_barcode": sample_barcode, "recorded_type": recorded_type, "received_type": received_type } } ) # if the email failed to send, keep track of the error # so it can be displayed if status != 200: update_error = response else: update_error = None return _scan_get(sample_barcode, update_error) @app.route('/scan', methods=['GET', 'POST']) def scan(): # Now that the handlers are set up, parse the request to determine what # to do. 
# If its a get, grab the sample_barcode from the query string rather than # form parameters if request.method == 'GET': sample_barcode = request.args.get('sample_barcode') return _scan_get(sample_barcode, None) # If its a post, make the changes, then refresh the page if request.method == 'POST': # Without some extra ajax, we can't persist the send_email checkbox send_email = request.form.get('send_email', False) session[SEND_EMAIL_CHECKBOX_DEFAULT_NAME] = send_email sample_barcode = request.form['sample_barcode'] technician_notes = request.form['technician_notes'] sample_status = request.form['sample_status'] action = request.form.get('action') issue_type = request.form.get('issue_type') template = request.form.get('template') received_type = request.form.get('received_type') recorded_type = request.form.get('recorded_type') return _scan_post_update_info(sample_barcode, technician_notes, sample_status, action, issue_type, template, received_type, recorded_type) @app.route('/metadata_pulldown', methods=['GET', 'POST']) def metadata_pulldown(): allow_missing = request.form.get('allow_missing_samples', False) if request.method == 'GET': sample_barcode = request.args.get('sample_barcode') if sample_barcode is None: return render_template('metadata_pulldown.html', **build_login_variables()) sample_barcodes = [sample_barcode] elif request.method == 'POST': sample_barcodes, upload_err = upload_util.parse_request_csv_col( request, 'file', 'sample_name' ) if upload_err is not None: return render_template('metadata_pulldown.html', **build_login_variables(), search_error=[{'error': upload_err}]) else: raise BadRequest() df, errors = metadata_util.retrieve_metadata(sample_barcodes) if len(errors) == 0 or allow_missing: df = metadata_util.drop_private_columns(df) strstream = io.StringIO() df.to_csv(strstream, sep='\t', index=True, header=True) bytestream = io.BytesIO() bytestream.write(strstream.getvalue().encode('utf-8')) bytestream.seek(0) strstream.close() return 
send_file(bytestream, mimetype="text/tab-separated-values", as_attachment=True, attachment_filename="metadata_pulldown.tsv", add_etags=False, cache_timeout=None, conditional=False, last_modified=None, ) else: return render_template('metadata_pulldown.html', **build_login_variables(), info={'barcodes': sample_barcodes}, search_error=errors) @app.route('/submit_daklapack_order', methods=['GET']) def submit_daklapack_order(): error_msg_key = "error_message" def return_error(msg): return render_template('submit_daklapack_order.html', **build_login_variables(), error_message=msg) status, dak_articles_output = APIRequest.get( '/api/admin/daklapack_articles') if status >= 400: return return_error("Unable to load daklapack articles list.") status, projects_output = _get_projects(include_stats=False, is_active=True) if status >= 400: return return_error(projects_output[error_msg_key]) return render_template('submit_daklapack_order.html', **build_login_variables(), error_message=None, dummy_status=DUMMY_SELECT_TEXT, dak_articles=dak_articles_output, contact_phone_number=SERVER_CONFIG[ "order_contact_phone"], projects=projects_output['projects']) @app.route('/submit_daklapack_order', methods=['POST']) def post_submit_daklapack_order(): def return_error(msg): return render_template('submit_daklapack_order.html', **build_login_variables(), error_message=msg) error_message = success_submissions = failure_submissions = headers = None expected_headers = ["firstName", "lastName", "address1", "insertion", "address2", "postalCode", "city", "state", "country", "countryCode"] phone_number = request.form['contact_phone_number'] project_ids_list = list(map(int, request.form.getlist('projects'))) dak_article_code = request.form['dak_article_code'] article_quantity = int(request.form['quantity']) file = request.files['addresses_file'] planned_send_str = request.form.get('planned_send_date') planned_send_date = planned_send_str if planned_send_str else None description = 
request.form.get('description') fedex_ref_1 = request.form.get('fedex_ref_1') fedex_ref_2 = request.form.get('fedex_ref_2') fedex_ref_3 = request.form.get('fedex_ref_3') try: if file.filename.endswith('xls'): addresses_df = pd.read_excel(file, dtype=str) elif file.filename.endswith('xlsx'): addresses_df = pd.read_excel(file, engine='openpyxl', dtype=str) else: raise ValueError(f"Unrecognized extension on putative excel " f"filename: {file.filename}") headers = list(addresses_df.columns) except Exception as e: # noqa return return_error('Could not parse addresses file') if headers != expected_headers: return return_error(f"Received column names {headers} do " f"not match expected column names" f" {expected_headers}") # add (same) contact phone number to every address addresses_df['phone'] = phone_number addresses_df = addresses_df.fillna("") temp_dict = addresses_df.to_dict(orient='index') addresses_list = [temp_dict[n] for n in range(len(temp_dict))] status, post_output = APIRequest.post( '/api/admin/daklapack_orders', json={ "project_ids": project_ids_list, "article_code": dak_article_code, "quantity": article_quantity, "addresses": addresses_list, "planned_send_date": planned_send_date, "description": description, "fedex_ref_1": fedex_ref_1, "fedex_ref_2": fedex_ref_2, "fedex_ref_3": fedex_ref_3 } ) # if the post failed, keep track of the error so it can be displayed if status != 200: error_message = post_output else: order_submissions = post_output["order_submissions"] success_submissions = [x for x in order_submissions if x["order_success"]] failure_submissions = [x for x in order_submissions if not x["order_success"]] return render_template('submit_daklapack_order.html', **build_login_variables(), error_message=error_message, success_submissions=success_submissions, failure_submissions=failure_submissions) @app.route('/authrocket_callback') def authrocket_callback(): token = request.args.get('token') session[TOKEN_KEY_NAME] = token return redirect("/") 
@app.route('/logout') def logout(): if TOKEN_KEY_NAME in session: del session[TOKEN_KEY_NAME] return redirect("/") # If we're running in stand alone mode, run the application if __name__ == '__main__': if SERVER_CONFIG["ssl_cert_path"] and SERVER_CONFIG["ssl_key_path"]: ssl_context = ( SERVER_CONFIG["ssl_cert_path"], SERVER_CONFIG["ssl_key_path"] ) else: ssl_context = None app.run( port=SERVER_CONFIG['port'], debug=SERVER_CONFIG['debug'], ssl_context=ssl_context )
true
true
f70c651bfec18a770fd048a9281fc8526c3a4a28
53,701
py
Python
htools/core.py
hdmamin/htools
620c6add29561b77c10d793e4be7beeb28b32bab
[ "MIT" ]
1
2019-12-14T15:24:38.000Z
2019-12-14T15:24:38.000Z
htools/core.py
hdmamin/htools
620c6add29561b77c10d793e4be7beeb28b32bab
[ "MIT" ]
null
null
null
htools/core.py
hdmamin/htools
620c6add29561b77c10d793e4be7beeb28b32bab
[ "MIT" ]
1
2020-03-30T17:26:39.000Z
2020-03-30T17:26:39.000Z
from bz2 import BZ2File from collections import Counter, Sequence, Iterable, \ Mapping from functools import partial import gc from email.mime.image import MIMEImage from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from email import encoders from inspect import signature, getattr_static, ismethod, getmembers, getmodule from itertools import chain import json from multiprocessing import Pool import os from pathlib import Path import pickle from random import choice import re import smtplib from subprocess import run, check_output import sys import time from tqdm.auto import tqdm import wordninja as wn from htools.config import get_credentials, get_default_user class InvalidArgumentError(Exception): pass def hdir(obj, magics=False, internals=False): """Print object methods and attributes, by default excluding magic methods. Parameters ----------- obj: any type The object to print methods and attributes for. magics: bool Specifies whether to include magic methods (e.g. __name__, __hash__). Default False. internals: bool Specifies whether to include internal methods (e.g. _dfs, _name). Default False. Returns -------- dict Keys are method/attribute names, values are strings specifying whether the corresponding key is a 'method' or an 'attr'. """ output = dict() for attr in dir(obj): # Exclude magics or internals if specified. if (not magics and attr.startswith('__')) or \ (not internals and re.match('_[^_]', attr)): continue # Handle rare case where attr can't be invoked (e.g. df.sparse on a # non-sparse Pandas dataframe). try: is_method = callable(getattr(obj, attr)) except Exception: continue # Update output to specify whether attr is callable. if is_method: output[attr] = 'method' else: output[attr] = 'attribute' return output def tdir(obj, **kwargs): """A variation of the built in `dir` function that shows the attribute names as well as their types. Methods are excluded as they can change the object's state. 
Parameters ---------- obj: any type The object to examine. kwargs: bool Additional arguments to be passed to hdir. Options are `magics` and `internals`. See hdir documentation for more information. Returns ------- dict[str, type]: Dictionary mapping the name of the object's attributes to the corresponding types of those attributes. """ return {k: type(getattr(obj, k)) for k, v in hdir(obj, **kwargs).items() if v == 'attribute'} def hasarg(func, arg): """Checks if a function has a given argument. Works with args and kwargs as well if you exclude the stars. See example below. Parameters ---------- func: function arg: str Name of argument to look for. Returns ------- bool Example ------- def foo(a, b=6, *args): return >>> hasarg(foo, 'b') True >>> hasarg(foo, 'args') True >>> hasarg(foo, 'c') False """ return arg in signature(func).parameters def quickmail(subject, message, to_email, from_email=None, img_path=None, img_name=None, verbose=True, password=None): """Send an email. Parameters ----------- from_email: str Gmail address being used to send email. to_email: str Recipient's email. subject: str Subject line of email. message: str Body of email. Returns -------- None """ # Load email username. Error handling takes place in config functions. from_email = from_email or get_default_user() if not from_email: return None # Load email password. password = password or get_credentials(from_email) if not password: return None # Create message and add text if specified. msg = MIMEMultipart() msg['Subject'] = subject msg['From'] = from_email msg['To'] = to_email if message: msg.attach(MIMEText(message)) # Load and attach image. if img_path: with open(img_path, 'rb') as f: img = MIMEImage(f.read(), name=img_name or os.path.basename(img_path)) encoders.encode_base64(img) msg.attach(img) # Access server and send email. 
server = smtplib.SMTP(host='smtp.gmail.com', port=587) server.starttls() server.login(user=from_email, password=password) server.sendmail(from_email, to_email, msg.as_string()) if verbose: print(f'Email sent to {to_email}.') def hsplit(text, sep, group=True, attach=True): """Flexible string splitting that retains the delimiter rather, unlike the built-in str.split() method. NOTE: I recently observed behavior suggesting separators with special characters (e.g. "\n") may not work as expected for some settings. It should work when group=True and attach=True though since I rewrote that with new logic without the re module. Parameters ----------- text: str The input text to be split. sep: str The delimiter to be split on. group: bool Specifies whether to group consecutive delimiters together (True), or to separate them (False). attach: bool Specifies whether to attach the delimiter to the string that preceeds it (True), or to detach it so it appears in the output list as its own item (False). Returns -------- list[str] Examples --------- text = "Score -- Giants win 6-5" sep = '-' # Case 0.1: Delimiters are grouped together and attached to the preceding word. >> hsplit(text, sep, group=True, attach=True) >> ['Score --', ' Giants win 6-', '5'] # Case 0.2: Delimiters are grouped together but are detached from the preceding word, instead appearing as their own item in the output list. >> hsplit(text, sep, group=True, attach=False) >> ['Score ', '--', ' Giants win 6', '-', '5'] Case 1.1: Delimiters are retained and attached to the preceding string. If the delimiter occurs multiple times consecutively, only the first occurrence is attached, and the rest appear as individual items in the output list. >> hsplit(text, sep, group=False, attach=True) >> ['Score -', '-', ' Giants win 6-', '5'] # Case 1.2: Delimiters are retained but are detached from the preceding string. Each instance appears as its own item in the output list. 
>> hsplit(text, sep, group=False, attach=False) >> ['Score ', '-', '-', ' Giants win 6', '-', '5'] """ sep_re = re.escape(sep) regex = f'[^{sep_re}]*{sep_re}*' ########################################################################## # Case 0: Consecutive delimiters are grouped together. ########################################################################## if group: # Subcase 0.1 if attach: return _grouped_split(text, sep) # Subcase 0.2 else: return [word for word in re.split(f'({sep_re}+)', text) if word] ########################################################################## # Case 1: Consecutive delimiters are NOT grouped together. ########################################################################## words = text.split(sep) # Subcase 1.1 if attach: return [word for word in re.findall(regex[:-1]+'?', text) if word] # Subcase 1.2 return [word for word in chain(*zip(words, [sep]*len(words))) if word][:-1] def _grouped_split(text, sep): """Hsplit helper for case where group=True and attach=True (see hsplit docs). Old re.find() method didn't work right when sep had special characters (e.g. "\n"). """ res = [] toks = text.split(sep) max_idx = len(toks) - 1 for i, tok in enumerate(toks): if tok: if i < max_idx: tok += sep res.append(tok) elif i < max_idx: if res: res[-1] += sep else: res.append(sep) return res def rmvars(*args): """Wrapper to quickly free up memory by deleting global variables. Htools 3.0 does not provide a way to do this for local variables. Parameters ---------- args: str One or more variable names to delete. Do not pass in the variable itself. Returns ------- None """ for arg in args: del globals()[arg] gc.collect() def print_object_sizes(space, limit=None, exclude_underscore=True): """Print the object names and sizes of the currently defined objects. Parameters ----------- space: dict locals(), globals(), or vars() limit: int or None Optionally limit the number of objects displayed (default None for no limit). 
exclude_underscore: bool Determine whether to exclude objects whose names start with an underscore (default True). """ var_size = [(var, sys.getsizeof(obj)) for var, obj in space.items()] for var, size in sorted(var_size, key=lambda x: -x[1])[:limit]: if not var.startswith('_') or not exclude_underscore: print(var, size) def eprint(arr, indent=2, spacing=1): """Enumerated print. Prints an iterable with one item per line accompanied by a number specifying its index in the iterable. Parameters ----------- arr: iterable The object to be iterated over. indent: int Width to assign to column of integer indices. Default is 2, meaning columns will line up as long as <100 items are being printed, which is the expected use case. spacing: int Line spacing. Default of 1 will print each item on a new line with no blank lines in between. Spacing of 2 will double space output, and so on for larger values. Returns -------- None """ for i, x in enumerate(arr): print(f'{i:>{indent}}: {x}', end='\n'*spacing) def _read_write_args(path, mode): """Helper for `save` and `load` functions. Parameters ---------- path: str Path to read/write object from/to. mode: str 'w' for writing files (as in `save`), 'r' for reading files (as in `load`). Returns ------- tuple: Function to open file, mode to open file with (str), object to open file with. """ ext = path.rpartition('.')[-1] if ext not in {'json', 'pkl', 'zip'}: raise InvalidArgumentError( 'Invalid extension. Make sure your filename ends with ' '.json, .pkl, or .zip.' ) # Store in dict to make it easier to add additional formats in future. ext2data = { 'json': (open, '', json), 'pkl': (open, 'b', pickle), 'zip': (BZ2File, '', pickle), } opener, mode_suffix, saver = ext2data[ext] return opener, mode + mode_suffix, saver def save(obj, path, mode_pre='w', verbose=True): """Wrapper to save data as text, pickle (optionally zipped), or json. Parameters ----------- obj: any Object to save. 
This will be pickled/jsonified/zipped inside the function - do not convert it before-hand. path: str File name to save object to. Should end with .txt, .sh, md, .pkl, .zip, or .json depending on desired output format. If .zip is used, object will be zipped and then pickled. (.sh and .md will be treated identically to .txt.) mode_pre: str Determines whether to write or append text. One of ('w', 'a'). verbose: bool If True, print a message confirming that the data was pickled, along with its path. Returns ------- None """ path = Path(path) os.makedirs(path.parent, exist_ok=True) if verbose: print(f'Writing data to {path}.') if path.suffix[1:] in ('txt', 'sh', 'md', 'py'): with path.open(mode_pre) as f: f.write(obj) else: opener, mode, saver = _read_write_args(str(path), mode_pre) with opener(path, mode) as f: saver.dump(obj, f) def load(path, verbose=True): """Wrapper to load text files or pickled (optionally zipped) or json data. Parameters ---------- path : str File to load. File type will be inferred from extension. Must be one of '.txt', '.sh', 'md', '.json', '.pkl', or '.zip'. verbose : bool, optional If True, will print message stating where object was loaded from. Returns ------- object: The Python object that was pickled to the specified file. """ path = Path(path) if path.suffix[1:] in ('txt', 'sh', 'md', 'py'): return path.read_text() opener, mode, saver = _read_write_args(str(path), 'r') with opener(path, mode) as f: data = saver.load(f) if verbose: print(f'Object loaded from {path}.') return data def dict_sum(*args): """Given two or more dictionaries with numeric values, combine them into a single dictionary. For keys that appear in multiple dictionaries, their corresponding values are added to produce the new value. This differs from combining two dictionaries in the following manner: {**d1, **d2} The method shown above will combine the keys but will retain the value from d2, rather than adding the values from d1 and d2. 
Parameters ----------- *args: dicts 2 or more dictionaries with numeric values. Returns -------- dict: Contains all keys which appear in any of the dictionaries that are passed in. The corresponding values from each dictionary containing a given key are summed to produce the new value. Examples --------- >>> d1 = {'a': 1, 'b': 2, 'c': 3} >>> d2 = {'a': 10, 'c': -20, 'd': 30} >>> d3 = {'c': 10, 'd': 5, 'e': 0} >>> dict_sum(d1, d2) {'a': 11, 'b': 2, 'c': -7, 'd': 35, 'e': 0} """ keys = {key for d in args for key in d.keys()} return {key: sum(d.get(key, 0) for d in args) for key in keys} def _select_mapping(items, keep=(), drop=()): """Helper function for `select`. Parameters ---------- items: Mapping Dict (or similar mapping) to select/drop from. keep: Iterable[str] Sequence of keys to keep. drop: Iterable[str] Sequence of keys to drop. You should specify either `keep` or `drop`, not both. Returns ------- Dict """ if keep: return {k: items[k] for k in keep} return {k: v for k, v in items.items() if k not in set(drop)} def _select_sequence(items, keep=(), drop=()): """Helper function for `select` that works on sequences (basically collections that support enumeration). Parameters ---------- items: Sequence List, tuple, or iterable sequence of some sort to select items from. keep: Iterable[str] Sequence of indices to keep. drop: Iterable[str] Sequence of indices to drop. You should specify either `keep` or `drop`, not both. Returns ------- Same type as `items` (usually a list or tuple). """ type_ = type(items) if keep: return type_(x for i, x in enumerate(items) if i in set(keep)) return type_(x for i, x in enumerate(items) if i not in set(drop)) def select(items, keep=(), drop=()): """Select a subset of a data structure. When used on a mapping (e.g. dict), you can specify a list of keys to include or exclude. When used on a sequence like a list or tuple, specify indices instead of keys. 
Parameters ---------- items: abc.Sequence or abc.Mapping The dictionary to select items from. keep: Iterable[str] Sequence of keys to keep. drop: Iterable[str] Sequence of keys to drop. You should specify either `keep` or `drop`, not both. Returns ------- dict: Dictionary containing only the specified keys (when passing in `keep`), or all keys except the specified ones (when passing in `drop`). """ if bool(keep) + bool(drop) != 1: raise InvalidArgumentError('Specify exactly one of `keep` or `drop`.') if isinstance(items, Mapping): return _select_mapping(items, keep, drop) elif isinstance(items, Sequence): return _select_sequence(items, keep, drop) else: raise InvalidArgumentError('`items` must be a Mapping or Sequence.') def differences(obj1, obj2, methods=False, **kwargs): """Find the differences between two objects (generally of the same type - technically this isn't enforced but we do require that the objects have the same set of attribute names so a similar effect is achieved. Actual type checking was causing problems comparing multiple Args instances, presumably because each Args object is defined when called). This is a way to get more detail beyond whether two objects are equal or not. Parameters ----------- obj1: any An object. obj2: any, usually the same type as obj1 An object. methods: bool If True, include methods in the comparison. If False, only attributes will be compared. Note that the output may not be particularly interpretable when using method=True; for instance when comparing two strings consisting of different characters, we get a lot of output that looks like this: {'islower': (<function str.islower()>, <function str.islower()>), 'isupper': (<function str.isupper()>, <function str.isupper()>),... 'istitle': (<function str.istitle()>, <function str.istitle()>)} These attributes all reflect the same difference: if obj1 is 'abc' and obj2 is 'def', then 'abc' != 'def' and 'ABC' != 'DEF' abd 'Abc' != 'Def'. 
    When method=False, we ignore all of these, such that
    differences('a', 'b') returns {}. Therefore, it is important to
    carefully consider what differences you care about identifying.

    **kwargs: bool
        Can pass args to hdir to include magics or internals.

    Returns
    --------
    dict[str, tuple]: Maps attribute name to a tuple of values, where
        the first is the corresponding value for obj1 and the second is
        the corresponding value for obj2.
    """
    # Try the built-in comparison first: if the objects compare equal, there
    # are no differences to report. Error handling is deliberately broad
    # because __eq__ (or truthiness of its result) can raise for some types,
    # e.g. ambiguous element-wise array comparisons; in that case we fall
    # through to the attribute-by-attribute comparison below.
    try:
        if obj1 == obj2:
            return {}
    except Exception:
        pass

    attr1, attr2 = hdir(obj1, **kwargs), hdir(obj2, **kwargs)
    assert attr1.keys() == attr2.keys(), 'Objects must have same attributes.'
    diffs = {}
    for (k1, v1), (k2, v2) in zip(attr1.items(), attr2.items()):
        # Only compare non-callable attributes.
        if not (methods or v1 == 'attribute'):
            continue

        # Comparisons work differently for arrays/tensors than other objects:
        # element-wise == returns an array, so reduce with .all() when
        # available and fall back to plain equality otherwise.
        val1, val2 = getattr(obj1, k1), getattr(obj2, k2)
        try:
            equal = (val1 == val2).all()
        except AttributeError:
            equal = val1 == val2

        # Store values that are different for obj1 and obj2.
        if not equal:
            diffs[k1] = (val1, val2)
    return diffs


def catch(func, *args, verbose=False):
    """Error handling for list comprehensions. In practice, it's recommended
    to use the higher-level robust_comp() function which uses catch() under
    the hood.

    Parameters
    -----------
    func: function
    *args: any type
        Arguments to be passed to func.
    verbose: bool
        If True, print the error message should one occur.

    Returns
    --------
    any type: If the function executes successfully, its output is returned.
        Otherwise, return None.

    Examples
    ---------
    [catch(lambda x: 1 / x, i) for i in range(3)]
    >>> [None, 1.0, 0.5]

    # Note that the filtering method shown below also removes zeros which is
    # okay in this case.
list(filter(None, [catch(lambda x: 1 / x, i) for i in range(3)])) >>> [1.0, 0.5] """ try: return func(*args) except Exception as e: if verbose: print(e) return def safe_map(func, seq): """This addresses the issue of error handling in map() or list comprehension operations by simply skipping any items that throw an error. Note that values of None will be removed from the resulting list. Parameters ---------- func: function Function to apply to each item in seq. seq: generator, iterator The sequence to iterate over. This could also be a generator, list, set, etc. Returns ------- list Examples -------- # Notice that instead of throwing an error when dividing by zero, that # entry was simply dropped. >>> safe_map(lambda x: x/(x-2), range(4)) [-0.0, -1.0, 3.0] """ return list( filter(lambda x: x is not None, (catch(func, obj) for obj in seq)) ) def flatten(nested): """Flatten a nested sequence where the sub-items can be sequences or primitives. This differs slightly from itertools chain methods because those require all sub-items to be sequences. Here, items can be primitives, sequences, nested sequences, or any combination of these. Any iterable items aside from strings will be completely un-nested, so use with caution (e.g. a torch Dataset would be unpacked into separate items for each index). This also returns a list rather than a generator. Parameters ---------- nested: sequence (list, tuple, set) Sequence where some or all of the items are also sequences. Returns ------- list: Flattened version of `nested`. """ def _walk(nested): for group in nested: if isinstance(group, Iterable) and not isinstance(group, str): yield from _walk(group) else: yield group return list(_walk(nested)) class BasicPipeline: """Create a simple unidirectional pipeline of functions to apply in order with optional debugging output. """ def __init__(self, *funcs): """ Parameters ---------- *funcs: function(s) One or more functions to apply in the specified order. """ # Make `funcs` mutable. 
Could use @htools.meta.delegate('funcs') # but not sure if that would cause circular import issues. Check later. self.funcs = list(funcs) def __call__(self, x, verbose=False, attr=''): """Apply the pipeline of functions to x. Parameters ---------- x: any Object to operate on. verbose: bool If True, print x (or an attribute of x) after each step. attr: str If specified and verbose is True, will print this attribute of x after each function is applied. Returns ------- output of last func in self.funcs """ for func in self.funcs: x = func(x) if verbose: print(repr(getattr(x, attr, x))) return x def __repr__(self): # Try to display each item in the form that was likely passed in: for # functions, this is the name, but for callable classes this is # the str representation of the object, not the class itself. names = ',\n\t'.join(str(f) if hasattr(f, '__call__') else func_name(f) for f in self.funcs) return f'{type(self).__name__}(\n\t{names}\n)' def pipe(x, *funcs, verbose=False, attr=''): """Convenience function to apply many functions in order to some object. This lets us replace messy notation where it's hard to keep parenthesis straight: list(parse_processed_text(tokenize_rows(porter_stem(strip_html_tags( text))))) with: pipe(text, strip_html_tags, porter_stem, tokenize_rows, parse_processed_text, list) or if we have a list of functions: pipe(x, *funcs) Parameters ---------- x: any Object to apply functions to. *funcs: function(s) Functions in the order you want to apply them. Use functools.partial to specify other arguments. verbose: bool If True, print x (or an attribute of x) after each step. attr: str If specified and verbose is True, will print this attribute of x after each function is applied. Returns ------- output of last func in *funcs """ return BasicPipeline(*funcs)(x, verbose=verbose, attr=attr) def vcounts(arr, normalize=True): """Equivalent of pandas_htools vcounts method that we can apply on lists or arrays. 
Basically just a wrapper around Counter but with optional normalization. Parameters ---------- arr: Iterable Sequence of values to count. Typically a list or numpy array. normalize: bool If True, counts will be converted to percentages. Returns ------- dict: Maps unique items in `arr` to the number of times (or % of times) that they occur in `arr`. """ counts = dict(Counter(arr)) if normalize: length = len(arr) counts = {k: v/length for k, v in counts.items()} return counts def item(it, random=True, try_values=True): """Get an item from an iterable (e.g. dict, set, torch DataLoader). This is a quick way to access an item for iterables that don't support indexing, or do support indexing but require us to know a key. Parameters ---------- it: Iterable Container that we want to access a value from. random: bool If True, pick a random value from `it`. Otherwise just return the first value. try_values: bool If True, will check if `it` has a `values` attribute and will operate on that if it does. We often want to see a random value from a dict rather than a key. If we want both a key and value, we could set try_values=False and pass in d.items(). Returns ------- any: An item from the iterable. """ if try_values and hasattr(it, 'values'): it = it.values() if random: return choice(list(it)) return next(iter(it)) def lmap(fn, *args): """Basically a wrapper for `map` that returns a list rather than a generator. This is such a common pattern that I think it deserves its own function (think of it as a concise alternative to a list comprehension). One slight difference is that we use *args instead of passing in an iterable. This adds a slight convenience for the intended use case (fast prototyping). See the `Examples` for more on this. Parameters ---------- args: any Returns ------- list Examples -------- Consider these three equivalent syntax options: lmap(fn, x, y) [fn(obj) for obj in (x, y)] list(map(fn, (x, y)) When quickly iterating, option 1 saves a bit of typing. 
The extra parentheses that options 2 and 3 require to put x and y in a temporary data structure can get messy as we add more complex logic. """ return list(map(fn, args)) def amap(attr, *args): """More convenient syntax for quick data exploration. Get an attribute value for multiple objects. Name is short for "attrmap". Parameters ---------- attr: str Name of attribute to retrieve for each object. args: any Objects (usually of same type) to retrieve attributes for. Returns ------- list: Result for each object. Examples -------- df1 = pd.DataFrame(np.random.randint(0, 10, (4, 5))) df2 = pd.DataFrame(np.random.randint(0, 3, (4, 5))) df3 = pd.DataFrame(np.random.randint(0, 3, (2, 3))) >>> amap('shape', df1, df2, df3) [(4, 5), (4, 5), (2, 3)] net = nn.Sequential(...) >>> amap('shape', *net.parameters()) [torch.Size([5, 3]), torch.Size([16, 4]), torch.Size([16, 3]), torch.Size([16])] """ return [getattr(arg, attr) for arg in args] def smap(*x): """Get shape of each array/tensor in a list or tuple. Parameters ---------- *x: np.arrays or torch.tensors We use star unpacking here to create a consistent interface with amap() and lmap(). Returns ------- list: Shape of each array/tensor in input. """ return amap('shape', *x) def sleepy_range(*args, wait=1, wait_before=True): """Convenience function: we often want to create a loop that mimics doing some time intensive thing on each iteration. This is just like the built-in range function (not technically a function!) but with a sleep period baked in, making it particularly useful for list comprehensions where this would be tricky otherwise. Note: unlike range, calling this is destructive. See examples. Parameters ---------- args: int Passed on to range(). wait: int or float Number of seconds to wait on each iteration. Remember this is a keyword only argument for compatibility with the range interface. wait_before: bool Determines whether to sleep before or after yielding the number. 
Defaults to before to mimic "doing work" before producing some result. Examples -------- # Takes 6 seconds to create this list. >>> [i for i in sleepy_range(3, wait=2)] [0, 1, 2] >>> srange = sleepy_range(0, 6, 2, wait_before=False) >>> for i in srange: >>> print(i) 0 2 4 >>> for i in srange: >>> print(i) # Notice this cannot be used again without manually calling sleepy_range. """ for i in range(*args): if wait_before: time.sleep(wait) yield i if not wait_before: time.sleep(wait) def venumerate(iterable, start=0, freq=1, print_before=True, message_format='{}'): """Verbose enumerate: simple convenience function that's a drop-in replacement for enumerate. It prints updates as we iterate over some object. TQDM progress bar may not be available in some cases (e.g. we don't know the length of the interval, or possible some cases using concurrency?), and this function gives us some way to keep an eye on progress. Mainly intended as a convenience for list comprehensions, since in a standard for loop we could easily add this logic. Parameters ---------- iterable: Iterable The object to iterate over. start: int Passed on to enumerate - the first index to use when counting. freq: int Frequency with which to print updates (i.e. updates are printed when i is divisible by freq). print_before: bool Specifies whether to print the message before yielding the i'th value or after. message_format: str Used to format the message that will be displayed when i is divisible by freq. Defaults to just printing i. """ for i, x in enumerate(iterable, start=start): if i % freq == 0 and print_before: print(message_format.format(i)) yield i, x if i % freq == 0 and not print_before: print(message_format.format(i)) def method_of(meth): """Retrieve the class a method belongs to. This will NOT work on attributes. Also, this won't help if your goal is to retrieve an instance: this returns the type of the instance. 
Not thoroughly tested but it seems to work regardless of whether you pass in meth from an instance or a class (the output is the same in both cases). Parameters ---------- meth: MethodType The method to retrieve the class of. Returns ------- type: The class which defines the method in question. Examples -------- class Foo: def my_method(self, x): return x*2 f = Foo() assert method_of(Foo.my_method) == method_of(f.my_method) == Foo """ cls, name = meth.__qualname__.split('.') return dict(getmembers(getmodule(meth)))[cls] def hasstatic(cls, meth_name): """Check if a class possesses a staticmethod of a given name. Similar to hasattr. Note that isinstance(cls.meth_name, staticmethod) would always return False: we must use getattr_static or cls.__dict__[meth_name] to potentially return True. Parameters ---------- cls: Type or any A class or an instance (seems to work on both, though more extensive testing may be needed for more complex scenarios). meth_name: str Name of method to check. If the class/instance does not contain any attribute with this name, function returns False. Returns ------- bool: True if `cls` has a staticmethod with name `meth_name`. """ return isinstance(getattr_static(cls, meth_name, None), staticmethod) def isstatic(meth): """Companion to hasstatic that checks a method itself rather than a class and method name. It does use hasstatic under the hood. """ # First check isn't required but I want to avoid reaching the hackier bits # of code if necessary. This catches regular methods and attributes. if ismethod(meth) or not callable(meth): return False parts = getattr(meth, '__qualname__', '').split('.') if len(parts) != 2: return False cls = method_of(meth) return hasstatic(cls, parts[-1]) def has_classmethod(cls, meth_name): """Check if a class has a classmethod with a given name. Note that isinstance(cls.meth_name, classmethod) would always return False: we must use getattr_static or cls.__dict__[meth_name] to potentially return True. 
Parameters ---------- cls: type or obj This is generally intended to be a class but it should work on objects (class instances) as well. meth_name: str The name of the potential classmethod to check for. Returns ------- bool: True if cls possesses a classmethod with the specified name. """ return isinstance(getattr_static(cls, meth_name), classmethod) def is_classmethod(meth): """Companion to has_classmethod that checks a method itself rather than a class and a method name. It does use has_classmethod under the hood. """ if not ismethod(meth): return False parts = getattr(meth, '__qualname__', '').split('.') if len(parts) != 2: return False cls = method_of(meth) return has_classmethod(cls, parts[-1]) def parallelize(func, items, total=None, chunksize=1_000, processes=None): """Apply a function to a sequence of items in parallel. A progress bar is included. Parameters ---------- func: function This will be applied to each item in `items`. items: Iterable Sequence of items to apply `func` to. total: int or None This defaults to the length of `items`. In the case that items is a generator, this lets us pass in the length explicitly. This lets tdqm know how quickly to advance our progress bar. chunksize: int Positive int that determines the size of chunks submitted to the process pool as separate tasks. Multiprocessing's default is 1 but larger values should speed things up, especially with long sequences. processes: None Optionally set number of processes to run in parallel. Returns ------- list """ total = total or len(items) with Pool(processes) as p: res = list(tqdm(p.imap(func, items, chunksize=chunksize), total=total)) return res def identity(x): """Returns the input argument. 
Sometimes it is convenient to have this if we sometimes apply a function to an item: rather than defining a None variable, sometimes setting it to a function, then checking if it's None every time we're about to call it, we can set the default as identity and safely call it without checking. Parameters ---------- x: any Returns ------- x: Unchanged input. """ return x def always_true(x, *args, **kwargs): """Similar to `identity` but returns True instead of x. I'm tempted to name this `true` but I fear that will cause some horrible bugs where I accidentally use this when I want to use True. """ return True def ifnone(arg, backup): """Shortcut to provide a backup value if an argument is None. Commonly used for numpy arrays since their truthiness is ambiguous. Parameters ---------- arg: any We will check if this is None. backup: any This will be returned if arg is None. Returns ------- Either `arg` or `backup` will be returned. """ return arg if arg is not None else backup def listlike(x): """Checks if an object is a list/tuple/set/array etc. Strings and mappings (e.g. dicts) are not considered list-like. """ return isinstance(x, Iterable) and not isinstance(x, (str, Mapping)) def tolist(x, length_like=None, length=None, error_message='x length does not match desired length.'): """Helper to let a function accept a single value or a list of values for a certain parameter. WARNING: if x is a primitive and you specify a length (either via `length_like` or `length`, the resulting list will contain multiple references to the same item). This is mostly intended for use on lists of floats or ints so I don't think it's a problem, but keep this in mind when considering using this on mutable objects. Parameters ---------- x: Iterable Usually either a list/tuple or a primitive. length_like: None or object If provided, we check that x is the same length. If x is a primitive, we'll make it the same length. 
length: None or int Similar to `length_like` but lets us specify the desired length directly. `length_like` overrides this, though you should only provide one or the other. error_message: str Displayed in the event that a desired length is specified and x is list-like and does not match that length. You can pass in your own error message if you want something more specific to your current use case. Returns ------- list Examples -------- def train(lrs): lrs = tolist(lrs) ... We can now pass in a single learning rate or multiple. >>> train(3e-3) >>> train([3e-4, 3e-3]) """ if length_like is not None: length = len(length_like) # Case 1. List-like x if listlike(x): if length: assert len(x) == length, error_message return list(x) # Case 2. Dict-like x if isinstance(x, Mapping): raise ValueError('x must not be a mapping. It should probably be a ' 'primitive (str, int, etc.) or a list-like object ' '(tuple, list, set).') # Case 3. Primitive x return [x] * (length or 1) def xor_none(*args, n=1): """Checks that exactly 1 (or n) of inputs is not None. Useful for validating optional function arguments (for example, ensuring the user specifies either a directory name or a list of files but not both. Parameters ---------- args: any n: int The desired number of non-None elements. Usually 1 but we allow the user to specify other values. Returns ------- None: This will raise an error if the condition is not satisfied. Do not use this as an if condition (e.g. `if xor_none(a, b): print('success')`. This would always evaluate to False because the function doesn't explicitly return a value so we get None. """ if sum(bool(arg is not None) for arg in args) != n: raise ValueError(f'Exactly {n} or args must be not None.') def max_key(d, fn=identity): """Find the maximum value in a dictionary and return the associated key. If we want to compare values using something other than their numeric values, we can specify a function. 
For example, with a dict mapping strings to strings, fn=len would return the key with the longest value. Parameters ---------- d: dict Values to select from. fn: callable Takes 1 argument (a single value from d.values()) and returns a number. This will be used to sort the items. Returns ------- A key from dict `d`. """ return max(d.items(), key=lambda x: fn(x[1]))[0] def is_builtin(x, drop_callables=True): """Check if an object is a Python built-in object. Parameters ---------- x: object drop_callables: bool If True, return False for callables (basically functions, methods, or classes). These typically will return True otherwise since they are of class `type` or `builtin_function_or_method`. Returns ------- bool: True if `x` is a built-in object, False otherwise. """ def _builtin(x, drop_callables): if callable(x) and drop_callables: return False return x.__class__.__module__ == 'builtins' builtin = partial(_builtin, drop_callables=drop_callables) # Check mapping first because mappings are iterable. if isinstance(x, Mapping): return builtin(x) and all(builtin(o) for o in flatten(x.items())) elif isinstance(x, Iterable): return builtin(x) and all(builtin(o) for o in flatten(x)) return builtin(x) def hashable(x): """Check if an object is hashable. Hashable objects will usually be immutable though this is not guaranteed. Parameters ---------- x: object The item to check for hashability. Returns ------- bool: True if `x` is hashable (suggesting immutability), False otherwise. """ try: _ = hash(x) return True except TypeError: return False def fgrep(text, term, window=25, with_idx=False, reverse=False): """Search a string for a given term. If found, print it with some context. Similar to `grep -C 1 term text`. `fgrep` is short for faux grep. Parameters ---------- text: str Text to search. term: str Term to look for in text. window: int Number of characters to display before and after the matching term. with_idx: bool If True, return index as well as string. 
reverse: bool If True, reverse search direction (find last match rather than first). Returns ------- str or tuple[int, str]: The desired term and its surrounding context. If the term isn't present, an empty string is returned. If with_idx=True, a tuple of (match index, string with text) is returned. """ idx = text.rfind(term) if reverse else text.find(term) if idx == -1: res = '' else: res = text[max(idx-window, 0):idx+window] return (idx, res) if with_idx else res def spacer(char='-', n_chars=79, newlines_before=1, newlines_after=1): """ Get string to separate output when printing output for multiple items. Parameters ---------- char: str The character that will be printed repeatedly. n_chars: int The number of times to repeat `char`. We expect that `char` is a single character so this will be the total line length. newlines_before: int Number of newline characters to add before the spacer. newlines_after: int Number of newline characters to add after the spacer. Returns ------- str """ return '\n'*newlines_before + char * n_chars + '\n'*newlines_after def func_name(func): """Usually just returns the name of a function. The difference is this is compatible with functools.partial, which otherwise makes __name__ inaccessible. Parameters ---------- func: callable Can be a function, partial, or callable class. """ assert callable(func), 'Input must be callable.' try: res = func.__name__ except AttributeError: if isinstance(func, partial): return func_name(func.func) else: return func.__class__.__name__ except Exception as e: raise e return res def snake2camel(text): """Convert snake case to camel case. This assumes the input is valid snake case (if you have some weird hybrid of snake and camel case, for instance, you'd want to do some preprocessing first). Parameters ---------- text: str Snake case string, e.g. vader_sentiment_score. Returns ------- str: `text` converted to camel case, e.g. vaderSentimentScore. 
""" res = [] prev = '' for char in text: if char != '_': # Check if res is empty because of case with leading underscore. res.append(char.upper() if prev == '_' and res else char) prev = char return ''.join(res) def camel2snake(text): """Convert camel case to snake case. This assumes the input is valid camel case (if you have some weird hybrid of camel and snake case, for instance, you'd want to do some preprocessing first). Parameters ---------- text: str Camel case string, e.g. vaderSentimentScore. Returns ------- str: `text` converted to snake case, e.g. vader_sentiment_score. """ res = [] for char in text: if char.islower(): res.append(char) else: res.extend(['_', char.lower()]) return ''.join(res) def to_snake(text): """Experimental feature: tries to convert any common format to snake case. This hasn't been extensively tested but it seems to work with snake case (no change), camel case, upper camel case, words separated by hyphens/dashes/spaces, and combinations of the above. It may occasionally split words that should not be split, though this should be rare if names use actual English words (this might not work so well on fastai-style variable names (very short, e.g. "tfms" for "transforms"), but the intended use case is mostly for fixing column names in pandas. Parameters ---------- text: str Returns ------- str: Input text converted to snake case. """ return '_'.join(wn.split(text.lower())) def to_camel(text): """Experimental feature: tries to convert any common format to camel case. This hasn't been extensively tested but it seems to work with camel case (no change), snake case, upper camel case, words separated by hyphens/dashes/spaces, and combinations of the above. It may occasionally split words that should not be split, though this should be rare if names use actual English words (this might not work so well on fastai-style variable names (very short, e.g. 
"tfms" for "transforms"), but the intended use case is mostly for fixing column names in pandas. Parameters ---------- text: str Returns ------- str: Input text converted to snake case. """ return ''.join(w.title() if i > 0 else w for i, w in enumerate(wn.split(text.lower()))) def kwargs_fallback(self, *args, assign=False, **kwargs): """Use inside a method that accepts **kwargs. Sometimes we want to use an instance variable for some computation but want to give the user the option to pass in a new value to the method (often ML hyperparameters) to be used instead. This function makes that a little more convenient. Parameters ---------- self: object The class instance. In most cases users will literally pass `self` in. args: str One or more names of variables to use this procedure on. assign: bool If True, any user-provided kwargs will be used to update attributes of the instance. If False (the default), they will be used in computation but won't change the state of the instance. kwargs: any Just forward along the kwargs passed to the method. Returns ------- list or single object: If more than one arg is specified, a list of values is returned. For just one arg, a single value will be returned. Examples -------- class Foo: def __init__(self, a, b=3, c=('a', 'b', 'c')): self.a, self.b, self.c = a, b, c def walk(self, d, **kwargs): a, c = kwargs_fallback(self, 'a', 'c', **kwargs) print(self.a, self.b, self.c) print(a, c, end='\n\n') b, c = kwargs_fallback(self, 'b', 'c', assign=True, **kwargs) print(self.a, self.b, self.c) print(b, c) # Notice the first `kwargs_fallback` call doesn't change attributes of f # but the second does. In the first block of print statements, the variable # `b` does not exist yet because we didn't include it in *args. >>> f = Foo(1) >>> f.walk(d=0, b=10, c=100) 1 3 ('a', 'b', 'c') 1 100 1 10 100 10 100 """ res = [] for arg in args: # Don't just use `kwargs.get(arg) or ...` because this doesn't work # well when we pass in a numpy array or None. 
val = kwargs[arg] if arg in kwargs else getattr(self, arg) res.append(val) if assign: setattr(self, arg, val) return res if len(res) > 1 else res[0] def cd_root(root_subdir='notebooks', max_depth=4): """Run at start of Jupyter notebook to enter project root. Parameters ---------- root_subdir: str Name of a subdirectory contained in the project root directory. If not found in the current working directory, this will move to the parent directory repeatedly until it is found. Choose carefully: if you have multiple directories with the same name in your directory structure (e.g. ~/htools/lib/htools), 'htools' would be a bad choice if you want to end up in ~). max_depth: int Max number of directory levels to traverse. Don't want to get stuck in an infinite loop if we make a mistake. Examples -------- Sample file structure (abbreviated): my_project/ py/ fetch_raw_data.py notebooks/ nb01_eda.ipynb Running cd_root() from nb01_eda.ipynb will change the working directory from notebooks/ to my_project/, which is typically the same directory we'd run scripts in py/ from. This makes converting from notebooks to scripts easier. """ changes = 0 start_dir = os.getcwd() while root_subdir not in next(os.walk('.'))[1]: if changes >= max_depth: os.chdir(start_dir) raise RuntimeError('Exceeded max_depth. Check that your ' 'root_subdir is <= max_depth directories away.') os.chdir('..') changes += 1 print('Current directory:', os.getcwd()) def ngrams(word, n=3, step=1, drop_last=False): """To get non-overlapping sequences, pass in same value for `step` as `n`. """ stop = max(1, step+len(word)-n) ngrams_ = [] for i in range(0, stop, step): ngrams_.append(word[i:i+n]) if drop_last and len(ngrams_[-1]) < n: ngrams_ = ngrams_[:-1] return ngrams_ def shell(cmd, return_output=True): """Execute shell command (between subprocess and os, there's ~5 different ways to do this and I always forget which I want. This is just a way for me to choose once and not have to decide again. 
There are rare situations where we may need a different function (subprocess.run is blocking; if we want to launch a process and continue the script without waiting for completion, we can use subprocess.check_call). Parameters ---------- cmd: str Example: 'ls *.csv' return_output: bool If True, return the output of the command: e.g. if cmd is 'pip show requests', this would return a string containing information about the version of the requests library you have installed. If False, we return a tuple of (return code (0/1), stderr, stdout). I've noticed the latter 2 are usually None though - need to read more into subprocess docs to figure out why this is happening. Returns ------- tuple: returncode (int), stderr, stdout. I believe stderr and stdout are None if nothing is returned and str otherwise. """ parts = cmd.split() if return_output: return check_output(parts).decode() res = run(parts) return res.returncode, res.stderr, res.stdout def set_summary(x1, x2, info=('first_only', 'second_only')): """Summarize set comparison between two iterables (they will be converted to sets internally). Parameters ---------- info: Iterable[str] Determines what info to return. 'first_only' returns items only in the first iterable, 'second_only' returns items only in the second, 'and' returns items in both, and 'or' returns items in either. Returns ------- dict[str, set]: Maps str in `info` to set of items. """ s1, s2 = set(x1), set(x2) res = {'and': s1 & s2, 'or': s1 | s2, 'first_only': s1 - s2, 'second_only': s2 - s1} for k, v in res.items(): print(f'{k}: {len(v)} items') return select(res, keep=list(info)) SENTINEL = object()
31.964881
79
0.625798
from bz2 import BZ2File from collections import Counter, Sequence, Iterable, \ Mapping from functools import partial import gc from email.mime.image import MIMEImage from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText from email import encoders from inspect import signature, getattr_static, ismethod, getmembers, getmodule from itertools import chain import json from multiprocessing import Pool import os from pathlib import Path import pickle from random import choice import re import smtplib from subprocess import run, check_output import sys import time from tqdm.auto import tqdm import wordninja as wn from htools.config import get_credentials, get_default_user class InvalidArgumentError(Exception): pass def hdir(obj, magics=False, internals=False): output = dict() for attr in dir(obj): if (not magics and attr.startswith('__')) or \ (not internals and re.match('_[^_]', attr)): continue # non-sparse Pandas dataframe). try: is_method = callable(getattr(obj, attr)) except Exception: continue # Update output to specify whether attr is callable. if is_method: output[attr] = 'method' else: output[attr] = 'attribute' return output def tdir(obj, **kwargs): return {k: type(getattr(obj, k)) for k, v in hdir(obj, **kwargs).items() if v == 'attribute'} def hasarg(func, arg): return arg in signature(func).parameters def quickmail(subject, message, to_email, from_email=None, img_path=None, img_name=None, verbose=True, password=None): # Load email username. Error handling takes place in config functions. from_email = from_email or get_default_user() if not from_email: return None # Load email password. password = password or get_credentials(from_email) if not password: return None # Create message and add text if specified. msg = MIMEMultipart() msg['Subject'] = subject msg['From'] = from_email msg['To'] = to_email if message: msg.attach(MIMEText(message)) # Load and attach image. 
if img_path: with open(img_path, 'rb') as f: img = MIMEImage(f.read(), name=img_name or os.path.basename(img_path)) encoders.encode_base64(img) msg.attach(img) # Access server and send email. server = smtplib.SMTP(host='smtp.gmail.com', port=587) server.starttls() server.login(user=from_email, password=password) server.sendmail(from_email, to_email, msg.as_string()) if verbose: print(f'Email sent to {to_email}.') def hsplit(text, sep, group=True, attach=True): sep_re = re.escape(sep) regex = f'[^{sep_re}]*{sep_re}*' ########################################################################## # Case 0: Consecutive delimiters are grouped together. ########################################################################## if group: # Subcase 0.1 if attach: return _grouped_split(text, sep) # Subcase 0.2 else: return [word for word in re.split(f'({sep_re}+)', text) if word] ########################################################################## # Case 1: Consecutive delimiters are NOT grouped together. 
########################################################################## words = text.split(sep) # Subcase 1.1 if attach: return [word for word in re.findall(regex[:-1]+'?', text) if word] # Subcase 1.2 return [word for word in chain(*zip(words, [sep]*len(words))) if word][:-1] def _grouped_split(text, sep): res = [] toks = text.split(sep) max_idx = len(toks) - 1 for i, tok in enumerate(toks): if tok: if i < max_idx: tok += sep res.append(tok) elif i < max_idx: if res: res[-1] += sep else: res.append(sep) return res def rmvars(*args): for arg in args: del globals()[arg] gc.collect() def print_object_sizes(space, limit=None, exclude_underscore=True): var_size = [(var, sys.getsizeof(obj)) for var, obj in space.items()] for var, size in sorted(var_size, key=lambda x: -x[1])[:limit]: if not var.startswith('_') or not exclude_underscore: print(var, size) def eprint(arr, indent=2, spacing=1): for i, x in enumerate(arr): print(f'{i:>{indent}}: {x}', end='\n'*spacing) def _read_write_args(path, mode): ext = path.rpartition('.')[-1] if ext not in {'json', 'pkl', 'zip'}: raise InvalidArgumentError( 'Invalid extension. Make sure your filename ends with ' '.json, .pkl, or .zip.' ) # Store in dict to make it easier to add additional formats in future. 
ext2data = { 'json': (open, '', json), 'pkl': (open, 'b', pickle), 'zip': (BZ2File, '', pickle), } opener, mode_suffix, saver = ext2data[ext] return opener, mode + mode_suffix, saver def save(obj, path, mode_pre='w', verbose=True): path = Path(path) os.makedirs(path.parent, exist_ok=True) if verbose: print(f'Writing data to {path}.') if path.suffix[1:] in ('txt', 'sh', 'md', 'py'): with path.open(mode_pre) as f: f.write(obj) else: opener, mode, saver = _read_write_args(str(path), mode_pre) with opener(path, mode) as f: saver.dump(obj, f) def load(path, verbose=True): path = Path(path) if path.suffix[1:] in ('txt', 'sh', 'md', 'py'): return path.read_text() opener, mode, saver = _read_write_args(str(path), 'r') with opener(path, mode) as f: data = saver.load(f) if verbose: print(f'Object loaded from {path}.') return data def dict_sum(*args): keys = {key for d in args for key in d.keys()} return {key: sum(d.get(key, 0) for d in args) for key in keys} def _select_mapping(items, keep=(), drop=()): if keep: return {k: items[k] for k in keep} return {k: v for k, v in items.items() if k not in set(drop)} def _select_sequence(items, keep=(), drop=()): type_ = type(items) if keep: return type_(x for i, x in enumerate(items) if i in set(keep)) return type_(x for i, x in enumerate(items) if i not in set(drop)) def select(items, keep=(), drop=()): if bool(keep) + bool(drop) != 1: raise InvalidArgumentError('Specify exactly one of `keep` or `drop`.') if isinstance(items, Mapping): return _select_mapping(items, keep, drop) elif isinstance(items, Sequence): return _select_sequence(items, keep, drop) else: raise InvalidArgumentError('`items` must be a Mapping or Sequence.') def differences(obj1, obj2, methods=False, **kwargs): # May built-in comparison functionality. Keep error handling broad. try: if obj1 == obj2: return {} except Exception: pass attr1, attr2 = hdir(obj1, **kwargs), hdir(obj2, **kwargs) assert attr1.keys() == attr2.keys(), 'Objects must have same attributes.' 
diffs = {} for (k1, v1), (k2, v2) in zip(attr1.items(), attr2.items()): # Only compare non-callable attributes. if not (methods or v1 == 'attribute'): continue # Comparisons work differently for arrays/tensors than other objects. val1, val2 = getattr(obj1, k1), getattr(obj2, k2) try: equal = (val1 == val2).all() except AttributeError: equal = val1 == val2 # Store values that are different for obj1 and obj2. if not equal: diffs[k1] = (val1, val2) return diffs def catch(func, *args, verbose=False): try: return func(*args) except Exception as e: if verbose: print(e) return def safe_map(func, seq): return list( filter(lambda x: x is not None, (catch(func, obj) for obj in seq)) ) def flatten(nested): def _walk(nested): for group in nested: if isinstance(group, Iterable) and not isinstance(group, str): yield from _walk(group) else: yield group return list(_walk(nested)) class BasicPipeline: def __init__(self, *funcs): # Make `funcs` mutable. Could use @htools.meta.delegate('funcs') # but not sure if that would cause circular import issues. Check later. self.funcs = list(funcs) def __call__(self, x, verbose=False, attr=''): for func in self.funcs: x = func(x) if verbose: print(repr(getattr(x, attr, x))) return x def __repr__(self): # Try to display each item in the form that was likely passed in: for # functions, this is the name, but for callable classes this is # the str representation of the object, not the class itself. 
names = ',\n\t'.join(str(f) if hasattr(f, '__call__') else func_name(f) for f in self.funcs) return f'{type(self).__name__}(\n\t{names}\n)' def pipe(x, *funcs, verbose=False, attr=''): return BasicPipeline(*funcs)(x, verbose=verbose, attr=attr) def vcounts(arr, normalize=True): counts = dict(Counter(arr)) if normalize: length = len(arr) counts = {k: v/length for k, v in counts.items()} return counts def item(it, random=True, try_values=True): if try_values and hasattr(it, 'values'): it = it.values() if random: return choice(list(it)) return next(iter(it)) def lmap(fn, *args): return list(map(fn, args)) def amap(attr, *args): return [getattr(arg, attr) for arg in args] def smap(*x): return amap('shape', *x) def sleepy_range(*args, wait=1, wait_before=True): for i in range(*args): if wait_before: time.sleep(wait) yield i if not wait_before: time.sleep(wait) def venumerate(iterable, start=0, freq=1, print_before=True, message_format='{}'): for i, x in enumerate(iterable, start=start): if i % freq == 0 and print_before: print(message_format.format(i)) yield i, x if i % freq == 0 and not print_before: print(message_format.format(i)) def method_of(meth): cls, name = meth.__qualname__.split('.') return dict(getmembers(getmodule(meth)))[cls] def hasstatic(cls, meth_name): return isinstance(getattr_static(cls, meth_name, None), staticmethod) def isstatic(meth): # First check isn't required but I want to avoid reaching the hackier bits if ismethod(meth) or not callable(meth): return False parts = getattr(meth, '__qualname__', '').split('.') if len(parts) != 2: return False cls = method_of(meth) return hasstatic(cls, parts[-1]) def has_classmethod(cls, meth_name): return isinstance(getattr_static(cls, meth_name), classmethod) def is_classmethod(meth): if not ismethod(meth): return False parts = getattr(meth, '__qualname__', '').split('.') if len(parts) != 2: return False cls = method_of(meth) return has_classmethod(cls, parts[-1]) def parallelize(func, items, total=None, 
chunksize=1_000, processes=None): total = total or len(items) with Pool(processes) as p: res = list(tqdm(p.imap(func, items, chunksize=chunksize), total=total)) return res def identity(x): return x def always_true(x, *args, **kwargs): return True def ifnone(arg, backup): return arg if arg is not None else backup def listlike(x): return isinstance(x, Iterable) and not isinstance(x, (str, Mapping)) def tolist(x, length_like=None, length=None, error_message='x length does not match desired length.'): if length_like is not None: length = len(length_like) if listlike(x): if length: assert len(x) == length, error_message return list(x) if isinstance(x, Mapping): raise ValueError('x must not be a mapping. It should probably be a ' 'primitive (str, int, etc.) or a list-like object ' '(tuple, list, set).') return [x] * (length or 1) def xor_none(*args, n=1): if sum(bool(arg is not None) for arg in args) != n: raise ValueError(f'Exactly {n} or args must be not None.') def max_key(d, fn=identity): return max(d.items(), key=lambda x: fn(x[1]))[0] def is_builtin(x, drop_callables=True): def _builtin(x, drop_callables): if callable(x) and drop_callables: return False return x.__class__.__module__ == 'builtins' builtin = partial(_builtin, drop_callables=drop_callables) if isinstance(x, Mapping): return builtin(x) and all(builtin(o) for o in flatten(x.items())) elif isinstance(x, Iterable): return builtin(x) and all(builtin(o) for o in flatten(x)) return builtin(x) def hashable(x): try: _ = hash(x) return True except TypeError: return False def fgrep(text, term, window=25, with_idx=False, reverse=False): idx = text.rfind(term) if reverse else text.find(term) if idx == -1: res = '' else: res = text[max(idx-window, 0):idx+window] return (idx, res) if with_idx else res def spacer(char='-', n_chars=79, newlines_before=1, newlines_after=1): return '\n'*newlines_before + char * n_chars + '\n'*newlines_after def func_name(func): assert callable(func), 'Input must be callable.' 
try: res = func.__name__ except AttributeError: if isinstance(func, partial): return func_name(func.func) else: return func.__class__.__name__ except Exception as e: raise e return res def snake2camel(text): res = [] prev = '' for char in text: if char != '_': res.append(char.upper() if prev == '_' and res else char) prev = char return ''.join(res) def camel2snake(text): res = [] for char in text: if char.islower(): res.append(char) else: res.extend(['_', char.lower()]) return ''.join(res) def to_snake(text): return '_'.join(wn.split(text.lower())) def to_camel(text): return ''.join(w.title() if i > 0 else w for i, w in enumerate(wn.split(text.lower()))) def kwargs_fallback(self, *args, assign=False, **kwargs): res = [] for arg in args: val = kwargs[arg] if arg in kwargs else getattr(self, arg) res.append(val) if assign: setattr(self, arg, val) return res if len(res) > 1 else res[0] def cd_root(root_subdir='notebooks', max_depth=4): changes = 0 start_dir = os.getcwd() while root_subdir not in next(os.walk('.'))[1]: if changes >= max_depth: os.chdir(start_dir) raise RuntimeError('Exceeded max_depth. Check that your ' 'root_subdir is <= max_depth directories away.') os.chdir('..') changes += 1 print('Current directory:', os.getcwd()) def ngrams(word, n=3, step=1, drop_last=False): stop = max(1, step+len(word)-n) ngrams_ = [] for i in range(0, stop, step): ngrams_.append(word[i:i+n]) if drop_last and len(ngrams_[-1]) < n: ngrams_ = ngrams_[:-1] return ngrams_ def shell(cmd, return_output=True): parts = cmd.split() if return_output: return check_output(parts).decode() res = run(parts) return res.returncode, res.stderr, res.stdout def set_summary(x1, x2, info=('first_only', 'second_only')): s1, s2 = set(x1), set(x2) res = {'and': s1 & s2, 'or': s1 | s2, 'first_only': s1 - s2, 'second_only': s2 - s1} for k, v in res.items(): print(f'{k}: {len(v)} items') return select(res, keep=list(info)) SENTINEL = object()
true
true
f70c651ffebe47bce3c52d330166eb8fc9ad41b9
475
py
Python
frameworks/tensorflow/print_tensor_in_ckpt.py
JasonWayne/deep-learning-snippets
7c64e065752fcbb902494d757a41140f42facf05
[ "MIT" ]
null
null
null
frameworks/tensorflow/print_tensor_in_ckpt.py
JasonWayne/deep-learning-snippets
7c64e065752fcbb902494d757a41140f42facf05
[ "MIT" ]
null
null
null
frameworks/tensorflow/print_tensor_in_ckpt.py
JasonWayne/deep-learning-snippets
7c64e065752fcbb902494d757a41140f42facf05
[ "MIT" ]
null
null
null
''' common usage: 1. put this script in ckpt folder 2. python print_tensor_in_ckpt.py > tensors.txt ''' # ref: https://stackoverflow.com/questions/38218174/how-do-i-find-the-variable-names-and-values-that-are-saved-in-a-checkpoint import tensorflow as tf from tensorflow.python.tools.inspect_checkpoint import print_tensors_in_checkpoint_file latest_ckp = tf.train.latest_checkpoint('./') print_tensors_in_checkpoint_file(latest_ckp, all_tensors=True, tensor_name='')
36.538462
126
0.8
import tensorflow as tf from tensorflow.python.tools.inspect_checkpoint import print_tensors_in_checkpoint_file latest_ckp = tf.train.latest_checkpoint('./') print_tensors_in_checkpoint_file(latest_ckp, all_tensors=True, tensor_name='')
true
true
f70c657b24f5e01e4e11443197e6a739d6dc1295
90,707
py
Python
test/functional/p2p-segwit.py
chanchoi829/favcoin
8f836036faae2e3575ef05a22c450e88b120a5fa
[ "MIT" ]
null
null
null
test/functional/p2p-segwit.py
chanchoi829/favcoin
8f836036faae2e3575ef05a22c450e88b120a5fa
[ "MIT" ]
null
null
null
test/functional/p2p-segwit.py
chanchoi829/favcoin
8f836036faae2e3575ef05a22c450e88b120a5fa
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # Copyright (c) 2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test segwit transactions and blocks on P2P network.""" from test_framework.mininode import * from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * from test_framework.script import * from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, get_witness_script, WITNESS_COMMITMENT_HEADER from test_framework.key import CECKey, CPubKey import time import random from binascii import hexlify # The versionbit bit used to signal activation of SegWit VB_WITNESS_BIT = 1 VB_PERIOD = 144 VB_ACTIVATION_THRESHOLD = 108 VB_TOP_BITS = 0x20000000 MAX_SIGOP_COST = 80000 # Calculate the virtual size of a witness block: # (base + witness/4) def get_virtual_size(witness_block): base_size = len(witness_block.serialize()) total_size = len(witness_block.serialize(with_witness=True)) # the "+3" is so we round up vsize = int((3*base_size + total_size + 3)/4) return vsize class TestNode(NodeConnCB): def __init__(self): super().__init__() self.getdataset = set() def on_getdata(self, conn, message): for inv in message.inv: self.getdataset.add(inv.hash) def announce_tx_and_wait_for_getdata(self, tx, timeout=60): with mininode_lock: self.last_message.pop("getdata", None) self.send_message(msg_inv(inv=[CInv(1, tx.sha256)])) self.wait_for_getdata(timeout) def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60): with mininode_lock: self.last_message.pop("getdata", None) self.last_message.pop("getheaders", None) msg = msg_headers() msg.headers = [ CBlockHeader(block) ] if use_header: self.send_message(msg) else: self.send_message(msg_inv(inv=[CInv(2, block.sha256)])) self.wait_for_getheaders() self.send_message(msg) self.wait_for_getdata() def request_block(self, blockhash, inv_type, 
timeout=60): with mininode_lock: self.last_message.pop("block", None) self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)])) self.wait_for_block(blockhash, timeout) return self.last_message["block"].block def test_transaction_acceptance(self, tx, with_witness, accepted, reason=None): tx_message = msg_tx(tx) if with_witness: tx_message = msg_witness_tx(tx) self.send_message(tx_message) self.sync_with_ping() assert_equal(tx.hash in self.connection.rpc.getrawmempool(), accepted) if (reason != None and not accepted): # Check the rejection reason as well. with mininode_lock: assert_equal(self.last_message["reject"].reason, reason) # Test whether a witness block had the correct effect on the tip def test_witness_block(self, block, accepted, with_witness=True): if with_witness: self.send_message(msg_witness_block(block)) else: self.send_message(msg_block(block)) self.sync_with_ping() assert_equal(self.connection.rpc.getbestblockhash() == block.hash, accepted) # Used to keep track of anyone-can-spend outputs that we can use in the tests class UTXO(object): def __init__(self, sha256, n, nValue): self.sha256 = sha256 self.n = n self.nValue = nValue # Helper for getting the script associated with a P2PKH def GetP2PKHScript(pubkeyhash): return CScript([CScriptOp(OP_DUP), CScriptOp(OP_HASH160), pubkeyhash, CScriptOp(OP_EQUALVERIFY), CScriptOp(OP_CHECKSIG)]) # Add signature for a P2PK witness program. 
def sign_P2PK_witness_input(script, txTo, inIdx, hashtype, value, key):
    """Sign input *inIdx* of *txTo* against a P2PK witness *script* using the
    BIP143 (segwit v1) sighash, and install [signature, script] as the
    witness stack. Mutates and rehashes *txTo* in place."""
    tx_hash = SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, value)
    # DER signature with the one-byte hashtype appended, per standard encoding.
    signature = key.sign(tx_hash) + chr(hashtype).encode('latin-1')
    txTo.wit.vtxinwit[inIdx].scriptWitness.stack = [signature, script]
    txTo.rehash()


class SegWitTest(BitcoinTestFramework):
    """End-to-end P2P tests of segwit tx/block relay and consensus rules."""

    def set_test_params(self):
        self.setup_clean_chain = True
        # node0: permissive; node1: standardness enforced; node2: segwit
        # deployment parameters zeroed out (never activates).
        self.num_nodes = 3
        self.extra_args = [["-whitelist=127.0.0.1"], ["-whitelist=127.0.0.1", "-acceptnonstdtxn=0"], ["-whitelist=127.0.0.1", "-vbparams=segwit:0:0"]]

    def setup_network(self):
        # Star topology around node0 so its blocks/txs propagate to both peers.
        self.setup_nodes()
        connect_nodes(self.nodes[0], 1)
        connect_nodes(self.nodes[0], 2)
        self.sync_all()

    ''' Helpers '''
    # Build a block on top of node0's tip.
    def build_next_block(self, nVersion=VB_TOP_BITS):
        """Return an unsolved block extending node0's tip with the given
        nVersion (defaults to versionbits-signalling top bits)."""
        tip = self.nodes[0].getbestblockhash()
        height = self.nodes[0].getblockcount() + 1
        # mediantime+1 keeps the new block's time monotonically valid.
        block_time = self.nodes[0].getblockheader(tip)["mediantime"] + 1
        block = create_block(int(tip, 16), create_coinbase(height), block_time)
        block.nVersion = nVersion
        block.rehash()
        return block

    # Adds list of transactions to block, adds witness commitment, then solves.
    def update_witness_block_with_transactions(self, block, tx_list, nonce=0):
        """Append *tx_list* to *block*, recompute the coinbase witness
        commitment (with the given nonce), and grind a valid PoW."""
        block.vtx.extend(tx_list)
        add_witness_commitment(block, nonce)
        block.solve()
        return

    ''' Individual tests '''
    def test_witness_services(self):
        """Check the node advertises the NODE_WITNESS service bit."""
        self.log.info("Verifying NODE_WITNESS service bit")
        assert((self.test_node.connection.nServices & NODE_WITNESS) != 0)

    # See if sending a regular transaction works, and create a utxo
    # to use in later tests.
    def test_non_witness_transaction(self):
        # Mine a block with an anyone-can-spend coinbase,
        # let it mature, then try to spend it.
        self.log.info("Testing non-witness transaction")
        block = self.build_next_block(nVersion=1)
        block.solve()
        self.test_node.send_message(msg_block(block))
        self.test_node.sync_with_ping() # make sure the block was processed
        txid = block.vtx[0].sha256

        self.nodes[0].generate(99) # let the block mature

        # Create a transaction that spends the coinbase
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(txid, 0), b""))
        tx.vout.append(CTxOut(49*100000000, CScript([OP_TRUE])))
        tx.calc_sha256()

        # Check that serializing it with or without witness is the same
        # This is a sanity check of our testing framework.
        assert_equal(msg_tx(tx).serialize(), msg_witness_tx(tx).serialize())

        self.test_node.send_message(msg_witness_tx(tx))
        self.test_node.sync_with_ping() # make sure the tx was processed
        assert(tx.hash in self.nodes[0].getrawmempool())
        # Save this transaction for later
        self.utxo.append(UTXO(tx.sha256, 0, 49*100000000))
        self.nodes[0].generate(1)

    # Verify that blocks with witnesses are rejected before activation.
    def test_unnecessary_witness_before_segwit_activation(self):
        self.log.info("Testing behavior of unnecessary witnesses")
        # For now, rely on earlier tests to have created at least one utxo for
        # us to use
        assert(len(self.utxo) > 0)
        assert(get_bip9_status(self.nodes[0], 'segwit')['status'] != 'active')

        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE])))
        tx.wit.vtxinwit.append(CTxInWitness())
        tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])]

        # Verify the hash with witness differs from the txid
        # (otherwise our testing framework must be broken!)
        tx.rehash()
        assert(tx.sha256 != tx.calc_sha256(with_witness=True))

        # Construct a segwit-signaling block that includes the transaction.
        block = self.build_next_block(nVersion=(VB_TOP_BITS|(1 << VB_WITNESS_BIT)))
        self.update_witness_block_with_transactions(block, [tx])
        # Sending witness data before activation is not allowed (anti-spam
        # rule).
        self.test_node.test_witness_block(block, accepted=False)
        # TODO: fix synchronization so we can test reject reason
        # Right now, bitcoind delays sending reject messages for blocks
        # until the future, making synchronization here difficult.
        #assert_equal(self.test_node.last_message["reject"].reason, "unexpected-witness")

        # But it should not be permanently marked bad...
        # Resend without witness information.
        self.test_node.send_message(msg_block(block))
        self.test_node.sync_with_ping()
        assert_equal(self.nodes[0].getbestblockhash(), block.hash)

        sync_blocks(self.nodes)

        # Create a p2sh output -- this is so we can pass the standardness
        # rules (an anyone-can-spend OP_TRUE would be rejected, if not wrapped
        # in P2SH).
        p2sh_program = CScript([OP_TRUE])
        p2sh_pubkey = hash160(p2sh_program)
        scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])

        # Now check that unnecessary witnesses can't be used to blind a node
        # to a transaction, eg by violating standardness checks.
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, scriptPubKey))
        tx2.rehash()
        self.test_node.test_transaction_acceptance(tx2, False, True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)

        # We'll add an unnecessary witness to this transaction that would cause
        # it to be non-standard, to test that violating policy with a witness before
        # segwit activation doesn't blind a node to a transaction. Transactions
        # rejected for having a witness before segwit activation shouldn't be added
        # to the rejection cache.
        tx3 = CTransaction()
        tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), CScript([p2sh_program])))
        tx3.vout.append(CTxOut(tx2.vout[0].nValue-100000, scriptPubKey))
        tx3.wit.vtxinwit.append(CTxInWitness())
        # 400kB witness push: grossly non-standard, but pre-activation the
        # rejection must be for the premature witness, not for policy.
        tx3.wit.vtxinwit[0].scriptWitness.stack = [b'a'*400000]
        tx3.rehash()
        # Note that this should be rejected for the premature witness reason,
        # rather than a policy check, since segwit hasn't activated yet.
        self.std_node.test_transaction_acceptance(tx3, True, False, b'no-witness-yet')

        # If we send without witness, it should be accepted.
        self.std_node.test_transaction_acceptance(tx3, False, True)

        # Now create a new anyone-can-spend utxo for the next test.
        tx4 = CTransaction()
        tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), CScript([p2sh_program])))
        tx4.vout.append(CTxOut(tx3.vout[0].nValue-100000, CScript([OP_TRUE])))
        tx4.rehash()
        self.test_node.test_transaction_acceptance(tx3, False, True)
        self.test_node.test_transaction_acceptance(tx4, False, True)

        self.nodes[0].generate(1)
        sync_blocks(self.nodes)

        # Update our utxo list; we spent the first entry.
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx4.sha256, 0, tx4.vout[0].nValue))

    # Mine enough blocks for segwit's vb state to be 'started'.
    def advance_to_segwit_started(self):
        height = self.nodes[0].getblockcount()
        # Will need to rewrite the tests here if we are past the first period
        assert(height < VB_PERIOD - 1)
        # Genesis block is 'defined'.
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'defined')
        # Advance to end of period, status should now be 'started'
        self.nodes[0].generate(VB_PERIOD-height-1)
        assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started')

    # Mine enough blocks to lock in segwit, but don't activate.
    # TODO: we could verify that lockin only happens at the right threshold of
    # signalling blocks, rather than just at the right period boundary.
def advance_to_segwit_lockin(self): height = self.nodes[0].getblockcount() assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') # Advance to end of period, and verify lock-in happens at the end self.nodes[0].generate(VB_PERIOD-1) height = self.nodes[0].getblockcount() assert((height % VB_PERIOD) == VB_PERIOD - 2) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') self.nodes[0].generate(1) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') # Mine enough blocks to activate segwit. # TODO: we could verify that activation only happens at the right threshold # of signalling blocks, rather than just at the right period boundary. def advance_to_segwit_active(self): assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') height = self.nodes[0].getblockcount() self.nodes[0].generate(VB_PERIOD - (height%VB_PERIOD) - 2) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') self.nodes[0].generate(1) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'active') # This test can only be run after segwit has activated def test_witness_commitments(self): self.log.info("Testing witness commitments") # First try a correct witness commitment. block = self.build_next_block() add_witness_commitment(block) block.solve() # Test the test -- witness serialization should be different assert(msg_witness_block(block).serialize() != msg_block(block).serialize()) # This empty block should be valid. self.test_node.test_witness_block(block, accepted=True) # Try to tweak the nonce block_2 = self.build_next_block() add_witness_commitment(block_2, nonce=28) block_2.solve() # The commitment should have changed! assert(block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1]) # This should also be valid. 
self.test_node.test_witness_block(block_2, accepted=True) # Now test commitments with actual transactions assert (len(self.utxo) > 0) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) # Let's construct a witness program witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey)) tx.rehash() # tx2 will spend tx1, and send back to a regular anyone-can-spend address tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program)) tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program] tx2.rehash() block_3 = self.build_next_block() self.update_witness_block_with_transactions(block_3, [tx, tx2], nonce=1) # Add an extra OP_RETURN output that matches the witness commitment template, # even though it has extra data after the incorrect commitment. # This block should fail. block_3.vtx[0].vout.append(CTxOut(0, CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(2), 10]))) block_3.vtx[0].rehash() block_3.hashMerkleRoot = block_3.calc_merkle_root() block_3.rehash() block_3.solve() self.test_node.test_witness_block(block_3, accepted=False) # Add a different commitment with different nonce, but in the # right location, and with some funds burned(!). # This should succeed (nValue shouldn't affect finding the # witness commitment). add_witness_commitment(block_3, nonce=0) block_3.vtx[0].vout[0].nValue -= 1 block_3.vtx[0].vout[-1].nValue += 1 block_3.vtx[0].rehash() block_3.hashMerkleRoot = block_3.calc_merkle_root() block_3.rehash() assert(len(block_3.vtx[0].vout) == 4) # 3 OP_returns block_3.solve() self.test_node.test_witness_block(block_3, accepted=True) # Finally test that a block with no witness transactions can # omit the commitment. 
block_4 = self.build_next_block() tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b"")) tx3.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program)) tx3.rehash() block_4.vtx.append(tx3) block_4.hashMerkleRoot = block_4.calc_merkle_root() block_4.solve() self.test_node.test_witness_block(block_4, with_witness=False, accepted=True) # Update available utxo's for use in later test. self.utxo.pop(0) self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue)) def test_block_malleability(self): self.log.info("Testing witness block malleability") # Make sure that a block that has too big a virtual size # because of a too-large coinbase witness is not permanently # marked bad. block = self.build_next_block() add_witness_commitment(block) block.solve() block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a'*5000000) assert(get_virtual_size(block) > MAX_BLOCK_BASE_SIZE) # We can't send over the p2p network, because this is too big to relay # TODO: repeat this test with a block that can be relayed self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert(self.nodes[0].getbestblockhash() != block.hash) block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop() assert(get_virtual_size(block) < MAX_BLOCK_BASE_SIZE) self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert(self.nodes[0].getbestblockhash() == block.hash) # Now make sure that malleating the witness nonce doesn't # result in a block permanently marked bad. 
block = self.build_next_block() add_witness_commitment(block) block.solve() # Change the nonce -- should not cause the block to be permanently # failed block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(1) ] self.test_node.test_witness_block(block, accepted=False) # Changing the witness nonce doesn't change the block hash block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(0) ] self.test_node.test_witness_block(block, accepted=True) def test_witness_block_size(self): self.log.info("Testing witness block size limit") # TODO: Test that non-witness carrying blocks can't exceed 1MB # Skipping this test for now; this is covered in p2p-fullblocktest.py # Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB. block = self.build_next_block() assert(len(self.utxo) > 0) # Create a P2WSH transaction. # The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE. # This should give us plenty of room to tweak the spending tx's # virtual size. NUM_DROPS = 200 # 201 max ops per script! 
NUM_OUTPUTS = 50 witness_program = CScript([OP_2DROP]*NUM_DROPS + [OP_TRUE]) witness_hash = uint256_from_str(sha256(witness_program)) scriptPubKey = CScript([OP_0, ser_uint256(witness_hash)]) prevout = COutPoint(self.utxo[0].sha256, self.utxo[0].n) value = self.utxo[0].nValue parent_tx = CTransaction() parent_tx.vin.append(CTxIn(prevout, b"")) child_value = int(value/NUM_OUTPUTS) for i in range(NUM_OUTPUTS): parent_tx.vout.append(CTxOut(child_value, scriptPubKey)) parent_tx.vout[0].nValue -= 50000 assert(parent_tx.vout[0].nValue > 0) parent_tx.rehash() child_tx = CTransaction() for i in range(NUM_OUTPUTS): child_tx.vin.append(CTxIn(COutPoint(parent_tx.sha256, i), b"")) child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))] for i in range(NUM_OUTPUTS): child_tx.wit.vtxinwit.append(CTxInWitness()) child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a'*195]*(2*NUM_DROPS) + [witness_program] child_tx.rehash() self.update_witness_block_with_transactions(block, [parent_tx, child_tx]) vsize = get_virtual_size(block) additional_bytes = (MAX_BLOCK_BASE_SIZE - vsize)*4 i = 0 while additional_bytes > 0: # Add some more bytes to each input until we hit MAX_BLOCK_BASE_SIZE+1 extra_bytes = min(additional_bytes+1, 55) block.vtx[-1].wit.vtxinwit[int(i/(2*NUM_DROPS))].scriptWitness.stack[i%(2*NUM_DROPS)] = b'a'*(195+extra_bytes) additional_bytes -= extra_bytes i += 1 block.vtx[0].vout.pop() # Remove old commitment add_witness_commitment(block) block.solve() vsize = get_virtual_size(block) assert_equal(vsize, MAX_BLOCK_BASE_SIZE + 1) # Make sure that our test case would exceed the old max-network-message # limit assert(len(block.serialize(True)) > 2*1024*1024) self.test_node.test_witness_block(block, accepted=False) # Now resize the second transaction to make the block fit. 
cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0]) block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(cur_length-1) block.vtx[0].vout.pop() add_witness_commitment(block) block.solve() assert(get_virtual_size(block) == MAX_BLOCK_BASE_SIZE) self.test_node.test_witness_block(block, accepted=True) # Update available utxo's self.utxo.pop(0) self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue)) # submitblock will try to add the nonce automatically, so that mining # software doesn't need to worry about doing so itself. def test_submit_block(self): block = self.build_next_block() # Try using a custom nonce and then don't supply it. # This shouldn't possibly work. add_witness_commitment(block, nonce=1) block.vtx[0].wit = CTxWitness() # drop the nonce block.solve() self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert(self.nodes[0].getbestblockhash() != block.hash) # Now redo commitment with the standard nonce, but let bitcoind fill it in. add_witness_commitment(block, nonce=0) block.vtx[0].wit = CTxWitness() block.solve() self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert_equal(self.nodes[0].getbestblockhash(), block.hash) # This time, add a tx with non-empty witness, but don't supply # the commitment. block_2 = self.build_next_block() add_witness_commitment(block_2) block_2.solve() # Drop commitment and nonce -- submitblock should not fill in. block_2.vtx[0].vout.pop() block_2.vtx[0].wit = CTxWitness() self.nodes[0].submitblock(bytes_to_hex_str(block_2.serialize(True))) # Tip should not advance! assert(self.nodes[0].getbestblockhash() != block_2.hash) # Consensus tests of extra witness data in a transaction. 
def test_extra_witness_data(self): self.log.info("Testing extra witness data in tx") assert(len(self.utxo) > 0) block = self.build_next_block() witness_program = CScript([OP_DROP, OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) # First try extra witness data on a tx that doesn't require a witness tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-200000, scriptPubKey)) tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output tx.wit.vtxinwit.append(CTxInWitness()) tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])] tx.rehash() self.update_witness_block_with_transactions(block, [tx]) # Extra witness data should not be allowed. self.test_node.test_witness_block(block, accepted=False) # Try extra signature data. Ok if we're not spending a witness output. block.vtx[1].wit.vtxinwit = [] block.vtx[1].vin[0].scriptSig = CScript([OP_0]) block.vtx[1].rehash() add_witness_commitment(block) block.solve() self.test_node.test_witness_block(block, accepted=True) # Now try extra witness/signature data on an input that DOES require a # witness tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) # witness output tx2.vin.append(CTxIn(COutPoint(tx.sha256, 1), b"")) # non-witness tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE]))) tx2.wit.vtxinwit.extend([CTxInWitness(), CTxInWitness()]) tx2.wit.vtxinwit[0].scriptWitness.stack = [ CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program ] tx2.wit.vtxinwit[1].scriptWitness.stack = [ CScript([OP_TRUE]) ] block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx2]) # This has extra witness data, so it should fail. 
self.test_node.test_witness_block(block, accepted=False) # Now get rid of the extra witness, but add extra scriptSig data tx2.vin[0].scriptSig = CScript([OP_TRUE]) tx2.vin[1].scriptSig = CScript([OP_TRUE]) tx2.wit.vtxinwit[0].scriptWitness.stack.pop(0) tx2.wit.vtxinwit[1].scriptWitness.stack = [] tx2.rehash() add_witness_commitment(block) block.solve() # This has extra signature data for a witness input, so it should fail. self.test_node.test_witness_block(block, accepted=False) # Now get rid of the extra scriptsig on the witness input, and verify # success (even with extra scriptsig data in the non-witness input) tx2.vin[0].scriptSig = b"" tx2.rehash() add_witness_commitment(block) block.solve() self.test_node.test_witness_block(block, accepted=True) # Update utxo for later tests self.utxo.pop(0) self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) def test_max_witness_push_length(self): ''' Should only allow up to 520 byte pushes in witness stack ''' self.log.info("Testing maximum witness push size") MAX_SCRIPT_ELEMENT_SIZE = 520 assert(len(self.utxo)) block = self.build_next_block() witness_program = CScript([OP_DROP, OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, scriptPubKey)) tx.rehash() tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, CScript([OP_TRUE]))) tx2.wit.vtxinwit.append(CTxInWitness()) # First try a 521-byte stack element tx2.wit.vtxinwit[0].scriptWitness.stack = [ b'a'*(MAX_SCRIPT_ELEMENT_SIZE+1), witness_program ] tx2.rehash() self.update_witness_block_with_transactions(block, [tx, tx2]) self.test_node.test_witness_block(block, accepted=False) # Now reduce the length of the stack element tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(MAX_SCRIPT_ELEMENT_SIZE) 
add_witness_commitment(block) block.solve() self.test_node.test_witness_block(block, accepted=True) # Update the utxo for later tests self.utxo.pop() self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) def test_max_witness_program_length(self): # Can create witness outputs that are long, but can't be greater than # 10k bytes to successfully spend self.log.info("Testing maximum witness program length") assert(len(self.utxo)) MAX_PROGRAM_LENGTH = 10000 # This program is 19 max pushes (9937 bytes), then 64 more opcode-bytes. long_witness_program = CScript([b'a'*520]*19 + [OP_DROP]*63 + [OP_TRUE]) assert(len(long_witness_program) == MAX_PROGRAM_LENGTH+1) long_witness_hash = sha256(long_witness_program) long_scriptPubKey = CScript([OP_0, long_witness_hash]) block = self.build_next_block() tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, long_scriptPubKey)) tx.rehash() tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, CScript([OP_TRUE]))) tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*44 + [long_witness_program] tx2.rehash() self.update_witness_block_with_transactions(block, [tx, tx2]) self.test_node.test_witness_block(block, accepted=False) # Try again with one less byte in the witness program witness_program = CScript([b'a'*520]*19 + [OP_DROP]*62 + [OP_TRUE]) assert(len(witness_program) == MAX_PROGRAM_LENGTH) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) tx.vout[0] = CTxOut(tx.vout[0].nValue, scriptPubKey) tx.rehash() tx2.vin[0].prevout.hash = tx.sha256 tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*43 + [witness_program] tx2.rehash() block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx, tx2]) self.test_node.test_witness_block(block, accepted=True) self.utxo.pop() 
self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) def test_witness_input_length(self): ''' Ensure that vin length must match vtxinwit length ''' self.log.info("Testing witness input length") assert(len(self.utxo)) witness_program = CScript([OP_DROP, OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) # Create a transaction that splits our utxo into many outputs tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) nValue = self.utxo[0].nValue for i in range(10): tx.vout.append(CTxOut(int(nValue/10), scriptPubKey)) tx.vout[0].nValue -= 1000 assert(tx.vout[0].nValue >= 0) block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True) # Try various ways to spend tx that should all break. # This "broken" transaction serializer will not normalize # the length of vtxinwit. class BrokenCTransaction(CTransaction): def serialize_with_witness(self): flags = 0 if not self.wit.is_null(): flags |= 1 r = b"" r += struct.pack("<i", self.nVersion) if flags: dummy = [] r += ser_vector(dummy) r += struct.pack("<B", flags) r += ser_vector(self.vin) r += ser_vector(self.vout) if flags & 1: r += self.wit.serialize() r += struct.pack("<I", self.nLockTime) return r tx2 = BrokenCTransaction() for i in range(10): tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b"")) tx2.vout.append(CTxOut(nValue-3000, CScript([OP_TRUE]))) # First try using a too long vtxinwit for i in range(11): tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[i].scriptWitness.stack = [b'a', witness_program] block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=False) # Now try using a too short vtxinwit tx2.wit.vtxinwit.pop() tx2.wit.vtxinwit.pop() block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx2]) 
self.test_node.test_witness_block(block, accepted=False) # Now make one of the intermediate witnesses be incorrect tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[-1].scriptWitness.stack = [b'a', witness_program] tx2.wit.vtxinwit[5].scriptWitness.stack = [ witness_program ] block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=False) # Fix the broken witness and the block should be accepted. tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_program] block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=True) self.utxo.pop() self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) def test_witness_tx_relay_before_segwit_activation(self): self.log.info("Testing relay of witness transactions") # Generate a transaction that doesn't require a witness, but send it # with a witness. Should be rejected for premature-witness, but should # not be added to recently rejected list. assert(len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, CScript([OP_TRUE]))) tx.wit.vtxinwit.append(CTxInWitness()) tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ] tx.rehash() tx_hash = tx.sha256 tx_value = tx.vout[0].nValue # Verify that if a peer doesn't set nServices to include NODE_WITNESS, # the getdata is just for the non-witness portion. self.old_node.announce_tx_and_wait_for_getdata(tx) assert(self.old_node.last_message["getdata"].inv[0].type == 1) # Since we haven't delivered the tx yet, inv'ing the same tx from # a witness transaction ought not result in a getdata. 
try: self.test_node.announce_tx_and_wait_for_getdata(tx, timeout=2) self.log.error("Error: duplicate tx getdata!") assert(False) except AssertionError as e: pass # Delivering this transaction with witness should fail (no matter who # its from) assert_equal(len(self.nodes[0].getrawmempool()), 0) assert_equal(len(self.nodes[1].getrawmempool()), 0) self.old_node.test_transaction_acceptance(tx, with_witness=True, accepted=False) self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False) # But eliminating the witness should fix it self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True) # Cleanup: mine the first transaction and update utxo self.nodes[0].generate(1) assert_equal(len(self.nodes[0].getrawmempool()), 0) self.utxo.pop(0) self.utxo.append(UTXO(tx_hash, 0, tx_value)) # After segwit activates, verify that mempool: # - rejects transactions with unnecessary/extra witnesses # - accepts transactions with valid witnesses # and that witness transactions are relayed to non-upgraded peers. def test_tx_relay_after_segwit_activation(self): self.log.info("Testing relay of witness transactions") # Generate a transaction that doesn't require a witness, but send it # with a witness. Should be rejected because we can't use a witness # when spending a non-witness output. assert(len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, CScript([OP_TRUE]))) tx.wit.vtxinwit.append(CTxInWitness()) tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ] tx.rehash() tx_hash = tx.sha256 # Verify that unnecessary witnesses are rejected. self.test_node.announce_tx_and_wait_for_getdata(tx) assert_equal(len(self.nodes[0].getrawmempool()), 0) self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False) # Verify that removing the witness succeeds. 
self.test_node.announce_tx_and_wait_for_getdata(tx) self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True) # Now try to add extra witness data to a valid witness tx. witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx_hash, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, scriptPubKey)) tx2.rehash() tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b"")) tx3.wit.vtxinwit.append(CTxInWitness()) # Add too-large for IsStandard witness and check that it does not enter reject filter p2sh_program = CScript([OP_TRUE]) p2sh_pubkey = hash160(p2sh_program) witness_program2 = CScript([b'a'*400000]) tx3.vout.append(CTxOut(tx2.vout[0].nValue-100000, CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL]))) tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program2] tx3.rehash() # Node will not be blinded to the transaction self.std_node.announce_tx_and_wait_for_getdata(tx3) self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size') self.std_node.announce_tx_and_wait_for_getdata(tx3) self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size') # Remove witness stuffing, instead add extra witness push on stack tx3.vout[0] = CTxOut(tx2.vout[0].nValue-100000, CScript([OP_TRUE])) tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program ] tx3.rehash() self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True) self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False) # Get rid of the extra witness, and verify acceptance. tx3.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ] # Also check that old_node gets a tx announcement, even though this is # a witness transaction. 
        self.old_node.wait_for_inv([CInv(1, tx2.sha256)]) # wait until tx2 was inv'ed
        self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
        self.old_node.wait_for_inv([CInv(1, tx3.sha256)])

        # Test that getrawtransaction returns correct witness information
        # hash, size, vsize
        raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1)
        assert_equal(int(raw_tx["hash"], 16), tx3.calc_sha256(True))
        assert_equal(raw_tx["size"], len(tx3.serialize_with_witness()))
        # vsize = (weight + 3) // 4, where weight = 3*stripped_size + total_size
        vsize = (len(tx3.serialize_with_witness()) + 3*len(tx3.serialize_without_witness()) + 3) / 4
        assert_equal(raw_tx["vsize"], vsize)
        assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1)
        assert_equal(raw_tx["vin"][0]["txinwitness"][0], hexlify(witness_program).decode('ascii'))
        assert(vsize != raw_tx["size"])

        # Cleanup: mine the transactions and update utxo for next test
        self.nodes[0].generate(1)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)

        self.utxo.pop(0)
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))

    # Test that block requests to NODE_WITNESS peer are with MSG_WITNESS_FLAG
    # This is true regardless of segwit activation.
    # Also test that we don't ask for blocks from unupgraded peers
    def test_block_relay(self, segwit_activated):
        """Exercise witness-block relay: getdata flags on announcements, and
        getblock RPC vs getdata consistency before/after segwit activation."""
        self.log.info("Testing block relay")

        blocktype = 2|MSG_WITNESS_FLAG

        # test_node has set NODE_WITNESS, so all getdata requests should be for
        # witness blocks.
        # Test announcing a block via inv results in a getdata, and that
        # announcing a version 4 or random VB block with a header results in a getdata
        block1 = self.build_next_block()
        block1.solve()

        self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False)
        assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
        self.test_node.test_witness_block(block1, True)

        # Favcoin: Blocks with nVersion < VB_TOP_BITS are rejected
        # self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True)
        # assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
        # self.test_node.test_witness_block(block2, True)

        block3 = self.build_next_block(nVersion=(VB_TOP_BITS | (1<<15)))
        block3.solve()
        self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True)
        assert(self.test_node.last_message["getdata"].inv[0].type == blocktype)
        self.test_node.test_witness_block(block3, True)

        # Check that we can getdata for witness blocks or regular blocks,
        # and the right thing happens.
        if segwit_activated == False:
            # Before activation, we should be able to request old blocks with
            # or without witness, and they should be the same.
            chain_height = self.nodes[0].getblockcount()
            # Pick 10 random blocks on main chain, and verify that getdata's
            # for MSG_BLOCK, MSG_WITNESS_BLOCK, and rpc getblock() are equal.
            all_heights = list(range(chain_height+1))
            random.shuffle(all_heights)
            all_heights = all_heights[0:10]
            for height in all_heights:
                block_hash = self.nodes[0].getblockhash(height)
                rpc_block = self.nodes[0].getblock(block_hash, False)
                block_hash = int(block_hash, 16)
                block = self.test_node.request_block(block_hash, 2)
                wit_block = self.test_node.request_block(block_hash, 2|MSG_WITNESS_FLAG)
                assert_equal(block.serialize(True), wit_block.serialize(True))
                assert_equal(block.serialize(), hex_str_to_bytes(rpc_block))
        else:
            # After activation, witness blocks and non-witness blocks should
            # be different.  Verify rpc getblock() returns witness blocks, while
            # getdata respects the requested type.
            block = self.build_next_block()
            self.update_witness_block_with_transactions(block, [])
            # This gives us a witness commitment.
            assert(len(block.vtx[0].wit.vtxinwit) == 1)
            assert(len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1)
            self.test_node.test_witness_block(block, accepted=True)
            # Now try to retrieve it...
            rpc_block = self.nodes[0].getblock(block.hash, False)
            non_wit_block = self.test_node.request_block(block.sha256, 2)
            wit_block = self.test_node.request_block(block.sha256, 2|MSG_WITNESS_FLAG)
            assert_equal(wit_block.serialize(True), hex_str_to_bytes(rpc_block))
            assert_equal(wit_block.serialize(False), non_wit_block.serialize())
            assert_equal(wit_block.serialize(True), block.serialize(True))

            # Test size, vsize, weight
            rpc_details = self.nodes[0].getblock(block.hash, True)
            assert_equal(rpc_details["size"], len(block.serialize(True)))
            assert_equal(rpc_details["strippedsize"], len(block.serialize(False)))
            weight = 3*len(block.serialize(False)) + len(block.serialize(True))
            assert_equal(rpc_details["weight"], weight)

            # Upgraded node should not ask for blocks from unupgraded
            # Favcoin: Blocks with nVersion < VB_TOP_BITS are rejected
            block4 = self.build_next_block(nVersion=(VB_TOP_BITS | (1<<15)))
            block4.solve()
            self.old_node.getdataset = set()

            # Blocks can be requested via direct-fetch (immediately upon processing the announcement)
            # or via parallel download (with an indeterminate delay from processing the announcement)
            # so to test that a block is NOT requested, we could guess a time period to sleep for,
            # and then check. We can avoid the sleep() by taking advantage of transaction getdata's
            # being processed after block getdata's, and announce a transaction as well,
            # and then check to see if that particular getdata has been received.
            # Since 0.14, inv's will only be responded to with a getheaders, so send a header
            # to announce this block.
            msg = msg_headers()
            msg.headers = [ CBlockHeader(block4) ]
            self.old_node.send_message(msg)
            self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0])
            assert(block4.sha256 not in self.old_node.getdataset)

    # V0 segwit outputs should be standard after activation, but not before.
    def test_standardness_v0(self, segwit_activated):
        """Check relay standardness of v0 witness outputs on the standard
        (std_node) vs non-standard (test_node) peers, before and after
        activation, then clean up via tx3 and refresh self.utxo."""
        self.log.info("Testing standardness of v0 outputs (%s activation)" % ("after" if segwit_activated else "before"))
        assert(len(self.utxo))

        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])

        p2sh_pubkey = hash160(witness_program)
        p2sh_scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL])

        # First prepare a p2sh output (so that spending it will pass standardness)
        p2sh_tx = CTransaction()
        p2sh_tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
        p2sh_tx.vout = [CTxOut(self.utxo[0].nValue-100000, p2sh_scriptPubKey)]
        p2sh_tx.rehash()

        # Mine it on test_node to create the confirmed output.
        self.test_node.test_transaction_acceptance(p2sh_tx, with_witness=True, accepted=True)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)

        # Now test standardness of v0 P2WSH outputs.
        # Start by creating a transaction with two outputs.
        tx = CTransaction()
        tx.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
        tx.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000000, scriptPubKey)]
        tx.vout.append(CTxOut(800000, scriptPubKey)) # Might burn this later
        tx.rehash()

        self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=segwit_activated)

        # Now create something that looks like a P2PKH output. This won't be spendable.
        scriptPubKey = CScript([OP_0, hash160(witness_hash)])
        tx2 = CTransaction()
        if segwit_activated:
            # if tx was accepted, then we spend the second output.
            tx2.vin = [CTxIn(COutPoint(tx.sha256, 1), b"")]
            tx2.vout = [CTxOut(700000, scriptPubKey)]
            tx2.wit.vtxinwit.append(CTxInWitness())
            tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
        else:
            # if tx wasn't accepted, we just re-spend the p2sh output we started with.
            tx2.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
            tx2.vout = [CTxOut(p2sh_tx.vout[0].nValue-100000, scriptPubKey)]
        tx2.rehash()

        self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=segwit_activated)

        # Now update self.utxo for later tests.
        tx3 = CTransaction()
        if segwit_activated:
            # tx and tx2 were both accepted.  Don't bother trying to reclaim the
            # P2PKH output; just send tx's first output back to an anyone-can-spend.
            sync_mempools([self.nodes[0], self.nodes[1]])
            tx3.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
            tx3.vout = [CTxOut(tx.vout[0].nValue-100000, CScript([OP_TRUE]))]
            tx3.wit.vtxinwit.append(CTxInWitness())
            tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program]
            tx3.rehash()
            self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)
        else:
            # tx and tx2 didn't go anywhere; just clean up the p2sh_tx output.
            tx3.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))]
            tx3.vout = [CTxOut(p2sh_tx.vout[0].nValue-100000, witness_program)]
            tx3.rehash()
            self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True)

        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        self.utxo.pop(0)
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))
        assert_equal(len(self.nodes[1].getrawmempool()), 0)

    # Verify that future segwit upgraded transactions are non-standard,
    # but valid in blocks. Can run this before and after segwit activation.
    def test_segwit_versions(self):
        """Verify witness versions 1-16 are non-standard to relay but valid
        in blocks (reserved for soft-fork upgrades), and that spending a
        higher-version witness output is rejected by policy."""
        self.log.info("Testing standardness/consensus for segwit versions (0-16)")
        assert(len(self.utxo))
        NUM_TESTS = 17 # will test OP_0, OP_1, ..., OP_16
        if (len(self.utxo) < NUM_TESTS):
            # Not enough utxos; split one into NUM_TESTS anyone-can-spend outputs.
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
            split_value = (self.utxo[0].nValue - 400000) // NUM_TESTS
            for i in range(NUM_TESTS):
                tx.vout.append(CTxOut(split_value, CScript([OP_TRUE])))
            tx.rehash()
            block = self.build_next_block()
            self.update_witness_block_with_transactions(block, [tx])
            self.test_node.test_witness_block(block, accepted=True)
            self.utxo.pop(0)
            for i in range(NUM_TESTS):
                self.utxo.append(UTXO(tx.sha256, i, split_value))

        sync_blocks(self.nodes)
        temp_utxo = []
        tx = CTransaction()
        count = 0
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        assert_equal(len(self.nodes[1].getrawmempool()), 0)
        for version in list(range(OP_1, OP_16+1)) + [OP_0]:
            count += 1
            # First try to spend to a future version segwit scriptPubKey.
            scriptPubKey = CScript([CScriptOp(version), witness_hash])
            tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")]
            tx.vout = [CTxOut(self.utxo[0].nValue-100000, scriptPubKey)]
            tx.rehash()
            # std_node rejects (non-standard output), test_node accepts.
            self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=False)
            self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
            self.utxo.pop(0)
            temp_utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue))

        self.nodes[0].generate(1) # Mine all the transactions
        sync_blocks(self.nodes)
        assert(len(self.nodes[0].getrawmempool()) == 0)

        # Finally, verify that version 0 -> version 1 transactions
        # are non-standard
        scriptPubKey = CScript([CScriptOp(OP_1), witness_hash])
        tx2 = CTransaction()
        tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")]
        tx2.vout = [CTxOut(tx.vout[0].nValue-100000, scriptPubKey)]
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
        tx2.rehash()
        # Gets accepted to test_node, because standardness of outputs isn't
        # checked with fRequireStandard
        self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True)
        self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=False)
        temp_utxo.pop() # last entry in temp_utxo was the output we just spent
        temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue))

        # Spend everything in temp_utxo back to an OP_TRUE output.
        tx3 = CTransaction()
        total_value = 0
        for i in temp_utxo:
            tx3.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
            tx3.wit.vtxinwit.append(CTxInWitness())
            total_value += i.nValue
            tx3.wit.vtxinwit[-1].scriptWitness.stack = [witness_program]
        tx3.vout.append(CTxOut(total_value - 100000, CScript([OP_TRUE])))
        tx3.rehash()
        # Spending a higher version witness output is not allowed by policy,
        # even with fRequireStandard=false.
        self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False)
        self.test_node.sync_with_ping()
        with mininode_lock:
            assert(b"reserved for soft-fork upgrades" in self.test_node.last_message["reject"].reason)

        # Building a block with the transaction must be valid, however.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx2, tx3])
        self.test_node.test_witness_block(block, accepted=True)
        sync_blocks(self.nodes)

        # Add utxo to our list
        self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue))

    def test_premature_coinbase_witness_spend(self):
        """Verify a coinbase witness output cannot be spent before maturity
        (100 blocks), and can be spent exactly at maturity."""
        self.log.info("Testing premature coinbase witness spend")
        block = self.build_next_block()
        # Change the output of the block to be a witness output.
        witness_program = CScript([OP_TRUE])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])
        block.vtx[0].vout[0].scriptPubKey = scriptPubKey
        # This next line will rehash the coinbase and update the merkle
        # root, and solve.
        self.update_witness_block_with_transactions(block, [])
        self.test_node.test_witness_block(block, accepted=True)

        spend_tx = CTransaction()
        spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")]
        spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_program)]
        spend_tx.wit.vtxinwit.append(CTxInWitness())
        spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ]
        spend_tx.rehash()

        # Now test a premature spend.
        self.nodes[0].generate(98)
        sync_blocks(self.nodes)
        block2 = self.build_next_block()
        self.update_witness_block_with_transactions(block2, [spend_tx])
        self.test_node.test_witness_block(block2, accepted=False)

        # Advancing one more block should allow the spend.
        self.nodes[0].generate(1)
        block2 = self.build_next_block()
        self.update_witness_block_with_transactions(block2, [spend_tx])
        self.test_node.test_witness_block(block2, accepted=True)
        sync_blocks(self.nodes)

    def test_signature_version_1(self):
        """Exercise the BIP143 (segwit v0) signature hashing: wrong input
        amounts must fail, all hashtype combinations must validate, and
        P2WPKH signatures must live in the witness, not the scriptSig."""
        self.log.info("Testing segwit signature hash version 1")
        key = CECKey()
        key.set_secretbytes(b"9")
        pubkey = CPubKey(key.get_pubkey())

        witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)])
        witness_hash = sha256(witness_program)
        scriptPubKey = CScript([OP_0, witness_hash])

        # First create a witness output for use in the tests.
        assert(len(self.utxo))
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b""))
        tx.vout.append(CTxOut(self.utxo[0].nValue-100000, scriptPubKey))
        tx.rehash()

        self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True)
        # Mine this transaction in preparation for following tests.
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)
        sync_blocks(self.nodes)
        self.utxo.pop(0)

        # Test each hashtype
        prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)
        for sigflag in [ 0, SIGHASH_ANYONECANPAY ]:
            for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]:
                hashtype |= sigflag
                block = self.build_next_block()
                tx = CTransaction()
                tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
                tx.vout.append(CTxOut(prev_utxo.nValue - 100000, scriptPubKey))
                tx.wit.vtxinwit.append(CTxInWitness())
                # Too-large input value
                sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue+1, key)
                self.update_witness_block_with_transactions(block, [tx])
                self.test_node.test_witness_block(block, accepted=False)

                # Too-small input value
                sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue-1, key)
                block.vtx.pop() # remove last tx
                self.update_witness_block_with_transactions(block, [tx])
                self.test_node.test_witness_block(block, accepted=False)

                # Now try correct value
                sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue, key)
                block.vtx.pop()
                self.update_witness_block_with_transactions(block, [tx])
                self.test_node.test_witness_block(block, accepted=True)

                prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue)

        # Test combinations of signature hashes.
        # Split the utxo into a lot of outputs.
        # Randomly choose up to 10 to spend, sign with different hashtypes, and
        # output to a random number of outputs.  Repeat NUM_TESTS times.
        # Ensure that we've tested a situation where we use SIGHASH_SINGLE with
        # an input index > number of outputs.
        NUM_TESTS = 500
        temp_utxos = []
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b""))
        split_value = prev_utxo.nValue // NUM_TESTS
        for i in range(NUM_TESTS):
            tx.vout.append(CTxOut(split_value, scriptPubKey))
        tx.wit.vtxinwit.append(CTxInWitness())
        sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, prev_utxo.nValue, key)
        for i in range(NUM_TESTS):
            temp_utxos.append(UTXO(tx.sha256, i, split_value))

        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)

        block = self.build_next_block()
        used_sighash_single_out_of_bounds = False
        for i in range(NUM_TESTS):
            # Ping regularly to keep the connection alive
            if (not i % 100):
                self.test_node.sync_with_ping()
            # Choose random number of inputs to use.
            num_inputs = random.randint(1, 10)
            # Create a slight bias for producing more utxos
            num_outputs = random.randint(1, 11)
            random.shuffle(temp_utxos)
            assert(len(temp_utxos) > num_inputs)
            tx = CTransaction()
            total_value = 0
            for i in range(num_inputs):
                tx.vin.append(CTxIn(COutPoint(temp_utxos[i].sha256, temp_utxos[i].n), b""))
                tx.wit.vtxinwit.append(CTxInWitness())
                total_value += temp_utxos[i].nValue
            split_value = total_value // num_outputs
            for i in range(num_outputs):
                tx.vout.append(CTxOut(split_value, scriptPubKey))
            for i in range(num_inputs):
                # Now try to sign each input, using a random hashtype.
                anyonecanpay = 0
                if random.randint(0, 1):
                    anyonecanpay = SIGHASH_ANYONECANPAY
                hashtype = random.randint(1, 3) | anyonecanpay
                sign_P2PK_witness_input(witness_program, tx, i, hashtype, temp_utxos[i].nValue, key)
                if (hashtype == SIGHASH_SINGLE and i >= num_outputs):
                    used_sighash_single_out_of_bounds = True
            tx.rehash()
            for i in range(num_outputs):
                temp_utxos.append(UTXO(tx.sha256, i, split_value))
            temp_utxos = temp_utxos[num_inputs:]

            block.vtx.append(tx)

            # Test the block periodically, if we're close to maxblocksize
            if (get_virtual_size(block) > MAX_BLOCK_BASE_SIZE - 1000):
                self.update_witness_block_with_transactions(block, [])
                self.test_node.test_witness_block(block, accepted=True)
                block = self.build_next_block()

        if (not used_sighash_single_out_of_bounds):
            self.log.info("WARNING: this test run didn't attempt SIGHASH_SINGLE with out-of-bounds index value")
        # Test the transactions we've added to the block
        if (len(block.vtx) > 1):
            self.update_witness_block_with_transactions(block, [])
            self.test_node.test_witness_block(block, accepted=True)

        # Now test witness version 0 P2PKH transactions
        pubkeyhash = hash160(pubkey)
        scriptPKH = CScript([OP_0, pubkeyhash])
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(temp_utxos[0].sha256, temp_utxos[0].n), b""))
        tx.vout.append(CTxOut(temp_utxos[0].nValue, scriptPKH))
        tx.wit.vtxinwit.append(CTxInWitness())
        sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, temp_utxos[0].nValue, key)
        tx2 = CTransaction()
        tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b""))
        tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE])))

        script = GetP2PKHScript(pubkeyhash)
        sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue)
        signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL

        # Check that we can't have a scriptSig
        tx2.vin[0].scriptSig = CScript([signature, pubkey])
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx, tx2])
        self.test_node.test_witness_block(block, accepted=False)

        # Move the signature to the witness.
        block.vtx.pop()
        tx2.wit.vtxinwit.append(CTxInWitness())
        tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey]
        tx2.vin[0].scriptSig = b""
        tx2.rehash()

        self.update_witness_block_with_transactions(block, [tx2])
        self.test_node.test_witness_block(block, accepted=True)

        temp_utxos.pop(0)

        # Update self.utxos for later tests. Just spend everything in
        # temp_utxos to a corresponding entry in self.utxos
        tx = CTransaction()
        index = 0
        for i in temp_utxos:
            # Just spend to our usual anyone-can-spend output
            # Use SIGHASH_SINGLE|SIGHASH_ANYONECANPAY so we can build up
            # the signatures as we go.
            tx.vin.append(CTxIn(COutPoint(i.sha256, i.n), b""))
            tx.vout.append(CTxOut(i.nValue, CScript([OP_TRUE])))
            tx.wit.vtxinwit.append(CTxInWitness())
            sign_P2PK_witness_input(witness_program, tx, index, SIGHASH_SINGLE|SIGHASH_ANYONECANPAY, i.nValue, key)
            index += 1
        block = self.build_next_block()
        self.update_witness_block_with_transactions(block, [tx])
        self.test_node.test_witness_block(block, accepted=True)

        for i in range(len(tx.vout)):
            self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue))

    # Test P2SH wrapped witness programs.
def test_p2sh_witness(self, segwit_activated): self.log.info("Testing P2SH witness transactions") assert(len(self.utxo)) # Prepare the p2sh-wrapped witness output witness_program = CScript([OP_DROP, OP_TRUE]) witness_hash = sha256(witness_program) p2wsh_pubkey = CScript([OP_0, witness_hash]) p2sh_witness_hash = hash160(p2wsh_pubkey) scriptPubKey = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL]) scriptSig = CScript([p2wsh_pubkey]) # a push of the redeem script # Fund the P2SH output tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, scriptPubKey)) tx.rehash() # Verify mempool acceptance and block validity self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True) block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True, with_witness=segwit_activated) sync_blocks(self.nodes) # Now test attempts to spend the output. spend_tx = CTransaction() spend_tx.vin.append(CTxIn(COutPoint(tx.sha256, 0), scriptSig)) spend_tx.vout.append(CTxOut(tx.vout[0].nValue-100000, CScript([OP_TRUE]))) spend_tx.rehash() # This transaction should not be accepted into the mempool pre- or # post-segwit. Mempool acceptance will use SCRIPT_VERIFY_WITNESS which # will require a witness to spend a witness program regardless of # segwit activation. Note that older bitcoind's that are not # segwit-aware would also reject this for failing CLEANSTACK. self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False) # Try to put the witness script in the scriptSig, should also fail. spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a']) spend_tx.rehash() self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False) # Now put the witness script in the witness, should succeed after # segwit activates. 
spend_tx.vin[0].scriptSig = scriptSig spend_tx.rehash() spend_tx.wit.vtxinwit.append(CTxInWitness()) spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a', witness_program ] # Verify mempool acceptance self.test_node.test_transaction_acceptance(spend_tx, with_witness=True, accepted=segwit_activated) block = self.build_next_block() self.update_witness_block_with_transactions(block, [spend_tx]) # If we're before activation, then sending this without witnesses # should be valid. If we're after activation, then sending this with # witnesses should be valid. if segwit_activated: self.test_node.test_witness_block(block, accepted=True) else: self.test_node.test_witness_block(block, accepted=True, with_witness=False) # Update self.utxo self.utxo.pop(0) self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue)) # Test the behavior of starting up a segwit-aware node after the softfork # has activated. As segwit requires different block data than pre-segwit # nodes would have stored, this requires special handling. # To enable this test, pass --oldbinary=<path-to-pre-segwit-bitcoind> to # the test. def test_upgrade_after_activation(self, node_id): self.log.info("Testing software upgrade after softfork activation") assert(node_id != 0) # node0 is assumed to be a segwit-active bitcoind # Make sure the nodes are all up sync_blocks(self.nodes) # Restart with the new binary self.stop_node(node_id) self.start_node(node_id, extra_args=[]) connect_nodes(self.nodes[0], node_id) sync_blocks(self.nodes) # Make sure that this peer thinks segwit has activated. assert(get_bip9_status(self.nodes[node_id], 'segwit')['status'] == "active") # Make sure this peers blocks match those of node0. 
height = self.nodes[node_id].getblockcount() while height >= 0: block_hash = self.nodes[node_id].getblockhash(height) assert_equal(block_hash, self.nodes[0].getblockhash(height)) assert_equal(self.nodes[0].getblock(block_hash), self.nodes[node_id].getblock(block_hash)) height -= 1 def test_witness_sigops(self): '''Ensure sigop counting is correct inside witnesses.''' self.log.info("Testing sigops limit") assert(len(self.utxo)) # Keep this under MAX_OPS_PER_SCRIPT (201) witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG]*5 + [OP_CHECKSIG]*193 + [OP_ENDIF]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) sigops_per_script = 20*5 + 193*1 # We'll produce 2 extra outputs, one with a program that would take us # over max sig ops, and one with a program that would exactly reach max # sig ops outputs = (MAX_SIGOP_COST // sigops_per_script) + 2 extra_sigops_available = MAX_SIGOP_COST % sigops_per_script # We chose the number of checkmultisigs/checksigs to make this work: assert(extra_sigops_available < 100) # steer clear of MAX_OPS_PER_SCRIPT # This script, when spent with the first # N(=MAX_SIGOP_COST//sigops_per_script) outputs of our transaction, # would push us just over the block sigop limit. witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available + 1) + [OP_ENDIF]) witness_hash_toomany = sha256(witness_program_toomany) scriptPubKey_toomany = CScript([OP_0, witness_hash_toomany]) # If we spend this script instead, we would exactly reach our sigop # limit (for witness sigops). 
witness_program_justright = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available) + [OP_ENDIF]) witness_hash_justright = sha256(witness_program_justright) scriptPubKey_justright = CScript([OP_0, witness_hash_justright]) # First split our available utxo into a bunch of outputs split_value = self.utxo[0].nValue // outputs tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) for i in range(outputs): tx.vout.append(CTxOut(split_value, scriptPubKey)) tx.vout[-2].scriptPubKey = scriptPubKey_toomany tx.vout[-1].scriptPubKey = scriptPubKey_justright tx.rehash() block_1 = self.build_next_block() self.update_witness_block_with_transactions(block_1, [tx]) self.test_node.test_witness_block(block_1, accepted=True) tx2 = CTransaction() # If we try to spend the first n-1 outputs from tx, that should be # too many sigops. total_value = 0 for i in range(outputs-1): tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b"")) tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program ] total_value += tx.vout[i].nValue tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_toomany ] tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE]))) tx2.rehash() block_2 = self.build_next_block() self.update_witness_block_with_transactions(block_2, [tx2]) self.test_node.test_witness_block(block_2, accepted=False) # Try dropping the last input in tx2, and add an output that has # too many sigops (contributing to legacy sigop count). 
checksig_count = (extra_sigops_available // 4) + 1 scriptPubKey_checksigs = CScript([OP_CHECKSIG]*checksig_count) tx2.vout.append(CTxOut(0, scriptPubKey_checksigs)) tx2.vin.pop() tx2.wit.vtxinwit.pop() tx2.vout[0].nValue -= tx.vout[-2].nValue tx2.rehash() block_3 = self.build_next_block() self.update_witness_block_with_transactions(block_3, [tx2]) self.test_node.test_witness_block(block_3, accepted=False) # If we drop the last checksig in this output, the tx should succeed. block_4 = self.build_next_block() tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG]*(checksig_count-1)) tx2.rehash() self.update_witness_block_with_transactions(block_4, [tx2]) self.test_node.test_witness_block(block_4, accepted=True) # Reset the tip back down for the next test sync_blocks(self.nodes) for x in self.nodes: x.invalidateblock(block_4.hash) # Try replacing the last input of tx2 to be spending the last # output of tx block_5 = self.build_next_block() tx2.vout.pop() tx2.vin.append(CTxIn(COutPoint(tx.sha256, outputs-1), b"")) tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_justright ] tx2.rehash() self.update_witness_block_with_transactions(block_5, [tx2]) self.test_node.test_witness_block(block_5, accepted=True) # TODO: test p2sh sigop counting def test_getblocktemplate_before_lockin(self): self.log.info("Testing getblocktemplate setting of segwit versionbit (before lockin)") # Node0 is segwit aware, node2 is not. for node in [self.nodes[0], self.nodes[2]]: gbt_results = node.getblocktemplate() block_version = gbt_results['version'] # If we're not indicating segwit support, we will still be # signalling for segwit activation. assert_equal((block_version & (1 << VB_WITNESS_BIT) != 0), node == self.nodes[0]) # If we don't specify the segwit rule, then we won't get a default # commitment. 
assert('default_witness_commitment' not in gbt_results) # Workaround: # Can either change the tip, or change the mempool and wait 5 seconds # to trigger a recomputation of getblocktemplate. txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16) # Using mocktime lets us avoid sleep() sync_mempools(self.nodes) self.nodes[0].setmocktime(int(time.time())+10) self.nodes[2].setmocktime(int(time.time())+10) for node in [self.nodes[0], self.nodes[2]]: gbt_results = node.getblocktemplate({"rules" : ["segwit"]}) block_version = gbt_results['version'] if node == self.nodes[2]: # If this is a non-segwit node, we should still not get a witness # commitment, nor a version bit signalling segwit. assert_equal(block_version & (1 << VB_WITNESS_BIT), 0) assert('default_witness_commitment' not in gbt_results) else: # For segwit-aware nodes, check the version bit and the witness # commitment are correct. assert(block_version & (1 << VB_WITNESS_BIT) != 0) assert('default_witness_commitment' in gbt_results) witness_commitment = gbt_results['default_witness_commitment'] # Check that default_witness_commitment is present. witness_root = CBlock.get_merkle_root([ser_uint256(0), ser_uint256(txid)]) script = get_witness_script(witness_root, 0) assert_equal(witness_commitment, bytes_to_hex_str(script)) # undo mocktime self.nodes[0].setmocktime(0) self.nodes[2].setmocktime(0) # Uncompressed pubkeys are no longer supported in default relay policy, # but (for now) are still valid in blocks. def test_uncompressed_pubkey(self): self.log.info("Testing uncompressed pubkeys") # Segwit transactions using uncompressed pubkeys are not accepted # under default policy, but should still pass consensus. 
key = CECKey() key.set_secretbytes(b"9") key.set_compressed(False) pubkey = CPubKey(key.get_pubkey()) assert_equal(len(pubkey), 65) # This should be an uncompressed pubkey assert(len(self.utxo) > 0) utxo = self.utxo.pop(0) # Test 1: P2WPKH # First create a P2WPKH output that uses an uncompressed pubkey pubkeyhash = hash160(pubkey) scriptPKH = CScript([OP_0, pubkeyhash]) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(utxo.sha256, utxo.n), b"")) tx.vout.append(CTxOut(utxo.nValue-100000, scriptPKH)) tx.rehash() # Confirm it in a block. block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True) # Now try to spend it. Send it to a P2WSH output, which we'll # use in the next test. witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)]) witness_hash = sha256(witness_program) scriptWSH = CScript([OP_0, witness_hash]) tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, scriptWSH)) script = GetP2PKHScript(pubkeyhash) sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue) signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [ signature, pubkey ] tx2.rehash() # Should fail policy test. self.test_node.test_transaction_acceptance(tx2, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)') # But passes consensus. block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=True) # Test 2: P2WSH # Try to spend the P2WSH output created in last test. # Send it to a P2SH(P2WSH) output, which we'll use in the next test. 
p2sh_witness_hash = hash160(scriptWSH) scriptP2SH = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL]) scriptSig = CScript([scriptWSH]) tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b"")) tx3.vout.append(CTxOut(tx2.vout[0].nValue-100000, scriptP2SH)) tx3.wit.vtxinwit.append(CTxInWitness()) sign_P2PK_witness_input(witness_program, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key) # Should fail policy test. self.test_node.test_transaction_acceptance(tx3, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)') # But passes consensus. block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx3]) self.test_node.test_witness_block(block, accepted=True) # Test 3: P2SH(P2WSH) # Try to spend the P2SH output created in the last test. # Send it to a P2PKH output, which we'll use in the next test. scriptPubKey = GetP2PKHScript(pubkeyhash) tx4 = CTransaction() tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), scriptSig)) tx4.vout.append(CTxOut(tx3.vout[0].nValue-100000, scriptPubKey)) tx4.wit.vtxinwit.append(CTxInWitness()) sign_P2PK_witness_input(witness_program, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key) # Should fail policy test. self.test_node.test_transaction_acceptance(tx4, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)') block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx4]) self.test_node.test_witness_block(block, accepted=True) # Test 4: Uncompressed pubkeys should still be valid in non-segwit # transactions. tx5 = CTransaction() tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b"")) tx5.vout.append(CTxOut(tx4.vout[0].nValue-100000, CScript([OP_TRUE]))) (sig_hash, err) = SignatureHash(scriptPubKey, tx5, 0, SIGHASH_ALL) signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL tx5.vin[0].scriptSig = CScript([signature, pubkey]) tx5.rehash() # Should pass policy and consensus. 
self.test_node.test_transaction_acceptance(tx5, True, True) block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx5]) self.test_node.test_witness_block(block, accepted=True) self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue)) def test_non_standard_witness(self): self.log.info("Testing detection of non-standard P2WSH witness") pad = chr(1).encode('latin-1') # Create scripts for tests scripts = [] scripts.append(CScript([OP_DROP] * 100)) scripts.append(CScript([OP_DROP] * 99)) scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 60)) scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 61)) p2wsh_scripts = [] assert(len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) # For each script, generate a pair of P2WSH and P2SH-P2WSH output. outputvalue = (self.utxo[0].nValue - 100000) // (len(scripts) * 2) for i in scripts: p2wsh = CScript([OP_0, sha256(i)]) p2sh = hash160(p2wsh) p2wsh_scripts.append(p2wsh) tx.vout.append(CTxOut(outputvalue, p2wsh)) tx.vout.append(CTxOut(outputvalue, CScript([OP_HASH160, p2sh, OP_EQUAL]))) tx.rehash() txid = tx.sha256 self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True) self.nodes[0].generate(1) sync_blocks(self.nodes) # Creating transactions for tests p2wsh_txs = [] p2sh_txs = [] for i in range(len(scripts)): p2wsh_tx = CTransaction() p2wsh_tx.vin.append(CTxIn(COutPoint(txid,i*2))) p2wsh_tx.vout.append(CTxOut(outputvalue - 500000, CScript([OP_0, hash160(hex_str_to_bytes(""))]))) p2wsh_tx.wit.vtxinwit.append(CTxInWitness()) p2wsh_tx.rehash() p2wsh_txs.append(p2wsh_tx) p2sh_tx = CTransaction() p2sh_tx.vin.append(CTxIn(COutPoint(txid,i*2+1), CScript([p2wsh_scripts[i]]))) p2sh_tx.vout.append(CTxOut(outputvalue - 500000, CScript([OP_0, hash160(hex_str_to_bytes(""))]))) p2sh_tx.wit.vtxinwit.append(CTxInWitness()) p2sh_tx.rehash() p2sh_txs.append(p2sh_tx) # Testing native P2WSH # Witness stack size, excluding witnessScript, 
over 100 is non-standard p2wsh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]] self.std_node.test_transaction_acceptance(p2wsh_txs[0], True, False, b'bad-witness-nonstandard') # Non-standard nodes should accept self.test_node.test_transaction_acceptance(p2wsh_txs[0], True, True) # Stack element size over 80 bytes is non-standard p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]] self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, False, b'bad-witness-nonstandard') # Non-standard nodes should accept self.test_node.test_transaction_acceptance(p2wsh_txs[1], True, True) # Standard nodes should accept if element size is not over 80 bytes p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]] self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, True) # witnessScript size at 3600 bytes is standard p2wsh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]] self.test_node.test_transaction_acceptance(p2wsh_txs[2], True, True) self.std_node.test_transaction_acceptance(p2wsh_txs[2], True, True) # witnessScript size at 3601 bytes is non-standard p2wsh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]] self.std_node.test_transaction_acceptance(p2wsh_txs[3], True, False, b'bad-witness-nonstandard') # Non-standard nodes should accept self.test_node.test_transaction_acceptance(p2wsh_txs[3], True, True) # Repeating the same tests with P2SH-P2WSH p2sh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]] self.std_node.test_transaction_acceptance(p2sh_txs[0], True, False, b'bad-witness-nonstandard') self.test_node.test_transaction_acceptance(p2sh_txs[0], True, True) p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]] self.std_node.test_transaction_acceptance(p2sh_txs[1], True, False, b'bad-witness-nonstandard') self.test_node.test_transaction_acceptance(p2sh_txs[1], True, True) 
p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]] self.std_node.test_transaction_acceptance(p2sh_txs[1], True, True) p2sh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]] self.test_node.test_transaction_acceptance(p2sh_txs[2], True, True) self.std_node.test_transaction_acceptance(p2sh_txs[2], True, True) p2sh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]] self.std_node.test_transaction_acceptance(p2sh_txs[3], True, False, b'bad-witness-nonstandard') self.test_node.test_transaction_acceptance(p2sh_txs[3], True, True) self.nodes[0].generate(1) # Mine and clean up the mempool of non-standard node # Valid but non-standard transactions in a block should be accepted by standard node sync_blocks(self.nodes) assert_equal(len(self.nodes[0].getrawmempool()), 0) assert_equal(len(self.nodes[1].getrawmempool()), 0) self.utxo.pop(0) def test_reject_blocks(self): print ("\tTesting rejection of block.nVersion < BIP9_TOP_BITS blocks") block = self.build_next_block(nVersion=4) block.solve() resp = self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert_equal(resp, 'bad-version(0x00000004)') def run_test(self): # Setup the p2p connections and start up the network thread. 
self.test_node = TestNode() # sets NODE_WITNESS|NODE_NETWORK self.old_node = TestNode() # only NODE_NETWORK self.std_node = TestNode() # for testing node1 (fRequireStandard=true) self.p2p_connections = [self.test_node, self.old_node] self.connections = [] self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node, services=NODE_NETWORK|NODE_WITNESS)) self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.old_node, services=NODE_NETWORK)) self.connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], self.std_node, services=NODE_NETWORK|NODE_WITNESS)) self.test_node.add_connection(self.connections[0]) self.old_node.add_connection(self.connections[1]) self.std_node.add_connection(self.connections[2]) NetworkThread().start() # Start up network handling in another thread # Keep a place to store utxo's that can be used in later tests self.utxo = [] # Test logic begins here self.test_node.wait_for_verack() self.log.info("Starting tests before segwit lock in:") self.test_witness_services() # Verifies NODE_WITNESS self.test_non_witness_transaction() # non-witness tx's are accepted self.test_unnecessary_witness_before_segwit_activation() self.test_block_relay(segwit_activated=False) # Advance to segwit being 'started' self.advance_to_segwit_started() sync_blocks(self.nodes) self.test_getblocktemplate_before_lockin() sync_blocks(self.nodes) # At lockin, nothing should change. 
self.log.info("Testing behavior post lockin, pre-activation") self.advance_to_segwit_lockin() # Retest unnecessary witnesses self.test_unnecessary_witness_before_segwit_activation() self.test_witness_tx_relay_before_segwit_activation() self.test_block_relay(segwit_activated=False) self.test_p2sh_witness(segwit_activated=False) self.test_standardness_v0(segwit_activated=False) sync_blocks(self.nodes) # Now activate segwit self.log.info("Testing behavior after segwit activation") self.advance_to_segwit_active() sync_blocks(self.nodes) # Test P2SH witness handling again self.test_reject_blocks() self.test_p2sh_witness(segwit_activated=True) self.test_witness_commitments() self.test_block_malleability() self.test_witness_block_size() self.test_submit_block() self.test_extra_witness_data() self.test_max_witness_push_length() self.test_max_witness_program_length() self.test_witness_input_length() self.test_block_relay(segwit_activated=True) self.test_tx_relay_after_segwit_activation() self.test_standardness_v0(segwit_activated=True) self.test_segwit_versions() self.test_premature_coinbase_witness_spend() self.test_uncompressed_pubkey() self.test_signature_version_1() # Favcoin: Disable test due to occasional travis issue #self.test_non_standard_witness() sync_blocks(self.nodes) self.test_upgrade_after_activation(node_id=2) self.test_witness_sigops() if __name__ == '__main__': SegWitTest().main()
46.302705
150
0.658615
from test_framework.mininode import * from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * from test_framework.script import * from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment, get_witness_script, WITNESS_COMMITMENT_HEADER from test_framework.key import CECKey, CPubKey import time import random from binascii import hexlify VB_WITNESS_BIT = 1 VB_PERIOD = 144 VB_ACTIVATION_THRESHOLD = 108 VB_TOP_BITS = 0x20000000 MAX_SIGOP_COST = 80000 def get_virtual_size(witness_block): base_size = len(witness_block.serialize()) total_size = len(witness_block.serialize(with_witness=True)) vsize = int((3*base_size + total_size + 3)/4) return vsize class TestNode(NodeConnCB): def __init__(self): super().__init__() self.getdataset = set() def on_getdata(self, conn, message): for inv in message.inv: self.getdataset.add(inv.hash) def announce_tx_and_wait_for_getdata(self, tx, timeout=60): with mininode_lock: self.last_message.pop("getdata", None) self.send_message(msg_inv(inv=[CInv(1, tx.sha256)])) self.wait_for_getdata(timeout) def announce_block_and_wait_for_getdata(self, block, use_header, timeout=60): with mininode_lock: self.last_message.pop("getdata", None) self.last_message.pop("getheaders", None) msg = msg_headers() msg.headers = [ CBlockHeader(block) ] if use_header: self.send_message(msg) else: self.send_message(msg_inv(inv=[CInv(2, block.sha256)])) self.wait_for_getheaders() self.send_message(msg) self.wait_for_getdata() def request_block(self, blockhash, inv_type, timeout=60): with mininode_lock: self.last_message.pop("block", None) self.send_message(msg_getdata(inv=[CInv(inv_type, blockhash)])) self.wait_for_block(blockhash, timeout) return self.last_message["block"].block def test_transaction_acceptance(self, tx, with_witness, accepted, reason=None): tx_message = msg_tx(tx) if with_witness: tx_message = msg_witness_tx(tx) self.send_message(tx_message) self.sync_with_ping() 
assert_equal(tx.hash in self.connection.rpc.getrawmempool(), accepted) if (reason != None and not accepted): with mininode_lock: assert_equal(self.last_message["reject"].reason, reason) def test_witness_block(self, block, accepted, with_witness=True): if with_witness: self.send_message(msg_witness_block(block)) else: self.send_message(msg_block(block)) self.sync_with_ping() assert_equal(self.connection.rpc.getbestblockhash() == block.hash, accepted) class UTXO(object): def __init__(self, sha256, n, nValue): self.sha256 = sha256 self.n = n self.nValue = nValue def GetP2PKHScript(pubkeyhash): return CScript([CScriptOp(OP_DUP), CScriptOp(OP_HASH160), pubkeyhash, CScriptOp(OP_EQUALVERIFY), CScriptOp(OP_CHECKSIG)]) def sign_P2PK_witness_input(script, txTo, inIdx, hashtype, value, key): tx_hash = SegwitVersion1SignatureHash(script, txTo, inIdx, hashtype, value) signature = key.sign(tx_hash) + chr(hashtype).encode('latin-1') txTo.wit.vtxinwit[inIdx].scriptWitness.stack = [signature, script] txTo.rehash() class SegWitTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 3 self.extra_args = [["-whitelist=127.0.0.1"], ["-whitelist=127.0.0.1", "-acceptnonstdtxn=0"], ["-whitelist=127.0.0.1", "-vbparams=segwit:0:0"]] def setup_network(self): self.setup_nodes() connect_nodes(self.nodes[0], 1) connect_nodes(self.nodes[0], 2) self.sync_all() def build_next_block(self, nVersion=VB_TOP_BITS): tip = self.nodes[0].getbestblockhash() height = self.nodes[0].getblockcount() + 1 block_time = self.nodes[0].getblockheader(tip)["mediantime"] + 1 block = create_block(int(tip, 16), create_coinbase(height), block_time) block.nVersion = nVersion block.rehash() return block # Adds list of transactions to block, adds witness commitment, then solves. 
def update_witness_block_with_transactions(self, block, tx_list, nonce=0): block.vtx.extend(tx_list) add_witness_commitment(block, nonce) block.solve() return def test_witness_services(self): self.log.info("Verifying NODE_WITNESS service bit") assert((self.test_node.connection.nServices & NODE_WITNESS) != 0) # See if sending a regular transaction works, and create a utxo # to use in later tests. def test_non_witness_transaction(self): # Mine a block with an anyone-can-spend coinbase, # let it mature, then try to spend it. self.log.info("Testing non-witness transaction") block = self.build_next_block(nVersion=1) block.solve() self.test_node.send_message(msg_block(block)) self.test_node.sync_with_ping() # make sure the block was processed txid = block.vtx[0].sha256 self.nodes[0].generate(99) # let the block mature # Create a transaction that spends the coinbase tx = CTransaction() tx.vin.append(CTxIn(COutPoint(txid, 0), b"")) tx.vout.append(CTxOut(49*100000000, CScript([OP_TRUE]))) tx.calc_sha256() # Check that serializing it with or without witness is the same # This is a sanity check of our testing framework. assert_equal(msg_tx(tx).serialize(), msg_witness_tx(tx).serialize()) self.test_node.send_message(msg_witness_tx(tx)) self.test_node.sync_with_ping() # make sure the tx was processed assert(tx.hash in self.nodes[0].getrawmempool()) # Save this transaction for later self.utxo.append(UTXO(tx.sha256, 0, 49*100000000)) self.nodes[0].generate(1) # Verify that blocks with witnesses are rejected before activation. 
def test_unnecessary_witness_before_segwit_activation(self): self.log.info("Testing behavior of unnecessary witnesses") # For now, rely on earlier tests to have created at least one utxo for # us to use assert(len(self.utxo) > 0) assert(get_bip9_status(self.nodes[0], 'segwit')['status'] != 'active') tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-1000, CScript([OP_TRUE]))) tx.wit.vtxinwit.append(CTxInWitness()) tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)])] # Verify the hash with witness differs from the txid # (otherwise our testing framework must be broken!) tx.rehash() assert(tx.sha256 != tx.calc_sha256(with_witness=True)) # Construct a segwit-signaling block that includes the transaction. block = self.build_next_block(nVersion=(VB_TOP_BITS|(1 << VB_WITNESS_BIT))) self.update_witness_block_with_transactions(block, [tx]) # Sending witness data before activation is not allowed (anti-spam # rule). self.test_node.test_witness_block(block, accepted=False) # TODO: fix synchronization so we can test reject reason # Right now, bitcoind delays sending reject messages for blocks # until the future, making synchronization here difficult. #assert_equal(self.test_node.last_message["reject"].reason, "unexpected-witness") # But it should not be permanently marked bad... # Resend without witness information. self.test_node.send_message(msg_block(block)) self.test_node.sync_with_ping() assert_equal(self.nodes[0].getbestblockhash(), block.hash) sync_blocks(self.nodes) # Create a p2sh output -- this is so we can pass the standardness # rules (an anyone-can-spend OP_TRUE would be rejected, if not wrapped # in P2SH). 
p2sh_program = CScript([OP_TRUE]) p2sh_pubkey = hash160(p2sh_program) scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL]) # Now check that unnecessary witnesses can't be used to blind a node tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, scriptPubKey)) tx2.rehash() self.test_node.test_transaction_acceptance(tx2, False, True) self.nodes[0].generate(1) sync_blocks(self.nodes) # it to be non-standard, to test that violating policy with a witness before # segwit activation doesn't blind a node to a transaction. Transactions # to the rejection cache. tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), CScript([p2sh_program]))) tx3.vout.append(CTxOut(tx2.vout[0].nValue-100000, scriptPubKey)) tx3.wit.vtxinwit.append(CTxInWitness()) tx3.wit.vtxinwit[0].scriptWitness.stack = [b'a'*400000] tx3.rehash() # Note that this should be rejected for the premature witness reason, # rather than a policy check, since segwit hasn't activated yet. self.std_node.test_transaction_acceptance(tx3, True, False, b'no-witness-yet') self.std_node.test_transaction_acceptance(tx3, False, True) tx4 = CTransaction() tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), CScript([p2sh_program]))) tx4.vout.append(CTxOut(tx3.vout[0].nValue-100000, CScript([OP_TRUE]))) tx4.rehash() self.test_node.test_transaction_acceptance(tx3, False, True) self.test_node.test_transaction_acceptance(tx4, False, True) self.nodes[0].generate(1) sync_blocks(self.nodes) self.utxo.pop(0) self.utxo.append(UTXO(tx4.sha256, 0, tx4.vout[0].nValue)) def advance_to_segwit_started(self): height = self.nodes[0].getblockcount() # Will need to rewrite the tests here if we are past the first period assert(height < VB_PERIOD - 1) # Genesis block is 'defined'. 
assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'defined') # Advance to end of period, status should now be 'started' self.nodes[0].generate(VB_PERIOD-height-1) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') # Mine enough blocks to lock in segwit, but don't activate. def advance_to_segwit_lockin(self): height = self.nodes[0].getblockcount() assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') self.nodes[0].generate(VB_PERIOD-1) height = self.nodes[0].getblockcount() assert((height % VB_PERIOD) == VB_PERIOD - 2) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'started') self.nodes[0].generate(1) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') def advance_to_segwit_active(self): assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') height = self.nodes[0].getblockcount() self.nodes[0].generate(VB_PERIOD - (height%VB_PERIOD) - 2) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'locked_in') self.nodes[0].generate(1) assert_equal(get_bip9_status(self.nodes[0], 'segwit')['status'], 'active') def test_witness_commitments(self): self.log.info("Testing witness commitments") block = self.build_next_block() add_witness_commitment(block) block.solve() assert(msg_witness_block(block).serialize() != msg_block(block).serialize()) self.test_node.test_witness_block(block, accepted=True) block_2 = self.build_next_block() add_witness_commitment(block_2, nonce=28) block_2.solve() assert(block_2.vtx[0].vout[-1] != block.vtx[0].vout[-1]) self.test_node.test_witness_block(block_2, accepted=True) assert (len(self.utxo) > 0) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) tx.vout.append(CTxOut(self.utxo[0].nValue-1000, scriptPubKey)) tx.rehash() # tx2 will spend tx1, and 
send back to a regular anyone-can-spend address tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program)) tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program] tx2.rehash() block_3 = self.build_next_block() self.update_witness_block_with_transactions(block_3, [tx, tx2], nonce=1) # Add an extra OP_RETURN output that matches the witness commitment template, # even though it has extra data after the incorrect commitment. # This block should fail. block_3.vtx[0].vout.append(CTxOut(0, CScript([OP_RETURN, WITNESS_COMMITMENT_HEADER + ser_uint256(2), 10]))) block_3.vtx[0].rehash() block_3.hashMerkleRoot = block_3.calc_merkle_root() block_3.rehash() block_3.solve() self.test_node.test_witness_block(block_3, accepted=False) # Add a different commitment with different nonce, but in the # right location, and with some funds burned(!). # This should succeed (nValue shouldn't affect finding the add_witness_commitment(block_3, nonce=0) block_3.vtx[0].vout[0].nValue -= 1 block_3.vtx[0].vout[-1].nValue += 1 block_3.vtx[0].rehash() block_3.hashMerkleRoot = block_3.calc_merkle_root() block_3.rehash() assert(len(block_3.vtx[0].vout) == 4) block_3.solve() self.test_node.test_witness_block(block_3, accepted=True) block_4 = self.build_next_block() tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b"")) tx3.vout.append(CTxOut(tx.vout[0].nValue-1000, witness_program)) tx3.rehash() block_4.vtx.append(tx3) block_4.hashMerkleRoot = block_4.calc_merkle_root() block_4.solve() self.test_node.test_witness_block(block_4, with_witness=False, accepted=True) self.utxo.pop(0) self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue)) def test_block_malleability(self): self.log.info("Testing witness block malleability") # Make sure that a block that has too big a virtual size # because of a too-large coinbase witness is not permanently # marked bad. 
block = self.build_next_block() add_witness_commitment(block) block.solve() block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.append(b'a'*5000000) assert(get_virtual_size(block) > MAX_BLOCK_BASE_SIZE) # We can't send over the p2p network, because this is too big to relay self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert(self.nodes[0].getbestblockhash() != block.hash) block.vtx[0].wit.vtxinwit[0].scriptWitness.stack.pop() assert(get_virtual_size(block) < MAX_BLOCK_BASE_SIZE) self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert(self.nodes[0].getbestblockhash() == block.hash) # result in a block permanently marked bad. block = self.build_next_block() add_witness_commitment(block) block.solve() # Change the nonce -- should not cause the block to be permanently # failed block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(1) ] self.test_node.test_witness_block(block, accepted=False) # Changing the witness nonce doesn't change the block hash block.vtx[0].wit.vtxinwit[0].scriptWitness.stack = [ ser_uint256(0) ] self.test_node.test_witness_block(block, accepted=True) def test_witness_block_size(self): self.log.info("Testing witness block size limit") # Skipping this test for now; this is covered in p2p-fullblocktest.py # Test that witness-bearing blocks are limited at ceil(base + wit/4) <= 1MB. block = self.build_next_block() assert(len(self.utxo) > 0) # Create a P2WSH transaction. # The witness program will be a bunch of OP_2DROP's, followed by OP_TRUE. # virtual size. NUM_DROPS = 200 # 201 max ops per script! 
NUM_OUTPUTS = 50 witness_program = CScript([OP_2DROP]*NUM_DROPS + [OP_TRUE]) witness_hash = uint256_from_str(sha256(witness_program)) scriptPubKey = CScript([OP_0, ser_uint256(witness_hash)]) prevout = COutPoint(self.utxo[0].sha256, self.utxo[0].n) value = self.utxo[0].nValue parent_tx = CTransaction() parent_tx.vin.append(CTxIn(prevout, b"")) child_value = int(value/NUM_OUTPUTS) for i in range(NUM_OUTPUTS): parent_tx.vout.append(CTxOut(child_value, scriptPubKey)) parent_tx.vout[0].nValue -= 50000 assert(parent_tx.vout[0].nValue > 0) parent_tx.rehash() child_tx = CTransaction() for i in range(NUM_OUTPUTS): child_tx.vin.append(CTxIn(COutPoint(parent_tx.sha256, i), b"")) child_tx.vout = [CTxOut(value - 100000, CScript([OP_TRUE]))] for i in range(NUM_OUTPUTS): child_tx.wit.vtxinwit.append(CTxInWitness()) child_tx.wit.vtxinwit[-1].scriptWitness.stack = [b'a'*195]*(2*NUM_DROPS) + [witness_program] child_tx.rehash() self.update_witness_block_with_transactions(block, [parent_tx, child_tx]) vsize = get_virtual_size(block) additional_bytes = (MAX_BLOCK_BASE_SIZE - vsize)*4 i = 0 while additional_bytes > 0: # Add some more bytes to each input until we hit MAX_BLOCK_BASE_SIZE+1 extra_bytes = min(additional_bytes+1, 55) block.vtx[-1].wit.vtxinwit[int(i/(2*NUM_DROPS))].scriptWitness.stack[i%(2*NUM_DROPS)] = b'a'*(195+extra_bytes) additional_bytes -= extra_bytes i += 1 block.vtx[0].vout.pop() # Remove old commitment add_witness_commitment(block) block.solve() vsize = get_virtual_size(block) assert_equal(vsize, MAX_BLOCK_BASE_SIZE + 1) # Make sure that our test case would exceed the old max-network-message # limit assert(len(block.serialize(True)) > 2*1024*1024) self.test_node.test_witness_block(block, accepted=False) # Now resize the second transaction to make the block fit. 
cur_length = len(block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0]) block.vtx[-1].wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(cur_length-1) block.vtx[0].vout.pop() add_witness_commitment(block) block.solve() assert(get_virtual_size(block) == MAX_BLOCK_BASE_SIZE) self.test_node.test_witness_block(block, accepted=True) # Update available utxo's self.utxo.pop(0) self.utxo.append(UTXO(block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue)) def test_submit_block(self): block = self.build_next_block() # Try using a custom nonce and then don't supply it. add_witness_commitment(block, nonce=1) block.vtx[0].wit = CTxWitness() # drop the nonce block.solve() self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert(self.nodes[0].getbestblockhash() != block.hash) # Now redo commitment with the standard nonce, but let bitcoind fill it in. add_witness_commitment(block, nonce=0) block.vtx[0].wit = CTxWitness() block.solve() self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert_equal(self.nodes[0].getbestblockhash(), block.hash) # This time, add a tx with non-empty witness, but don't supply block_2 = self.build_next_block() add_witness_commitment(block_2) block_2.solve() block_2.vtx[0].vout.pop() block_2.vtx[0].wit = CTxWitness() self.nodes[0].submitblock(bytes_to_hex_str(block_2.serialize(True))) assert(self.nodes[0].getbestblockhash() != block_2.hash) def test_extra_witness_data(self): self.log.info("Testing extra witness data in tx") assert(len(self.utxo) > 0) block = self.build_next_block() witness_program = CScript([OP_DROP, OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-200000, scriptPubKey)) tx.vout.append(CTxOut(1000, CScript([OP_TRUE]))) # non-witness output tx.wit.vtxinwit.append(CTxInWitness()) tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([])] 
tx.rehash() self.update_witness_block_with_transactions(block, [tx]) # Extra witness data should not be allowed. self.test_node.test_witness_block(block, accepted=False) # Try extra signature data. Ok if we're not spending a witness output. block.vtx[1].wit.vtxinwit = [] block.vtx[1].vin[0].scriptSig = CScript([OP_0]) block.vtx[1].rehash() add_witness_commitment(block) block.solve() self.test_node.test_witness_block(block, accepted=True) tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vin.append(CTxIn(COutPoint(tx.sha256, 1), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE]))) tx2.wit.vtxinwit.extend([CTxInWitness(), CTxInWitness()]) tx2.wit.vtxinwit[0].scriptWitness.stack = [ CScript([CScriptNum(1)]), CScript([CScriptNum(1)]), witness_program ] tx2.wit.vtxinwit[1].scriptWitness.stack = [ CScript([OP_TRUE]) ] block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=False) tx2.vin[0].scriptSig = CScript([OP_TRUE]) tx2.vin[1].scriptSig = CScript([OP_TRUE]) tx2.wit.vtxinwit[0].scriptWitness.stack.pop(0) tx2.wit.vtxinwit[1].scriptWitness.stack = [] tx2.rehash() add_witness_commitment(block) block.solve() self.test_node.test_witness_block(block, accepted=False) tx2.vin[0].scriptSig = b"" tx2.rehash() add_witness_commitment(block) block.solve() self.test_node.test_witness_block(block, accepted=True) self.utxo.pop(0) self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) def test_max_witness_push_length(self): self.log.info("Testing maximum witness push size") MAX_SCRIPT_ELEMENT_SIZE = 520 assert(len(self.utxo)) block = self.build_next_block() witness_program = CScript([OP_DROP, OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, scriptPubKey)) tx.rehash() tx2 
= CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, CScript([OP_TRUE]))) tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [ b'a'*(MAX_SCRIPT_ELEMENT_SIZE+1), witness_program ] tx2.rehash() self.update_witness_block_with_transactions(block, [tx, tx2]) self.test_node.test_witness_block(block, accepted=False) tx2.wit.vtxinwit[0].scriptWitness.stack[0] = b'a'*(MAX_SCRIPT_ELEMENT_SIZE) add_witness_commitment(block) block.solve() self.test_node.test_witness_block(block, accepted=True) self.utxo.pop() self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) def test_max_witness_program_length(self): # 10k bytes to successfully spend self.log.info("Testing maximum witness program length") assert(len(self.utxo)) MAX_PROGRAM_LENGTH = 10000 # This program is 19 max pushes (9937 bytes), then 64 more opcode-bytes. long_witness_program = CScript([b'a'*520]*19 + [OP_DROP]*63 + [OP_TRUE]) assert(len(long_witness_program) == MAX_PROGRAM_LENGTH+1) long_witness_hash = sha256(long_witness_program) long_scriptPubKey = CScript([OP_0, long_witness_hash]) block = self.build_next_block() tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, long_scriptPubKey)) tx.rehash() tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, CScript([OP_TRUE]))) tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*44 + [long_witness_program] tx2.rehash() self.update_witness_block_with_transactions(block, [tx, tx2]) self.test_node.test_witness_block(block, accepted=False) # Try again with one less byte in the witness program witness_program = CScript([b'a'*520]*19 + [OP_DROP]*62 + [OP_TRUE]) assert(len(witness_program) == MAX_PROGRAM_LENGTH) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, 
witness_hash]) tx.vout[0] = CTxOut(tx.vout[0].nValue, scriptPubKey) tx.rehash() tx2.vin[0].prevout.hash = tx.sha256 tx2.wit.vtxinwit[0].scriptWitness.stack = [b'a']*43 + [witness_program] tx2.rehash() block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx, tx2]) self.test_node.test_witness_block(block, accepted=True) self.utxo.pop() self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) def test_witness_input_length(self): self.log.info("Testing witness input length") assert(len(self.utxo)) witness_program = CScript([OP_DROP, OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) # Create a transaction that splits our utxo into many outputs tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) nValue = self.utxo[0].nValue for i in range(10): tx.vout.append(CTxOut(int(nValue/10), scriptPubKey)) tx.vout[0].nValue -= 1000 assert(tx.vout[0].nValue >= 0) block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True) # Try various ways to spend tx that should all break. # This "broken" transaction serializer will not normalize # the length of vtxinwit. 
class BrokenCTransaction(CTransaction): def serialize_with_witness(self): flags = 0 if not self.wit.is_null(): flags |= 1 r = b"" r += struct.pack("<i", self.nVersion) if flags: dummy = [] r += ser_vector(dummy) r += struct.pack("<B", flags) r += ser_vector(self.vin) r += ser_vector(self.vout) if flags & 1: r += self.wit.serialize() r += struct.pack("<I", self.nLockTime) return r tx2 = BrokenCTransaction() for i in range(10): tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b"")) tx2.vout.append(CTxOut(nValue-3000, CScript([OP_TRUE]))) # First try using a too long vtxinwit for i in range(11): tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[i].scriptWitness.stack = [b'a', witness_program] block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=False) # Now try using a too short vtxinwit tx2.wit.vtxinwit.pop() tx2.wit.vtxinwit.pop() block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=False) # Now make one of the intermediate witnesses be incorrect tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[-1].scriptWitness.stack = [b'a', witness_program] tx2.wit.vtxinwit[5].scriptWitness.stack = [ witness_program ] block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=False) # Fix the broken witness and the block should be accepted. 
tx2.wit.vtxinwit[5].scriptWitness.stack = [b'a', witness_program] block.vtx = [block.vtx[0]] self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=True) self.utxo.pop() self.utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) def test_witness_tx_relay_before_segwit_activation(self): self.log.info("Testing relay of witness transactions") # Generate a transaction that doesn't require a witness, but send it assert(len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, CScript([OP_TRUE]))) tx.wit.vtxinwit.append(CTxInWitness()) tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ] tx.rehash() tx_hash = tx.sha256 tx_value = tx.vout[0].nValue # the getdata is just for the non-witness portion. self.old_node.announce_tx_and_wait_for_getdata(tx) assert(self.old_node.last_message["getdata"].inv[0].type == 1) # Since we haven't delivered the tx yet, inv'ing the same tx from # a witness transaction ought not result in a getdata. 
try: self.test_node.announce_tx_and_wait_for_getdata(tx, timeout=2) self.log.error("Error: duplicate tx getdata!") assert(False) except AssertionError as e: pass # Delivering this transaction with witness should fail (no matter who # its from) assert_equal(len(self.nodes[0].getrawmempool()), 0) assert_equal(len(self.nodes[1].getrawmempool()), 0) self.old_node.test_transaction_acceptance(tx, with_witness=True, accepted=False) self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False) # But eliminating the witness should fix it self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True) # Cleanup: mine the first transaction and update utxo self.nodes[0].generate(1) assert_equal(len(self.nodes[0].getrawmempool()), 0) self.utxo.pop(0) self.utxo.append(UTXO(tx_hash, 0, tx_value)) # After segwit activates, verify that mempool: # - rejects transactions with unnecessary/extra witnesses # - accepts transactions with valid witnesses # and that witness transactions are relayed to non-upgraded peers. def test_tx_relay_after_segwit_activation(self): self.log.info("Testing relay of witness transactions") # Generate a transaction that doesn't require a witness, but send it # when spending a non-witness output. assert(len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, CScript([OP_TRUE]))) tx.wit.vtxinwit.append(CTxInWitness()) tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a' ] tx.rehash() tx_hash = tx.sha256 # Verify that unnecessary witnesses are rejected. self.test_node.announce_tx_and_wait_for_getdata(tx) assert_equal(len(self.nodes[0].getrawmempool()), 0) self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=False) # Verify that removing the witness succeeds. 
self.test_node.announce_tx_and_wait_for_getdata(tx) self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True) # Now try to add extra witness data to a valid witness tx. witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx_hash, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, scriptPubKey)) tx2.rehash() tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b"")) tx3.wit.vtxinwit.append(CTxInWitness()) # Add too-large for IsStandard witness and check that it does not enter reject filter p2sh_program = CScript([OP_TRUE]) p2sh_pubkey = hash160(p2sh_program) witness_program2 = CScript([b'a'*400000]) tx3.vout.append(CTxOut(tx2.vout[0].nValue-100000, CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL]))) tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program2] tx3.rehash() # Node will not be blinded to the transaction self.std_node.announce_tx_and_wait_for_getdata(tx3) self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size') self.std_node.announce_tx_and_wait_for_getdata(tx3) self.std_node.test_transaction_acceptance(tx3, True, False, b'tx-size') # Remove witness stuffing, instead add extra witness push on stack tx3.vout[0] = CTxOut(tx2.vout[0].nValue-100000, CScript([OP_TRUE])) tx3.wit.vtxinwit[0].scriptWitness.stack = [CScript([CScriptNum(1)]), witness_program ] tx3.rehash() self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True) self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False) # Get rid of the extra witness, and verify acceptance. tx3.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ] # Also check that old_node gets a tx announcement, even though this is # a witness transaction. 
self.old_node.wait_for_inv([CInv(1, tx2.sha256)]) # wait until tx2 was inv'ed self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True) self.old_node.wait_for_inv([CInv(1, tx3.sha256)]) raw_tx = self.nodes[0].getrawtransaction(tx3.hash, 1) assert_equal(int(raw_tx["hash"], 16), tx3.calc_sha256(True)) assert_equal(raw_tx["size"], len(tx3.serialize_with_witness())) vsize = (len(tx3.serialize_with_witness()) + 3*len(tx3.serialize_without_witness()) + 3) / 4 assert_equal(raw_tx["vsize"], vsize) assert_equal(len(raw_tx["vin"][0]["txinwitness"]), 1) assert_equal(raw_tx["vin"][0]["txinwitness"][0], hexlify(witness_program).decode('ascii')) assert(vsize != raw_tx["size"]) self.nodes[0].generate(1) assert_equal(len(self.nodes[0].getrawmempool()), 0) self.utxo.pop(0) self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue)) def test_block_relay(self, segwit_activated): self.log.info("Testing block relay") blocktype = 2|MSG_WITNESS_FLAG # test_node has set NODE_WITNESS, so all getdata requests should be for # witness blocks. 
# Test announcing a block via inv results in a getdata, and that # announcing a version 4 or random VB block with a header results in a getdata block1 = self.build_next_block() block1.solve() self.test_node.announce_block_and_wait_for_getdata(block1, use_header=False) assert(self.test_node.last_message["getdata"].inv[0].type == blocktype) self.test_node.test_witness_block(block1, True) # Favcoin: Blocks with nVersion < VB_TOP_BITS are rejected # self.test_node.announce_block_and_wait_for_getdata(block2, use_header=True) # assert(self.test_node.last_message["getdata"].inv[0].type == blocktype) # self.test_node.test_witness_block(block2, True) block3 = self.build_next_block(nVersion=(VB_TOP_BITS | (1<<15))) block3.solve() self.test_node.announce_block_and_wait_for_getdata(block3, use_header=True) assert(self.test_node.last_message["getdata"].inv[0].type == blocktype) self.test_node.test_witness_block(block3, True) # Check that we can getdata for witness blocks or regular blocks, # and the right thing happens. if segwit_activated == False: # Before activation, we should be able to request old blocks with # or without witness, and they should be the same. 
chain_height = self.nodes[0].getblockcount() # Pick 10 random blocks on main chain, and verify that getdata's all_heights = list(range(chain_height+1)) random.shuffle(all_heights) all_heights = all_heights[0:10] for height in all_heights: block_hash = self.nodes[0].getblockhash(height) rpc_block = self.nodes[0].getblock(block_hash, False) block_hash = int(block_hash, 16) block = self.test_node.request_block(block_hash, 2) wit_block = self.test_node.request_block(block_hash, 2|MSG_WITNESS_FLAG) assert_equal(block.serialize(True), wit_block.serialize(True)) assert_equal(block.serialize(), hex_str_to_bytes(rpc_block)) else: block = self.build_next_block() self.update_witness_block_with_transactions(block, []) assert(len(block.vtx[0].wit.vtxinwit) == 1) assert(len(block.vtx[0].wit.vtxinwit[0].scriptWitness.stack) == 1) self.test_node.test_witness_block(block, accepted=True) rpc_block = self.nodes[0].getblock(block.hash, False) non_wit_block = self.test_node.request_block(block.sha256, 2) wit_block = self.test_node.request_block(block.sha256, 2|MSG_WITNESS_FLAG) assert_equal(wit_block.serialize(True), hex_str_to_bytes(rpc_block)) assert_equal(wit_block.serialize(False), non_wit_block.serialize()) assert_equal(wit_block.serialize(True), block.serialize(True)) rpc_details = self.nodes[0].getblock(block.hash, True) assert_equal(rpc_details["size"], len(block.serialize(True))) assert_equal(rpc_details["strippedsize"], len(block.serialize(False))) weight = 3*len(block.serialize(False)) + len(block.serialize(True)) assert_equal(rpc_details["weight"], weight) block4 = self.build_next_block(nVersion=(VB_TOP_BITS | (1<<15))) block4.solve() self.old_node.getdataset = set() # being processed after block getdata's, and announce a transaction as well, # to announce this block. 
msg = msg_headers() msg.headers = [ CBlockHeader(block4) ] self.old_node.send_message(msg) self.old_node.announce_tx_and_wait_for_getdata(block4.vtx[0]) assert(block4.sha256 not in self.old_node.getdataset) # V0 segwit outputs should be standard after activation, but not before. def test_standardness_v0(self, segwit_activated): self.log.info("Testing standardness of v0 outputs (%s activation)" % ("after" if segwit_activated else "before")) assert(len(self.utxo)) witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) p2sh_pubkey = hash160(witness_program) p2sh_scriptPubKey = CScript([OP_HASH160, p2sh_pubkey, OP_EQUAL]) # First prepare a p2sh output (so that spending it will pass standardness) p2sh_tx = CTransaction() p2sh_tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")] p2sh_tx.vout = [CTxOut(self.utxo[0].nValue-100000, p2sh_scriptPubKey)] p2sh_tx.rehash() # Mine it on test_node to create the confirmed output. self.test_node.test_transaction_acceptance(p2sh_tx, with_witness=True, accepted=True) self.nodes[0].generate(1) sync_blocks(self.nodes) # Now test standardness of v0 P2WSH outputs. # Start by creating a transaction with two outputs. tx = CTransaction() tx.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))] tx.vout = [CTxOut(p2sh_tx.vout[0].nValue-1000000, scriptPubKey)] tx.vout.append(CTxOut(800000, scriptPubKey)) # Might burn this later tx.rehash() self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=segwit_activated) # Now create something that looks like a P2PKH output. This won't be spendable. 
scriptPubKey = CScript([OP_0, hash160(witness_hash)]) tx2 = CTransaction() if segwit_activated: tx2.vin = [CTxIn(COutPoint(tx.sha256, 1), b"")] tx2.vout = [CTxOut(700000, scriptPubKey)] tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [witness_program] else: tx2.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))] tx2.vout = [CTxOut(p2sh_tx.vout[0].nValue-100000, scriptPubKey)] tx2.rehash() self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=segwit_activated) # Now update self.utxo for later tests. tx3 = CTransaction() if segwit_activated: # tx and tx2 were both accepted. Don't bother trying to reclaim the sync_mempools([self.nodes[0], self.nodes[1]]) tx3.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")] tx3.vout = [CTxOut(tx.vout[0].nValue-100000, CScript([OP_TRUE]))] tx3.wit.vtxinwit.append(CTxInWitness()) tx3.wit.vtxinwit[0].scriptWitness.stack = [witness_program] tx3.rehash() self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True) else: # tx and tx2 didn't go anywhere; just clean up the p2sh_tx output. 
tx3.vin = [CTxIn(COutPoint(p2sh_tx.sha256, 0), CScript([witness_program]))] tx3.vout = [CTxOut(p2sh_tx.vout[0].nValue-100000, witness_program)] tx3.rehash() self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=True) self.nodes[0].generate(1) sync_blocks(self.nodes) self.utxo.pop(0) self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue)) assert_equal(len(self.nodes[1].getrawmempool()), 0) def test_segwit_versions(self): self.log.info("Testing standardness/consensus for segwit versions (0-16)") assert(len(self.utxo)) NUM_TESTS = 17 if (len(self.utxo) < NUM_TESTS): tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) split_value = (self.utxo[0].nValue - 400000) // NUM_TESTS for i in range(NUM_TESTS): tx.vout.append(CTxOut(split_value, CScript([OP_TRUE]))) tx.rehash() block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True) self.utxo.pop(0) for i in range(NUM_TESTS): self.utxo.append(UTXO(tx.sha256, i, split_value)) sync_blocks(self.nodes) temp_utxo = [] tx = CTransaction() count = 0 witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) assert_equal(len(self.nodes[1].getrawmempool()), 0) for version in list(range(OP_1, OP_16+1)) + [OP_0]: count += 1 scriptPubKey = CScript([CScriptOp(version), witness_hash]) tx.vin = [CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")] tx.vout = [CTxOut(self.utxo[0].nValue-100000, scriptPubKey)] tx.rehash() self.std_node.test_transaction_acceptance(tx, with_witness=True, accepted=False) self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True) self.utxo.pop(0) temp_utxo.append(UTXO(tx.sha256, 0, tx.vout[0].nValue)) self.nodes[0].generate(1) sync_blocks(self.nodes) assert(len(self.nodes[0].getrawmempool()) == 0) scriptPubKey = CScript([CScriptOp(OP_1), witness_hash]) tx2 = CTransaction() tx2.vin = [CTxIn(COutPoint(tx.sha256, 0), b"")] 
tx2.vout = [CTxOut(tx.vout[0].nValue-100000, scriptPubKey)] tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ] tx2.rehash() # checked with fRequireStandard self.test_node.test_transaction_acceptance(tx2, with_witness=True, accepted=True) self.std_node.test_transaction_acceptance(tx2, with_witness=True, accepted=False) temp_utxo.pop() # last entry in temp_utxo was the output we just spent temp_utxo.append(UTXO(tx2.sha256, 0, tx2.vout[0].nValue)) # Spend everything in temp_utxo back to an OP_TRUE output. tx3 = CTransaction() total_value = 0 for i in temp_utxo: tx3.vin.append(CTxIn(COutPoint(i.sha256, i.n), b"")) tx3.wit.vtxinwit.append(CTxInWitness()) total_value += i.nValue tx3.wit.vtxinwit[-1].scriptWitness.stack = [witness_program] tx3.vout.append(CTxOut(total_value - 100000, CScript([OP_TRUE]))) tx3.rehash() # Spending a higher version witness output is not allowed by policy, # even with fRequireStandard=false. self.test_node.test_transaction_acceptance(tx3, with_witness=True, accepted=False) self.test_node.sync_with_ping() with mininode_lock: assert(b"reserved for soft-fork upgrades" in self.test_node.last_message["reject"].reason) # Building a block with the transaction must be valid, however. block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx2, tx3]) self.test_node.test_witness_block(block, accepted=True) sync_blocks(self.nodes) # Add utxo to our list self.utxo.append(UTXO(tx3.sha256, 0, tx3.vout[0].nValue)) def test_premature_coinbase_witness_spend(self): self.log.info("Testing premature coinbase witness spend") block = self.build_next_block() # Change the output of the block to be a witness output. witness_program = CScript([OP_TRUE]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) block.vtx[0].vout[0].scriptPubKey = scriptPubKey # This next line will rehash the coinbase and update the merkle # root, and solve. 
self.update_witness_block_with_transactions(block, []) self.test_node.test_witness_block(block, accepted=True) spend_tx = CTransaction() spend_tx.vin = [CTxIn(COutPoint(block.vtx[0].sha256, 0), b"")] spend_tx.vout = [CTxOut(block.vtx[0].vout[0].nValue, witness_program)] spend_tx.wit.vtxinwit.append(CTxInWitness()) spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ witness_program ] spend_tx.rehash() # Now test a premature spend. self.nodes[0].generate(98) sync_blocks(self.nodes) block2 = self.build_next_block() self.update_witness_block_with_transactions(block2, [spend_tx]) self.test_node.test_witness_block(block2, accepted=False) # Advancing one more block should allow the spend. self.nodes[0].generate(1) block2 = self.build_next_block() self.update_witness_block_with_transactions(block2, [spend_tx]) self.test_node.test_witness_block(block2, accepted=True) sync_blocks(self.nodes) def test_signature_version_1(self): self.log.info("Testing segwit signature hash version 1") key = CECKey() key.set_secretbytes(b"9") pubkey = CPubKey(key.get_pubkey()) witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) # First create a witness output for use in the tests. assert(len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, scriptPubKey)) tx.rehash() self.test_node.test_transaction_acceptance(tx, with_witness=True, accepted=True) # Mine this transaction in preparation for following tests. 
block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True) sync_blocks(self.nodes) self.utxo.pop(0) # Test each hashtype prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue) for sigflag in [ 0, SIGHASH_ANYONECANPAY ]: for hashtype in [SIGHASH_ALL, SIGHASH_NONE, SIGHASH_SINGLE]: hashtype |= sigflag block = self.build_next_block() tx = CTransaction() tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b"")) tx.vout.append(CTxOut(prev_utxo.nValue - 100000, scriptPubKey)) tx.wit.vtxinwit.append(CTxInWitness()) # Too-large input value sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue+1, key) self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=False) # Too-small input value sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue-1, key) block.vtx.pop() # remove last tx self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=False) # Now try correct value sign_P2PK_witness_input(witness_program, tx, 0, hashtype, prev_utxo.nValue, key) block.vtx.pop() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True) prev_utxo = UTXO(tx.sha256, 0, tx.vout[0].nValue) # Test combinations of signature hashes. # Split the utxo into a lot of outputs. # Randomly choose up to 10 to spend, sign with different hashtypes, and # output to a random number of outputs. Repeat NUM_TESTS times. 
# Ensure that we've tested a situation where we use SIGHASH_SINGLE with NUM_TESTS = 500 temp_utxos = [] tx = CTransaction() tx.vin.append(CTxIn(COutPoint(prev_utxo.sha256, prev_utxo.n), b"")) split_value = prev_utxo.nValue // NUM_TESTS for i in range(NUM_TESTS): tx.vout.append(CTxOut(split_value, scriptPubKey)) tx.wit.vtxinwit.append(CTxInWitness()) sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, prev_utxo.nValue, key) for i in range(NUM_TESTS): temp_utxos.append(UTXO(tx.sha256, i, split_value)) block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True) block = self.build_next_block() used_sighash_single_out_of_bounds = False for i in range(NUM_TESTS): if (not i % 100): self.test_node.sync_with_ping() num_inputs = random.randint(1, 10) num_outputs = random.randint(1, 11) random.shuffle(temp_utxos) assert(len(temp_utxos) > num_inputs) tx = CTransaction() total_value = 0 for i in range(num_inputs): tx.vin.append(CTxIn(COutPoint(temp_utxos[i].sha256, temp_utxos[i].n), b"")) tx.wit.vtxinwit.append(CTxInWitness()) total_value += temp_utxos[i].nValue split_value = total_value // num_outputs for i in range(num_outputs): tx.vout.append(CTxOut(split_value, scriptPubKey)) for i in range(num_inputs): anyonecanpay = 0 if random.randint(0, 1): anyonecanpay = SIGHASH_ANYONECANPAY hashtype = random.randint(1, 3) | anyonecanpay sign_P2PK_witness_input(witness_program, tx, i, hashtype, temp_utxos[i].nValue, key) if (hashtype == SIGHASH_SINGLE and i >= num_outputs): used_sighash_single_out_of_bounds = True tx.rehash() for i in range(num_outputs): temp_utxos.append(UTXO(tx.sha256, i, split_value)) temp_utxos = temp_utxos[num_inputs:] block.vtx.append(tx) if (get_virtual_size(block) > MAX_BLOCK_BASE_SIZE - 1000): self.update_witness_block_with_transactions(block, []) self.test_node.test_witness_block(block, accepted=True) block = self.build_next_block() if (not 
used_sighash_single_out_of_bounds): self.log.info("WARNING: this test run didn't attempt SIGHASH_SINGLE with out-of-bounds index value") if (len(block.vtx) > 1): self.update_witness_block_with_transactions(block, []) self.test_node.test_witness_block(block, accepted=True) # Now test witness version 0 P2PKH transactions pubkeyhash = hash160(pubkey) scriptPKH = CScript([OP_0, pubkeyhash]) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(temp_utxos[0].sha256, temp_utxos[0].n), b"")) tx.vout.append(CTxOut(temp_utxos[0].nValue, scriptPKH)) tx.wit.vtxinwit.append(CTxInWitness()) sign_P2PK_witness_input(witness_program, tx, 0, SIGHASH_ALL, temp_utxos[0].nValue, key) tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue, CScript([OP_TRUE]))) script = GetP2PKHScript(pubkeyhash) sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue) signature = key.sign(sig_hash) + b'\x01' # 0x1 is SIGHASH_ALL # Check that we can't have a scriptSig tx2.vin[0].scriptSig = CScript([signature, pubkey]) block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx, tx2]) self.test_node.test_witness_block(block, accepted=False) block.vtx.pop() tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [signature, pubkey] tx2.vin[0].scriptSig = b"" tx2.rehash() self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=True) temp_utxos.pop(0) tx = CTransaction() index = 0 for i in temp_utxos: tx.vin.append(CTxIn(COutPoint(i.sha256, i.n), b"")) tx.vout.append(CTxOut(i.nValue, CScript([OP_TRUE]))) tx.wit.vtxinwit.append(CTxInWitness()) sign_P2PK_witness_input(witness_program, tx, index, SIGHASH_SINGLE|SIGHASH_ANYONECANPAY, i.nValue, key) index += 1 block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True) for i in 
range(len(tx.vout)): self.utxo.append(UTXO(tx.sha256, i, tx.vout[i].nValue)) def test_p2sh_witness(self, segwit_activated): self.log.info("Testing P2SH witness transactions") assert(len(self.utxo)) witness_program = CScript([OP_DROP, OP_TRUE]) witness_hash = sha256(witness_program) p2wsh_pubkey = CScript([OP_0, witness_hash]) p2sh_witness_hash = hash160(p2wsh_pubkey) scriptPubKey = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL]) scriptSig = CScript([p2wsh_pubkey]) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) tx.vout.append(CTxOut(self.utxo[0].nValue-100000, scriptPubKey)) tx.rehash() self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True) block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True, with_witness=segwit_activated) sync_blocks(self.nodes) spend_tx = CTransaction() spend_tx.vin.append(CTxIn(COutPoint(tx.sha256, 0), scriptSig)) spend_tx.vout.append(CTxOut(tx.vout[0].nValue-100000, CScript([OP_TRUE]))) spend_tx.rehash() # segwit-aware would also reject this for failing CLEANSTACK. self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False) # Try to put the witness script in the scriptSig, should also fail. spend_tx.vin[0].scriptSig = CScript([p2wsh_pubkey, b'a']) spend_tx.rehash() self.test_node.test_transaction_acceptance(spend_tx, with_witness=False, accepted=False) # Now put the witness script in the witness, should succeed after # segwit activates. 
spend_tx.vin[0].scriptSig = scriptSig spend_tx.rehash() spend_tx.wit.vtxinwit.append(CTxInWitness()) spend_tx.wit.vtxinwit[0].scriptWitness.stack = [ b'a', witness_program ] # Verify mempool acceptance self.test_node.test_transaction_acceptance(spend_tx, with_witness=True, accepted=segwit_activated) block = self.build_next_block() self.update_witness_block_with_transactions(block, [spend_tx]) # If we're before activation, then sending this without witnesses # witnesses should be valid. if segwit_activated: self.test_node.test_witness_block(block, accepted=True) else: self.test_node.test_witness_block(block, accepted=True, with_witness=False) # Update self.utxo self.utxo.pop(0) self.utxo.append(UTXO(spend_tx.sha256, 0, spend_tx.vout[0].nValue)) # Test the behavior of starting up a segwit-aware node after the softfork # has activated. As segwit requires different block data than pre-segwit # nodes would have stored, this requires special handling. # To enable this test, pass --oldbinary=<path-to-pre-segwit-bitcoind> to # the test. def test_upgrade_after_activation(self, node_id): self.log.info("Testing software upgrade after softfork activation") assert(node_id != 0) # node0 is assumed to be a segwit-active bitcoind # Make sure the nodes are all up sync_blocks(self.nodes) # Restart with the new binary self.stop_node(node_id) self.start_node(node_id, extra_args=[]) connect_nodes(self.nodes[0], node_id) sync_blocks(self.nodes) # Make sure that this peer thinks segwit has activated. assert(get_bip9_status(self.nodes[node_id], 'segwit')['status'] == "active") # Make sure this peers blocks match those of node0. 
height = self.nodes[node_id].getblockcount() while height >= 0: block_hash = self.nodes[node_id].getblockhash(height) assert_equal(block_hash, self.nodes[0].getblockhash(height)) assert_equal(self.nodes[0].getblock(block_hash), self.nodes[node_id].getblock(block_hash)) height -= 1 def test_witness_sigops(self): self.log.info("Testing sigops limit") assert(len(self.utxo)) # Keep this under MAX_OPS_PER_SCRIPT (201) witness_program = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKMULTISIG]*5 + [OP_CHECKSIG]*193 + [OP_ENDIF]) witness_hash = sha256(witness_program) scriptPubKey = CScript([OP_0, witness_hash]) sigops_per_script = 20*5 + 193*1 # We'll produce 2 extra outputs, one with a program that would take us outputs = (MAX_SIGOP_COST // sigops_per_script) + 2 extra_sigops_available = MAX_SIGOP_COST % sigops_per_script assert(extra_sigops_available < 100) witness_program_toomany = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available + 1) + [OP_ENDIF]) witness_hash_toomany = sha256(witness_program_toomany) scriptPubKey_toomany = CScript([OP_0, witness_hash_toomany]) witness_program_justright = CScript([OP_TRUE, OP_IF, OP_TRUE, OP_ELSE] + [OP_CHECKSIG]*(extra_sigops_available) + [OP_ENDIF]) witness_hash_justright = sha256(witness_program_justright) scriptPubKey_justright = CScript([OP_0, witness_hash_justright]) split_value = self.utxo[0].nValue // outputs tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) for i in range(outputs): tx.vout.append(CTxOut(split_value, scriptPubKey)) tx.vout[-2].scriptPubKey = scriptPubKey_toomany tx.vout[-1].scriptPubKey = scriptPubKey_justright tx.rehash() block_1 = self.build_next_block() self.update_witness_block_with_transactions(block_1, [tx]) self.test_node.test_witness_block(block_1, accepted=True) tx2 = CTransaction() total_value = 0 for i in range(outputs-1): tx2.vin.append(CTxIn(COutPoint(tx.sha256, i), b"")) tx2.wit.vtxinwit.append(CTxInWitness()) 
tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program ] total_value += tx.vout[i].nValue tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_toomany ] tx2.vout.append(CTxOut(total_value, CScript([OP_TRUE]))) tx2.rehash() block_2 = self.build_next_block() self.update_witness_block_with_transactions(block_2, [tx2]) self.test_node.test_witness_block(block_2, accepted=False) checksig_count = (extra_sigops_available // 4) + 1 scriptPubKey_checksigs = CScript([OP_CHECKSIG]*checksig_count) tx2.vout.append(CTxOut(0, scriptPubKey_checksigs)) tx2.vin.pop() tx2.wit.vtxinwit.pop() tx2.vout[0].nValue -= tx.vout[-2].nValue tx2.rehash() block_3 = self.build_next_block() self.update_witness_block_with_transactions(block_3, [tx2]) self.test_node.test_witness_block(block_3, accepted=False) block_4 = self.build_next_block() tx2.vout[-1].scriptPubKey = CScript([OP_CHECKSIG]*(checksig_count-1)) tx2.rehash() self.update_witness_block_with_transactions(block_4, [tx2]) self.test_node.test_witness_block(block_4, accepted=True) sync_blocks(self.nodes) for x in self.nodes: x.invalidateblock(block_4.hash) block_5 = self.build_next_block() tx2.vout.pop() tx2.vin.append(CTxIn(COutPoint(tx.sha256, outputs-1), b"")) tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[-1].scriptWitness.stack = [ witness_program_justright ] tx2.rehash() self.update_witness_block_with_transactions(block_5, [tx2]) self.test_node.test_witness_block(block_5, accepted=True) def test_getblocktemplate_before_lockin(self): self.log.info("Testing getblocktemplate setting of segwit versionbit (before lockin)") for node in [self.nodes[0], self.nodes[2]]: gbt_results = node.getblocktemplate() block_version = gbt_results['version'] # signalling for segwit activation. assert_equal((block_version & (1 << VB_WITNESS_BIT) != 0), node == self.nodes[0]) # If we don't specify the segwit rule, then we won't get a default # commitment. 
assert('default_witness_commitment' not in gbt_results) # Workaround: # Can either change the tip, or change the mempool and wait 5 seconds # to trigger a recomputation of getblocktemplate. txid = int(self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1), 16) # Using mocktime lets us avoid sleep() sync_mempools(self.nodes) self.nodes[0].setmocktime(int(time.time())+10) self.nodes[2].setmocktime(int(time.time())+10) for node in [self.nodes[0], self.nodes[2]]: gbt_results = node.getblocktemplate({"rules" : ["segwit"]}) block_version = gbt_results['version'] if node == self.nodes[2]: # If this is a non-segwit node, we should still not get a witness # commitment, nor a version bit signalling segwit. assert_equal(block_version & (1 << VB_WITNESS_BIT), 0) assert('default_witness_commitment' not in gbt_results) else: # For segwit-aware nodes, check the version bit and the witness # commitment are correct. assert(block_version & (1 << VB_WITNESS_BIT) != 0) assert('default_witness_commitment' in gbt_results) witness_commitment = gbt_results['default_witness_commitment'] # Check that default_witness_commitment is present. witness_root = CBlock.get_merkle_root([ser_uint256(0), ser_uint256(txid)]) script = get_witness_script(witness_root, 0) assert_equal(witness_commitment, bytes_to_hex_str(script)) # undo mocktime self.nodes[0].setmocktime(0) self.nodes[2].setmocktime(0) # Uncompressed pubkeys are no longer supported in default relay policy, # but (for now) are still valid in blocks. def test_uncompressed_pubkey(self): self.log.info("Testing uncompressed pubkeys") # Segwit transactions using uncompressed pubkeys are not accepted # under default policy, but should still pass consensus. 
key = CECKey() key.set_secretbytes(b"9") key.set_compressed(False) pubkey = CPubKey(key.get_pubkey()) assert_equal(len(pubkey), 65) # This should be an uncompressed pubkey assert(len(self.utxo) > 0) utxo = self.utxo.pop(0) # Test 1: P2WPKH # First create a P2WPKH output that uses an uncompressed pubkey pubkeyhash = hash160(pubkey) scriptPKH = CScript([OP_0, pubkeyhash]) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(utxo.sha256, utxo.n), b"")) tx.vout.append(CTxOut(utxo.nValue-100000, scriptPKH)) tx.rehash() # Confirm it in a block. block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx]) self.test_node.test_witness_block(block, accepted=True) # Now try to spend it. Send it to a P2WSH output, which we'll witness_program = CScript([pubkey, CScriptOp(OP_CHECKSIG)]) witness_hash = sha256(witness_program) scriptWSH = CScript([OP_0, witness_hash]) tx2 = CTransaction() tx2.vin.append(CTxIn(COutPoint(tx.sha256, 0), b"")) tx2.vout.append(CTxOut(tx.vout[0].nValue-100000, scriptWSH)) script = GetP2PKHScript(pubkeyhash) sig_hash = SegwitVersion1SignatureHash(script, tx2, 0, SIGHASH_ALL, tx.vout[0].nValue) signature = key.sign(sig_hash) + b'\x01' tx2.wit.vtxinwit.append(CTxInWitness()) tx2.wit.vtxinwit[0].scriptWitness.stack = [ signature, pubkey ] tx2.rehash() self.test_node.test_transaction_acceptance(tx2, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)') block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx2]) self.test_node.test_witness_block(block, accepted=True) p2sh_witness_hash = hash160(scriptWSH) scriptP2SH = CScript([OP_HASH160, p2sh_witness_hash, OP_EQUAL]) scriptSig = CScript([scriptWSH]) tx3 = CTransaction() tx3.vin.append(CTxIn(COutPoint(tx2.sha256, 0), b"")) tx3.vout.append(CTxOut(tx2.vout[0].nValue-100000, scriptP2SH)) tx3.wit.vtxinwit.append(CTxInWitness()) sign_P2PK_witness_input(witness_program, tx3, 0, SIGHASH_ALL, tx2.vout[0].nValue, key) # Should 
fail policy test. self.test_node.test_transaction_acceptance(tx3, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)') # But passes consensus. block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx3]) self.test_node.test_witness_block(block, accepted=True) # Test 3: P2SH(P2WSH) # Try to spend the P2SH output created in the last test. # Send it to a P2PKH output, which we'll use in the next test. scriptPubKey = GetP2PKHScript(pubkeyhash) tx4 = CTransaction() tx4.vin.append(CTxIn(COutPoint(tx3.sha256, 0), scriptSig)) tx4.vout.append(CTxOut(tx3.vout[0].nValue-100000, scriptPubKey)) tx4.wit.vtxinwit.append(CTxInWitness()) sign_P2PK_witness_input(witness_program, tx4, 0, SIGHASH_ALL, tx3.vout[0].nValue, key) self.test_node.test_transaction_acceptance(tx4, True, False, b'non-mandatory-script-verify-flag (Using non-compressed keys in segwit)') block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx4]) self.test_node.test_witness_block(block, accepted=True) tx5 = CTransaction() tx5.vin.append(CTxIn(COutPoint(tx4.sha256, 0), b"")) tx5.vout.append(CTxOut(tx4.vout[0].nValue-100000, CScript([OP_TRUE]))) (sig_hash, err) = SignatureHash(scriptPubKey, tx5, 0, SIGHASH_ALL) signature = key.sign(sig_hash) + b'\x01' tx5.vin[0].scriptSig = CScript([signature, pubkey]) tx5.rehash() self.test_node.test_transaction_acceptance(tx5, True, True) block = self.build_next_block() self.update_witness_block_with_transactions(block, [tx5]) self.test_node.test_witness_block(block, accepted=True) self.utxo.append(UTXO(tx5.sha256, 0, tx5.vout[0].nValue)) def test_non_standard_witness(self): self.log.info("Testing detection of non-standard P2WSH witness") pad = chr(1).encode('latin-1') scripts = [] scripts.append(CScript([OP_DROP] * 100)) scripts.append(CScript([OP_DROP] * 99)) scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 60)) scripts.append(CScript([pad * 59] * 59 + [OP_DROP] * 61)) 
p2wsh_scripts = [] assert(len(self.utxo)) tx = CTransaction() tx.vin.append(CTxIn(COutPoint(self.utxo[0].sha256, self.utxo[0].n), b"")) outputvalue = (self.utxo[0].nValue - 100000) // (len(scripts) * 2) for i in scripts: p2wsh = CScript([OP_0, sha256(i)]) p2sh = hash160(p2wsh) p2wsh_scripts.append(p2wsh) tx.vout.append(CTxOut(outputvalue, p2wsh)) tx.vout.append(CTxOut(outputvalue, CScript([OP_HASH160, p2sh, OP_EQUAL]))) tx.rehash() txid = tx.sha256 self.test_node.test_transaction_acceptance(tx, with_witness=False, accepted=True) self.nodes[0].generate(1) sync_blocks(self.nodes) p2wsh_txs = [] p2sh_txs = [] for i in range(len(scripts)): p2wsh_tx = CTransaction() p2wsh_tx.vin.append(CTxIn(COutPoint(txid,i*2))) p2wsh_tx.vout.append(CTxOut(outputvalue - 500000, CScript([OP_0, hash160(hex_str_to_bytes(""))]))) p2wsh_tx.wit.vtxinwit.append(CTxInWitness()) p2wsh_tx.rehash() p2wsh_txs.append(p2wsh_tx) p2sh_tx = CTransaction() p2sh_tx.vin.append(CTxIn(COutPoint(txid,i*2+1), CScript([p2wsh_scripts[i]]))) p2sh_tx.vout.append(CTxOut(outputvalue - 500000, CScript([OP_0, hash160(hex_str_to_bytes(""))]))) p2sh_tx.wit.vtxinwit.append(CTxInWitness()) p2sh_tx.rehash() p2sh_txs.append(p2sh_tx) p2wsh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]] self.std_node.test_transaction_acceptance(p2wsh_txs[0], True, False, b'bad-witness-nonstandard') self.test_node.test_transaction_acceptance(p2wsh_txs[0], True, True) p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]] self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, False, b'bad-witness-nonstandard') self.test_node.test_transaction_acceptance(p2wsh_txs[1], True, True) p2wsh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]] self.std_node.test_transaction_acceptance(p2wsh_txs[1], True, True) p2wsh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]] self.test_node.test_transaction_acceptance(p2wsh_txs[2], True, True) 
self.std_node.test_transaction_acceptance(p2wsh_txs[2], True, True) p2wsh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]] self.std_node.test_transaction_acceptance(p2wsh_txs[3], True, False, b'bad-witness-nonstandard') self.test_node.test_transaction_acceptance(p2wsh_txs[3], True, True) p2sh_txs[0].wit.vtxinwit[0].scriptWitness.stack = [pad] * 101 + [scripts[0]] self.std_node.test_transaction_acceptance(p2sh_txs[0], True, False, b'bad-witness-nonstandard') self.test_node.test_transaction_acceptance(p2sh_txs[0], True, True) p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 81] * 100 + [scripts[1]] self.std_node.test_transaction_acceptance(p2sh_txs[1], True, False, b'bad-witness-nonstandard') self.test_node.test_transaction_acceptance(p2sh_txs[1], True, True) p2sh_txs[1].wit.vtxinwit[0].scriptWitness.stack = [pad * 80] * 100 + [scripts[1]] self.std_node.test_transaction_acceptance(p2sh_txs[1], True, True) p2sh_txs[2].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, scripts[2]] self.test_node.test_transaction_acceptance(p2sh_txs[2], True, True) self.std_node.test_transaction_acceptance(p2sh_txs[2], True, True) p2sh_txs[3].wit.vtxinwit[0].scriptWitness.stack = [pad, pad, pad, scripts[3]] self.std_node.test_transaction_acceptance(p2sh_txs[3], True, False, b'bad-witness-nonstandard') self.test_node.test_transaction_acceptance(p2sh_txs[3], True, True) self.nodes[0].generate(1) sync_blocks(self.nodes) assert_equal(len(self.nodes[0].getrawmempool()), 0) assert_equal(len(self.nodes[1].getrawmempool()), 0) self.utxo.pop(0) def test_reject_blocks(self): print ("\tTesting rejection of block.nVersion < BIP9_TOP_BITS blocks") block = self.build_next_block(nVersion=4) block.solve() resp = self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True))) assert_equal(resp, 'bad-version(0x00000004)') def run_test(self): self.test_node = TestNode() self.old_node = TestNode() self.std_node = TestNode() self.p2p_connections = [self.test_node, 
self.old_node] self.connections = [] self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node, services=NODE_NETWORK|NODE_WITNESS)) self.connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.old_node, services=NODE_NETWORK)) self.connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], self.std_node, services=NODE_NETWORK|NODE_WITNESS)) self.test_node.add_connection(self.connections[0]) self.old_node.add_connection(self.connections[1]) self.std_node.add_connection(self.connections[2]) NetworkThread().start() self.utxo = [] # Test logic begins here self.test_node.wait_for_verack() self.log.info("Starting tests before segwit lock in:") self.test_witness_services() # Verifies NODE_WITNESS self.test_non_witness_transaction() # non-witness tx's are accepted self.test_unnecessary_witness_before_segwit_activation() self.test_block_relay(segwit_activated=False) self.advance_to_segwit_started() sync_blocks(self.nodes) self.test_getblocktemplate_before_lockin() sync_blocks(self.nodes) self.log.info("Testing behavior post lockin, pre-activation") self.advance_to_segwit_lockin() self.test_unnecessary_witness_before_segwit_activation() self.test_witness_tx_relay_before_segwit_activation() self.test_block_relay(segwit_activated=False) self.test_p2sh_witness(segwit_activated=False) self.test_standardness_v0(segwit_activated=False) sync_blocks(self.nodes) self.log.info("Testing behavior after segwit activation") self.advance_to_segwit_active() sync_blocks(self.nodes) self.test_reject_blocks() self.test_p2sh_witness(segwit_activated=True) self.test_witness_commitments() self.test_block_malleability() self.test_witness_block_size() self.test_submit_block() self.test_extra_witness_data() self.test_max_witness_push_length() self.test_max_witness_program_length() self.test_witness_input_length() self.test_block_relay(segwit_activated=True) self.test_tx_relay_after_segwit_activation() 
self.test_standardness_v0(segwit_activated=True) self.test_segwit_versions() self.test_premature_coinbase_witness_spend() self.test_uncompressed_pubkey() self.test_signature_version_1() sync_blocks(self.nodes) self.test_upgrade_after_activation(node_id=2) self.test_witness_sigops() if __name__ == '__main__': SegWitTest().main()
true
true
f70c6638a3a13cbe074afeaa5a903fcedc67e553
7,114
py
Python
notion_tqdm/std.py
shunyooo/notion-tqdm
78c05eeea9427deaed63709740c4b9329fb1b7e7
[ "MIT" ]
52
2020-11-26T09:07:27.000Z
2022-03-22T21:14:49.000Z
notion_tqdm/std.py
shunyooo/notion-tqdm
78c05eeea9427deaed63709740c4b9329fb1b7e7
[ "MIT" ]
1
2021-05-25T18:35:18.000Z
2021-05-25T18:35:18.000Z
notion_tqdm/std.py
shunyooo/notion-tqdm
78c05eeea9427deaed63709740c4b9329fb1b7e7
[ "MIT" ]
3
2021-02-20T09:36:36.000Z
2022-01-11T17:36:20.000Z
import inspect import json import logging import threading from datetime import datetime, timezone from time import time import pytz import requests from notion.block import TextBlock from notion.client import NotionClient from notion.collection import NotionDate from tqdm import tqdm from tzlocal import get_localzone from .constants import ( POST_INTERVAL_SEC, REQUIRED_COLUMNS, REQUIRED_STATUS_OPTIONS, Status, ) def get_localzone_name(): local_tz = get_localzone() return datetime.now(local_tz).tzname() class notion_tqdm(tqdm): _is_configured = False post_interval_sec = POST_INTERVAL_SEC timezone = get_localzone_name() common_props = {} @classmethod def _get_table_schema_prop_names(cls): return set( [prop["name"] for prop in cls.table_view.collection.get_schema_properties()] ) @classmethod def _validate_table_shcema(cls): # Check table view type if "collection" not in dir(cls.table_view): raise Exception( f"table_view is not referring to the table correctly. Make sure you are setting a table link that is not a page link." ) # Check required columns table_view_columns = cls._get_table_schema_prop_names() missing_columns = REQUIRED_COLUMNS - table_view_columns if len(missing_columns) > 0: raise Exception( f"There are missing columns in the table: {missing_columns}. Did you duplicate this view?: https://www.notion.so/syunyo/notion-tqdm-template-7d2d53595e774c9eb7a020e00fd81fab" ) # Check select options table_status_options = set( [ op["value"] for op in cls.table_view.collection.get_schema_property("status")[ "options" ] ] ) missing_options = REQUIRED_STATUS_OPTIONS - table_status_options if len(missing_options) > 0: raise Exception( f"There are missing options in the status columns: {missing_options}. 
Did you duplicate this view?: https://www.notion.so/syunyo/notion-tqdm-template-7d2d53595e774c9eb7a020e00fd81fab" ) @classmethod def set_config( cls, token_v2, table_url, email=None, timezone=None, post_interval_sec=None ): # Common Config if timezone is not None: cls.timezone = timezone if post_interval_sec is not None: ls.post_interval_sec = post_interval_sec cls._timezone_pytz = pytz.timezone(cls.timezone) # Notion Config cls.client = NotionClient(token_v2=token_v2) if email is not None: cls.client.set_user_by_email(email) cls.table_view = cls.client.get_block(table_url) # Validation cls._validate_table_shcema() cls._is_configured = True @classmethod def set_common_props(cls, **kwargs): cls.common_props = kwargs missing_columns = set(kwargs) - cls._get_table_schema_prop_names() if len(missing_columns) > 0: logging.error( f"There are missing columns in the table: {missing_columns}." ) def localize_timestamp(self, timestamp): utc_datetime = datetime.fromtimestamp(timestamp, tz=timezone.utc) return utc_datetime.astimezone(notion_tqdm._timezone_pytz) def _update_row(self): if not notion_tqdm._is_configured: logging.warning( "notion_tqdm does not seem to be set yet. 
call notion_tqdm.set_config and configure it.\nrefer to https://github.com/shunyooo/notion-tqdm#usage" ) return if self._row_creating: return if self.row is None and not self._row_creating: self._row_creating = True self.row = notion_tqdm.table_view.collection.add_row() self._row_creating = False for c, v in notion_tqdm.common_props.items(): self.row.set_property(c, v) if self.row is not None: # Base props # TODO: Difference only updates now = time() row = self.row row.total = self.total row.name = self.desc row.status = self.status row.value = self.n row.start_timestamp = self.start_t row.update_timestamp = now row.timerange = NotionDate( self.localize_timestamp(self.start_t), self.localize_timestamp(now), timezone=notion_tqdm.timezone, ) row.elapsed_sec = now - self.start_t # Custom props # TODO: Set the props that have been skipped during creating. for c, v in self.custom_props.items(): row.set_property(c, v) # Add Text Blocks for text in self._pending_texts: self.row.children.add_new(TextBlock).title = text self._pending_texts = [] @property def _can_post(self): is_past = ( self.last_post_time is None or (time() - self.last_post_time) > notion_tqdm.post_interval_sec ) return not self._loading and is_past def _post_if_need(self, force): if self._can_post or force: self._loading = True try: self._update_row() except Exception as e: logging.warning(e) self.last_post_time = time() self._loading = False def display(self, msg=None, pos=None, status=None, force=False): force = status is not None or force self.status = Status.doing if status is None else status t = threading.Thread( name="_post_if_need", target=self._post_if_need, args=[force] ) t.setDaemon(True) t.start() def __init__(self, *args, **kwargs): self.row = None self.total = 0 self.last_post_time = None self.status = Status.doing self._loading = False self._row_creating = False super().__init__(*args, **kwargs) self.sp = self.display self.custom_props = {} self._pending_texts = [] def __iter__(self, *args, 
**kwargs): try: for obj in super().__iter__(*args, **kwargs): yield obj except: self.display(status=Status.error) raise def add_text(self, text, force=False): self._pending_texts.append(text) self.display(force) def update_props(self, force=False, **kwags): self.custom_props = kwags self.display(force) def update(self, *args, **kwargs): try: super().update(*args, **kwargs) except: self.display(status=Status.error) raise def close(self, *args, **kwargs): if self.total and self.n < self.total: self.display(status=Status.error) else: self.display(status=Status.done)
34.201923
199
0.608237
import inspect import json import logging import threading from datetime import datetime, timezone from time import time import pytz import requests from notion.block import TextBlock from notion.client import NotionClient from notion.collection import NotionDate from tqdm import tqdm from tzlocal import get_localzone from .constants import ( POST_INTERVAL_SEC, REQUIRED_COLUMNS, REQUIRED_STATUS_OPTIONS, Status, ) def get_localzone_name(): local_tz = get_localzone() return datetime.now(local_tz).tzname() class notion_tqdm(tqdm): _is_configured = False post_interval_sec = POST_INTERVAL_SEC timezone = get_localzone_name() common_props = {} @classmethod def _get_table_schema_prop_names(cls): return set( [prop["name"] for prop in cls.table_view.collection.get_schema_properties()] ) @classmethod def _validate_table_shcema(cls): if "collection" not in dir(cls.table_view): raise Exception( f"table_view is not referring to the table correctly. Make sure you are setting a table link that is not a page link." ) table_view_columns = cls._get_table_schema_prop_names() missing_columns = REQUIRED_COLUMNS - table_view_columns if len(missing_columns) > 0: raise Exception( f"There are missing columns in the table: {missing_columns}. Did you duplicate this view?: https://www.notion.so/syunyo/notion-tqdm-template-7d2d53595e774c9eb7a020e00fd81fab" ) table_status_options = set( [ op["value"] for op in cls.table_view.collection.get_schema_property("status")[ "options" ] ] ) missing_options = REQUIRED_STATUS_OPTIONS - table_status_options if len(missing_options) > 0: raise Exception( f"There are missing options in the status columns: {missing_options}. 
Did you duplicate this view?: https://www.notion.so/syunyo/notion-tqdm-template-7d2d53595e774c9eb7a020e00fd81fab" ) @classmethod def set_config( cls, token_v2, table_url, email=None, timezone=None, post_interval_sec=None ): if timezone is not None: cls.timezone = timezone if post_interval_sec is not None: ls.post_interval_sec = post_interval_sec cls._timezone_pytz = pytz.timezone(cls.timezone) cls.client = NotionClient(token_v2=token_v2) if email is not None: cls.client.set_user_by_email(email) cls.table_view = cls.client.get_block(table_url) cls._validate_table_shcema() cls._is_configured = True @classmethod def set_common_props(cls, **kwargs): cls.common_props = kwargs missing_columns = set(kwargs) - cls._get_table_schema_prop_names() if len(missing_columns) > 0: logging.error( f"There are missing columns in the table: {missing_columns}." ) def localize_timestamp(self, timestamp): utc_datetime = datetime.fromtimestamp(timestamp, tz=timezone.utc) return utc_datetime.astimezone(notion_tqdm._timezone_pytz) def _update_row(self): if not notion_tqdm._is_configured: logging.warning( "notion_tqdm does not seem to be set yet. 
call notion_tqdm.set_config and configure it.\nrefer to https://github.com/shunyooo/notion-tqdm#usage" ) return if self._row_creating: return if self.row is None and not self._row_creating: self._row_creating = True self.row = notion_tqdm.table_view.collection.add_row() self._row_creating = False for c, v in notion_tqdm.common_props.items(): self.row.set_property(c, v) if self.row is not None: now = time() row = self.row row.total = self.total row.name = self.desc row.status = self.status row.value = self.n row.start_timestamp = self.start_t row.update_timestamp = now row.timerange = NotionDate( self.localize_timestamp(self.start_t), self.localize_timestamp(now), timezone=notion_tqdm.timezone, ) row.elapsed_sec = now - self.start_t for c, v in self.custom_props.items(): row.set_property(c, v) for text in self._pending_texts: self.row.children.add_new(TextBlock).title = text self._pending_texts = [] @property def _can_post(self): is_past = ( self.last_post_time is None or (time() - self.last_post_time) > notion_tqdm.post_interval_sec ) return not self._loading and is_past def _post_if_need(self, force): if self._can_post or force: self._loading = True try: self._update_row() except Exception as e: logging.warning(e) self.last_post_time = time() self._loading = False def display(self, msg=None, pos=None, status=None, force=False): force = status is not None or force self.status = Status.doing if status is None else status t = threading.Thread( name="_post_if_need", target=self._post_if_need, args=[force] ) t.setDaemon(True) t.start() def __init__(self, *args, **kwargs): self.row = None self.total = 0 self.last_post_time = None self.status = Status.doing self._loading = False self._row_creating = False super().__init__(*args, **kwargs) self.sp = self.display self.custom_props = {} self._pending_texts = [] def __iter__(self, *args, **kwargs): try: for obj in super().__iter__(*args, **kwargs): yield obj except: self.display(status=Status.error) raise def add_text(self, 
text, force=False): self._pending_texts.append(text) self.display(force) def update_props(self, force=False, **kwags): self.custom_props = kwags self.display(force) def update(self, *args, **kwargs): try: super().update(*args, **kwargs) except: self.display(status=Status.error) raise def close(self, *args, **kwargs): if self.total and self.n < self.total: self.display(status=Status.error) else: self.display(status=Status.done)
true
true
f70c665447c79ac8275fcc74e677d12bb8275fcf
299
py
Python
cleanup_repo.py
cweb-project/cweb-project
53e4b77018996533a8bae8e79c30abf62e81ea5a
[ "MIT" ]
2
2018-11-06T12:43:29.000Z
2019-01-09T15:23:18.000Z
cleanup_repo.py
cweb-project/cweb-project
53e4b77018996533a8bae8e79c30abf62e81ea5a
[ "MIT" ]
1
2018-11-06T20:37:57.000Z
2018-11-13T17:13:11.000Z
cleanup_repo.py
cweb-project/cweb-project
53e4b77018996533a8bae8e79c30abf62e81ea5a
[ "MIT" ]
3
2019-01-15T17:35:26.000Z
2020-03-01T18:53:03.000Z
import shutil import sys import os for (root,dirs,files) in os.walk(os.path.abspath('.'),topdown=True): for d in dirs: path = os.path.join(root,d) if '__pycache__' in path: shutil.rmtree(path) for f in files: path = os.path.join(root,f) if '.DS_Store' in path: os.remove(path)
18.6875
69
0.665552
import shutil import sys import os for (root,dirs,files) in os.walk(os.path.abspath('.'),topdown=True): for d in dirs: path = os.path.join(root,d) if '__pycache__' in path: shutil.rmtree(path) for f in files: path = os.path.join(root,f) if '.DS_Store' in path: os.remove(path)
true
true
f70c669144c2a6b334af8258d02f848255428606
2,915
py
Python
libragram/objects/messageEntities.py
kensoi/libragram
a0119244dceb09edca36b23c95f3e97a28ddae9a
[ "Apache-2.0" ]
null
null
null
libragram/objects/messageEntities.py
kensoi/libragram
a0119244dceb09edca36b23c95f3e97a28ddae9a
[ "Apache-2.0" ]
null
null
null
libragram/objects/messageEntities.py
kensoi/libragram
a0119244dceb09edca36b23c95f3e97a28ddae9a
[ "Apache-2.0" ]
null
null
null
class messageEntity: isBotMention = False start: int end: int def __repr__(self): return "<class 'messageEntity' ({})>".format(type(self)) class formatEntity(messageEntity): pass class mention(messageEntity): #(@username) text: str user: str def __init__(self, text, user = None, botname:str = ""): self.isBotMention = text == "@" + botname if text[0] == '@': text = text[1:] self.text = text self.user = user class hashtag(messageEntity): #(#hashtag) text: str def __init__(self, text): if text[0] == '#': text = text[1:] self.text = text class cashtag(messageEntity): #($USD) text: str def __init__(self, text): self.text = text class bot_command(messageEntity): #(/start@jobs_bot) text: str def __init__(self, text, botname:str = ""): self.text = text if text.count("@") == 1: command, bot_mention = text[:-len(botname)], text[-len(botname):] self.isBotMention = bot_mention == botname else: command = text self.isBotMention == True command = command[command[0] == '/':] self.command = command class url(messageEntity): #(https://telegram.org) text: str def __init__(self, text, botname:str = ""): self.isBotMention = text == "https://t.me/" + botname self.text = text class email(messageEntity): #(do-not-reply@telegram.org) text: str def __init__(self, text): self.text = text class phone_number(messageEntity): #(+1-212-555-0123) text: str def __init__(self, text): self.text = text class bold(formatEntity): #(bold text) text: str def __init__(self, text): self.text = text class italic(formatEntity): #(italic text) text: str def __init__(self, text): self.text = text class underline(formatEntity): #(underlined text) text: str def __init__(self, text): self.text = text class strikethrough(formatEntity): #(strikethrough text) text: str def __init__(self, text): self.text = text class code(messageEntity): #(monowidth string) text: str def __init__(self, text): self.text = text class pre(code): #(monowidth block) text: str language: str def __init__(self, text, language = None): 
self.text = text self.language = language class text_link(messageEntity): #(for clickable text URLs) text: str def __init__(self, text, url, botname: str = ""): self.isBotMention = url == "https://t.me/" + botname self.text = text self.url = url class text_mention(messageEntity): #(for users without usernames) text: str user: str def __init__(self, text, user = None): self.text = text self.user = user
19.433333
77
0.590395
class messageEntity: isBotMention = False start: int end: int def __repr__(self): return "<class 'messageEntity' ({})>".format(type(self)) class formatEntity(messageEntity): pass class mention(messageEntity): text: str user: str def __init__(self, text, user = None, botname:str = ""): self.isBotMention = text == "@" + botname if text[0] == '@': text = text[1:] self.text = text self.user = user class hashtag(messageEntity): text: str def __init__(self, text): if text[0] == '#': text = text[1:] self.text = text class cashtag(messageEntity): text: str def __init__(self, text): self.text = text class bot_command(messageEntity): text: str def __init__(self, text, botname:str = ""): self.text = text if text.count("@") == 1: command, bot_mention = text[:-len(botname)], text[-len(botname):] self.isBotMention = bot_mention == botname else: command = text self.isBotMention == True command = command[command[0] == '/':] self.command = command class url(messageEntity): text: str def __init__(self, text, botname:str = ""): self.isBotMention = text == "https://t.me/" + botname self.text = text class email(messageEntity): text: str def __init__(self, text): self.text = text class phone_number(messageEntity): text: str def __init__(self, text): self.text = text class bold(formatEntity): text: str def __init__(self, text): self.text = text class italic(formatEntity): text: str def __init__(self, text): self.text = text class underline(formatEntity): text: str def __init__(self, text): self.text = text class strikethrough(formatEntity): text: str def __init__(self, text): self.text = text class code(messageEntity): text: str def __init__(self, text): self.text = text class pre(code): text: str language: str def __init__(self, text, language = None): self.text = text self.language = language class text_link(messageEntity): text: str def __init__(self, text, url, botname: str = ""): self.isBotMention = url == "https://t.me/" + botname self.text = text self.url = url class 
text_mention(messageEntity): text: str user: str def __init__(self, text, user = None): self.text = text self.user = user
true
true
f70c67dc63f485a17c66eca2fd849f64dc0f0e72
1,185
py
Python
keystoneclient/tests/v3/test_domains.py
citrix-openstack-build/python-keystoneclient
e170955d6de5cbf521d54105bdefaf606ccdb356
[ "Apache-1.1" ]
1
2015-02-26T03:24:30.000Z
2015-02-26T03:24:30.000Z
keystoneclient/tests/v3/test_domains.py
NeCTAR-RC/python-keystoneclient
dca1d4259d44a0d282396a18c895de834580fe66
[ "Apache-1.1" ]
null
null
null
keystoneclient/tests/v3/test_domains.py
NeCTAR-RC/python-keystoneclient
dca1d4259d44a0d282396a18c895de834580fe66
[ "Apache-1.1" ]
1
2019-03-08T07:21:48.000Z
2019-03-08T07:21:48.000Z
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import uuid from keystoneclient.tests.v3 import utils from keystoneclient.v3 import domains class DomainTests(utils.TestCase, utils.CrudTests): def setUp(self): super(DomainTests, self).setUp() self.key = 'domain' self.collection_key = 'domains' self.model = domains.Domain self.manager = self.client.domains def new_ref(self, **kwargs): kwargs = super(DomainTests, self).new_ref(**kwargs) kwargs.setdefault('enabled', True) kwargs.setdefault('name', uuid.uuid4().hex) return kwargs
34.852941
78
0.69789
import uuid from keystoneclient.tests.v3 import utils from keystoneclient.v3 import domains class DomainTests(utils.TestCase, utils.CrudTests): def setUp(self): super(DomainTests, self).setUp() self.key = 'domain' self.collection_key = 'domains' self.model = domains.Domain self.manager = self.client.domains def new_ref(self, **kwargs): kwargs = super(DomainTests, self).new_ref(**kwargs) kwargs.setdefault('enabled', True) kwargs.setdefault('name', uuid.uuid4().hex) return kwargs
true
true
f70c687e1484c9313c581a5b09bc28790ea9805d
2,435
py
Python
setup.py
asmeurer/markdown-it-py
ee34305cc8b590c7390b0ac4149d0d22104cf908
[ "MIT" ]
null
null
null
setup.py
asmeurer/markdown-it-py
ee34305cc8b590c7390b0ac4149d0d22104cf908
[ "MIT" ]
null
null
null
setup.py
asmeurer/markdown-it-py
ee34305cc8b590c7390b0ac4149d0d22104cf908
[ "MIT" ]
null
null
null
# from importlib import import_module from os import path import re from setuptools import find_packages, setup def get_version(): text = open(path.join(path.dirname(__file__), "markdown_it", "__init__.py")).read() match = re.compile(r"^__version__\s*\=\s*[\"\']([^\s\'\"]+)", re.M).search(text) return match.group(1) setup( name="markdown-it-py", version=get_version(), description="Python port of markdown-it. Markdown parsing, done right!", long_description=open("README.md").read(), long_description_content_type="text/markdown", url="https://github.com/executablebooks/markdown-it-py", project_urls={"Documentation": "https://markdown-it-py.readthedocs.io"}, author="Chris Sewell", author_email="chrisj_sewell@hotmail.com", license="MIT", packages=find_packages(exclude=["test*", "benchmarking"]), include_package_data=True, entry_points={"console_scripts": ["markdown-it = markdown_it.cli.parse:main"]}, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Markup", ], keywords="markdown lexer parser development", python_requires="~=3.6", install_requires=["attrs>=19,<21"], extras_require={ "code_style": ["pre-commit==2.6"], "testing": [ "coverage", "pytest>=3.6,<4", "pytest-cov", "pytest-regressions", "pytest-benchmark~=3.2", "psutil", ], "rtd": [ "myst-nb", "sphinx_book_theme", "sphinx-panels~=0.4.0", "sphinx-copybutton", "sphinx>=2,<4", "pyyaml", ], "compare": [ "commonmark~=0.9.1", "markdown~=3.2", "mistune~=0.8.4", # "mistletoe~=0.7.2", "mistletoe-ebp~=0.10.0", "panflute~=1.12", ], }, zip_safe=False, )
33.819444
87
0.571253
from os import path import re from setuptools import find_packages, setup def get_version(): text = open(path.join(path.dirname(__file__), "markdown_it", "__init__.py")).read() match = re.compile(r"^__version__\s*\=\s*[\"\']([^\s\'\"]+)", re.M).search(text) return match.group(1) setup( name="markdown-it-py", version=get_version(), description="Python port of markdown-it. Markdown parsing, done right!", long_description=open("README.md").read(), long_description_content_type="text/markdown", url="https://github.com/executablebooks/markdown-it-py", project_urls={"Documentation": "https://markdown-it-py.readthedocs.io"}, author="Chris Sewell", author_email="chrisj_sewell@hotmail.com", license="MIT", packages=find_packages(exclude=["test*", "benchmarking"]), include_package_data=True, entry_points={"console_scripts": ["markdown-it = markdown_it.cli.parse:main"]}, classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Text Processing :: Markup", ], keywords="markdown lexer parser development", python_requires="~=3.6", install_requires=["attrs>=19,<21"], extras_require={ "code_style": ["pre-commit==2.6"], "testing": [ "coverage", "pytest>=3.6,<4", "pytest-cov", "pytest-regressions", "pytest-benchmark~=3.2", "psutil", ], "rtd": [ "myst-nb", "sphinx_book_theme", "sphinx-panels~=0.4.0", "sphinx-copybutton", "sphinx>=2,<4", "pyyaml", ], "compare": [ "commonmark~=0.9.1", "markdown~=3.2", "mistune~=0.8.4", "mistletoe-ebp~=0.10.0", "panflute~=1.12", ], }, zip_safe=False, )
true
true
f70c6ba3ffd63063082dff1b3d2ca2ae52c325fe
1,000
py
Python
app/http/controllers/BlogController.py
kerlinlopes/kerlin-blog-backend
f77f07a60d554b6ab28cd926e27d7acc66a4ec6b
[ "MIT" ]
null
null
null
app/http/controllers/BlogController.py
kerlinlopes/kerlin-blog-backend
f77f07a60d554b6ab28cd926e27d7acc66a4ec6b
[ "MIT" ]
null
null
null
app/http/controllers/BlogController.py
kerlinlopes/kerlin-blog-backend
f77f07a60d554b6ab28cd926e27d7acc66a4ec6b
[ "MIT" ]
1
2021-12-06T23:03:46.000Z
2021-12-06T23:03:46.000Z
""" A BlogController Module """ from masonite.controllers import Controller from masonite.request import Request from app.Blog import Blog class BlogController(Controller): def __init__(self, request: Request): self.request = request def show(self): id = self.request.param("id") return Blog.find(id) def index(self): return Blog.all() def create(self): subject = self.request.input("title") details = self.request.input("body") blog = Blog.create({"title": subject, "body": details}) return blog def update(self): title = self.request.input("title") body = self.request.input("body") id = self.request.param("id") Blog.where("id", id).update({"title": title, "body": body}) return Blog.where("id", id).get() def destroy(self): id = self.request.param("id") blog = Blog.where("id", id).get() Blog.where("id", id).delete() return blog
28.571429
67
0.601
from masonite.controllers import Controller from masonite.request import Request from app.Blog import Blog class BlogController(Controller): def __init__(self, request: Request): self.request = request def show(self): id = self.request.param("id") return Blog.find(id) def index(self): return Blog.all() def create(self): subject = self.request.input("title") details = self.request.input("body") blog = Blog.create({"title": subject, "body": details}) return blog def update(self): title = self.request.input("title") body = self.request.input("body") id = self.request.param("id") Blog.where("id", id).update({"title": title, "body": body}) return Blog.where("id", id).get() def destroy(self): id = self.request.param("id") blog = Blog.where("id", id).get() Blog.where("id", id).delete() return blog
true
true
f70c6bdd656e55c1af0d5793d4d9788da45852c8
19,704
py
Python
ramp-database/ramp_database/model/event.py
symerio/ramp-board
977b27ffdd648eccc5c992926870319bfef544b6
[ "BSD-3-Clause" ]
13
2019-02-16T22:30:11.000Z
2021-01-11T10:13:47.000Z
ramp-database/ramp_database/model/event.py
symerio/ramp-board
977b27ffdd648eccc5c992926870319bfef544b6
[ "BSD-3-Clause" ]
427
2018-11-22T22:01:47.000Z
2022-03-15T17:35:57.000Z
ramp-database/ramp_database/model/event.py
symerio/ramp-board
977b27ffdd648eccc5c992926870319bfef544b6
[ "BSD-3-Clause" ]
18
2018-11-22T16:22:18.000Z
2021-12-07T14:42:41.000Z
import uuid import datetime from sqlalchemy import Float from sqlalchemy import Column from sqlalchemy import String from sqlalchemy import Integer from sqlalchemy import Boolean from sqlalchemy import DateTime from sqlalchemy import ForeignKey from sqlalchemy import UniqueConstraint from sqlalchemy.orm import backref from sqlalchemy.orm import relationship from .base import Model from .problem import Problem from .score import ScoreType __all__ = [ "Event", "EventTeam", "EventAdmin", "EventScoreType", ] class Event(Model): """Event table. This table contains all information of a RAMP event. Parameters ---------- problem_name : str The name of the problem. name : str The name of the event. event_title : str The title to give for the event (used in the frontend, can contain spaces). ramp_sandbox_name : str Name of the submission which will be considered the sandbox. It will correspond to the key ``sandbox_name`` of the dictionary created with :func:`ramp_utils.generate_ramp_config`. path_ramp_submissions : str Path to the deployment RAMP submissions directory. It will corresponds to the key ``ramp_submissions_dir`` of the dictionary created with :func:`ramp_utils.generate_ramp_config`. session : None or :class:`sqlalchemy.orm.Session`, optional The session used to perform some required queries. It is a required argument when interacting with the database outside of Flask. Attributes ---------- id : int ID of the table row. name : str Event name. title : str Event title. problem_id : int The problem ID associated with this event. problem : :class:`ramp_database.model.Problem` The :class:`ramp_database.model.Problem` instance. max_members_per_team : int The maximum number of members per team. max_n_ensemble : int The maximum number of models in the ensemble. is_send_trained_mails : bool Whether or not to send an email when a model is trained. is_public : bool Whether or not the event is public. 
is_controled_signup : bool Whether or not the sign-up to the event is moderated. is_competitive : bool Whether or not the challenge is in the competitive phase. min_duration_between_submission : int The amount of time to wait between two submissions. opening_timestamp : datetime The date and time of the event opening. public_opening_timestamp : datetime The date and time of the publicly event opening. closing_timestamp : datetime The date and time of the event closure. official_score_name : str The name of the official score used to evaluate the submissions. combined_combined_valid_score : float The combined public score for all folds. combine_combined_test_score : float The combined private score for all folds. combined_foldwise_valid_score : float The combined public scores for each fold. combined_foldwise_test_score : float The combined public scores for each fold. n_submissions : int The number of submissions for an event. public_leaderboard_html_no_links : str The public leaderboard in HTML format with links to the submissions. public_leaderboard_html_with_links : str The public leaderboard in HTML format. private_leaderboard_html : str The private leaderboard in HTML. failed_leaderboard_html : str The leaderboard with the failed submissions. new_leaderboard_html : str The leaderboard with the new submitted submissions. public_competition_leaderboard_html : str The public leaderboard of the competition in HTML. private_competition_leaderboard_html : str The private leaderboard of the competition in HTML. path_ramp_kit : str The path where the kit are located. ramp_sandbox_name : str Name of the submission which will be considered the sandbox. path_ramp_submissions : str Path to the deployment RAMP submissions directory. It will correspond to the key `ramp_submissions_dir` of the dictionary created with :func:`ramp_utils.generate_ramp_config`. score_types : list of :class:`ramp_database.model.EventScoreType` A back-reference to the score type used in the event. 
event_admins : list of :class:`ramp_database.model.EventAdmin` A back-reference to the admin for the event. event_teams: list of :class:`ramp_database.model.EventTeam` A back-reference to the teams enrolled in the event. cv_folds : list of :class:`ramp_database.model.CVFold` A back-reference to the CV folds for the event. """ __tablename__ = "events" id = Column(Integer, primary_key=True) name = Column(String, nullable=False, unique=True) title = Column(String, nullable=False) problem_id = Column(Integer, ForeignKey("problems.id"), nullable=False) problem = relationship( "Problem", backref=backref("events", cascade="all, delete-orphan") ) max_members_per_team = Column(Integer, default=1) # max number of submissions in Caruana's ensemble max_n_ensemble = Column(Integer, default=80) is_send_trained_mails = Column(Boolean, default=True) is_send_submitted_mails = Column(Boolean, default=True) is_public = Column(Boolean, default=False) is_controled_signup = Column(Boolean, default=True) # in competitive events participants can select the submission # with which they want to participate in the competition is_competitive = Column(Boolean, default=False) min_duration_between_submissions = Column(Integer, default=15 * 60) opening_timestamp = Column(DateTime, default=datetime.datetime(2000, 1, 1, 0, 0, 0)) # before links to submissions in leaderboard are not alive public_opening_timestamp = Column( DateTime, default=datetime.datetime(2100, 1, 1, 0, 0, 0) ) closing_timestamp = Column(DateTime, default=datetime.datetime(2100, 1, 1, 0, 0, 0)) # the name of the score in self.event_score_types which is used for # ensembling and contributivity. 
official_score_name = Column(String) # official_score_index = Column(Integer, default=0) combined_combined_valid_score = Column(Float, default=None) combined_combined_test_score = Column(Float, default=None) combined_foldwise_valid_score = Column(Float, default=None) combined_foldwise_test_score = Column(Float, default=None) n_submissions = Column(Integer, default=0) public_leaderboard_html_no_links = Column(String, default=None) public_leaderboard_html_with_links = Column(String, default=None) private_leaderboard_html = Column(String, default=None) failed_leaderboard_html = Column(String, default=None) new_leaderboard_html = Column(String, default=None) public_competition_leaderboard_html = Column(String, default=None) private_competition_leaderboard_html = Column(String, default=None) # big change in the database ramp_sandbox_name = Column( String, nullable=False, unique=False, default="starting-kit" ) path_ramp_submissions = Column(String, nullable=False, unique=False) def __init__( self, problem_name, name, event_title, ramp_sandbox_name, path_ramp_submissions, session=None, ): self.name = name self.ramp_sandbox_name = ramp_sandbox_name self.path_ramp_submissions = path_ramp_submissions if session is None: self.problem = Problem.query.filter_by(name=problem_name).one() else: self.problem = ( session.query(Problem).filter(Problem.name == problem_name).one() ) self.title = event_title def __repr__(self): return "Event({})".format(self.name) def set_n_submissions(self): """Set the number of submissions for the current event by checking each team.""" self.n_submissions = 0 for event_team in self.event_teams: # substract one for starting kit self.n_submissions += len(event_team.submissions) - 1 @property def Predictions(self): """:class:`rampwf.prediction_types.base.BasePrediction`: Predictions for the given event.""" return self.problem.Predictions @property def workflow(self): """:class:`ramp_database.model.Workflow`: The workflow used for the event.""" return 
self.problem.workflow @property def official_score_type(self): """:class:`ramp_database.model.EventScoreType`: The score type for the current event.""" return EventScoreType.query.filter_by( event=self, name=self.official_score_name ).one() def get_official_score_type(self, session): """Get the type of the default score used for the current event. Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session used to make the query. Returns ------- event_type_score : :class:`ramp_database.model.EventTypeScore` The default type score for the current event. """ return ( session.query(EventScoreType) .filter(EventScoreType.event == self) .filter(EventScoreType.name == self.official_score_name) .one() ) @property def official_score_function(self): """callable: The default function used for scoring in the event.""" return self.official_score_type.score_function @property def combined_combined_valid_score_str(self): """str: Convert to string the combined public score for all folds.""" return ( None if self.combined_combined_valid_score is None else str( round( self.combined_combined_valid_score, self.official_score_type.precision, ) ) ) @property def combined_combined_test_score_str(self): """str: Convert to string the combined private score for all folds.""" return ( None if self.combined_combined_test_score is None else str( round( self.combined_combined_test_score, self.official_score_type.precision, ) ) ) @property def combined_foldwise_valid_score_str(self): """str: Convert to string the combined public score for each fold.""" return ( None if self.combined_foldwise_valid_score is None else str( round( self.combined_foldwise_valid_score, self.official_score_type.precision, ) ) ) @property def combined_foldwise_test_score_str(self): """str: Convert to string the combined public score for each fold.""" return ( None if self.combined_foldwise_test_score is None else str( round( self.combined_foldwise_test_score, self.official_score_type.precision, ) ) ) 
@property def is_open(self): """bool: Whether or not the event is opened.""" now = datetime.datetime.utcnow() return self.closing_timestamp > now > self.opening_timestamp @property def is_public_open(self): """bool: Whether or not the public phase of the event is opened.""" now = datetime.datetime.utcnow() return self.closing_timestamp > now > self.public_opening_timestamp @property def is_closed(self): """bool: Whether or not the event is closed.""" now = datetime.datetime.utcnow() return now > self.closing_timestamp @property def n_jobs(self): """int: The number of cv fold which can be used as number of jobs.""" return sum(1 for cv_fold in self.cv_folds if cv_fold.type == "live") @property def n_participants(self): """int: The number of participants to the event.""" # Only select individual teams return len( [ event_team for event_team in self.event_teams if event_team.team.is_individual ] ) class EventScoreType(Model): """EventScoreType table. This is a many-to-one relationship between Event and ScoreType. Stores the ScoresTypes for each event. For each Event / ScoreType combo, also a new record in ScoreType is created, which is not that useful (TODO consider removing ScoreType table) Parameters ---------- event : :class:`ramp_database.model.Event` The event instance. score_type_object : :class:`rampwf.score_types` A scoring instance. Attributes ---------- id : int The ID of the table row. name : str The name of the score. event_id : int The ID of the event associated. event : :class:`ramp_database.model.Event` The event instance. score_type_id : int The ID of the score. score_type : :class:`ramp_database.model.ScoreType` The score type instance. precision : int The numerical precision of the score. submissions : list of :class:`ramp_database.model.SubmissionScore` A back-reference of the submissions for the event/score type. 
""" __tablename__ = "event_score_types" id = Column(Integer, primary_key=True) # Can be renamed, default is the same as score_type.name name = Column(String, nullable=False) event_id = Column(Integer, ForeignKey("events.id"), nullable=False) event = relationship( "Event", backref=backref("score_types", cascade="all, delete-orphan") ) score_type_id = Column(Integer, ForeignKey("score_types.id"), nullable=False) score_type = relationship("ScoreType", backref=backref("events")) # display precision in n_digits # default is the same as score_type.precision precision = Column(Integer) UniqueConstraint(event_id, score_type_id, name="es_constraint") UniqueConstraint(event_id, name, name="en_constraint") def __init__(self, event, score_type_object): self.event = event self.score_type = ScoreType(str(uuid.uuid4()), True, 0, 1) # XXX after migration we should store the index of the # score_type so self.score_type_object (should be renamed # score_type) wouldn't have to do a search each time. self.name = score_type_object.name self.precision = score_type_object.precision def __repr__(self): return "{}: {}".format(self.name, self.event) @property def score_type_object(self): """:class:`rampwf.score_types`: Score type object.""" score_types = self.event.problem.module.score_types for score_type in score_types: if score_type.name == self.name: return score_type @property def score_function(self): """callable: Scoring function.""" return self.score_type_object.score_function @property def is_lower_the_better(self): """bool: Whether a lower score is better.""" return self.score_type_object.is_lower_the_better @property def minimum(self): """float: the lower bound of the score.""" return self.score_type_object.minimum @property def maximum(self): """float: the higher bound of the score.""" return self.score_type_object.maximum @property def worst(self): """float: the worst possible score.""" return self.score_type_object.worst class EventAdmin(Model): """EventAdmin table. 
This is a many-to-many relationship between Event and User to defined admins. Parameters ---------- event : :class:`ramp_database.model.Event` The event instance. admin : :class:`ramp_database.model.User` The user instance. Attributes ---------- id : int The ID of the table row. event_id : int The ID of the event. event : :class:`ramp_database.model.Event` The event instance. admin_id : int The ID of the user defined as an admin. admin : :class:`ramp_database.model.User` The user instance. """ __tablename__ = "event_admins" id = Column(Integer, primary_key=True) event_id = Column(Integer, ForeignKey("events.id"), nullable=False) event = relationship( "Event", backref=backref("event_admins", cascade="all, delete-orphan") ) admin_id = Column(Integer, ForeignKey("users.id"), nullable=False) admin = relationship( "User", backref=backref("admined_events", cascade="all, delete-orphan") ) class EventTeam(Model): """EventTeam table. This is a many-to-many relationship between Event and Team. Parameters ---------- event : :class:`ramp_database.model.Event` The event instance. team : :class:`ramp_database.model.Team` The team instance. Attributes ---------- id : int The ID of a row in the table. event_id : int The ID of the event. event : :class:`ramp_database.model.Event` The event instance. team_id : int The ID of the team. team : :class:`ramp_database.model.Team` The team instance. is_active : bool Whether the team is active for the event. last_submission_name : str The name of the last submission to the event. signup_timestamp : datetime The date and time when the team signed up for the event. approved : bool Whether the team has been approved to participate to the event. leaderboard_html : str The leaderboard for the team for the specific event. failed_leaderboard_html : str The failed submission board for the team for the specific event. new_leaderboard_html : str The new submission board for the team for the specific event. 
submissions : list of :class:`ramp_database.model.Submission` A back-reference to the submissions associated with this event/team. """ __tablename__ = "event_teams" id = Column(Integer, primary_key=True) event_id = Column(Integer, ForeignKey("events.id"), nullable=False) event = relationship( "Event", backref=backref("event_teams", cascade="all, delete-orphan") ) team_id = Column(Integer, ForeignKey("teams.id"), nullable=False) team = relationship( "Team", backref=backref("team_events", cascade="all, delete-orphan") ) is_active = Column(Boolean, default=True) last_submission_name = Column(String, default=None) signup_timestamp = Column(DateTime, nullable=False) approved = Column(Boolean, default=False) leaderboard_html = Column(String, default=None) failed_leaderboard_html = Column(String, default=None) new_leaderboard_html = Column(String, default=None) UniqueConstraint(event_id, team_id, name="et_constraint") def __init__(self, event, team): self.event = event self.team = team self.signup_timestamp = datetime.datetime.utcnow() def __repr__(self): return "{}/{}".format(self.event, self.team)
34.568421
88
0.661541
import uuid import datetime from sqlalchemy import Float from sqlalchemy import Column from sqlalchemy import String from sqlalchemy import Integer from sqlalchemy import Boolean from sqlalchemy import DateTime from sqlalchemy import ForeignKey from sqlalchemy import UniqueConstraint from sqlalchemy.orm import backref from sqlalchemy.orm import relationship from .base import Model from .problem import Problem from .score import ScoreType __all__ = [ "Event", "EventTeam", "EventAdmin", "EventScoreType", ] class Event(Model): __tablename__ = "events" id = Column(Integer, primary_key=True) name = Column(String, nullable=False, unique=True) title = Column(String, nullable=False) problem_id = Column(Integer, ForeignKey("problems.id"), nullable=False) problem = relationship( "Problem", backref=backref("events", cascade="all, delete-orphan") ) max_members_per_team = Column(Integer, default=1) max_n_ensemble = Column(Integer, default=80) is_send_trained_mails = Column(Boolean, default=True) is_send_submitted_mails = Column(Boolean, default=True) is_public = Column(Boolean, default=False) is_controled_signup = Column(Boolean, default=True) # in competitive events participants can select the submission # with which they want to participate in the competition is_competitive = Column(Boolean, default=False) min_duration_between_submissions = Column(Integer, default=15 * 60) opening_timestamp = Column(DateTime, default=datetime.datetime(2000, 1, 1, 0, 0, 0)) # before links to submissions in leaderboard are not alive public_opening_timestamp = Column( DateTime, default=datetime.datetime(2100, 1, 1, 0, 0, 0) ) closing_timestamp = Column(DateTime, default=datetime.datetime(2100, 1, 1, 0, 0, 0)) # the name of the score in self.event_score_types which is used for # ensembling and contributivity. 
official_score_name = Column(String) # official_score_index = Column(Integer, default=0) combined_combined_valid_score = Column(Float, default=None) combined_combined_test_score = Column(Float, default=None) combined_foldwise_valid_score = Column(Float, default=None) combined_foldwise_test_score = Column(Float, default=None) n_submissions = Column(Integer, default=0) public_leaderboard_html_no_links = Column(String, default=None) public_leaderboard_html_with_links = Column(String, default=None) private_leaderboard_html = Column(String, default=None) failed_leaderboard_html = Column(String, default=None) new_leaderboard_html = Column(String, default=None) public_competition_leaderboard_html = Column(String, default=None) private_competition_leaderboard_html = Column(String, default=None) # big change in the database ramp_sandbox_name = Column( String, nullable=False, unique=False, default="starting-kit" ) path_ramp_submissions = Column(String, nullable=False, unique=False) def __init__( self, problem_name, name, event_title, ramp_sandbox_name, path_ramp_submissions, session=None, ): self.name = name self.ramp_sandbox_name = ramp_sandbox_name self.path_ramp_submissions = path_ramp_submissions if session is None: self.problem = Problem.query.filter_by(name=problem_name).one() else: self.problem = ( session.query(Problem).filter(Problem.name == problem_name).one() ) self.title = event_title def __repr__(self): return "Event({})".format(self.name) def set_n_submissions(self): self.n_submissions = 0 for event_team in self.event_teams: # substract one for starting kit self.n_submissions += len(event_team.submissions) - 1 @property def Predictions(self): return self.problem.Predictions @property def workflow(self): return self.problem.workflow @property def official_score_type(self): return EventScoreType.query.filter_by( event=self, name=self.official_score_name ).one() def get_official_score_type(self, session): return ( session.query(EventScoreType) 
.filter(EventScoreType.event == self) .filter(EventScoreType.name == self.official_score_name) .one() ) @property def official_score_function(self): return self.official_score_type.score_function @property def combined_combined_valid_score_str(self): return ( None if self.combined_combined_valid_score is None else str( round( self.combined_combined_valid_score, self.official_score_type.precision, ) ) ) @property def combined_combined_test_score_str(self): return ( None if self.combined_combined_test_score is None else str( round( self.combined_combined_test_score, self.official_score_type.precision, ) ) ) @property def combined_foldwise_valid_score_str(self): return ( None if self.combined_foldwise_valid_score is None else str( round( self.combined_foldwise_valid_score, self.official_score_type.precision, ) ) ) @property def combined_foldwise_test_score_str(self): return ( None if self.combined_foldwise_test_score is None else str( round( self.combined_foldwise_test_score, self.official_score_type.precision, ) ) ) @property def is_open(self): now = datetime.datetime.utcnow() return self.closing_timestamp > now > self.opening_timestamp @property def is_public_open(self): now = datetime.datetime.utcnow() return self.closing_timestamp > now > self.public_opening_timestamp @property def is_closed(self): now = datetime.datetime.utcnow() return now > self.closing_timestamp @property def n_jobs(self): return sum(1 for cv_fold in self.cv_folds if cv_fold.type == "live") @property def n_participants(self): # Only select individual teams return len( [ event_team for event_team in self.event_teams if event_team.team.is_individual ] ) class EventScoreType(Model): __tablename__ = "event_score_types" id = Column(Integer, primary_key=True) # Can be renamed, default is the same as score_type.name name = Column(String, nullable=False) event_id = Column(Integer, ForeignKey("events.id"), nullable=False) event = relationship( "Event", backref=backref("score_types", cascade="all, 
delete-orphan") ) score_type_id = Column(Integer, ForeignKey("score_types.id"), nullable=False) score_type = relationship("ScoreType", backref=backref("events")) # display precision in n_digits # default is the same as score_type.precision precision = Column(Integer) UniqueConstraint(event_id, score_type_id, name="es_constraint") UniqueConstraint(event_id, name, name="en_constraint") def __init__(self, event, score_type_object): self.event = event self.score_type = ScoreType(str(uuid.uuid4()), True, 0, 1) # XXX after migration we should store the index of the # score_type so self.score_type_object (should be renamed # score_type) wouldn't have to do a search each time. self.name = score_type_object.name self.precision = score_type_object.precision def __repr__(self): return "{}: {}".format(self.name, self.event) @property def score_type_object(self): score_types = self.event.problem.module.score_types for score_type in score_types: if score_type.name == self.name: return score_type @property def score_function(self): return self.score_type_object.score_function @property def is_lower_the_better(self): return self.score_type_object.is_lower_the_better @property def minimum(self): return self.score_type_object.minimum @property def maximum(self): return self.score_type_object.maximum @property def worst(self): return self.score_type_object.worst class EventAdmin(Model): __tablename__ = "event_admins" id = Column(Integer, primary_key=True) event_id = Column(Integer, ForeignKey("events.id"), nullable=False) event = relationship( "Event", backref=backref("event_admins", cascade="all, delete-orphan") ) admin_id = Column(Integer, ForeignKey("users.id"), nullable=False) admin = relationship( "User", backref=backref("admined_events", cascade="all, delete-orphan") ) class EventTeam(Model): __tablename__ = "event_teams" id = Column(Integer, primary_key=True) event_id = Column(Integer, ForeignKey("events.id"), nullable=False) event = relationship( "Event", 
backref=backref("event_teams", cascade="all, delete-orphan") ) team_id = Column(Integer, ForeignKey("teams.id"), nullable=False) team = relationship( "Team", backref=backref("team_events", cascade="all, delete-orphan") ) is_active = Column(Boolean, default=True) last_submission_name = Column(String, default=None) signup_timestamp = Column(DateTime, nullable=False) approved = Column(Boolean, default=False) leaderboard_html = Column(String, default=None) failed_leaderboard_html = Column(String, default=None) new_leaderboard_html = Column(String, default=None) UniqueConstraint(event_id, team_id, name="et_constraint") def __init__(self, event, team): self.event = event self.team = team self.signup_timestamp = datetime.datetime.utcnow() def __repr__(self): return "{}/{}".format(self.event, self.team)
true
true