content
stringlengths
0
1.55M
from qtwrapper import QtGui, QtWidgets, pyqtSignal, get_icon


class BuildErrors(QtWidgets.QTreeView):
    """Tree view listing build errors as (Message, Row, Column) rows.

    Clicking a row emits :data:`sigErrorSelected` with the error object
    that was attached to the clicked item via ``setData``.
    """

    # Emitted with the error object stored on the clicked row.
    sigErrorSelected = pyqtSignal(object)

    def __init__(self, parent=None):
        super().__init__(parent)
        self.clicked.connect(self.itemSelected)
        self.errorIcon = get_icon('error.png')
        # BUG FIX: the original created and installed a throwaway
        # QStandardItemModel here before immediately replacing it with the
        # real one below; a single model is sufficient.
        # NOTE: the attribute name ``model`` shadows QTreeView.model();
        # kept unchanged for backward compatibility with existing callers.
        self.model = QtGui.QStandardItemModel()
        self.model.setHorizontalHeaderLabels(['Message', 'Row', 'Column'])
        self.header().setStretchLastSection(True)
        self.setModel(self.model)

    def setErrorList(self, errorlist):
        """Replace the displayed errors with *errorlist*.

        Each error is expected to expose ``msg`` and an optional ``loc``
        with ``row``/``col`` attributes (``loc`` may be falsy).
        """
        # Clear all existing rows before refilling.
        self.model.removeRows(0, self.model.rowCount())
        for e in errorlist:
            item = QtGui.QStandardItem(self.errorIcon, str(e.msg))
            item.setData(e)
            row = str(e.loc.row) if e.loc else ''
            irow = QtGui.QStandardItem(row)
            irow.setData(e)
            col = str(e.loc.col) if e.loc else ''
            icol = QtGui.QStandardItem(col)
            icol.setData(e)
            # Attach the error to every cell so itemSelected() can recover
            # it regardless of which column was clicked.
            self.model.appendRow([item, irow, icol])
        for i in range(3):
            self.resizeColumnToContents(i)

    def itemSelected(self, index):
        """Emit sigErrorSelected for the error behind the clicked index."""
        if not index.isValid():
            return
        item = self.model.itemFromIndex(index)
        err = item.data()
        self.sigErrorSelected.emit(err)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import math
import LPC_rundef
import rundef
import boot
sys.path.append(os.path.abspath(".."))
from gen import LPC_gencore
from gen import LPC_gendef
from ui import LPC_uidef
from ui import uidef
from ui import uivar
from ui import uilang
from mem import LPC_memdef
from boot import bltest
from boot import target
from utils import misc


def LPC_createTarget(device, exeBinRoot):
    """Locate and execute the ``bltargetconfig.py`` for *device*.

    Returns a ``(target.Target, targetBaseDir)`` tuple.  Raises ValueError
    if no targets directory can be found, RuntimeError if the directory
    exists but contains no ``bltargetconfig.py``.
    """
    # Map the UI device selection onto a CPU target directory name.
    cpu = "LPC55S69"
    if device in uidef.kMcuDevice_Niobe4minis:
        cpu = "LPC55S16"
    elif device in uidef.kMcuDevice_Niobe4s:
        cpu = "LPC55S69"
    # Prefer the targets directory next to this source tree, falling back
    # to the layout used by the packaged executable.
    targetBaseDir = os.path.join(
        os.path.dirname(os.path.dirname(__file__)), 'targets', cpu)
    if not os.path.isdir(targetBaseDir):
        targetBaseDir = os.path.join(
            os.path.dirname(exeBinRoot), 'src', 'targets', cpu)
        if not os.path.isdir(targetBaseDir):
            raise ValueError("Missing target directory at path %s" % targetBaseDir)
    targetConfigFile = os.path.join(targetBaseDir, 'bltargetconfig.py')
    if not os.path.isfile(targetConfigFile):
        raise RuntimeError("Missing target config file at path %s" % targetConfigFile)
    # Build the config namespace from our locals (device, exeBinRoot, cpu,
    # targetBaseDir, targetConfigFile) and adjust file path and module name
    # so the config script sees itself as module 'bltargetconfig'.
    targetConfig = locals().copy()
    targetConfig['__file__'] = targetConfigFile
    targetConfig['__name__'] = 'bltargetconfig'
    # BUG FIX: the original used execfile(), which only exists on Python 2
    # and raises NameError on Python 3.  exec(compile(...)) is equivalent
    # and works on both interpreters.
    with open(targetConfigFile) as configFile:
        exec(compile(configFile.read(), targetConfigFile, 'exec'),
             globals(), targetConfig)
    # Create the target object from everything the config script defined.
    tgt = target.Target(**targetConfig)
    return tgt, targetBaseDir


class secBootLpcRun(LPC_gencore.secBootLpcGen):
    """Runtime (blhost) operations for LPC-series secure-boot devices."""

    def __init__(self, parent):
        LPC_gencore.secBootLpcGen.__init__(self, parent)
        if self.mcuSeries == uidef.kMcuSeries_LPC:
            self.LPC_initRun()

    def LPC_initRun(self):
        """Reset connection state and create the MCU target object."""
        self.blhost = None
        self.tgt = None
        self.cpuDir = None
        self.blhostVectorsDir = os.path.join(
            self.exeTopRoot, 'tools', 'blhost2_3', 'win', 'vectors')
        self.LPC_isDeviceEnabledToOperate = True
        self.bootDeviceMemId = 0
        self.bootDeviceMemBase = None
        # Default 1-byte granularity until real flash properties are read
        # from the device (_LPC_getC040hdFlashProperties updates erase unit).
        self.comMemWriteUnit = 0x1
        self.comMemEraseUnit = 0x1
        self.comMemReadUnit = 0x1
        self.LPC_createMcuTarget()

    def LPC_createMcuTarget(self):
        """(Re)create self.tgt / self.cpuDir for the current device."""
        self.tgt, self.cpuDir = LPC_createTarget(self.mcuDevice, self.exeBinRoot)

    def LPC_getUsbid(self):
        """Return [ROM VID, ROM PID, flashloader VID, flashloader PID]."""
        self.LPC_createMcuTarget()
        return [self.tgt.romUsbVid, self.tgt.romUsbPid,
                self.tgt.flashloaderUsbVid, self.tgt.flashloaderUsbPid]

    def LPC_connectToDevice(self, connectStage):
        """Create (or drop, for Reset stage) the blhost connection."""
        if connectStage in (uidef.kConnectStage_Rom, uidef.kConnectStage_Flashloader):
            # Create the target object.
            self.LPC_createMcuTarget()
            if self.isUartPortSelected:
                blPeripheral = 'uart'
                uartComPort = self.uartComPort
                uartBaudrate = int(self.uartBaudrate)
                usbVid = ''
                usbPid = ''
            elif self.isUsbhidPortSelected:
                blPeripheral = 'usb'
                uartComPort = ''
                uartBaudrate = ''
                if connectStage == uidef.kConnectStage_Rom:
                    usbVid = self.tgt.romUsbVid
                    usbPid = self.tgt.romUsbPid
                elif connectStage == uidef.kConnectStage_Flashloader:
                    usbVid = self.tgt.flashloaderUsbVid
                    usbPid = self.tgt.flashloaderUsbPid
            else:
                # NOTE(review): with neither port selected the variables used
                # below are unbound and createBootloader() would raise
                # NameError (same as the original code) — confirm the UI
                # guarantees a port selection before this call.
                pass
            self.blhost = bltest.createBootloader(
                self.tgt, self.blhostVectorsDir, blPeripheral,
                uartBaudrate, uartComPort, usbVid, usbPid, True)
        elif connectStage == uidef.kConnectStage_Reset:
            self.tgt = None
        else:
            pass

    def LPC_pingRom(self):
        """Return True if the ROM bootloader answers a get-property ping."""
        status, results, cmdStr = self.blhost.getProperty(
            boot.properties.kPropertyTag_CurrentVersion)
        self.printLog(cmdStr)
        return (status == boot.status.kStatus_Success)

    def _LPC_getMcuDeviceIds(self):
        """Print the SYSCON device id and the four PFR UUID words."""
        status, results, cmdStr = self.blhost.getProperty(
            boot.properties.kPropertyTag_SystemDeviceIdent)
        self.printLog(cmdStr)
        if status == boot.status.kStatus_Success:
            self.printDeviceStatus(
                "SYSCON->DEVICE_ID0 = " + self.convertLongIntHexText(str(hex(results[0]))))
        status, results, cmdStr = self.blhost.getProperty(
            boot.properties.kPropertyTag_UniqueDeviceIdent)
        self.printLog(cmdStr)
        if status == boot.status.kStatus_Success:
            self.printDeviceStatus(
                "PFR UUID0 = " + self.convertLongIntHexText(str(hex(results[0]))))
            self.printDeviceStatus(
                "PFR UUID1 = " + self.convertLongIntHexText(str(hex(results[1]))))
            self.printDeviceStatus(
                "PFR UUID2 = " + self.convertLongIntHexText(str(hex(results[2]))))
            self.printDeviceStatus(
                "PFR UUID3 = " + self.convertLongIntHexText(str(hex(results[3]))))

    def LPC_getMcuDeviceInfoViaRom(self):
        """Print the ROM bootloader version and device identifiers."""
        self.printDeviceStatus("----------MCU ROM info-----------")
        self.getMcuDeviceBootloaderVersion()
        self._LPC_getMcuDeviceIds()

    def _LPC_getC040hdFlashProperties(self):
        """Query and print on-chip flash sector and total size.

        Side effect: updates self.comMemEraseUnit with the sector size.
        """
        status, results, cmdStr = self.blhost.getProperty(
            boot.properties.kPropertyTag_FlashSectorSize)
        self.printLog(cmdStr)
        if status == boot.status.kStatus_Success:
            self.printDeviceStatus(
                "Sector Size = " + self.showAsOptimalMemoryUnit(results[0]))
            self.comMemEraseUnit = results[0]
        status, results, cmdStr = self.blhost.getProperty(
            boot.properties.kPropertyTag_FlashSizeInBytes)
        self.printLog(cmdStr)
        if status == boot.status.kStatus_Success:
            self.printDeviceStatus(
                "Total Size = " + self.showAsOptimalMemoryUnit(results[0]))

    def LPC_getBootDeviceInfoViaRom(self):
        """Print properties of the selected boot device."""
        if self.bootDevice == LPC_uidef.kBootDevice_InternalNor:
            self.printDeviceStatus("-------On-chip NOR memory------")
            self._LPC_getC040hdFlashProperties()

    def _LPC_prepareForBootDeviceOperation(self):
        """Resolve the memory base address of the selected boot device."""
        if self.bootDevice == LPC_uidef.kBootDevice_InternalNor:
            self.bootDeviceMemBase = self.tgt.c040hdNorMemBase

    def _eraseC040hdNorForImageLoading(self):
        """Erase enough NOR flash to hold the generated image.

        Returns True on success.
        """
        imageLen = os.path.getsize(self.destAppFilename)
        # The erase length must be a whole multiple of the sector size.
        memEraseLen = misc.align_up(imageLen, self.comMemEraseUnit)
        status, results, cmdStr = self.blhost.flashEraseRegion(
            self.tgt.c040hdNorMemBase, memEraseLen)
        self.printLog(cmdStr)
        return (status == boot.status.kStatus_Success)

    def LPC_flashBootableImage(self):
        """Erase the boot device and program the generated image into it.

        Returns True on success, False if erase or write fails.
        """
        self._LPC_prepareForBootDeviceOperation()
        if self.bootDevice == LPC_uidef.kBootDevice_InternalNor:
            if not self._eraseC040hdNorForImageLoading():
                return False
            if self.secureBootType == LPC_uidef.kSecureBootType_PlainUnsigned:
                pass
            imageLoadAddr = self.bootDeviceMemBase
            status, results, cmdStr = self.blhost.writeMemory(
                imageLoadAddr, self.destAppFilename)
            self.printLog(cmdStr)
            if status != boot.status.kStatus_Success:
                return False
        # Best-effort clean-up of the temporary converted image file.
        if self.isConvertedAppUsed:
            try:
                os.remove(self.srcAppFilename)
            except Exception:
                pass
            self.isConvertedAppUsed = False
        return True

    def LPC_resetMcuDevice(self):
        """Issue a blhost reset and return True on success."""
        status, results, cmdStr = self.blhost.reset()
        self.printLog(cmdStr)
        return (status == boot.status.kStatus_Success)
# coding: utf-8

"""
Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License").
You may not use this file except in compliance with the License.
A copy of the License is located at

    http://www.apache.org/licenses/LICENSE-2.0

or in the "license" file accompanying this file. This file is distributed
on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
express or implied. See the License for the specific language governing
permissions and limitations under the License.
"""

"""
ProductAdvertisingAPI

https://webservices.amazon.com/paapi5/documentation/index.html  # noqa: E501
"""

import pprint
import re  # noqa: F401

import six

from .availability import Availability  # noqa: F401,E501
from .condition import Condition  # noqa: F401,E501
from .delivery_flag import DeliveryFlag  # noqa: F401,E501
from .max_price import MaxPrice  # noqa: F401,E501
from .merchant import Merchant  # noqa: F401,E501
from .min_price import MinPrice  # noqa: F401,E501
from .min_reviews_rating import MinReviewsRating  # noqa: F401,E501
from .min_saving_percent import MinSavingPercent  # noqa: F401,E501
from .offer_count import OfferCount  # noqa: F401,E501
from .partner_type import PartnerType  # noqa: F401,E501
from .properties import Properties  # noqa: F401,E501
from .search_items_resource import SearchItemsResource  # noqa: F401,E501
from .sort_by import SortBy  # noqa: F401,E501


def _model_property(name, required=False):
    """Build a property backed by ``_<name>``.

    The setter of a *required* attribute rejects None, matching the
    hand-written swagger accessors this factory replaces.
    """
    attr = '_' + name

    def _getter(self):
        return getattr(self, attr)

    def _setter(self, value):
        if required and value is None:
            raise ValueError(
                "Invalid value for `%s`, must not be `None`" % name)  # noqa: E501
        setattr(self, attr, value)

    return property(_getter, _setter,
                    doc="The %s of this SearchItemsRequest." % name)


class SearchItemsRequest(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.

    The 27 per-attribute getter/setter pairs of the generated original are
    replaced by properties installed from ``swagger_types`` (see the loop
    after the class body); behavior, attribute names, and error messages
    are unchanged.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'actor': 'str',
        'artist': 'str',
        'author': 'str',
        'availability': 'Availability',
        'brand': 'str',
        'browse_node_id': 'str',
        'condition': 'Condition',
        'currency_of_preference': 'str',
        'delivery_flags': 'list[DeliveryFlag]',
        'item_count': 'int',
        'item_page': 'int',
        'keywords': 'str',
        'languages_of_preference': 'list[str]',
        'marketplace': 'str',
        'max_price': 'MaxPrice',
        'merchant': 'Merchant',
        'min_price': 'MinPrice',
        'min_reviews_rating': 'MinReviewsRating',
        'min_saving_percent': 'MinSavingPercent',
        'offer_count': 'OfferCount',
        'partner_tag': 'str',
        'partner_type': 'PartnerType',
        'properties': 'Properties',
        'resources': 'list[SearchItemsResource]',
        'search_index': 'str',
        'sort_by': 'SortBy',
        'title': 'str'
    }

    attribute_map = {
        'actor': 'Actor',
        'artist': 'Artist',
        'author': 'Author',
        'availability': 'Availability',
        'brand': 'Brand',
        'browse_node_id': 'BrowseNodeId',
        'condition': 'Condition',
        'currency_of_preference': 'CurrencyOfPreference',
        'delivery_flags': 'DeliveryFlags',
        'item_count': 'ItemCount',
        'item_page': 'ItemPage',
        'keywords': 'Keywords',
        'languages_of_preference': 'LanguagesOfPreference',
        'marketplace': 'Marketplace',
        'max_price': 'MaxPrice',
        'merchant': 'Merchant',
        'min_price': 'MinPrice',
        'min_reviews_rating': 'MinReviewsRating',
        'min_saving_percent': 'MinSavingPercent',
        'offer_count': 'OfferCount',
        'partner_tag': 'PartnerTag',
        'partner_type': 'PartnerType',
        'properties': 'Properties',
        'resources': 'Resources',
        'search_index': 'SearchIndex',
        'sort_by': 'SortBy',
        'title': 'Title'
    }

    # Attributes whose setters reject None (mandatory request fields).
    _required = frozenset(('partner_tag', 'partner_type'))

    def __init__(self, actor=None, artist=None, author=None,
                 availability=None, brand=None, browse_node_id=None,
                 condition=None, currency_of_preference=None,
                 delivery_flags=None, item_count=None, item_page=None,
                 keywords=None, languages_of_preference=None,
                 marketplace=None, max_price=None, merchant=None,
                 min_price=None, min_reviews_rating=None,
                 min_saving_percent=None, offer_count=None,
                 partner_tag=None, partner_type=None, properties=None,
                 resources=None, search_index=None, sort_by=None,
                 title=None):  # noqa: E501
        """SearchItemsRequest - a model defined in Swagger"""  # noqa: E501
        # Capture the arguments before any loop variables pollute locals().
        args = dict(locals())
        # Every declared attribute starts out with a None backing field.
        for name in self.swagger_types:
            setattr(self, '_' + name, None)
        self.discriminator = None
        # Route supplied values through their property setters; required
        # attributes are always assigned so a missing value raises here,
        # exactly as in the generated original.
        for name in self.swagger_types:
            value = args[name]
            if name in self._required or value is not None:
                setattr(self, name, value)

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(SearchItemsRequest, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, SearchItemsRequest):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other


# Install one property per declared attribute (replaces the generated
# per-attribute accessor boilerplate).
for _attr_name in SearchItemsRequest.swagger_types:
    setattr(SearchItemsRequest, _attr_name,
            _model_property(_attr_name,
                            _attr_name in SearchItemsRequest._required))
del _attr_name
<import_from_stmt>typing List Optional<import_stmt>databases<import_stmt>sqlalchemy<import_from_stmt>fastapi FastAPI<import_stmt>ormar<line_sep>app=FastAPI()<line_sep>metadata=sqlalchemy.MetaData()<line_sep>database=databases.Database("sqlite:///test.db")<line_sep>app.state.database=database<line_sep>@app.on_event("startup")<async_keyword><def_stmt>startup <arrow><none><block_start>database_=app.state.database<if_stmt><not>database_.is_connected<block_start><await>database_.connect()<block_end><block_end>@app.on_event("shutdown")<async_keyword><def_stmt>shutdown <arrow><none><block_start>database_=app.state.database<if_stmt>database_.is_connected<block_start><await>database_.disconnect()<block_end><block_end><class_stmt>Category(ormar.Model)<block_start><class_stmt>Meta<block_start>tablename="categories"<line_sep>metadata=metadata<line_sep>database=database<block_end>id:int=ormar.Integer(primary_key=<true>)<line_sep>name:str=ormar.String(max_length=100)<block_end><class_stmt>Item(ormar.Model)<block_start><class_stmt>Meta<block_start>tablename="items"<line_sep>metadata=metadata<line_sep>database=database<block_end>id:int=ormar.Integer(primary_key=<true>)<line_sep>name:str=ormar.String(max_length=100)<line_sep>category:Optional[Category]=ormar.ForeignKey(Category nullable=<true>)<block_end>@app.get("/items/" response_model=List[Item])<async_keyword><def_stmt>get_items <block_start>items=<await>Item.objects.select_related("category").all()<line_sep><return>items<block_end>@app.post("/items/" response_model=Item)<async_keyword><def_stmt>create_item item:Item<block_start><await>item.save()<line_sep><return>item<block_end>@app.post("/categories/" response_model=Category)<async_keyword><def_stmt>create_category category:Category<block_start><await>category.save()<line_sep><return>category<block_end>@app.put("/items/{item_id}")<async_keyword><def_stmt>get_item item_id:int 
item:Item<block_start>item_db=<await>Item.objects.get(pk=item_id)<line_sep><return><await>item_db.update(**item.dict())<block_end>@app.delete("/items/{item_id}")<async_keyword><def_stmt>delete_item item_id:int item:Item=<none><block_start><if_stmt>item<block_start><return>{"deleted_rows":<await>item.delete()}<block_end>item_db=<await>Item.objects.get(pk=item_id)<line_sep><return>{"deleted_rows":<await>item_db.delete()}<block_end>
# Copyright (c) 2020 Uber Technologies, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== <import_stmt>os<import_from_stmt>typing Any List Optional<import_stmt>torch<try_stmt><block_start><import_stmt>horovod.torch<line_sep>_HVD=horovod.torch<block_end><except_stmt>(ModuleNotFoundError ImportError)<block_start>_HVD=<none><block_end><def_stmt>initialize_horovod <block_start><if_stmt><not>_HVD<block_start>""" raise ValueError("Horovod backend specified, " "but cannot import `horovod.tensorflow`. " "Install Horovod following the instructions at: " "https://github.com/horovod/horovod") """<line_sep><raise>ValueError("Horovod backend specified, "<concat>"but cannot import `horovod.torch`. "<concat>"Install Horovod following the instructions at: "<concat>"https://github.com/horovod/horovod")<block_end>_HVD.init()<line_sep><return>_HVD<block_end><def_stmt>has_horovodrun <block_start>"""Returns True if running with `horovodrun` using Gloo or OpenMPI."""<line_sep><return>"OMPI_COMM_WORLD_RANK"<in>os.environ<or>"HOROVOD_RANK"<in>os.environ<block_end><def_stmt>return_first fn<block_start>"""Wraps function so results are only returned by the first (coordinator) rank. The purpose of this function is to reduce network overhead. 
"""<def_stmt>wrapped *args **kwargs<block_start>res=fn(*args **kwargs)<line_sep><return>res<if>_HVD.rank()<eq>0<else><none><block_end><return>wrapped<block_end><def_stmt>gather_all_tensors result:torch.Tensor group:Optional[Any]=<none><arrow>List[torch.Tensor]<block_start>"""Function to gather all tensors from several processes onto a list that is broadcast to all processes. Works on tensors that have the same number of dimensions, but where each dimension may differ. In this case tensors are padded, gathered and then trimmed to secure equal workload for all processes. :param result: the value to sync :param group: the process group to gather results from (not supported: always uses world) :return: list with size equal to the process group where gathered_result[i] corresponds to result tensor from process i """<if_stmt>group<is><not><none><block_start><raise>ValueError("Horovod does not support allgather using a subcommunicator at this time. "<concat>"Unset `group`.")<block_end><if_stmt>_HVD<is><none><or><not>_HVD.is_initialized()<block_start><return>[result]<block_end><if_stmt>len(result.shape)<eq>0# Convert scalars to single dimension tensors <block_start>result=result.reshape(1)<block_end>is_bool=<false><if_stmt>result.dtype<eq>torch.bool# need to convert to int due to Horovod limitation <block_start>result=result.int()<line_sep>is_bool=<true><block_end># sync and gather all gathered=_HVD.allgather(result)<line_sep>gathered_result=list(gathered.split(1 dim=0))<if_stmt>is_bool# convert back if needed <block_start>gathered_result=[t.bool()<for>t gathered_result]<block_end><return>gathered_result<block_end><def_stmt>is_distributed_available <arrow>bool<block_start><return>_HVD<is><not><none><and>_HVD.is_initialized()<block_end>
<import_stmt>yaml<import_stmt>pytest<import_from_stmt>unittest mock<import_stmt>kubernetes<import_from_stmt>kubernetes.config.config_exception ConfigException<import_from_stmt>mlflow.projects kubernetes<as>kb<import_from_stmt>mlflow.exceptions ExecutionException<import_from_stmt>mlflow.entities RunStatus<def_stmt>test_run_command_creation # pylint: disable=unused-argument <block_start>""" Tests command creation. """<line_sep>command=["python train.py --alpha 0.5 --l1-ratio 0.1" "--comment 'foo bar'" '--comment-bis "bar foo"' ]<line_sep>command=kb._get_run_command(command)<assert_stmt>["python" "train.py" "--alpha" "0.5" "--l1-ratio" "0.1" "--comment" "'foo bar'" "--comment-bis" "'bar foo'" ]<eq>command<block_end><def_stmt>test_valid_kubernetes_job_spec # pylint: disable=unused-argument <block_start>""" Tests job specification for Kubernetes. """<line_sep>custom_template=yaml.safe_load("apiVersion: batch/v1\n"<concat>"kind: Job\n"<concat>"metadata:\n"<concat>" name: pi-with-ttl\n"<concat>"spec:\n"<concat>" ttlSecondsAfterFinished: 100\n"<concat>" template:\n"<concat>" spec:\n"<concat>" containers:\n"<concat>" - name: pi\n"<concat>" image: perl\n"<concat>" command: ['perl', '-Mbignum=bpi', '-wle']\n"<concat>" env: \n"<concat>" - name: DUMMY\n"<concat>' value: "test_var"\n'<concat>" restartPolicy: Never\n")<line_sep>project_name="mlflow-docker-example"<line_sep>image_tag="image_tag"<line_sep>image_digest="5e74a5a"<line_sep>command=["mlflow" "run" "." 
"--no-conda" "-P" "alpha=0.5"]<line_sep>env_vars={"RUN_ID":"1"}<line_sep>job_definition=kb._get_kubernetes_job_definition(project_name=project_name image_tag=image_tag image_digest=image_digest command=command env_vars=env_vars job_template=custom_template )<line_sep>container_spec=job_definition["spec"]["template"]["spec"]["containers"][0]<assert_stmt>container_spec["name"]<eq>project_name<assert_stmt>container_spec["image"]<eq>image_tag+"@"+image_digest<assert_stmt>container_spec["command"]<eq>command<assert_stmt>2<eq>len(container_spec["env"])<assert_stmt>container_spec["env"][0]["name"]<eq>"DUMMY"<assert_stmt>container_spec["env"][0]["value"]<eq>"test_var"<assert_stmt>container_spec["env"][1]["name"]<eq>"RUN_ID"<assert_stmt>container_spec["env"][1]["value"]<eq>"1"<block_end><def_stmt>test_run_kubernetes_job <block_start>active_run=mock.Mock()<line_sep>project_name="mlflow-docker-example"<line_sep>image_tag="image_tag"<line_sep>image_digest="5e74a5a"<line_sep>command=["python train.py --alpha 0.5 --l1-ratio 0.1"]<line_sep>env_vars={"RUN_ID":"1"}<line_sep>kube_context="docker-for-desktop"<line_sep>job_template=yaml.safe_load("apiVersion: batch/v1\n"<concat>"kind: Job\n"<concat>"metadata:\n"<concat>" name: pi-with-ttl\n"<concat>" namespace: mlflow\n"<concat>"spec:\n"<concat>" ttlSecondsAfterFinished: 100\n"<concat>" template:\n"<concat>" spec:\n"<concat>" containers:\n"<concat>" - name: pi\n"<concat>" image: perl\n"<concat>" command: ['perl', '-Mbignum=bpi', '-wle']\n"<concat>" restartPolicy: Never\n")<with_stmt>mock.patch("kubernetes.config.load_kube_config")<as>kube_config_mock<block_start><with_stmt>mock.patch("kubernetes.client.BatchV1Api.create_namespaced_job")<as>kube_api_mock<block_start>submitted_run_obj=kb.run_kubernetes_job(project_name=project_name active_run=active_run image_tag=image_tag image_digest=image_digest command=command env_vars=env_vars job_template=job_template kube_context=kube_context 
)<assert_stmt>submitted_run_obj._mlflow_run_id<eq>active_run.info.run_id<assert_stmt>submitted_run_obj._job_name.startswith(project_name)<assert_stmt>submitted_run_obj._job_namespace<eq>"mlflow"<assert_stmt>kube_api_mock.call_count<eq>1<line_sep>args=kube_config_mock.call_args_list<assert_stmt>args[0][1]["context"]<eq>kube_context<block_end><block_end><block_end><def_stmt>test_run_kubernetes_job_current_kubecontext <block_start>active_run=mock.Mock()<line_sep>project_name="mlflow-docker-example"<line_sep>image_tag="image_tag"<line_sep>image_digest="5e74a5a"<line_sep>command=["python train.py --alpha 0.5 --l1-ratio 0.1"]<line_sep>env_vars={"RUN_ID":"1"}<line_sep>kube_context=<none><line_sep>job_template=yaml.safe_load("apiVersion: batch/v1\n"<concat>"kind: Job\n"<concat>"metadata:\n"<concat>" name: pi-with-ttl\n"<concat>" namespace: mlflow\n"<concat>"spec:\n"<concat>" ttlSecondsAfterFinished: 100\n"<concat>" template:\n"<concat>" spec:\n"<concat>" containers:\n"<concat>" - name: pi\n"<concat>" image: perl\n"<concat>" command: ['perl', '-Mbignum=bpi', '-wle']\n"<concat>" restartPolicy: Never\n")<with_stmt>mock.patch("kubernetes.config.load_kube_config")<as>kube_config_mock<block_start><with_stmt>mock.patch("kubernetes.config.load_incluster_config")<as>incluster_kube_config_mock<block_start><with_stmt>mock.patch("kubernetes.client.BatchV1Api.create_namespaced_job")<as>kube_api_mock<block_start>submitted_run_obj=kb.run_kubernetes_job(project_name=project_name active_run=active_run image_tag=image_tag image_digest=image_digest command=command env_vars=env_vars job_template=job_template kube_context=kube_context 
)<assert_stmt>submitted_run_obj._mlflow_run_id<eq>active_run.info.run_id<assert_stmt>submitted_run_obj._job_name.startswith(project_name)<assert_stmt>submitted_run_obj._job_namespace<eq>"mlflow"<assert_stmt>kube_api_mock.call_count<eq>1<assert_stmt>kube_config_mock.call_count<eq>1<assert_stmt>incluster_kube_config_mock.call_count<eq>0<block_end><block_end><block_end><block_end><def_stmt>test_run_kubernetes_job_in_cluster <block_start>active_run=mock.Mock()<line_sep>project_name="mlflow-docker-example"<line_sep>image_tag="image_tag"<line_sep>image_digest="5e74a5a"<line_sep>command=["python train.py --alpha 0.5 --l1-ratio 0.1"]<line_sep>env_vars={"RUN_ID":"1"}<line_sep>kube_context=<none><line_sep>job_template=yaml.safe_load("apiVersion: batch/v1\n"<concat>"kind: Job\n"<concat>"metadata:\n"<concat>" name: pi-with-ttl\n"<concat>" namespace: mlflow\n"<concat>"spec:\n"<concat>" ttlSecondsAfterFinished: 100\n"<concat>" template:\n"<concat>" spec:\n"<concat>" containers:\n"<concat>" - name: pi\n"<concat>" image: perl\n"<concat>" command: ['perl', '-Mbignum=bpi', '-wle']\n"<concat>" restartPolicy: Never\n")<with_stmt>mock.patch("kubernetes.config.load_kube_config")<as>kube_config_mock<block_start>kube_config_mock.side_effect=ConfigException()<with_stmt>mock.patch("kubernetes.config.load_incluster_config")<as>incluster_kube_config_mock<block_start><with_stmt>mock.patch("kubernetes.client.BatchV1Api.create_namespaced_job")<as>kube_api_mock<block_start>submitted_run_obj=kb.run_kubernetes_job(project_name=project_name active_run=active_run image_tag=image_tag image_digest=image_digest command=command env_vars=env_vars job_template=job_template kube_context=kube_context 
)<assert_stmt>submitted_run_obj._mlflow_run_id<eq>active_run.info.run_id<assert_stmt>submitted_run_obj._job_name.startswith(project_name)<assert_stmt>submitted_run_obj._job_namespace<eq>"mlflow"<assert_stmt>kube_api_mock.call_count<eq>1<assert_stmt>kube_config_mock.call_count<eq>1<assert_stmt>incluster_kube_config_mock.call_count<eq>1<block_end><block_end><block_end><block_end><def_stmt>test_push_image_to_registry <block_start>image_uri="dockerhub_account/mlflow-kubernetes-example"<with_stmt>mock.patch("docker.from_env")<as>docker_mock<block_start>client=mock.MagicMock()<line_sep>docker_mock.return_value=client<line_sep>kb.push_image_to_registry(image_uri)<assert_stmt>client.images.push.call_count<eq>1<line_sep>args=client.images.push.call_args_list<assert_stmt>args[0][1]["repository"]<eq>image_uri<block_end><block_end><def_stmt>test_push_image_to_registry_handling_errors <block_start>image_uri="dockerhub_account/mlflow-kubernetes-example"<with_stmt>pytest.raises(ExecutionException)<block_start>kb.push_image_to_registry(image_uri)<block_end><block_end><def_stmt>test_submitted_run_get_status_killed <block_start>mlflow_run_id=1<line_sep>job_name="job-name"<line_sep>job_namespace="job-namespace"<with_stmt>mock.patch("kubernetes.client.BatchV1Api.delete_namespaced_job")<as>kube_api_mock<block_start>submitted_run=kb.KubernetesSubmittedRun(mlflow_run_id job_name job_namespace)<line_sep>submitted_run.cancel()<assert_stmt>RunStatus.KILLED<eq>submitted_run.get_status()<assert_stmt>kube_api_mock.call_count<eq>1<line_sep>args=kube_api_mock.call_args_list<assert_stmt>args[0][1]["name"]<eq>job_name<assert_stmt>args[0][1]["namespace"]<eq>job_namespace<block_end><block_end><def_stmt>test_submitted_run_get_status_failed <block_start>mlflow_run_id=1<line_sep>job_name="job-name"<line_sep>job_namespace="job-namespace"<line_sep>condition=kubernetes.client.models.V1JobCondition(type="Failed" status="True")<line_sep>job_status=kubernetes.client.models.V1JobStatus(active=1 
completion_time=<none> conditions=[condition] failed=1 start_time=1 succeeded=<none> )<line_sep>job=kubernetes.client.models.V1Job(status=job_status)<with_stmt>mock.patch("kubernetes.client.BatchV1Api.read_namespaced_job_status")<as>kube_api_mock<block_start>kube_api_mock.return_value=job<line_sep>submitted_run=kb.KubernetesSubmittedRun(mlflow_run_id job_name job_namespace)<line_sep>print("status" submitted_run.get_status())<assert_stmt>RunStatus.FAILED<eq>submitted_run.get_status()<assert_stmt>kube_api_mock.call_count<eq>1<line_sep>args=kube_api_mock.call_args_list<assert_stmt>args[0][1]["name"]<eq>job_name<assert_stmt>args[0][1]["namespace"]<eq>job_namespace<block_end><block_end><def_stmt>test_submitted_run_get_status_succeeded <block_start>mlflow_run_id=1<line_sep>job_name="job-name"<line_sep>job_namespace="job-namespace"<line_sep>condition=kubernetes.client.models.V1JobCondition(type="Complete" status="True")<line_sep>job_status=kubernetes.client.models.V1JobStatus(active=<none> completion_time=<none> conditions=[condition] failed=<none> start_time=<none> succeeded=1 )<line_sep>job=kubernetes.client.models.V1Job(status=job_status)<with_stmt>mock.patch("kubernetes.client.BatchV1Api.read_namespaced_job_status")<as>kube_api_mock<block_start>kube_api_mock.return_value=job<line_sep>submitted_run=kb.KubernetesSubmittedRun(mlflow_run_id job_name job_namespace)<line_sep>print("status" submitted_run.get_status())<assert_stmt>RunStatus.FINISHED<eq>submitted_run.get_status()<assert_stmt>kube_api_mock.call_count<eq>1<line_sep>args=kube_api_mock.call_args_list<assert_stmt>args[0][1]["name"]<eq>job_name<assert_stmt>args[0][1]["namespace"]<eq>job_namespace<block_end><block_end><def_stmt>test_submitted_run_get_status_running <block_start>mlflow_run_id=1<line_sep>job_name="job-name"<line_sep>job_namespace="job-namespace"<line_sep>job_status=kubernetes.client.models.V1JobStatus(active=1 completion_time=<none> conditions=<none> failed=1 start_time=1 
succeeded=1)<line_sep>job=kubernetes.client.models.V1Job(status=job_status)<with_stmt>mock.patch("kubernetes.client.BatchV1Api.read_namespaced_job_status")<as>kube_api_mock<block_start>kube_api_mock.return_value=job<line_sep>submitted_run=kb.KubernetesSubmittedRun(mlflow_run_id job_name job_namespace)<assert_stmt>RunStatus.RUNNING<eq>submitted_run.get_status()<assert_stmt>kube_api_mock.call_count<eq>1<line_sep>args=kube_api_mock.call_args_list<line_sep>print(args)<assert_stmt>args[0][1]["name"]<eq>job_name<assert_stmt>args[0][1]["namespace"]<eq>job_namespace<block_end><block_end><def_stmt>test_state_transitions <block_start>mlflow_run_id=1<line_sep>job_name="job-name"<line_sep>job_namespace="job-namespace"<line_sep>submitted_run=kb.KubernetesSubmittedRun(mlflow_run_id job_name job_namespace)<with_stmt>mock.patch("kubernetes.client.BatchV1Api.read_namespaced_job_status")<as>kube_api_mock<block_start><def_stmt>set_return_value **kwargs<block_start>job_status=kubernetes.client.models.V1JobStatus(**kwargs)<line_sep>kube_api_mock.return_value=kubernetes.client.models.V1Job(status=job_status)<block_end>set_return_value()<assert_stmt>RunStatus.SCHEDULED<eq>submitted_run.get_status()<line_sep>set_return_value(start_time=1)<assert_stmt>RunStatus.RUNNING<eq>submitted_run.get_status()<line_sep>set_return_value(start_time=1 failed=1)<assert_stmt>RunStatus.RUNNING<eq>submitted_run.get_status()<line_sep>set_return_value(start_time=1 failed=1)<assert_stmt>RunStatus.RUNNING<eq>submitted_run.get_status()<line_sep>set_return_value(start_time=1 failed=1 active=1)<assert_stmt>RunStatus.RUNNING<eq>submitted_run.get_status()<line_sep>set_return_value(start_time=1 failed=1 succeeded=1)<assert_stmt>RunStatus.RUNNING<eq>submitted_run.get_status()<line_sep>set_return_value(start_time=1 failed=1 succeeded=1 completion_time=2)<assert_stmt>RunStatus.RUNNING<eq>submitted_run.get_status()<line_sep>condition=kubernetes.client.models.V1JobCondition(type="Complete" 
status="True")<line_sep>set_return_value(conditions=[condition] failed=1 start_time=1 completion_time=2 succeeded=1)<assert_stmt>RunStatus.FINISHED<eq>submitted_run.get_status()<block_end><block_end>
<import_stmt>argparse<import_stmt>os<import_stmt>windows<import_stmt>windows.debug.symbols<as>symbols<line_sep>parser=argparse.ArgumentParser(prog=__file__ formatter_class=argparse.ArgumentDefaultsHelpFormatter)<line_sep>parser.add_argument('pattern')<line_sep>parser.add_argument('file' help="The PE file to load")<line_sep>parser.add_argument('--addr' type=<lambda>x:int(x 0) default=0 help="The load address of the PE")<line_sep>parser.add_argument('--tag' type=<lambda>x:int(x 0) default=0)<line_sep>parser.add_argument('--dbghelp' help='The path of DBG help to use (default use env:PFW_DBGHELP_PATH)')<line_sep>args=parser.parse_args()<if_stmt>args.dbghelp<block_start>symbols.set_dbghelp_path(args.dbghelp)<block_end><else_stmt><block_start><if_stmt>"PFW_DBGHELP_PATH"<not><in>os.environ<block_start>print("Not dbghelp path given and no environ var 'PFW_DBGHELP_PATH' sample may fail")<block_end><block_end>sh=symbols.VirtualSymbolHandler()<line_sep>mod=sh.load_file(path=args.file addr=args.addr)<line_sep>res=sh.search(args.pattern mod=mod tag=args.tag)<line_sep>print("{0} symbols found:".format(len(res)))<for_stmt>sym res<block_start>print(" * {0!r}".format(sym))<block_end>
<import_from_future_stmt> print_function<import_stmt>logrun<import_stmt>deepstate_base<class_stmt>FixtureTest(deepstate_base.DeepStateTestCase)<block_start><def_stmt>run_deepstate self deepstate<block_start>(r output)=logrun.logrun([deepstate "build/examples/Fixture"] "deepstate.out" 1800)<line_sep>self.assertEqual(r 0)<line_sep>self.assertTrue("Passed: MyTest_Something"<in>output)<line_sep>self.assertFalse("Failed: MyTest_Something"<in>output)<line_sep>self.assertTrue("Setting up!"<in>output)<line_sep>self.assertTrue("Tearing down!"<in>output)<block_end><block_end>
<import_stmt>threading json<import_from_stmt>pathlib Path<import_from_stmt>http.server SimpleHTTPRequestHandler HTTPServer<import_from_stmt>coldtype.renderer.winman.passthrough WinmanPassthrough<import_from_stmt>coldtype.renderer.config ConfigOption ColdtypeConfig<import_from_stmt>coldtype.pens.svgpen SVGPen<import_from_stmt>coldtype.pens.jsonpen JSONPen<line_sep>WEBSOCKET_PORT=<none><class_stmt>WebViewerHandler(SimpleHTTPRequestHandler)<block_start><def_stmt>_set_headers self<block_start>self.send_response(200)<line_sep>self.send_header("Content-type" "text/html")<line_sep>self.end_headers()<block_end><def_stmt>do_GET self<block_start>self._set_headers()<line_sep>self.wfile.write((Path(__file__).parent.parent.parent/"webserver/webviewer.html").read_text().replace("localhost:8007" f"localhost:{WEBSOCKET_PORT}").encode("utf8"))<block_end><def_stmt>do_HEAD self<block_start>self._set_headers()<block_end><def_stmt>log_message self format *args<block_start><pass><block_end><block_end><class_stmt>WinmanWebview(WinmanPassthrough)<block_start><def_stmt>__init__ self config:ColdtypeConfig renderer<block_start>self.config=config<line_sep>self.renderer=renderer<line_sep><global>WEBSOCKET_PORT<line_sep>WEBSOCKET_PORT=self.config.websocket_port<line_sep>wv_port=self.config.webviewer_port<if_stmt>wv_port<ne>0<block_start>print("WEBVIEWER>" f"localhost:{wv_port}")<def_stmt>start_server port<block_start>httpd=HTTPServer(('' port) WebViewerHandler)<line_sep>httpd.serve_forever()<block_end>daemon=threading.Thread(name='daemon_server' target=start_server args=(wv_port ))<line_sep>daemon.setDaemon(<true>)<line_sep>daemon.start()<block_end><block_end><def_stmt>turn_over self<block_start>renders=[]<try_stmt><block_start>title=self.renderer.watchees[0][1].name<block_end><except_stmt><block_start>title="coldtype"<block_end><for_stmt>idx,(render result rp) enumerate(self.renderer.previews_waiting)<block_start><if_stmt>self.renderer.args.format<eq>"canvas"# TODO config? 
<block_start>renders.append(dict(fmt="canvas" jsonpen=JSONPen.Composite(result render.rect) rect=[*render.rect] bg=[*render.bg]))<block_end><else_stmt><block_start>renders.append(dict(fmt="svg" svg=SVGPen.Composite(result render.rect viewBox=render.viewBox) rect=[*render.rect] bg=[*render.bg]))<block_end><block_end><if_stmt>renders<block_start><for_stmt>_,client self.renderer.server.connections.items()<block_start><if_stmt>hasattr(client "webviewer")<and>client.webviewer<block_start>client.sendMessage(json.dumps({"renders":renders "title":title}))<block_end><block_end><block_end><return>[]<block_end><block_end>
# Copyright 2017-2019 EPAM Systems, Inc. (https://www.epam.com/) # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>luigi<import_from_stmt>luigi.util inherits<import_stmt>pipeline<import_from_stmt>pipeline LogEntry TaskStatus<class_stmt>DefaultPipeline(pipeline.Pipeline)<block_start><def_stmt>requires self<block_start><yield>self.clone(Task)<block_end><block_end>@inherits(DefaultPipeline)<class_stmt>Task(pipeline.HelperTask)<block_start>helper=<false><def_stmt>output self<block_start><return>luigi.LocalTarget("./tmp.txt")<block_end><def_stmt>run self<block_start>self.log_event(LogEntry(self.run_id TaskStatus.RUNNING "Running luigi pipeline" self.__repr__() self.uu_name))<with_stmt>open(self.output().path "w")<as>result<block_start>result.write("Running luigi pipeline")<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>val=luigi.run()<if_stmt><not>val<block_start>sys.exit(1)<block_end><block_end>
# coding=utf-8 # # Copyright 2016 F5 Networks Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """BIG-IP® system config module REST URI ``http://localhost/mgmt/tm/sys/config`` GUI Path N/A REST Kind ``tm:sys:config:*`` """<import_from_stmt>f5.bigip.mixins CommandExecutionMixin<import_from_stmt>f5.bigip.resource Collection<import_from_stmt>f5.bigip.resource OrganizingCollection<import_from_stmt>f5.bigip.resource Resource<class_stmt>Crypto(OrganizingCollection)<block_start><def_stmt>__init__ self sys<block_start>super(Crypto self).__init__(sys)<line_sep>self._meta_data['allowed_lazy_attributes']=[Certs Keys]<block_end><block_end><class_stmt>Keys(Collection CommandExecutionMixin)<block_start>"""BIG-IP® Crypto key collection note:: This collection supports install command. Given the fact that we will be expecting hyphen parameters, the function will need to utilize variable keyword argument syntax. In other words define a dictionary with the arbitrary keys and then pass it as in the form **foo into the method call. e.g. 
param_set ={'from-local-file': FOOPATH, 'name': 'FOOKEY'} bigip.tm.sys.crypto.keys.exec_cmd('install', **param_set) """<def_stmt>__init__ self crypto<block_start>super(Keys self).__init__(crypto)<line_sep>self._meta_data['allowed_lazy_attributes']=[Key]<line_sep>self._meta_data['allowed_commands'].append('install')<line_sep>self._meta_data['attribute_registry']={'tm:sys:crypto:key:keystate':Key}<block_end><block_end><class_stmt>Key(Resource)<block_start>"""BIG-IP® Crypto key resource"""<def_stmt>__init__ self keys<block_start>super(Key self).__init__(keys)<line_sep>self._meta_data['required_json_kind']='tm:sys:crypto:key:keystate'<block_end><block_end><class_stmt>Certs(Collection CommandExecutionMixin)<block_start>"""BIG-IP® Crypto cert collection note:: This collection supports install command. Given the fact that we will be expecting hyphen parameters, the function will need to utilize variable keyword argument syntax. In other words define a dictionary with the arbitrary keys and then pass it as in the form **foo into the method call. e.g. param_set ={'from-local-file': FOOPATH, 'name': 'FOOCERT'} bigip.tm.sys.crypto.certs.exec_cmd('install', **param_set) """<def_stmt>__init__ self crypto<block_start>super(Certs self).__init__(crypto)<line_sep>self._meta_data['allowed_lazy_attributes']=[Cert]<line_sep>self._meta_data['allowed_commands'].append('install')<line_sep>self._meta_data['attribute_registry']={'tm:sys:crypto:cert:certstate':Cert}<block_end><block_end><class_stmt>Cert(Resource)<block_start>"""BIG-IP® Crypto cert resource"""<def_stmt>__init__ self certs<block_start>super(Cert self).__init__(certs)<line_sep>self._meta_data['required_json_kind']='tm:sys:crypto:cert:certstate'<block_end><block_end>
<import_from_stmt>unittest.case TestCase<import_stmt>unittest<import_stmt>pandas<as>pd<import_stmt>numpy<as>np<import_from_stmt>datetime datetime<import_from_stmt>qlib init<import_from_stmt>qlib.config C<import_from_stmt>qlib.log TimeInspector<import_from_stmt>qlib.utils.time cal_sam_minute<as>cal_sam_minute_new get_min_cal<def_stmt>cal_sam_minute x sam_minutes<block_start>""" Sample raw calendar into calendar with sam_minutes freq, shift represents the shift minute the market time - open time of stock market is [9:30 - shift*pd.Timedelta(minutes=1)] - mid close time of stock market is [11:29 - shift*pd.Timedelta(minutes=1)] - mid open time of stock market is [13:00 - shift*pd.Timedelta(minutes=1)] - close time of stock market is [14:59 - shift*pd.Timedelta(minutes=1)] """<line_sep># TODO: actually, this version is much faster when no cache or optimization day_time=pd.Timestamp(x.date())<line_sep>shift=C.min_data_shift<line_sep>open_time=day_time+pd.Timedelta(hours=9 minutes=30)-shift<times>pd.Timedelta(minutes=1)<line_sep>mid_close_time=day_time+pd.Timedelta(hours=11 minutes=29)-shift<times>pd.Timedelta(minutes=1)<line_sep>mid_open_time=day_time+pd.Timedelta(hours=13 minutes=00)-shift<times>pd.Timedelta(minutes=1)<line_sep>close_time=day_time+pd.Timedelta(hours=14 minutes=59)-shift<times>pd.Timedelta(minutes=1)<if_stmt>open_time<le>x<le>mid_close_time<block_start>minute_index=(x-open_time).seconds<floordiv>60<block_end><elif_stmt>mid_open_time<le>x<le>close_time<block_start>minute_index=(x-mid_open_time).seconds<floordiv>60+120<block_end><else_stmt><block_start><raise>ValueError("datetime of calendar is out of 
range")<block_end>minute_index=minute_index<floordiv>sam_minutes<times>sam_minutes<if_stmt>0<le>minute_index<l>120<block_start><return>open_time+minute_index<times>pd.Timedelta(minutes=1)<block_end><elif_stmt>120<le>minute_index<l>240<block_start><return>mid_open_time+(minute_index-120)<times>pd.Timedelta(minutes=1)<block_end><else_stmt><block_start><raise>ValueError("calendar minute_index error, check `min_data_shift` in qlib.config.C")<block_end><block_end><class_stmt>TimeUtils(TestCase)<block_start>@classmethod<def_stmt>setUpClass cls<block_start>init()<block_end><def_stmt>test_cal_sam_minute self# test the correctness of the code <block_start>random_n=1000<line_sep>cal=get_min_cal()<def_stmt>gen_args <block_start><for_stmt>time np.random.choice(cal size=random_n replace=<true>)<block_start>sam_minutes=np.random.choice([1 2 3 4 5 6])<line_sep>dt=pd.Timestamp(datetime(2021 month=3 day=3 hour=time.hour minute=time.minute second=time.second microsecond=time.microsecond ))<line_sep>args=dt sam_minutes<line_sep><yield>args<block_end><block_end><for_stmt>args gen_args()<block_start><assert_stmt>cal_sam_minute(*args)<eq>cal_sam_minute_new(*args)<block_end># test the performance of the code args_l=list(gen_args())<with_stmt>TimeInspector.logt()<block_start><for_stmt>args args_l<block_start>cal_sam_minute(*args)<block_end><block_end><with_stmt>TimeInspector.logt()<block_start><for_stmt>args args_l<block_start>cal_sam_minute_new(*args)<block_end><block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end>
<import_from_stmt>math ceil<import_stmt>hmac<import_stmt>hashlib<line_sep>BLOCK_SIZE=32<def_stmt>extract salt:bytes ikm:bytes<arrow>bytes<block_start>h=hmac.new(salt ikm hashlib.sha256)<line_sep><return>h.digest()<block_end><def_stmt>expand L:int prk:bytes info:bytes<arrow>bytes<block_start>N:int=ceil(L/BLOCK_SIZE)<line_sep>bytes_written:int=0<line_sep>okm:bytes=b""<for_stmt>i range(1 N+1)<block_start><if_stmt>i<eq>1<block_start>h=hmac.new(prk info+bytes([1]) hashlib.sha256)<line_sep>T:bytes=h.digest()<block_end><else_stmt><block_start>h=hmac.new(prk T+info+bytes([i]) hashlib.sha256)<line_sep>T=h.digest()<block_end>to_write=L-bytes_written<if_stmt>to_write<g>BLOCK_SIZE<block_start>to_write=BLOCK_SIZE<block_end>okm<augadd>T[:to_write]<line_sep>bytes_written<augadd>to_write<block_end><assert_stmt>bytes_written<eq>L<line_sep><return>okm<block_end><def_stmt>extract_expand L:int key:bytes salt:bytes info:bytes<arrow>bytes<block_start>prk=extract(salt key)<line_sep><return>expand(L prk info)<block_end>""" Copyright 2020 Chia Network Inc Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """<line_sep>
from numbers import Real, Integral

import numpy as np

import openmc.checkvalue as cv
from .angle_energy import AngleEnergy
from .endf import get_cont_record


class NBodyPhaseSpace(AngleEnergy):
    """Energy/angle distribution sampled from N-body phase space.

    Parameters
    ----------
    total_mass : float
        Total mass of product particles
    n_particles : int
        Number of product particles
    atomic_weight_ratio : float
        Atomic weight ratio of target nuclide
    q_value : float
        Q value for reaction in eV

    Attributes
    ----------
    total_mass : float
        Total mass of product particles
    n_particles : int
        Number of product particles
    atomic_weight_ratio : float
        Atomic weight ratio of target nuclide
    q_value : float
        Q value for reaction in eV

    """

    def __init__(self, total_mass, n_particles, atomic_weight_ratio, q_value):
        # Each assignment below runs through the validating setter.
        self.total_mass = total_mass
        self.n_particles = n_particles
        self.atomic_weight_ratio = atomic_weight_ratio
        self.q_value = q_value

    @property
    def total_mass(self):
        return self._total_mass

    @total_mass.setter
    def total_mass(self, total_mass):
        label = 'N-body phase space total mass'
        cv.check_type(label, total_mass, Real)
        cv.check_greater_than(label, total_mass, 0.)
        self._total_mass = total_mass

    @property
    def n_particles(self):
        return self._n_particles

    @n_particles.setter
    def n_particles(self, n_particles):
        label = 'N-body phase space number of particles'
        cv.check_type(label, n_particles, Integral)
        cv.check_greater_than(label, n_particles, 0)
        self._n_particles = n_particles

    @property
    def atomic_weight_ratio(self):
        return self._atomic_weight_ratio

    @atomic_weight_ratio.setter
    def atomic_weight_ratio(self, atomic_weight_ratio):
        label = 'N-body phase space atomic weight ratio'
        cv.check_type(label, atomic_weight_ratio, Real)
        cv.check_greater_than(label, atomic_weight_ratio, 0.0)
        self._atomic_weight_ratio = atomic_weight_ratio

    @property
    def q_value(self):
        return self._q_value

    @q_value.setter
    def q_value(self, q_value):
        label = 'N-body phase space Q value'
        cv.check_type(label, q_value, Real)
        self._q_value = q_value

    def to_hdf5(self, group):
        """Write distribution to an HDF5 group

        Parameters
        ----------
        group : h5py.Group
            HDF5 group to write to

        """
        group.attrs['type'] = np.string_('nbody')
        for key in ('total_mass', 'n_particles', 'atomic_weight_ratio',
                    'q_value'):
            group.attrs[key] = getattr(self, key)

    @classmethod
    def from_hdf5(cls, group):
        """Generate N-body phase space distribution from HDF5 data

        Parameters
        ----------
        group : h5py.Group
            HDF5 group to read from

        Returns
        -------
        openmc.data.NBodyPhaseSpace
            N-body phase space distribution

        """
        attrs = group.attrs
        return cls(attrs['total_mass'], attrs['n_particles'],
                   attrs['atomic_weight_ratio'], attrs['q_value'])

    @classmethod
    def from_ace(cls, ace, idx, q_value):
        """Generate N-body phase space distribution from ACE data

        Parameters
        ----------
        ace : openmc.data.ace.Table
            ACE table to read from
        idx : int
            Index in XSS array of the start of the energy distribution data
            (LDIS + LOCC - 1)
        q_value : float
            Q-value for reaction in eV

        Returns
        -------
        openmc.data.NBodyPhaseSpace
            N-body phase space distribution

        """
        n_particles = int(ace.xss[idx])
        total_mass = ace.xss[idx + 1]
        return cls(total_mass, n_particles, ace.atomic_weight_ratio, q_value)

    @classmethod
    def from_endf(cls, file_obj):
        """Generate N-body phase space distribution from an ENDF evaluation

        Parameters
        ----------
        file_obj : file-like object
            ENDF file positions at the start of the N-body phase space
            distribution

        Returns
        -------
        openmc.data.NBodyPhaseSpace
            N-body phase space distribution

        """
        items = get_cont_record(file_obj)
        # TODO: get awr and Q value
        return cls(items[0], items[5], 1.0, 0.0)
""" Handles the template tags and description @copyright: 2002-2007 <NAME> @author: <NAME> @author: <NAME> @copyright: 2016 <NAME> @license: MIT (see LICENSE.txt) - THIS PROGRAM COMES WITH NO WARRANTY """<import_stmt>glob<import_stmt>logging<import_stmt>os<import_stmt>wx<import_from_stmt>xml.dom minidom<import_from_stmt>xml.sax saxutils<import_stmt>common<import_stmt>config<import_stmt>misc<import_stmt>templates_ui<class_stmt>Template(object)<block_start>"A class that handles the specific aspects of template files"<def_stmt>__init__ self filename=<none><block_start>self.author=''<line_sep>self.description=''<line_sep>self.instructions=''<line_sep>self.filename=filename<if_stmt>filename<is><not><none><block_start>filexml=minidom.parse(filename)<line_sep># we have no use for all the xml data in the file. We only care # about what is between the "description" tags templatedata=filexml.getElementsByTagName('templatedata')<if_stmt>len(templatedata)<block_start>desc_xml=templatedata[0]<try_stmt><block_start>self.author=saxutils.unescape(desc_xml.getElementsByTagName('author')[0].firstChild.data)<block_end><except_stmt>(IndexError AttributeError)<block_start>self.author=''<block_end><try_stmt><block_start>self.description=saxutils.unescape(desc_xml.getElementsByTagName('description')[0].firstChild.data)<block_end><except_stmt>(IndexError AttributeError)<block_start>self.description=''<block_end><try_stmt><block_start>self.instructions=saxutils.unescape(desc_xml.getElementsByTagName('instructions')[0].firstChild.data)<block_end><except_stmt>(IndexError AttributeError)<block_start>self.instructions=''<block_end><block_end><else_stmt><block_start>self.author=''<line_sep>self.description=''<line_sep>self.instructions=''<block_end><block_end><block_end><def_stmt>write self output tabs<block_start>outer_tab=u' '<times>tabs<line_sep>stmt=[u'%s<templatedata>\n'%outer_tab]<line_sep>stmt<augadd>common.format_xml_tag(u'author' self.author 
tabs+1)<line_sep>stmt<augadd>common.format_xml_tag(u'description' self.description tabs+1)<line_sep>stmt<augadd>common.format_xml_tag(u'instructions' self.instructions tabs+1)<line_sep>stmt.append(u'%s</templatedata>\n'%outer_tab)<line_sep>output.extend(stmt)<block_end><block_end><class_stmt>TemplateListDialog(templates_ui.TemplateListDialog)<block_start>"""\ Class TemplateListDialog @ivar _logger: Class specific logging instance """<def_stmt>__init__ self<block_start>templates_ui.TemplateListDialog.__init__(self <none> -1 "")<line_sep>self.templates=[]<line_sep>self.fill_template_list()<line_sep>self.selected_template=<none><block_end><def_stmt>get_selected self<block_start>index=self.template_names.GetSelection()<if_stmt>index<ge>0<block_start><return>self.templates[index]<block_end><else_stmt><block_start><return><none><block_end><block_end><def_stmt>on_open self event<block_start>self.selected_template=self.get_selected()<line_sep>self.EndModal(wx.ID_OPEN)<block_end><def_stmt>on_select_template self event<block_start>self.selected_template=self.get_selected()<if_stmt>self.selected_template<is><not><none><block_start>t=Template(self.selected_template)<line_sep>self.set_template_name(self.template_names.GetStringSelection())<line_sep>self.author.SetValue(misc.wxstr(t.author))<line_sep>self.description.SetValue(misc.wxstr(t.description))<line_sep>self.instructions.SetValue(misc.wxstr(t.instructions))<if_stmt>os.path.dirname(self.selected_template)<eq>config.templates_path<block_start>self.btn_delete.Disable()<line_sep>self.btn_edit.Disable()<block_end><else_stmt><block_start>self.btn_delete.Enable()<line_sep>self.btn_edit.Enable()<block_end><block_end><else_stmt><block_start>self.set_template_name("")<line_sep>self.author.SetValue("")<line_sep>self.description.SetValue("")<line_sep>self.instructions.SetValue("")<block_end><if_stmt>event<block_start>event.Skip()<block_end><block_end><def_stmt>set_template_name self 
name<block_start>self.template_name.SetLabel(_("wxGlade template:\n")+misc.wxstr(name))<block_end><def_stmt>on_edit self event<block_start>self.selected_template=self.get_selected()<line_sep>self.EndModal(wx.ID_EDIT)<block_end><def_stmt>on_delete self event<block_start>self.selected_template=self.get_selected()<if_stmt>self.selected_template<is><not><none><block_start>name=self.template_names.GetStringSelection()<if_stmt>wx.MessageBox(_("Delete template '%s'?")%misc.wxstr(name) _("Are you sure?") style=wx.YES|wx.NO|wx.CENTRE)<eq>wx.YES<block_start><try_stmt><block_start>os.unlink(self.selected_template)<block_end><except_stmt>Exception<block_start>logging.exception(_('Internal Error'))<block_end>self.fill_template_list()<line_sep>self.selected_template=<none><block_end><block_end><block_end><def_stmt>fill_template_list self<block_start>self.templates=load_templates()<line_sep>self.template_names.Clear()<if_stmt>self.templates<block_start><for_stmt>n self.templates<block_start>self.template_names.Append(os.path.splitext(os.path.basename(n))[0])<line_sep># show details of first template self.template_names.SetSelection(0)<line_sep>self.on_select_template(<none>)<block_end><block_end><block_end><block_end># end of class TemplateListDialog <def_stmt>load_templates <block_start>"Finds all the available templates"<if_stmt>config.appdata_path<ne>config.wxglade_path<block_start>extra=glob.glob(os.path.join(config.appdata_path "templates" "*.wgt"))<block_end><else_stmt><block_start>extra=[]<block_end><return>sorted(glob.glob(os.path.join(config.templates_path "*.wgt")))+sorted(extra)<block_end><def_stmt>select_template <block_start>"Returns the filename of a template to load"<line_sep>dlg=TemplateListDialog()<line_sep>dlg.btn_delete.Hide()<line_sep>dlg.btn_edit.Hide()<if_stmt>dlg.ShowModal()<eq>wx.ID_OPEN<block_start>ret=dlg.selected_template<block_end><else_stmt><block_start>ret=<none><block_end>dlg.Destroy()<line_sep><return>ret<block_end><def_stmt>save_template 
data=<none><block_start>"Returns an out file name and template description for saving a template"<line_sep>dlg=templates_ui.TemplateInfoDialog(<none> -1 "")<if_stmt>data<is><not><none><block_start>dlg.template_name.SetValue(misc.wxstr(os.path.basename(os.path.splitext(data.filename)[0])))<line_sep>dlg.author.SetValue(misc.wxstr(data.author))<line_sep>dlg.description.SetValue(misc.wxstr(data.description))<line_sep>dlg.instructions.SetValue(misc.wxstr(data.instructions))<block_end>ret=<none><line_sep>retdata=Template()<if_stmt>dlg.ShowModal()<eq>wx.ID_OK<block_start>ret=dlg.template_name.GetValue().strip()<line_sep>retdata.author=dlg.author.GetValue()<line_sep>retdata.description=dlg.description.GetValue()<line_sep>retdata.instructions=dlg.instructions.GetValue()<if_stmt><not>ret<block_start>wx.MessageBox(_("Can't save a template with an empty name") _("Error") wx.OK|wx.ICON_ERROR)<block_end><block_end>dlg.Destroy()<line_sep>name=ret<if_stmt>ret<block_start>template_directory=os.path.join(config.appdata_path 'templates')<if_stmt><not>os.path.exists(template_directory)<block_start><try_stmt><block_start>os.makedirs(template_directory)<block_end><except_stmt>EnvironmentError<block_start>logging.exception(_('ERROR creating directory "%s"') template_directory)<line_sep><return><none> retdata<block_end><block_end>ret=os.path.join(template_directory ret+'.wgt')<block_end><if_stmt>ret<and>os.path.exists(ret)<and>wx.MessageBox(_("A template called '%s' already exists:\ndo you want to overwrite it?")%name _("Question") wx.YES|wx.NO|wx.ICON_QUESTION)<ne>wx.YES<block_start>ret=<none><block_end><return>ret retdata<block_end><def_stmt>manage_templates <block_start>dlg=TemplateListDialog()<line_sep>dlg.btn_open.Hide()<line_sep>#dlg.btn_edit.Hide() ret=<none><if_stmt>dlg.ShowModal()<eq>templates_ui.ID_EDIT<block_start>ret=dlg.selected_template<block_end>dlg.Destroy()<line_sep><return>ret<block_end>
# -*- coding: utf-8 -*-

"""Tests for CLR property support: public/protected/internal/private,
instance and static access, and descriptor-protocol behaviour."""

import pytest

from Python.Test import PropertyTest


def test_public_instance_property():
    """A public instance property can be read and written, not deleted."""
    obj = PropertyTest()

    assert obj.PublicProperty == 0
    obj.PublicProperty = 1
    assert obj.PublicProperty == 1

    with pytest.raises(TypeError):
        del PropertyTest().PublicProperty


def test_public_static_property():
    """A public static property is shared between class and instances."""
    obj = PropertyTest()

    assert PropertyTest.PublicStaticProperty == 0
    PropertyTest.PublicStaticProperty = 1
    assert PropertyTest.PublicStaticProperty == 1

    assert obj.PublicStaticProperty == 1
    obj.PublicStaticProperty = 0
    assert obj.PublicStaticProperty == 0

    with pytest.raises(TypeError):
        del PropertyTest.PublicStaticProperty

    with pytest.raises(TypeError):
        del PropertyTest().PublicStaticProperty


def test_protected_instance_property():
    """A protected instance property can be read and written, not deleted."""
    obj = PropertyTest()

    assert obj.ProtectedProperty == 0
    obj.ProtectedProperty = 1
    assert obj.ProtectedProperty == 1

    with pytest.raises(TypeError):
        del PropertyTest().ProtectedProperty


def test_protected_static_property():
    """A protected static property is shared between class and instances."""
    obj = PropertyTest()

    assert PropertyTest.ProtectedStaticProperty == 0
    PropertyTest.ProtectedStaticProperty = 1
    assert PropertyTest.ProtectedStaticProperty == 1

    assert obj.ProtectedStaticProperty == 1
    obj.ProtectedStaticProperty = 0
    assert obj.ProtectedStaticProperty == 0

    with pytest.raises(TypeError):
        del PropertyTest.ProtectedStaticProperty

    with pytest.raises(TypeError):
        del PropertyTest().ProtectedStaticProperty


def test_internal_property():
    """Internal properties are not reachable from Python."""
    with pytest.raises(AttributeError):
        _ = PropertyTest().InternalProperty

    with pytest.raises(AttributeError):
        _ = PropertyTest().InternalStaticProperty

    with pytest.raises(AttributeError):
        _ = PropertyTest.InternalStaticProperty


def test_private_property():
    """Private properties are not reachable from Python."""
    with pytest.raises(AttributeError):
        _ = PropertyTest().PrivateProperty

    with pytest.raises(AttributeError):
        _ = PropertyTest().PrivateStaticProperty

    with pytest.raises(AttributeError):
        _ = PropertyTest.PrivateStaticProperty


def test_property_descriptor_get_set():
    """Setting a property goes through the descriptor, not around it."""
    # This test ensures that setting an attribute implemented with
    # a descriptor actually goes through the descriptor (rather than
    # silently replacing the descriptor in the instance or type dict.
    obj = PropertyTest()

    assert PropertyTest.PublicStaticProperty == 0
    assert obj.PublicStaticProperty == 0

    descriptor = PropertyTest.__dict__['PublicStaticProperty']
    assert type(descriptor) is not int

    obj.PublicStaticProperty = 0
    descriptor = PropertyTest.__dict__['PublicStaticProperty']
    assert type(descriptor) is not int

    PropertyTest.PublicStaticProperty = 0
    descriptor = PropertyTest.__dict__['PublicStaticProperty']
    assert type(descriptor) is not int


def test_property_descriptor_wrong_type():
    """Assigning a value of the wrong type raises TypeError."""
    with pytest.raises(TypeError):
        obj = PropertyTest()
        obj.PublicProperty = "spam"


def test_property_descriptor_abuse():
    """Calling the descriptor protocol with bogus arguments raises."""
    desc = PropertyTest.__dict__['PublicProperty']

    with pytest.raises(TypeError):
        desc.__get__(0, 0)

    with pytest.raises(TypeError):
        desc.__set__(0, 0)


def test_interface_property():
    """Properties remain usable through an interface reference.

    Added after a bug report that an IsAbstract check was inappropriate
    and prevented use of properties when only the interface is known."""
    from System.Collections import Hashtable, ICollection

    mapping = Hashtable()
    coll = ICollection(mapping)
    assert coll.Count == 0
class RequestConnectionError(Exception):
    """Error type for failures to establish or complete a request connection."""


class ReferralError(Exception):
    """Error type for failures while handling a referral."""


class DataRegistryCaseUpdateError(Exception):
    """Error type for failures while updating a data-registry case."""
# The following comments couldn't be translated into the new config version:
# services

import FWCore.ParameterSet.Config as cms

process = cms.Process("EcalFullValid")

# Message logging service.
process.load("FWCore.MessageLogger.MessageLogger_cfi")

# Magnetic field configuration.
process.load("Configuration.StandardSequences.MagneticField_cff")

# ECAL-only geometry and calorimeter setup.
process.load("Geometry.EcalCommonData.EcalOnly_cfi")
process.load("Geometry.CaloEventSetup.CaloGeometry_cff")
process.load("Geometry.CaloEventSetup.EcalTrigTowerConstituents_cfi")
process.load("Geometry.EcalMapping.EcalMapping_cfi")
process.load("Geometry.EcalMapping.EcalMappingRecord_cfi")

# Data-quality-monitoring services.
process.load("DQMServices.Core.DQM_cfg")

# Validation sequences for ECAL sim hits, digis and rec hits.
process.load("Validation.EcalHits.ecalSimHitsValidationSequence_cff")
process.load("Validation.EcalDigis.ecalDigisValidationSequence_cff")
process.load("Validation.EcalRecHits.ecalRecHitsValidationSequence_cff")

process.maxEvents = cms.untracked.PSet(input=cms.untracked.int32(200))
process.source = cms.Source("PoolSource",
                            fileNames=cms.untracked.vstring('file:hits.root'))

process.Timing = cms.Service("Timing")
process.SimpleMemoryCheck = cms.Service("SimpleMemoryCheck")

# Wrap each validation sequence and chain them into a single path.
process.simhits = cms.Sequence(process.ecalSimHitsValidationSequence)
process.digis = cms.Sequence(process.ecalDigisValidationSequence)
process.rechits = cms.Sequence(process.ecalRecHitsValidationSequence)
process.p1 = cms.Path(process.simhits * process.digis * process.rechits)

process.DQM.collectorHost = ''
# -*- coding: utf-8 -*- <import_from_future_stmt> division print_function<import_from_stmt>keras.layers Dense Merge Dropout RepeatVector<import_from_stmt>keras.layers.embeddings Embedding<import_from_stmt>keras.layers.recurrent LSTM<import_from_stmt>keras.models Sequential<import_stmt>os<import_stmt>babi<line_sep>BABI_DIR="../data/babi_data/tasks_1-20_v1-2/en"<line_sep>TASK_NBR=1<line_sep>EMBED_HIDDEN_SIZE=50<line_sep>BATCH_SIZE=32<line_sep>NBR_EPOCHS=40<line_sep>train_file,test_file=babi.get_files_for_task(TASK_NBR BABI_DIR)<line_sep>data_train=babi.get_stories(os.path.join(BABI_DIR train_file))<line_sep>data_test=babi.get_stories(os.path.join(BABI_DIR test_file))<line_sep>word2idx=babi.build_vocab([data_train data_test])<line_sep>vocab_size=len(word2idx)+1<line_sep>print("vocab_size=" vocab_size)<line_sep>story_maxlen,question_maxlen=babi.get_maxlens([data_train data_test])<line_sep>print("story_maxlen=" story_maxlen)<line_sep>print("question_maxlen=" question_maxlen)<line_sep>Xs_train,Xq_train,Y_train=babi.vectorize(data_train word2idx story_maxlen question_maxlen)<line_sep>Xs_test,Xq_test,Y_test=babi.vectorize(data_test word2idx story_maxlen question_maxlen)<line_sep>print(Xs_train.shape Xq_train.shape Y_train.shape)<line_sep>print(Xs_test.shape Xq_test.shape Y_test.shape)<line_sep># define model # generate embeddings for stories story_rnn=Sequential()<line_sep>story_rnn.add(Embedding(vocab_size EMBED_HIDDEN_SIZE input_length=story_maxlen))<line_sep>story_rnn.add(Dropout(0.3))<line_sep># generate embeddings for question and make adaptable to story question_rnn=Sequential()<line_sep>question_rnn.add(Embedding(vocab_size EMBED_HIDDEN_SIZE input_length=question_maxlen))<line_sep>question_rnn.add(Dropout(0.3))<line_sep>question_rnn.add(LSTM(EMBED_HIDDEN_SIZE return_sequences=<false>))<line_sep>question_rnn.add(RepeatVector(story_maxlen))<line_sep># merge the two model=Sequential()<line_sep>model.add(Merge([story_rnn question_rnn] 
mode="sum"))<line_sep>model.add(LSTM(EMBED_HIDDEN_SIZE return_sequences=<false>))<line_sep>model.add(Dropout(0.3))<line_sep>model.add(Dense(vocab_size activation="softmax"))<line_sep>model.compile(optimizer="adam" loss="categorical_crossentropy" metrics=["accuracy"])<line_sep>print("Training...")<line_sep>model.fit([Xs_train Xq_train] Y_train batch_size=BATCH_SIZE nb_epoch=NBR_EPOCHS validation_split=0.05)<line_sep>loss,acc=model.evaluate([Xs_test Xq_test] Y_test batch_size=BATCH_SIZE)<line_sep>print()<line_sep>print("Test loss/accuracy = {:.4f}, {:.4f}".format(loss acc))<line_sep>
# Copyright (c) 2019-present, Facebook, Inc. # All rights reserved. # # This source code is licensed under the license found in the # LICENSE file in the root directory of this source tree. # <import_stmt>math<import_stmt>os<import_stmt>sys<import_from_stmt>logging getLogger<import_stmt>torch<import_from_stmt>.pretrain load_embeddings<line_sep># , TRANSFORMER_LAYER_PARAMS <import_from_stmt>.transformer DECODER_ONLY_PARAMS TransformerModel Classifier<import_from_stmt>..data.dictionary UNK_WORD<line_sep>logger=getLogger()<def_stmt>check_model_params params<block_start>""" Check models parameters. """<line_sep># masked language modeling task parameters <assert_stmt>params.bptt<ge>1<assert_stmt>0<le>params.word_pred<l>1<assert_stmt>0<le>params.sample_alpha<l>1<line_sep>s=params.word_mask_keep_rand.split(",")<assert_stmt>len(s)<eq>3<line_sep>s=[float(x)<for>x s]<assert_stmt>all([0<le>x<le>1<for>x s])<and>sum(s)<eq>1<line_sep>params.word_mask=s[0]<line_sep>params.word_keep=s[1]<line_sep>params.word_rand=s[2]<if_stmt>params.mask_length<eq>""<block_start>params.mask_length=<none><line_sep>params.mask_length_dist=<none><block_end><elif_stmt>params.mask_length<eq>"poisson"<block_start><assert_stmt>(params.poisson_lambda<is><not><none>) "poisson_lambda is None, it should be set when using poisson mask_length"<line_sep>_lambda=params.poisson_lambda<line_sep>lambda_to_the_k=1<line_sep>e_to_the_minus_lambda=math.exp(-_lambda)<line_sep>k_factorial=1<line_sep>ps=[]<for_stmt>k range(0 
128)<block_start>ps.append(e_to_the_minus_lambda<times>lambda_to_the_k/k_factorial)<line_sep>lambda_to_the_k<augmul>_lambda<line_sep>k_factorial<augmul>k+1<if_stmt>ps[-1]<l>0.0000001<block_start><break><block_end><block_end>ps=torch.FloatTensor(ps)<line_sep>params.mask_length_dist_probas=ps<line_sep>params.mask_length_dist=torch.distributions.Categorical(ps)<block_end><else_stmt><block_start>params.mask_length=int(params.mask_length)<line_sep>ps=torch.FloatTensor(params.mask_length+1).fill_(0.0)<line_sep>ps[params.mask_length]=1<line_sep>params.mask_length_dist=torch.distributions.Categorical(ps)<block_end># input sentence noise for DAE <if_stmt>len(params.ae_steps)<eq>0<block_start><assert_stmt>params.word_shuffle<eq>0<assert_stmt>params.word_dropout<eq>0<assert_stmt>params.word_blank<eq>0<block_end><else_stmt><block_start><assert_stmt>params.word_shuffle<eq>0<or>params.word_shuffle<g>1<assert_stmt>0<le>params.word_dropout<l>1<assert_stmt>0<le>params.word_blank<l>1<block_end># model dimensions <if_stmt>params.emb_dim_encoder<eq>0<and>params.emb_dim_decoder<eq>0<block_start><assert_stmt>params.emb_dim<g>0<line_sep>params.emb_dim_encoder=params.emb_dim<line_sep>params.emb_dim_decoder=params.emb_dim<block_end><else_stmt><block_start><assert_stmt>params.emb_dim<eq>0<assert_stmt>params.emb_dim_encoder<g>0<and>params.emb_dim_decoder<g>0<if_stmt>params.emb_dim_encoder<eq>params.emb_dim_decoder<block_start>params.emb_dim=params.emb_dim_decoder<block_end><else_stmt><block_start><assert_stmt>params.reload_emb<eq>"" ("Pre-trained embeddings are not supported when the embedding size of the "<concat>"encoder and the decoder do not match 
")<block_end><block_end><assert_stmt>params.emb_dim_encoder%params.n_heads<eq>0<assert_stmt>params.emb_dim_decoder%params.n_heads<eq>0<if_stmt>params.n_layers_encoder<eq>0<and>params.n_layers_decoder<eq>0<block_start><assert_stmt>params.n_layers<g>0<line_sep>params.n_layers_encoder=params.n_layers<line_sep>params.n_layers_decoder=params.n_layers<block_end><else_stmt><block_start><assert_stmt>params.n_layers<eq>0<block_end><assert_stmt>params.n_layers_encoder<g>0<and>params.n_layers_decoder<g>0<line_sep># reload pretrained word embeddings <if_stmt>params.reload_emb<ne>""<block_start><assert_stmt>os.path.isfile(params.reload_emb)<block_end># reload a pretrained model <if_stmt>params.reload_model<ne>""<block_start><if_stmt>params.encoder_only<block_start><assert_stmt>os.path.isfile(params.reload_model)<block_end><else_stmt><block_start>s=params.reload_model.split(",")<assert_stmt>len(s)<eq>2<assert_stmt>all([x<eq>""<or>os.path.isfile(x)<for>x s]) [x<for>x s<if><not>os.path.isfile(x)]<block_end><if_stmt>params.use_classifier<and>params.reload_classifier<eq>""<block_start>params.reload_classifier=params.reload_model<block_end><block_end><assert_stmt><not>(params.beam_size<g>1<and>params.number_samples<g>1) "Cannot sample when already doing beam search"<assert_stmt>(params.eval_temperature<is><none>)<eq>(params.number_samples<le>1) "Eval temperature should be set if and only if taking several samples at eval time"<block_end><def_stmt>set_pretrain_emb model dico word2id embeddings gpu<block_start>""" Pretrain word embeddings. 
"""<line_sep>n_found=0<with_stmt>torch.no_grad()<block_start><for_stmt>i range(len(dico))<block_start>idx=word2id.get(dico[i] <none>)<if_stmt>idx<is><none><block_start><continue><block_end>n_found<augadd>1<line_sep>model.embeddings.weight[i]=(embeddings[idx].cuda()<if>gpu<else>embeddings[idx])<line_sep>model.pred_layer.proj.weight[i]=(embeddings[idx].cuda()<if>gpu<else>embeddings[idx])<block_end><block_end>logger.info("Pretrained %i/%i words (%.3f%%)."%(n_found len(dico) 100.0<times>n_found/len(dico)))<block_end>@torch.no_grad()<def_stmt>build_model params dico gpu=<true><block_start>""" Build model. """<if_stmt>params.encoder_only# build <block_start>model=TransformerModel(params dico is_encoder=<true> with_output=<true>)<line_sep># reload pretrained word embeddings <if_stmt>params.reload_emb<ne>""<block_start>word2id,embeddings=load_embeddings(params.reload_emb params)<line_sep>set_pretrain_emb(model dico word2id embeddings gpu)<block_end># reload a pretrained model <if_stmt>params.reload_model<ne>""<block_start>logger.info("============ Model Reloading")<line_sep>logger.info("Reloading model from %s ..."%params.reload_model)<line_sep>reload_transformer(params params.reload_model dico model "model" gpu)<block_end>logger.info("Model: {}".format(model))<line_sep>logger.info("Number of parameters (model): %i"%sum([p.numel()<for>p model.parameters()<if>p.requires_grad]))<line_sep>logger.info("")<line_sep><return>[model.cuda()<if>gpu<else>model]<block_end><else_stmt># build # TODO: only output when necessary - len(params.clm_steps + params.mlm_steps) > 0 <block_start>encoder=TransformerModel(params dico is_encoder=<true> with_output=<true>)<if_stmt>params.separate_decoders<block_start>decoders=[TransformerModel(params dico is_encoder=<false> with_output=<true>)<for>_ params.lang2id.values()]<block_end><else_stmt><block_start>decoders=[TransformerModel(params dico is_encoder=<false> with_output=<true>)]<block_end><for_stmt>layer 
range(params.n_layers_decoder)<block_start><if_stmt>layer<le>params.n_share_dec-1<block_start><assert_stmt>params.amp<eq>-1 "sharing layers is not supported with AMP"<line_sep>logger.info("Sharing decoder attention parameters for layer %i"%layer)<for_stmt>i range(1 len(decoders))<block_start>decoders[i].attentions[layer]=decoders[0].attentions[layer]<block_end><block_end><block_end># reload pretrained word embeddings <if_stmt>params.reload_emb<ne>""<block_start>word2id,embeddings=load_embeddings(params.reload_emb params)<line_sep>set_pretrain_emb(encoder dico word2id embeddings gpu)<for_stmt>decoder decoders<block_start>set_pretrain_emb(decoder dico word2id embeddings gpu)<block_end><block_end># reload a pretrained model <if_stmt>params.reload_model<ne>""<block_start>logger.info("============ Model Reloading")<line_sep>enc_path,dec_path=params.reload_model.split(",")<assert_stmt><not>(enc_path<eq>""<and>dec_path<eq>"")<line_sep># reload encoder <if_stmt>enc_path<ne>""<block_start>logger.info("Reloading encoder from %s ..."%enc_path)<line_sep>reload_transformer(params enc_path dico encoder "encoder" gpu)<block_end># reload decoders <if_stmt>dec_path<ne>""<block_start><for_stmt>dec decoders<block_start>logger.info("Reloading decoders from %s ..."%dec_path)<if_stmt>params.reload_encoder_for_decoder<block_start>reload_transformer(params dec_path dico dec "encoder" gpu)<block_end><else_stmt><block_start>reload_transformer(params dec_path dico dec "decoder" gpu)<block_end><block_end><block_end><block_end>logger.debug("Encoder: {}".format(encoder))<line_sep>logger.debug("Decoder: {}".format(decoders))<line_sep>logger.info("Number of parameters (encoder): %i"%sum([p.numel()<for>p encoder.parameters()<if>p.requires_grad]))<line_sep>logger.info("Number of parameters (decoders): %i"%sum([p.numel()<for>p decoders[0].parameters()<if>p.requires_grad]))<line_sep>logger.info(f"Number of decoders: 
{len(decoders)}")<line_sep>logger.info("")<line_sep><return>([encoder.cuda()<if>gpu<else>encoder] [dec.cuda()<if>gpu<else>dec<for>dec decoders] )<block_end><block_end>@torch.no_grad()<def_stmt>build_classifier params<block_start>""" Build classifier. """<line_sep># build classifier=Classifier(params)<line_sep># reload a pretrained model <if_stmt>params.reload_classifier<ne>""<block_start>logger.info("Reloading classifier from %s ..."%params.reload_classifier)<line_sep>reloaded=torch.load(params.reload_classifier map_location=<lambda>storage loc:storage.cuda(params.local_rank) )<if_stmt>"classifier"<not><in>reloaded<block_start>logger.warning(f"There is no classifier in {params.reload_classifier}. The classifier weights will be initialized randomly")<block_end><else_stmt><block_start>reloaded=reloaded["classifier"]<if_stmt>all([k.startswith("module.")<for>k reloaded.keys()])<block_start>reloaded={k[len("module."):]:v<for>k,v reloaded.items()}<block_end>classifier.load_state_dict(reloaded)<block_end><block_end>logger.info("Classifier: {}".format(classifier))<line_sep><return>[classifier.cuda()]<block_end><def_stmt>reload_transformer params path dico model model_type gpu=<true><block_start>""" Reload a transformer state dict to current model: clean 'module.' from state dict, match the word embeddings comparing dicos, match lang embedding with params lang mapping, extend or truncate position embeddings when size dont match, load state dict. 
"""<line_sep>reloaded=torch.load(path map_location=<lambda>storage loc:storage.cuda(params.local_rank)<if>gpu<else>storage.cpu() )<line_sep>clean_model_state_dict(reloaded model_type)<line_sep>reload_word_embeddings(reloaded dico model_type)<line_sep>reload_lang_embeddings(reloaded params model_type)<line_sep>reload_position_embeddings(reloaded model model_type)<line_sep># if the model is a decoder <if_stmt>hasattr(model "encoder_attn")<block_start><for_stmt>i range(params.n_layers_decoder)<block_start><for_stmt>name DECODER_ONLY_PARAMS<block_start>weight_name=name%i<if_stmt>weight_name<not><in>reloaded[model_type]<block_start>logger.warning("Parameter %s not found."%(weight_name))<line_sep>encoder_attn_name=weight_name.replace("encoder_attn" "attentions")<if_stmt>(getattr(params "reload_encoder_attn_on_decoder" <false>)<and>"encoder_attn"<in>weight_name<and>encoder_attn_name<in>reloaded[model_type])<block_start>logger.warning(f"Reloading {encoder_attn_name} instead")<line_sep>reloaded[model_type][weight_name]=(reloaded[model_type][encoder_attn_name].clone().detach())<block_end><else_stmt><block_start>reloaded[model_type][weight_name]=model.state_dict()[weight_name]<block_end><block_end><block_end><block_end><block_end>model.load_state_dict(reloaded[model_type] strict=<not>params.spans_emb_encoder)<block_end><def_stmt>clean_model_state_dict reloaded model_type<block_start>""" remove prefix module from the keys of the model state dict. """<line_sep>model_reloaded=reloaded[model_type<if>model_type<in>reloaded<else>"model"]<if_stmt>all([k.startswith("module.")<for>k model_reloaded.keys()])<block_start>model_reloaded={k[len("module."):]:v<for>k,v model_reloaded.items()}<block_end>reloaded[model_type]=model_reloaded<block_end><def_stmt>reload_word_embeddings reloaded dico model_type<block_start>""" Check when reloading a model that dictionary are the same. If not, do a word embedding mapping if possible. 
"""<line_sep>reloaded_word2id=reloaded["dico_word2id"]<line_sep>reloaded_id2word=reloaded["dico_id2word"]<assert_stmt>len(reloaded_word2id)<eq>len(reloaded_id2word)<assert_stmt>all(reloaded_id2word[v]<eq>k<for>k,v reloaded_word2id.items())<line_sep>matching_indices=[]<line_sep>word_not_found=[]<for_stmt>idx,word dico.id2word.items()<block_start><if_stmt>word<not><in>reloaded_word2id<block_start>word_not_found<augadd>[word]<line_sep>matching_indices<augadd>[reloaded_word2id[UNK_WORD]]<block_end><else_stmt><block_start>matching_indices<augadd>[reloaded_word2id[word]]<block_end><block_end><assert_stmt>len(matching_indices)<eq>len(dico)<if_stmt>len(word_not_found)<g>0<block_start>logger.warning(f"When reloading word embeddings, could not find embeddings for {len(word_not_found)} words: {word_not_found[0:5]+['...']+word_not_found[-5:]}... Initializing them to < unk >.")<block_end>reloaded[model_type]["embeddings.weight"]=torch.cat([reloaded[model_type]["embeddings.weight"][index:index+1]<for>index matching_indices] dim=0 )<if_stmt>"pred_layer.proj.weight"<in>reloaded[model_type]<block_start>first_line=reloaded[model_type]["pred_layer.proj.weight"][0:1]<line_sep>embedding_size=reloaded[model_type]["pred_layer.proj.weight"].shape[1]<line_sep>reloaded[model_type]["pred_layer.proj.weight"]=torch.cat([reloaded[model_type]["pred_layer.proj.weight"][index:index+1]<if>index<is><not><none><else>torch.normal(torch.zeros_like(first_line) torch.ones_like(first_line<times>(embedding_size<power>(-0.5))) )<for>index matching_indices] dim=0 )<line_sep>reloaded[model_type]["pred_layer.proj.bias"]=torch.cat([reloaded[model_type]["pred_layer.proj.bias"][index].view(1)<if>index<is><not><none><else>torch.rand_like(reloaded[model_type]["pred_layer.proj.bias"][0].view(1))<for>index matching_indices])<block_end><block_end><def_stmt>reload_lang_embeddings reloaded params model_type<block_start>""" When pretrained models has not been trained with the same languages: change lang embedding state 
dict. Otherwise, keep as it is. """<line_sep>model_reloaded=reloaded[model_type]<line_sep>reloaded_params=reloaded["params"]<if_stmt>params.lgs_mapping<eq>""<block_start>lang_mapping={}<block_end><else_stmt><block_start>lang_mapping={mapping.split(":")[0]:mapping.split(":")[1]<for>mapping params.lgs_mapping.split(",")}<block_end>langs_reloaded=reloaded_params["lang2id"]<line_sep>langs_reloaded_id2lang=reloaded_params["id2lang"]<line_sep>indices=[]<for_stmt>lang [l<for>i,l sorted(params.id2lang.items())]<block_start><if_stmt>lang<in>lang_mapping<block_start>lang_=lang_mapping[lang]<block_end><else_stmt><block_start>lang_=lang<block_end>index=[id<for>l,id langs_reloaded.items()<if>l<eq>lang_]<if_stmt>len(index)<eq>0<block_start>logger.warning(f"No match found for lang {lang} {lang_} in {langs_reloaded.keys()}. Initializing randomly.")<line_sep>indices.append(<none>)<line_sep><continue><block_end><else_stmt><block_start><assert_stmt>(len(index)<eq>1) f"matching lang found: {index} in reloaded model for lang {lang} in {langs_reloaded.keys()}"<line_sep>logger.warning(f"Lang {lang} matched to pretrained {langs_reloaded_id2lang[index[0]]} lang embedding.")<block_end>indices.append(index[0])<block_end>first_line=model_reloaded["lang_embeddings.weight"][0:1]<line_sep>embedding_size=model_reloaded["lang_embeddings.weight"].shape[1]<line_sep>model_reloaded["lang_embeddings.weight"]=torch.cat([model_reloaded["lang_embeddings.weight"][index:index+1]<if>index<is><not><none><else>torch.normal(torch.zeros_like(first_line) torch.ones_like(first_line<times>(embedding_size<power>(-0.5))) )<for>index indices] dim=0 )<line_sep>reloaded[model_type]=model_reloaded<block_end><def_stmt>reload_position_embeddings reloaded encoder model_type<block_start>""" When pretrained models has not been trained with the same size of position embedding: remove unused or add extra positions. 
"""<line_sep>model_reloaded=reloaded[model_type]<line_sep>current_size=encoder.position_embeddings.weight.size()[0]<line_sep>reloaded_size=model_reloaded["position_embeddings.weight"].size()[0]<if_stmt>current_size<eq>reloaded_size<block_start><return>model_reloaded<block_end><elif_stmt>current_size<l>reloaded_size<block_start>logger.warning(f"The size of position embeddings in current model is {current_size}, the size of reloaded is {reloaded_size}. need to truncate the reloaded position embeddings.")<line_sep>model_reloaded["position_embeddings.weight"]=model_reloaded["position_embeddings.weight"][:current_size :]<block_end><else_stmt><block_start>logger.warning(f"The size of position embeddings in current model is {current_size}, the size of reloaded is {reloaded_size}. need to repeat last positions {current_size-reloaded_size} times.")<line_sep>model_reloaded["position_embeddings.weight"]=torch.cat([model_reloaded["position_embeddings.weight"] model_reloaded["position_embeddings.weight"][-1 :].repeat(current_size-reloaded_size 1) ] dim=0 )<block_end>reloaded[model_type]=model_reloaded<block_end>
import requests
from lxml import etree
from db import Db


class Classify(object):
    """Scrape the homepage category data (爬取首页分类数据)."""

    def reptileIndexClassify(self):
        """Fetch index-page category links and persist them to MySQL.

        Prints progress markers before/after the attempt; any failure is
        caught, reported, and swallowed (best-effort scraper behavior).
        """
        print('爬取首页分类数据:开始:(classify/reptileIndexClassify)...')
        target_url = 'https://www.biquge5200.com/modules/article/search.php'
        try:
            # r = requests.get(target_url)
            # root = etree.HTML(r.text)
            # classifies = root.xpath('//div[@class="nav"]//li[position()>2]')
            # arr1 = []
            # for classify in classifies:
            #     path = classify.xpath('a/@href')[0].split('/')[-2]
            #     desc = classify.xpath('a/text()')[0]
            #     arr1.append((path, desc))
            db = Db()
            try:
                # db.insertMany('insert ignore into gysw_classify (`path`, `desc`) values (%s, %s)', tuple(arr1))
                db.insertOne('insert ignore into gysw_classify(`path`, `desc`) values ("xxx2", "yyy2")')
            finally:
                # BUGFIX: originally db.close() only ran on success, leaking
                # the connection whenever the insert raised.
                db.close()
            print('爬取首页分类数据:成功:(classify/reptileIndexClassify)...')
        except Exception as e:
            print('爬取首页分类数据:失败:(classify/reptileIndexClassify)...')
            print(e)


# def classify():
#     target_url = 'https://www.biquge5200.com/modules/article/search.php'
#     try:
#         r = requests.get(target_url)
#         root = etree.HTML(r.text)
#         classifies = root.xpath('//div[@class="nav"]//li[position()>2]')
#         arr1 = []
#         for classify in classifies:
#             path = classify.xpath('a/@href')[0].split('/')[-2]
#             desc = classify.xpath('a/text()')[0]
#             arr1.append({'path': path, 'desc': desc})
#         # save to DB
#         db = Db()
#         arr2 = db.selectAll('select `path`, `desc` from gysw_classify')
#         # keep only new entries
#         arr3 = [i for i in arr1 if i not in arr2]
#         arr4 = []
#         for item in arr3:
#             arr4.append(tuple(item.values()))
#         db.insertMany('insert into gysw_classify (`path`, `desc`) values (%s, %s)', tuple(arr4))
#         db.close()
#         print('操作成功')
#     except Exception as e:
#         print(e)
#         print('操作失败')


if __name__ == '__main__':
    classify = Classify()
    classify.reptileIndexClassify()
from ..utils.constants import *
from ..utils.vector3 import vec3, rgb, extract
from functools import reduce as reduce
from ..ray import Ray, get_raycolor
from .. import lights
import numpy as np
from . import Material
from ..textures import *


class Emissive(Material):
    """Material that emits its texture colour directly, ignoring lighting."""

    def __init__(self, color, **kwargs):
        # Accept either a flat colour (vec3) or a full texture; wrap flat
        # colours so get_color() can always delegate to a texture object.
        if isinstance(color, vec3):
            self.texture_color = solid_color(color)
        elif isinstance(color, texture):
            self.texture_color = color
        super().__init__(**kwargs)

    def get_color(self, scene, ray, hit):
        # Emission depends only on the hit point, not on scene lighting
        # or the incoming ray.
        return self.texture_color.get_color(hit)
class PartOfSpeech:
    """Mapping between coarse POS categories and Penn-Treebank constituent tags."""

    NOUN = 'noun'
    VERB = 'verb'
    ADJECTIVE = 'adjective'
    ADVERB = 'adverb'

    # coarse WordNet-style tag -> constituent tags
    pos2con = {
        'n': ['NN', 'NNS', 'NNP', 'NNPS',  # from WordNet
              'NP',  # from PPDB
              ],
        'v': ['VB', 'VBD', 'VBG', 'VBN', 'VBZ',  # from WordNet
              'VBP',  # from PPDB
              ],
        'a': ['JJ', 'JJR', 'JJS', 'IN'],
        's': ['JJ', 'JJR', 'JJS', 'IN'],  # Adjective Satellite
        'r': ['RB', 'RBR', 'RBS'],  # Adverb
    }

    # Inverse index and flat tag list, built once at class-creation time.
    # Note: `poses` keeps duplicates because 'a' and 's' share tags.
    con2pos = {}
    poses = []
    for key, values in pos2con.items():
        poses.extend(values)
        for value in values:
            con2pos.setdefault(value, []).append(key)

    @staticmethod
    def pos2constituent(pos):
        """Return constituent tags for a coarse POS tag ([] when unknown)."""
        return PartOfSpeech.pos2con.get(pos, [])

    @staticmethod
    def constituent2pos(con):
        """Return coarse POS tags for a constituent tag ([] when unknown)."""
        return PartOfSpeech.con2pos.get(con, [])

    @staticmethod
    def get_pos():
        """Return every known constituent tag (duplicates preserved)."""
        return PartOfSpeech.poses
from typing import Callable

import pandas as pd
import pytest
import requests

from pybaseball.team_batting import team_batting


@pytest.fixture(name="sample_html")
def _sample_html(get_data_file_contents: Callable) -> str:
    """Raw HTML captured from the team-batting page."""
    return get_data_file_contents('team_batting.html')


@pytest.fixture(name="sample_processed_result")
def _sample_processed_result(get_data_file_dataframe: Callable) -> pd.DataFrame:
    """Expected parsed result matching the HTML fixture."""
    return get_data_file_dataframe('team_batting.csv')


def test_team_batting(response_get_monkeypatch: Callable, sample_html: str,
                      sample_processed_result: pd.DataFrame):
    """team_batting(season) parses the fetched page into the expected frame."""
    season = 2019

    # Route the HTTP GET to the canned fixture instead of the network.
    response_get_monkeypatch(sample_html)

    team_batting_result = team_batting(season).reset_index(drop=True)

    pd.testing.assert_frame_equal(
        team_batting_result, sample_processed_result, check_dtype=False
    )
def superTuple(name, attributes):
    """Creates a Super Tuple class.

    Builds and returns a new tuple subclass called *name* whose elements
    are also readable as named, read-only properties — one per entry in
    *attributes*, in order.
    """
    nargs = len(attributes)

    # __new__ enforces the exact arity before delegating to tuple.
    def _new_(cls, *args):
        if len(args) != nargs:
            raise TypeError("%s takes %d arguments (%d given)." % (
                cls.__name__, nargs, len(args)))
        return tuple.__new__(cls, args)

    # __repr__ renders as Name<elem0, elem1, ...>.
    def _repr_(self):
        return "%s<%s>" % (self.__class__.__name__,
                           ", ".join(repr(elem) for elem in self))

    # One property per attribute, each bound to its tuple index.
    def getter(i):
        return lambda self: self.__getitem__(i)

    dct = {
        "__new__": staticmethod(_new_),
        "__repr__": _repr_,
        "__slots__": [],  # no per-instance __dict__
    }
    for index, attribute in enumerate(attributes):
        dct[attribute] = property(getter(index))

    return type(name, (tuple,), dct)
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# Name:         examples.py
# Purpose:      music21 class which allows running of test cases
# Authors:      <NAME>
#
# Copyright:    Copyright © 2010-2011 <NAME> and the music21 Project
# License:      BSD, see license.txt
# ------------------------------------------------------------------------------
'''
Each of the example methods in this module provides a figured bass line as a
:class:`~music21.figuredBass.realizer.FiguredBassLine` instance.
These can be realized by calling
:meth:`~music21.figuredBass.realizer.FiguredBassLine.realize`, which takes in
an optional :class:`~music21.figuredBass.rules.Rules` object.
The result is a :class:`~music21.figuredBass.realizer.Realization` object
which can generate realizations as instances of :class:`~music21.stream.Score`.
These realizations can then be displayed in external software such as
MuseScore or Finale by calling :meth:`~music21.base.Music21Object.show`.
'''
import copy
import unittest

from music21.figuredBass import realizer
from music21.figuredBass import rules

# ------------------------------------------------------------------------------


def exampleA():
    '''
    This example was a homework assignment for 21M.302: Harmony & Counterpoint II
    at MIT in the fall of 2010, taught by <NAME> of the MIT Music Program.

    >>> from music21.figuredBass import examples
    >>> fbLine = examples.exampleA()
    >>> #_DOCS_SHOW fbLine.generateBassLine().show()

    .. image:: images/figuredBass/fbExamples_bassLineA.*
        :width: 700

    The following is a realization of fbLine in four parts using the default
    rules set.  The soprano part is limited to stepwise motion, and the
    alto and tenor parts are limited to motions within a perfect octave.

    >>> from music21.figuredBass import rules
    >>> fbRules = rules.Rules()
    >>> fbRules.partMovementLimits = [(1, 2), (2, 12), (3, 12)]
    >>> fbRealization1 = fbLine.realize(fbRules)
    >>> fbRealization1.getNumSolutions()
    360
    >>> #_DOCS_SHOW fbRealization1.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_sol1A.*
        :width: 700

    Now, the restriction on upper parts being within a perfect octave
    of each other is removed, and fbLine is realized again.

    >>> fbRules.upperPartsMaxSemitoneSeparation = None
    >>> fbRealization2 = fbLine.realize(fbRules)
    >>> fbRealization2.keyboardStyleOutput = False
    >>> fbRealization2.getNumSolutions()
    3713168
    >>> #_DOCS_SHOW fbRealization2.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_sol2A.*
        :width: 700
    '''
    from music21 import converter
    s = converter.parse("tinynotation: 3/2 C2 D2_6 E2_6 F2_6 C#2_b7,5,3 D2 "
                        "BB2_#6,5,3 C2_6 AA#2_7,5,#3 BB1_6,4 BB2_7,#5,#3 E1.",
                        makeNotation=False)
    return realizer.figuredBassFromStream(s)


def exampleD():
    '''
    This example was a homework assignment for 21M.302: Harmony & Counterpoint II
    at MIT in the fall of 2010, taught by <NAME> of the MIT Music Program.

    >>> from music21.figuredBass import examples
    >>> fbLine = examples.exampleD()
    >>> #_DOCS_SHOW fbLine.generateBassLine().show()

    .. image:: images/figuredBass/fbExamples_bassLineD.*
        :width: 700

    The following is a realization of fbLine in four parts using the default
    rules set.  The soprano part is limited to stepwise motion, and the
    alto and tenor parts are limited to motions within a perfect octave.

    >>> from music21.figuredBass import rules
    >>> fbRules = rules.Rules()
    >>> fbRules.partMovementLimits = [(1, 2), (2, 12), (3, 12)]
    >>> fbRealization1 = fbLine.realize(fbRules)
    >>> fbRealization1.getNumSolutions()
    1560
    >>> #_DOCS_SHOW fbRealization1.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_sol1D.*
        :width: 700

    Now, the restriction on voice overlap is lifted, which is common in keyboard-style
    figured bass, and fbLine is realized again.  Voice overlap can be seen in the
    fourth measure.

    >>> fbRules.forbidVoiceOverlap = False
    >>> fbRealization2 = fbLine.realize(fbRules)
    >>> fbRealization2.getNumSolutions()
    109006
    >>> #_DOCS_SHOW fbRealization2.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_sol2D.*
        :width: 700

    Now, the restriction on voice overlap is reset, but the restriction on the upper parts
    being within a perfect octave of each other is removed.  fbLine is realized again.

    >>> fbRules.forbidVoiceOverlap = True
    >>> fbRules.upperPartsMaxSemitoneSeparation = None
    >>> fbRealization3 = fbLine.realize(fbRules)
    >>> fbRealization3.getNumSolutions()
    29629539
    >>> fbRealization3.keyboardStyleOutput = False
    >>> #_DOCS_SHOW fbRealization3.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_sol3D.*
        :width: 700
    '''
    from music21 import converter
    from music21 import key
    s = converter.parse("tinynotation: 3/4 BB4 C#4_#6 D4_6 E2 E#4_7,5,#3 F#2_6,4 "
                        "F#4_5,#3 G2 E4_6 F#2_6,4 E4_#4,2 D2_6 EE4_7,5,#3 AA2.",
                        makeNotation=False)
    s.insert(0, key.Key('b'))
    return realizer.figuredBassFromStream(s)


def exampleB():
    '''
    This example was retrieved from page 114 of *The Music Theory Handbook*
    by <NAME>.

    >>> from music21.figuredBass import examples
    >>> fbLine = examples.exampleB()
    >>> #_DOCS_SHOW fbLine.generateBassLine().show()

    .. image:: images/figuredBass/fbExamples_bassLineB.*
        :width: 700

    First, fbLine is realized with the default rules set.

    >>> fbRealization1 = fbLine.realize()
    >>> fbRealization1.getNumSolutions()
    422
    >>> #_DOCS_SHOW fbRealization1.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_sol1B.*
        :width: 700

    Now, a Rules object is created, and the restriction that the chords
    need to be complete is lifted.  fbLine is realized once again.

    >>> from music21.figuredBass import rules
    >>> fbRules = rules.Rules()
    >>> fbRules.forbidIncompletePossibilities = False
    >>> fbRealization2 = fbLine.realize(fbRules)
    >>> fbRealization2.getNumSolutions()
    188974
    >>> #_DOCS_SHOW fbRealization2.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_sol2B.*
        :width: 700
    '''
    from music21 import converter
    from music21 import key
    s = converter.parse("tinynotation: 4/4 D4 A4_7,5,#3 B-4 F4_6 G4_6 AA4_7,5,#3 D2",
                        makeNotation=False)
    s.insert(0, key.Key('d'))
    return realizer.figuredBassFromStream(s)


def exampleC():
    '''
    This example was retrieved from page 114 of *The Music Theory Handbook*
    by <NAME>.

    >>> from music21.figuredBass import examples
    >>> fbLine = examples.exampleC()
    >>> #_DOCS_SHOW fbLine.generateBassLine().show()

    .. image:: images/figuredBass/fbExamples_bassLineC.*
        :width: 700

    First, fbLine is realized with the default rules set.

    >>> fbRealization1 = fbLine.realize()
    >>> fbRealization1.getNumSolutions()
    833
    >>> #_DOCS_SHOW fbRealization1.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_sol1C.*
        :width: 700

    Now, parallel fifths are allowed in realizations.  The image below
    shows one of them.  There is a parallel fifth between the bass and
    alto parts going from the half-diminished 6,5 (B,F#) to the
    dominant seventh (C#,G#) in the second measure.

    >>> from music21.figuredBass import rules
    >>> fbRules = rules.Rules()
    >>> fbRules.forbidParallelFifths = False
    >>> fbRealization2 = fbLine.realize(fbRules)
    >>> fbRealization2.getNumSolutions()
    2427
    >>> #_DOCS_SHOW fbRealization2.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_sol2C.*
        :width: 700
    '''
    from music21 import converter
    from music21 import key
    s = converter.parse("tinynotation: 4/4 FF#4 GG#4_#6 AA4_6 FF#4 BB4_6,5 C#4_7,5,#3 F#2",
                        makeNotation=False)
    s.insert(0, key.Key('f#'))
    return realizer.figuredBassFromStream(s)


def V43ResolutionExample():
    '''
    The dominant 4,3 can resolve to either the tonic 5,3 or tonic 6,3.  The proper resolution
    is dependent on the bass note of the tonic, and is determined in context, as shown in the
    following figured bass realization.

    >>> from music21.figuredBass import examples
    >>> fbLine = examples.V43ResolutionExample()
    >>> fbRealization = fbLine.realize()
    >>> #_DOCS_SHOW fbRealization.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_V43.*
        :width: 350
    '''
    from music21 import converter
    from music21 import key
    s = converter.parse("tinynotation: 4/4 D2 E2_4,3 D2_5,3 E2_4,3 F#1_6,3",
                        makeNotation=False)
    s.insert(0, key.Key('D'))
    return realizer.figuredBassFromStream(s)


def viio65ResolutionExample():
    '''
    For a fully diminished seventh chord resolving to the tonic, the resolution chord
    can contain either a doubled third (standard resolution) or a doubled tonic (alternate
    resolution), depending on whether the third of the diminished chord rises or falls.
    The user can control this in a Rules object by modifying
    :attr:`~music21.figuredBass.rules.Rules.doubledRootInDim7`.
    However, when resolving a diminished 6,5, the third is found in the bass and the
    proper resolution is determined in context, regardless of user preference.

    The following shows both cases involving a diminished 6,5.  The resolution of the
    first diminished chord has a doubled D, while that of the second has a doubled F#.
    Notice that the resolution of the first involves a diminished fifth (E, Bb) going
    to a perfect fifth (D, A).

    >>> from music21.figuredBass import examples
    >>> fbLine = examples.viio65ResolutionExample()
    >>> fbRealization = fbLine.realize()
    >>> #_DOCS_SHOW fbRealization.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_vii65.*
        :width: 700
    '''
    from music21 import converter
    from music21 import key
    s = converter.parse("tinyNotation: 4/4 D2 E2_6,b5 D2 E2_6,b5 F#1_6",
                        makeNotation=False)
    s.insert(0, key.Key('D'))
    return realizer.figuredBassFromStream(s)


def augmentedSixthResolutionExample():
    '''
    This example was retrieved from page 61 of *The Music Theory Handbook*
    by <NAME>.

    Italian (8,#6,3), French (#6,4,3), German (#6,5,3), and Swiss (#6,#4,3)
    augmented sixth resolutions to either the major dominant or the major/minor
    tonic 6,4 are supported.  The first four bars show the resolutions to the
    dominant in the order above, while the last bar shows the German augmented
    sixth resolving to the tonic.

    >>> from music21.figuredBass import examples
    >>> fbLine = examples.augmentedSixthResolutionExample()
    >>> fbRealization = fbLine.realize()
    >>> #_DOCS_SHOW fbRealization.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_a6.*
        :width: 700
    '''
    from music21 import converter
    from music21 import key
    s = converter.parse("tinynotation: 4/4 D4 BB-4_8,#6,3 AA2_# D4 BB-4_#6,4,3 "
                        "AA2_# D4 BB-4_#6,5,3 AA2_# D4 BB-4_#6,#4,3 AA2_# D4 "
                        "BB-4_#6,5,3 AA2_6,4",
                        makeNotation=False)
    s.insert(0, key.Key('d'))
    return realizer.figuredBassFromStream(s)


def italianA6ResolutionExample():
    '''
    The Italian augmented sixth chord (It+6) is the only augmented sixth chord
    to consist of only three pitch names, and when represented in four parts,
    the tonic is doubled.  The tonic can resolve up, down or stay the same,
    and in four parts, the two tonics always resolve differently, resulting
    in two equally acceptable resolutions.  An alternate approach to resolving
    the It+6 chord was taken, such that an It+6 chord could map internally to
    two different resolutions.  Every other special resolution in fbRealizer
    consists of a 1:1 mapping of special chords to resolutions.

    Here, the It+6 chord is resolving to the dominant, minor tonic, and major
    tonic, respectively.  In the dominant resolution shown, the tonics (D) are
    resolving inward, but they can resolve outward as well.  In the minor
    tonic resolution, the higher tonic is resolving up to F, and the lower
    tonic remains the same.  In the major tonic resolution, the higher tonic
    remains the same, while the lower tonic resolves up to the F#.

    >>> from music21.figuredBass import examples
    >>> from music21.figuredBass import rules
    >>> fbLine = examples.italianA6ResolutionExample()
    >>> fbRules = rules.Rules()
    >>> fbRules.upperPartsMaxSemitoneSeparation = None
    >>> fbRules.partMovementLimits.append([1, 4])
    >>> fbRealization = fbLine.realize(fbRules)
    >>> fbRealization.keyboardStyleOutput = False
    >>> #_DOCS_SHOW fbRealization.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_it+6.*
        :width: 700
    '''
    from music21 import converter
    from music21 import key
    s = converter.parse("tinynotation: D4 BB-4_#6,3 AA2_# D4 BB-4_#6,3 AA2_6,4 D4 BB-4_#6,3 AA2_#6,4",
                        makeNotation=False)
    s.insert(0, key.Key('d'))
    return realizer.figuredBassFromStream(s)


def twelveBarBlues():
    '''
    This is an progression in Bb major based on the twelve bar blues.  The
    progression used is:

        I  | IV | I  | I7
        IV | IV | I  | I7
        V7 | IV6 | I | I

    >>> from music21.figuredBass import examples
    >>> from music21.figuredBass import rules
    >>> bluesLine = examples.twelveBarBlues()
    >>> #_DOCS_SHOW bluesLine.generateBassLine().show()

    .. image:: images/figuredBass/fbExamples_bluesBassLine.*
        :width: 700

    >>> fbRules = rules.Rules()
    >>> fbRules.partMovementLimits = [(1, 4), (2, 12), (3, 12)]
    >>> fbRules.forbidVoiceOverlap = False
    >>> blRealization = bluesLine.realize(fbRules)
    >>> blRealization.getNumSolutions()
    2224978
    >>> #_DOCS_SHOW blRealization.generateRandomRealization().show()

    .. image:: images/figuredBass/fbExamples_twelveBarBlues.*
        :width: 700
    '''
    from music21 import converter
    from music21 import key
    s = converter.parse("tinynotation: BB-1 E-1 BB-1 BB-1_7 E-1 E-1 BB-1 BB-1_7 F1_7 G1_6 BB-1 BB-1",
                        makeNotation=False)
    s.insert(0, key.Key('B-'))
    return realizer.figuredBassFromStream(s)


# -----------------------------------------------------------------
# METHODS FOR GENERATION OF BLUES VAMPS

def generateBoogieVamp(blRealization=None, numRepeats=5):
    '''
    Turns whole notes in twelve bar blues bass line to blues boogie woogie bass line.
    Takes in numRepeats, which is the number of times to repeat the bass line.
    Also, takes in a realization of :meth:`~music21.figuredBass.examples.twelveBarBlues`.
    If none is provided, a default realization with
    :attr:`~music21.figuredBass.rules.Rules.forbidVoiceOverlap` set to False and
    :attr:`~music21.figuredBass.rules.Rules.partMovementLimits` set to
    [(1, 4), (2, 12), (3, 12)] is used.

    >>> from music21.figuredBass import examples
    >>> #_DOCS_SHOW examples.generateBoogieVamp(numRepeats=1).show()

    .. image:: images/figuredBass/fbExamples_boogieVamp.*
        :width: 700
    '''
    from music21 import converter
    from music21 import stream
    from music21 import interval
    if blRealization is None:
        bluesLine = twelveBarBlues()
        fbRules = rules.Rules()
        fbRules.partMovementLimits = [(1, 4), (2, 12), (3, 12)]
        fbRules.forbidVoiceOverlap = False
        blRealization = bluesLine.realize(fbRules)

    sampleScore = blRealization.generateRandomRealizations(numRepeats)
    boogieBassLine = converter.parse("tinynotation: BB-8. D16 F8. G16 A-8. G16 F8. D16",
                                     makeNotation=False)

    newBassLine = stream.Part()
    newBassLine.append(sampleScore[1][0])  # Time signature
    newBassLine.append(sampleScore[1][1])  # Key signature
    for n in sampleScore[1].notes:
        # transpose the boogie figure onto each bass note of the realization
        i = interval.notesToInterval(boogieBassLine[0], n)
        tp = boogieBassLine.transpose(i)
        for lyr in n.lyrics:
            tp.notes.first().addLyric(lyr.text)
        for m in tp.notes:
            newBassLine.append(m)

    newScore = stream.Score()
    newScore.insert(0, sampleScore[0])
    newScore.insert(newBassLine)
    return newScore


def generateTripletBlues(blRealization=None, numRepeats=5):  # 12/8
    '''
    Turns whole notes in twelve bar blues bass line to triplet blues bass line.
    Takes in numRepeats, which is the number of times to repeat the bass line.
    Also, takes in a realization of :meth:`~music21.figuredBass.examples.twelveBarBlues`.
    If none is provided, a default realization with
    :attr:`~music21.figuredBass.rules.Rules.forbidVoiceOverlap` set to False and
    :attr:`~music21.figuredBass.rules.Rules.partMovementLimits` set to
    [(1, 4), (2, 12), (3, 12)] is used.

    >>> from music21.figuredBass import examples
    >>> #_DOCS_SHOW examples.generateTripletBlues(numRepeats=1).show()

    .. image:: images/figuredBass/fbExamples_tripletBlues.*
        :width: 700
    '''
    from music21 import converter
    from music21 import stream
    from music21 import interval
    from music21 import meter
    if blRealization is None:
        bluesLine = twelveBarBlues()
        fbRules = rules.Rules()
        fbRules.partMovementLimits = [(1, 4), (2, 12), (3, 12)]
        fbRules.forbidVoiceOverlap = False
        blRealization = bluesLine.realize(fbRules)

    sampleScore = blRealization.generateRandomRealizations(numRepeats)
    tripletBassLine = converter.parse("tinynotation: BB-4 BB-8 D4 D8 F4 F8 A-8 G8 F8",
                                      makeNotation=False)

    newBassLine = stream.Part()
    for n in sampleScore[1].notes:
        # transpose the triplet figure onto each bass note of the realization
        i = interval.notesToInterval(tripletBassLine[0], n)
        tp = tripletBassLine.transpose(i)
        for lyr in n.lyrics:
            tp.notes.first().addLyric(lyr.text)
        for m in tp.notes:
            newBassLine.append(m)

    newTopLine = stream.Part()
    for sampleChord in sampleScore[0].notes:
        # stretch each upper-voice chord over a full 12/8 measure
        sampleChordCopy = copy.deepcopy(sampleChord)
        sampleChordCopy.quarterLength = 6.0
        newTopLine.append(sampleChordCopy)

    newScore = stream.Score()
    newScore.append(meter.TimeSignature("12/8"))  # Time signature
    newScore.append(sampleScore[1][1])  # Key signature
    newScore.insert(0, newTopLine)
    newScore.insert(0, newBassLine)
    return newScore


_DOC_ORDER = [exampleA, exampleB, exampleC, exampleD,
              V43ResolutionExample, viio65ResolutionExample,
              augmentedSixthResolutionExample, italianA6ResolutionExample,
              twelveBarBlues, generateBoogieVamp, generateTripletBlues]

# ------------------------------------------------------------------------------


class Test(unittest.TestCase):
    pass


if __name__ == '__main__':
    import music21
    music21.mainTest(Test)
import moment
import os
import pandas
import pyarrow as pa
import pyarrow.parquet as pq
import requests
from func_timeout import func_set_timeout, FunctionTimedOut
from pandas import DataFrame
from pathlib import Path
from pyspark.sql import SparkSession
from pyspark.sql.functions import col, lit
from pyspark.sql.types import StringType
from python_utils.src.spark_udfs import (
    get_confidence_based_h3_and_name_distance,
    get_h3_distance,
    get_string_distance,
)
from time import sleep

# Maximum H3 grid distance fed into the confidence UDFs below.
MAX_H3_DISTANCE = 500


class SearchScraper:
    """Get result for search strings"""

    @staticmethod
    @func_set_timeout(180)
    def send_query(batch, query_type):
        """Send one batch of queries to the local POI API.

        :param batch: list of query strings or POI ids to send as the JSON body
        :param query_type: 'search' hits the /search endpoint, anything else
            hits /poi-information
        :return: decoded JSON response, or None on any failure (best effort)
        """
        # noinspection PyBroadException
        try:
            host = os.getenv('GOOGLE_POI_API_HOST') or '127.0.0.1'
            # noinspection HttpUrlsUsage
            result = requests.request(
                method='get',
                url=f'http://{host}:3003/{"search" if query_type == "search" else "poi-information"}',
                json=batch,
            )
            return result.json() if result else None
        except Exception as e:
            # Deliberately swallow: a failed batch is logged and skipped so the
            # overall scrape keeps going.
            print(f'[{moment.now().format("YYYY-MM-DDTHH-mm-ss")}]: Search query failed: ', e)
            print(f'[{moment.now().format("YYYY-MM-DDTHH-mm-ss")}]: Continuing without batch.')
            return None

    @staticmethod
    def match_search_results(directory: str, file_name: str):
        """Match the queries that have been sent to the received results.

        Joins the query parquet file with the search-result parquet file,
        computes name/H3 distances and a confidence score, and writes a
        '*_matched' parquet file next to the results.
        """
        memory = os.getenv('SPARK_MEMORY') or '16g'
        spark = SparkSession.builder.appName('google-poi') \
            .config('spark.driver.memory', memory).getOrCreate()
        df_str = spark.read.parquet(directory + file_name)
        path_results = directory.replace('Strings', 'Results') + \
            file_name.replace('strings', 'results')
        df_res = spark.read.parquet(path_results)
        # noinspection PyTypeChecker
        df_res = df_str.alias('df_str') \
            .join(df_res, df_str.query == df_res.query, 'inner') \
            .filter(col('data.h3Index').isNotNull()) \
            .withColumn('osmName', col('df_str.name')) \
            .withColumn('googleName', col('data.name')) \
            .withColumn(
                'nameDistance',
                get_string_distance(col('googleName'), col('osmName'), col('df_str.query')),
            ) \
            .withColumn(
                'h3Distance',
                get_h3_distance(
                    col('h3Index').cast(StringType()),
                    col('data.h3Index').cast(StringType()),
                    lit(MAX_H3_DISTANCE),
                ),
            ) \
            .withColumn(
                'confidence',
                get_confidence_based_h3_and_name_distance(
                    col('h3Distance'), col('nameDistance'), lit(MAX_H3_DISTANCE),
                ),
            ) \
            .select('osmId', 'type', 'confidence', 'data.id')
        df_res.write.parquet(path_results.replace('results', 'results_matched'))

    @staticmethod
    def match_poi_results(directory: str, file_name: str):
        """Match the POI ids that have been sent to the received results."""
        memory = os.getenv('SPARK_MEMORY') or '16g'
        spark = SparkSession.builder.appName('google-poi') \
            .config('spark.driver.memory', memory).getOrCreate()
        df_res = spark.read.parquet(
            directory.replace('Strings', 'Results') +
            file_name.replace('strings', 'results_matched')
        )
        path_poi_data = directory.replace('searchStrings', 'poiData') + \
            file_name.replace('search_strings', 'poi_data')
        df_pd = spark.read.parquet(path_poi_data)
        # noinspection PyTypeChecker
        df_pd = df_res.alias('df_res') \
            .join(df_pd, df_res.id == df_pd.id, 'inner') \
            .filter(col('data.h3Index').isNotNull()) \
            .select('osmId', 'type', 'confidence', col('df_res.id').alias('id'), 'data.*')
        df_pd.write.parquet(path_poi_data.replace('poi_data', 'poi_data_matched'))

    @staticmethod
    def batch_queries(df: DataFrame, output_dir: str, file_name: str,
                      query_property: str, query_type: str, schema=None):
        """Send queries in batches for each partition of a dataframe.

        Batches rows of ``df`` (100 at a time), sends them via ``send_query``
        with exponential back-off retries, and streams the responses into a
        single Parquet file.

        :param df: pandas DataFrame whose ``query_property`` column is sent
        :param output_dir: directory (relative to this script) for the output
        :param file_name: name of the parquet file to write
        :param query_property: column of ``df`` holding the query value
        :param query_type: forwarded to ``send_query``
        :param schema: optional pyarrow schema for the Parquet writer
        """
        batch = list()
        batch_size = 100
        max_sleep_time = 120
        writer = None
        for index, row in df.iterrows():
            batch.append(row[query_property])
            # Flush on a full batch or on the last row of the dataframe.
            # noinspection PyTypeChecker
            if (len(batch) == batch_size) or ((index + 1) == len(df.index)):
                successful = False
                sleep_time = 1
                while not successful and (sleep_time < max_sleep_time):
                    try:
                        result = SearchScraper.send_query(batch, query_type)
                        if result and ('data' in result):
                            data = pandas.DataFrame(result['data'])
                            # noinspection PyArgumentList
                            table = pa.Table.from_pandas(df=data, schema=schema)
                            if not writer:
                                # Lazily create the writer on first success so
                                # the schema can fall back to the first table's.
                                script_dir = os.path.dirname(__file__)
                                output_dir = os.path.join(script_dir, output_dir)
                                output_file = os.path.join(output_dir, file_name)
                                Path(output_dir).mkdir(parents=True, exist_ok=True)
                                writer = pq.ParquetWriter(
                                    output_file,
                                    schema=schema if schema else table.schema,
                                    flavor='spark',
                                )
                            writer.write_table(table)
                            successful = True
                        else:
                            sleep(sleep_time)
                            sleep_time *= 2
                    except FunctionTimedOut:
                        sleep(sleep_time)
                        sleep_time *= 2
                        if sleep_time >= max_sleep_time:
                            print(f'[{moment.now().format("YYYY-MM-DDTHH-mm-ss")}]: Request timed out too many times. '
                                  f'Skipping batch')
                batch = list()
        if writer:
            writer.close()

    @staticmethod
    def send_poi_queries(directory: str, file_name: str):
        """Send Google POI ids to retrieve all POI information."""
        pois = pq.read_table(
            directory.replace('Strings', 'Results') +
            file_name.replace('strings', 'results_matched')
        ).to_pandas()
        pois = pois[['id']].drop_duplicates()
        schema = pa.schema([
            pa.field('id', pa.string()),
            pa.field('data', pa.struct([
                pa.field('name', pa.string()),
                pa.field('placeID', pa.string()),
                pa.field('location', pa.struct([
                    pa.field('lat', pa.float64()),
                    pa.field('lng', pa.float64()),
                ])),
                pa.field('h3Index', pa.string()),
                pa.field('address', pa.list_(pa.string())),
                pa.field('timezone', pa.string()),
                pa.field('categories', pa.struct([
                    pa.field('google', pa.list_(pa.string())),
                    pa.field('kuwala', pa.list_(pa.string())),
                ])),
                pa.field('temporarilyClosed', pa.bool_()),
                pa.field('permanentlyClosed', pa.bool_()),
                pa.field('insideOf', pa.string()),
                pa.field('contact', pa.struct([
                    pa.field('phone', pa.string()),
                    pa.field('website', pa.string()),
                ])),
                pa.field('openingHours', pa.list_(pa.struct([
                    pa.field('closingTime', pa.string()),
                    pa.field('openingTime', pa.string()),
                    pa.field('date', pa.string()),
                ]))),
                pa.field('rating', pa.struct([
                    pa.field('numberOfReviews', pa.int64()),
                    pa.field('stars', pa.float64()),
                ])),
                pa.field('priceLevel', pa.int64()),
                pa.field('popularity', pa.list_(pa.struct([
                    pa.field('popularity', pa.int64()),
                    pa.field('timestamp', pa.string()),
                ]))),
                pa.field('waitingTime', pa.list_(pa.struct([
                    pa.field('waitingTime', pa.int64()),
                    pa.field('timestamp', pa.string()),
                ]))),
                pa.field('spendingTime', pa.list_(pa.int64())),
            ])),
        ])
        SearchScraper.batch_queries(
            df=pois,
            output_dir=f'../../tmp/googleFiles/poiData/',
            file_name=file_name.replace('search_strings', 'poi_data'),
            query_property='id',
            query_type='poi',
            schema=schema,
        )

    @staticmethod
    def send_search_queries(directory: str, file_name: str):
        """Send search strings to get Google POI ids."""
        search_strings = pq.read_table(directory + file_name).to_pandas()
        schema = pa.schema([
            pa.field('query', pa.string()),
            pa.field('data', pa.struct([
                pa.field('h3Index', pa.string()),
                pa.field('id', pa.string()),
                pa.field('location', pa.struct([
                    pa.field('lat', pa.float64()),
                    pa.field('lng', pa.float64()),
                ])),
                pa.field('name', pa.string()),
            ])),
        ])
        SearchScraper.batch_queries(
            df=search_strings,
            output_dir=f'../../tmp/googleFiles/searchResults/',
            file_name=file_name.replace('strings', 'results'),
            query_property='query',
            query_type='search',
            schema=schema,
        )

    @staticmethod
    def scrape_with_search_string():
        """Write scraped POI information to a Parquet file.

        Runs the full pipeline on the newest search-strings file:
        search -> match -> poi fetch -> match.
        """
        script_dir = os.path.dirname(__file__)
        parquet_files = os.path.join(script_dir, '../../tmp/googleFiles/searchStrings/')
        # Newest file first (names sort lexicographically by timestamp).
        file_name = sorted(os.listdir(parquet_files), reverse=True)[0]
        SearchScraper.send_search_queries(parquet_files, file_name)
        SearchScraper.match_search_results(parquet_files, file_name)
        SearchScraper.send_poi_queries(parquet_files, file_name)
        SearchScraper.match_poi_results(parquet_files, file_name)
import argparse

import mlflow
import pytorch_lightning as pl
from ax.service.ax_client import AxClient

from iris import IrisClassification
from iris_data_module import IrisDataModule


def train_evaluate(params, max_epochs=100):
    """Train and test the Iris classifier once.

    :param params: keyword arguments forwarded to ``IrisClassification``
    :param max_epochs: maximum number of training epochs
    :return: the ``test_acc`` metric recorded by the trainer (a tensor)
    """
    model = IrisClassification(**params)
    dm = IrisDataModule()
    dm.setup(stage="fit")
    trainer = pl.Trainer(max_epochs=max_epochs)
    # Autologging records params/metrics/model into the active MLflow run.
    mlflow.pytorch.autolog()
    trainer.fit(model, dm)
    trainer.test(datamodule=dm)
    test_accuracy = trainer.callback_metrics.get("test_acc")
    return test_accuracy


def model_training_hyperparameter_tuning(max_epochs, total_trials, params):
    """
    This function takes input params max_epochs, total_trials, params
    and creates a nested run in Mlflow. The parameters, metrics, model and
    summary are dumped into their respective mlflow-run ids. The best
    parameters are dumped along with the baseline model.

    :param max_epochs: Max epochs used for training the model. Type:int
    :param total_trials: Number of ax-client experimental trials. Type:int
    :param params: Model parameters. Type:dict
    """
    with mlflow.start_run(run_name="Parent Run"):
        # Baseline run with the caller-supplied parameters.
        train_evaluate(params=params, max_epochs=max_epochs)

        ax_client = AxClient()
        ax_client.create_experiment(
            parameters=[
                {"name": "lr", "type": "range", "bounds": [1e-3, 0.15], "log_scale": True},
                {"name": "weight_decay", "type": "range", "bounds": [1e-4, 1e-3]},
                {"name": "momentum", "type": "range", "bounds": [0.7, 1.0]},
            ],
            objective_name="test_accuracy",
        )

        for i in range(total_trials):
            with mlflow.start_run(nested=True, run_name="Trial " + str(i)) as child_run:
                parameters, trial_index = ax_client.get_next_trial()
                test_accuracy = train_evaluate(params=parameters, max_epochs=max_epochs)
                # completion of trial
                ax_client.complete_trial(trial_index=trial_index, raw_data=test_accuracy.item())

        best_parameters, metrics = ax_client.get_best_parameters()
        for param_name, value in best_parameters.items():
            mlflow.log_param("optimum_" + param_name, value)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser = pl.Trainer.add_argparse_args(parent_parser=parser)
    parser.add_argument(
        "--total_trials",
        default=3,
        # Fixed typo in the original help text ("umber" -> "number").
        help="number of trials to be run for the optimization experiment",
    )
    args = parser.parse_args()
    if "max_epochs" in args:
        max_epochs = args.max_epochs
    else:
        max_epochs = 100
    params = {"lr": 0.1, "momentum": 0.9, "weight_decay": 0}
    model_training_hyperparameter_tuning(
        max_epochs=int(max_epochs),
        total_trials=int(args.total_trials),
        params=params,
    )
# -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
riko.modules.fetchtext
~~~~~~~~~~~~~~~~~~~~~~
Provides functions for fetching text data sources.

Accesses and extracts data from text sources on the web. This data can then
be merged with other data in your Pipe.

Examples:
    basic usage::

        >>> from riko import get_path
        >>> from riko.modules.fetchtext import pipe
        >>>
        >>> conf = {'url': get_path('lorem.txt')}
        >>> next(pipe(conf=conf))['content'] == 'What is Lorem Ipsum?'
        True

Attributes:
    OPTS (dict): The default pipe options
    DEFAULTS (dict): The default parser options
"""
import pygogo as gogo

from . import processor
from riko import ENCODING
from riko.utils import fetch, auto_close, get_abspath
from riko.bado import coroutine, return_value, io

OPTS = {"ftype": "none", "assign": "content"}
DEFAULTS = {"encoding": ENCODING}
logger = gogo.Gogo(__name__, monolog=True).logger


@coroutine
def async_parser(_, objconf, skip=False, **kwargs):
    """Asynchronously parses the pipe content

    Args:
        _ (None): Ignored
        objconf (obj): The pipe configuration (an Objectify instance)
        skip (bool): Don't parse the content
        kwargs (dict): Keyword arguments

    Kwargs:
        stream (dict): The original item

    Returns:
        Iter[dict]: The stream of items

    Examples:
        >>> from riko import get_path
        >>> from riko.bado import react
        >>> from riko.bado.mock import FakeReactor
        >>> from meza.fntools import Objectify
        >>>
        >>> def run(reactor):
        ...     callback = lambda x: print(next(x)['content'])
        ...     url = get_path('lorem.txt')
        ...     objconf = Objectify({'url': url, 'encoding': ENCODING})
        ...     d = async_parser(None, objconf, assign='content')
        ...     return d.addCallbacks(callback, logger.error)
        >>>
        >>> try:
        ...     react(run, _reactor=FakeReactor())
        ... except SystemExit:
        ...     pass
        ...
        What is Lorem Ipsum?
    """
    if skip:
        stream = kwargs["stream"]
    else:
        url = get_abspath(objconf.url)
        f = yield io.async_url_open(url)
        assign = kwargs["assign"]
        encoding = objconf.encoding
        # One output item per line; bytes are decoded with the configured
        # encoding.  auto_close closes `f` once the generator is exhausted.
        _stream = ({assign: line.strip().decode(encoding)} for line in f)
        stream = auto_close(_stream, f)

    return_value(stream)


def parser(_, objconf, skip=False, **kwargs):
    """Parses the pipe content

    Args:
        _ (None): Ignored
        objconf (obj): The pipe configuration (an Objectify instance)
        skip (bool): Don't parse the content
        kwargs (dict): Keyword arguments

    Kwargs:
        stream (dict): The original item

    Returns:
        Iter[dict]: The stream of items

    Examples:
        >>> from riko import get_path
        >>> from meza.fntools import Objectify
        >>>
        >>> url = get_path('lorem.txt')
        >>> objconf = Objectify({'url': url, 'encoding': ENCODING})
        >>> result = parser(None, objconf, assign='content')
        >>> next(result)['content'] == 'What is Lorem Ipsum?'
        True
    """
    if skip:
        stream = kwargs["stream"]
    else:
        # fetch(decode=True) yields already-decoded text lines.
        f = fetch(decode=True, **objconf)
        _stream = ({kwargs["assign"]: line.strip()} for line in f)
        stream = auto_close(_stream, f)

    return stream


@processor(DEFAULTS, isasync=True, **OPTS)
def async_pipe(*args, **kwargs):
    """A source that asynchronously fetches and parses an XML or JSON file to
    return the entries.

    Args:
        item (dict): The entry to process
        kwargs (dict): The keyword arguments passed to the wrapper

    Kwargs:
        conf (dict): The pipe configuration. Must contain the key 'url'. May
            contain the key 'encoding'.

            url (str): The web site to fetch.
            encoding (str): The file encoding (default: utf-8).

        assign (str): Attribute to assign parsed content (default: content)

    Returns:
        Deferred: twisted.internet.defer.Deferred stream of items

    Examples:
        >>> from riko import get_path
        >>> from riko.bado import react
        >>> from riko.bado.mock import FakeReactor
        >>>
        >>> def run(reactor):
        ...     callback = lambda x: print(next(x)['content'])
        ...     conf = {'url': get_path('lorem.txt')}
        ...     d = async_pipe(conf=conf)
        ...     return d.addCallbacks(callback, logger.error)
        >>>
        >>> try:
        ...     react(run, _reactor=FakeReactor())
        ... except SystemExit:
        ...     pass
        ...
        What is Lorem Ipsum?
    """
    return async_parser(*args, **kwargs)


@processor(DEFAULTS, **OPTS)
def pipe(*args, **kwargs):
    """A source that fetches and parses an XML or JSON file to
    return the entries.

    Args:
        item (dict): The entry to process
        kwargs (dict): The keyword arguments passed to the wrapper

    Kwargs:
        conf (dict): The pipe configuration. Must contain the key 'url'. May
            contain the key 'encoding'.

            url (str): The web site to fetch
            encoding (str): The file encoding (default: utf-8).

        assign (str): Attribute to assign parsed content (default: content)

    Returns:
        dict: an iterator of items

    Examples:
        >>> from riko import get_path
        >>>
        >>> conf = {'url': get_path('lorem.txt')}
        >>> next(pipe(conf=conf))['content'] == 'What is Lorem Ipsum?'
        True
    """
    return parser(*args, **kwargs)
import re
import copy

from ...const import ty_to_const_class, vex_int_class, get_type_size
from ...expr import Const, RdTmp, Unop, Binop, Load, CCall, Get, ITE
from ...stmt import WrTmp, Put, IMark, Store, NoOp, Exit
from ...enums import IRCallee
from future.utils import with_metaclass


class JumpKind(object):
    """String constants for the VEX jump kinds used on IRSB exits."""
    Boring = 'Ijk_Boring'
    Call = 'Ijk_Call'
    Ret = 'Ijk_Ret'
    Segfault = 'Ijk_SigSEGV'
    Exit = 'Ijk_Exit'
    Syscall = 'Ijk_Sys_syscall'
    Sysenter = 'Ijk_Sys_sysenter'
    Invalid = 'Ijk_INVALID'
    NoDecode = 'Ijk_NoDecode'


# Matches attribute names like 'int_32' so Type.int_N can be synthesized.
typemeta_re = re.compile(r'int_(?P<size>\d+)$')


class TypeMeta(type):
    """Metaclass that resolves ``Type.int_<N>`` to the VEX int type of width N."""
    def __getattr__(self, name):
        match = typemeta_re.match(name)
        if match:
            width = int(match.group('size'))
            return vex_int_class(width).type
        else:
            return type.__getattr__(name)


class Type(with_metaclass(TypeMeta, object)):
    """Namespace of VEX type name constants; integer types are dynamic."""
    __metaclass__ = TypeMeta

    ieee_float_16 = 'Ity_F16'
    ieee_float_32 = 'Ity_F32'
    ieee_float_64 = 'Ity_F64'
    ieee_float_128 = 'Ity_F128'

    decimal_float_32 = 'Ity_D32'
    decimal_float_64 = 'Ity_D64'
    decimal_float_128 = 'Ity_D128'

    simd_vector_128 = 'Ity_V128'
    simd_vector_256 = 'Ity_V256'


def get_op_format_from_const_ty(ty):
    """Return the op-format fragment (e.g. '32') for a VEX const type."""
    return ty_to_const_class(ty).op_format


def make_format_op_generator(fmt_string):
    """
    Return a function which generates an op format (just a string of the vex
    instruction)

    Functions by formatting the fmt_string with the types of the arguments
    """
    def gen(arg_types):
        converted_arg_types = list(map(get_op_format_from_const_ty, arg_types))
        op = fmt_string.format(arg_t=converted_arg_types)
        return op
    return gen


def mkbinop(fstring):
    """Build an IRSBCustomizer binary-op method from a format string."""
    return lambda self, expr_a, expr_b: \
        self.op_binary(make_format_op_generator(fstring))(expr_a, expr_b)


def mkunop(fstring):
    """Build an IRSBCustomizer unary-op method from a format string."""
    return lambda self, expr_a: \
        self.op_unary(make_format_op_generator(fstring))(expr_a)


def mkcmpop(fstring_fragment, signedness=''):
    """Build a comparison method; the 1-bit result is cast back to the
    operand type."""
    def cmpop(self, expr_a, expr_b):
        ty = self.get_type(expr_a)
        fstring = 'Iop_Cmp%s{arg_t[0]}%s' % (fstring_fragment, signedness)
        retval = mkbinop(fstring)(self, expr_a, expr_b)
        return self.cast_to(retval, ty)
    return cmpop


class IRSBCustomizer(object):
    """Helper for building up the statements of a pyvex IRSB by hand."""

    op_add = mkbinop('Iop_Add{arg_t[0]}')
    op_sub = mkbinop('Iop_Sub{arg_t[0]}')
    op_umul = mkbinop('Iop_Mul{arg_t[0]}')
    op_smul = mkbinop('Iop_MullS{arg_t[0]}')
    op_sdiv = mkbinop('Iop_DivS{arg_t[0]}')
    op_udiv = mkbinop('Iop_DivU{arg_t[0]}')

    # Custom operation that does not exist in libVEX
    op_mod = mkbinop('Iop_Mod{arg_t[0]}')

    op_or = mkbinop('Iop_Or{arg_t[0]}')
    op_and = mkbinop('Iop_And{arg_t[0]}')
    op_xor = mkbinop('Iop_Xor{arg_t[0]}')

    op_shr = mkbinop('Iop_Shr{arg_t[0]}')
    op_shl = mkbinop('Iop_Shl{arg_t[0]}')
    op_not = mkunop('Iop_Not{arg_t[0]}')

    op_cmp_eq = mkcmpop('EQ')
    op_cmp_ne = mkcmpop('NE')
    op_cmp_slt = mkcmpop('LT', 'S')
    op_cmp_sle = mkcmpop('LE', 'S')
    op_cmp_ult = mkcmpop('LT', 'U')
    op_cmp_ule = mkcmpop('LE', 'U')
    op_cmp_sge = mkcmpop('GE', 'S')
    op_cmp_uge = mkcmpop('GE', 'U')
    op_cmp_sgt = mkcmpop('GT', 'S')
    op_cmp_ugt = mkcmpop('GT', 'U')

    def __init__(self, irsb):
        self.arch = irsb.arch
        self.irsb = irsb

    def get_type(self, rdt):
        return rdt.result_type(self.irsb.tyenv)

    # Statements (no return value)
    def _append_stmt(self, stmt):
        self.irsb.statements += [stmt]

    def imark(self, int_addr, int_length, int_delta=0):
        self._append_stmt(IMark(int_addr, int_length, int_delta))

    def get_reg(self, regname):  # TODO move this into the lifter
        return self.arch.registers[regname][0]

    def put(self, expr_val, tuple_reg):
        self._append_stmt(Put(copy.copy(expr_val), tuple_reg))

    def store(self, addr, expr):
        self._append_stmt(Store(copy.copy(addr), copy.copy(expr), self.arch.memory_endness))

    def noop(self):
        self._append_stmt(NoOp())

    def add_exit(self, guard, dst, jk, ip):
        """
        Add an exit out of the middle of an IRSB.
        (e.g., a conditional jump)

        :param guard: An expression, the exit is taken if true
        :param dst: the destination of the exit (a Const)
        :param jk: the JumpKind of this exit (probably Ijk_Boring)
        :param ip: The address of this exit's source
        """
        self.irsb.statements.append(Exit(guard, dst.con, jk, ip))
    # end statements

    def goto(self, addr):
        self.irsb.next = addr
        self.irsb.jumpkind = JumpKind.Boring

    def ret(self, addr):
        self.irsb.next = addr
        self.irsb.jumpkind = JumpKind.Ret

    def call(self, addr):
        self.irsb.next = addr
        self.irsb.jumpkind = JumpKind.Call

    def _add_tmp(self, t):
        return self.irsb.tyenv.add(t)

    def _rdtmp(self, tmp):
        return RdTmp.get_instance(tmp)

    def _settmp(self, expr):
        # Write `expr` into a fresh temporary and return a read of it.
        ty = self.get_type(expr)
        tmp = self._add_tmp(ty)
        self._append_stmt(WrTmp(tmp, expr))
        return self._rdtmp(tmp)

    def rdreg(self, reg, ty):
        return self._settmp(Get(reg, ty))

    def load(self, addr, ty):
        return self._settmp(Load(self.arch.memory_endness, ty, copy.copy(addr)))

    def op_ccall(self, retty, funcstr, args):
        return self._settmp(CCall(retty, IRCallee(len(args), funcstr, 0xffff), args))

    def ite(self, condrdt, iftruerdt, iffalserdt):
        # NOTE: pyvex's ITE constructor takes (cond, iffalse, iftrue).
        return self._settmp(ITE(copy.copy(condrdt), copy.copy(iffalserdt), copy.copy(iftruerdt)))

    def mkconst(self, val, ty):
        cls = ty_to_const_class(ty)
        return Const(cls(val))

    # Operations
    def op_generic(self, Operation, op_generator):
        def instance(*args):  # Note: The args here are all RdTmps
            for arg in args:
                assert isinstance(arg, RdTmp) or isinstance(arg, Const)
            arg_types = [self.get_type(arg) for arg in args]
            # two operations should never share the same argument instances,
            # copy them here to ensure that
            args = [copy.copy(a) for a in args]
            op = Operation(op_generator(arg_types), args)
            msg = "operation needs to be well typed: " + str(op)
            assert op.typecheck(self.irsb.tyenv), msg + "\ntypes: " + str(self.irsb.tyenv)
            return self._settmp(op)
        return instance

    def op_binary(self, op_format_str):
        return self.op_generic(Binop, op_format_str)

    def op_unary(self, op_format_str):
        return self.op_generic(Unop, op_format_str)

    def cast_to(self, rdt, tydest, signed=False, high=False):
        """Narrow or widen `rdt` to `tydest`; a same-width cast is a no-op."""
        goalwidth = get_type_size(tydest)
        rdtwidth = self.get_rdt_width(rdt)

        if rdtwidth > goalwidth:
            return self.op_narrow_int(rdt, tydest, high_half=high)
        elif rdtwidth < goalwidth:
            return self.op_widen_int(rdt, tydest, signed=signed)
        else:
            return rdt

    def op_to_one_bit(self, rdt):
        rdtty = self.get_type(rdt)
        if rdtty not in [Type.int_64, Type.int_32]:
            rdt = self.op_widen_int_unsigned(rdt, Type.int_32)
        onebit = self.op_narrow_int(rdt, Type.int_1)
        return onebit

    def op_narrow_int(self, rdt, tydest, high_half=False):
        op_name = '{op}{high}to{dest}'.format(
            op='Iop_{arg_t[0]}',
            high='HI' if high_half else '',
            dest=get_op_format_from_const_ty(tydest))
        return self.op_unary(make_format_op_generator(op_name))(rdt)

    def op_widen_int(self, rdt, tydest, signed=False):
        op_name = '{op}{sign}to{dest}'.format(
            op='Iop_{arg_t[0]}',
            sign='S' if signed else 'U',
            dest=get_op_format_from_const_ty(tydest))
        return self.op_unary(make_format_op_generator(op_name))(rdt)

    def op_widen_int_signed(self, rdt, tydest):
        return self.op_widen_int(rdt, tydest, signed=True)

    def op_widen_int_unsigned(self, rdt, tydest):
        return self.op_widen_int(rdt, tydest, signed=False)

    def get_msb(self, tmp, ty):
        width = get_type_size(ty)
        return self.get_bit(tmp, width - 1)

    def get_bit(self, rdt, idx):
        shifted = self.op_shr(rdt, idx)
        bit = self.op_extract_lsb(shifted)
        return bit

    def op_extract_lsb(self, rdt):
        bitmask = self.mkconst(1, self.get_type(rdt))
        return self.op_and(bitmask, rdt)

    def set_bit(self, rdt, idx, bval):
        currbit = self.get_bit(rdt, idx)
        bvalbit = self.op_extract_lsb(bval)
        # NOTE(review): the original XORs `bval` (not `bvalbit`) with the
        # current bit; `bvalbit` is unused.  Preserved as-is — confirm intent.
        areequalextrabits = self.op_xor(bval, currbit)
        one = self.mkconst(1, self.get_type(areequalextrabits))
        areequal = self.op_and(areequalextrabits, one)
        shifted = self.op_shl(areequal, idx)
        return self.op_xor(rdt, shifted)

    def set_bits(self, rdt, idxsandvals):
        ty = self.get_type(rdt)
        if all([isinstance(idx, Const) for idx, _ in idxsandvals]):
            # All indices constant: fold the mask at build time.
            relevantbits = self.mkconst(
                sum([1 << idx.con.value for idx, _ in idxsandvals]), ty)
        else:
            # Otherwise OR the shifted one-bits together at runtime.
            relevantbits = self.mkconst(0, ty)
            for idx, _ in idxsandvals:
                shifted = self.op_shl(self.mkconst(1, ty), idx)
                relevantbits = self.op_or(relevantbits, shifted)
        setto = self.mkconst(0, ty)
        for idx, bval in idxsandvals:
            bvalbit = self.op_extract_lsb(bval)
            shifted = self.op_shl(bvalbit, idx)
            setto = self.op_or(setto, shifted)
        # Flip exactly the selected bits that differ from the target values.
        shouldflip = self.op_and(self.op_xor(setto, rdt), relevantbits)
        return self.op_xor(rdt, shouldflip)

    def get_rdt_width(self, rdt):
        return rdt.result_size(self.irsb.tyenv)
# SPDX-License-Identifier: MIT OR Apache-2.0

# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the MIT License.  See the LICENSE file in the root of this
# repository for complete details.

"""
Generic utilities.
"""

import errno
import sys

from typing import Any, Callable


def until_not_interrupted(f: Callable[..., Any], *args: Any, **kw: Any) -> Any:
    """
    Retry until *f* succeeds or an exception that isn't caused by EINTR occurs.

    :param f: A callable like a function.
    :param *args: Positional arguments for *f*.
    :param **kw: Keyword arguments for *f*.
    """
    while True:
        try:
            return f(*args, **kw)
        except OSError as e:
            # EINTR just means a signal interrupted the syscall; retry.
            # Anything else is a real error and is re-raised.
            if e.args[0] == errno.EINTR:
                continue
            raise


def get_processname() -> str:
    # based on code from
    # https://github.com/python/cpython/blob/313f92a57bc3887026ec16adb536bb2b7580ce47/Lib/logging/__init__.py#L342-L352
    processname = "n/a"
    # Look up multiprocessing lazily so importing this module never pulls
    # it in; if it hasn't been imported by anyone, report "n/a".
    mp: Any = sys.modules.get("multiprocessing")
    if mp is not None:
        # Errors may occur if multiprocessing has not finished loading
        # yet - e.g. if a custom import hook causes third-party code
        # to run when multiprocessing calls import.
        try:
            processname = mp.current_process().name
        except Exception:
            pass

    return processname
<import_from_future_stmt> absolute_import<import_stmt>uuid<import_from_stmt>datetime datetime<import_from_stmt>sqlalchemy Column String DateTime ForeignKey<import_from_stmt>sqlalchemy.orm relationship backref<import_from_stmt>sqlalchemy.schema UniqueConstraint Index<import_from_stmt>changes.config db<import_from_stmt>changes.db.types.filestorage FileData FileStorage<import_from_stmt>changes.db.types.guid GUID<import_from_stmt>changes.db.types.json JSONEncodedDict<line_sep>ARTIFACT_STORAGE_OPTIONS={'path':'artifacts' }<class_stmt>Artifact(db.Model)<block_start>""" The artifact produced by one job/step, produced on a single machine. Sometimes this is a JSON dict referencing a file in S3, sometimes it is Null, sometimes it is an empty dict. It is basically any file left behind after a run for changes to pick up """<line_sep>id=Column(GUID primary_key=<true> default=uuid.uuid4)<line_sep>job_id=Column(GUID ForeignKey('job.id' ondelete="CASCADE") nullable=<false>)<line_sep>step_id=Column(GUID ForeignKey('jobstep.id' ondelete="CASCADE") nullable=<false>)<line_sep>project_id=Column(GUID ForeignKey('project.id' ondelete="CASCADE") nullable=<false>)<line_sep>name=Column(String(1024) nullable=<false>)<line_sep>date_created=Column(DateTime nullable=<false> default=datetime.utcnow)<line_sep>data=Column(JSONEncodedDict)<line_sep>file=Column(FileStorage(**ARTIFACT_STORAGE_OPTIONS))<line_sep>job=relationship('Job' backref=backref('artifacts'))<line_sep>project=relationship('Project')<line_sep>step=relationship('JobStep' backref=backref('artifacts'))<line_sep>__tablename__='artifact'<line_sep>__table_args__=(UniqueConstraint('step_id' 'name' name='unq_artifact_name') Index('idx_artifact_job_id' 'job_id') Index('idx_artifact_project_id' 'project_id') )<def_stmt>__init__ self **kwargs<block_start>super(Artifact 
self).__init__(**kwargs)<if_stmt>self.id<is><none><block_start>self.id=uuid.uuid4()<block_end><if_stmt>self.date_created<is><none><block_start>self.date_created=datetime.utcnow()<block_end><if_stmt>self.data<is><none><block_start>self.data={}<block_end><if_stmt>self.file<is><none># TODO(dcramer): this is super hacky but not sure a better way to # do it with SQLAlchemy <block_start>self.file=FileData({} ARTIFACT_STORAGE_OPTIONS)<block_end><block_end><block_end>
# -*- coding: utf-8 -*- # Resource object code # # Created: Tue Jul 2 13:23:21 2013 # by: The Resource Compiler for PyQt (Qt v4.8.3) # # WARNING! All changes made in this file will be lost! # this line manually edited <import_from_stmt>enaml.qt QtCore<line_sep>qt_resource_data=b"\ \x00\x00\x02\x61\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x6f\x00\x00\x00\x6f\x08\x06\x00\x00\x00\xe2\xc5\x9e\x60\ \x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\ \x01\x95\x2b\x0e\x1b\x00\x00\x02\x03\x49\x44\x41\x54\x78\x9c\xed\ \xdc\xc1\x49\xc5\x40\x14\x46\xe1\xff\xe9\x2b\x2d\xb5\xd8\x42\xb2\ \x75\xf7\xd2\x82\x6d\x58\x43\x96\xf6\x93\x9d\x2e\x64\xe0\x21\x88\ \x20\x38\x77\xce\x78\x4e\x03\x73\xc9\x47\x36\x21\x73\x2f\x19\xa8\ \x75\x5d\xdf\xab\x67\xf8\xa9\x7d\xdf\x2f\xd5\x33\xb4\xae\xd5\x03\ \x7c\xed\x76\xbb\xbd\x54\xcf\xf0\x5d\xdb\xb6\x3d\x55\xcf\x70\xdf\ \x43\xf5\x00\xf6\xfb\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\ \x70\xe2\x81\x13\x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\ \x07\x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\ \xf1\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\xdc\x63\xf5\x00\xad\ \x75\x5d\xdf\x47\xfe\xe1\x36\x49\x96\x65\x79\x3b\xcf\xf3\xf5\x38\ \x8e\xe7\xea\x59\x92\x41\xf0\x08\x70\xad\x91\x00\xcb\xf1\x48\x70\ \xad\x51\x00\x4b\xf1\x88\x70\xad\x11\x00\xcb\xf0\xc8\x70\xad\x6a\ \xc0\x12\xbc\x19\xe0\x5a\x95\x80\xdd\xf1\x66\x82\x6b\x55\x01\x76\ \xc5\x9b\x11\xae\x55\x01\xd8\x0d\x6f\x66\xb8\x56\x6f\xc0\x4b\xaf\ \xab\xc4\xb3\xc3\xdd\xd7\xeb\x06\xed\xe5\x3f\xbc\x11\x33\xb6\x6d\ \xdb\x93\xdf\x36\xc1\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\ \xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\ \x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\ \xe2\x81\x13\x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\ \x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\ 
\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\xe0\xdc\xc3\xf2\ \x07\x75\xdb\xc3\xd2\xe3\x90\xe4\x7f\x6c\x40\x4a\x3e\xe1\xf6\x7d\ \xef\xf2\x5c\xbb\xad\xaf\x3a\x8e\xe3\xf9\x3c\xcf\xd7\x65\x59\xde\ \x7a\x9d\xd9\xbb\x9e\x70\x49\xe7\xc5\x71\x33\x03\xf6\x86\x4b\x0a\ \x56\x36\xce\x08\x58\x01\x97\x14\x2d\x4b\x9d\x09\xb0\x0a\x2e\x29\ \x5c\x53\x3c\x03\x60\x25\x5c\x52\xbc\x20\x9c\x0c\x58\x0d\x97\x0c\ \xb0\x9a\x9f\x08\x38\x02\x5c\x32\x00\x5e\xc2\x02\x1c\x05\x2e\x19\ \x04\x2f\x61\x00\x8e\x04\x97\xf8\x6d\x13\x9d\x78\xe0\xc4\x03\x27\ \x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\x9c\x78\xe0\ \xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\xe2\x81\x13\x0f\ \x9c\x78\xe0\xc4\x03\x27\x1e\x38\xf1\xc0\x89\x07\x4e\x3c\x70\xe2\ \x81\xbb\x56\x0f\xf0\xb5\x5e\x57\x82\x67\xe8\x03\xdb\xf1\xfe\x32\ \xdf\x7a\xb4\x66\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \ \x00\x00\x01\xda\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x49\x49\x44\x41\x54\ \x48\x4b\xbd\xcc\xbd\x4a\x03\x51\x10\x86\x61\x2f\xce\x8b\xf0\x46\ \x2c\x2c\x2c\x2c\x2c\x14\x2c\x62\x61\x21\xc1\xc6\xc2\xc2\xc2\xc2\ \xc2\xc2\x1b\x10\x11\x11\x91\x20\x22\x31\x6e\xf6\xf7\xec\x6e\xf6\ \x37\xd9\x24\x36\xce\x30\x73\xa6\x90\x54\x0e\x67\xe1\xed\xbe\x8f\ \x67\xeb\x78\x30\x74\x17\xea\x6d\xb7\x76\x11\xeb\xcd\x62\xe5\xa2\ \x5e\xf4\x7a\xbe\xa4\xb6\x77\xf6\xf4\x89\xd6\x8b\x5e\xb5\x1d\xf5\ \xe7\xf7\xbf\x44\xeb\x45\x2f\x9b\x05\x05\xdb\x8f\xb9\xd7\x04\x82\ \x68\x9b\xf4\xf8\x4e\xd3\x06\xbd\xa8\xe7\x14\xea\xe1\xad\x26\x10\ 
\x44\x63\x7d\x56\xb5\x14\xea\xfe\x8d\x26\x10\x44\x63\x3d\x2f\x1b\ \x0a\xb6\xb5\x77\xad\x09\x04\xd1\x58\xcf\x8a\x86\x42\x7d\x72\xa5\ \x09\x04\xd1\x58\x37\x79\x4d\xa1\x3e\xbe\xd4\x04\x82\x68\xac\x27\ \x59\x45\xc1\xb6\xfa\xbc\xd0\x04\x82\x68\xac\xc7\x69\x49\xa1\xfe\ \x71\xae\x09\x04\xd1\x58\x8f\x4c\x41\xa1\xfe\x7e\xa6\x09\x04\xd1\ \x58\x0f\x93\x82\x42\x7d\x74\xaa\x09\x04\xd1\x58\x0f\xe2\x19\x05\ \xdb\xf2\xed\x44\x13\x08\xa2\xb1\xee\x47\x39\x85\xfa\xeb\x91\x26\ \x10\x44\x63\x7d\x1a\xe5\x14\xea\x2f\x87\x9a\x40\x10\xcd\xea\x61\ \x46\xc1\xd6\x3d\x1f\x68\x42\xdd\x6a\xac\x7b\x41\x46\xa1\xfe\xb4\ \xaf\x09\x04\xd1\x58\xff\x0e\x52\x0a\xf5\x47\x55\x20\x88\x66\x75\ \x3f\xa5\x50\x7f\xd8\xd5\x84\xba\xd5\x58\x9f\xf8\x86\x82\x4d\x9f\ \x68\xac\x7f\x4d\x8d\x8b\xac\xee\x25\x2e\x62\x7d\xec\x25\x2e\x62\ \xdd\x55\x83\xe1\x2f\x82\x32\x64\x70\x80\xdc\x0e\xed\x00\x00\x00\ \x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x02\xcb\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x8b\x00\x00\x00\x8b\x08\x06\x00\x00\x00\x51\x19\x6a\xff\ \x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\ \x01\x95\x2b\x0e\x1b\x00\x00\x02\x6d\x49\x44\x41\x54\x78\x9c\xed\ \xdd\xb1\x6d\xdc\x50\x10\x45\xd1\x59\x5b\xa5\xb1\x16\xb5\x40\xa6\ \xca\x96\x2d\xb8\x0d\xd5\xc0\x50\xfd\x30\x93\x23\x02\x4e\x0c\x3d\ \x63\xd7\x20\xe7\xeb\x9c\x0a\x1e\xb0\x77\x23\x02\xf3\x6f\x35\x80\ \x79\x9e\x3f\xcf\xde\xf0\x95\x75\x5d\x6f\x67\x6f\x78\xd4\xcb\xd9\ \x03\x9e\xe5\x7e\xbf\xff\x3a\x7b\xc3\xdf\x2c\xcb\xf2\x7a\xf6\x86\ \x67\xf8\x71\xf6\x00\xfa\x10\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\ \x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\ \x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\ \x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\ \xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\ \x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\ 
\x13\x0b\xb1\x9f\x67\x0f\x78\xd4\x3c\xcf\x9f\x57\x3e\x6b\x5a\x55\ \x35\x4d\xd3\xc7\xbe\xef\xef\xdb\xb6\xbd\x9d\xbd\xe5\x11\xad\x63\ \xe9\x10\xca\x61\x84\x60\xda\xc6\xd2\x29\x94\x43\xf7\x60\x5a\xc6\ \xd2\x31\x94\x43\xe7\x60\xda\xc5\xd2\x39\x94\x43\xd7\x60\x5a\xc5\ \x32\x42\x28\x87\x8e\xc1\xb4\x89\x65\xa4\x50\x0e\xdd\x82\x69\x11\ \xcb\x88\xa1\x1c\x3a\x05\x73\xf9\x58\x46\x0e\xe5\xd0\x25\x98\xdb\ \xd5\x1f\x76\x1a\x3d\x94\x3f\x5d\xfd\x5d\xa2\xdb\x77\xf8\xe7\xf2\ \xb8\x65\x59\x5e\x7d\x1b\x22\x26\x16\x62\x62\x21\x26\x16\x62\x62\ \x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\ \x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\ \x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\ \x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\ \x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\x21\x26\x16\x62\x62\ \x21\x26\x16\x62\xee\xe0\x5e\xc8\xe5\xef\xe0\x9e\x3d\xe0\x2b\xdf\ \xe5\x4e\xef\xb2\x2c\xaf\xeb\xba\x5e\xfa\xf7\xb8\xfc\x39\xf6\x6d\ \xdb\xde\xf6\x7d\x7f\x9f\xa6\xe9\xe3\xec\x2d\xff\x4b\x87\x50\xaa\ \x1a\xc4\x52\x35\x76\x30\x5d\x42\xa9\x6a\x12\x4b\xd5\x98\xc1\x74\ \x0a\xa5\xaa\x51\x2c\x55\x63\x05\xd3\x2d\x94\xaa\x66\xb1\x54\x8d\ \x11\x4c\xc7\x50\xaa\x1a\xc6\x52\xd5\x3b\x98\xae\xa1\x54\x35\x8d\ \xa5\xaa\x67\x30\x9d\x43\xa9\x6a\x1c\x4b\x55\xaf\x60\xba\x87\x52\ \xd5\x3c\x96\xaa\x1e\xc1\x8c\x10\x4a\x95\x6f\x43\xfc\x03\xb1\x10\ \x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\ \x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\ \x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\ \xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\ \x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\ \x13\x0b\x31\xb1\x10\x13\x0b\x31\xb1\x10\x7b\x39\x7b\xc0\xb3\x5c\ \xfd\x61\xa7\x11\xfc\x06\x85\xf5\xfe\x6a\xa4\x26\xa3\xb0\x00\x00\ \x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x01\xe1\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ 
\x00\x00\x13\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x8a\xf0\x61\xe0\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x50\x49\x44\x41\x54\ \x48\x4b\xa5\x95\x39\x4a\x04\x61\x10\x46\xe7\x70\x1e\xc2\x8b\x18\ \x18\x18\x18\x18\x28\x18\x68\x60\x20\x62\x62\x60\x60\x60\x60\x60\ \xe0\x05\x44\x44\x44\x64\x10\x91\x59\x7a\x5f\xa7\xd7\x99\x9e\x25\ \xb1\x6a\xaa\xfe\x02\xc1\x1a\xe9\x16\x5e\xf6\xbe\x47\x85\xd5\x3b\ \x3a\xb9\xe8\x46\x0f\xb2\xba\x59\xb6\x05\x2a\x2c\xab\xd9\xa2\x2d\ \xff\x2e\xcb\xe9\x9c\xd8\xda\xde\xdd\x8c\x2c\xf9\x66\xf7\xb2\xa8\ \x1b\xe2\xcf\x9b\xb2\xe4\x9b\xdd\xcb\xbc\x9a\x11\x70\x73\x15\x3d\ \x68\x80\x95\x25\xdf\xfc\x51\x06\xf7\x2b\x85\x5f\xca\xac\x9c\x12\ \x78\xd3\xbb\xd3\x00\x2b\x4b\xbe\x39\x29\x6a\x02\x4b\xe7\x56\x03\ \xac\x2c\xb9\x4c\xf3\x8a\x00\xb7\xb4\x6e\x34\xc0\xca\x92\xcb\x24\ \xab\x08\x2c\x47\xd7\x1a\x60\x65\xc9\x65\x94\x96\x04\x96\x83\x2b\ \x0d\xb0\xb2\xe4\x32\x4c\x0a\x02\xdc\xe2\xeb\x52\x03\xac\x2c\xb9\ \x0c\xe2\x9c\xc0\xf2\xf3\x5c\x03\xac\x2c\xb9\xf4\xa3\x8c\xc0\xf2\ \xe3\x4c\x03\xac\x2c\xb9\xf4\xc2\x8c\xc0\xb2\x7f\xaa\x01\x56\x96\ \x5c\xba\xc1\x84\x00\x37\x7f\x3f\xd6\x00\x2b\x4b\x2e\x1d\x3f\x25\ \xb0\x7c\x3b\xd4\x00\x2b\x4b\x2e\x6d\x3f\x25\xb0\x7c\x3d\xd0\x00\ \x2b\x4b\x53\x7a\x89\xbd\x06\x5c\xf3\xb2\xaf\x81\xa5\x59\x72\x69\ \xb9\x09\x81\xe5\xf3\x9e\x06\x58\x59\x72\x39\x76\x63\x02\xcb\x27\ \x15\xb0\xb2\x34\xa5\x13\x8f\xd7\x60\xf9\xb8\xa3\x81\xa5\x59\x72\ \x39\x72\x22\x02\xdc\x66\x64\xc9\xe5\xd0\x8e\xda\x62\x4a\x2b\x1c\ \xb6\x84\xcb\x81\x15\xb6\x85\xcb\x6e\xbf\xec\x1b\xdd\xce\x28\xdf\ \xf5\x17\x62\x31\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \ \x00\x00\x01\xb8\ \x89\ 
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x27\x49\x44\x41\x54\ \x48\x4b\xbd\xd2\x3d\x4e\xc3\x40\x10\x86\x61\x0e\xc7\x21\x38\x0a\ \xd4\x74\x48\x29\x42\x1a\x8a\xc8\x14\xd4\x34\x49\xc3\x15\x68\xa8\ \x68\x68\x02\x38\x76\x9c\xf5\x4f\xfc\x9b\x38\x09\x0d\x63\xcd\xe7\ \x45\x5a\x27\x01\xb1\x1a\x4b\xaf\xb6\xd8\x19\x3d\xdb\xec\xd9\xcd\ \x70\x2c\x57\xa3\x57\xf5\x5e\x22\xe8\xe5\x66\x27\x51\x2f\x7a\xb1\ \xde\x72\xe7\x17\x57\xf6\x69\xad\x17\x3d\xaf\x6a\xce\xd8\xfb\x5f\ \x5a\xeb\x45\xcf\xca\x0d\x47\xb3\x2f\xf5\x64\x13\x09\x5a\x3b\xa0\ \xef\xbd\x47\x9b\x0e\xe8\x69\xb1\xe6\x1a\x7d\xf6\x60\x13\x09\x5a\ \x83\xbe\xca\x2b\x8e\x66\xbb\xb7\x3b\x9b\x48\xd0\x1a\xf4\x24\x2b\ \x39\x9a\x6d\x5f\x07\x36\x91\xa0\x35\xe8\x71\x5a\x72\x8d\xfe\x72\ \x6d\x13\x09\x5a\x83\x1e\x26\x05\x47\xb3\xfa\xf9\xd2\x26\x12\xb4\ \x06\x5d\xc5\x39\x47\x33\xfb\xb4\x06\x7d\x19\x65\x12\x41\x0f\xc2\ \x54\x22\xe8\x0b\x95\x4a\x04\xdd\x5f\xae\x24\x82\xee\x05\x89\x44\ \xd0\xe7\x41\xf2\x97\x46\xf7\x53\xfa\x12\x74\x1a\xf7\xc7\x6a\xf5\ \x45\xfc\x6b\x4c\x73\xcd\x03\x9d\x85\x6e\xd0\x5d\x3f\x3e\xdd\xc8\ \xf9\xa1\xf1\x80\x33\x35\x76\xba\x41\xff\xf4\xa3\x13\xdd\x3a\x13\ \x83\xe6\xe8\xde\xd8\x34\x6a\x75\x2f\x92\x08\xfa\x87\x17\x4a\x04\ \xfd\x7d\x1e\x4a\xd4\xea\xae\x92\x08\xfa\xcc\x55\x12\x41\x97\x6a\ \x38\xfe\x06\xe0\x80\xad\xee\xa3\x69\x89\x6f\x00\x00\x00\x00\x49\ \x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x00\xc2\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x29\x00\x00\x00\x29\x08\x06\x00\x00\x00\xa8\x60\x00\xf6\ 
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\ \x01\x95\x2b\x0e\x1b\x00\x00\x00\x64\x49\x44\x41\x54\x58\x85\xed\ \xd9\xc1\x0d\x80\x30\x0c\xc0\xc0\x14\x31\x2b\x2b\x24\x23\x64\x06\ \x96\x85\x09\x90\x78\x58\x6a\x2b\xd9\x13\xdc\xdf\x23\x33\x9f\x58\ \xbc\x33\x22\xa2\xbb\xef\xd9\x90\xaf\xaa\xea\x3a\x66\x23\xfe\x24\ \x92\x4a\x24\x95\x48\x2a\x91\x54\x22\xa9\x44\x52\x89\xa4\x12\x49\ \x25\x92\x4a\x24\x95\x48\x2a\x91\x54\x22\xa9\x44\x52\x89\xa4\x12\ \x49\x25\x92\x4a\x24\xd5\x16\xc8\xb1\xc3\xc7\x79\x01\x28\xc6\x09\ \x1b\x33\x94\xbf\xef\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ \x82\ \x00\x00\x01\xc2\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x31\x49\x44\x41\x54\ \x48\x4b\xbd\xcd\xbb\x4e\xc3\x40\x10\x85\x61\x1e\x8e\x87\xe0\x4d\ \x10\x25\x6d\xba\x88\x36\xb4\xb4\xd4\x3c\x02\x05\x0d\x0d\x05\x12\ \x04\xc7\x8e\xb3\xbe\xc4\xd7\xc4\x49\x68\x38\xab\x99\x1d\xad\x28\ \x67\xb5\x96\x7e\x59\x23\x9d\xd5\xe7\xab\xc5\x72\x15\x2f\xab\x8f\ \xd3\x25\x46\xac\x0f\xc7\x73\x8c\x66\xd1\xfb\xc3\x89\xba\xbe\xb9\ \x0b\x4f\xb4\x59\xf4\x6e\x9c\xa8\x7f\xef\x74\x89\x36\x8b\xde\x0e\ \x47\x0a\x9b\xdc\xba\x7c\x61\x16\xbd\xe9\x0f\x14\x36\xb9\x75\xf9\ \x02\xeb\xfb\x6e\xa4\xb0\xc9\xad\xcb\x17\x58\xaf\xdb\x81\xc2\x26\ \xb7\x2e\x5f\x60\xbd\x6a\x06\x0a\x1b\xfa\x35\x2f\xea\x2f\x12\x8d\ \xf5\xa2\xee\x29\xfb\x28\x7b\x0e\x09\x82\x68\xac\x9b\xaa\xa3\xb0\ \x5d\xd6\x4f\x21\x41\x10\x8d\xf5\x5d\xd9\x52\xd8\xce\x9f\x8f\x21\ 
\x41\x10\x8d\xf5\xbc\x68\x28\xab\x7f\x3c\x84\x04\x41\x34\xd6\xb7\ \xa6\xa1\xb0\x9d\xde\x17\x21\x41\x10\x8d\xf5\x6c\xb7\xa7\xb0\x4d\ \x6f\xf7\x21\x41\x10\x8d\xf5\x34\xaf\x29\x6c\xf6\x07\xaf\xb7\xea\ \x2f\x12\x8d\xf5\x4d\x5e\x53\xd8\xe4\xd6\xe5\x0b\x4e\xdf\x56\x94\ \xdd\xdc\xad\xcb\x17\x58\x4f\xb2\x8a\xc2\x26\xb7\x2e\x5f\x60\xfd\ \x27\x2b\x29\x6c\x72\xeb\xf2\x05\xa7\xa7\x25\x65\x37\x77\xeb\xf2\ \x05\xd6\xd7\x69\x41\x61\x0b\x4f\x34\xd6\xbf\x37\x45\x8c\x9c\x9e\ \x98\x18\xb1\xfe\x95\x98\x18\xb1\x1e\xab\xe5\xea\x0f\x0e\x98\x91\ \x35\xc6\xa1\x36\xaa\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ \x82\ \x00\x00\x01\x4e\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x0a\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x51\x4b\xcb\xc2\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0b\x12\x00\x00\x0b\x12\x01\xd2\xdd\ \x7e\xfc\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x00\xbd\x49\x44\x41\x54\ \x38\x4f\x8d\xc9\xc9\x8d\xc2\x50\x00\x04\x51\x82\x23\x08\x52\x21\ \x05\x6e\xdc\x89\x6d\x00\x63\x63\xbc\xe1\x95\x65\xe6\x34\xa0\xea\ \xdf\xa7\x0f\xb2\x54\x97\xd2\x5b\x6c\xb6\xbb\x2f\xbd\x79\x7a\xfc\ \x45\x13\x8f\xf7\xdf\x68\xf3\x78\xb8\x3d\x69\xb9\x5a\xbf\xf2\xce\ \xe3\x7e\x7a\x10\xec\x9d\xc7\xdd\x78\x27\xd8\x3b\x8f\xdb\xe1\x46\ \xb0\x57\x7c\xed\x27\x82\xbd\xe2\xa6\x1b\x09\xf6\x8a\xeb\x76\x24\ \xd8\x2b\x2e\x9b\x81\x60\xaf\xb8\xa8\x7b\x82\xbd\xe2\x4b\xd5\x11\ \xec\x15\xe7\x65\x4b\xb0\x57\x7c\x2e\x5a\x82\xbd\xe2\xec\x72\x25\ \xd8\x2b\x4e\xf3\x86\x60\xaf\xf8\x94\x37\x04\x7b\x03\x9f\x6b\x12\ \x87\x15\x27\x59\x4d\xb0\x57\x7c\xcc\x2a\x82\xbd\x81\xd3\x8a\xc4\ \x61\xc5\x87\xb4\x24\xd8\x2b\xde\x9f\xca\x68\x81\x93\x22\x9a\xf8\ \x27\x29\xa2\x89\x3f\xb6\xdd\xfd\x03\xaf\x34\xbc\x27\xb0\x9e\x89\ \xd7\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ 
\x00\x00\x02\x22\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x91\x49\x44\x41\x54\ \x48\x4b\xbd\xcd\xcb\x4a\xc3\x50\x10\xc6\x71\x1f\xce\x87\xf0\x45\ \x5c\xb8\x70\xe1\xc2\x85\x82\x8b\x8a\xd6\x0b\x45\x5a\x5b\x70\xe1\ \xc2\x85\xb5\x5d\xf8\x02\x22\x22\x22\x22\x22\x52\x6b\x9a\x5e\x92\ \x26\x4d\xdb\xf4\xee\xc6\x19\x32\x19\x0e\xa7\x65\x7a\x10\x52\xf8\ \xad\x26\xdf\xf9\x67\x6d\x3f\x95\x49\x0e\xd6\x07\xe3\x59\x12\xa8\ \x1e\x8e\xa6\x49\x58\x49\xbd\x3f\x9c\x18\x5a\xdf\xd8\x5a\x8a\xc7\ \x2b\xa9\xf7\x06\x63\x43\x5a\x68\x21\x1e\xaf\xa4\xde\x0d\x47\x86\ \xe0\xf1\xaf\x7b\x2f\x80\x01\x8f\xff\x55\x6f\x95\x05\x0b\xea\x41\ \x7f\x68\x08\xeb\x8d\x5b\x01\x0c\x78\x4c\xf5\x4e\x6f\xa0\x3a\x2b\ \x94\xb5\x0b\xc3\xba\x7d\x23\x80\x01\x8f\xa9\xee\x77\x43\x15\x2c\ \x4e\x0b\x25\xed\x18\x81\x4f\x33\xeb\x5a\x00\x03\x1e\x53\xdd\x0b\ \x42\x15\x2c\xf0\x07\xf9\x92\x76\x07\x58\xaf\x5e\x09\x60\xc0\x63\ \xaa\xbb\x7e\x5f\x15\xd5\xc1\xc9\x65\x69\xfe\xd3\xac\x92\x17\xc0\ \x80\xc7\x54\x77\xbc\x9e\x8a\xeb\x60\xfe\xd3\xf4\xeb\x42\xa0\x3e\ \xa1\x7a\xab\xdd\x55\x71\x3a\x9d\xbb\x9b\xff\x34\xfd\x3c\x17\xc0\ \x80\xc7\x54\x6f\xba\x81\x2a\x4a\x1f\xe7\x8a\xda\x1d\x60\xfd\x23\ \x2d\x80\x01\x8f\xa9\xde\x70\x02\x15\xa6\xb3\x45\xed\x18\xc1\xfa\ \xfb\xa1\x00\x06\x3c\xa6\x7a\xbd\xd5\x51\x1d\x65\x8b\xda\x85\xc1\ \xe3\xc9\xdb\x81\x00\x06\x3c\xa6\xba\xdd\xf4\x0d\x61\xfd\x75\x4f\ \x00\x03\x1e\x53\xbd\xd6\xf4\x0d\x61\xfd\x65\x57\x00\x03\x1e\xc7\ \xf5\x86\x67\x08\x1e\x8f\x9f\x77\x04\x58\x8f\xc7\x54\xb7\xea\x9e\ 
\x21\xac\x3f\x6d\x0b\x60\xc0\x63\xaa\xff\xd4\xdb\x86\xb0\xfe\x28\ \x81\x01\x8f\xe3\xba\xdd\x36\x84\xf5\x87\x4d\x01\xd6\xe3\x31\xd5\ \xab\xb6\x6b\x08\x1e\x2f\xc5\x63\xaa\x7f\xd7\xdc\x24\xc4\x75\xcb\ \x49\x02\xd5\x2b\x96\x93\x04\xaa\x27\x25\x95\xf9\x03\x6c\x41\xe7\ \xb2\x07\xe6\xaf\xd1\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ \x82\ \x00\x00\x02\x24\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x93\x49\x44\x41\x54\ \x48\x4b\xbd\xcc\xbb\x4a\x03\x61\x10\x05\x60\x1f\xce\x87\xf0\x45\ \x2c\x52\x58\x58\x58\x28\x58\xc4\xc2\x42\x82\x8d\x85\x85\x85\x85\ \x85\x85\x2f\x20\x22\x22\x22\x22\x22\x31\xe6\x9e\xcd\xfd\xe6\x66\ \xb3\x8d\x27\xcc\xec\xe1\xe7\xff\x19\x10\xc3\x06\xbe\xe2\x30\x73\ \x38\x5b\x47\xc5\x52\x7e\x56\xeb\xb3\x78\x99\x07\x5d\x9f\xfe\x24\ \x79\xd8\xc8\xfa\x64\xbe\x10\xdb\x3b\x85\xf5\x71\x6d\x23\xeb\xe3\ \x59\x2c\xbc\xde\xff\x70\x6d\x23\xeb\xa3\xe9\x8f\xc0\x8f\x99\xd2\ \xe8\xce\xe2\x35\xc1\x5d\xf8\xd3\x3a\x8e\x69\xfb\x36\x64\x95\x99\ \x75\x7d\x38\x99\x0b\xfc\x98\x29\x6d\xde\x58\xbc\x26\xb8\x0b\xba\ \x3e\x18\xcf\x04\x7e\xcc\x84\x63\x5a\xbf\x0e\x59\x65\x66\x5d\xef\ \x8f\xa6\x02\x3f\x66\x5a\x56\xaf\x2c\x5e\x13\xdc\x05\x5d\xef\x0d\ \xa7\x02\x3f\x66\xc2\x71\x59\xb9\x0c\x59\x65\x66\x5d\x8f\xfa\x13\ \x81\x1f\x33\x2d\xcb\x17\x16\xaf\x09\xee\x82\xae\x77\x7a\x63\x81\ \x1f\x33\xe1\x98\x7c\x9e\x87\xac\x32\xb3\xae\xb7\xbb\x23\x81\x1f\ \x33\x25\x1f\x67\x16\xaf\x09\xee\x82\xae\xb7\xa2\xa1\xc0\x8f\x99\ \x70\x4c\xde\x4f\x43\x56\x99\x59\xd7\x9b\x9d\xa1\xc0\x8f\x99\x92\ 
\xb7\x13\x8b\xd7\x04\x77\x41\xd7\x1b\xed\x81\xc0\x8f\x99\x70\x5c\ \xbc\x1e\x87\xac\x32\xb3\xae\xd7\x5b\x7d\x81\x1f\x33\x2d\x5e\x0e\ \x2d\x5e\x13\xdc\x05\x5d\xaf\xb5\xfa\x02\x3f\x66\xc2\x71\xf1\x7c\ \x10\xb2\xca\xcc\xd9\x7a\xb3\x27\x56\xbf\x2c\x53\xfc\xb4\x6f\xf1\ \x9a\xe0\x2e\xe8\x7a\xb5\xd1\x13\xf8\x31\x13\x8e\xf1\xe3\x5e\xc8\ \x2a\x33\xeb\xfa\x77\xa3\x2b\xf0\x63\xa6\xf8\xa1\x60\xf1\x9a\xe0\ \x2e\x64\xeb\xf5\xae\x58\xfd\xb2\x4c\x38\xc6\xf7\xbb\x21\xab\xcc\ \xac\xeb\x95\x7a\x24\xf0\x5b\x1f\xd7\x74\xfd\xab\x16\xe5\x21\x5b\ \xaf\x76\xf2\xa0\xeb\xe5\x6a\x27\x0f\xba\x9e\x97\x62\xe9\x17\xda\ \xb5\x98\x10\x31\x42\x5d\xab\x00\x00\x00\x00\x49\x45\x4e\x44\xae\ \x42\x60\x82\ \x00\x00\x00\xed\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x0a\x08\x02\x00\x00\x00\xc3\xd7\x12\x46\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0b\x12\x00\x00\x0b\x12\x01\xd2\xdd\ \x7e\xfc\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x00\x5c\x49\x44\x41\x54\ \x38\x4f\x63\xe8\x9a\x38\x87\x76\x08\x64\xfa\xa7\xaf\x3f\x68\x81\ \xa0\xa6\x7f\xf8\xfc\x8d\x16\x08\x6a\xfa\xbb\x8f\x5f\x68\x81\xa0\ \xa6\xbf\x7a\xfb\x11\x82\x5c\x22\x2a\x29\x47\x70\xd3\xa0\xa6\x3f\ \x7b\xf9\x1e\x82\xd0\xd4\x91\x87\xe0\xa6\x41\x4d\x7f\xfc\xec\x2d\ \x2d\x10\xd4\xf4\x87\x4f\x5e\xd3\x02\x41\x4d\xbf\xff\xe8\x25\x2d\ \x10\xd4\x74\x5a\xa1\x89\x73\x00\xf8\x06\xba\x5a\xe8\x93\x6f\x68\ \x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x01\x3f\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x13\x08\x02\x00\x00\x00\xe7\x0e\x41\x15\ \x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc2\x00\x00\x0e\xc2\ 
\x01\x15\x28\x4a\x80\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\ \x74\x77\x61\x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\ \x76\x33\x2e\x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x00\xbb\x49\ \x44\x41\x54\x38\x4f\xbd\xd5\x2b\x0e\xc2\x50\x10\x85\xe1\x2e\x8e\ \x45\xb0\x15\x34\x8e\x04\x81\x43\xe0\xd0\x78\x56\x81\xc2\xd4\xa0\ \x48\xa0\xa5\xa5\xbd\x8f\x3e\x31\x9c\x66\x26\x13\x04\x6e\x18\x92\ \xdf\x4d\xf2\xdd\x23\x9a\x34\x59\x6d\x76\x76\x25\xa0\x63\x37\x5a\ \x04\x79\xd2\x43\x3b\x58\xf4\x17\xdd\x37\x3d\x35\x9b\x2f\xf4\x89\ \xc6\xdb\x6d\x75\x17\x3b\x4a\x3f\x1c\x82\x68\xbc\xdd\x56\xaf\x43\ \x4b\xe1\xe5\x57\x76\xd4\x04\x41\x34\xde\xfe\xa9\x8f\xd7\x83\xa6\ \x2f\x7a\xe5\x1b\x0a\xb7\xf1\xb2\xd7\x04\x41\x34\xde\xfe\x74\x91\ \xc2\x6d\x48\xb7\x9a\x20\x88\xc6\x7a\x59\x07\x0a\xb7\xfe\xbc\xd6\ \x04\x41\x34\xd6\x8b\x2a\x50\x93\x7e\x5a\x6a\x82\x20\x1a\xeb\x79\ \xe9\xa9\x9f\x7c\xef\xa2\xb1\x9e\x15\xce\x22\xd6\xef\x8f\xda\x22\ \xd6\x6f\x79\x65\x11\xeb\x76\xff\xa6\x37\x06\x80\x09\x57\x1d\xbe\ \x2e\x15\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x02\x26\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x95\x49\x44\x41\x54\ \x48\x4b\xbd\xcd\xbb\x4a\x03\x51\x10\xc6\x71\x1f\xce\x87\xf0\x45\ \x2c\x2c\x2c\x2c\x2c\x14\x2c\x22\x1a\xa3\x04\x49\x4c\xc0\xc2\xc2\ \xc2\x68\x0a\x5f\x40\x44\x44\x44\x44\x44\x62\xdc\x5c\x77\xb3\x9b\ \xcd\xfd\x66\xe3\x0c\x33\x3b\x84\x93\x30\x07\x84\x0d\xfc\xaa\x73\ \xbe\xf3\x3f\x6b\xfb\x89\x74\x7c\xb0\x3e\x18\xcf\xe2\xc0\xf5\xfe\ \x68\x1a\x87\x95\xd4\x7b\xc3\x09\x59\xdf\xd8\xb2\x92\xb1\xd5\x4a\ 
\xea\xdd\xc1\x98\x18\xa1\xa5\x64\x6c\xb5\x92\x7a\xa7\x3f\x22\xf0\ \xf8\xd7\xbb\x57\xc0\x40\xc6\x56\xcb\xea\xcd\xa2\xe2\x3f\xf5\xb0\ \x37\x24\x58\xaf\xdf\x28\x60\x20\x63\x2b\xae\xb7\xbb\x03\x82\xf5\ \xea\xb5\x02\x06\x32\x36\x9c\xe6\x8b\xc6\x09\xd7\x83\x4e\x9f\xc0\ \xe3\x99\x73\xa5\x80\x81\x8c\xe7\xa5\xf2\x77\x8b\x57\x5c\xf7\xc3\ \x3e\xc1\x7a\xf9\x52\x01\x03\x19\x8b\x54\x0e\xd3\x8b\x57\x5c\xf7\ \x82\x1e\xc1\x7a\x29\xa7\x80\x81\x8c\xc9\xc9\x05\xa7\x17\xaf\xb8\ \xee\xfa\x5d\x02\x8b\xe9\xd7\xb9\x02\x06\x32\x96\x27\xc2\xb8\xe2\ \x7a\xb3\xd5\x21\xb0\x98\x7e\x9e\x29\x60\x20\x63\x92\xcc\xde\x4a\ \xdd\xb8\xe2\x7a\xc3\x0b\x09\xd6\x3f\x92\x0a\x18\xc8\x58\x1c\x67\ \x0b\x54\x37\xce\xb9\x5e\x77\x43\x82\xf5\xf7\x43\x05\x0c\x64\x3c\ \xef\x38\x83\x1f\x18\x87\x5c\xaf\x35\xdb\x04\x16\x93\xb7\x03\x05\ \x0c\x64\x6c\x38\xca\x14\x8c\x13\xae\x57\x1b\x01\xc1\xfa\xeb\x9e\ \x02\x06\x32\xb6\xe2\x7a\xa5\x11\x10\xac\xbf\xec\x2a\x60\x20\x63\ \xab\xa8\x5e\xf7\x09\x3c\x1e\x3f\xef\x28\xb0\x1e\x8d\xad\xb8\xee\ \xd4\x7c\x82\xf5\xa7\x6d\x05\x0c\x64\x6c\xc5\xf5\x9f\x5a\x8b\x60\ \xfd\x51\x03\x03\x19\x5b\x45\xf5\x6a\x8b\x60\xfd\x61\x53\x81\xf5\ \x68\x6c\xc5\xf5\x72\xd5\x23\xf0\xd8\x4a\xc6\x56\x5c\xff\xae\x78\ \x71\x88\xea\x8e\x1b\x07\xae\x97\x1c\x37\x0e\x5c\x8f\x4b\x22\xfd\ \x07\x5d\xb2\xe7\xb2\x6f\xdb\xf3\x18\x00\x00\x00\x00\x49\x45\x4e\ \x44\xae\x42\x60\x82\ \x00\x00\x01\xbb\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x1f\x00\x00\x00\x1f\x08\x02\x00\x00\x00\x90\xcc\x81\x6e\ \x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\ \x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\ \x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\ \xa8\x64\x00\x00\x00\x1a\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\ \x72\x65\x00\x50\x61\x69\x6e\x74\x2e\x4e\x45\x54\x20\x76\x33\x2e\ \x35\x2e\x31\x30\x30\xf4\x72\xa1\x00\x00\x01\x2a\x49\x44\x41\x54\ \x48\x4b\xbd\xd2\xbd\x4e\x02\x51\x10\x86\x61\x2f\xce\x8b\xf0\x52\ 
\xb0\xb6\x33\xb1\xa0\x33\x84\xce\x68\x67\xcf\x2d\xd8\x50\xd9\xd8\ \xa0\x2e\xbb\x2c\xfb\xc7\xfe\xc2\x02\x36\xce\xc9\x7c\x3b\x26\x07\ \x23\xb8\x27\x43\xf2\x16\x84\x6f\xf2\x9c\x66\x2f\x6e\x87\x63\xbd\ \x8c\xde\xb4\x7b\x8d\xa0\xd7\x9b\x9d\x46\x67\xd1\xab\xf5\x56\xa3\ \xb3\xe8\x65\xd3\xfe\xd1\xe8\x69\x72\x79\x75\x7d\x18\xfd\x6f\x5d\ \x5a\x9d\xa4\x53\x87\x0f\x1c\xa5\x29\xe8\x45\xbd\x39\xda\xe8\xf1\ \xe7\x01\xfa\x6d\xad\xbf\xf6\x0f\x9d\xe2\x07\x4e\xa4\x29\xe8\x79\ \xb5\xd6\x08\xfa\xaa\x6c\x34\x82\x9e\x15\xb5\x46\xd0\xd3\xbc\xd6\ \x08\x7a\x9c\x55\x1a\x41\x8f\xd2\x92\x93\x6f\xce\x25\xd1\xa0\x2f\ \x93\x82\xb3\xee\xfa\x25\x1a\xf4\x30\xce\x39\xeb\xae\x5f\xa2\x41\ \x5f\x44\x39\x67\xdd\xf5\x4b\x34\xe8\xc1\x72\xc5\xd1\xf6\x15\x4d\ \x5c\x22\x41\x34\xe8\x7e\x98\x71\xb4\xed\xfd\x67\x97\x48\x10\x0d\ \xfa\x3c\xcc\x38\xa3\xcf\x1e\x5c\x22\x41\xb4\x4e\x5f\xa4\x1c\x6d\ \xbb\xb7\x7b\x97\x8c\xde\x69\xd0\xbd\x20\xe5\x68\xdb\xbe\xde\xb9\ \x44\x82\x68\xd0\x3f\x83\x84\x33\xfa\xf4\xc6\x25\x12\x44\xeb\x74\ \x3f\xe1\x68\x6b\x5f\x06\x2e\x19\xbd\xd3\xa0\x7f\xf8\x31\x47\x9b\ \x7b\xa2\x41\x7f\x9f\xc7\x1a\x75\xba\x17\x69\x04\x7d\xe6\x45\x1a\ \x41\xd7\x6a\x38\xfe\x06\x3c\xec\xc9\x88\xb5\xd8\x55\x59\x00\x00\ \x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\ \x00\x00\x02\x62\ \x89\ \x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\ \x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3\x3e\x61\xcb\ \x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\ \x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0e\xc4\x00\x00\x0e\xc4\ \x01\x95\x2b\x0e\x1b\x00\x00\x02\x04\x49\x44\x41\x54\x78\x9c\xed\ \xd6\xb1\x0d\x03\x01\x0c\x03\x31\x21\x13\x64\xff\x65\x3f\x6d\xbc\ \x80\x2d\xe0\xa9\x8e\xfd\x15\x4a\xe6\xbe\xfc\x2e\x7f\xfe\xf5\x3c\ \x4f\xf8\x9d\x3e\x2f\x91\x6f\xfc\x49\xba\x4a\xe4\x7d\xff\xaf\xa6\ \x4c\xde\xb1\x0f\xc0\x49\x0a\x4a\xe4\x1b\xfb\x00\x3c\x56\x53\x26\ \xef\xd8\x07\xe0\x24\x05\x25\xf2\x8d\x7d\x00\x1e\xab\x29\x93\x77\ \xec\x03\x70\x92\x82\x12\xf9\xc6\x3e\x00\x8f\xd5\x94\xc9\x3b\xf6\ 
\x01\x38\x49\x41\x89\x7c\x63\x1f\x80\xc7\x6a\xca\xe4\x1d\xfb\x00\ \x9c\xa4\xa0\x44\xbe\xb1\x0f\xc0\x63\x35\x65\xf2\x8e\x7d\x00\x4e\ \x52\x50\x22\xdf\xd8\x07\xe0\xb1\x9a\x32\x79\xc7\x3e\x00\x27\x29\ \x28\x91\x6f\xec\x03\xf0\x58\x4d\x99\xbc\x63\x1f\x80\x93\x14\x94\ \xc8\x37\xf6\x01\x78\xac\xa6\x4c\xde\xb1\x0f\xc0\x49\x0a\x4a\xe4\ \x1b\xfb\x00\x3c\x56\x53\x26\xef\xd8\x07\xe0\x24\x05\x25\xf2\x8d\ \x7d\x00\x1e\xab\x29\x93\x77\xec\x03\x70\x92\x82\x12\xf9\xc6\x3e\ \x00\x8f\xd5\x94\xc9\x3b\xf6\x01\x38\x49\x41\x89\x7c\x63\x1f\x80\ \xc7\x6a\xca\xe4\x1d\xfb\x00\x9c\xa4\xa0\x44\xbe\xb1\x0f\xc0\x63\ \x35\x65\xf2\x8e\x7d\x00\x4e\x52\x50\x22\xdf\xd8\x07\xe0\xb1\x9a\ \x32\x79\xc7\x3e\x00\x27\x29\x28\x91\x6f\xec\x03\xf0\x58\x4d\x99\ \xbc\x63\x1f\x80\x93\x14\x94\xc8\x37\xf6\x01\x78\xac\xa6\x4c\xde\ \xb1\x0f\xc0\x49\x0a\x4a\xe4\x1b\xfb\x00\x3c\x56\x53\x26\xef\xd8\ \x07\xe0\x24\x05\x25\xf2\x8d\x7d\x00\x1e\xab\x29\x93\x77\xec\x03\ \x70\x92\x82\x12\xf9\xc6\x3e\x00\x8f\xd5\x94\xc9\x3b\xf6\x01\x38\ \x49\x41\x89\x7c\x63\x1f\x80\xc7\x6a\xca\xe4\x1d\xfb\x00\x9c\xa4\ \xa0\x44\xbe\xb1\x0f\xc0\x63\x35\x65\xf2\x8e\x7d\x00\x4e\x52\x50\ \x22\xdf\xd8\x07\xe0\xb1\x9a\x32\x79\xc7\x3e\x00\x27\x29\x28\x91\ \x6f\xec\x03\xf0\x58\x4d\x99\xbc\x63\x1f\x80\x93\x14\x94\xc8\x37\ \xf6\x01\x78\xac\xa6\x4c\xde\xb1\x0f\xc0\x49\x0a\x4a\xe4\x1b\xfb\ \x00\x3c\x56\x53\x26\xef\xd8\x07\xe0\x24\x05\x25\xf2\x8d\x7d\x00\ \x1e\xab\x29\x93\x77\xec\x03\x70\x92\x82\x12\xf9\xc6\x3e\x00\x8f\ \xd5\x94\xc9\x3b\xf6\x01\x38\x49\x41\x89\x7c\x63\x1f\x80\xc7\x6a\ \xca\xe4\x1d\xfb\x00\x9c\xa4\xa0\x44\xbe\xb1\x0f\xc0\x63\x35\x65\ \xf2\x8e\x7d\x00\x4e\x52\x50\x22\xdf\xd8\x07\xe0\xb1\x9a\x32\x79\ \xc7\x3e\x00\x27\x29\x28\x91\x6f\xec\x03\xbc\xdc\x3f\xe4\x79\x69\ \xe9\x67\xab\xcf\x62\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\ \x82\ "<line_sep>qt_resource_name=b"\ \x00\x0b\ \x05\x55\xc9\xe3\ \x00\x64\ \x00\x6f\x00\x63\x00\x6b\x00\x5f\x00\x69\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\ \x00\x0d\ \x0c\x46\x04\x47\ \x00\x63\ 
\x00\x72\x00\x6f\x00\x73\x00\x73\x00\x5f\x00\x62\x00\x6f\x00\x78\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0a\ \x0a\xc8\x6f\xe7\ \x00\x63\ \x00\x65\x00\x6e\x00\x74\x00\x65\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x10\ \x0c\x5a\x16\x47\ \x00\x63\ \x00\x72\x00\x6f\x00\x73\x00\x73\x00\x5f\x00\x65\x00\x78\x00\x5f\x00\x62\x00\x6f\x00\x78\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x11\ \x05\x0d\xa3\xa7\ \x00\x74\ \x00\x68\x00\x69\x00\x6e\x00\x5f\x00\x76\x00\x65\x00\x72\x00\x74\x00\x69\x00\x63\x00\x61\x00\x6c\x00\x2e\x00\x70\x00\x6e\x00\x67\ \ \x00\x0f\ \x0b\x70\x3f\xe7\ \x00\x61\ \x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x6e\x00\x6f\x00\x72\x00\x74\x00\x68\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0d\ \x04\x14\x00\x47\ \x00\x67\ \x00\x75\x00\x69\x00\x64\x00\x65\x00\x5f\x00\x62\x00\x6f\x00\x78\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x12\ \x0a\x7a\xa0\x07\ \x00\x73\ \x00\x70\x00\x6c\x00\x69\x00\x74\x00\x5f\x00\x76\x00\x65\x00\x72\x00\x74\x00\x69\x00\x63\x00\x61\x00\x6c\x00\x2e\x00\x70\x00\x6e\ \x00\x67\ \x00\x10\ \x04\xfc\x40\xa7\ \x00\x62\ \x00\x61\x00\x72\x00\x5f\x00\x76\x00\x65\x00\x72\x00\x74\x00\x69\x00\x63\x00\x61\x00\x6c\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0e\ \x0b\x8a\xe6\x07\ \x00\x61\ \x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x65\x00\x61\x00\x73\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x14\ \x0b\x9f\xd1\x07\ \x00\x73\ \x00\x70\x00\x6c\x00\x69\x00\x74\x00\x5f\x00\x68\x00\x6f\x00\x72\x00\x69\x00\x7a\x00\x6f\x00\x6e\x00\x74\x00\x61\x00\x6c\x00\x2e\ \x00\x70\x00\x6e\x00\x67\ \x00\x12\ \x0d\x7f\x14\x07\ \x00\x62\ \x00\x61\x00\x72\x00\x5f\x00\x68\x00\x6f\x00\x72\x00\x69\x00\x7a\x00\x6f\x00\x6e\x00\x74\x00\x61\x00\x6c\x00\x2e\x00\x70\x00\x6e\ \x00\x67\ \x00\x13\ \x0c\x9c\x17\xe7\ \x00\x74\ \x00\x68\x00\x69\x00\x6e\x00\x5f\x00\x68\x00\x6f\x00\x72\x00\x69\x00\x7a\x00\x6f\x00\x6e\x00\x74\x00\x61\x00\x6c\x00\x2e\x00\x70\ \x00\x6e\x00\x67\ \x00\x0e\ \x0f\x8a\xe0\xc7\ \x00\x61\ 
\x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x77\x00\x65\x00\x73\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0f\ \x0e\x70\x21\xe7\ \x00\x61\ \x00\x72\x00\x72\x00\x6f\x00\x77\x00\x5f\x00\x73\x00\x6f\x00\x75\x00\x74\x00\x68\x00\x2e\x00\x70\x00\x6e\x00\x67\ \x00\x0e\ \x07\x04\x9f\x87\ \x00\x62\ \x00\x61\x00\x63\x00\x6b\x00\x67\x00\x72\x00\x6f\x00\x75\x00\x6e\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\ "<line_sep>qt_resource_struct=b"\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\ \x00\x00\x00\x00\x00\x02\x00\x00\x00\x0f\x00\x00\x00\x02\ \x00\x00\x00\xc8\x00\x00\x00\x00\x00\x01\x00\x00\x0a\xb3\ \x00\x00\x01\x12\x00\x00\x00\x00\x00\x01\x00\x00\x0d\x3f\ \x00\x00\x00\x7c\x00\x00\x00\x00\x00\x01\x00\x00\x07\x12\ \x00\x00\x02\x24\x00\x00\x00\x00\x00\x01\x00\x00\x18\xfc\ \x00\x00\x00\xe8\x00\x00\x00\x00\x00\x01\x00\x00\x0b\x79\ \x00\x00\x00\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x02\x65\ \x00\x00\x00\xa4\x00\x00\x00\x00\x00\x01\x00\x00\x08\xf7\ \x00\x00\x01\x38\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x91\ \x00\x00\x01\x5a\x00\x00\x00\x00\x00\x01\x00\x00\x10\xb7\ \x00\x00\x00\x1c\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\ \x00\x00\x00\x56\x00\x00\x00\x00\x00\x01\x00\x00\x04\x43\ \x00\x00\x01\xb2\x00\x00\x00\x00\x00\x01\x00\x00\x13\xd0\ \x00\x00\x01\x88\x00\x00\x00\x00\x00\x01\x00\x00\x12\xdf\ \x00\x00\x02\x00\x00\x00\x00\x00\x00\x01\x00\x00\x17\x3d\ \x00\x00\x01\xde\x00\x00\x00\x00\x00\x01\x00\x00\x15\x13\ "<def_stmt>qInitResources <block_start>QtCore.qRegisterResourceData(0x01 qt_resource_struct qt_resource_name qt_resource_data)<block_end><def_stmt>qCleanupResources <block_start>QtCore.qUnregisterResourceData(0x01 qt_resource_struct qt_resource_name qt_resource_data)<block_end>qInitResources()<line_sep>
"""Functions for running the rpm build commands."""<import_from_future_stmt> division<import_from_future_stmt> absolute_import<import_from_future_stmt> print_function<import_from_future_stmt> unicode_literals<import_stmt>glob<import_stmt>os<import_stmt>shlex<import_stmt>shutil<import_stmt>subprocess<import_stmt>sys<import_stmt>tempfile<line_sep>IGNORED_PATTERNS=('*.pyc' '*.pyo' '*.pyd' '__pycache__' )<class_stmt>RpmProcessError(subprocess.CalledProcessError)<block_start>"""An exception thrown during the RPM build process. This exception extends the subprocess CalledProcessError to add standard out and standard error string fields. """<def_stmt>__init__ self returncode cmd output=<none> stdout=<none> stderr=<none><block_start>"""Initialize the exception with process information."""<line_sep>super(RpmProcessError self).__init__(returncode cmd)<line_sep>self.output=output<or>''<line_sep>self.stdout=stdout<or>''<line_sep>self.stderr=stderr<or>''<block_end><block_end><def_stmt>topdir <block_start>"""Get the absolute path to a valid rpmbuild %_topdir."""<line_sep>top=tempfile.mkdtemp(prefix='rpmvenv')<line_sep>os.makedirs(os.path.join(top 'SOURCES'))<line_sep>os.makedirs(os.path.join(top 'SPECS'))<line_sep>os.makedirs(os.path.join(top 'BUILD'))<line_sep>os.makedirs(os.path.join(top 'RPMS'))<line_sep>os.makedirs(os.path.join(top 'SRPMS'))<line_sep><return>top<block_end><def_stmt>write_spec top spec<block_start>"""Write a SPEC file to the SOURCES directory. Args: top: The absolute path to the %_topdir. spec: The contents of the SPEC file. Returns: The absolute path to the SPEC file. """<line_sep>path=os.path.join(top 'SOURCES' 'package.spec')<with_stmt>open(path 'w')<as>specfile<block_start>specfile.write(spec)<block_end><return>path<block_end><def_stmt>copy_source top source name=<none><block_start>"""Copy the source directory into the SOURCES directory. Args: top: The absolute path to the %_topdir. source: The absolute path to the source directory. 
name: The name of the directory to place in SOURCES. Returns: The absolute path to the copy. """<line_sep>name=name<or>os.path.basename(source)<line_sep>path=os.path.join(top 'SOURCES' name)<line_sep>shutil.copytree(source path ignore=shutil.ignore_patterns(*IGNORED_PATTERNS) )<line_sep><return>path<block_end><def_stmt>verbose_popen cmd<block_start>"""Run a command with streaming output. Args: cmd (str): A command to run with popen. Raises: CalledProcessError: If the returncode is not 0. """<line_sep>proc=subprocess.Popen(shlex.split(cmd))<line_sep>proc.wait()<if_stmt>proc.returncode<ne>0<block_start><raise>subprocess.CalledProcessError(returncode=proc.returncode cmd=cmd )<block_end><block_end><def_stmt>quiet_popen cmd<block_start>"""Run a command with captured output. Args: cmd (str): A command to run with popen. Raises: RpmProcessError: If the returncode is not 0. """<line_sep>proc=subprocess.Popen(shlex.split(cmd) stdout=subprocess.PIPE stderr=subprocess.PIPE )<line_sep>out,err=proc.communicate()<if_stmt>proc.returncode<ne>0<block_start><raise>RpmProcessError(returncode=proc.returncode cmd=cmd output=err stdout=out stderr=err )<block_end><block_end><def_stmt>build specfile top=<none> verbose=<false><block_start>"""Run rpmbuild with options. Args: specfile: The absolute path to the SPEC file to build. top: The %_topdir to use during the build. The default is a temporary directory which is automatically generated. verbose: Whether or not to stream the rpmbuild output in real time or only during errors. Returns: The absolute path to the new RPM. """<line_sep>top=top<or>topdir()<line_sep>cmd="rpmbuild -ba --define='_topdir {0}' {1}".format(top specfile ).encode('ascii')<line_sep># PY3 shlex only works with unicode strings. Convert as needed. 
<if_stmt>sys.version_info[0]<g>2<block_start>cmd=cmd.decode('utf8')<block_end><if_stmt><not>verbose<block_start>quiet_popen(cmd)<block_end><else_stmt><block_start>verbose_popen(cmd)<block_end><return>glob.glob(os.path.join(top 'RPMS' '**' '*.rpm')).pop()<block_end>
"""Define variables for use in patterns all over Myia."""<import_from_stmt>..ir Graph<import_from_stmt>.misc Namespace<import_from_stmt>.unify SVar Var var<def_stmt>constvar cls=object<block_start>"""Return a variable matching a Constant of the given type."""<def_stmt>_is_c n<block_start><return>n.is_constant(cls)<block_end><return>var(_is_c)<block_end>##################### # Generic variables # ##################### X=Var("X")<line_sep>Y=Var("Y")<line_sep>Z=Var("Z")<line_sep>X1=Var("X1")<line_sep>X2=Var("X2")<line_sep>X3=Var("X3")<line_sep>X4=Var("X4")<line_sep>X5=Var("X5")<line_sep>############# # Constants # ############# C=constvar()<line_sep>C1=constvar()<line_sep>C2=constvar()<line_sep>CNS=constvar(Namespace)<line_sep>G=constvar(Graph)<line_sep>G1=constvar(Graph)<line_sep>G2=constvar(Graph)<line_sep>NIL=var(<lambda>x:x.is_constant()<and>x.value<eq>())<line_sep>###################### # Sequence variables # ###################### Xs=SVar(Var())<line_sep>Ys=SVar(Var())<line_sep>Cs=SVar(constvar())<line_sep>__all__=["X" "Y" "Z" "X1" "X2" "X3" "X4" "X5" "C" "C1" "C2" "CNS" "G" "G1" "G2" "NIL" "Xs" "Ys" "Cs" "constvar" ]<line_sep>
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """An NNVM implementation of graph packing."""<import_stmt>nnvm<import_from_stmt>nnvm.compiler graph_attr graph_util<def_stmt>_pack_batch_channel data dshape bfactor cfactor<block_start>"""Pack the data channel dimension. """<assert_stmt>dshape[0]%bfactor<eq>0<assert_stmt>dshape[1]%cfactor<eq>0<line_sep>data=nnvm.sym.reshape(data shape=(dshape[0]<floordiv>bfactor bfactor dshape[1]<floordiv>cfactor cfactor dshape[2] dshape[3]))<line_sep>data=nnvm.sym.transpose(data axes=(0 2 4 5 1 3))<line_sep><return>data<block_end><def_stmt>_unpack_batch_channel data old_shape<block_start>"""Unpack the data channel dimension. """<line_sep>data=nnvm.sym.transpose(data axes=(0 4 1 5 2 3))<line_sep>data=nnvm.sym.reshape(data shape=old_shape)<line_sep><return>data<block_end><def_stmt>_pack_weight data dshape cfactor<block_start>"""Pack the weight into packed format. 
"""<assert_stmt>len(dshape)<eq>4<assert_stmt>dshape[0]%cfactor<eq>0<assert_stmt>dshape[1]%cfactor<eq>0<line_sep>data=nnvm.sym.reshape(data shape=(dshape[0]<floordiv>cfactor cfactor dshape[1]<floordiv>cfactor cfactor dshape[2] dshape[3]))<line_sep>data=nnvm.sym.transpose(data axes=(0 2 4 5 1 3))<line_sep><return>data<block_end><def_stmt>_pack_weight_conv2d_transpose data dshape cfactor<block_start>"""Pack the weight into packed format. """<assert_stmt>len(dshape)<eq>4<assert_stmt>dshape[0]%cfactor<eq>0<assert_stmt>dshape[1]%cfactor<eq>0<line_sep>data=nnvm.sym.reshape(data shape=(dshape[0]<floordiv>cfactor cfactor dshape[1]<floordiv>cfactor cfactor dshape[2] dshape[3]))<line_sep>data=nnvm.sym.transpose(data axes=(2 0 4 5 3 1))<line_sep><return>data<block_end><def_stmt>_pack_bias data dshape bfactor cfactor<block_start>"""Pack the bias parameter. """<assert_stmt>len(dshape)<eq>3<assert_stmt>dshape[0]%cfactor<eq>0<line_sep>data=nnvm.sym.reshape(data shape=(dshape[0]<floordiv>cfactor cfactor dshape[1] dshape[2] 1))<line_sep>data=nnvm.sym.transpose(data axes=(0 2 3 4 1))<line_sep># broadcast batch dimension to bfactor data=nnvm.sym.broadcast_to(data shape=(dshape[0]<floordiv>cfactor dshape[1] dshape[2] bfactor cfactor))<line_sep><return>data<block_end><def_stmt>_get_shape sym shape_dict<block_start>"""Get the shape of a node. """<line_sep><return>graph_util.infer_shape(nnvm.graph.create(sym) **shape_dict)[1][0]<block_end><def_stmt>nnvm_graph_pack graph shape_dict bfactor cfactor weight_bits start_name="max_pool2d0" stop_name="global_avg_pool2d0"<block_start>"""Pack the graph into batch&channel packed format. Parameters ---------- graph : Graph The input graph. shape_dict : dict of str to shape The input shape. bfactor : int The packing factor in batch cfactor : int The packing factor in channel start_name: str, optional Start packing from certain known node. start_name: str, optional Stop packing from certain known node. 
Returns ------- graph : Graph The transformed graph. """<line_sep>graph=graph_attr.set_shape_inputs(graph shape_dict)<line_sep>graph=graph.apply("InferShape")<line_sep>shape=graph.json_attr("shape")<line_sep>gidx=graph.index<line_sep>node_map={}<line_sep>dset=set()<line_sep>start_pack=<false><for_stmt>nid,node enumerate(gidx.nodes)<block_start>children=[node_map[e[0]]<for>e node["inputs"]]<line_sep>ishape=[shape[gidx.entry_id(e)]<for>e node["inputs"]]<line_sep>oshape=shape[gidx.entry_id(nid 0)]<line_sep>attrs=node.get("attrs" {})<line_sep>node_name=node["name"]<line_sep>op_name=node["op"]<line_sep>get_clone=<lambda>c o_n n_n a:getattr(nnvm.symbol o_n)(*c name=n_n **a)<if_stmt>op_name<eq>"null"<block_start>new_node=nnvm.symbol.Variable(node_name)<if_stmt>start_name<and>node_name<eq>start_name<block_start>start_pack=<true><line_sep>new_node=_pack_batch_channel(new_node oshape bfactor cfactor)<block_end><if_stmt>start_pack<and>"_begin_state_"<in>node_name# RNN -> CNN, pack <block_start>new_node=_pack_batch_channel(new_node oshape bfactor cfactor)<block_end><block_end><elif_stmt>node_name<eq>start_name<block_start><assert_stmt><not>start_pack<line_sep>start_pack=<true><line_sep>new_node=get_clone(children op_name node_name attrs)<line_sep>new_node=_pack_batch_channel(new_node oshape bfactor cfactor)<block_end><elif_stmt>node_name<eq>stop_name<block_start><if_stmt>start_pack<block_start>start_pack=<false><line_sep>children[0]=_unpack_batch_channel(children[0] ishape[0])<line_sep>new_node=getattr(nnvm.symbol op_name)(*children name=node_name **attrs)<block_end><else_stmt><block_start>new_node=get_clone(children op_name node_name attrs)<block_end><block_end><elif_stmt>op_name<eq>"conv2d"<and>attrs.get("out_dtype" <none>)<eq>"int32"<block_start><assert_stmt>8%weight_bits<eq>0<line_sep>w_lanes=8<floordiv>weight_bits<if_stmt>start_pack<block_start>attrs["layout"]="NCHW%dn%dc"%(bfactor cfactor)<line_sep>attrs["kernel_layout"]="OIHW%do%di%dp"%(cfactor cfactor 
w_lanes)<line_sep>data,weight=children<line_sep>weight=_pack_weight(weight ishape[1] cfactor)<line_sep># insert bit packing when necessary <if_stmt>w_lanes<ne>1<block_start><assert_stmt>8%w_lanes<eq>0<line_sep>weight=nnvm.sym.bitpack(weight lanes=w_lanes)<block_end>new_node=nnvm.sym.conv2d(data weight name=node_name **attrs)<block_end><else_stmt><block_start>new_node=get_clone(children op_name node_name attrs)<block_end><block_end><elif_stmt>op_name<eq>"conv2d_transpose"<and>attrs.get("out_dtype" <none>)<eq>"int32"<block_start><assert_stmt>8%weight_bits<eq>0<line_sep>w_lanes=8<floordiv>weight_bits<if_stmt>start_pack<block_start>attrs["layout"]="NCHW%dn%dc"%(bfactor cfactor)<line_sep>attrs["kernel_layout"]="IOHW%di%do%dp"%(cfactor cfactor w_lanes)<line_sep>data,weight=children<line_sep>weight=_pack_weight_conv2d_transpose(weight ishape[1] cfactor)<line_sep>new_node=nnvm.sym.conv2d_transpose(data weight name=node_name **attrs)<block_end><else_stmt><block_start>new_node=get_clone(children op_name node_name attrs)<block_end><block_end><elif_stmt>op_name.startswith("broadcast_")<and>tuple(ishape[0])<eq>tuple(ishape[1])<block_start>new_node=get_clone(children op_name node_name attrs)<block_end><elif_stmt>op_name.startswith("broadcast")<and>len(ishape[1])<eq>3<block_start><if_stmt>start_pack<block_start>children[1]=_pack_bias(children[1] ishape[1] bfactor cfactor)<line_sep>new_node=getattr(nnvm.symbol op_name)(*children name=node_name **attrs)<block_end><else_stmt><block_start>new_node=get_clone(children op_name node_name attrs)<block_end><block_end><elif_stmt>op_name.startswith("elementwise_add")<block_start>new_node=get_clone(children op_name node_name attrs)<block_end><else_stmt><block_start>new_node=get_clone(children op_name node_name 
attrs)<line_sep>dset.add(op_name)<block_end>node_map[nid]=new_node<block_end><assert_stmt>len(graph.index.output_entries)<eq>1<line_sep>ret=node_map[graph.index.output_entries[0][0]]<if_stmt>start_pack<block_start>oshape=shape[graph.index.output_entries[0][0]]<line_sep>ret=_unpack_batch_channel(ret oshape)<block_end>graph=nnvm.graph.create(ret)<line_sep>graph=graph_attr.set_shape_inputs(graph shape_dict)<line_sep>graph=graph.apply("InferShape")<line_sep><return>graph<block_end>
<import_from_stmt>pydantic.dataclasses dataclass<import_from_stmt>...samplers BaseSamplerConfig<line_sep>@dataclass<class_stmt>VAMPSamplerConfig(BaseSamplerConfig)<block_start>"""This is the VAMP prior sampler configuration instance deriving from :class:`BaseSamplerConfig`. """<line_sep><pass><block_end>
# coding: utf-8 <import_from_stmt>django.db models<import_from_stmt>django_th.models.services Services<import_from_stmt>django_th.models TriggerService<class_stmt>Github(Services)<block_start>""" github model to be adapted for the new service """<line_sep># put whatever you need here # eg title = models.CharField(max_length=80) # but keep at least this one repo=models.CharField(max_length=80)# owner project=models.CharField(max_length=80)# repo trigger=models.ForeignKey(TriggerService on_delete=models.CASCADE)<class_stmt>Meta<block_start>app_label='th_github'<line_sep>db_table='django_th_github'<block_end><def_stmt>show self<block_start>""" :return: string representing object """<line_sep><return>"My Github %s"%self.name<block_end><def_stmt>__str__ self<block_start><return>self.name<block_end><block_end>
<import_from_stmt>met_brewer.palettes MET_PALETTES COLORBLIND_PALETTES_NAMES COLORBLIND_PALETTES met_brew export is_colorblind_friendly <line_sep>MET_PALETTES<line_sep>COLORBLIND_PALETTES_NAMES<line_sep>COLORBLIND_PALETTES<line_sep>met_brew<line_sep>export<line_sep>is_colorblind_friendly<line_sep>
<def_stmt>singleton theClass<block_start>""" decorator for a class to make a singleton out of it """<line_sep>classInstances={}<def_stmt>getInstance *args **kwargs<block_start>""" creating or just return the one and only class instance. The singleton depends on the parameters used in __init__ """<line_sep>key=(theClass args str(kwargs))<if_stmt>key<not><in>classInstances<block_start>classInstances[key]=theClass(*args **kwargs)<block_end><return>classInstances[key]<block_end><return>getInstance<block_end># Example @singleton<class_stmt>A<block_start>""" test class """<def_stmt>__init__ self key=<none> subkey=<none><block_start>self.key=key<line_sep>self.subkey=subkey<block_end><def_stmt>__repr__ self<block_start><return>"A(id=%d, %s,%s)"%(id(self) self.key self.subkey)<block_end><block_end><def_stmt>tests <block_start>""" some basic tests """<line_sep>testCases=[(<none> <none>) (10 20) (30 <none>) (<none> 30)]<line_sep>instances=set()<line_sep>instance1=<none><line_sep>instance2=<none><for_stmt>key,subkey testCases<block_start><if_stmt>key<eq><none><block_start><if_stmt>subkey<eq><none><block_start>instance1,instance2=A() A()<block_end><else_stmt><block_start>instance1,instance2=A(subkey=subkey) A(subkey=subkey)<block_end><block_end><else_stmt><block_start><if_stmt>subkey<eq><none><block_start>instance1,instance2=A(key) A(key)<block_end><else_stmt><block_start>instance1,instance2=A(key subkey=subkey) A(key subkey=subkey)<block_end><block_end>print("instance1: %-25s"%instance1 " instance2: %-25s"%instance2)<assert_stmt>instance1<eq>instance2<assert_stmt>instance1.key<eq>key<and>instance1.subkey<eq>subkey<line_sep>instances.add(instance1)<block_end><assert_stmt>len(instances)<eq>len(testCases)<block_end>tests()<line_sep>
<import_from_stmt>django.core mail<import_from_stmt>django.test.testcases TestCase<import_from_stmt>django.urls reverse<import_from_stmt>parsifal.apps.activities.constants ActivityTypes<import_from_stmt>parsifal.apps.activities.models Activity<import_from_stmt>parsifal.apps.authentication.tests.factories UserFactory<import_from_stmt>parsifal.apps.invites.constants InviteStatus<import_from_stmt>parsifal.apps.invites.models Invite<import_from_stmt>parsifal.apps.invites.tests.factories InviteFactory<import_from_stmt>parsifal.utils.test login_redirect_url<class_stmt>TestManageAccessView(TestCase)<block_start>@classmethod<def_stmt>setUpTestData cls<block_start>cls.invite=InviteFactory()<line_sep>cls.co_author=UserFactory()<line_sep>cls.invite.review.co_authors.add(cls.co_author)<line_sep>cls.url=reverse("invites:manage_access" args=(cls.invite.review.author.username cls.invite.review.name ) )<block_end><def_stmt>test_login_required self<block_start>response=self.client.get(self.url)<line_sep>self.assertRedirects(response login_redirect_url(self.url))<block_end><def_stmt>test_main_author_required self<block_start>self.client.force_login(self.co_author)<line_sep>response=self.client.get(self.url)<line_sep>self.assertEqual(403 response.status_code)<block_end><def_stmt>test_get_success self<block_start>self.client.force_login(self.invite.review.author)<line_sep>response=self.client.get(self.url)<with_stmt>self.subTest(msg="Test get status code")<block_start>self.assertEqual(200 response.status_code)<block_end>parts=("csrfmiddlewaretoken" 'name="invitee"' 'name="invitee_email"' self.invite.get_invitee_email())<for_stmt>part parts<block_start><with_stmt>self.subTest(msg="Test response body" part=part)<block_start>self.assertContains(response part)<block_end><block_end><block_end><def_stmt>test_post_success_invitee_email self<block_start>data={"invitee_email":"<EMAIL>" }<line_sep>self.client.force_login(self.invite.review.author)<line_sep>response=self.client.post(self.url 
data follow=<true>)<with_stmt>self.subTest(msg="Test post status code")<block_start>self.assertEqual(302 response.redirect_chain[0][1])<block_end><with_stmt>self.subTest(msg="Test post redirect status code")<block_start>self.assertEqual(200 response.status_code)<block_end><with_stmt>self.subTest(msg="Test success message")<block_start>self.assertContains(response "An invitation was sent to <EMAIL>.")<block_end><with_stmt>self.subTest(msg="Test invite created")<block_start>self.assertTrue(Invite.objects.filter(invitee_email="<EMAIL>" status=InviteStatus.PENDING).exists())<block_end><with_stmt>self.subTest(msg="Test email sent")<block_start>self.assertEqual(1 len(mail.outbox))<block_end><block_end><def_stmt>test_post_success_invitee self<block_start>contact=UserFactory(email="<EMAIL>")<line_sep>Activity.objects.create(from_user=self.invite.review.author to_user=contact activity_type=ActivityTypes.FOLLOW)<with_stmt>self.subTest(msg="Test setup")<block_start>self.assertFalse(self.invite.review.is_author_or_coauthor(contact))<block_end>data={"invitee":contact.pk }<line_sep>self.client.force_login(self.invite.review.author)<line_sep>response=self.client.post(self.url data follow=<true>)<with_stmt>self.subTest(msg="Test post status code")<block_start>self.assertEqual(302 response.redirect_chain[0][1])<block_end><with_stmt>self.subTest(msg="Test post redirect status code")<block_start>self.assertEqual(200 response.status_code)<block_end><with_stmt>self.subTest(msg="Test success message")<block_start>self.assertContains(response "An invitation was sent to <EMAIL>.")<block_end><with_stmt>self.subTest(msg="Test invite created")<block_start>self.assertTrue(Invite.objects.filter(invitee=contact invitee_email="<EMAIL>" status=InviteStatus.PENDING).exists())<block_end><with_stmt>self.subTest(msg="Test email sent")<block_start>self.assertEqual(1 len(mail.outbox))<block_end><block_end><block_end>
# -*- coding: utf-8 -*- # Based on https://gist.github.com/voldmar/1264102 # and https://gist.github.com/runekaagaard/2eecf0a8367959dc634b7866694daf2c <import_stmt>gc<import_stmt>inspect<import_stmt>weakref<import_from_stmt>collections defaultdict<import_from_stmt>django.apps apps<import_from_stmt>django.core.management.base BaseCommand<import_from_stmt>django.db.models.signals ModelSignal pre_init post_init pre_save post_save pre_delete post_delete m2m_changed pre_migrate post_migrate <import_from_stmt>django.utils.encoding force_str<line_sep>MSG='{module}.{name} #{line}'<line_sep>SIGNAL_NAMES={pre_init:'pre_init' post_init:'post_init' pre_save:'pre_save' post_save:'post_save' pre_delete:'pre_delete' post_delete:'post_delete' m2m_changed:'m2m_changed' pre_migrate:'pre_migrate' post_migrate:'post_migrate' }<class_stmt>Command(BaseCommand)<block_start>help='List all signals by model and signal type'<def_stmt>handle self *args **options<block_start>all_models=apps.get_models(include_auto_created=<true> include_swapped=<true>)<line_sep>model_lookup={id(m):m<for>m all_models}<line_sep>signals=[obj<for>obj gc.get_objects()<if>isinstance(obj ModelSignal)]<line_sep>models=defaultdict(<lambda>:defaultdict(list))<for_stmt>signal signals<block_start>signal_name=SIGNAL_NAMES.get(signal 'unknown')<for_stmt>receiver signal.receivers<block_start>lookup,receiver=receiver<if_stmt>isinstance(receiver weakref.ReferenceType)<block_start>receiver=receiver()<block_end><if_stmt>receiver<is><none><block_start><continue><block_end>receiver_id,sender_id=lookup<line_sep>model=model_lookup.get(sender_id '_unknown_')<if_stmt>model<block_start>models[model][signal_name].append(MSG.format(name=receiver.__name__ module=receiver.__module__ line=inspect.getsourcelines(receiver)[1] path=inspect.getsourcefile(receiver)))<block_end><block_end><block_end>output=[]<for_stmt>key sorted(models.keys() key=str)<block_start>verbose_name=force_str(key._meta.verbose_name)<line_sep>output.append('{}.{} 
({})'.format(key.__module__ key.__name__ verbose_name))<for_stmt>signal_name sorted(models[key].keys())<block_start>lines=models[key][signal_name]<line_sep>output.append(' {}'.format(signal_name))<for_stmt>line lines<block_start>output.append(' {}'.format(line))<block_end><block_end><block_end><return>'\n'.join(output)<block_end><block_end>
# pylint: disable=unused-import # Keep module for legacy backcompat <import_from_stmt>.ops S3Coordinate file_handle_to_s3<line_sep>
<class_stmt>UsageError(Exception)<block_start>"""Error in plugin usage."""<line_sep>__module__="builtins"<block_end>
<import_stmt>json<import_stmt>argparse<import_from_stmt>pathlib Path<import_from_stmt>itertools chain<import_stmt>time<import_from_stmt>. Preql __version__ Signal<import_from_stmt>. settings<line_sep>parser=argparse.ArgumentParser(description='Preql command-line interface (aka REPL)')<line_sep>parser.add_argument('-i' '--interactive' action='store_true' default=<false> help="enter interactive mode after running the script")<line_sep>parser.add_argument('-v' '--version' action='store_true' help="print version")<line_sep>parser.add_argument('--install-jupyter' action='store_true' help="installs the Preql plugin for Jupyter notebook")<line_sep>parser.add_argument('--print-sql' action='store_true' help="print the SQL code that's being executed")<line_sep>parser.add_argument('-f' '--file' type=str help='path to a Preql script to run')<line_sep>parser.add_argument('-m' '--module' type=str help='name of a Preql module to run')<line_sep>parser.add_argument('--time' action='store_true' help='displays how long the script ran')<line_sep>parser.add_argument('-c' '--config' type=str help='path to a JSON configuration file for Preql (default: ~/.preql_conf.json)')<line_sep>parser.add_argument('database' type=str nargs='?' 
default=<none> help="database url (postgres://user:password@host:port/db_name")<line_sep>parser.add_argument('--python-traceback' action='store_true' help="Show the Python traceback when an exception causes the interpreter to quit")<def_stmt>find_dot_preql <block_start>cwd=Path.cwd()<for_stmt>p chain([cwd] cwd.parents)<block_start>dot_preql=p/".preql"<if_stmt>dot_preql.exists()<block_start><return>dot_preql<block_end><block_end><block_end><def_stmt>update_settings path<block_start>config=json.load(path.open())<if_stmt>'debug'<in>config<block_start>settings.debug=config['debug']<block_end><if_stmt>'color_scheme'<in>config<block_start>settings.color_theme.update(config['color_scheme'])<block_end><block_end><def_stmt>main <block_start>args=parser.parse_args()<if_stmt>args.version<block_start>print(__version__)<block_end><if_stmt>args.install_jupyter<block_start><import_from_stmt>.jup_kernel.install main<as>install_jupyter<line_sep>install_jupyter([])<line_sep>print("Install successful. To start working, run 'jupyter notebook' and create a new Preql notebook.")<line_sep><return><block_end><import_from_stmt>pathlib Path<if_stmt>args.config<block_start>update_settings(Path(args.config))<block_end><else_stmt><block_start>config_path=Path.home()/'.preql_conf.json'<if_stmt>config_path.exists()<block_start>update_settings(config_path)<block_end><block_end>kw={'print_sql':args.print_sql}<if_stmt>args.database<block_start>kw['db_uri']=args.database<line_sep>kw['auto_create']=<true><block_end>p=Preql(**kw)<line_sep>interactive=args.interactive<line_sep>error_code=0<line_sep>start=time.time()<try_stmt><block_start><if_stmt>args.file<block_start>p.load(args.file)<block_end><elif_stmt>args.module<block_start>p('import '+args.module)<block_end><elif_stmt>args.version<or>args.install_jupyter<block_start><pass><block_end><else_stmt><block_start>dot_preql=find_dot_preql()<if_stmt>dot_preql<block_start>print("Auto-running" dot_preql)<line_sep>p._run_code(dot_preql.read_text() 
dot_preql)<block_end>interactive=<true><block_end><block_end><except_stmt>Signal<as>e<block_start>p._display.print_exception(e)<line_sep>error_code=-1<if_stmt>args.python_traceback<block_start><raise><block_end><block_end><except_stmt>KeyboardInterrupt<block_start>print("Interrupted (Ctrl+C)")<block_end>end=time.time()<if_stmt>args.time<block_start>print('Script took %.2f seconds to run'%(end-start))<block_end><if_stmt>interactive<block_start>p.load_all_tables()<line_sep>p.start_repl()<block_end><else_stmt><block_start><return>error_code<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
"""Test negative examples using Countries. The main idea here is that if we explicitly enter some false facts (signalling to the KB that they are false, it should make less-wrong predictions for them, versus just going by its own synthetic negative examples.) It may have the side effect of pushing UP the probability of other wrong triples, see e.g. "canada in asia" below. """<import_stmt>context<import_from_stmt>zincbase KB<line_sep>kb=KB()<line_sep>kb.seed(555)<line_sep>kb.from_csv('./assets/countries_s1_train.csv' delimiter='\t')<line_sep>rule_num=kb.store('~locatedin(canada, africa)')<line_sep>b=list(kb.query('locatedin(canada, X)'))<assert_stmt>len(b)<eq>1<assert_stmt>b[0]['X']<eq>'northern_america'<assert_stmt>kb.delete_rule(rule_num)<line_sep>kb.build_kg_model(cuda=<false> embedding_size=100)<line_sep>kb.train_kg_model(steps=500 batch_size=512 neg_ratio=0.01)<line_sep>canada_in_africa_naive=kb.estimate_triple_prob('canada' 'locatedin' 'africa')<line_sep>canada_in_asia_naive=kb.estimate_triple_prob('canada' 'locatedin' 'asia')<line_sep>austria_neighbors_spain_naive=kb.estimate_triple_prob('austria' 'neighbor' 'spain')<line_sep>austria_neighbors_france_naive=kb.estimate_triple_prob('austria' 'neighbor' 'france')<line_sep>kb=KB()<line_sep>kb.seed(555)<line_sep>kb.from_csv('./assets/countries_s1_train.csv' delimiter='\t')<line_sep>kb.store('~locatedin(canada, africa)')<line_sep>kb.store('~neighbor(austria, spain)')<line_sep>kb.build_kg_model(cuda=<false> embedding_size=100)<line_sep>kb.train_kg_model(steps=500 batch_size=512 neg_ratio=0.1)<line_sep>canada_in_africa_explicit=kb.estimate_triple_prob('canada' 'locatedin' 'africa')<line_sep>canada_in_asia_explicit=kb.estimate_triple_prob('canada' 'locatedin' 'asia')<line_sep>austria_neighbors_spain_explicit=kb.estimate_triple_prob('austria' 'neighbor' 'spain')<line_sep>austria_neighbors_france_explicit=kb.estimate_triple_prob('austria' 'neighbor' 
'france')<assert_stmt>canada_in_africa_naive<g>canada_in_africa_explicit<assert_stmt>austria_neighbors_spain_naive<g>austria_neighbors_spain_explicit<line_sep>print('All negative example tests passed.')<line_sep>
# program checks if the string is palindrome or not. <def_stmt>function string<block_start><if_stmt>(string<eq>string[::-1])<block_start>print("This is a Palindrome String")<block_end><else_stmt><block_start>print("This is Not a Palindrome String")<block_end><block_end>string=input("Please enter your own String : ")<line_sep>function(string)<line_sep>
# -*- coding: utf-8 -*- """VISA VPP-4.3 data types (VPP-4.3.2 spec, section 3) using ctypes constants. This file is part of PyVISA. All data types that are defined by VPP-4.3.2. The module exports all data types including the pointer and array types. This means "ViUInt32" and such. :copyright: 2014-2020 by PyVISA Authors, see AUTHORS for more details. :license: MIT, see LICENSE for more details. """<import_stmt>ctypes<as>_ctypes<import_from_stmt>.cthelper FUNCTYPE<line_sep># Part One: Type Assignments for VISA and Instrument Drivers, see spec table # 3.1.1. # # Remark: The pointer and probably also the array variants are of no # significance in Python because there is no native call-by-reference. # However, as long as I'm not fully sure about this, they won't hurt. <def_stmt>_type_pair ctypes_type<block_start><return>ctypes_type _ctypes.POINTER(ctypes_type)<block_end><def_stmt>_type_triplet ctypes_type<block_start><return>_type_pair(ctypes_type)+(_ctypes.POINTER(ctypes_type) )<block_end>ViUInt64,ViPUInt64,ViAUInt64=_type_triplet(_ctypes.c_uint64)<line_sep>ViInt64,ViPInt64,ViAInt64=_type_triplet(_ctypes.c_int64)<line_sep>ViUInt32,ViPUInt32,ViAUInt32=_type_triplet(_ctypes.c_uint32)<line_sep>ViInt32,ViPInt32,ViAInt32=_type_triplet(_ctypes.c_int32)<line_sep>ViUInt16,ViPUInt16,ViAUInt16=_type_triplet(_ctypes.c_ushort)<line_sep>ViInt16,ViPInt16,ViAInt16=_type_triplet(_ctypes.c_short)<line_sep>ViUInt8,ViPUInt8,ViAUInt8=_type_triplet(_ctypes.c_ubyte)<line_sep>ViInt8,ViPInt8,ViAInt8=_type_triplet(_ctypes.c_byte)<line_sep>ViAddr,ViPAddr,ViAAddr=_type_triplet(_ctypes.c_void_p)<line_sep>ViChar,ViPChar,ViAChar=_type_triplet(_ctypes.c_char)<line_sep>ViByte,ViPByte,ViAByte=_type_triplet(_ctypes.c_ubyte)<line_sep>ViBoolean,ViPBoolean,ViABoolean=_type_triplet(ViUInt16)<line_sep>ViReal32,ViPReal32,ViAReal32=_type_triplet(_ctypes.c_float)<line_sep>ViReal64,ViPReal64,ViAReal64=_type_triplet(_ctypes.c_double)<class_stmt>ViString(object)<block_start>@classmethod<def_stmt>from_param cls 
obj<block_start><if_stmt>isinstance(obj str)<block_start><return>bytes(obj "ascii")<block_end><return>obj<block_end><block_end><class_stmt>ViAString(object)<block_start>@classmethod<def_stmt>from_param cls obj<block_start><return>_ctypes.POINTER(obj)<block_end><block_end>ViPString=ViString<line_sep># This follows visa.h definition, but involves a lot of manual conversion. # ViBuf, ViPBuf, ViABuf = ViPByte, ViPByte, _ctypes.POINTER(ViPByte) ViBuf,ViPBuf,ViABuf=ViPString ViPString ViAString<def_stmt>buffer_to_text buf<arrow>str<block_start><return>buf.value.decode("ascii")<block_end>ViRsrc=ViString<line_sep>ViPRsrc=ViString<line_sep>ViARsrc=ViAString<line_sep>ViKeyId,ViPKeyId=ViString ViPString<line_sep>ViStatus,ViPStatus,ViAStatus=_type_triplet(ViInt32)<line_sep>ViVersion,ViPVersion,ViAVersion=_type_triplet(ViUInt32)<line_sep>_ViObject,ViPObject,ViAObject=_type_triplet(ViUInt32)<line_sep>_ViSession,ViPSession,ViASession=_type_triplet(ViUInt32)<class_stmt>ViObject(_ViObject)# type: ignore <block_start>@classmethod<def_stmt>from_param cls obj<block_start><if_stmt>obj<is><none><block_start><raise>ValueError("Session cannot be None. The resource might be closed.")<block_end><return>_ViObject.from_param(obj)<block_end><block_end>ViSession=ViObject<line_sep>ViAttr=ViUInt32<line_sep>ViConstString=_ctypes.POINTER(ViChar)<line_sep># Part Two: Type Assignments for VISA only, see spec table 3.1.2. The # difference to the above is of no significance in Python, so I use it here # only for easier synchronisation with the spec. 
ViAccessMode,ViPAccessMode=_type_pair(ViUInt32)<line_sep>ViBusAddress,ViPBusAddress=_type_pair(ViUInt32)<line_sep>ViBusAddress64,ViPBusAddress64=_type_pair(ViUInt64)<line_sep>ViBusSize=ViUInt32<line_sep>ViAttrState,ViPAttrState=_type_pair(ViUInt32)<line_sep># The following is weird, taken from news:<EMAIL> ViVAList=_ctypes.POINTER(_ctypes.c_char)<line_sep>ViEventType,ViPEventType,ViAEventType=_type_triplet(ViUInt32)<line_sep>ViPAttr=_ctypes.POINTER(ViAttr)<line_sep>ViAAttr=ViPAttr<line_sep>ViEventFilter=ViUInt32<line_sep>ViFindList,ViPFindList=_type_pair(ViObject)<line_sep>ViEvent,ViPEvent=_type_pair(ViObject)<line_sep>ViJobId,ViPJobId=_type_pair(ViUInt32)<line_sep># Class of callback functions for event handling, first type is result type ViHndlr=FUNCTYPE(ViStatus ViSession ViEventType ViEvent ViAddr)<line_sep>
<import_from_stmt>dispatch.plugins.bases StoragePlugin<class_stmt>TestStoragePlugin(StoragePlugin)<block_start>title="Dispatch Test Plugin - Storage"<line_sep>slug="test-storage"<def_stmt>get self **kwargs<block_start><return><block_end><def_stmt>create self items **kwargs<block_start><return><block_end><def_stmt>update self items **kwargs<block_start><return><block_end><def_stmt>delete self items **kwargs<block_start><return><block_end><def_stmt>list self **kwargs<block_start><return><block_end><def_stmt>add_participant self items **kwargs<block_start><return><block_end><def_stmt>remove_participant self items **kwargs<block_start><return><block_end><def_stmt>add_file self **kwargs<block_start><return><block_end><def_stmt>delete_file self **kwargs<block_start><return><block_end><def_stmt>move_file self **kwargs<block_start><return><block_end><def_stmt>list_files self **kwargs<block_start><return><block_end><block_end>
"""The HP Integrated Lights-Out (iLO) component."""<line_sep>
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. <import_stmt>PIL<import_from_stmt>torch.utils.collect_env get_pretty_env_info<def_stmt>get_pil_version <block_start><return>"\n Pillow ({})".format(PIL.__version__)<block_end><def_stmt>collect_env_info <block_start>env_str=get_pretty_env_info()<line_sep>env_str<augadd>get_pil_version()<line_sep><return>env_str<block_end>
# This code is part of Qiskit. # # (C) Copyright IBM 2021. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. """The Iterative Quantum Phase Estimation Algorithm."""<import_from_stmt>typing Optional Union<import_stmt>numpy<import_stmt>qiskit<import_from_stmt>qiskit.circuit QuantumCircuit QuantumRegister<import_from_stmt>qiskit.circuit.classicalregister ClassicalRegister<import_from_stmt>qiskit.providers Backend<import_from_stmt>qiskit.utils QuantumInstance<import_from_stmt>.phase_estimator PhaseEstimator<import_from_stmt>.phase_estimator PhaseEstimatorResult<class_stmt>IterativePhaseEstimation(PhaseEstimator)<block_start>"""Run the Iterative quantum phase estimation (QPE) algorithm. Given a unitary circuit and a circuit preparing an eigenstate, return the phase of the eigenvalue as a number in :math:`[0,1)` using the iterative phase estimation algorithm. [1]: Dobsicek et al. (2006), Arbitrary accuracy iterative phase estimation algorithm as a two qubit benchmark, `arxiv/quant-ph/0610214 <https://arxiv.org/abs/quant-ph/0610214>`_ """<def_stmt>__init__ self num_iterations:int quantum_instance:Optional[Union[QuantumInstance Backend]]=<none> <arrow><none><block_start>"""Args: num_iterations: The number of iterations (rounds) of the phase estimation to run. quantum_instance: The quantum instance on which the circuit will be run. Raises: ValueError: if num_iterations is not greater than zero. 
"""<if_stmt>isinstance(quantum_instance Backend)<block_start>quantum_instance=QuantumInstance(quantum_instance)<block_end>self._quantum_instance=quantum_instance<if_stmt>num_iterations<le>0<block_start><raise>ValueError("`num_iterations` must be greater than zero.")<block_end>self._num_iterations=num_iterations<block_end><def_stmt>construct_circuit self unitary:QuantumCircuit state_preparation:QuantumCircuit k:int omega:float=0 measurement:bool=<false> <arrow>QuantumCircuit<block_start>"""Construct the kth iteration Quantum Phase Estimation circuit. For details of parameters, see Fig. 2 in https://arxiv.org/pdf/quant-ph/0610214.pdf. Args: unitary: The circuit representing the unitary operator whose eigenvalue (via phase) will be measured. state_preparation: The circuit that prepares the state whose eigenphase will be measured. If this parameter is omitted, no preparation circuit will be run and input state will be the all-zero state in the computational basis. k: the iteration idx. omega: the feedback angle. measurement: Boolean flag to indicate if measurement should be included in the circuit. 
Returns: QuantumCircuit: the quantum circuit per iteration """<line_sep>k=self._num_iterations<if>k<is><none><else>k<line_sep># The auxiliary (phase measurement) qubit phase_register=QuantumRegister(1 name="a")<line_sep>eigenstate_register=QuantumRegister(unitary.num_qubits name="q")<line_sep>qc=QuantumCircuit(eigenstate_register)<line_sep>qc.add_register(phase_register)<if_stmt>isinstance(state_preparation QuantumCircuit)<block_start>qc.append(state_preparation eigenstate_register)<block_end><elif_stmt>state_preparation<is><not><none><block_start>qc<augadd>state_preparation.construct_circuit("circuit" eigenstate_register)<block_end># hadamard on phase_register[0] qc.h(phase_register[0])<line_sep># controlled-U # TODO: We may want to allow flexibility in how the power is computed # For example, it may be desirable to compute the power via Trotterization, if # we are doing Trotterization anyway. unitary_power=unitary.power(2<power>(k-1)).control()<line_sep>qc=qc.compose(unitary_power list(range(1 unitary.num_qubits+1))+[0])<line_sep>qc.p(omega phase_register[0])<line_sep># hadamard on phase_register[0] qc.h(phase_register[0])<if_stmt>measurement<block_start>c=ClassicalRegister(1 name="c")<line_sep>qc.add_register(c)<line_sep>qc.measure(phase_register c)<block_end><return>qc<block_end><def_stmt>_estimate_phase_iteratively self unitary state_preparation<block_start>""" Main loop of iterative phase estimation. 
"""<line_sep>omega_coef=0<line_sep># k runs from the number of iterations back to 1 <for_stmt>k range(self._num_iterations 0 -1)<block_start>omega_coef<augdiv>2<if_stmt>self._quantum_instance.is_statevector<block_start>qc=self.construct_circuit(unitary state_preparation k -2<times>numpy.pi<times>omega_coef measurement=<false>)<line_sep>result=self._quantum_instance.execute(qc)<line_sep>complete_state_vec=result.get_statevector(qc)<line_sep>ancilla_density_mat=qiskit.quantum_info.partial_trace(complete_state_vec range(unitary.num_qubits))<line_sep>ancilla_density_mat_diag=numpy.diag(ancilla_density_mat)<line_sep>max_amplitude=max(ancilla_density_mat_diag.min() ancilla_density_mat_diag.max() key=abs)<line_sep>x=numpy.where(ancilla_density_mat_diag<eq>max_amplitude)[0][0]<block_end><else_stmt><block_start>qc=self.construct_circuit(unitary state_preparation k -2<times>numpy.pi<times>omega_coef measurement=<true>)<line_sep>measurements=self._quantum_instance.execute(qc).get_counts(qc)<line_sep>x=1<if>measurements.get("1" 0)<g>measurements.get("0" 0)<else>0<block_end>omega_coef=omega_coef+x/2<block_end><return>omega_coef<block_end># pylint: disable=arguments-differ <def_stmt>estimate self unitary:QuantumCircuit state_preparation:QuantumCircuit<arrow>"IterativePhaseEstimationResult"<block_start>""" Estimate the eigenphase of the input unitary and initial-state pair. Args: unitary: The circuit representing the unitary operator whose eigenvalue (via phase) will be measured. state_preparation: The circuit that prepares the state whose eigenphase will be measured. If this parameter is omitted, no preparation circuit will be run and input state will be the all-zero state in the computational basis. Returns: Estimated phase in an IterativePhaseEstimationResult object. 
"""<line_sep>phase=self._estimate_phase_iteratively(unitary state_preparation)<line_sep><return>IterativePhaseEstimationResult(self._num_iterations phase)<block_end><block_end><class_stmt>IterativePhaseEstimationResult(PhaseEstimatorResult)<block_start>"""Phase Estimation Result."""<def_stmt>__init__ self num_iterations:int phase:float<arrow><none><block_start>""" Args: num_iterations: number of iterations used in the phase estimation. phase: the estimated phase. """<line_sep>self._num_iterations=num_iterations<line_sep>self._phase=phase<block_end>@property<def_stmt>phase self<arrow>float<block_start>r"""Return the estimated phase as a number in :math:`[0.0, 1.0)`. 1.0 corresponds to a phase of :math:`2\pi`. It is assumed that the input vector is an eigenvector of the unitary so that the peak of the probability density occurs at the bit string that most closely approximates the true phase. """<line_sep><return>self._phase<block_end>@property<def_stmt>num_iterations self<arrow>int<block_start>r"""Return the number of iterations used in the estimation algorithm."""<line_sep><return>self._num_iterations<block_end><block_end>
# -*- coding: utf-8 -*- <import_from_stmt>django.test TestCase<import_from_stmt>django.db models<import_stmt>pytest<import_from_stmt>django_dynamic_fixture N G<import_from_stmt>django_dynamic_fixture.models_test *<import_from_stmt>django_dynamic_fixture.django_helper *<class_stmt>DjangoHelperAppsTest(TestCase)<block_start><def_stmt>test_get_apps_must_return_all_installed_apps self<block_start><assert_stmt>len(get_apps())<ge>1<block_end><def_stmt>test_get_apps_may_be_filtered_by_app_names self<block_start>apps=get_apps(application_labels=['django_dynamic_fixture'])<assert_stmt>len(apps)<eq>1<block_end><def_stmt>test_get_apps_may_ignore_some_apps self<block_start>apps=len(get_apps(exclude_application_labels=['django_dynamic_fixture']))<assert_stmt>len(get_apps())-apps<eq>1<block_end><def_stmt>test_app_name_must_be_valid self<block_start><with_stmt>pytest.raises(Exception)<block_start>get_apps(application_labels=['x'])<block_end><with_stmt>pytest.raises(Exception)<block_start>get_apps(exclude_application_labels=['x'])<block_end><block_end><def_stmt>test_get_app_name_must self<block_start><import_stmt>django_dynamic_fixture.models<as>ddf<assert_stmt>get_app_name(ddf)<eq>'django_dynamic_fixture'<block_end><def_stmt>test_get_models_of_an_app_must self<block_start>ddf=get_apps(application_labels=['django_dynamic_fixture'])[0]<line_sep>models_ddf=get_models_of_an_app(ddf)<assert_stmt>len(models_ddf)<g>0<assert_stmt>ModelWithNumbers<in>models_ddf<block_end><block_end><class_stmt>DjangoHelperModelsTest(TestCase)<block_start><def_stmt>test_get_model_name self<block_start><class_stmt>MyModel_test_get_model_name(models.Model)<block_start><pass><block_end><assert_stmt>get_model_name(MyModel_test_get_model_name)<eq>'MyModel_test_get_model_name'<block_end><def_stmt>test_get_unique_model_name 
self<block_start><class_stmt>MyModel_test_get_unique_model_name(models.Model)<block_start><pass><block_end><assert_stmt>get_unique_model_name(MyModel_test_get_unique_model_name)<eq>'django_dynamic_fixture.tests.test_django_helper.MyModel_test_get_unique_model_name'<block_end><def_stmt>test_get_fields_from_model self<block_start><class_stmt>Model4GetFields_test_get_fields_from_model(models.Model)<block_start>integer=models.IntegerField()<block_end>fields=get_fields_from_model(Model4GetFields_test_get_fields_from_model)<assert_stmt>get_field_by_name_or_raise(Model4GetFields_test_get_fields_from_model 'id')<in>fields<assert_stmt>get_field_by_name_or_raise(Model4GetFields_test_get_fields_from_model 'integer')<in>fields<block_end><def_stmt>test_get_local_fields self<block_start><class_stmt>ModelForGetLocalFields_test_get_local_fields(models.Model)<block_start>integer=models.IntegerField()<block_end>fields=get_local_fields(ModelForGetLocalFields_test_get_local_fields)<assert_stmt>get_field_by_name_or_raise(ModelForGetLocalFields_test_get_local_fields 'id')<in>fields<assert_stmt>get_field_by_name_or_raise(ModelForGetLocalFields_test_get_local_fields 'integer')<in>fields<block_end><def_stmt>test_get_field_names_of_model self<block_start><class_stmt>Model4GetFieldNames_test_get_field_names_of_model(models.Model)<block_start>smallinteger=models.SmallIntegerField()<block_end>fields=get_field_names_of_model(Model4GetFieldNames_test_get_field_names_of_model)<assert_stmt>'smallinteger'<in>fields<assert_stmt>'unknown'<not><in>fields<block_end><def_stmt>test_get_many_to_many_fields_from_model self<block_start><class_stmt>ModelRelated_test_get_many_to_many_fields_from_model(models.Model)<block_start><pass><block_end><class_stmt>ModelWithM2M_test_get_many_to_many_fields_from_model(models.Model)<block_start>manytomany=models.ManyToManyField('ModelRelated_test_get_many_to_many_fields_from_model' 
related_name='m2m')<block_end>fields=get_many_to_many_fields_from_model(ModelWithM2M_test_get_many_to_many_fields_from_model)<assert_stmt>get_field_by_name_or_raise(ModelWithM2M_test_get_many_to_many_fields_from_model 'manytomany')<in>fields<assert_stmt>get_field_by_name_or_raise(ModelWithM2M_test_get_many_to_many_fields_from_model 'id')<not><in>fields<block_end><def_stmt>test_is_model_class self<block_start><class_stmt>MyModel_test_is_model_class(models.Model)<block_start><pass><block_end><assert_stmt>is_model_class(MyModel_test_is_model_class)<eq><true><class_stmt>X(object)<block_start><pass><block_end><assert_stmt>is_model_class(X)<eq><false><block_end><def_stmt>test_is_model_abstract self<block_start><class_stmt>AbstractModel_test_is_model_abstract(models.Model)<block_start><class_stmt>Meta<block_start>abstract=<true><block_end><block_end><assert_stmt>is_model_abstract(AbstractModel_test_is_model_abstract)<class_stmt>ConcreteModel_test_is_model_abstract(models.Model)<block_start><class_stmt>Meta<block_start>abstract=<false><block_end><block_end><assert_stmt>is_model_abstract(ConcreteModel_test_is_model_abstract)<eq><false><block_end><def_stmt>test_is_model_managed self<block_start><class_stmt>NotManagedModel_test_is_model_managed(models.Model)<block_start><class_stmt>Meta<block_start>managed=<false><block_end><block_end><assert_stmt>is_model_managed(NotManagedModel_test_is_model_managed)<eq><false><class_stmt>ManagedModel_test_is_model_managed(models.Model)<block_start><class_stmt>Meta<block_start>managed=<true><block_end><block_end><assert_stmt>is_model_managed(ManagedModel_test_is_model_managed)<block_end><def_stmt>test_model_has_the_field self<block_start><class_stmt>ModelWithWithoutFields_test_model_has_the_field(models.Model)<block_start>integer=models.IntegerField()<line_sep>selfforeignkey=models.ForeignKey('self' null=<true> on_delete=models.DO_NOTHING)<line_sep>manytomany=models.ManyToManyField('self' 
related_name='m2m')<block_end><assert_stmt>model_has_the_field(ModelWithWithoutFields_test_model_has_the_field 'integer')<assert_stmt>model_has_the_field(ModelWithWithoutFields_test_model_has_the_field 'selfforeignkey')<assert_stmt>model_has_the_field(ModelWithWithoutFields_test_model_has_the_field 'manytomany')<assert_stmt>model_has_the_field(ModelWithWithoutFields_test_model_has_the_field 'x')<eq><false><block_end><block_end><class_stmt>DjangoHelperFieldsTest(TestCase)<block_start><def_stmt>test_get_unique_field_name self<block_start><class_stmt>Model4GetUniqueFieldName_test_get_unique_field_name(models.Model)<block_start>integer=models.IntegerField()<block_end>field=get_field_by_name_or_raise(Model4GetUniqueFieldName_test_get_unique_field_name 'integer')<assert_stmt>get_unique_field_name(field)<eq>'django_dynamic_fixture.tests.test_django_helper.Model4GetUniqueFieldName_test_get_unique_field_name.integer'<block_end><def_stmt>test_get_related_model self<block_start><class_stmt>ModelRelated_test_get_related_model(models.Model)<block_start><pass><block_end><class_stmt>Model4GetRelatedModel_test_get_related_model(models.Model)<block_start>fk=models.ForeignKey(ModelRelated_test_get_related_model on_delete=models.DO_NOTHING)<block_end><assert_stmt>get_related_model(get_field_by_name_or_raise(Model4GetRelatedModel_test_get_related_model 'fk'))<eq>ModelRelated_test_get_related_model<block_end><def_stmt>test_field_is_a_parent_link self<block_start><class_stmt>ModelParent_test_get_related_model(models.Model)<block_start><pass><block_end><class_stmt>Model4FieldIsParentLink_test_get_related_model(ModelParent)<block_start>o2o_with_parent_link=models.OneToOneField(ModelParent_test_get_related_model parent_link=<true> related_name='my_custom_ref_x' on_delete=models.DO_NOTHING)<block_end><class_stmt>Model4FieldIsParentLink2(ModelParent)<block_start>o2o_without_parent_link=models.OneToOneField(ModelParent_test_get_related_model parent_link=<false> related_name='my_custom_ref_y' 
on_delete=models.DO_NOTHING)<block_end># FIXME # assert field_is_a_parent_link(get_field_by_name_or_raise(Model4FieldIsParentLink, 'o2o_with_parent_link')) <assert_stmt>field_is_a_parent_link(get_field_by_name_or_raise(Model4FieldIsParentLink2 'o2o_without_parent_link'))<eq><false><block_end><def_stmt>test_field_has_choices self<block_start><class_stmt>Model4FieldHasChoices_test_get_related_model(models.Model)<block_start>with_choices=models.IntegerField(choices=((1 1) (2 2)))<line_sep>without_choices=models.IntegerField()<block_end><assert_stmt>field_has_choices(get_field_by_name_or_raise(Model4FieldHasChoices_test_get_related_model 'with_choices'))<assert_stmt>field_has_choices(get_field_by_name_or_raise(Model4FieldHasChoices_test_get_related_model 'without_choices'))<eq><false><block_end><def_stmt>test_field_has_default_value self<block_start><class_stmt>Model4FieldHasDefault_test_field_has_default_value(models.Model)<block_start>with_default=models.IntegerField(default=1)<line_sep>without_default=models.IntegerField()<block_end><assert_stmt>field_has_default_value(get_field_by_name_or_raise(Model4FieldHasDefault_test_field_has_default_value 'with_default'))<assert_stmt>field_has_default_value(get_field_by_name_or_raise(Model4FieldHasDefault_test_field_has_default_value 'without_default'))<eq><false><block_end><def_stmt>test_field_is_unique self<block_start><class_stmt>Model4FieldMustBeUnique_test_field_is_unique(models.Model)<block_start>unique=models.IntegerField(unique=<true>)<line_sep>not_unique=models.IntegerField()<block_end><assert_stmt>field_is_unique(get_field_by_name_or_raise(Model4FieldMustBeUnique_test_field_is_unique 'unique'))<assert_stmt>field_is_unique(get_field_by_name_or_raise(Model4FieldMustBeUnique_test_field_is_unique 'not_unique'))<eq><false><block_end><def_stmt>test_is_key_field 
self<block_start><class_stmt>ModelForKeyField_test_is_key_field(models.Model)<block_start>integer=models.IntegerField()<block_end><assert_stmt>is_key_field(get_field_by_name_or_raise(ModelForKeyField_test_is_key_field 'id'))<assert_stmt>is_key_field(get_field_by_name_or_raise(ModelForKeyField_test_is_key_field 'integer'))<eq><false><block_end><def_stmt>test_is_relationship_field self<block_start><class_stmt>ModelForRelationshipField_test_is_relationship_field(models.Model)<block_start>fk=models.ForeignKey('self' on_delete=models.DO_NOTHING)<line_sep>one2one=models.OneToOneField('self' on_delete=models.DO_NOTHING)<block_end><assert_stmt>is_relationship_field(get_field_by_name_or_raise(ModelForRelationshipField_test_is_relationship_field 'fk'))<assert_stmt>is_relationship_field(get_field_by_name_or_raise(ModelForRelationshipField_test_is_relationship_field 'one2one'))<assert_stmt>is_relationship_field(get_field_by_name_or_raise(ModelForRelationshipField_test_is_relationship_field 'id'))<eq><false><block_end><def_stmt>test_is_file_field self<block_start><class_stmt>ModelForFileField_test_is_file_field(models.Model)<block_start>filefield=models.FileField()<block_end><assert_stmt>is_file_field(get_field_by_name_or_raise(ModelForFileField_test_is_file_field 'filefield'))<assert_stmt>is_file_field(get_field_by_name_or_raise(ModelForFileField_test_is_file_field 'id'))<eq><false><block_end><block_end><class_stmt>PrintFieldValuesTest(TestCase)<block_start><def_stmt>test_model_not_saved_do_not_raise_an_exception self<block_start>instance=N(ModelWithNumbers)<line_sep>print_field_values(instance)<block_end><def_stmt>test_model_saved_do_not_raise_an_exception self<block_start>instance=G(ModelWithNumbers)<line_sep>print_field_values(instance)<block_end><def_stmt>test_print_accept_list_of_models_too self<block_start>instances=G(ModelWithNumbers n=2)<line_sep>print_field_values(instances)<line_sep>print_field_values([G(ModelWithNumbers) 
G(ModelWithNumbers)])<block_end><def_stmt>test_print_accept_a_queryset_too self<block_start>G(ModelWithNumbers n=2)<line_sep>print_field_values(ModelWithNumbers.objects.all())<block_end><block_end>
""" PyMap3D provides coordinate transforms and geodesy functions with a similar API to the Matlab Mapping Toolbox, but was of course independently derived. For all functions, the default units are: distance : float METERS angles : float DEGREES time : datetime.datetime UTC time of observation These functions may be used with any planetary body, provided the appropriate reference ellipsoid is defined. The default ellipsoid is WGS-84 deg : bool = True means degrees. False = radians. Most functions accept NumPy arrays of any shape, as well as compatible data types including AstroPy, Pandas and Xarray that have Numpy-like data properties. For clarity, we omit all these types in the docs, and just specify the scalar type. Other languages --------------- Companion packages exist for: * Matlab / GNU Octave: [Matmap3D](https://github.com/geospace-code/matmap3d) * Fortran: [Maptran3D](https://github.com/geospace-code/maptran3d) """<line_sep>__version__="2.9.0"<import_from_stmt>.aer ecef2aer aer2ecef geodetic2aer aer2geodetic<import_from_stmt>.enu enu2geodetic geodetic2enu aer2enu enu2aer<import_from_stmt>.ned ned2ecef ned2geodetic geodetic2ned ecef2nedv ned2aer aer2ned ecef2ned<import_from_stmt>.ecef geodetic2ecef ecef2geodetic eci2geodetic geodetic2eci ecef2enuv enu2ecef ecef2enu enu2uvw uvw2enu <import_from_stmt>.sidereal datetime2sidereal greenwichsrt<import_from_stmt>.ellipsoid Ellipsoid<import_from_stmt>.timeconv str2dt<import_from_stmt>.spherical spherical2geodetic geodetic2spherical<try_stmt><block_start><import_from_stmt>.azelradec radec2azel azel2radec<import_from_stmt>.eci eci2ecef ecef2eci<import_from_stmt>.aer eci2aer aer2eci<block_end><except_stmt>ImportError<block_start><import_from_stmt>.vallado radec2azel azel2radec<block_end>
# Copyright 2020 <NAME> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>json<import_stmt>os<import_stmt>pytest<import_from_stmt>modelstore.storage.local FileSystemStorage<line_sep># pylint: disable=unused-import <import_from_stmt>tests.storage.test_utils TEST_FILE_CONTENTS TEST_FILE_NAME file_contains_expected_contents remote_file_path remote_path temp_file <line_sep># pylint: disable=protected-access # pylint: disable=redefined-outer-name @pytest.fixture<def_stmt>fs_model_store tmp_path<block_start><return>FileSystemStorage(root_path=str(tmp_path))<block_end><def_stmt>test_validate fs_model_store<block_start><assert_stmt>fs_model_store.validate()<assert_stmt>os.path.exists(fs_model_store.root_dir)<block_end><def_stmt>test_push temp_file remote_file_path fs_model_store<block_start>result=fs_model_store._push(temp_file remote_file_path)<assert_stmt>result<eq>os.path.join(fs_model_store.root_dir remote_file_path)<block_end><def_stmt>test_pull temp_file tmp_path remote_file_path fs_model_store# Push the file to storage <block_start>remote_destination=fs_model_store._push(temp_file remote_file_path)<line_sep># Pull the file back from storage local_destination=os.path.join(tmp_path TEST_FILE_NAME)<line_sep>result=fs_model_store._pull(remote_destination tmp_path)<assert_stmt>result<eq>local_destination<assert_stmt>os.path.exists(local_destination)<assert_stmt>file_contains_expected_contents(local_destination)<block_end><def_stmt>test_read_json_objects_ignores_non_json 
tmp_path remote_path fs_model_store# Create files with different suffixes <block_start><for_stmt>file_type ["txt" "json"]<block_start>source=os.path.join(tmp_path f"test-file-source.{file_type}")<with_stmt>open(source "w")<as>out<block_start>out.write(json.dumps({"key":"value"}))<block_end># Push the file to storage remote_destination=os.path.join(remote_path f"test-file-destination.{file_type}")<line_sep>fs_model_store._push(source remote_destination)<block_end># Read the json files at the prefix items=fs_model_store._read_json_objects(remote_path)<assert_stmt>len(items)<eq>1<block_end><def_stmt>test_read_json_object_fails_gracefully temp_file remote_file_path fs_model_store# Push a file that doesn't contain JSON to storage <block_start>remote_path=fs_model_store._push(temp_file remote_file_path)<line_sep># Read the json files at the prefix item=fs_model_store._read_json_object(remote_path)<assert_stmt>item<is><none><block_end><def_stmt>test_list_versions_missing_domain fs_model_store<block_start>versions=fs_model_store.list_versions("domain-that-doesnt-exist")<assert_stmt>len(versions)<eq>0<block_end><def_stmt>test_storage_location fs_model_store# Asserts that the location meta data is correctly formatted <block_start>prefix="/path/to/file"<line_sep>exp={"type":"file_system" "path":prefix }<assert_stmt>fs_model_store._storage_location(prefix)<eq>exp<block_end>@pytest.mark.parametrize("meta_data,should_raise,result" [({"path":"/path/to/file" } <false> "/path/to/file" ) ] )<def_stmt>test_get_location fs_model_store meta_data should_raise result# Asserts that pulling the location out of meta data is correct <block_start><if_stmt>should_raise<block_start><with_stmt>pytest.raises(ValueError)<block_start>fs_model_store._get_storage_location(meta_data)<block_end><block_end><else_stmt><block_start><assert_stmt>fs_model_store._get_storage_location(meta_data)<eq>result<block_end><block_end>@pytest.mark.parametrize("state_name,should_create,expect_exists" [("state-1" 
<false> <false>) ("state-2" <true> <true>) ] )<def_stmt>test_state_exists fs_model_store state_name should_create expect_exists<block_start><if_stmt>should_create<block_start>fs_model_store.create_model_state(state_name)<block_end><assert_stmt>fs_model_store.state_exists(state_name)<eq>expect_exists<block_end>
<import_stmt>itertools<import_stmt>pytest<import_stmt>tubes<def_stmt>test_static_tube_takes_a_list <block_start>tube=tubes.Each([1 2 3])<assert_stmt>list(tube)<eq>[1 2 3]<block_end><def_stmt>test_static_tube_takes_an_iter <block_start>tube=tubes.Each(itertools.count(10)).first(3)<assert_stmt>list(tube)<eq>[10 11 12]<block_end><def_stmt>test_static_tube_with_strings <block_start>tube=tubes.Each(['a' 'b' 'c'])<assert_stmt>list(tube)<eq>['a' 'b' 'c']<block_end><def_stmt>test_static_tube_with_strings <block_start>tube=tubes.Each(['a' 'b' 'c'])<assert_stmt>list(tube.to(str))<eq>['a' 'b' 'c']<assert_stmt>list(tube.to(bytes))<eq>[b'a' b'b' b'c']<block_end><def_stmt>test_static_tube_with_encoding <block_start>tube=tubes.Each(['£' '😃' ''])<assert_stmt>list(tube.to(str))<eq>['£' '😃' '']<assert_stmt>list(tube.to(bytes))<eq>[b'\xc2\xa3' b'\xf0\x9f\x98\x83' b'']<with_stmt>pytest.raises(UnicodeEncodeError)<block_start>list(tube.to(bytes codec='ascii'))<block_end><block_end>
<import_stmt>re<import_from_stmt>ztag.annotation Annotation<import_from_stmt>ztag.annotation OperatingSystem<import_from_stmt>ztag.annotation Type<import_from_stmt>ztag.annotation Manufacturer<import_from_stmt>ztag protocols<import_stmt>ztag.test<class_stmt>FtpSharp(Annotation)<block_start>protocol=protocols.FTP<line_sep>subprotocol=protocols.FTP.BANNER<line_sep>port=<none><line_sep>manufact_re=re.compile("^220 SHARP ((MX)|(AR))-[0-9A-Z]+ Ver \d+(\.[0-9a-z]+)+ FTP server" re.IGNORECASE)<line_sep>product_re=re.compile("SHARP (.+) Ver" re.IGNORECASE)<line_sep>version_re=re.compile("Ver (\d+(?:\.\d+[a-z]?)*) FTP" re.IGNORECASE)<line_sep>tests={"FtpSharp_1":{"global_metadata":{"device_type":Type.GENERIC_PRINTER "manufacturer":Manufacturer.SHARP "product":"MX-5110N"} "local_metadata":{"version":"01.05.00.0m.80"}}}<def_stmt>process self obj meta<block_start>banner=obj["banner"]<if_stmt>self.manufact_re.search(banner)<block_start>meta.global_metadata.device_type=Type.GENERIC_PRINTER<line_sep>meta.global_metadata.manufacturer=Manufacturer.SHARP<line_sep>product=self.product_re.search(banner).group(1)<line_sep>meta.global_metadata.product=product<line_sep>match=self.version_re.search(banner)<line_sep>meta.local_metadata.version=match.group(1)<line_sep><return>meta<block_end><block_end>""" Tests "220 SHARP MX-3100N Ver 01.05.00.0b FTP server.\r\n" "220 SHARP MX-2010U Ver 01.05.00.2k.56 FTP server.\r\n" "220 SHARP MX-2300N Ver 01.02.00.0i FTP server.\r\n" "220 SHARP MX-5001N Ver 01.05.00.0k FTP server.\r\n" "220 SHARP MX-M502N Ver 01.05.00.0m FTP server.\r\n" "220 SHARP MX-C312 Ver 01.05.00.0m FTP server.\r\n" "220 SHARP MX-4140N Ver 01.06.00.0f.01 FTP server.\r\n" "220 SHARP MX-5110N Ver 01.05.00.0m.80 FTP server.\r\n" "220 SHARP MX-M450N Ver 01.04.00.0g FTP server.\r\n" "220 SHARP MX-C312 Ver 01.05.00.0m FTP server.\r\n" "220 SHARP AR-M257 Ver 01.04.00.0e FTP server.\r\n" "220 SHARP MX-M550N Ver 01.04.00.0c FTP server.\r\n" "220 SHARP MX-C300W Ver 02.03.E1.00 FTP 
server.\r\n" "220 SHARP MX-M452N Ver 01.05.00.0k FTP server.\r\n" "220 SHARP MX-M452N Ver 01.05.00.0k FTP server.\r\n" "220 SHARP MX-2010U Ver 01.05.00.2k.51 FTP server.\r\n" "220 SHARP MX-2010U Ver 01.05.00.2k.56 FTP server.\r\n" "220 SHARP MX-2615N Ver 01.05.00.0q.06 FTP server.\r\n" "220 SHARP MX-M450U Ver 01.04.00.0e FTP server.\r\n" "220 SHARP MX-4101N Ver 01.05.00.0k FTP server.\r\n" "220 SHARP MX-M452N Ver 01.05.00.0k FTP server.\r\n" "220 SHARP MX-4112N Ver 01.05.00.0o.12 FTP server.\r\n" "220 SHARP MX-2300N Ver 01.02.00.0d FTP server.\r\n" "220 SHARP MX-2314N Ver 01.05.00.0q.06 FTP server.\r\n" "220 SHARP MX-3501N Ver 01.02.00.0e FTP server.\r\n" "220 SHARP MX-6240N Ver 01.06.00.00.107 FTP server.\r\n" "220 SHARP MX-2600FN Ver 01.05.00.0m FTP server.\r\n" "220 SHARP MX-2300N Ver 01.02.00.0i FTP server.\r\n" "220 SHARP MX-B400P Ver 01.05.00.0k FTP server.\r\n" "220 SHARP MX-5112N Ver 01.05.00.0o.12 FTP server.\r\n" "220 SHARP MX-2610N Ver 01.05.00.0m.93.U FTP server.\r\n" """<block_end>
<import_stmt>tensorflow<as>tf<def_stmt>init *args<block_start><if_stmt>len(args)<eq>1<block_start>use_pb=<true><line_sep>pb_path=args[0]<block_end><else_stmt><block_start>use_pb=<false><line_sep>meta_path=args[0]<line_sep>restore_model_path=args[1]<block_end><def_stmt>ini_ckpt <block_start>graph=tf.Graph()<line_sep>graph.as_default()<line_sep>configProto=tf.ConfigProto()<line_sep>configProto.gpu_options.allow_growth=<true><line_sep>sess=tf.Session(config=configProto)<line_sep>#load_model(model_path, sess) saver=tf.train.import_meta_graph(meta_path)<line_sep>saver.restore(sess restore_model_path)<line_sep>print("Model restred!")<line_sep><return>(graph sess)<block_end><def_stmt>init_pb model_path<block_start>config=tf.ConfigProto()<line_sep>config.gpu_options.per_process_gpu_memory_fraction=0.2<line_sep>compute_graph=tf.Graph()<line_sep>compute_graph.as_default()<line_sep>sess=tf.Session(config=config)<with_stmt>tf.gfile.GFile(model_path 'rb')<as>fid<block_start>graph_def=tf.GraphDef()<line_sep>graph_def.ParseFromString(fid.read())<line_sep>tf.import_graph_def(graph_def name='')<block_end># saver = tf.train.Saver(tf.global_variables()) # saver.save(sess, save_path='./tmp.ckpt') <return>(compute_graph sess)<block_end><if_stmt>use_pb<block_start>model=init_pb(pb_path)<block_end><else_stmt><block_start>model=ini_ckpt()<block_end>graph=model[0]<line_sep>sess=model[1]<line_sep><return>graph sess<block_end>
<import_stmt>sys<import_stmt>os<line_sep>sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)) '..'))<import_from_stmt>qfc.core filter_files get_weight<def_stmt>_equals marks_list1 marks_list2<block_start>l1=sorted(marks_list1)<line_sep>l2=sorted(marks_list2)<if_stmt>len(l1)<ne>len(l2)<block_start><return><false><block_end><for_stmt>i,_ enumerate(l1)<block_start><if_stmt>l1[i]<ne>l2[i]<block_start><return><false><block_end><block_end><return><true><block_end><def_stmt>test_filter_files <block_start>files=['/' '/a/' '/b/' '/a/b' '/a/b/c' '/b/a/' '/b/a/c' 'd' 'da']<assert_stmt>(_equals(filter_files(files '') ['/' 'd' 'da']))<assert_stmt>(_equals(filter_files(files '/') ['/']))<assert_stmt>(_equals(filter_files(files 'a') ['/a/' '/b/a/' 'da']))<block_end><def_stmt>test_weight <block_start><assert_stmt>(get_weight('a' '')<eq>1001)<assert_stmt>(get_weight('a/' '')<eq>1000)<assert_stmt>(get_weight('a/b/' '')<eq>2000)<assert_stmt>(get_weight('a/b/c' '')<eq>3001)<assert_stmt>(get_weight('a' 'a')<eq>1001)<assert_stmt>(get_weight('ab' 'a')<eq>1021)<assert_stmt>(get_weight('bab' 'a')<eq>1111)<assert_stmt>(get_weight('a_b' 'a')<eq>1011)<assert_stmt>(get_weight('root/a_b' 'a')<eq>2011)<assert_stmt>(get_weight('root/a_b_c_d_e_f_g_h_i_j_k' 'k')<eq>2091)<assert_stmt>(get_weight('a/b/c/d/e/f/g/h/i/j/k' 'k')<eq>10001)<assert_stmt>(get_weight('a/B/' 'b')<eq>2000)<block_end>
# Copyright 2015 Netflix, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>urllib<import_from_stmt>flask request<import_from_stmt>flask render_template make_response<import_from_stmt>flask_mail Message<import_from_stmt>sleepypuppy app db flask_mail csrf_protect<import_from_stmt>sleepypuppy.admin.payload.models Payload<import_from_stmt>sleepypuppy.admin.capture.models Capture<import_from_stmt>sleepypuppy.admin.collector.models GenericCollector<import_from_stmt>sleepypuppy.admin.access_log.models AccessLog<import_from_stmt>sleepypuppy.admin.assessment.models Assessment<import_from_stmt>sleepypuppy.admin.user.models User<import_from_stmt>flask Response<import_from_stmt>urlparse urlparse<line_sep>@app.route('/x' methods=['GET'])<def_stmt>x_collector payload=1<block_start>""" Determine the payload associated with the request. If accesslog is enabled for the payload, record the event and email users subscribed to the payload's assessment. 
"""<line_sep>the_payload=Payload.query.filter_by(id=int(request.args.get('u' 1))).first()<line_sep>assessment_id=request.args.get('a' 1)<line_sep># consider only looking up payload one time for performance the_assessment=Assessment.query.filter_by(id=int(assessment_id)).first()<try_stmt><block_start><if_stmt>the_assessment.access_log_enabled<block_start>referrer=request.headers.get("Referrer" <none>)<line_sep>user_agent=request.headers.get("User-Agent" <none>)<line_sep>ip_address=request.access_route[-1]<line_sep>client_info=AccessLog(the_payload.id the_assessment.name referrer user_agent ip_address)<line_sep>db.session.add(client_info)<line_sep>db.session.commit()<line_sep>email_subscription(the_payload.id the_assessment <none> client_info 'access_log')<block_end><block_end><except_stmt>Exception<as>err<block_start>app.logger.warn("assessment not found, can't check access log.")<line_sep>app.logger.warn(err)<block_end># Log for recording access log records <if_stmt>request.args.get('u' 1)<block_start><return>collector(request.args.get('u' 1))<block_end><block_end>@app.route('/loader.js' methods=['GET'])<def_stmt>collector payload=1<block_start>""" Render Puppyscript payload with unique identifier and hosts for callback. Enforce snooze and run_once directives. 
"""<line_sep>payload=request.args.get('u' 1)<line_sep>assessment=request.args.get('a' 1)<try_stmt><block_start>the_assessment=Assessment.query.filter_by(id=int(assessment)).first()<if_stmt>the_assessment.snooze<block_start><return>''<block_end><if_stmt>the_assessment.run_once<and>Capture.query.filter_by(payload=int(payload) assessment=the_assessment.name).first()<block_start><return>''<block_end><if_stmt>the_assessment.run_once<and>GenericCollector.query.filter_by(payload=int(payload) assessment=the_assessment.name).first()<block_start><return>''<block_end><block_end><except_stmt>Exception<as>err<block_start>app.logger.warn(err)<block_end># Render the template and include payload, hostname, callback_protocol, # assessment. # If you need to expose additional server side # information for your JavaScripts, do it here. <try_stmt><block_start>headers={'Content-Type':'text/javascript'}<line_sep><return>make_response(render_template('loader.js' payload=payload assessment=the_assessment.id hostname=app.config['CALLBACK_HOSTNAME'] callback_protocol=app.config.get('CALLBACK_PROTOCOL' 'https')) 200 headers)<block_end><except_stmt><block_start>app.logger.warn("Assessment not found, defaulting to General.")<line_sep># If the assessment doesn't exist, default to general headers={'Content-Type':'text/javascript'}<line_sep><return>make_response(render_template('loader.js' payload=payload assessment=1 hostname=app.config['CALLBACK_HOSTNAME'] callback_protocol=app.config.get('CALLBACK_PROTOCOL' 'https')) 200 headers)<block_end><block_end><def_stmt>email_subscription payload the_assessment url client_info model<block_start>""" Email notifications for captures, generic collections, and access log """<line_sep>email_list=[]<line_sep>notify_jobs=Payload.query.filter_by(id=payload).first()<line_sep>user_notify=User.query.all()<for_stmt>user user_notify<block_start>user_subscriptions=[]<for_stmt>assessment 
user.assessments<block_start>user_subscriptions.append(assessment.id)<block_end><if_stmt>the_assessment.id<in>user_subscriptions<block_start>email_list.append(user.email)<block_end><block_end><import_stmt>cgi<if_stmt>model<eq>"capture"<block_start>subject="[Sleepy Puppy] - Capture Received From: {}".format(cgi.escape(url quote=<true>))<line_sep>html="<b>Associated Assessment: </b>{}<br/>".format(cgi.escape(the_assessment.name quote=<true>))<line_sep>html<augadd>"<b>URL: </b>{}<br/>".format(cgi.escape(url quote=<true>))<line_sep>html<augadd>"<b>Payload: </b>{}<br/>".format(cgi.escape(notify_jobs.payload quote=<true>))<if_stmt>notify_jobs.notes<is><not><none><block_start>html<augadd>"<b>Notes: </b>{}<br/>".format(cgi.escape(notify_jobs.notes quote=<true>))<block_end>html<augadd>"<b>Capture: </b>{}://{}/capture/?flt1_0={}&flt3_14={}".format(app.config.get('CALLBACK_PROTOCOL' 'https') app.config.get('HOSTNAME' 'localhost') payload the_assessment.name)<block_end><elif_stmt>model<eq>"access_log"<block_start>subject="[Sleepy Puppy] - Access Log Request Received For Assessment(s): {}".format(cgi.escape(the_assessment.name quote=<true>))<line_sep>html="<b>Associated Assessment: </b>{}<br/>".format(cgi.escape(the_assessment.name quote=<true>))<line_sep>html<augadd>"<b>Referer: </b>{}<br/>".format(cgi.escape(client_info.referrer<or>"" quote=<true>))<line_sep>html<augadd>"<b>User Agent: </b>{}<br/>".format(cgi.escape(client_info.user_agent<or>"" quote=<true>))<line_sep>html<augadd>"<b>IP Address: </b>{}<br/>".format(cgi.escape(client_info.ip_address quote=<true>))<line_sep>html<augadd>"<b>AccessLog: </b>{}://{}/accesslog/?flt1_7={}&flt2_14={}".format(app.config.get('CALLBACK_PROTOCOL' 'https') app.config.get('HOSTNAME' 'localhost') payload the_assessment.name)<block_end><elif_stmt>model<eq>"generic_collector"<block_start>subject="[Sleepy Puppy] - Generic Collector Received From: {}".format(cgi.escape(client_info.url quote=<true>))<line_sep>html="<b>Associated Assessment: 
</b>{}<br/>".format(cgi.escape(the_assessment.name quote=<true>))<line_sep>html<augadd>"<b>Puppyscript Name: </b>{}<br/>".format(cgi.escape(client_info.puppyscript_name<or>"" quote=<true>))<line_sep>html<augadd>"<b>Url: </b>{}<br/>".format(cgi.escape(client_info.url<or>"" quote=<true>))<line_sep>html<augadd>"<b>Referer: </b>{}<br/>".format(cgi.escape(client_info.referrer<or>"" quote=<true>))<line_sep>html<augadd>"<b>Generic Collector: </b>{}://{}/genericcollector/?flt1_0={}&flt2_7={}".format(app.config.get('CALLBACK_PROTOCOL' 'https') app.config.get('HOSTNAME' 'localhost') payload the_assessment.name)<block_end># If there are people to email, email them that a capture was received <if_stmt>email_list<block_start><if_stmt>app.config["EMAILS_USE_SES"]<block_start><import_stmt>boto.ses<try_stmt><block_start>ses_region=app.config.get('SES_REGION' 'us-east-1')<line_sep>ses=boto.ses.connect_to_region(ses_region)<block_end><except_stmt>Exception e<block_start><import_stmt>traceback<line_sep>app.logger.debug(Exception)<line_sep>app.logger.debug(e)<line_sep>app.logger.warn(traceback.format_exc())<line_sep><return><block_end><for_stmt>email email_list<block_start><try_stmt><block_start>ses.send_email(app.config['MAIL_SENDER'] subject html email format="html")<line_sep>app.logger.debug("Emailed {} - {} ".format(email subject))<block_end><except_stmt>Exception e<block_start>m="Failed to send failure message to {} from {} with subject: {}\n{} {}".format(email app.config['MAIL_SENDER'] subject Exception e)<line_sep>app.logger.debug(m)<block_end><block_end><block_end><else_stmt><block_start>msg=Message(subject sender=app.config['MAIL_SENDER'] recipients=email_list)<line_sep>msg.html=html<try_stmt><block_start>flask_mail.send(msg)<block_end><except_stmt>Exception<as>err<block_start>app.logger.debug(Exception)<line_sep>app.logger.debug(err)<block_end><block_end><block_end><block_end>@csrf_protect.exempt@app.route('/generic_callback' methods=['POST' 
'GET'])<def_stmt>get_generic_callback <block_start>""" Method to handle generic callbacks from arbitrary puppyscripts. Expects Method: POST Data: payload, puppyscript_name, data Optional Data: referrer, url """<line_sep>response=Response()<if_stmt>request.method<eq>'POST'<block_start><try_stmt><block_start>app.logger.info("request.form.get('payload', 0): {}".format(request.form.get('payload' 0)))<line_sep>puppyscript_name=urllib.unquote(unicode(request.form.get('puppyscript_name' '')))<line_sep># If they don't set a url or referrer, ignore it url=urllib.unquote(unicode(request.form.get('uri' '')))<line_sep>referrer=urllib.unquote(unicode(request.form.get('referrer' '')))<try_stmt><block_start><if_stmt>app.config.get('ALLOWED_DOMAINS')<block_start>domain=urlparse(url).netloc.split(':')[0]<if_stmt>domain<not><in>app.config.get('ALLOWED_DOMAINS')<block_start>app.logger.info("Ignoring Capture from unapproved domain: [{}]".format(domain))<line_sep><return>response<block_end><block_end><block_end><except_stmt>Exception<as>e<block_start>app.logger.warn("Exception in /generic_callback when parsing url {}\n\n{}".format(Exception str(e)))<block_end># noqa data=urllib.unquote(unicode(request.form.get('data' '')))<line_sep>payload=Payload.query.filter_by(id=int(request.form.get('payload' 0))).first()<line_sep>assessment=Assessment.query.filter_by(id=int(request.form.get('assessment' 0))).first()<line_sep># If it's a rogue capture, log it anyway. 
<if_stmt>payload<is><none><or>assessment<is><none><block_start>client_info=GenericCollector(0 0 puppyscript_name url referrer data)<block_end><else_stmt># Create the capture with associated assessment/payload <block_start>client_info=GenericCollector(payload.id assessment.name puppyscript_name url referrer data)<block_end>db.session.add(client_info)<line_sep>db.session.commit()<line_sep># Email users subscribed to the Payload's Assessment email_subscription(payload.id assessment url client_info 'generic_collector')<block_end><except_stmt>Exception<as>e<block_start>app.logger.warn("Exception in /generic_callback {}\n\n{}".format(Exception str(e)))<import_stmt>traceback<line_sep>traceback.print_exc()<block_end><block_end><return>response<block_end># Disable CSRF protection on callback posts @csrf_protect.exempt@app.route('/callbacks' methods=['POST' 'GET'])<def_stmt>get_callbacks <block_start>""" Method to handle Capture creation. The Default Puppyscript provides all the expected parameters for this endpoint. 
If you need to modify the default captures, provide the following: Method: POST Data: assessment(payload.id will work here), url, referrer, cookies, user_agent, payload, screenshot, dom """<line_sep>response=Response()<line_sep>app.logger.info("Inside /callbacks")<if_stmt>request.method<eq>'POST'<block_start><try_stmt><block_start>app.logger.info("request.form.get('payload', 0): {}".format(request.form.get('payload' 0)))<line_sep>url=urllib.unquote(unicode(request.form.get('uri' '')))<if_stmt>app.config.get('ALLOWED_DOMAINS')<block_start>domain=urlparse(url).netloc.split(':')[0]<if_stmt>domain<not><in>app.config.get('ALLOWED_DOMAINS')<block_start>app.logger.info("Ignoring Capture from unapproved domain: [{}]".format(domain))<line_sep><return>response<block_end><block_end>referrer=urllib.unquote(unicode(request.form.get('referrer' '')))<line_sep>cookies=urllib.unquote(unicode(request.form.get('cookies' '')))<line_sep>user_agent=urllib.unquote(unicode(request.form.get('user_agent' '')))<line_sep>payload=Payload.query.filter_by(id=int(request.form.get('payload' 0))).first()<line_sep>assessment=Assessment.query.filter_by(id=int(request.form.get('assessment' 0))).first()<line_sep>screenshot=unicode(request.form.get('screenshot' ''))<line_sep>dom=urllib.unquote(unicode(request.form.get('dom' '')))[:65535]<line_sep># If it's a rogue capture, log it anyway. 
<if_stmt>payload<is><none><or>assessment<is><none><block_start>client_info=Capture("Not found" url referrer cookies user_agent 0 screenshot dom)<block_end><else_stmt># Create the capture with associated assessment/payload <block_start>client_info=Capture(assessment.name url referrer cookies user_agent payload.id screenshot dom)<block_end>db.session.add(client_info)<line_sep>db.session.commit()<line_sep># Email users subscribed to the Payload's Assessment email_subscription(payload.id assessment url client_info 'capture')<block_end><except_stmt>Exception<as>e<block_start>app.logger.warn("Exception in /callbacks {}\n\n{}".format(Exception str(e)))<import_stmt>traceback<line_sep>traceback.print_exc()<block_end><block_end><return>response<block_end>
<import_stmt>collections<import_stmt>getopt<import_stmt>numpy<import_stmt>os<import_stmt>random<import_stmt>sys<import_stmt>unittest<import_stmt>pickle<import_from_stmt>time clock<import_from_stmt>time gmtime<import_from_stmt>time mktime<import_from_stmt>amuse.community.ph4.interface ph4<as>grav<import_from_stmt>amuse.community.smalln.interface SmallN<import_from_stmt>amuse.community.kepler.interface Kepler<import_from_stmt>amuse.couple multiples<import_from_stmt>amuse.units nbody_system<import_from_stmt>amuse.units units<import_from_stmt>amuse.units quantities<import_from_stmt>amuse datamodel<import_from_stmt>amuse.datamodel particle_attributes<as>pa<import_from_stmt>amuse.rfi.core is_mpd_running<import_from_stmt>amuse.ic.plummer new_plummer_model<import_from_stmt>amuse.ic.salpeter new_salpeter_mass_distribution_nbody<import_from_stmt>amuse io<import_from_stmt>utils *<def_stmt>make_nbody number_of_stars=100 time=0.0 n_workers=1 use_gpu=1 gpu_worker=1 salpeter=0 delta_t=1.0|nbody_system.time timestep_parameter=0.1 softening_length=0.0|nbody_system.length random_seed=1234# Make an N-body system, print out some statistics on it, and save # it in a restart file. The restart file name is of the form # 't=nnnn.n.xxx', where the default time is 0.0. <block_start><if_stmt>random_seed<le>0<block_start>numpy.random.seed()<line_sep>random_seed=numpy.random.randint(1 pow(2 31)-1)<block_end>numpy.random.seed(random_seed)<line_sep>print("random seed =" random_seed)<line_sep>init_smalln()<line_sep># Note that there are actually three GPU options: # # 1. use the GPU code and allow GPU use (default) # 2. use the GPU code but disable GPU use (-g) # 3. 
use the non-GPU code (-G) <if_stmt>gpu_worker<eq>1<block_start><try_stmt><block_start>gravity=grav(number_of_workers=n_workers redirection="none" mode="gpu")<block_end><except_stmt>Exception<as>ex<block_start>gravity=grav(number_of_workers=n_workers redirection="none")<block_end><block_end><else_stmt><block_start>gravity=grav(number_of_workers=n_workers redirection="none")<block_end>gravity.initialize_code()<line_sep>gravity.parameters.set_defaults()<line_sep>#----------------------------------------------------------------- # Make a standard N-body system. print("making a Plummer model")<line_sep>stars=new_plummer_model(number_of_stars)<line_sep>id=numpy.arange(number_of_stars)<line_sep>stars.id=id+1<line_sep>print("setting particle masses and radii")<if_stmt>salpeter<eq>0<block_start>print('equal masses')<line_sep>total_mass=1.0|nbody_system.mass<line_sep>scaled_mass=total_mass/number_of_stars<block_end><else_stmt><block_start>print('salpeter mass function')<line_sep>mmin=0.5|nbody_system.mass<line_sep>mmax=10.0|nbody_system.mass<line_sep>scaled_mass=new_salpeter_mass_distribution_nbody(number_of_stars mass_min=mmin mass_max=mmax)<block_end>stars.mass=scaled_mass<line_sep>print("centering stars")<line_sep>stars.move_to_center()<line_sep>print("scaling stars to virial equilibrium")<line_sep>stars.scale_to_standard(smoothing_length_squared=gravity.parameters.epsilon_squared)<line_sep># Set dynamical radii (assuming virial equilibrium and standard # units). Note that this choice should be refined, and updated # as the system evolves. Probably the choice of radius should be # made entirely in the multiples module. TODO. In these units, # M = 1 and <v^2> = 0.5, so the mean 90-degree turnaround impact # parameter is # # b_90 = G (m_1+m_2) / vrel^2 # = 2 <m> / 2<v^2> # = 2 / N for equal masses # # Taking r_i = m_i / 2<v^2> = m_i in virial equilibrium means # that, approximately, "contact" means a 90-degree deflection (r_1 # + r_2 = b_90). 
A more conservative choice with r_i less than # this value will isolate encounters better, but also place more # load on the large-N dynamical module. stars.radius=0.5<times>stars.mass.number|nbody_system.length<line_sep>time=0.0|nbody_system.time<line_sep># print "IDs:", stars.id.number print("recentering stars")<line_sep>stars.move_to_center()<line_sep>sys.stdout.flush()<line_sep>#----------------------------------------------------------------- <if_stmt>softening_length<l>0.0|nbody_system.length# Use ~interparticle spacing. Assuming standard units here. TODO <block_start>softening_length=0.5<times>float(number_of_stars)<power>(-0.3333333)|nbody_system.length<block_end>print('softening length =' softening_length)<line_sep>gravity.parameters.timestep_parameter=timestep_parameter<line_sep>gravity.parameters.epsilon_squared=softening_length<times>softening_length<line_sep>gravity.parameters.use_gpu=use_gpu<line_sep>print('')<line_sep>print("adding particles")<line_sep># print stars sys.stdout.flush()<line_sep>gravity.particles.add_particles(stars)<line_sep>gravity.commit_particles()<line_sep>print('')<line_sep>print("number_of_stars =" number_of_stars)<line_sep>sys.stdout.flush()<line_sep># Channel to copy values from the code to the set in memory. channel=gravity.particles.new_channel_to(stars)<line_sep>stopping_condition=gravity.stopping_conditions.collision_detection<line_sep>stopping_condition.enable()<line_sep># ----------------------------------------------------------------- # Create the coupled code and integrate the system to the desired # time, managing interactions internally. 
kep=init_kepler(stars[0] stars[1])<line_sep>multiples_code=multiples.Multiples(gravity new_smalln kep)<line_sep>multiples_code.neighbor_perturbation_limit=0.1<line_sep>multiples_code.neighbor_veto=<true><line_sep>print('')<line_sep>print('multiples_code.initial_scale_factor =' multiples_code.initial_scale_factor)<line_sep>print('multiples_code.neighbor_perturbation_limit =' multiples_code.neighbor_perturbation_limit)<line_sep>print('multiples_code.neighbor_veto =' multiples_code.neighbor_veto)<line_sep>print('multiples_code.final_scale_factor =' multiples_code.final_scale_factor)<line_sep>print('multiples_code.initial_scatter_factor =' multiples_code.initial_scatter_factor)<line_sep>print('multiples_code.final_scatter_factor =' multiples_code.final_scatter_factor)<line_sep>print('multiples_code.retain_binary_apocenter =' multiples_code.retain_binary_apocenter)<line_sep>print('multiples_code.wide_perturbation_limit =' multiples_code.wide_perturbation_limit)<line_sep># Take a dummy step, just in case... multiples_code.evolve_model(time)<line_sep># Copy values from the module to the set in memory. channel.copy()<line_sep># Copy the index (ID) as used in the module to the id field in # memory. The index is not copied by default, as different # codes may have different indices for the same particle and # we don't want to overwrite silently. 
channel.copy_attribute("index_in_code" "id")<line_sep>pre="%%% "<line_sep>E0,cpu0=print_log(pre time multiples_code)<line_sep>sys.stdout.flush()<line_sep># file = 't='+'{:07.2f}'.format(time.number) # fails in Python 2.6 file='t=%07.2f'%time.number<line_sep>write_state_to_file(time stars gravity multiples_code file delta_t E0 cpu0)<line_sep>tree_copy=multiples_code.root_to_tree.copy()<del_stmt>multiples_code<line_sep>sys.stdout.flush()<line_sep>gravity.stop()<line_sep>kep.stop()<line_sep>stop_smalln()<line_sep>print('')<block_end><if_stmt>__name__<eq>'__main__'# Defaults: <block_start>N=1000<line_sep>time=0.0|nbody_system.time<line_sep>delta_t=1.0|nbody_system.time<line_sep>n_workers=1<line_sep>use_gpu=1<line_sep>gpu_worker=1<line_sep>salpeter=0<line_sep>timestep_parameter=0.1<line_sep>softening_length=0.0|nbody_system.length<line_sep>random_seed=-1<try_stmt><block_start>opts,args=getopt.getopt(sys.argv[1:] "n:st:")<block_end><except_stmt>getopt.GetoptError<as>err<block_start>print(str(err))<line_sep>sys.exit(1)<block_end><for_stmt>o,a opts<block_start><if_stmt>o<eq>"-n"<block_start>N=int(a)<block_end><elif_stmt>o<eq>"-s"<block_start>salpeter=1<block_end><elif_stmt>o<eq>"-t"<block_start>time=float(a)|nbody_system.time<block_end><else_stmt><block_start>print("unexpected argument" o)<block_end><block_end><assert_stmt>is_mpd_running()<line_sep>make_nbody(N time n_workers use_gpu gpu_worker salpeter delta_t timestep_parameter softening_length random_seed)<block_end>
# This sample tests the case where a __getattr__ method override # differentiates based on the name of the accessed member. <import_from_stmt>typing Any overload Literal<class_stmt>Obj<block_start>@overload<def_stmt>__getattr__ self name:Literal["foo"]<arrow>int<block_start><ellipsis><block_end>@overload<def_stmt>__getattr__ self name:Literal["bar"]<arrow>str<block_start><ellipsis><block_end><def_stmt>__getattr__ self name:str<arrow>Any<block_start><if_stmt>name<eq>"foo"<block_start><return>1<block_end><return>"1"<block_end><block_end>obj=Obj()<line_sep>b1=obj.foo<line_sep>reveal_type(b1 expected_text="int")<line_sep>b2=getattr(obj "foo")<line_sep>reveal_type(b2 expected_text="Any")<line_sep>c1=obj.bar<line_sep>reveal_type(c1 expected_text="str")<line_sep>c2=getattr(obj "bar")<line_sep>reveal_type(c2 expected_text="Any")<line_sep>
<import_from_stmt>datetime datetime<import_from_stmt>elasticsearch_dsl DocType String Date Integer Float<import_from_stmt>elasticsearch_dsl.connections connections<line_sep># Define a default Elasticsearch client connections.create_connection(hosts=['localhost'])<class_stmt>Extension(DocType)<block_start>name=String()<line_sep>url=String()<line_sep>description=String()<line_sep>user_count=Integer()<line_sep>review_count=Float()<line_sep>review_score=Float()<class_stmt>Meta<block_start>index='exts'<block_end><block_end># create the mappings in elasticsearch Extension.init()<import_stmt>json<line_sep>exts=json.load(open('data/PAGES.json'))<line_sep># TODO source code extract # rob query: all ext with this permission in manifest and this regex in source code # https://www.elastic.co/guide/en/elasticsearch/guide/current/nested-query.html <for_stmt>ext exts<block_start>print(ext['name'])<line_sep>sources=extract_sources(ext['id'])<line_sep># create and save ext=Extension(meta={'id':ext['ext_id']} name=ext['name'] sources=sources url=ext['url'] review_count=ext['aggregateRating.properties.ratingCount'] review_score=ext['aggregateRating.properties.ratingValue'] description=ext['full_description'] user_count=int(ext['user_count']))<line_sep>ext.save()<block_end># Display cluster health print(connections.get_connection().cluster.health())<line_sep>
<import_stmt>numpy<as>np<import_from_stmt>matplotlib pyplot<as>plt<import_from_stmt>.basewidget BaseWidget<import_from_stmt>..toolkit get_template_extremum_channel get_template_extremum_amplitude<import_from_stmt>.utils get_unit_colors<class_stmt>UnitsDepthAmplitudeWidget(BaseWidget)<block_start><def_stmt>__init__ self waveform_extractor peak_sign='neg' depth_axis=1 unit_colors=<none> figure=<none> ax=<none><block_start>BaseWidget.__init__(self figure ax)<line_sep>self.we=waveform_extractor<line_sep>self.peak_sign=peak_sign<line_sep>self.depth_axis=depth_axis<if_stmt>unit_colors<is><none><block_start>unit_colors=get_unit_colors(self.we.sorting)<block_end>self.unit_colors=unit_colors<block_end><def_stmt>plot self<block_start>ax=self.ax<line_sep>we=self.we<line_sep>unit_ids=we.sorting.unit_ids<line_sep>channels_index=get_template_extremum_channel(we peak_sign=self.peak_sign outputs='index')<line_sep>probe=we.recording.get_probe()<line_sep>channel_depth=probe.contact_positions[: self.depth_axis]<line_sep>unit_depth=[channel_depth[channels_index[unit_id]]<for>unit_id unit_ids]<line_sep>unit_amplitude=get_template_extremum_amplitude(we peak_sign=self.peak_sign)<line_sep>unit_amplitude=np.abs([unit_amplitude[unit_id]<for>unit_id unit_ids])<line_sep>colors=[self.unit_colors[unit_id]<for>unit_id unit_ids]<line_sep>num_spikes=np.zeros(len(unit_ids))<for_stmt>i,unit_id enumerate(unit_ids)<block_start><for_stmt>segment_index range(we.sorting.get_num_segments())<block_start>st=we.sorting.get_unit_spike_train(unit_id=unit_id segment_index=segment_index)<line_sep>num_spikes[i]<augadd>st.size<block_end><block_end>size=num_spikes/max(num_spikes)<times>120<line_sep>ax.scatter(unit_amplitude unit_depth color=colors s=size)<line_sep>ax.set_aspect(3)<line_sep>ax.set_xlabel('amplitude')<line_sep>ax.set_ylabel('depth [um]')<line_sep>ax.set_xlim(0 max(unit_amplitude)<times>1.2)<block_end><block_end><def_stmt>plot_units_depth_vs_amplitude *args 
**kwargs<block_start>W=UnitsDepthAmplitudeWidget(*args **kwargs)<line_sep>W.plot()<line_sep><return>W<block_end>plot_units_depth_vs_amplitude.__doc__=UnitsDepthAmplitudeWidget.__doc__<line_sep>
<import_from_stmt>.NoSolutionError NoSolutionError<import_from_stmt>.DnaOptimizationProblem DnaOptimizationProblem<import_from_stmt>.CircularDnaOptimizationProblem CircularDnaOptimizationProblem<line_sep>__all__=["NoSolutionError" "DnaOptimizationProblem" "CircularDnaOptimizationProblem"]<line_sep>
# -*- coding: utf-8 -*- """ v9s model * Input: v5_im Author: Kohei <<EMAIL>> """<import_from_stmt>logging getLogger Formatter StreamHandler INFO FileHandler<import_from_stmt>pathlib Path<import_stmt>subprocess<import_stmt>argparse<import_stmt>math<import_stmt>glob<import_stmt>sys<import_stmt>json<import_stmt>re<import_stmt>warnings<import_stmt>scipy<import_stmt>tqdm<import_stmt>click<import_stmt>tables<as>tb<import_stmt>pandas<as>pd<import_stmt>numpy<as>np<import_from_stmt>keras.models Model<import_from_stmt>keras.engine.topology merge<as>merge_l<import_from_stmt>keras.layers Input Convolution2D MaxPooling2D UpSampling2D Reshape core Dropout Activation BatchNormalization <import_from_stmt>keras.optimizers Adam<import_from_stmt>keras.callbacks ModelCheckpoint EarlyStopping History<import_from_stmt>keras backend<as>K<import_stmt>skimage.transform<import_stmt>skimage.morphology<import_stmt>rasterio.features<import_stmt>shapely.wkt<import_stmt>shapely.ops<import_stmt>shapely.geometry<line_sep>MODEL_NAME='v9s'<line_sep>ORIGINAL_SIZE=650<line_sep>INPUT_SIZE=256<line_sep>LOGFORMAT='%(asctime)s %(levelname)s %(message)s'<line_sep>BASE_DIR="/data/train"<line_sep>WORKING_DIR="/data/working"<line_sep>IMAGE_DIR="/data/working/images/{}".format('v5')<line_sep>MODEL_DIR="/data/working/models/{}".format(MODEL_NAME)<line_sep>FN_SOLUTION_CSV="/data/output/{}.csv".format(MODEL_NAME)<line_sep># Parameters MIN_POLYGON_AREA=30<line_sep># Input files 
FMT_TRAIN_SUMMARY_PATH=str(Path(BASE_DIR)/Path("{prefix:s}_Train/")/Path("summaryData/{prefix:s}_Train_Building_Solutions.csv"))<line_sep>FMT_TRAIN_RGB_IMAGE_PATH=str(Path(BASE_DIR)/Path("{prefix:s}_Train/")/Path("RGB-PanSharpen/RGB-PanSharpen_{image_id:s}.tif"))<line_sep>FMT_TEST_RGB_IMAGE_PATH=str(Path(BASE_DIR)/Path("{prefix:s}_Test_public/")/Path("RGB-PanSharpen/RGB-PanSharpen_{image_id:s}.tif"))<line_sep>FMT_TRAIN_MSPEC_IMAGE_PATH=str(Path(BASE_DIR)/Path("{prefix:s}_Train/")/Path("MUL-PanSharpen/MUL-PanSharpen_{image_id:s}.tif"))<line_sep>FMT_TEST_MSPEC_IMAGE_PATH=str(Path(BASE_DIR)/Path("{prefix:s}_Test_public/")/Path("MUL-PanSharpen/MUL-PanSharpen_{image_id:s}.tif"))<line_sep># Preprocessing result FMT_BANDCUT_TH_PATH=IMAGE_DIR+"/bandcut{}.csv"<line_sep>FMT_MUL_BANDCUT_TH_PATH=IMAGE_DIR+"/mul_bandcut{}.csv"<line_sep># Image list, Image container and mask container FMT_VALTRAIN_IMAGELIST_PATH=IMAGE_DIR+"/{prefix:s}_valtrain_ImageId.csv"<line_sep>FMT_VALTEST_IMAGELIST_PATH=IMAGE_DIR+"/{prefix:s}_valtest_ImageId.csv"<line_sep>FMT_VALTRAIN_IM_STORE=IMAGE_DIR+"/valtrain_{}_im.h5"<line_sep>FMT_VALTEST_IM_STORE=IMAGE_DIR+"/valtest_{}_im.h5"<line_sep>FMT_VALTRAIN_MASK_STORE=IMAGE_DIR+"/valtrain_{}_mask.h5"<line_sep>FMT_VALTEST_MASK_STORE=IMAGE_DIR+"/valtest_{}_mask.h5"<line_sep>FMT_VALTRAIN_MUL_STORE=IMAGE_DIR+"/valtrain_{}_mul.h5"<line_sep>FMT_VALTEST_MUL_STORE=IMAGE_DIR+"/valtest_{}_mul.h5"<line_sep>FMT_TRAIN_IMAGELIST_PATH=IMAGE_DIR+"/{prefix:s}_train_ImageId.csv"<line_sep>FMT_TEST_IMAGELIST_PATH=IMAGE_DIR+"/{prefix:s}_test_ImageId.csv"<line_sep>FMT_TRAIN_IM_STORE=IMAGE_DIR+"/train_{}_im.h5"<line_sep>FMT_TEST_IM_STORE=IMAGE_DIR+"/test_{}_im.h5"<line_sep>FMT_TRAIN_MASK_STORE=IMAGE_DIR+"/train_{}_mask.h5"<line_sep>FMT_TRAIN_MUL_STORE=IMAGE_DIR+"/train_{}_mul.h5"<line_sep>FMT_TEST_MUL_STORE=IMAGE_DIR+"/test_{}_mul.h5"<line_sep>FMT_IMMEAN=IMAGE_DIR+"/{}_immean.h5"<line_sep>FMT_MULMEAN=IMAGE_DIR+"/{}_mulmean.h5"<line_sep># Model files 
FMT_VALMODEL_PATH=MODEL_DIR+"/{}_val_weights.h5"<line_sep>FMT_FULLMODEL_PATH=MODEL_DIR+"/{}_full_weights.h5"<line_sep>FMT_VALMODEL_HIST=MODEL_DIR+"/{}_val_hist.csv"<line_sep>FMT_VALMODEL_EVALHIST=MODEL_DIR+"/{}_val_evalhist.csv"<line_sep>FMT_VALMODEL_EVALTHHIST=MODEL_DIR+"/{}_val_evalhist_th.csv"<line_sep># Prediction & polygon result FMT_TESTPRED_PATH=MODEL_DIR+"/{}_pred.h5"<line_sep>FMT_VALTESTPRED_PATH=MODEL_DIR+"/{}_eval_pred.h5"<line_sep>FMT_VALTESTPOLY_PATH=MODEL_DIR+"/{}_eval_poly.csv"<line_sep>FMT_VALTESTTRUTH_PATH=MODEL_DIR+"/{}_eval_poly_truth.csv"<line_sep>FMT_VALTESTPOLY_OVALL_PATH=MODEL_DIR+"/eval_poly.csv"<line_sep>FMT_VALTESTTRUTH_OVALL_PATH=MODEL_DIR+"/eval_poly_truth.csv"<line_sep>FMT_TESTPOLY_PATH=MODEL_DIR+"/{}_poly.csv"<line_sep># Model related files (others) FMT_VALMODEL_LAST_PATH=MODEL_DIR+"/{}_val_weights_last.h5"<line_sep>FMT_FULLMODEL_LAST_PATH=MODEL_DIR+"/{}_full_weights_last.h5"<line_sep># Logger warnings.simplefilter("ignore" UserWarning)<line_sep>handler=StreamHandler()<line_sep>handler.setLevel(INFO)<line_sep>handler.setFormatter(Formatter(LOGFORMAT))<line_sep>fh_handler=FileHandler(".{}.log".format(MODEL_NAME))<line_sep>fh_handler.setFormatter(Formatter(LOGFORMAT))<line_sep>logger=getLogger('spacenet2')<line_sep>logger.setLevel(INFO)<if_stmt>__name__<eq>'__main__'<block_start>logger.addHandler(handler)<line_sep>logger.addHandler(fh_handler)<block_end># Fix seed for reproducibility np.random.seed(1145141919)<def_stmt>directory_name_to_area_id datapath<block_start>""" Directory name to AOI number Usage: >>> directory_name_to_area_id("/data/test/AOI_2_Vegas") 2 
"""<line_sep>dir_name=Path(datapath).name<if_stmt>dir_name.startswith('AOI_2_Vegas')<block_start><return>2<block_end><elif_stmt>dir_name.startswith('AOI_3_Paris')<block_start><return>3<block_end><elif_stmt>dir_name.startswith('AOI_4_Shanghai')<block_start><return>4<block_end><elif_stmt>dir_name.startswith('AOI_5_Khartoum')<block_start><return>5<block_end><else_stmt><block_start><raise>RuntimeError("Unsupported city id is given.")<block_end><block_end><def_stmt>_remove_interiors line<block_start><if_stmt>"), ("<in>line<block_start>line_prefix=line.split('), (')[0]<line_sep>line_terminate=line.split('))",')[-1]<line_sep>line=(line_prefix+'))",'+line_terminate)<block_end><return>line<block_end><def_stmt>__load_band_cut_th band_fn bandsz=3<block_start>df=pd.read_csv(band_fn index_col='area_id')<line_sep>all_band_cut_th={area_id:{}<for>area_id range(2 6)}<for_stmt>area_id,row df.iterrows()<block_start><for_stmt>chan_i range(bandsz)<block_start>all_band_cut_th[area_id][chan_i]=dict(min=row['chan{}_min'.format(chan_i)] max=row['chan{}_max'.format(chan_i)] )<block_end><block_end><return>all_band_cut_th<block_end><def_stmt>_calc_fscore_per_aoi area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>truth_file=FMT_VALTESTTRUTH_PATH.format(prefix)<line_sep>poly_file=FMT_VALTESTPOLY_PATH.format(prefix)<line_sep>cmd=['java' '-jar' '/root/visualizer-2.0/visualizer.jar' '-truth' truth_file '-solution' poly_file '-no-gui' '-band-triplets' '/root/visualizer-2.0/data/band-triplets.txt' '-image-dir' 'pass' ]<line_sep>proc=subprocess.Popen(cmd stdout=subprocess.PIPE stderr=subprocess.PIPE )<line_sep>stdout_data,stderr_data=proc.communicate()<line_sep>lines=[line<for>line stdout_data.decode('utf8').split('\n')[-10:]]<line_sep>""" Overall F-score : 0.85029 AOI_2_Vegas: TP : 27827 FP : 4999 FN : 4800 Precision: 0.847712 Recall : 0.852883 F-score : 0.85029 """<if_stmt>stdout_data.decode('utf8').strip().endswith("Overall F-score : 
0")<block_start>overall_fscore=0<line_sep>tp=0<line_sep>fp=0<line_sep>fn=0<line_sep>precision=0<line_sep>recall=0<line_sep>fscore=0<block_end><elif_stmt>len(lines)<g>0<and>lines[0].startswith("Overall F-score : ")<block_start><assert_stmt>lines[0].startswith("Overall F-score : ")<assert_stmt>lines[2].startswith("AOI_")<assert_stmt>lines[3].strip().startswith("TP")<assert_stmt>lines[4].strip().startswith("FP")<assert_stmt>lines[5].strip().startswith("FN")<assert_stmt>lines[6].strip().startswith("Precision")<assert_stmt>lines[7].strip().startswith("Recall")<assert_stmt>lines[8].strip().startswith("F-score")<line_sep>overall_fscore=float(re.findall("([\d\.]+)" lines[0])[0])<line_sep>tp=int(re.findall("(\d+)" lines[3])[0])<line_sep>fp=int(re.findall("(\d+)" lines[4])[0])<line_sep>fn=int(re.findall("(\d+)" lines[5])[0])<line_sep>precision=float(re.findall("([\d\.]+)" lines[6])[0])<line_sep>recall=float(re.findall("([\d\.]+)" lines[7])[0])<line_sep>fscore=float(re.findall("([\d\.]+)" lines[8])[0])<block_end><else_stmt><block_start>logger.warn("Unexpected data >>> "+stdout_data.decode('utf8'))<line_sep><raise>RuntimeError("Unsupported format")<block_end><return>{'overall_fscore':overall_fscore 'tp':tp 'fp':fp 'fn':fn 'precision':precision 'recall':recall 'fscore':fscore }<block_end><def_stmt>prefix_to_area_id prefix<block_start>area_dict={'AOI_2_Vegas':2 'AOI_3_Paris':3 'AOI_4_Shanghai':4 'AOI_5_Khartoum':5 }<line_sep><return>area_dict[area_id]<block_end><def_stmt>area_id_to_prefix area_id<block_start>area_dict={2:'AOI_2_Vegas' 3:'AOI_3_Paris' 4:'AOI_4_Shanghai' 5:'AOI_5_Khartoum' }<line_sep><return>area_dict[area_id]<block_end># --------------------------------------------------------- # main <def_stmt>_get_model_parameter area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>fn_hist=FMT_VALMODEL_EVALTHHIST.format(prefix)<line_sep>best_row=pd.read_csv(fn_hist).sort_values(by='fscore' ascending=<false> 
).iloc[0]<line_sep>param=dict(fn_epoch=int(best_row['zero_base_epoch']) min_poly_area=int(best_row['min_area_th']) )<line_sep><return>param<block_end><def_stmt>get_resized_raster_3chan_image image_id band_cut_th=<none><block_start>fn=train_image_id_to_path(image_id)<with_stmt>rasterio.open(fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<for_stmt>chan_i range(3)<block_start>min_val=band_cut_th[chan_i]['min']<line_sep>max_val=band_cut_th[chan_i]['max']<line_sep>values[chan_i]=np.clip(values[chan_i] min_val max_val)<line_sep>values[chan_i]=(values[chan_i]-min_val)/(max_val-min_val)<block_end><block_end>values=np.swapaxes(values 0 2)<line_sep>values=np.swapaxes(values 0 1)<line_sep>values=skimage.transform.resize(values (INPUT_SIZE INPUT_SIZE))<line_sep><return>values<block_end><def_stmt>get_resized_raster_3chan_image_test image_id band_cut_th=<none><block_start>fn=test_image_id_to_path(image_id)<with_stmt>rasterio.open(fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<for_stmt>chan_i range(3)<block_start>min_val=band_cut_th[chan_i]['min']<line_sep>max_val=band_cut_th[chan_i]['max']<line_sep>values[chan_i]=np.clip(values[chan_i] min_val max_val)<line_sep>values[chan_i]=(values[chan_i]-min_val)/(max_val-min_val)<block_end><block_end>values=np.swapaxes(values 0 2)<line_sep>values=np.swapaxes(values 0 1)<line_sep>values=skimage.transform.resize(values (INPUT_SIZE INPUT_SIZE))<line_sep><return>values<block_end><def_stmt>image_mask_resized_from_summary df image_id<block_start>im_mask=np.zeros((650 650))<for_stmt>idx,row df[df.ImageId<eq>image_id].iterrows()<block_start>shape_obj=shapely.wkt.loads(row.PolygonWKT_Pix)<if_stmt>shape_obj.exterior<is><not><none><block_start>coords=list(shape_obj.exterior.coords)<line_sep>x=[round(float(pp[0]))<for>pp coords]<line_sep>y=[round(float(pp[1]))<for>pp coords]<line_sep>yy,xx=skimage.draw.polygon(y x (650 650))<line_sep>im_mask[yy xx]=1<line_sep>interiors=shape_obj.interiors<for_stmt>interior 
interiors<block_start>coords=list(interior.coords)<line_sep>x=[round(float(pp[0]))<for>pp coords]<line_sep>y=[round(float(pp[1]))<for>pp coords]<line_sep>yy,xx=skimage.draw.polygon(y x (650 650))<line_sep>im_mask[yy xx]=0<block_end><block_end><block_end>im_mask=skimage.transform.resize(im_mask (INPUT_SIZE INPUT_SIZE))<line_sep>im_mask=(im_mask<g>0.5).astype(np.uint8)<line_sep><return>im_mask<block_end><def_stmt>train_test_image_prep area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>df_train=pd.read_csv(FMT_TRAIN_IMAGELIST_PATH.format(prefix=prefix) index_col='ImageId')<line_sep>df_test=pd.read_csv(FMT_TEST_IMAGELIST_PATH.format(prefix=prefix) index_col='ImageId')<line_sep>band_cut_th=__load_band_cut_th(FMT_BANDCUT_TH_PATH.format(prefix))[area_id]<line_sep>df_summary=_load_train_summary_data(area_id)<line_sep>fn=FMT_TRAIN_IM_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_train.index total=len(df_train))<block_start>im=get_resized_raster_3chan_image(image_id band_cut_th)<line_sep>atom=tb.Atom.from_dtype(im.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im.shape filters=filters)<line_sep>ds[:]=im<block_end><block_end>fn=FMT_TEST_IM_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_test.index total=len(df_test))<block_start>im=get_resized_raster_3chan_image_test(image_id band_cut_th)<line_sep>atom=tb.Atom.from_dtype(im.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im.shape filters=filters)<line_sep>ds[:]=im<block_end><block_end>fn=FMT_TRAIN_MASK_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id 
tqdm.tqdm(df_train.index total=len(df_train))<block_start>im_mask=image_mask_resized_from_summary(df_summary image_id)<line_sep>atom=tb.Atom.from_dtype(im_mask.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im_mask.shape filters=filters)<line_sep>ds[:]=im_mask<block_end><block_end><block_end><def_stmt>valtrain_test_image_prep area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>logger.info("valtrain_test_image_prep for {}".format(prefix))<line_sep>df_train=pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix) index_col='ImageId')<line_sep>df_test=pd.read_csv(FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) index_col='ImageId')<line_sep>band_cut_th=__load_band_cut_th(FMT_BANDCUT_TH_PATH.format(prefix))[area_id]<line_sep>df_summary=_load_train_summary_data(area_id)<line_sep>fn=FMT_VALTRAIN_IM_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_train.index total=len(df_train))<block_start>im=get_resized_raster_3chan_image(image_id band_cut_th)<line_sep>atom=tb.Atom.from_dtype(im.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im.shape filters=filters)<line_sep>ds[:]=im<block_end><block_end>fn=FMT_VALTEST_IM_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_test.index total=len(df_test))<block_start>im=get_resized_raster_3chan_image(image_id band_cut_th)<line_sep>atom=tb.Atom.from_dtype(im.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im.shape filters=filters)<line_sep>ds[:]=im<block_end><block_end>fn=FMT_VALTRAIN_MASK_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 
'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_train.index total=len(df_train))<block_start>im_mask=image_mask_resized_from_summary(df_summary image_id)<line_sep>atom=tb.Atom.from_dtype(im_mask.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im_mask.shape filters=filters)<line_sep>ds[:]=im_mask<block_end><block_end>fn=FMT_VALTEST_MASK_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_test.index total=len(df_test))<block_start>im_mask=image_mask_resized_from_summary(df_summary image_id)<line_sep>atom=tb.Atom.from_dtype(im_mask.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im_mask.shape filters=filters)<line_sep>ds[:]=im_mask<block_end><block_end><block_end><def_stmt>train_test_mul_image_prep area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>df_train=pd.read_csv(FMT_TRAIN_IMAGELIST_PATH.format(prefix=prefix) index_col='ImageId')<line_sep>df_test=pd.read_csv(FMT_TEST_IMAGELIST_PATH.format(prefix=prefix) index_col='ImageId')<line_sep>band_rgb_th=__load_band_cut_th(FMT_BANDCUT_TH_PATH.format(prefix))[area_id]<line_sep>band_mul_th=__load_band_cut_th(FMT_MUL_BANDCUT_TH_PATH.format(prefix) bandsz=8)[area_id]<line_sep>df_summary=_load_train_summary_data(area_id)<line_sep>fn=FMT_TRAIN_MUL_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_train.index total=len(df_train))<block_start>im=get_resized_raster_8chan_image(image_id band_rgb_th band_mul_th)<line_sep>atom=tb.Atom.from_dtype(im.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im.shape 
filters=filters)<line_sep>ds[:]=im<block_end><block_end>fn=FMT_TEST_MUL_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_test.index total=len(df_test))<block_start>im=get_resized_raster_8chan_image_test(image_id band_rgb_th band_mul_th)<line_sep>atom=tb.Atom.from_dtype(im.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im.shape filters=filters)<line_sep>ds[:]=im<block_end><block_end><block_end><def_stmt>valtrain_test_mul_image_prep area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>logger.info("valtrain_test_image_prep for {}".format(prefix))<line_sep>df_train=pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix) index_col='ImageId')<line_sep>df_test=pd.read_csv(FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) index_col='ImageId')<line_sep>band_rgb_th=__load_band_cut_th(FMT_BANDCUT_TH_PATH.format(prefix))[area_id]<line_sep>band_mul_th=__load_band_cut_th(FMT_MUL_BANDCUT_TH_PATH.format(prefix) bandsz=8)[area_id]<line_sep>df_summary=_load_train_summary_data(area_id)<line_sep>fn=FMT_VALTRAIN_MUL_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_train.index total=len(df_train))<block_start>im=get_resized_raster_8chan_image(image_id band_rgb_th band_mul_th)<line_sep>atom=tb.Atom.from_dtype(im.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im.shape filters=filters)<line_sep>ds[:]=im<block_end><block_end>fn=FMT_VALTEST_MUL_STORE.format(prefix)<line_sep>logger.info("Prepare image container: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start><for_stmt>image_id tqdm.tqdm(df_test.index total=len(df_test))<block_start>im=get_resized_raster_8chan_image(image_id band_rgb_th 
band_mul_th)<line_sep>atom=tb.Atom.from_dtype(im.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root image_id atom im.shape filters=filters)<line_sep>ds[:]=im<block_end><block_end><block_end><def_stmt>_load_train_summary_data area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>fn=FMT_TRAIN_SUMMARY_PATH.format(prefix=prefix)<line_sep>df=pd.read_csv(fn)<line_sep><return>df<block_end><def_stmt>split_val_train_test area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>df=_load_train_summary_data(area_id)<line_sep>df_agg=df.groupby('ImageId').agg('first')<line_sep>image_id_list=df_agg.index.tolist()<line_sep>np.random.shuffle(image_id_list)<line_sep>sz_valtrain=int(len(image_id_list)<times>0.7)<line_sep>sz_valtest=len(image_id_list)-sz_valtrain<line_sep>pd.DataFrame({'ImageId':image_id_list[:sz_valtrain]}).to_csv(FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix) index=<false>)<line_sep>pd.DataFrame({'ImageId':image_id_list[sz_valtrain:]}).to_csv(FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix) index=<false>)<block_end><def_stmt>train_image_id_to_mspec_path image_id<block_start>prefix=image_id_to_prefix(image_id)<line_sep>fn=FMT_TRAIN_MSPEC_IMAGE_PATH.format(prefix=prefix image_id=image_id)<line_sep><return>fn<block_end><def_stmt>test_image_id_to_mspec_path image_id<block_start>prefix=image_id_to_prefix(image_id)<line_sep>fn=FMT_TEST_MSPEC_IMAGE_PATH.format(prefix=prefix image_id=image_id)<line_sep><return>fn<block_end><def_stmt>train_image_id_to_path image_id<block_start>prefix=image_id_to_prefix(image_id)<line_sep>fn=FMT_TRAIN_RGB_IMAGE_PATH.format(prefix=prefix image_id=image_id)<line_sep><return>fn<block_end><def_stmt>test_image_id_to_path image_id<block_start>prefix=image_id_to_prefix(image_id)<line_sep>fn=FMT_TEST_RGB_IMAGE_PATH.format(prefix=prefix image_id=image_id)<line_sep><return>fn<block_end><def_stmt>image_id_to_prefix 
image_id<block_start>prefix=image_id.split('img')[0][:-1]<line_sep><return>prefix<block_end><def_stmt>calc_multiband_cut_threshold area_id<block_start>rows=[]<line_sep>band_cut_th=__calc_multiband_cut_threshold(area_id)<line_sep>prefix=area_id_to_prefix(area_id)<line_sep>row=dict(prefix=area_id_to_prefix(area_id))<line_sep>row['area_id']=area_id<for_stmt>chan_i band_cut_th.keys()<block_start>row['chan{}_max'.format(chan_i)]=band_cut_th[chan_i]['max']<line_sep>row['chan{}_min'.format(chan_i)]=band_cut_th[chan_i]['min']<block_end>rows.append(row)<line_sep>pd.DataFrame(rows).to_csv(FMT_BANDCUT_TH_PATH.format(prefix) index=<false>)<block_end><def_stmt>__calc_multiband_cut_threshold area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>band_values={k:[]<for>k range(3)}<line_sep>band_cut_th={k:dict(max=0 min=0)<for>k range(3)}<line_sep>image_id_list=pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix)).ImageId.tolist()<for_stmt>image_id tqdm.tqdm(image_id_list[:500])<block_start>image_fn=train_image_id_to_path(image_id)<with_stmt>rasterio.open(image_fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<for_stmt>i_chan range(3)<block_start>values_=values[i_chan].ravel().tolist()<line_sep>values_=np.array([v<for>v values_<if>v<ne>0])<line_sep># Remove sensored mask band_values[i_chan].append(values_)<block_end><block_end><block_end>image_id_list=pd.read_csv(FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)).ImageId.tolist()<for_stmt>image_id tqdm.tqdm(image_id_list[:500])<block_start>image_fn=train_image_id_to_path(image_id)<with_stmt>rasterio.open(image_fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<for_stmt>i_chan range(3)<block_start>values_=values[i_chan].ravel().tolist()<line_sep>values_=np.array([v<for>v values_<if>v<ne>0])<line_sep># Remove sensored mask band_values[i_chan].append(values_)<block_end><block_end><block_end><for_stmt>i_chan 
range(3)<block_start>band_values[i_chan]=np.concatenate(band_values[i_chan]).ravel()<line_sep>band_cut_th[i_chan]['max']=scipy.percentile(band_values[i_chan] 98)<line_sep>band_cut_th[i_chan]['min']=scipy.percentile(band_values[i_chan] 2)<block_end><return>band_cut_th<block_end><def_stmt>calc_mul_multiband_cut_threshold area_id<block_start>rows=[]<line_sep>band_cut_th=__calc_mul_multiband_cut_threshold(area_id)<line_sep>prefix=area_id_to_prefix(area_id)<line_sep>row=dict(prefix=area_id_to_prefix(area_id))<line_sep>row['area_id']=area_id<for_stmt>chan_i band_cut_th.keys()<block_start>row['chan{}_max'.format(chan_i)]=band_cut_th[chan_i]['max']<line_sep>row['chan{}_min'.format(chan_i)]=band_cut_th[chan_i]['min']<block_end>rows.append(row)<line_sep>pd.DataFrame(rows).to_csv(FMT_MUL_BANDCUT_TH_PATH.format(prefix) index=<false>)<block_end><def_stmt>__calc_mul_multiband_cut_threshold area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>band_values={k:[]<for>k range(8)}<line_sep>band_cut_th={k:dict(max=0 min=0)<for>k range(8)}<line_sep>image_id_list=pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix)).ImageId.tolist()<for_stmt>image_id tqdm.tqdm(image_id_list[:500])<block_start>image_fn=train_image_id_to_mspec_path(image_id)<with_stmt>rasterio.open(image_fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<for_stmt>i_chan range(8)<block_start>values_=values[i_chan].ravel().tolist()<line_sep>values_=np.array([v<for>v values_<if>v<ne>0])<line_sep># Remove sensored mask band_values[i_chan].append(values_)<block_end><block_end><block_end>image_id_list=pd.read_csv(FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)).ImageId.tolist()<for_stmt>image_id tqdm.tqdm(image_id_list[:500])<block_start>image_fn=train_image_id_to_mspec_path(image_id)<with_stmt>rasterio.open(image_fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<for_stmt>i_chan range(8)<block_start>values_=values[i_chan].ravel().tolist()<line_sep>values_=np.array([v<for>v 
values_<if>v<ne>0])<line_sep># Remove sensored mask band_values[i_chan].append(values_)<block_end><block_end><block_end><for_stmt>i_chan range(8)<block_start>band_values[i_chan]=np.concatenate(band_values[i_chan]).ravel()<line_sep>band_cut_th[i_chan]['max']=scipy.percentile(band_values[i_chan] 98)<line_sep>band_cut_th[i_chan]['min']=scipy.percentile(band_values[i_chan] 2)<block_end><return>band_cut_th<block_end><def_stmt>get_unet <block_start>conv_params=dict(activation='relu' border_mode='same')<line_sep>merge_params=dict(mode='concat' concat_axis=1)<line_sep>inputs=Input((8 256 256))<line_sep>conv1=Convolution2D(32 3 3 **conv_params)(inputs)<line_sep>conv1=Convolution2D(32 3 3 **conv_params)(conv1)<line_sep>pool1=MaxPooling2D(pool_size=(2 2))(conv1)<line_sep>conv2=Convolution2D(64 3 3 **conv_params)(pool1)<line_sep>conv2=Convolution2D(64 3 3 **conv_params)(conv2)<line_sep>pool2=MaxPooling2D(pool_size=(2 2))(conv2)<line_sep>conv3=Convolution2D(128 3 3 **conv_params)(pool2)<line_sep>conv3=Convolution2D(128 3 3 **conv_params)(conv3)<line_sep>pool3=MaxPooling2D(pool_size=(2 2))(conv3)<line_sep>conv4=Convolution2D(256 3 3 **conv_params)(pool3)<line_sep>conv4=Convolution2D(256 3 3 **conv_params)(conv4)<line_sep>pool4=MaxPooling2D(pool_size=(2 2))(conv4)<line_sep>conv5=Convolution2D(512 3 3 **conv_params)(pool4)<line_sep>conv5=Convolution2D(512 3 3 **conv_params)(conv5)<line_sep>up6=merge_l([UpSampling2D(size=(2 2))(conv5) conv4] **merge_params)<line_sep>conv6=Convolution2D(256 3 3 **conv_params)(up6)<line_sep>conv6=Convolution2D(256 3 3 **conv_params)(conv6)<line_sep>up7=merge_l([UpSampling2D(size=(2 2))(conv6) conv3] **merge_params)<line_sep>conv7=Convolution2D(128 3 3 **conv_params)(up7)<line_sep>conv7=Convolution2D(128 3 3 **conv_params)(conv7)<line_sep>up8=merge_l([UpSampling2D(size=(2 2))(conv7) conv2] **merge_params)<line_sep>conv8=Convolution2D(64 3 3 **conv_params)(up8)<line_sep>conv8=Convolution2D(64 3 3 
**conv_params)(conv8)<line_sep>up9=merge_l([UpSampling2D(size=(2 2))(conv8) conv1] **merge_params)<line_sep>conv9=Convolution2D(32 3 3 **conv_params)(up9)<line_sep>conv9=Convolution2D(32 3 3 **conv_params)(conv9)<line_sep>conv10=Convolution2D(1 1 1 activation='sigmoid')(conv9)<line_sep>adam=Adam()<line_sep>model=Model(input=inputs output=conv10)<line_sep>model.compile(optimizer=adam loss='binary_crossentropy' metrics=['accuracy' jaccard_coef jaccard_coef_int])<line_sep><return>model<block_end><def_stmt>jaccard_coef y_true y_pred<block_start>smooth=1e-12<line_sep>intersection=K.sum(y_true<times>y_pred axis=[0 -1 -2])<line_sep>sum_=K.sum(y_true+y_pred axis=[0 -1 -2])<line_sep>jac=(intersection+smooth)/(sum_-intersection+smooth)<line_sep><return>K.mean(jac)<block_end><def_stmt>jaccard_coef_int y_true y_pred<block_start>smooth=1e-12<line_sep>y_pred_pos=K.round(K.clip(y_pred 0 1))<line_sep>intersection=K.sum(y_true<times>y_pred_pos axis=[0 -1 -2])<line_sep>sum_=K.sum(y_true+y_pred_pos axis=[0 -1 -2])<line_sep>jac=(intersection+smooth)/(sum_-intersection+smooth)<line_sep><return>K.mean(jac)<block_end><def_stmt>generate_test_batch area_id batch_size=64 immean=<none> enable_tqdm=<false><block_start>prefix=area_id_to_prefix(area_id)<line_sep>df_test=pd.read_csv(FMT_TEST_IMAGELIST_PATH.format(prefix=prefix))<line_sep>fn_im=FMT_TEST_MUL_STORE.format(prefix)<line_sep>image_id_list=df_test.ImageId.tolist()<if_stmt>enable_tqdm<block_start>pbar=tqdm.tqdm(total=len(image_id_list))<block_end><while_stmt>1<block_start>total_sz=len(image_id_list)<line_sep>n_batch=int(math.floor(total_sz/batch_size)+1)<with_stmt>tb.open_file(fn_im 'r')<as>f_im<block_start><for_stmt>i_batch range(n_batch)<block_start>target_image_ids=image_id_list[i_batch<times>batch_size:(i_batch+1)<times>batch_size]<if_stmt>len(target_image_ids)<eq>0<block_start><continue><block_end>X_test=[]<line_sep>y_test=[]<for_stmt>image_id 
target_image_ids<block_start>im=np.array(f_im.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_test.append(im)<line_sep>mask=np.zeros((INPUT_SIZE INPUT_SIZE)).astype(np.uint8)<line_sep>y_test.append(mask)<block_end>X_test=np.array(X_test)<line_sep>y_test=np.array(y_test)<line_sep>y_test=y_test.reshape((-1 1 INPUT_SIZE INPUT_SIZE))<if_stmt>immean<is><not><none><block_start>X_test=X_test-immean<block_end><if_stmt>enable_tqdm<block_start>pbar.update(y_test.shape[0])<block_end><yield>(X_test y_test)<block_end><block_end><if_stmt>enable_tqdm<block_start>pbar.close()<block_end><block_end><block_end><def_stmt>get_resized_raster_8chan_image_test image_id band_rgb_th band_mul_th<block_start>""" RGB + multispectral (total: 8 channels) """<line_sep>im=[]<line_sep>fn=test_image_id_to_path(image_id)<with_stmt>rasterio.open(fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<for_stmt>chan_i range(3)<block_start>min_val=band_rgb_th[chan_i]['min']<line_sep>max_val=band_rgb_th[chan_i]['max']<line_sep>values[chan_i]=np.clip(values[chan_i] min_val max_val)<line_sep>values[chan_i]=(values[chan_i]-min_val)/(max_val-min_val)<line_sep>im.append(skimage.transform.resize(values[chan_i] (INPUT_SIZE INPUT_SIZE)))<block_end><block_end>fn=test_image_id_to_mspec_path(image_id)<with_stmt>rasterio.open(fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<line_sep>usechannels=[1 2 5 6 7]<for_stmt>chan_i usechannels<block_start>min_val=band_mul_th[chan_i]['min']<line_sep>max_val=band_mul_th[chan_i]['max']<line_sep>values[chan_i]=np.clip(values[chan_i] min_val max_val)<line_sep>values[chan_i]=(values[chan_i]-min_val)/(max_val-min_val)<line_sep>im.append(skimage.transform.resize(values[chan_i] (INPUT_SIZE INPUT_SIZE)))<block_end><block_end>im=np.array(im)# (ch, w, h) im=np.swapaxes(im 0 2)# -> (h, w, ch) im=np.swapaxes(im 0 1)# -> (w, h, ch) <return>im<block_end><def_stmt>get_resized_raster_8chan_image image_id band_rgb_th 
band_mul_th<block_start>""" RGB + multispectral (total: 8 channels) """<line_sep>im=[]<line_sep>fn=train_image_id_to_path(image_id)<with_stmt>rasterio.open(fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<for_stmt>chan_i range(3)<block_start>min_val=band_rgb_th[chan_i]['min']<line_sep>max_val=band_rgb_th[chan_i]['max']<line_sep>values[chan_i]=np.clip(values[chan_i] min_val max_val)<line_sep>values[chan_i]=(values[chan_i]-min_val)/(max_val-min_val)<line_sep>im.append(skimage.transform.resize(values[chan_i] (INPUT_SIZE INPUT_SIZE)))<block_end><block_end>fn=train_image_id_to_mspec_path(image_id)<with_stmt>rasterio.open(fn 'r')<as>f<block_start>values=f.read().astype(np.float32)<line_sep>usechannels=[1 2 5 6 7]<for_stmt>chan_i usechannels<block_start>min_val=band_mul_th[chan_i]['min']<line_sep>max_val=band_mul_th[chan_i]['max']<line_sep>values[chan_i]=np.clip(values[chan_i] min_val max_val)<line_sep>values[chan_i]=(values[chan_i]-min_val)/(max_val-min_val)<line_sep>im.append(skimage.transform.resize(values[chan_i] (INPUT_SIZE INPUT_SIZE)))<block_end><block_end>im=np.array(im)# (ch, w, h) im=np.swapaxes(im 0 2)# -> (h, w, ch) im=np.swapaxes(im 0 1)# -> (w, h, ch) <return>im<block_end><def_stmt>_get_train_mul_data area_id<block_start>""" RGB + multispectral (total: 8 channels) """<line_sep>prefix=area_id_to_prefix(area_id)<line_sep>fn_train=FMT_TRAIN_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_train=pd.read_csv(fn_train)<line_sep>X_train=[]<line_sep>fn_im=FMT_TRAIN_MUL_STORE.format(prefix)<with_stmt>tb.open_file(fn_im 'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_train.ImageId.tolist())<block_start>im=np.array(f.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_train.append(im)<block_end><block_end>X_train=np.array(X_train)<line_sep>y_train=[]<line_sep>fn_mask=FMT_TRAIN_MASK_STORE.format(prefix)<with_stmt>tb.open_file(fn_mask 'r')<as>f<block_start><for_stmt>idx,image_id 
enumerate(df_train.ImageId.tolist())<block_start>mask=np.array(f.get_node('/'+image_id))<line_sep>mask=(mask<g>0.5).astype(np.uint8)<line_sep>y_train.append(mask)<block_end><block_end>y_train=np.array(y_train)<line_sep>y_train=y_train.reshape((-1 1 INPUT_SIZE INPUT_SIZE))<line_sep><return>X_train y_train<block_end><def_stmt>_get_test_mul_data area_id<block_start>""" RGB + multispectral (total: 8 channels) """<line_sep>prefix=area_id_to_prefix(area_id)<line_sep>fn_test=FMT_TEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test)<line_sep>X_test=[]<line_sep>fn_im=FMT_TEST_MUL_STORE.format(prefix)<with_stmt>tb.open_file(fn_im 'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_test.ImageId.tolist())<block_start>im=np.array(f.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_test.append(im)<block_end><block_end>X_test=np.array(X_test)<line_sep><return>X_test<block_end><def_stmt>_get_valtest_mul_data area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>fn_test=FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test)<line_sep>X_val=[]<line_sep>fn_im=FMT_VALTEST_MUL_STORE.format(prefix)<with_stmt>tb.open_file(fn_im 'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_test.ImageId.tolist())<block_start>im=np.array(f.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_val.append(im)<block_end><block_end>X_val=np.array(X_val)<line_sep>y_val=[]<line_sep>fn_mask=FMT_VALTEST_MASK_STORE.format(prefix)<with_stmt>tb.open_file(fn_mask 'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_test.ImageId.tolist())<block_start>mask=np.array(f.get_node('/'+image_id))<line_sep>mask=(mask<g>0.5).astype(np.uint8)<line_sep>y_val.append(mask)<block_end><block_end>y_val=np.array(y_val)<line_sep>y_val=y_val.reshape((-1 1 INPUT_SIZE INPUT_SIZE))<line_sep><return>X_val y_val<block_end><def_stmt>_get_valtrain_mul_data 
area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>fn_train=FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_train=pd.read_csv(fn_train)<line_sep>X_val=[]<line_sep>fn_im=FMT_VALTRAIN_MUL_STORE.format(prefix)<with_stmt>tb.open_file(fn_im 'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_train.ImageId.tolist())<block_start>im=np.array(f.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_val.append(im)<block_end><block_end>X_val=np.array(X_val)<line_sep>y_val=[]<line_sep>fn_mask=FMT_VALTRAIN_MASK_STORE.format(prefix)<with_stmt>tb.open_file(fn_mask 'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_train.ImageId.tolist())<block_start>mask=np.array(f.get_node('/'+image_id))<line_sep>mask=(mask<g>0.5).astype(np.uint8)<line_sep>y_val.append(mask)<block_end><block_end>y_val=np.array(y_val)<line_sep>y_val=y_val.reshape((-1 1 INPUT_SIZE INPUT_SIZE))<line_sep><return>X_val y_val<block_end><def_stmt>get_mul_mean_image area_id<block_start>prefix=area_id_to_prefix(area_id)<with_stmt>tb.open_file(FMT_MULMEAN.format(prefix) 'r')<as>f<block_start>im_mean=np.array(f.get_node('/mulmean'))<block_end><return>im_mean<block_end><def_stmt>preproc_stage3 area_id<block_start>prefix=area_id_to_prefix(area_id)<if_stmt><not>Path(FMT_VALTEST_MUL_STORE.format(prefix)).exists()<block_start>valtrain_test_mul_image_prep(area_id)<block_end><if_stmt><not>Path(FMT_TEST_MUL_STORE.format(prefix)).exists()<block_start>train_test_mul_image_prep(area_id)<block_end># mean image for subtract preprocessing X1,_=_get_train_mul_data(area_id)<line_sep>X2=_get_test_mul_data(area_id)<line_sep>X=np.vstack([X1 X2])<line_sep>print(X.shape)<line_sep>X_mean=X.mean(axis=0)<line_sep>fn=FMT_MULMEAN.format(prefix)<line_sep>logger.info("Prepare mean image: {}".format(fn))<with_stmt>tb.open_file(fn 'w')<as>f<block_start>atom=tb.Atom.from_dtype(X_mean.dtype)<line_sep>filters=tb.Filters(complib='blosc' 
complevel=9)<line_sep>ds=f.create_carray(f.root 'mulmean' atom X_mean.shape filters=filters)<line_sep>ds[:]=X_mean<block_end><block_end><def_stmt>_internal_test_predict_best_param area_id save_pred=<true><block_start>prefix=area_id_to_prefix(area_id)<line_sep>param=_get_model_parameter(area_id)<line_sep>epoch=param['fn_epoch']<line_sep>min_th=param['min_poly_area']<line_sep># Prediction phase logger.info("Prediction phase: {}".format(prefix))<line_sep>X_mean=get_mul_mean_image(area_id)<line_sep># Load model weights # Predict and Save prediction result fn=FMT_TESTPRED_PATH.format(prefix)<line_sep>fn_model=FMT_VALMODEL_PATH.format(prefix+'_{epoch:02d}')<line_sep>fn_model=fn_model.format(epoch=epoch)<line_sep>model=get_unet()<line_sep>model.load_weights(fn_model)<line_sep>fn_test=FMT_TEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test index_col='ImageId')<line_sep>y_pred=model.predict_generator(generate_test_batch(area_id batch_size=64 immean=X_mean enable_tqdm=<true> ) val_samples=len(df_test) )<del_stmt>model<line_sep># Save prediction result <if_stmt>save_pred<block_start><with_stmt>tb.open_file(fn 'w')<as>f<block_start>atom=tb.Atom.from_dtype(y_pred.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root 'pred' atom y_pred.shape filters=filters)<line_sep>ds[:]=y_pred<block_end><block_end><return>y_pred<block_end><def_stmt>_internal_test area_id enable_tqdm=<false><block_start>prefix=area_id_to_prefix(area_id)<line_sep>y_pred=_internal_test_predict_best_param(area_id save_pred=<false>)<line_sep>param=_get_model_parameter(area_id)<line_sep>min_th=param['min_poly_area']<line_sep># Postprocessing phase logger.info("Postprocessing phase")<line_sep>fn_test=FMT_TEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test index_col='ImageId')<line_sep>fn_out=FMT_TESTPOLY_PATH.format(prefix)<with_stmt>open(fn_out 
'w')<as>f<block_start>f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n")<line_sep>test_image_list=df_test.index.tolist()<for_stmt>idx,image_id tqdm.tqdm(enumerate(test_image_list) total=len(test_image_list))<block_start>df_poly=mask_to_poly(y_pred[idx][0] min_polygon_area_th=min_th)<if_stmt>len(df_poly)<g>0<block_start><for_stmt>i,row df_poly.iterrows()<block_start>line="{},{},\"{}\",{:.6f}\n".format(image_id row.bid row.wkt row.area_ratio)<line_sep>line=_remove_interiors(line)<line_sep>f.write(line)<block_end><block_end><else_stmt><block_start>f.write("{},{},{},0\n".format(image_id -1 "POLYGON EMPTY"))<block_end><block_end><block_end><block_end><def_stmt>validate_score area_id<block_start>""" Calc competition score """<line_sep>prefix=area_id_to_prefix(area_id)<line_sep># Prediction phase <if_stmt><not>Path(FMT_VALTESTPRED_PATH.format(prefix)).exists()<block_start>X_val,y_val=_get_valtest_mul_data(area_id)<line_sep>X_mean=get_mul_mean_image(area_id)<line_sep># Load model weights # Predict and Save prediction result model=get_unet()<line_sep>model.load_weights(FMT_VALMODEL_PATH.format(prefix))<line_sep>y_pred=model.predict(X_val-X_mean batch_size=8 verbose=1)<del_stmt>model<line_sep># Save prediction result fn=FMT_VALTESTPRED_PATH.format(prefix)<with_stmt>tb.open_file(fn 'w')<as>f<block_start>atom=tb.Atom.from_dtype(y_pred.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root 'pred' atom y_pred.shape filters=filters)<line_sep>ds[:]=y_pred<block_end><block_end># Postprocessing phase <if_stmt><not>Path(FMT_VALTESTPOLY_PATH.format(prefix)).exists()<block_start>fn_test=FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test index_col='ImageId')<line_sep>fn=FMT_VALTESTPRED_PATH.format(prefix)<with_stmt>tb.open_file(fn 'r')<as>f<block_start>y_pred=np.array(f.get_node('/pred'))<block_end>print(y_pred.shape)<line_sep>fn_out=FMT_VALTESTPOLY_PATH.format(prefix)<with_stmt>open(fn_out 
'w')<as>f<block_start>f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n")<for_stmt>idx,image_id enumerate(df_test.index.tolist())<block_start>df_poly=mask_to_poly(y_pred[idx][0])<if_stmt>len(df_poly)<g>0<block_start><for_stmt>i,row df_poly.iterrows()<block_start>f.write("{},{},\"{}\",{:.6f}\n".format(image_id row.bid row.wkt row.area_ratio))<block_end><block_end><else_stmt><block_start>f.write("{},{},{},0\n".format(image_id -1 "POLYGON EMPTY"))<block_end><block_end><block_end># update fn_out <with_stmt>open(fn_out 'r')<as>f<block_start>lines=f.readlines()<block_end><with_stmt>open(fn_out 'w')<as>f<block_start>f.write(lines[0])<for_stmt>line lines[1:]<block_start>line=_remove_interiors(line)<line_sep>f.write(line)<block_end><block_end><block_end># Validation solution file <if_stmt><not>Path(FMT_VALTESTTRUTH_PATH.format(prefix)).exists()<block_start>fn_true=FMT_TRAIN_SUMMARY_PATH.format(prefix=prefix)<line_sep>df_true=pd.read_csv(fn_true)<line_sep># # Remove prefix "PAN_" # df_true.loc[:, 'ImageId'] = df_true.ImageId.str[4:] fn_test=FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test)<line_sep>df_test_image_ids=df_test.ImageId.unique()<line_sep>fn_out=FMT_VALTESTTRUTH_PATH.format(prefix)<with_stmt>open(fn_out 'w')<as>f<block_start>f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n")<line_sep>df_true=df_true[df_true.ImageId.isin(df_test_image_ids)]<for_stmt>idx,r df_true.iterrows()<block_start>f.write("{},{},\"{}\",{:.6f}\n".format(r.ImageId r.BuildingId r.PolygonWKT_Pix 1.0))<block_end><block_end><block_end><block_end><def_stmt>validate_all_score <block_start>header_line=[]<line_sep>lines=[]<for_stmt>area_id range(2 6)<block_start>prefix=area_id_to_prefix(area_id)<assert_stmt>Path(FMT_VALTESTTRUTH_PATH.format(prefix)).exists()<with_stmt>open(FMT_VALTESTTRUTH_PATH.format(prefix) 'r')<as>f<block_start>header_line=f.readline()<line_sep>lines<augadd>f.readlines()<block_end><block_end><with_stmt>open(FMT_VALTESTTRUTH_OVALL_PATH 
'w')<as>f<block_start>f.write(header_line)<for_stmt>line lines<block_start>f.write(line)<block_end><block_end># Predicted polygons header_line=[]<line_sep>lines=[]<for_stmt>area_id range(2 6)<block_start>prefix=area_id_to_prefix(area_id)<assert_stmt>Path(FMT_VALTESTPOLY_PATH.format(prefix)).exists()<with_stmt>open(FMT_VALTESTPOLY_PATH.format(prefix) 'r')<as>f<block_start>header_line=f.readline()<line_sep>lines<augadd>f.readlines()<block_end><block_end><with_stmt>open(FMT_VALTESTPOLY_OVALL_PATH 'w')<as>f<block_start>f.write(header_line)<for_stmt>line lines<block_start>f.write(line)<block_end><block_end><block_end><def_stmt>generate_valtest_batch area_id batch_size=8 immean=<none> enable_tqdm=<false><block_start>prefix=area_id_to_prefix(area_id)<line_sep>df_train=pd.read_csv(FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix))<line_sep>fn_im=FMT_VALTEST_MUL_STORE.format(prefix)<line_sep>fn_mask=FMT_VALTEST_MASK_STORE.format(prefix)<line_sep>image_id_list=df_train.ImageId.tolist()<if_stmt>enable_tqdm<block_start>pbar=tqdm.tqdm(total=len(image_id_list))<block_end><while_stmt>1<block_start>total_sz=len(image_id_list)<line_sep>n_batch=int(math.floor(total_sz/batch_size)+1)<with_stmt>tb.open_file(fn_im 'r')<as>f_im tb.open_file(fn_mask 'r')<as>f_mask<block_start><for_stmt>i_batch range(n_batch)<block_start>target_image_ids=image_id_list[i_batch<times>batch_size:(i_batch+1)<times>batch_size]<if_stmt>len(target_image_ids)<eq>0<block_start><continue><block_end>X_train=[]<line_sep>y_train=[]<for_stmt>image_id target_image_ids<block_start>im=np.array(f_im.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_train.append(im)<line_sep>mask=np.array(f_mask.get_node('/'+image_id))<line_sep>mask=(mask<g>0).astype(np.uint8)<line_sep>y_train.append(mask)<block_end>X_train=np.array(X_train)<line_sep>y_train=np.array(y_train)<line_sep>y_train=y_train.reshape((-1 1 INPUT_SIZE 
INPUT_SIZE))<if_stmt>immean<is><not><none><block_start>X_train=X_train-immean<block_end><if_stmt>enable_tqdm<block_start>pbar.update(y_train.shape[0])<block_end><yield>(X_train y_train)<block_end><block_end><if_stmt>enable_tqdm<block_start>pbar.close()<block_end><block_end><block_end><def_stmt>generate_valtrain_batch area_id batch_size=8 immean=<none><block_start>prefix=area_id_to_prefix(area_id)<line_sep>df_train=pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix))<line_sep>fn_im=FMT_VALTRAIN_MUL_STORE.format(prefix)<line_sep>fn_mask=FMT_VALTRAIN_MASK_STORE.format(prefix)<line_sep>image_id_list=df_train.ImageId.tolist()<line_sep>np.random.shuffle(image_id_list)<while_stmt>1<block_start>total_sz=len(image_id_list)<line_sep>n_batch=int(math.floor(total_sz/batch_size)+1)<with_stmt>tb.open_file(fn_im 'r')<as>f_im tb.open_file(fn_mask 'r')<as>f_mask<block_start><for_stmt>i_batch range(n_batch)<block_start>target_image_ids=image_id_list[i_batch<times>batch_size:(i_batch+1)<times>batch_size]<if_stmt>len(target_image_ids)<eq>0<block_start><continue><block_end>X_train=[]<line_sep>y_train=[]<for_stmt>image_id target_image_ids<block_start>im=np.array(f_im.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_train.append(im)<line_sep>mask=np.array(f_mask.get_node('/'+image_id))<line_sep>mask=(mask<g>0).astype(np.uint8)<line_sep>y_train.append(mask)<block_end>X_train=np.array(X_train)<line_sep>y_train=np.array(y_train)<line_sep>y_train=y_train.reshape((-1 1 INPUT_SIZE INPUT_SIZE))<if_stmt>immean<is><not><none><block_start>X_train=X_train-immean<block_end><yield>(X_train y_train)<block_end><block_end><block_end><block_end><def_stmt>_get_test_data area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>fn_test=FMT_TEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test)<line_sep>X_test=[]<line_sep>fn_im=FMT_TEST_IM_STORE.format(prefix)<with_stmt>tb.open_file(fn_im 
'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_test.ImageId.tolist())<block_start>im=np.array(f.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_test.append(im)<block_end><block_end>X_test=np.array(X_test)<line_sep><return>X_test<block_end><def_stmt>_get_valtest_data area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>fn_test=FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test)<line_sep>X_val=[]<line_sep>fn_im=FMT_VALTEST_IM_STORE.format(prefix)<with_stmt>tb.open_file(fn_im 'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_test.ImageId.tolist())<block_start>im=np.array(f.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_val.append(im)<block_end><block_end>X_val=np.array(X_val)<line_sep>y_val=[]<line_sep>fn_mask=FMT_VALTEST_MASK_STORE.format(prefix)<with_stmt>tb.open_file(fn_mask 'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_test.ImageId.tolist())<block_start>mask=np.array(f.get_node('/'+image_id))<line_sep>mask=(mask<g>0.5).astype(np.uint8)<line_sep>y_val.append(mask)<block_end><block_end>y_val=np.array(y_val)<line_sep>y_val=y_val.reshape((-1 1 INPUT_SIZE INPUT_SIZE))<line_sep><return>X_val y_val<block_end><def_stmt>_get_valtrain_data area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>fn_train=FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_train=pd.read_csv(fn_train)<line_sep>X_val=[]<line_sep>fn_im=FMT_VALTRAIN_IM_STORE.format(prefix)<with_stmt>tb.open_file(fn_im 'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_train.ImageId.tolist())<block_start>im=np.array(f.get_node('/'+image_id))<line_sep>im=np.swapaxes(im 0 2)<line_sep>im=np.swapaxes(im 1 2)<line_sep>X_val.append(im)<block_end><block_end>X_val=np.array(X_val)<line_sep>y_val=[]<line_sep>fn_mask=FMT_VALTRAIN_MASK_STORE.format(prefix)<with_stmt>tb.open_file(fn_mask 
'r')<as>f<block_start><for_stmt>idx,image_id enumerate(df_train.ImageId.tolist())<block_start>mask=np.array(f.get_node('/'+image_id))<line_sep>mask=(mask<g>0.5).astype(np.uint8)<line_sep>y_val.append(mask)<block_end><block_end>y_val=np.array(y_val)<line_sep>y_val=y_val.reshape((-1 1 INPUT_SIZE INPUT_SIZE))<line_sep><return>X_val y_val<block_end><def_stmt>predict area_id<block_start>prefix=area_id_to_prefix(area_id)<line_sep>X_test=_get_test_mul_data(area_id)<line_sep>X_mean=get_mul_mean_image(area_id)<line_sep># Load model weights # Predict and Save prediction result model=get_unet()<line_sep>model.load_weights(FMT_VALMODEL_PATH.format(prefix))<line_sep>y_pred=model.predict(X_test-X_mean batch_size=8 verbose=1)<del_stmt>model<line_sep># Save prediction result fn=FMT_TESTPRED_PATH.format(prefix)<with_stmt>tb.open_file(fn 'w')<as>f<block_start>atom=tb.Atom.from_dtype(y_pred.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root 'pred' atom y_pred.shape filters=filters)<line_sep>ds[:]=y_pred<block_end><block_end><def_stmt>_internal_validate_predict_best_param area_id enable_tqdm=<false><block_start>param=_get_model_parameter(area_id)<line_sep>epoch=param['fn_epoch']<line_sep>y_pred=_internal_validate_predict(area_id epoch=epoch save_pred=<false> enable_tqdm=enable_tqdm)<line_sep><return>y_pred<block_end><def_stmt>_internal_validate_predict area_id epoch=3 save_pred=<true> enable_tqdm=<false><block_start>prefix=area_id_to_prefix(area_id)<line_sep>X_mean=get_mul_mean_image(area_id)<line_sep># Load model weights # Predict and Save prediction result fn_model=FMT_VALMODEL_PATH.format(prefix+'_{epoch:02d}')<line_sep>fn_model=fn_model.format(epoch=epoch)<line_sep>model=get_unet()<line_sep>model.load_weights(fn_model)<line_sep>fn_test=FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test index_col='ImageId')<line_sep>y_pred=model.predict_generator(generate_valtest_batch(area_id batch_size=64 
immean=X_mean enable_tqdm=enable_tqdm ) val_samples=len(df_test) )<del_stmt>model<line_sep># Save prediction result <if_stmt>save_pred<block_start>fn=FMT_VALTESTPRED_PATH.format(prefix)<with_stmt>tb.open_file(fn 'w')<as>f<block_start>atom=tb.Atom.from_dtype(y_pred.dtype)<line_sep>filters=tb.Filters(complib='blosc' complevel=9)<line_sep>ds=f.create_carray(f.root 'pred' atom y_pred.shape filters=filters)<line_sep>ds[:]=y_pred<block_end><block_end><return>y_pred<block_end><def_stmt>_internal_validate_fscore_wo_pred_file area_id epoch=3 min_th=MIN_POLYGON_AREA enable_tqdm=<false><block_start>prefix=area_id_to_prefix(area_id)<line_sep># Prediction phase logger.info("Prediction phase")<line_sep>y_pred=_internal_validate_predict(area_id epoch=epoch save_pred=<false> enable_tqdm=enable_tqdm)<line_sep># Postprocessing phase logger.info("Postprocessing phase")<line_sep>fn_test=FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test index_col='ImageId')<line_sep>fn=FMT_VALTESTPRED_PATH.format(prefix)<line_sep>fn_out=FMT_VALTESTPOLY_PATH.format(prefix)<with_stmt>open(fn_out 'w')<as>f<block_start>f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n")<line_sep>test_list=df_test.index.tolist()<line_sep>iterator=enumerate(test_list)<for_stmt>idx,image_id tqdm.tqdm(iterator total=len(test_list))<block_start>df_poly=mask_to_poly(y_pred[idx][0] min_polygon_area_th=min_th)<if_stmt>len(df_poly)<g>0<block_start><for_stmt>i,row df_poly.iterrows()<block_start>line="{},{},\"{}\",{:.6f}\n".format(image_id row.bid row.wkt row.area_ratio)<line_sep>line=_remove_interiors(line)<line_sep>f.write(line)<block_end><block_end><else_stmt><block_start>f.write("{},{},{},0\n".format(image_id -1 "POLYGON EMPTY"))<block_end><block_end><block_end># ------------------------ # Validation solution file logger.info("Validation solution file")<line_sep>fn_true=FMT_TRAIN_SUMMARY_PATH.format(prefix=prefix)<line_sep>df_true=pd.read_csv(fn_true)<line_sep># # Remove prefix "PAN_" # 
df_true.loc[:, 'ImageId'] = df_true.ImageId.str[4:] fn_test=FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test)<line_sep>df_test_image_ids=df_test.ImageId.unique()<line_sep>fn_out=FMT_VALTESTTRUTH_PATH.format(prefix)<with_stmt>open(fn_out 'w')<as>f<block_start>f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n")<line_sep>df_true=df_true[df_true.ImageId.isin(df_test_image_ids)]<for_stmt>idx,r df_true.iterrows()<block_start>f.write("{},{},\"{}\",{:.6f}\n".format(r.ImageId r.BuildingId r.PolygonWKT_Pix 1.0))<block_end><block_end><block_end><def_stmt>_internal_validate_fscore area_id epoch=3 predict=<true> min_th=MIN_POLYGON_AREA enable_tqdm=<false><block_start>prefix=area_id_to_prefix(area_id)<line_sep># Prediction phase logger.info("Prediction phase")<if_stmt>predict<block_start>_internal_validate_predict(area_id epoch=epoch enable_tqdm=enable_tqdm)<block_end># Postprocessing phase logger.info("Postprocessing phase")<line_sep>fn_test=FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test index_col='ImageId')<line_sep>fn=FMT_VALTESTPRED_PATH.format(prefix)<line_sep>fn_out=FMT_VALTESTPOLY_PATH.format(prefix)<with_stmt>open(fn_out 'w')<as>f tb.open_file(fn 'r')<as>fr<block_start>y_pred=np.array(fr.get_node('/pred'))<line_sep>f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n")<line_sep>test_list=df_test.index.tolist()<line_sep>iterator=enumerate(test_list)<for_stmt>idx,image_id tqdm.tqdm(iterator total=len(test_list))<block_start>df_poly=mask_to_poly(y_pred[idx][0] min_polygon_area_th=min_th)<if_stmt>len(df_poly)<g>0<block_start><for_stmt>i,row df_poly.iterrows()<block_start>line="{},{},\"{}\",{:.6f}\n".format(image_id row.bid row.wkt row.area_ratio)<line_sep>line=_remove_interiors(line)<line_sep>f.write(line)<block_end><block_end><else_stmt><block_start>f.write("{},{},{},0\n".format(image_id -1 "POLYGON EMPTY"))<block_end><block_end><block_end># ------------------------ # Validation solution 
file logger.info("Validation solution file")<line_sep># if not Path(FMT_VALTESTTRUTH_PATH.format(prefix)).exists(): <if_stmt><true><block_start>fn_true=FMT_TRAIN_SUMMARY_PATH.format(prefix=prefix)<line_sep>df_true=pd.read_csv(fn_true)<line_sep># # Remove prefix "PAN_" # df_true.loc[:, 'ImageId'] = df_true.ImageId.str[4:] fn_test=FMT_VALTEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test)<line_sep>df_test_image_ids=df_test.ImageId.unique()<line_sep>fn_out=FMT_VALTESTTRUTH_PATH.format(prefix)<with_stmt>open(fn_out 'w')<as>f<block_start>f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n")<line_sep>df_true=df_true[df_true.ImageId.isin(df_test_image_ids)]<for_stmt>idx,r df_true.iterrows()<block_start>f.write("{},{},\"{}\",{:.6f}\n".format(r.ImageId r.BuildingId r.PolygonWKT_Pix 1.0))<block_end><block_end><block_end><block_end>@click.group()<def_stmt>cli <block_start><pass><block_end>@cli.command()@click.argument('datapath' type=str)<def_stmt>validate datapath<block_start>area_id=directory_name_to_area_id(datapath)<line_sep>prefix=area_id_to_prefix(area_id)<line_sep>logger.info(">> validate sub-command: {}".format(prefix))<line_sep>X_mean=get_mul_mean_image(area_id)<line_sep>X_val,y_val=_get_valtest_mul_data(area_id)<line_sep>X_val=X_val-X_mean<if_stmt><not>Path(MODEL_DIR).exists()<block_start>Path(MODEL_DIR).mkdir(parents=<true>)<block_end>logger.info("load valtrain")<line_sep>X_trn,y_trn=_get_valtrain_mul_data(area_id)<line_sep>X_trn=X_trn-X_mean<line_sep>model=get_unet()<line_sep>model_checkpoint=ModelCheckpoint(FMT_VALMODEL_PATH.format(prefix+"_{epoch:02d}") monitor='val_jaccard_coef_int' save_best_only=<false>)<line_sep>model_earlystop=EarlyStopping(monitor='val_jaccard_coef_int' patience=10 verbose=0 mode='max')<line_sep>model_history=History()<line_sep>df_train=pd.read_csv(FMT_VALTRAIN_IMAGELIST_PATH.format(prefix=prefix))<line_sep>logger.info("Fit")<line_sep>model.fit(X_trn y_trn nb_epoch=200 shuffle=<true> verbose=1 
validation_data=(X_val y_val) callbacks=[model_checkpoint model_earlystop model_history])<line_sep>model.save_weights(FMT_VALMODEL_LAST_PATH.format(prefix))<line_sep># Save evaluation history pd.DataFrame(model_history.history).to_csv(FMT_VALMODEL_HIST.format(prefix) index=<false>)<line_sep>logger.info(">> validate sub-command: {} ... Done".format(prefix))<block_end>@cli.command()@click.argument('datapath' type=str)<def_stmt>testproc datapath<block_start>area_id=directory_name_to_area_id(datapath)<line_sep>prefix=area_id_to_prefix(area_id)<line_sep>logger.info(">>>> Test proc for {}".format(prefix))<line_sep>_internal_test(area_id)<line_sep>logger.info(">>>> Test proc for {} ... done".format(prefix))<block_end>@cli.command()@click.argument('datapath' type=str)<def_stmt>evalfscore datapath<block_start>area_id=directory_name_to_area_id(datapath)<line_sep>prefix=area_id_to_prefix(area_id)<line_sep>logger.info("Evaluate fscore on validation set: {}".format(prefix))<line_sep># for each epoch # if not Path(FMT_VALMODEL_EVALHIST.format(prefix)).exists(): <if_stmt><true><block_start>df_hist=pd.read_csv(FMT_VALMODEL_HIST.format(prefix))<line_sep>df_hist.loc[: 'epoch']=list(range(1 len(df_hist)+1))<line_sep>rows=[]<for_stmt>zero_base_epoch range(0 len(df_hist))<block_start>logger.info(">>> Epoch: {}".format(zero_base_epoch))<line_sep>_internal_validate_fscore_wo_pred_file(area_id epoch=zero_base_epoch enable_tqdm=<true> min_th=MIN_POLYGON_AREA)<line_sep>evaluate_record=_calc_fscore_per_aoi(area_id)<line_sep>evaluate_record['zero_base_epoch']=zero_base_epoch<line_sep>evaluate_record['min_area_th']=MIN_POLYGON_AREA<line_sep>evaluate_record['area_id']=area_id<line_sep>logger.info("\n"+json.dumps(evaluate_record indent=4))<line_sep>rows.append(evaluate_record)<block_end>pd.DataFrame(rows).to_csv(FMT_VALMODEL_EVALHIST.format(prefix) index=<false>)<block_end># find best min-poly-threshold 
df_evalhist=pd.read_csv(FMT_VALMODEL_EVALHIST.format(prefix))<line_sep>best_row=df_evalhist.sort_values(by='fscore' ascending=<false>).iloc[0]<line_sep>best_epoch=int(best_row.zero_base_epoch)<line_sep>best_fscore=best_row.fscore<line_sep># optimize min area th rows=[]<for_stmt>th [30 60 90 120 150 180 210 240]<block_start>logger.info(">>> TH: {}".format(th))<line_sep>predict_flag=<false><if_stmt>th<eq>30<block_start>predict_flag=<true><block_end>_internal_validate_fscore(area_id epoch=best_epoch enable_tqdm=<true> min_th=th predict=predict_flag)<line_sep>evaluate_record=_calc_fscore_per_aoi(area_id)<line_sep>evaluate_record['zero_base_epoch']=best_epoch<line_sep>evaluate_record['min_area_th']=th<line_sep>evaluate_record['area_id']=area_id<line_sep>logger.info("\n"+json.dumps(evaluate_record indent=4))<line_sep>rows.append(evaluate_record)<block_end>pd.DataFrame(rows).to_csv(FMT_VALMODEL_EVALTHHIST.format(prefix) index=<false>)<line_sep>logger.info("Evaluate fscore on validation set: {} .. 
done".format(prefix))<block_end><def_stmt>mask_to_poly mask min_polygon_area_th=MIN_POLYGON_AREA<block_start>""" Convert from 256x256 mask to polygons on 650x650 image """<line_sep>mask=(skimage.transform.resize(mask (650 650))<g>0.5).astype(np.uint8)<line_sep>shapes=rasterio.features.shapes(mask.astype(np.int16) mask<g>0)<line_sep>poly_list=[]<line_sep>mp=shapely.ops.cascaded_union(shapely.geometry.MultiPolygon([shapely.geometry.shape(shape)<for>shape,value shapes]))<if_stmt>isinstance(mp shapely.geometry.Polygon)<block_start>df=pd.DataFrame({'area_size':[mp.area] 'poly':[mp] })<block_end><else_stmt><block_start>df=pd.DataFrame({'area_size':[p.area<for>p mp] 'poly':[p<for>p mp] })<block_end>df=df[df.area_size<g>min_polygon_area_th].sort_values(by='area_size' ascending=<false>)<line_sep>df.loc[: 'wkt']=df.poly.apply(<lambda>x:shapely.wkt.dumps(x rounding_precision=0))<line_sep>df.loc[: 'bid']=list(range(1 len(df)+1))<line_sep>df.loc[: 'area_ratio']=df.area_size/df.area_size.max()<line_sep><return>df<block_end><def_stmt>postproc area_id# Mask to poly <block_start>print(area_id)<line_sep>prefix=area_id_to_prefix(area_id)<line_sep>fn_test=FMT_TEST_IMAGELIST_PATH.format(prefix=prefix)<line_sep>df_test=pd.read_csv(fn_test index_col='ImageId')<line_sep>fn=FMT_TESTPRED_PATH.format(prefix)<with_stmt>tb.open_file(fn 'r')<as>f<block_start>y_pred=np.array(f.get_node('/pred'))<block_end>print(y_pred.shape)<line_sep>fn_out=FMT_TESTPOLY_PATH.format(prefix)<with_stmt>open(fn_out 'w')<as>f<block_start>f.write("ImageId,BuildingId,PolygonWKT_Pix,Confidence\n")<for_stmt>idx,image_id enumerate(df_test.index.tolist())<block_start>df_poly=mask_to_poly(y_pred[idx][0])<if_stmt>len(df_poly)<g>0<block_start><for_stmt>i,row df_poly.iterrows()<block_start>f.write("{},{},\"{}\",{:.6f}\n".format(image_id row.bid row.wkt row.area_ratio))<block_end><block_end><else_stmt><block_start>f.write("{},{},{},0\n".format(image_id -1 "POLYGON 
EMPTY"))<block_end><block_end><block_end><block_end><def_stmt>merge <block_start>df_list=[]<for_stmt>area_id range(2 6)<block_start>prefix=area_id_to_prefix(area_id)<line_sep>df_part=pd.read_csv(FMT_TESTPOLY_PATH.format(prefix))<line_sep>df_list.append(df_part)<block_end>df=pd.concat(df_list)<line_sep>df.to_csv(FN_SOLUTION_CSV index=<false>)<with_stmt>open(FN_SOLUTION_CSV 'r')<as>f<block_start>lines=f.readlines()<block_end><with_stmt>open(FN_SOLUTION_CSV 'w')<as>f<block_start>f.write(lines[0])<for_stmt>line lines[1:]<block_start>line=_remove_interiors(line)<line_sep>f.write(line)<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>cli()<block_end>
<import_from_stmt>random randint<import_stmt>numpy<as>np<try_stmt><block_start><import_stmt>tensorflow<as>tf<block_end><except_stmt>ImportError<block_start>tf=<none><block_end># ToDo: we are using a lot of tf.keras.backend modules below, can we use tf core instead? <class_stmt>MaskingDense(tf.keras.layers.Layer)<block_start>""" Just copied code from keras Dense layer and added masking and a few other tricks: - Direct auto-regressive connections to output - Allows a second (non-autoregressive) input that is fully connected to first hidden - Either 1 output or 2 outputs (concatenated) that are separately auto-regressive wrt to the input """<def_stmt>__init__ self units out_units hidden_layers=1 dropout_rate=0.0 random_input_order=<false> activation='elu' out_activation='linear' kernel_initializer='glorot_uniform' bias_initializer='zeros' out_kernel_initializer='glorot_uniform' out_bias_initializer='zeros' kernel_regularizer=<none> bias_regularizer=<none> activity_regularizer=<none> kernel_constraint=<none> bias_constraint=<none> name=<none> batchnorm=<false> **kwargs<block_start><if_stmt>'input_shape'<not><in>kwargs<and>'input_dim'<in>kwargs<block_start>kwargs['input_shape']=(kwargs.pop('input_dim') )<block_end>super(MaskingDense self).__init__(name=name **kwargs)<line_sep>self.input_sel=<none><line_sep>self.random_input_order=random_input_order<line_sep>self.rate=min(1. max(0. 
dropout_rate))<line_sep>self.kernel_sels=[]<line_sep>self.units=units<line_sep>self.out_units=out_units<line_sep>self.hidden_layers=hidden_layers<line_sep>self.activation=tf.keras.activations.get(activation)<line_sep>self.out_activation=tf.keras.activations.get(out_activation)# None gives linear activation self.kernel_initializer=tf.keras.initializers.get(kernel_initializer)<line_sep>self.bias_initializer=tf.keras.initializers.get(bias_initializer)<line_sep>self.out_kernel_initializer=tf.keras.initializers.get(out_kernel_initializer)<line_sep>self.out_bias_initializer=tf.keras.initializers.get(out_bias_initializer)<line_sep>self.kernel_regularizer=tf.keras.regularizers.get(kernel_regularizer)<line_sep>self.bias_regularizer=tf.keras.regularizers.get(bias_regularizer)<line_sep>self.activity_regularizer=tf.keras.regularizers.get(activity_regularizer)<line_sep>self.kernel_constraint=tf.keras.constraints.get(kernel_constraint)<line_sep>self.bias_constraint=tf.keras.constraints.get(bias_constraint)<line_sep>self.batchnorm=batchnorm<block_end><def_stmt>dropout_wrapper self inputs training<block_start><if_stmt>0.<l>self.rate<l>1.<block_start><def_stmt>dropped_inputs <block_start><return>tf.keras.backend.dropout(inputs self.rate noise_shape=<none> seed=<none>)<block_end><return>tf.keras.backend.in_train_phase(dropped_inputs inputs training=training)<block_end><return>inputs<block_end><def_stmt>build_layer_weights self input_dim units use_bias=<true> is_output=<false> id=''<block_start>kernel_initializer=(self.kernel_initializer<if><not>is_output<else>self.out_kernel_initializer)<line_sep>bias_initializer=(self.bias_initializer<if><not>is_output<else>self.out_bias_initializer)<line_sep>kernel=self.add_weight(shape=(input_dim units) initializer=kernel_initializer name='kernel'+id regularizer=self.kernel_regularizer constraint=self.kernel_constraint)<if_stmt>use_bias<block_start>bias=self.add_weight(shape=(units ) initializer=bias_initializer name='bias'+id 
regularizer=self.bias_regularizer constraint=self.bias_constraint)<block_end><else_stmt><block_start>bias=<none><block_end><return>kernel bias<block_end><def_stmt>build_mask self shape prev_sel is_output<block_start><if_stmt>is_output<block_start><if_stmt>shape[-1]<eq>len(self.input_sel)<block_start>input_sel=self.input_sel<block_end><else_stmt><block_start>input_sel=self.input_sel<times>2<block_end><block_end><else_stmt># Disallow D-1 because it would violate auto-regressive property # Disallow unconnected units by sampling min from previous layer <block_start>input_sel=[randint(np.min(prev_sel) shape[-1]-2)<for>i range(shape[-1])]<block_end><def_stmt>vals <block_start>in_len=len(self.input_sel)<for_stmt>x range(shape[-2])<block_start><for_stmt>y range(shape[-1])<block_start><if_stmt>is_output<block_start><yield>1<if>prev_sel[x]<l>input_sel[y%in_len]<else>0<block_end><else_stmt><block_start><yield>1<if>prev_sel[x]<le>input_sel[y]<else>0<block_end><block_end><block_end><block_end><return>tf.keras.backend.constant(list(vals()) dtype='float32' shape=shape) input_sel<block_end><def_stmt>build self input_shape<block_start><if_stmt>isinstance(input_shape list)<block_start><if_stmt>len(input_shape)<ne>2<block_start><raise>ValueError('Only list only supported for exactly two inputs')<block_end>input_shape,other_input_shape=input_shape<line_sep># Build weights for other (non-autoregressive) vector other_shape=(other_input_shape[-1] self.units)<line_sep>self.other_kernel,self.other_bias=self.build_layer_weights(*other_shape id='_h')<block_end><assert_stmt>len(input_shape)<ge>2<assert_stmt>self.out_units<eq>input_shape[-1]<or>self.out_units<eq>2<times>input_shape[-1]<line_sep>self.kernels,self.biases=[] []<line_sep>self.kernel_masks,self.kernel_sels=[] []<line_sep>self.batch_norms=[]<line_sep>shape=(input_shape[-1] 
self.units)<line_sep>self.input_sel=np.arange(input_shape[-1])<if_stmt>self.random_input_order<block_start>np.random.shuffle(self.input_sel)<block_end>prev_sel=self.input_sel<for_stmt>i range(self.hidden_layers)# Hidden layer <block_start>kernel,bias=self.build_layer_weights(*shape id=str(i))<line_sep>self.kernels.append(kernel)<line_sep>self.biases.append(bias)<line_sep># Hidden layer mask kernel_mask,kernel_sel=self.build_mask(shape prev_sel is_output=<false>)<line_sep>self.kernel_masks.append(kernel_mask)<line_sep>self.kernel_sels.append(kernel_sel)<line_sep>prev_sel=kernel_sel<line_sep>shape=(self.units self.units)<line_sep>self.batch_norms.append(tf.keras.layers.BatchNormalization(center=<true> scale=<true>))<block_end># Direct connection between input/output <if_stmt>self.hidden_layers<g>0<block_start>direct_shape=(input_shape[-1] self.out_units)<line_sep>self.direct_kernel,_=self.build_layer_weights(*direct_shape use_bias=<false> is_output=<true> id='_direct')<line_sep>self.direct_kernel_mask,self.direct_sel=self.build_mask(direct_shape self.input_sel is_output=<true>)<block_end># Output layer out_shape=(self.units self.out_units)<line_sep>self.out_kernel,self.out_bias=self.build_layer_weights(*out_shape is_output=<true> id='_out')<line_sep>self.out_kernel_mask,self.out_sel=self.build_mask(out_shape prev_sel is_output=<true>)<line_sep>self.built=<true><block_end><def_stmt>call self inputs training=<none><block_start>other_input=<none><if_stmt>isinstance(inputs list)<block_start><assert_stmt>len(inputs)<eq>2<assert_stmt>self.hidden_layers<g>0 "other input not supported if no hidden layers"<assert_stmt>hasattr(self 'other_kernel')<line_sep>inputs,other_input=inputs<block_end>output=inputs<if_stmt>other_input<is><not><none><block_start>other=tf.keras.backend.dot(other_input self.other_kernel)<line_sep>other=tf.keras.backend.bias_add(other self.other_bias)<line_sep>other=self.activation(other)<block_end># Hidden layer + mask <for_stmt>i 
range(self.hidden_layers)# i=0: input_dim -> masking_dim # i>0: masking_dim -> masking_dim <block_start>weight=self.kernels[i]<times>self.kernel_masks[i]<line_sep>output=tf.keras.backend.dot(output weight)<line_sep># "other" input <if_stmt>i<eq>0<and>other_input<is><not><none><block_start>output=output+other<block_end>output=tf.keras.backend.bias_add(output self.biases[i])<line_sep>output=self.activation(output)<if_stmt>self.batchnorm<block_start>output=self.batch_norms[i](output)<block_end>output=self.dropout_wrapper(output training)<block_end># out_act(bias + (V dot M_v)h(x) + (A dot M_a)x + (other dot M_other)other) # masking_dim -> input_dim output=tf.keras.backend.dot(output self.out_kernel<times>self.out_kernel_mask)<line_sep># Direct connection <if_stmt>self.hidden_layers<g>0# input_dim -> input_dim <block_start>direct=tf.keras.backend.dot(inputs self.direct_kernel<times>self.direct_kernel_mask)<line_sep>output=output+direct<block_end>output=tf.keras.backend.bias_add(output self.out_bias)<line_sep>output=self.out_activation(output)<line_sep>output=self.dropout_wrapper(output training)<line_sep><return>output<block_end><def_stmt>compute_output_shape self input_shape<block_start><if_stmt>isinstance(input_shape list)<block_start>input_shape=input_shape[0]<block_end><return>(input_shape[0] self.out_units)<block_end><block_end>
# filename: __init__.py # used to test that Catalyst can load Packages # correctly. <import_from_stmt>paraview.simple *<import_from_stmt>paraview print_info<line_sep># print start marker print_info("begin '%s'" __name__)<line_sep>tracker={}<def_stmt>count f<block_start><def_stmt>wrapper *args **kwargs<block_start><global>tracker<line_sep>c=tracker.get(f.__name__ 0)<line_sep>tracker[f.__name__]=c+1<line_sep><return>f(*args **kwargs)<block_end><return>wrapper<block_end>@count<def_stmt>catalyst_initialize <block_start>print_info("in '%s::catalyst_initialize'" __name__)<block_end>@count<def_stmt>catalyst_execute info<block_start>print_info("in '%s::catalyst_execute'" __name__)<block_end>@count<def_stmt>catalyst_finalize <block_start>print_info("in '%s::catalyst_finalize'" __name__)<line_sep><global>tracker<assert_stmt>tracker["catalyst_initialize"]<eq>1<assert_stmt>tracker["catalyst_finalize"]<eq>1<assert_stmt>tracker["catalyst_execute"]<ge>1<line_sep>print_info("All's ok")<block_end># print end marker print_info("end '%s'" __name__)<line_sep>
<import_from_future_stmt> print_function<import_from_stmt>distutils log<import_from_stmt>setuptools setup find_packages<import_stmt>os<import_from_stmt>jupyter_packaging create_cmdclass install_npm ensure_targets combine_commands get_version skip_if_exists <line_sep># Name of the project name='keplergl'<line_sep>here=os.path.dirname(os.path.abspath(__file__))<line_sep>long_description='Keplergl Jupyter Extension'<line_sep>log.info('setup.py entered')<line_sep>log.info('$PATH=%s'%os.environ['PATH'])<line_sep># Get version version=get_version(os.path.join(name '_version.py'))<line_sep>js_dir=os.path.join(here 'js')<line_sep># Representative files that should exist after a successful build jstargets=[os.path.join('keplergl' 'static' 'index.js') os.path.join('keplergl-jupyter' 'labextension' 'package.json') ]<line_sep>data_files_spec=[('share/jupyter/nbextensions/keplergl-jupyter' 'keplergl/static' '**') ('share/jupyter/labextensions/keplergl-jupyter' 'keplergl-jupyter/labextension' "**") ('etc/jupyter/nbconfig/notebook.d' '.' 'keplergl-jupyter.json') ]<line_sep>cmdclass=create_cmdclass('jsdeps' data_files_spec=data_files_spec)<line_sep>js_command=combine_commands(install_npm(js_dir npm=["yarn"] build_cmd='build') ensure_targets(jstargets) )<line_sep>is_repo=os.path.exists(os.path.join(here '.git'))<if_stmt>is_repo<block_start>cmdclass['jsdeps']=js_command<block_end><else_stmt><block_start>cmdclass['jsdeps']=skip_if_exists(jstargets js_command)<block_end>LONG_DESCRIPTION='A jupyter widget for kepler.gl, an advanced geospatial visualization tool, to render large-scale interactive maps.'<line_sep>setup_args={'name':'keplergl' 'version':version 'description':'This is a simple jupyter widget for kepler.gl, an advanced geospatial visualization tool, to render large-scale interactive maps.' 
'long_description':LONG_DESCRIPTION 'include_package_data':<true> 'install_requires':['ipywidgets>=7.0.0,<8' 'traittypes>=0.2.1' 'geopandas>=0.5.0' 'pandas>=0.23.0' 'Shapely>=1.6.4.post2'] 'packages':find_packages() 'zip_safe':<false> 'cmdclass':cmdclass 'author':'<NAME>' 'author_email':'<EMAIL>' 'url':'https://github.com/keplergl/kepler.gl/tree/master/bindings/kepler.gl-jupyter' 'keywords':['ipython' 'jupyter' 'widgets' 'geospatial' 'visualization' 'webGL'] 'classifiers':['Development Status :: 4 - Beta' 'Framework :: IPython' 'Intended Audience :: Developers' 'Intended Audience :: Science/Research' 'Topic :: Multimedia :: Graphics' 'Programming Language :: Python :: 2' 'Programming Language :: Python :: 3.6' 'Programming Language :: Python :: 3.7' 'Programming Language :: Python :: 3.8' 'Programming Language :: Python :: 3.9' ] }<line_sep>setup(**setup_args)<line_sep>
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) <import_stmt>tempfile<class_stmt>PyDmTree(PythonPackage)<block_start>"""tree is a library for working with nested data structures. In a way, tree generalizes the builtin map() function which only supports flat sequences, and allows to apply a function to each leaf preserving the overall structure."""<line_sep>homepage="https://github.com/deepmind/tree"<line_sep>pypi="dm-tree/dm-tree-0.1.5.tar.gz"<line_sep>maintainers=['aweits']<line_sep>version('0.1.5' sha256='a951d2239111dfcc468071bc8ff792c7b1e3192cab5a3c94d33a8b2bda3127fa')<line_sep>depends_on('py-setuptools' type='build')<line_sep>depends_on('bazel' type='build')<line_sep>depends_on('py-six@1.12.0:' type=('build' 'run'))<line_sep>@run_after('install')<def_stmt>clean self<block_start>remove_linked_tree(self.tmp_path)<block_end><def_stmt>patch self<block_start>self.tmp_path=tempfile.mkdtemp(dir='/tmp' prefix='spack')<line_sep>env['TEST_TMPDIR']=self.tmp_path<line_sep>env['HOME']=self.tmp_path<line_sep>args=[# Don't allow user or system .bazelrc to override build settings "'--nohome_rc',\n" "'--nosystem_rc',\n" # Bazel does not work properly on NFS, switch to /tmp "'--output_user_root={0}',\n".format(self.tmp_path) "'build',\n" # Spack logs don't handle colored output well "'--color=no',\n" "'--jobs={0}',\n".format(make_jobs) # Enable verbose output for failures "'--verbose_failures',\n" # Show (formatted) subcommands being executed "'--subcommands=pretty_print',\n" "'--spawn_strategy=local',\n" # Ask bazel to explain what it's up to # Needs a filename as argument "'--explain=explainlogfile.txt',\n" # Increase verbosity of explanation, "'--verbose_explanations',\n" # bazel uses system PYTHONPATH instead of spack paths "'--action_env', 'PYTHONPATH={0}',\n".format(env['PYTHONPATH']) ]<line_sep>filter_file("'build'," ' 
'.join(args) 'setup.py')<block_end><block_end>
# # Copyright (c) 2017 Idiap Research Institute, http://www.idiap.ch/ # Written by <NAME> <<EMAIL>> # <import_stmt>unittest<import_from_stmt>keras.layers Input<import_from_stmt>keras.models Model<import_stmt>numpy<as>np<import_from_stmt>importance_sampling.reweighting BiasedReweightingPolicy NoReweightingPolicy<class_stmt>TestReweighting(unittest.TestCase)<block_start><def_stmt>_test_external_reweighting_layer self rw<block_start>s1,s2=Input(shape=(1 )) Input(shape=(1 ))<line_sep>w=rw.weight_layer()([s1 s2])<line_sep>m=Model(inputs=[s1 s2] outputs=[w])<line_sep>m.compile("sgd" "mse")<line_sep>r=np.random.rand(100 1).astype(np.float32)<line_sep>r_hat=m.predict([np.zeros((100 1)) r])<line_sep>self.assertTrue(np.all(r<eq>r_hat))<block_end><def_stmt>test_biased_reweighting self<block_start>rw=BiasedReweightingPolicy(k=1.)<line_sep>s=np.random.rand(100)<line_sep>i=np.arange(100)<line_sep>w=rw.sample_weights(i s).ravel()<line_sep>self.assertEqual(rw.weight_size 1)<line_sep>self.assertAlmostEqual(w.dot(s) s.sum())<line_sep>self._test_external_reweighting_layer(rw)<line_sep># Make sure that it is just a normalized version of the same weights # raised to k rw=BiasedReweightingPolicy(k=0.5)<line_sep>w_hat=rw.sample_weights(i s).ravel()<line_sep>scales=w<power>0.5/w_hat<line_sep>scales_hat=np.ones(100)<times>scales[0]<line_sep>self.assertTrue(np.allclose(scales scales_hat))<block_end><def_stmt>test_no_reweighting self<block_start>rw=NoReweightingPolicy()<line_sep>self.assertTrue(np.all(rw.sample_weights(np.arange(100) np.random.rand(100))<eq>1.0))<line_sep>self._test_external_reweighting_layer(rw)<block_end><def_stmt>test_adjusted_biased_reweighting self<block_start>self.skipTest("Not implemented yet")<block_end><def_stmt>test_correcting_reweighting_policy self<block_start>self.skipTest("Not implemented yet")<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>unittest.main()<block_end>
# Generic uwsgi_param headers
#
# Each constant names a standard CGI/uwsgi parameter key; the value is the
# key itself so callers can use these symbols instead of raw strings.
CONTENT_LENGTH = 'CONTENT_LENGTH'
CONTENT_TYPE = 'CONTENT_TYPE'
DOCUMENT_ROOT = 'DOCUMENT_ROOT'
QUERY_STRING = 'QUERY_STRING'
PATH_INFO = 'PATH_INFO'
REMOTE_ADDR = 'REMOTE_ADDR'
REMOTE_PORT = 'REMOTE_PORT'
REQUEST_METHOD = 'REQUEST_METHOD'
REQUEST_URI = 'REQUEST_URI'
SERVER_ADDR = 'SERVER_ADDR'
SERVER_NAME = 'SERVER_NAME'
SERVER_PORT = 'SERVER_PORT'
SERVER_PROTOCOL = 'SERVER_PROTOCOL'

# SSL uwsgi_param headers
CLIENT_SSL_CERT = 'CLIENT_SSL_CERT'
__author__ = 'wenqihe'

from abstract_feature import AbstractFeature
from em_token_feature import EMHeadFeature, get_lemma


class EMDependencyFeature(AbstractFeature):
    """Dependency-arc features for an entity mention.

    Universal Dependencies
    """

    # Dependency relations (besides prep*) that produce features.
    accepted_deps = ["nn", "agent", "dobj", "nsubj", "amod", "nsubjpass",
                     "poss", "appos"]

    def apply(self, sentence, mention, features):
        """Append DEP_* features for every dependency arc that touches a
        token inside the mention span [mention.start, mention.end).

        For an arc (dep_type, gov, dep): if the governor lies inside the
        mention, feature the dependent ('gov:' role); if the dependent lies
        inside, feature the governor ('dep:' role).
        """
        start = mention.start
        end = mention.end
        for dep_type, gov, dep in sentence.dep:
            if start <= gov < end:
                self._append_feature(sentence, dep, "gov", dep_type, features)
            if start <= dep < end:
                self._append_feature(sentence, gov, "dep", dep_type, features)

    def _append_feature(self, sentence, other, role, dep_type, features):
        # Emit one feature for the token at `other` (the arc endpoint outside
        # the mention), provided it is a valid index and both the token's POS
        # and the relation are accepted.
        if 0 <= other < sentence.size():
            token = get_lemma(sentence.tokens[other], sentence.pos[other])
            pos = sentence.pos[other]
            if self.accept_pos(pos) and self.accept_dep(dep_type):
                key = role + ":" + dep_type + ":" + token + "=" + pos[0]
                features.append("DEP_" + key)

    def accept_pos(self, pos):
        # Only nouns and verbs (Penn tags starting with N or V).
        return pos[0] == 'N' or pos[0] == 'V'

    def accept_dep(self, dep):
        # Any prepositional relation, or one of the whitelisted relations.
        return dep.startswith('prep') or dep in self.accepted_deps
##@file kmedian.py
# @brief model for solving the k-median problem.
"""
minimize the total (weighted) travel cost for servicing
a set of customers from k facilities.

Copyright (c) by <NAME> and <NAME>, 2012
"""
import math
import random

from pyscipopt import Model, quicksum, multidict


def kmedian(I, J, c, k):
    """kmedian -- minimize total cost of servicing customers from k facilities
    Parameters:
        - I: set of customers
        - J: set of potential facilities
        - c[i,j]: cost of servicing customer i from facility j
        - k: number of facilities to be used
    Returns a model, ready to be solved.
    """
    model = Model("k-median")
    x, y = {}, {}

    # y[j] = 1 iff facility j is opened; x[i,j] = 1 iff customer i is
    # served from facility j.
    for j in J:
        y[j] = model.addVar(vtype="B", name="y(%s)" % j)
        for i in I:
            x[i, j] = model.addVar(vtype="B", name="x(%s,%s)" % (i, j))

    for i in I:
        # Every customer is assigned to exactly one facility.
        model.addCons(quicksum(x[i, j] for j in J) == 1, "Assign(%s)" % i)
        for j in J:
            # "Strong" linking constraint: assignment only to open facilities.
            model.addCons(x[i, j] <= y[j], "Strong(%s,%s)" % (i, j))
    # Exactly k facilities are opened.
    model.addCons(quicksum(y[j] for j in J) == k, "Facilities")

    model.setObjective(quicksum(c[i, j] * x[i, j] for i in I for j in J),
                       "minimize")
    model.data = x, y
    return model


def distance(x1, y1, x2, y2):
    """return distance of two points"""
    return math.sqrt((x2 - x1)**2 + (y2 - y1)**2)


def make_data(n, m, same=True):
    """creates example data set

    If `same` is true, customers and facility candidates are the same n
    (resp. m) points; otherwise the m candidates are separate points.
    """
    if same:
        I = range(n)
        J = range(m)
        x = [random.random() for i in range(max(m, n))]  # positions of the points in the plane
        y = [random.random() for i in range(max(m, n))]
    else:
        I = range(n)
        J = range(n, n + m)
        x = [random.random() for i in range(n + m)]  # positions of the points in the plane
        y = [random.random() for i in range(n + m)]
    c = {}
    for i in I:
        for j in J:
            c[i, j] = distance(x[i], y[i], x[j], y[j])
    return I, J, c, x, y


if __name__ == "__main__":
    import sys
    random.seed(67)
    n = 200
    m = n
    I, J, c, x_pos, y_pos = make_data(n, m, same=True)
    k = 20
    model = kmedian(I, J, c, k)
    # model.Params.Threads = 1
    model.optimize()
    EPS = 1.e-6
    x, y = model.data
    edges = [(i, j) for (i, j) in x if model.getVal(x[i, j]) > EPS]
    facilities = [j for j in y if model.getVal(y[j]) > EPS]

    print("Optimal value:", model.getObjVal())
    print("Selected facilities:", facilities)
    print("Edges:", edges)
    print("max c:", max([c[i, j] for (i, j) in edges]))

    try:  # plot the result using networkx and matplotlib
        import networkx as NX
        import matplotlib.pyplot as P
        P.clf()
        G = NX.Graph()

        facilities = set(j for j in J if model.getVal(y[j]) > EPS)
        other = set(j for j in J if j not in facilities)
        client = set(i for i in I if i not in facilities and i not in other)
        G.add_nodes_from(facilities)
        G.add_nodes_from(client)
        G.add_nodes_from(other)
        for (i, j) in edges:
            G.add_edge(i, j)

        position = {}
        for i in range(len(x_pos)):
            position[i] = (x_pos[i], y_pos[i])

        NX.draw(G, position, with_labels=False, node_color="w",
                nodelist=facilities)
        NX.draw(G, position, with_labels=False, node_color="c",
                nodelist=other, node_size=50)
        NX.draw(G, position, with_labels=False, node_color="g",
                nodelist=client, node_size=50)
        P.show()
    except ImportError:
        print("install 'networkx' and 'matplotlib' for plotting")
""" DIMACS For more information on the input formats, see "Satisfiability Suggested Format", published May 1993 by the Rutgers Center for Discrete Mathematics (DIMACS). Also, see the proceedings of the International SAT Competition (http://www.satcompetition.org) for information and CNF examples. Exceptions: Error Interface Functions: parse_cnf parse_sat """<line_sep># Disable 'invalid-name', because this module uses an unconventional naming # scheme for the parsing tokens. # pylint: disable=C0103 <import_from_stmt>pyeda.parsing lex<import_from_stmt>pyeda.parsing.token EndToken KeywordToken IntegerToken OperatorToken PunctuationToken <class_stmt>Error(Exception)<block_start>"""An error happened during parsing a DIMACS file"""<block_end># Keywords <class_stmt>KW_p(KeywordToken)<block_start>"""DIMACS 'p' preamble token"""<block_end><class_stmt>KW_cnf(KeywordToken)<block_start>"""DIMACS 'cnf' token"""<block_end><class_stmt>KW_sat(KeywordToken)<block_start>"""DIMACS 'sat' token"""<block_end><class_stmt>KW_satx(KeywordToken)<block_start>"""DIMACS 'satx' token"""<block_end><class_stmt>KW_sate(KeywordToken)<block_start>"""DIMACS 'sate' token"""<block_end><class_stmt>KW_satex(KeywordToken)<block_start>"""DIMACS 'satex' token"""<block_end># Operators <class_stmt>OP_not(OperatorToken)<block_start>"""DIMACS '-' operator"""<line_sep>ASTOP='not'<block_end><class_stmt>OP_or(OperatorToken)<block_start>"""DIMACS '+' operator"""<line_sep>ASTOP='or'<block_end><class_stmt>OP_and(OperatorToken)<block_start>"""DIMACS '*' operator"""<line_sep>ASTOP='and'<block_end><class_stmt>OP_xor(OperatorToken)<block_start>"""DIMACS 'xor' operator"""<line_sep>ASTOP='xor'<block_end><class_stmt>OP_equal(OperatorToken)<block_start>"""DIMACS '=' operator"""<line_sep>ASTOP='equal'<block_end># Punctuation <class_stmt>LPAREN(PunctuationToken)<block_start>"""DIMACS '(' token"""<block_end><class_stmt>RPAREN(PunctuationToken)<block_start>"""DIMACS ')' 
token"""<block_end><class_stmt>CNFLexer(lex.RegexLexer)<block_start>"""Lexical analysis of CNF strings"""<def_stmt>ignore self text<block_start>"""Ignore this text."""<block_end><def_stmt>keyword self text<block_start>"""Push a keyword onto the token queue."""<line_sep>cls=self.KEYWORDS[text]<line_sep>self.push_token(cls(text self.lineno self.offset))<block_end><def_stmt>operator self text<block_start>"""Push an operator onto the token queue."""<line_sep>cls=self.OPERATORS[text]<line_sep>self.push_token(cls(text self.lineno self.offset))<block_end>@lex.action(IntegerToken)<def_stmt>integer self text<block_start>"""Push an integer onto the token queue."""<line_sep><return>int(text)<block_end>RULES={'root':[(r"c.*\n" ignore) (r"\bp\b" keyword 'preamble') ] 'preamble':[(r"[ \t]+" ignore) (r"\bcnf\b" keyword) (r"\d+" integer) (r"\n" ignore 'formula') ] 'formula':[(r"\s+" ignore) (r"-" operator) (r"\d+" integer) ] }<line_sep>KEYWORDS={'p':KW_p 'cnf':KW_cnf }<line_sep>OPERATORS={'-':OP_not }<block_end><def_stmt>_expect_token lexer types<block_start>"""Return the next token, or raise an exception."""<line_sep>tok=next(lexer)<if_stmt>any(isinstance(tok t)<for>t types)<block_start><return>tok<block_end><else_stmt><block_start><raise>Error("unexpected token: "+str(tok))<block_end><block_end><def_stmt>parse_cnf s varname='x'<block_start>""" Parse an input string in DIMACS CNF format, and return an expression abstract syntax tree. Parameters ---------- s : str String containing a DIMACS CNF. varname : str, optional The variable name used for creating literals. Defaults to 'x'. Returns ------- An ast tuple, defined recursively: ast := ('var', names, indices) | ('not', ast) | ('or', ast, ...) | ('and', ast, ...) names := (name, ...) indices := (index, ...) 
"""<line_sep>lexer=iter(CNFLexer(s))<try_stmt><block_start>ast=_cnf(lexer varname)<block_end><except_stmt>lex.RunError<as>exc<block_start>fstr=("{0.args[0]}: "<concat>"(line: {0.lineno}, offset: {0.offset}, text: {0.text})")<line_sep><raise>Error(fstr.format(exc))<block_end># Check for end of buffer _expect_token(lexer {EndToken})<line_sep><return>ast<block_end><def_stmt>_cnf lexer varname<block_start>"""Return a DIMACS CNF."""<line_sep>_expect_token(lexer {KW_p})<line_sep>_expect_token(lexer {KW_cnf})<line_sep>nvars=_expect_token(lexer {IntegerToken}).value<line_sep>nclauses=_expect_token(lexer {IntegerToken}).value<line_sep><return>_cnf_formula(lexer varname nvars nclauses)<block_end><def_stmt>_cnf_formula lexer varname nvars nclauses<block_start>"""Return a DIMACS CNF formula."""<line_sep>clauses=_clauses(lexer varname nvars)<if_stmt>len(clauses)<l>nclauses<block_start>fstr="formula has fewer than {} clauses"<line_sep><raise>Error(fstr.format(nclauses))<block_end><if_stmt>len(clauses)<g>nclauses<block_start>fstr="formula has more than {} clauses"<line_sep><raise>Error(fstr.format(nclauses))<block_end><return>('and' )+clauses<block_end><def_stmt>_clauses lexer varname nvars<block_start>"""Return a tuple of DIMACS CNF clauses."""<line_sep>tok=next(lexer)<line_sep>toktype=type(tok)<if_stmt>toktype<is>OP_not<or>toktype<is>IntegerToken<block_start>lexer.unpop_token(tok)<line_sep>first=_clause(lexer varname nvars)<line_sep>rest=_clauses(lexer varname nvars)<line_sep><return>(first )+rest<block_end># null <else_stmt><block_start>lexer.unpop_token(tok)<line_sep><return>tuple()<block_end><block_end><def_stmt>_clause lexer varname nvars<block_start>"""Return a DIMACS CNF clause."""<line_sep><return>('or' )+_lits(lexer varname nvars)<block_end><def_stmt>_lits lexer varname nvars<block_start>"""Return a tuple of DIMACS CNF clause literals."""<line_sep>tok=_expect_token(lexer {OP_not IntegerToken})<if_stmt>isinstance(tok 
IntegerToken)<and>tok.value<eq>0<block_start><return>tuple()<block_end><else_stmt><block_start><if_stmt>isinstance(tok OP_not)<block_start>neg=<true><line_sep>tok=_expect_token(lexer {IntegerToken})<block_end><else_stmt><block_start>neg=<false><block_end>index=tok.value<if_stmt>index<g>nvars<block_start>fstr="formula literal {} is greater than {}"<line_sep><raise>Error(fstr.format(index nvars))<block_end>lit=('var' (varname ) (index ))<if_stmt>neg<block_start>lit=('not' lit)<block_end><return>(lit )+_lits(lexer varname nvars)<block_end><block_end><class_stmt>SATLexer(lex.RegexLexer)<block_start>"""Lexical analysis of SAT strings"""<def_stmt>ignore self text<block_start>"""Ignore this text."""<block_end><def_stmt>keyword self text<block_start>"""Push a keyword onto the token queue."""<line_sep>cls=self.KEYWORDS[text]<line_sep>self.push_token(cls(text self.lineno self.offset))<block_end><def_stmt>operator self text<block_start>"""Push an operator onto the token queue."""<line_sep>cls=self.OPERATORS[text]<line_sep>self.push_token(cls(text self.lineno self.offset))<block_end><def_stmt>punct self text<block_start>"""Push punctuation onto the token queue."""<line_sep>cls=self.PUNCTUATION[text]<line_sep>self.push_token(cls(text self.lineno self.offset))<block_end>@lex.action(IntegerToken)<def_stmt>integer self text<block_start>"""Push an integer onto the token queue."""<line_sep><return>int(text)<block_end>RULES={'root':[(r"c.*\n" ignore) (r"\bp\b" keyword 'preamble') ] 'preamble':[(r"[ \t]+" ignore) (r"\bsat\b" keyword) (r"\bsatx\b" keyword) (r"\bsate\b" keyword) (r"\bsatex\b" keyword) (r"\d+" integer) (r"\n" ignore 'formula') ] 'formula':[(r"\s+" ignore) (r"\+" operator) (r"\-" operator) (r"\*" operator) (r"\bxor\b" operator) (r"=" operator) (r"\(" punct) (r"\)" punct) (r"\d+" integer) ] }<line_sep>KEYWORDS={'p':KW_p 'sat':KW_sat 'satx':KW_satx 'sate':KW_sate 'satex':KW_satex }<line_sep>OPERATORS={'-':OP_not '+':OP_or '*':OP_and 'xor':OP_xor '=':OP_equal 
}<line_sep>PUNCTUATION={'(':LPAREN ')':RPAREN }<block_end>SAT_GRAMMAR=""" SAT := COMMENT* PREAMBLE FORMULA COMMENT := 'c' .* '\n' PREAMBLE := 'p' FORMAT VARIABLES '\n' FORMAT := 'sat' | 'satx' | 'sate' | 'satex' VARIABLES := INT FORMULA := INT | '-' INT | '(' FORMULA ')' | '-' '(' FORMULA ')' | OP '(' FORMULAS ')' OP := '+' | '*' | 'xor' | '=' FORMULAS := FORMULAS FORMULA | null """<line_sep>_SAT_TOKS={'sat':{OP_not OP_or OP_and} 'satx':{OP_not OP_or OP_and OP_xor} 'sate':{OP_not OP_or OP_and OP_equal} 'satex':{OP_not OP_or OP_and OP_xor OP_equal} }<def_stmt>parse_sat s varname='x'<block_start>""" Parse an input string in DIMACS SAT format, and return an expression. """<line_sep>lexer=iter(SATLexer(s))<try_stmt><block_start>ast=_sat(lexer varname)<block_end><except_stmt>lex.RunError<as>exc<block_start>fstr=("{0.args[0]}: "<concat>"(line: {0.lineno}, offset: {0.offset}, text: {0.text})")<line_sep><raise>Error(fstr.format(exc))<block_end># Check for end of buffer _expect_token(lexer {EndToken})<line_sep><return>ast<block_end><def_stmt>_sat lexer varname<block_start>"""Return a DIMACS SAT."""<line_sep>_expect_token(lexer {KW_p})<line_sep>fmt=_expect_token(lexer {KW_sat KW_satx KW_sate KW_satex}).value<line_sep>nvars=_expect_token(lexer {IntegerToken}).value<line_sep><return>_sat_formula(lexer varname fmt nvars)<block_end><def_stmt>_sat_formula lexer varname fmt nvars<block_start>"""Return a DIMACS SAT formula."""<line_sep>types={IntegerToken LPAREN}|_SAT_TOKS[fmt]<line_sep>tok=_expect_token(lexer types)<line_sep># INT <if_stmt>isinstance(tok IntegerToken)<block_start>index=tok.value<if_stmt><not>0<l>index<le>nvars<block_start>fstr="formula literal {} outside valid range: (0, {}]"<line_sep><raise>Error(fstr.format(index nvars))<block_end><return>('var' (varname ) (index ))<block_end># '-' <elif_stmt>isinstance(tok OP_not)<block_start>tok=_expect_token(lexer {IntegerToken LPAREN})<line_sep># '-' INT <if_stmt>isinstance(tok 
IntegerToken)<block_start>index=tok.value<if_stmt><not>0<l>index<le>nvars<block_start>fstr="formula literal {} outside valid range: (0, {}]"<line_sep><raise>Error(fstr.format(index nvars))<block_end><return>('not' ('var' (varname ) (index )))<block_end># '-' '(' FORMULA ')' <else_stmt><block_start>formula=_sat_formula(lexer varname fmt nvars)<line_sep>_expect_token(lexer {RPAREN})<line_sep><return>('not' formula)<block_end><block_end># '(' FORMULA ')' <elif_stmt>isinstance(tok LPAREN)<block_start>formula=_sat_formula(lexer varname fmt nvars)<line_sep>_expect_token(lexer {RPAREN})<line_sep><return>formula<block_end># OP '(' FORMULAS ')' <else_stmt><block_start>_expect_token(lexer {LPAREN})<line_sep>formulas=_formulas(lexer varname fmt nvars)<line_sep>_expect_token(lexer {RPAREN})<line_sep><return>(tok.ASTOP )+formulas<block_end><block_end><def_stmt>_formulas lexer varname fmt nvars<block_start>"""Return a tuple of DIMACS SAT formulas."""<line_sep>types={IntegerToken LPAREN}|_SAT_TOKS[fmt]<line_sep>tok=lexer.peek_token()<if_stmt>any(isinstance(tok t)<for>t types)<block_start>first=_sat_formula(lexer varname fmt nvars)<line_sep>rest=_formulas(lexer varname fmt nvars)<line_sep><return>(first )+rest<block_end># null <else_stmt><block_start><return>tuple()<block_end><block_end>
<import_from_future_stmt> print_function<import_stmt>math<import_stmt>numpy<import_stmt>theano<import_stmt>itertools<import_from_stmt>theano tensor Op<import_from_stmt>theano.gradient disconnected_type<import_from_stmt>fuel.utils do_not_pickle_attributes<import_from_stmt>picklable_itertools.extras equizip<import_from_stmt>collections defaultdict deque<import_from_stmt>toposort toposort_flatten<import_from_stmt>lvsr.error_rate reward_matrix gain_matrix edit_distance _edit_distance_matrix _bleu <class_stmt>RewardOp(Op)<block_start>__props__=()<def_stmt>__init__ self eos_label alphabet_size<block_start>"""Computes matrices of rewards and gains."""<line_sep>self.eos_label=eos_label<line_sep>self.alphabet_size=alphabet_size<block_end><def_stmt>perform self node inputs output_storage<block_start>groundtruth,recognized=inputs<if_stmt>(groundtruth.ndim<ne>2<or>recognized.ndim<ne>2<or>groundtruth.shape[1]<ne>recognized.shape[1])<block_start><raise>ValueError<block_end>batch_size=groundtruth.shape[1]<line_sep>all_rewards=numpy.zeros(recognized.shape+(self.alphabet_size ) dtype='int64')<line_sep>all_gains=numpy.zeros(recognized.shape+(self.alphabet_size ) dtype='int64')<line_sep>alphabet=list(range(self.alphabet_size))<for_stmt>index range(batch_size)<block_start>y=list(groundtruth[: index])<line_sep>y_hat=list(recognized[: index])<try_stmt><block_start>eos_pos=y.index(self.eos_label)<line_sep>y=y[:eos_pos+1]<block_end><except_stmt># Sometimes groundtruth is in fact also a prediction # and in this case it might not have EOS label <block_start><pass><block_end><if_stmt>self.eos_label<in>y_hat<block_start>y_hat_eos_pos=y_hat.index(self.eos_label)<line_sep>y_hat_trunc=y_hat[:y_hat_eos_pos+1]<block_end><else_stmt><block_start>y_hat_trunc=y_hat<block_end>rewards_trunc=reward_matrix(y y_hat_trunc alphabet self.eos_label)<line_sep># pass freshly computed rewards to gain_matrix to speed things up # a bit gains_trunc=gain_matrix(y y_hat_trunc alphabet 
given_reward_matrix=rewards_trunc)<line_sep>gains=numpy.ones((len(y_hat) len(alphabet)))<times>-1000<line_sep>gains[:(gains_trunc.shape[0]-1) :]=gains_trunc[:-1 :]<line_sep>rewards=numpy.ones((len(y_hat) len(alphabet)))<times>-1<line_sep>rewards[:(rewards_trunc.shape[0]-1) :]=rewards_trunc[:-1 :]<line_sep>all_rewards[: index :]=rewards<line_sep>all_gains[: index :]=gains<block_end>output_storage[0][0]=all_rewards<line_sep>output_storage[1][0]=all_gains<block_end><def_stmt>grad self *args **kwargs<block_start><return>disconnected_type() disconnected_type()<block_end><def_stmt>make_node self groundtruth recognized<block_start>recognized=tensor.as_tensor_variable(recognized)<line_sep>groundtruth=tensor.as_tensor_variable(groundtruth)<line_sep><return>theano.Apply(self [groundtruth recognized] [tensor.ltensor3() tensor.ltensor3()])<block_end><block_end><def_stmt>trim y mask<block_start><try_stmt><block_start><return>y[:mask.index(0.)]<block_end><except_stmt>ValueError<block_start><return>y<block_end><block_end><class_stmt>EditDistanceOp(Op)<block_start>__props__=()<def_stmt>__init__ self bos_label eos_label deltas=<false><block_start>self.bos_label=bos_label<line_sep>self.eos_label=eos_label<line_sep>self.deltas=deltas<block_end><def_stmt>perform self node inputs output_storage<block_start>prediction,prediction_mask,groundtruth,groundtruth_mask=inputs<if_stmt>(groundtruth.ndim<ne>2<or>prediction.ndim<ne>2<or>groundtruth.shape[1]<ne>prediction.shape[1])<block_start><raise>ValueError<block_end>batch_size=groundtruth.shape[1]<line_sep>results=numpy.zeros_like(prediction[: : <none>])<for_stmt>index range(batch_size)<block_start>y=trim(list(groundtruth[: index]) list(groundtruth_mask[: index]))<line_sep>y_hat=trim(list(prediction[: index]) list(prediction_mask[: index]))<if_stmt>self.deltas<block_start>matrix=_edit_distance_matrix(y y_hat special_tokens={self.bos_label self.eos_label})<line_sep>row=matrix[-1 :].copy()<line_sep>results[:len(y_hat) index 0]=row[1:]-matrix[-1 
:-1]<block_end><else_stmt><block_start>results[len(y_hat)-1 index 0]=edit_distance(y y_hat)<block_end><block_end>output_storage[0][0]=results<block_end><def_stmt>grad self *args **kwargs<block_start><return>theano.gradient.disconnected_type()<block_end><def_stmt>make_node self prediction prediction_mask groundtruth groundtruth_mask<block_start>prediction=tensor.as_tensor_variable(prediction)<line_sep>prediction_mask=tensor.as_tensor_variable(prediction_mask)<line_sep>groundtruth=tensor.as_tensor_variable(groundtruth)<line_sep>groundtruth_mask=tensor.as_tensor_variable(groundtruth_mask)<line_sep><return>theano.Apply(self [prediction prediction_mask groundtruth groundtruth_mask] [tensor.ltensor3()])<block_end><block_end><class_stmt>BleuOp(Op)<block_start>__props__=()<def_stmt>__init__ self bos_label eos_label deltas=<false><block_start>self.n=4<line_sep>self.deltas=deltas<line_sep>self.special_tokens=set([bos_label eos_label])<block_end><def_stmt>grad self *args **kwargs<block_start><return>[theano.gradient.disconnected_type()]<times>4<block_end><def_stmt>perform self node inputs output_storage<block_start>prediction,prediction_mask,groundtruth,groundtruth_mask=inputs<if_stmt>(groundtruth.ndim<ne>2<or>prediction.ndim<ne>2<or>groundtruth.shape[1]<ne>prediction.shape[1])<block_start><raise>ValueError<block_end>batch_size=groundtruth.shape[1]<line_sep>results=numpy.zeros_like(prediction[: : <none>]).astype('float32')<for_stmt>index range(batch_size)<block_start>y=trim(list(groundtruth[: index]) list(groundtruth_mask[: index]))<line_sep>y_no_special=[token<for>token y<if>token<not><in>self.special_tokens]<line_sep>y_hat=trim(list(prediction[: index]) list(prediction_mask[: index]))<line_sep>y_hat_no_special=[token<for>token y_hat<if>token<not><in>self.special_tokens]<line_sep>blues,_,_,_=_bleu(y_no_special y_hat_no_special self.n)<line_sep>reward=blues[: self.n-1].copy()<if_stmt>self.deltas<block_start>reward[1:]=reward[1:]-reward[:-1]<line_sep>pos=-1<for_stmt>i 
range(len(y_hat))<block_start><if_stmt>y_hat[i]<not><in>self.special_tokens<block_start>pos=pos+1<line_sep>results[i index 0]=reward[pos]<block_end><else_stmt><block_start>results[i index 0]=0.<block_end><block_end><block_end><elif_stmt>len(reward)<block_start>results[len(y_hat)-1 index 0]=reward[-1]<block_end><block_end>output_storage[0][0]=results<block_end><def_stmt>make_node self prediction prediction_mask groundtruth groundtruth_mask<block_start>prediction=tensor.as_tensor_variable(prediction)<line_sep>prediction_mask=tensor.as_tensor_variable(prediction_mask)<line_sep>groundtruth=tensor.as_tensor_variable(groundtruth)<line_sep>groundtruth_mask=tensor.as_tensor_variable(groundtruth_mask)<line_sep><return>theano.Apply(self [prediction prediction_mask groundtruth groundtruth_mask] [tensor.tensor3()])<block_end><block_end>
# TODO: refactor nested_dict into common library with ATen
class nested_dict(object):
    """
    A nested dict is a dictionary with a parent. If key lookup fails, it
    recursively continues into the parent. Writes always happen to the top
    level dict.
    """

    def __init__(self, base, parent):
        self.base, self.parent = base, parent

    def __contains__(self, item):
        return item in self.base or item in self.parent

    def __getitem__(self, x):
        # Use a key lookup rather than .get()-with-None-check so that a key
        # legitimately mapped to None in the top-level dict does not fall
        # through to the parent (matching __contains__ semantics).
        try:
            return self.base[x]
        except KeyError:
            return self.parent[x]
"""Base AutoML class."""<import_stmt>logging<import_from_stmt>typing Any<import_from_stmt>typing Dict<import_from_stmt>typing Iterable<import_from_stmt>typing List<import_from_stmt>typing Optional<import_from_stmt>typing Sequence<import_from_stmt>..dataset.base LAMLDataset<import_from_stmt>..dataset.utils concatenate<import_from_stmt>..pipelines.ml.base MLPipeline<import_from_stmt>..reader.base Reader<import_from_stmt>..utils.logging set_stdout_level<import_from_stmt>..utils.logging verbosity_to_loglevel<import_from_stmt>..utils.timer PipelineTimer<import_from_stmt>..validation.utils create_validation_iterator<import_from_stmt>.blend BestModelSelector<import_from_stmt>.blend Blender<line_sep>logger=logging.getLogger(__name__)<class_stmt>AutoML<block_start>"""Class for compile full pipeline of AutoML task. AutoML steps: - Read, analyze data and get inner :class:`~lightautoml.dataset.base.LAMLDataset` from input dataset: performed by reader. - Create validation scheme. - Compute passed ml pipelines from levels. Each element of levels is list of :class:`~lightautoml.pipelines.ml.base.MLPipelines` prediction from current level are passed to next level pipelines as features. - Time monitoring - check if we have enough time to calc new pipeline. - Blend last level models and prune useless pipelines to speedup inference: performed by blender. - Returns prediction on validation data. If crossvalidation scheme is used, out-of-fold prediction will returned. If validation data is passed it will return prediction on validation dataset. In case of cv scheme when some point of train data never was used as validation (ex. timeout exceeded or custom cv iterator like :class:`~lightautoml.validation.np_iterators.TimeSeriesIterator` was used) NaN for this point will be returned. Example: Common usecase - create custom pipelines or presets. 
>>> reader = SomeReader() >>> pipe = MLPipeline([SomeAlgo()]) >>> levels = [[pipe]] >>> automl = AutoML(reader, levels, ) >>> automl.fit_predict(data, roles={'target': 'TARGET'}) """<def_stmt>__init__ self reader:Reader levels:Sequence[Sequence[MLPipeline]] timer:Optional[PipelineTimer]=<none> blender:Optional[Blender]=<none> skip_conn:bool=<false> return_all_predictions:bool=<false> <block_start>""" Args: reader: Instance of Reader class object that creates :class:`~lightautoml.dataset.base.LAMLDataset` from input data. levels: List of list of :class:`~lightautoml.pipelines.ml..base.MLPipelines`. timer: Timer instance of :class:`~lightautoml.utils.timer.PipelineTimer`. Default - unlimited timer. blender: Instance of Blender. Default - :class:`~lightautoml.automl.blend.BestModelSelector`. skip_conn: True if we should pass first level input features to next levels. Note: There are several verbosity levels: - `0`: No messages. - `1`: Warnings. - `2`: Info. - `3`: Debug. """<line_sep>self._initialize(reader levels timer blender skip_conn return_all_predictions)<block_end><def_stmt>_initialize self reader:Reader levels:Sequence[Sequence[MLPipeline]] timer:Optional[PipelineTimer]=<none> blender:Optional[Blender]=<none> skip_conn:bool=<false> return_all_predictions:bool=<false> <block_start>"""Same as __init__. Exists for delayed initialization in presets. Args: reader: Instance of Reader class object that creates :class:`~lightautoml.dataset.base.LAMLDataset` from input data. levels: List of list of :class:`~lightautoml.pipelines.ml..base.MLPipelines`. timer: Timer instance of :class:`~lightautoml.utils.timer.PipelineTimer`. Default - unlimited timer. blender: Instance of Blender. Default - :class:`~lightautoml.automl.blend.BestModelSelector`. skip_conn: True if we should pass first level input features to next levels. return_all_predictions: True if we should return all predictions from last level models. verbose: Controls the verbosity: the higher, the more messages. 
<1 : messages are not displayed; >=1 : the computation process for layers is displayed; >=2 : the information about folds processing is also displayed; >=3 : the hyperparameters optimization process is also displayed; >=4 : the training process for every algorithm is displayed; """<assert_stmt>len(levels)<g>0 "At least 1 level should be defined"<line_sep>self.timer=timer<if_stmt>timer<is><none><block_start>self.timer=PipelineTimer()<block_end>self.reader=reader<line_sep>self._levels=levels<line_sep># default blender is - select best model and prune other pipes self.blender=blender<if_stmt>blender<is><none><block_start>self.blender=BestModelSelector()<block_end># update model names <for_stmt>i,lvl enumerate(self._levels)<block_start><for_stmt>j,pipe enumerate(lvl)<block_start>pipe.upd_model_names("Lvl_{0}_Pipe_{1}".format(i j))<block_end><block_end>self.skip_conn=skip_conn<line_sep>self.return_all_predictions=return_all_predictions<block_end><def_stmt>fit_predict self train_data:Any roles:dict train_features:Optional[Sequence[str]]=<none> cv_iter:Optional[Iterable]=<none> valid_data:Optional[Any]=<none> valid_features:Optional[Sequence[str]]=<none> verbose:int=0 <arrow>LAMLDataset<block_start>"""Fit on input data and make prediction on validation part. Args: train_data: Dataset to train. roles: Roles dict. train_features: Optional features names, if cannot be inferred from train_data. cv_iter: Custom cv iterator. For example, :class:`~lightautoml.validation.np_iterators.TimeSeriesIterator`. valid_data: Optional validation dataset. valid_features: Optional validation dataset features if can't be inferred from `valid_data`. Returns: Predicted values. 
"""<line_sep>set_stdout_level(verbosity_to_loglevel(verbose))<line_sep>self.timer.start()<line_sep>train_dataset=self.reader.fit_read(train_data train_features roles)<assert_stmt>(len(self._levels)<le>1<or>train_dataset.folds<is><not><none>) "Not possible to fit more than 1 level without cv folds"<assert_stmt>(len(self._levels)<le>1<or>valid_data<is><none>) "Not possible to fit more than 1 level with holdout validation"<line_sep>valid_dataset=<none><if_stmt>valid_data<is><not><none><block_start>valid_dataset=self.reader.read(valid_data valid_features add_array_attrs=<true>)<block_end>train_valid=create_validation_iterator(train_dataset valid_dataset n_folds=<none> cv_iter=cv_iter)<line_sep># for pycharm) level_predictions=<none><line_sep>pipes=<none><line_sep>self.levels=[]<for_stmt>leven_number,level enumerate(self._levels 1)<block_start>pipes=[]<line_sep>level_predictions=[]<line_sep>flg_last_level=leven_number<eq>len(self._levels)<line_sep>logger.info(f"Layer \x1b[1m{leven_number}\x1b[0m train process start. Time left {self.timer.time_left:.2f} secs")<for_stmt>k,ml_pipe enumerate(level)<block_start>pipe_pred=ml_pipe.fit_predict(train_valid)<line_sep>level_predictions.append(pipe_pred)<line_sep>pipes.append(ml_pipe)<line_sep>logger.info("Time left {:.2f} secs\n".format(self.timer.time_left))<if_stmt>self.timer.time_limit_exceeded()<block_start>logger.info("Time limit exceeded. Last level models will be blended and unused pipelines will be pruned.\n")<line_sep>flg_last_level=<true><line_sep><break><block_end><block_end><else_stmt><block_start><if_stmt>self.timer.child_out_of_time<block_start>logger.info("Time limit exceeded in one of the tasks. 
AutoML will blend level {0} models.\n".format(leven_number))<line_sep>flg_last_level=<true><block_end><block_end>logger.info("\x1b[1mLayer {} training completed.\x1b[0m\n".format(leven_number))<line_sep># here is split on exit condition <if_stmt><not>flg_last_level<block_start>self.levels.append(pipes)<line_sep>level_predictions=concatenate(level_predictions)<if_stmt>self.skip_conn<block_start>valid_part=train_valid.get_validation_data()<try_stmt># convert to initital dataset type <block_start>level_predictions=valid_part.from_dataset(level_predictions)<block_end><except_stmt>TypeError<block_start><raise>TypeError("Can not convert prediction dataset type to input features. Set skip_conn=False")<block_end>level_predictions=concatenate([level_predictions valid_part])<block_end>train_valid=create_validation_iterator(level_predictions <none> n_folds=<none> cv_iter=<none>)<block_end><else_stmt><block_start><break><block_end><block_end>blended_prediction,last_pipes=self.blender.fit_predict(level_predictions pipes)<line_sep>self.levels.append(last_pipes)<line_sep>self.reader.upd_used_features(remove=list(set(self.reader.used_features)-set(self.collect_used_feats())))<del_stmt>self._levels<if_stmt>self.return_all_predictions<block_start><return>concatenate(level_predictions)<block_end><return>blended_prediction<block_end><def_stmt>predict self data:Any features_names:Optional[Sequence[str]]=<none> return_all_predictions:Optional[bool]=<none> <arrow>LAMLDataset<block_start>"""Predict with automl on new dataset. Args: data: Dataset to perform inference. features_names: Optional features names, if cannot be inferred from `train_data`. return_all_predictions: if True, returns all model predictions from last level Returns: Dataset with predictions. 
"""<line_sep>dataset=self.reader.read(data features_names=features_names add_array_attrs=<false>)<for_stmt>n,level enumerate(self.levels 1)# check if last level <block_start>level_predictions=[]<for_stmt>_n,ml_pipe enumerate(level)<block_start>level_predictions.append(ml_pipe.predict(dataset))<block_end><if_stmt>n<ne>len(self.levels)<block_start>level_predictions=concatenate(level_predictions)<if_stmt>self.skip_conn<block_start><try_stmt># convert to initital dataset type <block_start>level_predictions=dataset.from_dataset(level_predictions)<block_end><except_stmt>TypeError<block_start><raise>TypeError("Can not convert prediction dataset type to input features. Set skip_conn=False")<block_end>dataset=concatenate([level_predictions dataset])<block_end><else_stmt><block_start>dataset=level_predictions<block_end><block_end><else_stmt><block_start><if_stmt>(return_all_predictions<is><none><and>self.return_all_predictions)<or>return_all_predictions<block_start><return>concatenate(level_predictions)<block_end><return>self.blender.predict(level_predictions)<block_end><block_end><block_end><def_stmt>collect_used_feats self<arrow>List[str]<block_start>"""Get feats that automl uses on inference. Returns: Features names list. """<line_sep>used_feats=set()<for_stmt>lvl self.levels<block_start><for_stmt>pipe lvl<block_start>used_feats.update(pipe.used_features)<block_end><block_end>used_feats=list(used_feats)<line_sep><return>used_feats<block_end><def_stmt>collect_model_stats self<arrow>Dict[str int]<block_start>"""Collect info about models in automl. Returns: Dict with models and its runtime numbers. """<line_sep>model_stats={}<for_stmt>lvl self.levels<block_start><for_stmt>pipe lvl<block_start><for_stmt>ml_algo pipe.ml_algos<block_start>model_stats[ml_algo.name]=len(ml_algo.models)<block_end><block_end><block_end><return>model_stats<block_end><block_end>
<def_stmt>extractBersekerTranslations item<block_start>""" """<line_sep>vol,chp,frag,postfix=extractVolChapterFragmentPostfix(item['title'])<if_stmt>'Because the world has changed into a death game is funny'<in>item['tags']<and>(chp<or>vol<or>'Prologue'<in>postfix)<block_start><return>buildReleaseMessageWithType(item 'Sekai ga death game ni natta no de tanoshii desu' vol chp frag=frag postfix=postfix)<block_end><return><false><block_end>
# coding=utf-8 """ unit test """<import_stmt>os<import_stmt>sys<import_stmt>logging<import_from_stmt>psutil Popen<import_from_stmt>os.path join<import_from_stmt>bzt TaurusNetworkError<import_from_stmt>bzt.utils log_std_streams get_uniq_name JavaVM ToolError is_windows HTTPClient BetterDict<import_from_stmt>bzt.utils ensure_is_dict Environment temp_file communicate<import_from_stmt>tests.unit BZTestCase RESOURCES_DIR<import_from_stmt>tests.unit.mocks MockFileReader<class_stmt>MockPopen(object)<block_start><def_stmt>__init__ self out err<block_start>self.out=out<line_sep>self.err=err<block_end><def_stmt>communicate self<block_start><return>self.out self.err<block_end><block_end><class_stmt>TestEnvironment(BZTestCase)<block_start><def_stmt>test_nesting self<block_start>v1='val_param_name'<line_sep>v2='path_param_name'<line_sep>v3='const_val'<line_sep>os.environ[v1]='v1.1'<line_sep>os.environ[v2]='v1.2'<line_sep>os.environ[v3]='v1.3'<line_sep>e1=Environment()<line_sep>e1.set({v1:'local_val1.1'})<line_sep>e1.add_path({v2:'param_val1.1'} finish=<true>)<line_sep>e2=Environment(parent=e1)<line_sep>e1.add_path({v2:'param_val1.3'} finish=<true>)<line_sep>os.environ[v1]='v2.1'<line_sep>os.environ[v2]='v2.2'<line_sep>os.environ[v3]='v2.3'<line_sep>e1.set({v1:'local_val1.2'})<line_sep>e2.add_path({v2:'param_val1.2'} finish=<true>)<line_sep>self.assertEqual(e1.get(v1) 'local_val1.2')<line_sep>self.assertEqual(e2.get(v1) 'local_val1.1')<line_sep>self.assertEqual(e1.get(v2) os.pathsep.join(('v2.2' 'param_val1.1' 'param_val1.3')))<line_sep>self.assertEqual(e2.get(v2) os.pathsep.join(('v2.2' 'param_val1.1' 'param_val1.2')))<line_sep>self.assertEqual(e1.get(v3) 'v2.3')<line_sep>self.assertEqual(e2.get(v3) 'v2.3')<block_end><block_end><class_stmt>TestBetterDict(BZTestCase)<block_start><def_stmt>_merge_and_compare self first second 
result<block_start>sample=BetterDict().merge(first)<line_sep>sample.merge(second)<line_sep>result=BetterDict().merge(result)<line_sep>self.assertEqual(sample result)<block_end><def_stmt>_filter_and_compare self first second result black_list=<false><block_start>sample=BetterDict().merge(first)<line_sep>sample.filter(second black_list=black_list)<line_sep>result=BetterDict().merge(result)<line_sep>self.assertEqual(sample result)<block_end><def_stmt>test_merge_configs self<block_start>a={"modules":{"local":"class_name"}}<line_sep>b={"modules":{"local":{"class":"another_class"}}}<line_sep>res=BetterDict()<line_sep>res.merge(a)<line_sep>res.merge(b)<line_sep>self.assertEqual(BetterDict.__name__ type(res["modules"]["local"]).__name__)<line_sep>modules=res["modules"]<line_sep>ensure_is_dict(modules "local" "class")<line_sep>self.assertEqual("another_class" res["modules"]["local"]["class"])<block_end><def_stmt>test_merge_del self<block_start>a={"A":["B" "C"] "B":{"A":"vA"}}<line_sep>b={"^A":{"^D":"E"} "^X":"Y"}<line_sep>res={"B":{"A":"vA"}}<line_sep>self._merge_and_compare(a b res)<block_end><def_stmt>test_merge_overwrite self<block_start>a={"A":["B" "C"] "B":{"A":"vA"}}<line_sep>b={"~B":{"~C":"vC"}}<line_sep>res={"A":["B" "C"] "B":{"C":"vC"}}<line_sep>self._merge_and_compare(a b res)<block_end><def_stmt>test_merge_list_elements self<block_start>a={"A":["B" "C"] "B":{"A":"vA"} "D":["E" "F"]}<line_sep>b={"$A":["nB"] "$B":{"nC":"vC"} "$C":["D"]}<line_sep>res={"A":["nB" "C"] "B":{"A":"vA" "nC":"vC"} "D":["E" "F"] "C":["D"]}<line_sep>self._merge_and_compare(a b res)<block_end><def_stmt>test_filter_wl0 self<block_start>a={"A":<false> "C":{"D":"E" "G":"GG"} "F":["FF"]}<line_sep>b={"A":<true> "!C":{"G":"H"}}<line_sep>res={"A":<false> "C":{"D":"E"}}<line_sep>self._filter_and_compare(a b res)<block_end><def_stmt>test_filter_wl1 self<block_start>a={"A":["B" "BB"] "C":{"D":"E" "G":"GG"} "F":["FF"]}<line_sep>b={"A":<true> "!C":{"G":"H"}}<line_sep>res={"A":["B" "BB"] 
"C":{"D":"E"}}<line_sep>self._filter_and_compare(a b res)<block_end><def_stmt>test_filter_wl2 self<block_start>a={"A":"B" "C":{"D":"E"}}<line_sep>b={"A":{"B":"C"} "C":<true>}<line_sep>res={"C":{"D":"E"}}<line_sep>self._filter_and_compare(a b res)<block_end><def_stmt>test_filter_bl0 self<block_start>a={"A":["B" "BB"] "C":{"D":"E" "G":"GG"} "F":["FF"]}<line_sep>b={"A":<true> "!C":{"G":"H"}}<line_sep>res={"F":["FF"] "C":{"G":"GG"}}<line_sep>self._filter_and_compare(a b res black_list=<true>)<block_end><def_stmt>test_filter_bl1 self<block_start>a={"A":"B" "C":{"D":"E"}}<line_sep>b={"A":{"B":"C"} "C":<true>}<line_sep>res={"A":"B"}<line_sep>self._filter_and_compare(a b res black_list=<true>)<block_end><block_end><class_stmt>TestMisc(BZTestCase)<block_start><def_stmt>test_communicate self<block_start>self.sniff_log()<line_sep>out=b'\xf1\xe5\xedoutput'# on py2 bytes is just str synonym err=b'\xf1\xe5\xederror'<line_sep>obj=MockPopen(out err)<line_sep>output=communicate(obj)<line_sep>self.assertEqual(output ("output" "error"))<block_end><block_end><class_stmt>TestJavaVM(BZTestCase)<block_start><def_stmt>test_missed_tool self<block_start>self.obj=JavaVM()<line_sep>self.obj.tool_path='java-not-found'<line_sep>self.assertEqual(<false> self.obj.check_if_installed())<line_sep>self.obj.install()<block_end><def_stmt>test_missed_req_tool self<block_start>self.obj=JavaVM()<line_sep>self.obj.tool_path='java-not-found'<line_sep>self.obj.mandatory=<true><line_sep>self.assertEqual(<false> self.obj.check_if_installed())<line_sep>self.assertRaises(ToolError self.obj.install)<block_end><def_stmt>test_get_version self<block_start>self.obj=JavaVM()<line_sep>out1="openjdk version \"10.0.1\" 2018-04-17\nOpenJDK Runtime Environment (build "<concat>"10.0.1+10-Ubuntu-3ubuntu1)\nOpenJDK 64-Bit Server VM (build 10.0.1+10-Ubuntu-3ubuntu1, mixed mode)"<line_sep>out2="java version \"1.8.0_151\"\nJava(TM) SE Runtime Environment (build 1.8.0_151-b12)\n"<concat>"Java HotSpot(TM) 64-Bit Server VM (build 
25.151-b12, mixed mode)"<line_sep>self.assertEqual("10" self.obj._get_version(out1))<line_sep>self.assertEqual("8" self.obj._get_version(out2))<block_end><block_end><class_stmt>TestLogStreams(BZTestCase)<block_start><def_stmt>test_streams self<block_start>self.sniff_log()<line_sep>print('test1')<with_stmt>log_std_streams(logger=self.captured_logger stdout_level=logging.DEBUG)<block_start>print('test2')<block_end><with_stmt>log_std_streams(stdout_level=logging.DEBUG)<block_start>print('test3')<block_end><with_stmt>log_std_streams(stdout_level=logging.DEBUG)<block_start>sys.stdout.write('test3')<block_end><with_stmt>log_std_streams(logger=self.captured_logger stdout_level=logging.DEBUG)<block_start>cmd=['echo' '"test5"']<if_stmt>is_windows()<block_start>cmd=['cmd' '/c']+cmd<block_end>process=Popen(cmd)<line_sep>process.wait()<block_end>missed_file=get_uniq_name('.' 'test6' '')<with_stmt>log_std_streams(logger=self.captured_logger stderr_level=logging.WARNING)<block_start><if_stmt>is_windows()<block_start>cmd=['cmd' '/c' 'dir']<block_end><else_stmt><block_start>cmd=['ls']<block_end>process=Popen(cmd+[missed_file])<line_sep>process.wait()<block_end>debug_buf=self.log_recorder.debug_buff.getvalue()<line_sep>warn_buf=self.log_recorder.warn_buff.getvalue()<line_sep>self.assertNotIn('test1' debug_buf)<line_sep>self.assertIn('test2' debug_buf)<line_sep>self.assertNotIn('test3' debug_buf)<line_sep>self.assertIn('test5' debug_buf)<line_sep>self.assertTrue(len(warn_buf)<g>0)<block_end><block_end><class_stmt>TestFileReader(BZTestCase)<block_start><def_stmt>setUp self<block_start>super(TestFileReader self).setUp()<line_sep>self.obj=MockFileReader()<block_end><def_stmt>configure self file_name<block_start>self.obj.name=file_name<block_end><def_stmt>tearDown self<block_start><if_stmt>self.obj<and>self.obj.fds<block_start>self.obj.fds.close()<block_end>super(TestFileReader self).tearDown()<block_end><def_stmt>test_file_len self<block_start>self.configure(join(RESOURCES_DIR 'jmeter' 
'jtl' 'file.notfound'))<line_sep>self.sniff_log(self.obj.log)<line_sep>list(self.obj.get_lines(size=1))<line_sep>self.assertIn('File not appeared yet' self.log_recorder.debug_buff.getvalue())<line_sep>self.obj.name=join(RESOURCES_DIR 'jmeter' 'jtl' 'unicode.jtl')<line_sep>lines=list(self.obj.get_lines(size=1))<line_sep>self.assertEqual(1 len(lines))<line_sep>lines=list(self.obj.get_lines(last_pass=<true>))<line_sep>self.assertEqual(13 len(lines))<line_sep>self.assertTrue(all(l.endswith('\n')<for>l lines))<block_end><def_stmt>test_decode self<block_start>old_string="Тест.Эхо"<line_sep>gen_file_name=temp_file()<line_sep>mod_str=old_string+'\n'<with_stmt>open(gen_file_name 'wb')<as>fd# use target system encoding for writing <block_start>fd.write(mod_str.encode(self.obj.SYS_ENCODING))<block_end># important on win where it's not 'utf-8' <try_stmt><block_start>self.configure(gen_file_name)<line_sep>self.assertEqual('utf-8' self.obj.cp)<line_sep>lines=list(self.obj.get_lines(<true>))<line_sep>self.assertEqual(self.obj.SYS_ENCODING self.obj.cp)# on win self.obj.cp must be changed during of self.assertEqual(1 len(lines))# reading (see MockFileReader) new_string=lines[0].rstrip()<line_sep>self.assertEqual(old_string new_string)<block_end><finally_stmt><block_start><if_stmt>self.obj.fds<block_start>self.obj.fds.close()<block_end>os.remove(gen_file_name)<block_end><block_end><def_stmt>test_decode_crash self<block_start>self.configure(join(RESOURCES_DIR 'jmeter' 'jtl' 'unicode.jtl'))<line_sep>self.obj.get_bytes(size=180)<block_end><block_end># shouldn't crash with UnicodeDecodeError <class_stmt>TestHTTPClient(BZTestCase)<block_start><def_stmt>test_proxy_setup self<block_start>obj=HTTPClient()<line_sep>obj.add_proxy_settings({"address":"http://localhost:3128" "username":"me" "password":"<PASSWORD>"})<line_sep>self.assertIn('http' obj.session.proxies)<line_sep>self.assertIn('https' obj.session.proxies)<line_sep>self.assertEqual(obj.session.proxies['http'] 
'http://me:too@localhost:3128')<line_sep>self.assertEqual(obj.session.proxies['https'] 'http://me:too@localhost:3128')<block_end><def_stmt>test_proxy_ssl_cert self<block_start>obj=HTTPClient()<line_sep>obj.add_proxy_settings({"ssl-cert":"i am server side cert" "ssl-client-cert":"i am client side cert"})<line_sep>self.assertEqual(obj.session.verify 'i am server side cert')<line_sep>self.assertEqual(obj.session.cert 'i am client side cert')<block_end><def_stmt>test_jvm_args self<block_start>obj=HTTPClient()<line_sep>obj.add_proxy_settings({"address":"http://localhost:3128" "username":"me" "password":"<PASSWORD>"})<line_sep>jvm_args=obj.get_proxy_props()<for_stmt>protocol ['http' 'https']<block_start><for_stmt>key ['proxyHost' 'proxyPort' 'proxyUser' 'proxyPass']<block_start>combo_key=protocol+'.'+key<line_sep>self.assertIn(combo_key jvm_args)<block_end><block_end><block_end><def_stmt>test_download_file self<block_start>obj=HTTPClient()<line_sep>tmpfile=temp_file()<line_sep>obj.download_file('http://localhost:8000/' tmpfile)<line_sep>self.assertTrue(os.path.exists(tmpfile))<with_stmt>open(tmpfile)<as>fds<block_start>contents=fds.read()<block_end>self.assertGreaterEqual(len(contents) 0)<block_end><def_stmt>test_download_404 self<block_start>obj=HTTPClient()<line_sep>tmpfile=temp_file()<line_sep>self.assertRaises(TaurusNetworkError <lambda>:obj.download_file('http://localhost:8000/404' tmpfile))<block_end><def_stmt>test_download_fail self<block_start>obj=HTTPClient()<line_sep>tmpfile=temp_file()<line_sep>self.assertRaises(TaurusNetworkError <lambda>:obj.download_file('http://non.existent.com/' tmpfile))<block_end><def_stmt>test_request self<block_start>obj=HTTPClient()<line_sep>resp=obj.request('GET' 'http://localhost:8000/')<line_sep>self.assertTrue(resp.ok)<block_end><def_stmt>test_request_fail self<block_start>obj=HTTPClient()<line_sep>self.assertRaises(TaurusNetworkError <lambda>:obj.request('GET' 'http://non.existent.com/'))<block_end><block_end>
# -*- coding: utf8 -*- # Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # CAM签名/鉴权错误。 AUTHFAILURE='AuthFailure'<line_sep># 用户没有权限进行此查询操作。 AUTHFAILURE_CHECKRESOURCERESPONSECODEERROR='AuthFailure.CheckResourceResponseCodeError'<line_sep># 未授权操作。 AUTHFAILURE_UNAUTHORIZEDOPERATION='AuthFailure.UnauthorizedOperation'<line_sep># 操作失败。 FAILEDOPERATION='FailedOperation'<line_sep># 下载音频文件失败。 FAILEDOPERATION_ERRORDOWNFILE='FailedOperation.ErrorDownFile'<line_sep># 识别失败。 FAILEDOPERATION_ERRORRECOGNIZE='FailedOperation.ErrorRecognize'<line_sep># 错误的TaskId。 FAILEDOPERATION_NOSUCHTASK='FailedOperation.NoSuchTask'<line_sep># 账号因为欠费停止服务,请在腾讯云账户充值。 FAILEDOPERATION_SERVICEISOLATE='FailedOperation.ServiceIsolate'<line_sep># 账号本月免费额度已用完。 FAILEDOPERATION_USERHASNOFREEAMOUNT='FailedOperation.UserHasNoFreeAmount'<line_sep># 服务未开通,请在腾讯云官网语音识别控制台开通服务。 FAILEDOPERATION_USERNOTREGISTERED='FailedOperation.UserNotRegistered'<line_sep># 内部错误。 INTERNALERROR='InternalError'<line_sep># 初始化配置失败。 INTERNALERROR_ERRORCONFIGURE='InternalError.ErrorConfigure'<line_sep># 创建日志失败。 INTERNALERROR_ERRORCREATELOG='InternalError.ErrorCreateLog'<line_sep># 下载音频文件失败。 INTERNALERROR_ERRORDOWNFILE='InternalError.ErrorDownFile'<line_sep># 新建数组失败。 INTERNALERROR_ERRORFAILNEWPREQUEST='InternalError.ErrorFailNewprequest'<line_sep># 写入数据库失败。 INTERNALERROR_ERRORFAILWRITETODB='InternalError.ErrorFailWritetodb'<line_sep># 文件无法打开。 
INTERNALERROR_ERRORFILECANNOTOPEN='InternalError.ErrorFileCannotopen'<line_sep># 获取路由失败。 INTERNALERROR_ERRORGETROUTE='InternalError.ErrorGetRoute'<line_sep># 创建日志路径失败。 INTERNALERROR_ERRORMAKELOGPATH='InternalError.ErrorMakeLogpath'<line_sep># 识别失败。 INTERNALERROR_ERRORRECOGNIZE='InternalError.ErrorRecognize'<line_sep># 访问数据库失败。 INTERNALERROR_FAILACCESSDATABASE='InternalError.FailAccessDatabase'<line_sep># 访问Redis失败。 INTERNALERROR_FAILACCESSREDIS='InternalError.FailAccessRedis'<line_sep># 参数错误。 INVALIDPARAMETER='InvalidParameter'<line_sep># 请求数据长度无效。 INVALIDPARAMETER_ERRORCONTENTLENGTH='InvalidParameter.ErrorContentlength'<line_sep># 参数不全。 INVALIDPARAMETER_ERRORPARAMSMISSING='InvalidParameter.ErrorParamsMissing'<line_sep># 解析请求数据失败。 INVALIDPARAMETER_ERRORPARSEQUEST='InvalidParameter.ErrorParsequest'<line_sep># 文件编码错误。 INVALIDPARAMETER_FILEENCODE='InvalidParameter.FileEncode'<line_sep># 非法的词表状态。 INVALIDPARAMETER_INVALIDVOCABSTATE='InvalidParameter.InvalidVocabState'<line_sep># 该模型状态不允许删除。 INVALIDPARAMETER_MODELSTATE='InvalidParameter.ModelState'<line_sep># 参数取值错误。 INVALIDPARAMETERVALUE='InvalidParameterValue'<line_sep># AppId无效。 INVALIDPARAMETERVALUE_ERRORINVALIDAPPID='InvalidParameterValue.ErrorInvalidAppid'<line_sep># ClientIp无效。 INVALIDPARAMETERVALUE_ERRORINVALIDCLIENTIP='InvalidParameterValue.ErrorInvalidClientip'<line_sep># EngSerViceType无效。 INVALIDPARAMETERVALUE_ERRORINVALIDENGSERVICE='InvalidParameterValue.ErrorInvalidEngservice'<line_sep># ProjectId无效。 INVALIDPARAMETERVALUE_ERRORINVALIDPROJECTID='InvalidParameterValue.ErrorInvalidProjectid'<line_sep># RequestId无效。 INVALIDPARAMETERVALUE_ERRORINVALIDREQUESTID='InvalidParameterValue.ErrorInvalidRequestid'<line_sep># SourceType无效。 INVALIDPARAMETERVALUE_ERRORINVALIDSOURCETYPE='InvalidParameterValue.ErrorInvalidSourcetype'<line_sep># SubserviceType无效。 INVALIDPARAMETERVALUE_ERRORINVALIDSUBSERVICETYPE='InvalidParameterValue.ErrorInvalidSubservicetype'<line_sep># Url无效。 
INVALIDPARAMETERVALUE_ERRORINVALIDURL='InvalidParameterValue.ErrorInvalidUrl'<line_sep># UsrAudioKey无效。 INVALIDPARAMETERVALUE_ERRORINVALIDUSERAUDIOKEY='InvalidParameterValue.ErrorInvalidUseraudiokey'<line_sep># 音频编码格式不支持。 INVALIDPARAMETERVALUE_ERRORINVALIDVOICEFORMAT='InvalidParameterValue.ErrorInvalidVoiceFormat'<line_sep># 音频数据无效。 INVALIDPARAMETERVALUE_ERRORINVALIDVOICEDATA='InvalidParameterValue.ErrorInvalidVoicedata'<line_sep># 音频时长超过限制。 INVALIDPARAMETERVALUE_ERRORVOICEDATATOOLONG='InvalidParameterValue.ErrorVoicedataTooLong'<line_sep># 非法的参数长度。 INVALIDPARAMETERVALUE_INVALIDPARAMETERLENGTH='InvalidParameterValue.InvalidParameterLength'<line_sep># 非法的VocabId。 INVALIDPARAMETERVALUE_INVALIDVOCABID='InvalidParameterValue.InvalidVocabId'<line_sep># 非法的词表状态。 INVALIDPARAMETERVALUE_INVALIDVOCABSTATE='InvalidParameterValue.InvalidVocabState'<line_sep># 词权重不合法。 INVALIDPARAMETERVALUE_INVALIDWORDWEIGHT='InvalidParameterValue.InvalidWordWeight'<line_sep># 非法的WordWeightStr。 INVALIDPARAMETERVALUE_INVALIDWORDWEIGHTSTR='InvalidParameterValue.InvalidWordWeightStr'<line_sep># 模型不存在。 INVALIDPARAMETERVALUE_MODELID='InvalidParameterValue.ModelId'<line_sep># 非法的模型状态。 INVALIDPARAMETERVALUE_TOSTATE='InvalidParameterValue.ToState'<line_sep># 超过配额限制。 LIMITEXCEEDED='LimitExceeded'<line_sep># 自学习模型创建个数已到限制。 LIMITEXCEEDED_CUSTOMIZATIONFULL='LimitExceeded.CustomizationFull'<line_sep># 上线模型个数已到限制。 LIMITEXCEEDED_ONLINEFULL='LimitExceeded.OnlineFull'<line_sep># 热词表数量已到账号限制。 LIMITEXCEEDED_VOCABFULL='LimitExceeded.VocabFull'<line_sep># 缺少参数错误。 MISSINGPARAMETER='MissingParameter'<line_sep># 请求的次数超过了频率限制。 REQUESTLIMITEXCEEDED='RequestLimitExceeded'<line_sep># 未知参数错误。 UNKNOWNPARAMETER='UnknownParameter'<line_sep>
<import_stmt>torch<import_stmt>pickle<import_stmt>numpy<as>np<import_stmt>pandas<as>pd<import_from_stmt>config model_config<as>config<import_from_stmt>sklearn.metrics confusion_matrix accuracy_score f1_score precision_score recall_score<import_stmt>itertools<import_stmt>matplotlib.pyplot<as>plt<def_stmt>load_data batched=<true> test=<false> file_dir='../../data/combined/combined_features.pkl'<block_start>bs=config['batch_size']<line_sep>ftype='test'<if>test<else>'train'<with_stmt>open('{}'.format(file_dir) 'rb')<as>f<block_start>features=pickle.load(f)<block_end>x=features['x_{}'.format(ftype)]<line_sep>y=features['y_{}'.format(ftype)]<line_sep>data=(x y)<if_stmt>test<or><not>batched<block_start><return>[torch.FloatTensor(data[0]) torch.LongTensor(data[1])]<block_end>data=list(zip(data[0] data[1]))<line_sep>n_iters=len(data)<floordiv>bs<line_sep>batches=[]<for_stmt>i range(1 n_iters+1)<block_start>input_batch=[]<line_sep>output_batch=[]<for_stmt>e data[bs<times>(i-1):bs<times>i]<block_start>input_batch.append(e[0])<line_sep>output_batch.append(e[1])<block_end>batches.append([torch.FloatTensor(input_batch) torch.LongTensor(output_batch)])<block_end><return>batches<block_end><def_stmt>evaluate targets predictions<block_start>performance={'acc':accuracy_score(targets predictions) 'f1':f1_score(targets predictions average='macro') 'precision':precision_score(targets predictions average='macro') 'recall':recall_score(targets predictions average='macro')}<line_sep><return>performance<block_end><def_stmt>plot_confusion_matrix targets predictions classes normalize=<false> title='Confusion matrix' cmap=plt.cm.Blues<block_start>""" This function prints and plots the confusion matrix. Normalization can be applied by setting `normalize=True`. 
"""<line_sep># plt.figure(figsize=(8,8)) cm=confusion_matrix(targets predictions)<line_sep>plt.imshow(cm interpolation='nearest' cmap=cmap)<line_sep>plt.title(title)<line_sep>plt.colorbar()<line_sep>tick_marks=np.arange(len(classes))<line_sep>plt.xticks(tick_marks classes rotation=45)<line_sep>plt.yticks(tick_marks classes)<if_stmt>normalize<block_start>cm=cm.astype('float')/cm.sum(axis=1)[: np.newaxis]<line_sep>print("Normalized confusion matrix")<block_end><else_stmt><block_start>print('Confusion matrix, without normalization')<block_end>print(cm)<line_sep>thresh=cm.max()/2.<for_stmt>i,j itertools.product(range(cm.shape[0]) range(cm.shape[1]))<block_start>plt.text(j i cm[i j] horizontalalignment="center" color="white"<if>cm[i j]<g>thresh<else>"black")<block_end>plt.tight_layout()<line_sep>plt.ylabel('True label')<line_sep>plt.xlabel('Predicted label')<block_end>
# Generated by Django 2.2.4 on 2019-11-28 02:21 <import_from_stmt>django.db migrations models<import_stmt>django.db.models.deletion<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[('promgen' '0010_app_label_migration') ]<line_sep>operations=[migrations.AddField(model_name='alert' name='error_count' field=models.PositiveIntegerField(default=0) ) migrations.AddField(model_name='alert' name='sent_count' field=models.PositiveIntegerField(default=0) ) migrations.CreateModel(name='AlertError' fields=[('id' models.AutoField(auto_created=<true> primary_key=<true> serialize=<false> verbose_name='ID')) ('message' models.TextField()) ('created' models.DateTimeField(default=django.utils.timezone.now)) ('alert' models.ForeignKey(on_delete=django.db.models.deletion.CASCADE to='promgen.Alert')) ] ) ]<block_end>
<import_from_stmt>bs4 BeautifulSoup<import_stmt>cocrawler.parse<as>parse<import_from_stmt>cocrawler.urls URL<line_sep>test_html=''' <html> <head><title>Foo</title><link href='link.html'></link></head> <body> <a href = "foo1.html">Anchor 1</a> <a href = foo2.htm>Anchor 2</a> <a href='foo3.html '>Anchor 3</a> <img src=foo.gif /> <a href='torture" <url>'>torture anchor</a> </body> '''<line_sep>test_html_harder=''' <html> <head></head> <body> <iframe src="iframe.html"></iframe> <iframe src=""></iframe> <link href="stylesheet.blah" rel="stylesheet"> <link href="" rel="stylesheet"> <link href="http://example.com" rel="prefetch"> <link href="do-not-crash-1"> <link href="do-not-crash-2" rel="one" rel="two"> <link href=""> </body> '''<line_sep>test_html_no_body=''' <html> <head><title>Foo</title><link href='link.html'></link></head> <a href="foo1.html">Anchor 4</a> <a href=foo2.htm>Anchor 5</a> <a href="foo3.html ">Anchor 6</a> <img src=foo.gif /> '''<line_sep>test_html_no_head=''' <html> <body> <a href="foo1.html">Anchor 7</a> <a href=foo2.htm>Anchor 8</a> <a href="foo3.html ">Anchor 9</a> <img src=foo.gif /> </body> '''<line_sep>test_html_no_nothing=''' <a href="foo1.html">Anchor 10</a> <a href=foo2.htm>Anchor 11</a> <a href="foo3.html ">Anchor 12</a> <img src=foo.gif /> '''<def_stmt>test_do_burner_work_html <block_start>urlj=URL('http://example.com')<line_sep>test_html_bytes=test_html.encode(encoding='utf-8' errors='replace')<line_sep>headers={}<line_sep>links,embeds,sha1,facets,base=parse.do_burner_work_html(test_html test_html_bytes headers url=urlj)<assert_stmt>len(links)<eq>4<assert_stmt>len(embeds)<eq>2<line_sep>linkset=set(u.url<for>u links)<line_sep>embedset=set(e.url<for>e embeds)<assert_stmt>'http://example.com/foo3.html'<in>linkset<assert_stmt>'http://example.com/foo.gif'<in>embedset<assert_stmt>sha1<eq>'sha1:cdcb087d39afd827d5d523e165a6566d65a2e9b3'<assert_stmt>base<is><none><line_sep># as a handwave, let's expect these defective pages to also work. 
test_html_bytes=test_html_no_body.encode(encoding='utf-8' errors='replace')<line_sep>links,embeds,sha1,facets,base=parse.do_burner_work_html(test_html_no_body test_html_bytes headers url=urlj)<assert_stmt>len(links)<eq>3<assert_stmt>len(embeds)<eq>2<line_sep>test_html_bytes=test_html_no_head.encode(encoding='utf-8' errors='replace')<line_sep>links,embeds,sha1,facets,base=parse.do_burner_work_html(test_html_no_head test_html_bytes headers url=urlj)<assert_stmt>len(links)<eq>3<assert_stmt>len(embeds)<eq>1<line_sep>test_html_bytes=test_html_no_nothing.encode(encoding='utf-8' errors='replace')<line_sep>links,embeds,sha1,facets,base=parse.do_burner_work_html(test_html_no_nothing test_html_bytes headers url=urlj)<assert_stmt>len(links)<eq>3<assert_stmt>len(embeds)<eq>1<block_end><def_stmt>test_clean_link_objects <block_start>test=[{'href':'http://example.com'} {'href':'data:46532656'} {'href':'https://example.com'}]<line_sep>ret=[{'href':'http://example.com'} {'href':'https://example.com'}]<assert_stmt>parse.clean_link_objects(test ('data:' 'javascript:'))<eq>ret<block_end><def_stmt>test_individual_parsers <block_start>links,embeds=parse.find_html_links_re(test_html)<assert_stmt>len(links)<eq>6<assert_stmt>len(embeds)<eq>0<line_sep>linkset=set(parse.collapse_links(links))<assert_stmt>'foo2.htm'<in>linkset<assert_stmt>'foo3.html '<in>linkset<assert_stmt>'foo.gif'<in>linkset<assert_stmt>'torture"\n<url>'<in>linkset<line_sep>head,body=parse.split_head_body(test_html)<line_sep>links,embeds=parse.find_body_links_re(body)<assert_stmt>len(links)<eq>4<assert_stmt>len(embeds)<eq>1<line_sep>linkset=set(parse.collapse_links(links))<line_sep>embedset=set(parse.collapse_links(embeds))<assert_stmt>'foo2.htm'<in>linkset<assert_stmt>'foo3.html '<in>linkset<assert_stmt>'torture"\n<url>'<in>linkset<assert_stmt>'foo.gif'<in>embedset<line_sep>links,embeds=parse.find_body_links_anchors_re(body)<assert_stmt>len(links)<eq>4<assert_stmt>len(embeds)<eq>1<line_sep>linkdict=dict([(l['href'] 
l['anchor'])<for>l links])<line_sep># {('foo1.html', 'Anchor 1'), ('foo3.html ', 'Anchor 3'), ('foo2.htm', 'Anchor 2'), ('torture"\n<url>', 'torture\nanchor')} <assert_stmt>linkdict['foo2.htm']<eq>'Anchor 2'<assert_stmt>linkdict['foo3.html ']<eq>'Anchor 3'<assert_stmt>linkdict['torture"\n<url>']<eq>'torture\nanchor'<assert_stmt>'foo.gif'<in>embeds[0]['src']<line_sep>head_soup=BeautifulSoup(head 'lxml')<line_sep>links,embeds=parse.find_head_links_soup(head_soup)<line_sep>embedset=set(parse.collapse_links(embeds))<assert_stmt>len(links)<eq>0<assert_stmt>len(embeds)<eq>1<assert_stmt>'link.html'<in>embedset<line_sep>head_soup=BeautifulSoup(head 'lxml')<line_sep>body_soup=BeautifulSoup(body 'lxml')<line_sep>links,embeds=parse.find_head_links_soup(head_soup)<line_sep>lbody,ebody=parse.find_body_links_soup(body_soup)<line_sep>links<augadd>lbody<line_sep>embeds<augadd>ebody<line_sep>linkset=set(parse.collapse_links(links))<line_sep>embedset=set(parse.collapse_links(embeds))<assert_stmt>len(links)<eq>4<assert_stmt>len(embeds)<eq>2<assert_stmt>'foo2.htm'<in>linkset<assert_stmt>'foo3.html '<in>linkset<assert_stmt>'torture"\n<url>'<in>linkset<assert_stmt>'link.html'<in>embedset<assert_stmt>'foo.gif'<in>embedset<line_sep>head,body=parse.split_head_body(test_html_harder)<line_sep>body_soup=BeautifulSoup(body 'lxml')<line_sep>lbody,ebody=parse.find_body_links_soup(body_soup)<assert_stmt>len(lbody)<eq>1<assert_stmt>len(ebody)<eq>1<assert_stmt>'iframe.html'<eq>lbody[0]['src']<assert_stmt>'stylesheet.blah'<eq>ebody[0]['href']<block_end>test_css=''' @import url('foo1.css') url(images/foo2.png) url( images/foo3.png ) '''<def_stmt>test_css_parser <block_start>links,embeds=parse.find_css_links_re(test_css)<assert_stmt>len(links)<eq>0<assert_stmt>len(embeds)<eq>3<assert_stmt>'images/foo3.png'<in>embeds<block_end><def_stmt>test_split_head_body <block_start>''' Whitebox test of the heuristics in this function 
'''<line_sep>head,body=parse.split_head_body('x'<times>100000)<assert_stmt>head<eq>''<assert_stmt>len(body)<eq>100000<line_sep>head,body=parse.split_head_body('x'+'<HeAd>'+'x'<times>100000)<assert_stmt>head<eq>''<assert_stmt>len(body)<eq>100007<line_sep>head,body=parse.split_head_body('x'+'</HeAd>'+'x'<times>100000)<assert_stmt>head<eq>'x'<assert_stmt>len(body)<eq>100000<line_sep>head,body=parse.split_head_body('x'+'<BoDy>'+'x'<times>100000)<assert_stmt>head<eq>'x'<assert_stmt>len(body)<eq>100000<line_sep>head,body=parse.split_head_body('x'+'<heAd><boDy>'+'x'<times>100000)<assert_stmt>head<eq>'x<heAd>'<assert_stmt>len(body)<eq>100000<line_sep>head,body=parse.split_head_body('x'+'<hEad></heAd>'+'x'<times>100000)<assert_stmt>head<eq>'x<hEad>'<assert_stmt>len(body)<eq>100000<line_sep>head,body=parse.split_head_body('x'+'<heaD></Head><bOdy>'+'x'<times>100000)<assert_stmt>head<eq>'x<heaD>'<assert_stmt>len(body)<eq>100006<block_end><def_stmt>test_parse_refresh <block_start>test=((('0;foo') (0 'foo')) ((';') (<none> <none>)) (('1.1.1.1; bar') (1 'bar')) (('2.2, urbaz') (2 'urbaz')) (('3; url=barf') (3 'barf')) (('3; url="barf"asdf') (3 'barf')) (('3; UrL=') (3 '')))<for_stmt>t test<block_start><assert_stmt>parse.parse_refresh(t[0])<eq>t[1]<block_end><block_end><def_stmt>test_regex_out_comments <block_start>t='Hello <!-- foo --> world!'<assert_stmt>parse.regex_out_comments(t)<eq>'Hello world!'<block_end><def_stmt>test_regex_out_some_scripts <block_start>t='<script>foo</script> bar'<assert_stmt>parse.regex_out_some_scripts(t)<eq>' bar'<block_end><def_stmt>test_regex_out_all_script <block_start>t='<script>foo</script> bar <script type="baz">barf</script> '<assert_stmt>parse.regex_out_all_scripts(t)<eq>' bar '<block_end>
<import_from_stmt>jnpr.junos Device<import_from_stmt>jnpr.junos.utils.config Config<import_stmt>yaml<line_sep>dev=Device(host='xxxx' user='demo' password='<PASSWORD>' gather_facts=<false>)<line_sep>dev.open()<line_sep>data=yaml.load(open('protocol_data.yml'))<line_sep>cu=Config(dev)<line_sep>cu.load(template_path='protocol_temp.j2' template_vars=data format='text')<line_sep>cu.pdiff()<if_stmt>cu.commit_check()<block_start>cu.commit()<block_end><else_stmt><block_start>cu.rollback()<block_end>dev.close()<line_sep>
# -*- coding: utf-8 -*-
"""
Created on 2017-4-25

@author: cheng.li

Normal-distribution based winsorization of factor matrices.

Two clipping strategies are provided:

* ``'flat'``   -- values outside ``mean +/- num_stds * std`` are clipped to the bound;
* ``'interp'`` -- outliers keep their relative order and are re-spaced by rank
  inside a band of width ``interval * std`` beyond the bound.

The ``*_2d`` kernels use per-group statistics (one row of stats per sample),
the ``*_1d`` kernels use a single column-wise statistic vector.
"""

import numba as nb
import numpy as np

from alphamind.utilities import aggregate
from alphamind.utilities import array_index
from alphamind.utilities import group_mapping
from alphamind.utilities import simple_mean
from alphamind.utilities import simple_std
from alphamind.utilities import transform


@nb.njit(nogil=True, cache=True)
def mask_values_2d(x: np.ndarray,
                   mean_values: np.ndarray,
                   std_values: np.ndarray,
                   num_stds: int = 3) -> np.ndarray:
    # Flat winsorization with per-row statistics: mean_values/std_values are
    # the same shape as x, so each element is clipped against its own bound.
    res = x.copy()
    length, width = x.shape
    for i in range(length):
        for j in range(width):
            ubound = mean_values[i, j] + num_stds * std_values[i, j]
            lbound = mean_values[i, j] - num_stds * std_values[i, j]
            if x[i, j] > ubound:
                res[i, j] = ubound
            elif x[i, j] < lbound:
                res[i, j] = lbound
    return res


@nb.njit(nogil=True, cache=True)
def interp_values_2d(x: np.ndarray,
                     groups: np.ndarray,
                     mean_values: np.ndarray,
                     std_values: np.ndarray,
                     num_stds: int = 3,
                     interval: float = 0.5) -> np.ndarray:
    # Rank-interpolating winsorization per group.  ``groups`` is assumed to be
    # the dense 0..max integer encoding produced by group_mapping.
    res = x.copy()
    length, width = x.shape
    max_cat = np.max(groups)
    for k in range(max_cat + 1):
        target_idx = np.where(groups == k)[0].flatten()
        for j in range(width):
            target_x = x[target_idx, j]
            target_res = target_x.copy()
            # Group statistics are constant within a group, so any member's
            # row (here the first) carries the group's mean/std.
            mean = mean_values[target_idx[0], j]
            std = std_values[target_idx[0], j]
            ubound = mean + num_stds * std
            lbound = mean - num_stds * std

            # upper bound abnormal values
            idx = target_x > ubound
            n = np.sum(idx)
            if n > 0:
                u_values = target_res[idx]
                # argsort().argsort() yields each outlier's rank (0..n-1),
                # preserving the original ordering among outliers.
                q_values = u_values.argsort().argsort()
                target_res[idx] = ubound + q_values / n * interval * std

            # lower bound abnormal values
            idx = target_x < lbound
            n = np.sum(idx)
            if n > 0:
                l_values = target_res[idx]
                # Negate so the most extreme (smallest) value gets the
                # largest rank and is pushed furthest below lbound.
                q_values = (-l_values).argsort().argsort()
                target_res[idx] = lbound - q_values / n * interval * std
            res[target_idx, j] = target_res
    return res


@nb.njit(nogil=True, cache=True)
def mask_values_1d(x: np.ndarray,
                   mean_values: np.ndarray,
                   std_values: np.ndarray,
                   num_stds: int = 3) -> np.ndarray:
    # Flat winsorization against one mean/std value per column.
    res = x.copy()
    length, width = x.shape
    for j in range(width):
        ubound = mean_values[j] + num_stds * std_values[j]
        lbound = mean_values[j] - num_stds * std_values[j]
        res[x[:, j] > ubound, j] = ubound
        res[x[:, j] < lbound, j] = lbound
    return res


@nb.njit(nogil=True, cache=True)
def interp_values_1d(x: np.ndarray,
                     mean_values: np.ndarray,
                     std_values: np.ndarray,
                     num_stds: int = 3,
                     interval: float = 0.5) -> np.ndarray:
    # Rank-interpolating winsorization against one mean/std value per column;
    # same re-spacing scheme as interp_values_2d but without grouping.
    res = x.copy()
    length, width = x.shape
    for j in range(width):
        ubound = mean_values[j] + num_stds * std_values[j]
        lbound = mean_values[j] - num_stds * std_values[j]

        # upper bound abnormal values
        idx = x[:, j] > ubound
        n = np.sum(idx)
        if n > 0:
            u_values = res[idx, j]
            q_values = u_values.argsort().argsort()
            res[idx, j] = ubound + q_values / n * interval * std_values[j]

        # lower bound abnormal values
        idx = x[:, j] < lbound
        n = np.sum(idx)
        if n > 0:
            l_values = res[idx, j]
            q_values = (-l_values).argsort().argsort()
            res[idx, j] = lbound - q_values / n * interval * std_values[j]
    return res


def winsorize_normal(x: np.ndarray,
                     num_stds: int = 3,
                     ddof=1,
                     groups: np.ndarray = None,
                     method: str = 'flat',
                     interval: float = 0.5) -> np.ndarray:
    """Winsorize ``x`` column-wise under a normality assumption.

    :param x: 2-d sample matrix (rows = samples, columns = factors).
    :param num_stds: half-width of the acceptance band in standard deviations.
    :param ddof: delta degrees of freedom for the std estimate.
    :param groups: optional per-row group labels; when given, statistics are
                   computed within each group.
    :param method: ``'flat'`` clips to the bound, anything else uses the
                   rank-interpolating scheme.
    :param interval: band width (in stds) used by the interpolating scheme.
    :return: new array of the same shape; ``x`` is not modified.
    """
    if groups is not None:
        groups = group_mapping(groups)
        mean_values = transform(groups, x, 'mean')
        std_values = transform(groups, x, 'std',
                               ddof)
        if method == 'flat':
            res = mask_values_2d(x, mean_values, std_values, num_stds)
        else:
            res = interp_values_2d(x, groups, mean_values, std_values, num_stds, interval)
    else:
        std_values = simple_std(x, axis=0, ddof=ddof)
        mean_values = simple_mean(x, axis=0)
        if method == 'flat':
            res = mask_values_1d(x, mean_values, std_values, num_stds)
        else:
            res = interp_values_1d(x, mean_values, std_values, num_stds, interval)
    return res


class NormalWinsorizer(object):
    """Stateful fit/transform wrapper around :func:`winsorize_normal`.

    ``fit`` records mean/std (optionally per group); ``transform`` applies the
    recorded statistics to new data, so train-time statistics can be reused at
    inference time.  Calling the instance directly winsorizes in one shot.
    """

    def __init__(self, num_stds: int = 3,
                 ddof: int = 1,
                 method: str = 'flat',
                 interval: float = 0.5):
        self.num_stds = num_stds
        self.ddof = ddof
        # Populated by fit(): per-group (or global) statistics and the sorted
        # group labels used to map new group arrays onto stored stats.
        self.mean = None
        self.std = None
        self.labels = None
        self.method = method
        self.interval = interval

    def fit(self, x: np.ndarray, groups: np.ndarray = None):
        """Estimate and store mean/std of ``x`` (per group when given)."""
        if groups is not None:
            group_index = group_mapping(groups)
            self.mean = aggregate(group_index, x, 'mean')
            self.std = aggregate(group_index, x, 'std', self.ddof)
            # np.unique returns sorted labels; array_index in transform()
            # relies on this ordering.
            self.labels = np.unique(groups)
        else:
            self.mean = simple_mean(x, axis=0)
            self.std = simple_std(x, axis=0, ddof=self.ddof)

    def transform(self, x: np.ndarray, groups: np.ndarray = None) -> np.ndarray:
        """Winsorize ``x`` using statistics recorded by a previous fit()."""
        if groups is not None:
            # Map each row's group label to the row of stored statistics.
            index = array_index(self.labels, groups)
            if self.method == 'flat':
                res = mask_values_2d(x, self.mean[index], self.std[index], self.num_stds)
            else:
                res = interp_values_2d(x, groups, self.mean[index], self.std[index],
                                       self.num_stds, self.interval)
        else:
            if self.method == 'flat':
                res = mask_values_1d(x, self.mean, self.std, self.num_stds)
            else:
                res = interp_values_1d(x, self.mean, self.std, self.num_stds,
                                       self.interval)
        return res

    def __call__(self, x: np.ndarray, groups: np.ndarray = None) -> np.ndarray:
        # One-shot fit+transform on the same data.
        return winsorize_normal(x, self.num_stds, self.ddof, groups,
                                self.method, self.interval)


if __name__ == '__main__':
    # Micro-benchmark comparing the two clipping strategies.
    x = np.random.randn(10000, 1)
    groups = np.random.randint(0, 3, 10000)

    import datetime as dt

    start = dt.datetime.now()
    for i in range(1000):
        winsorize_normal(x, method='flat')
    print(dt.datetime.now() - start)

    start = dt.datetime.now()
    for i in range(1000):
        winsorize_normal(x, method='interp')
    print(dt.datetime.now() - start)
from typing import Union, List

from pydantic import BaseModel


class Configuration(BaseModel):
    """Validated settings model: which key(s) to read and where to store the result."""

    # A single key or a list of keys to look up.
    key: Union[str, List[str]]
    # Name of the destination field -- presumably where the extracted value is
    # saved; confirm against the consumer of this model.
    save_in: str
from .base import EnumerableGenericMeta, typeof, is_type, TemplateArgumentsError
from .base import class_and_instance_method


# TODO: Check why array is specified when no length is specified
class ArrayType(EnumerableGenericMeta):
    """Metaclass for :class:`Array` [T, N]; validates and normalizes the length arg."""

    def __new__(cls, name, bases, namespace, args=None):
        cls = super().__new__(cls, name, bases, namespace, args)

        if not cls.specified:
            return cls

        # Coerce a non-int length argument to int; reject anything that
        # cannot be converted.
        if not isinstance(cls.args[1], int):
            err = None
            try:
                cls.__args__[1] = int(cls.args[1])
            except TypeError as e:
                err = e

            if err:
                # fix: the original f-string was missing the closing quote
                # around the offending value.
                raise TemplateArgumentsError(
                    f'Second argument to the "Array" type must be integer, '
                    f'not "{repr(cls.args[1])}"')

        return cls

    def keys(self):
        """Returns a list of keys that can be used for indexing :class:`Array` [T, N] type. Number of keys equals to the number of elements N.

        >>> Array[Uint[2], 5].keys()
        [0, 1, 2, 3, 4]
        """
        return list(range(self.args[1]))

    @property
    def width(self):
        # Total bit width: sum of the widths of all N elements.
        return sum(f.width for f in self)

    # TODO: Remove this
    @property
    def dtype(self):
        return self.args[0]

    @property
    def data(self):
        # Element type T.
        return self.args[0]

    def __getitem__(self, index):
        """If a single element is supplied for index, returns type T. If a slice is suplied for index, an :class:`Array` type is returned with a number of elements equal to the slice size.

        >>> Array[Uint[2], 5][3]
        Uint[2]

        >>> Array[Uint[2], 5][2:4]
        Array[Uint[2], 2]
        """
        if not self.specified:
            # Unspecified generic: delegate to the metaclass's template
            # argument binding.
            return super().__getitem__(index)

        index = self.index_norm(index)

        if len(index) == 1 and not isinstance(index[0], slice):
            if index[0] >= len(self):
                raise IndexError
            return self.args[0]
        else:
            # Count the total number of selected elements; the result is an
            # Array of the same element type with that length.
            width = 0
            for i in index:
                if isinstance(i, slice):
                    if (i.stop == 0) or (i.stop - i.start > len(self)):
                        raise IndexError
                    width += i.stop - i.start
                else:
                    if i >= len(self):
                        raise IndexError
                    width += 1

            return Array[self.args[0], width]

    def __str__(self):
        if self.args:
            return f'Array[{str(self.args[0])}, {len(self)}]'
        else:
            return super().__str__()


class Array(list, metaclass=ArrayType):
    """Generic container datatype that holds N instances of type T

    Generic parameters:
       T: Type of the :class:`Array` [T, N] elements

       N: Number of elements in the :class:`Array` [T, N]

    Concrete data type is obtained by indexing::

        u16_4 = Array[Uint[16], 4]

    """
    __parameters__ = ['T', 'N']

    def __init__(self, val: tuple = None):
        t = type(self).data
        if val is None:
            # Unknown value: N placeholder slots.
            array_tpl = (None, ) * len(type(self))
        else:
            # Coerce each element to T unless it already is one (or is the
            # None placeholder).
            array_tpl = (v if typeof(type(v), t) or v is None else t(v)
                         for v in val)

        super().__init__(array_tpl)

    def __eq__(self, other):
        t_other = type(other)
        if not is_type(t_other):
            # Plain Python value: fall back to list equality.
            return super().__eq__(other)

        return type(self) == t_other and super().__eq__(other)

    def __ne__(self, other):
        if not is_type(type(other)):
            # fix: previously referenced nonexistent ``self._array``; use the
            # list comparison directly, mirroring __eq__'s fallback.
            return super().__ne__(other)

        return not self.__eq__(other)

    @class_and_instance_method
    def __getitem__(self, key):
        if isinstance(key, int):
            return super().__getitem__(key)
        elif isinstance(key, str):
            try:
                return super().__getitem__(type(self).fields.index(key))
            except ValueError:
                # fix: message previously said "Tuple" (copy-paste).
                raise TypeError(f'Array "{repr(self)}" has no field "{key}"')

        key_norm = type(self).index_norm(key)

        if len(key_norm) == 1:
            if isinstance(key_norm[0], slice):
                tout = type(self)[key_norm]
                return tout(super().__getitem__(key_norm[0]))
            else:
                return super(Array, self).__getitem__(key_norm[0])
        else:
            tout = type(self)[key_norm]
            elems = []
            for i in key_norm:
                elems.extend(super().__getitem__(i))

            return tout(elems)

    @class_and_instance_method
    def subs(self, path, val):
        """Return a copy with the element at ``path`` (index or nested tuple of
        indices) replaced by ``val``."""
        if isinstance(path, tuple):
            if len(path) > 1:
                # Recurse into the nested element first.
                val = self[path[0]].subs(path[1:], val)

            path = path[0]

        return type(self)([self[i] if i != path else val for i in range(len(self))])

    def __hash__(self):
        return super().__hash__()

    def code(self):
        """Pack all elements into one integer, element 0 in the lowest bits."""
        w_dtype = type(self).data.width
        ret = 0

        for d in reversed(self):
            ret <<= w_dtype
            if d is not None:
                ret |= d.code()

        return ret

    @property
    def unknown(self):
        # True when any element is the None placeholder or itself unknown.
        return any(v is None or getattr(v, 'unknown', False) for v in self)

    @classmethod
    def decode(cls, val):
        """Inverse of :meth:`code`: unpack an integer into an Array instance."""
        ret = []
        val = int(val)
        mask = int(cls.data.width * '1', 2)
        for t in cls:
            ret.append(t.decode(val & mask))
            val >>= t.width

        return cls(ret)

    @class_and_instance_method
    def copy(self):
        # fix: the result was previously computed but not returned.
        return type(self)(self)
import math

import torch.nn as nn

import curves

__all__ = ['ConvFC']


class ConvFCBase(nn.Module):
    """Plain 3-conv + 3-fc classifier (the endpoint model of a curve)."""

    def __init__(self, num_classes):
        super(ConvFCBase, self).__init__()
        self.conv_part = nn.Sequential(
            nn.Conv2d(3, 32, kernel_size=5, padding=2),
            nn.ReLU(True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(32, 64, kernel_size=5, padding=2),
            nn.ReLU(True),
            nn.MaxPool2d(3, 2),
            nn.Conv2d(64, 128, kernel_size=5, padding=2),
            nn.ReLU(True),
            nn.MaxPool2d(3, 2),
        )
        # 1152 = 128 * 3 * 3 -- consistent with a 32x32 input (e.g. CIFAR);
        # TODO confirm expected input resolution.
        self.fc_part = nn.Sequential(
            nn.Linear(1152, 1000),
            nn.ReLU(True),
            nn.Linear(1000, 1000),
            nn.ReLU(True),
            nn.Linear(1000, num_classes)
        )

        # Initialize weights (He-style normal init for conv layers only;
        # linear layers keep PyTorch defaults).
        for m in self.conv_part.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
                m.bias.data.zero_()

    def forward(self, x):
        x = self.conv_part(x)
        # Flatten all feature maps per sample before the fc head.
        x = x.view(x.size(0), -1)
        x = self.fc_part(x)
        return x


class ConvFCCurve(nn.Module):
    """Same architecture as ConvFCBase, but with curve-parameterized layers.

    Each curves.Conv2d / curves.Linear holds one weight set per curve bend
    (``fix_points``); ``forward`` takes the interpolation coefficients
    ``coeffs_t`` for a point on the curve.
    """

    def __init__(self, num_classes, fix_points):
        super(ConvFCCurve, self).__init__()
        self.conv1 = curves.Conv2d(3, 32, kernel_size=5, padding=2, fix_points=fix_points)
        self.relu1 = nn.ReLU(True)
        self.max_pool1 = nn.MaxPool2d(kernel_size=3, stride=2)

        self.conv2 = curves.Conv2d(32, 64, kernel_size=5, padding=2, fix_points=fix_points)
        self.relu2 = nn.ReLU(True)
        self.max_pool2 = nn.MaxPool2d(3, 2)

        self.conv3 = curves.Conv2d(64, 128, kernel_size=5, padding=2, fix_points=fix_points)
        self.relu3 = nn.ReLU(True)
        self.max_pool3 = nn.MaxPool2d(3, 2)

        self.fc4 = curves.Linear(1152, 1000, fix_points=fix_points)
        self.relu4 = nn.ReLU(True)

        self.fc5 = curves.Linear(1000, 1000, fix_points=fix_points)
        self.relu5 = nn.ReLU(True)

        self.fc6 = curves.Linear(1000, num_classes, fix_points=fix_points)

        # Initialize weights: every bend's weight/bias of each conv layer,
        # mirroring ConvFCBase's init scheme.
        for m in self.modules():
            if isinstance(m, curves.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                for i in range(m.num_bends):
                    getattr(m, 'weight_%d' % i).data.normal_(0, math.sqrt(2. / n))
                    getattr(m, 'bias_%d' % i).data.zero_()

    def forward(self, x, coeffs_t):
        x = self.conv1(x, coeffs_t)
        x = self.relu1(x)
        x = self.max_pool1(x)

        x = self.conv2(x, coeffs_t)
        x = self.relu2(x)
        x = self.max_pool2(x)

        x = self.conv3(x, coeffs_t)
        x = self.relu3(x)
        x = self.max_pool3(x)

        x = x.view(x.size(0), -1)

        x = self.fc4(x, coeffs_t)
        x = self.relu4(x)

        x = self.fc5(x, coeffs_t)
        x = self.relu5(x)

        x = self.fc6(x, coeffs_t)

        return x


class ConvFC:
    # Registry entry pairing the endpoint architecture with its curve variant.
    base = ConvFCBase
    curve = ConvFCCurve
    kwargs = {}
# Generated by Django 1.11.14 on 2018-10-10 22:52 <import_from_stmt>django.db migrations<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[("zilencer" "0013_remove_customer_billing_user") ]<line_sep>operations=[migrations.AlterUniqueTogether(name="remotepushdevicetoken" unique_together={("server" "user_id" "kind" "token")} ) ]<block_end>
""" The ``ui.FileUpload`` class implements a file uploader widget. The FileUpload widget must be inside a ``ui.FormPanel`` which is used to submit the HTML form to the server. Note that you must set the form encoding and method like this: self.form.setEncoding(FormPanel.ENCODING_MULTIPART) self.form.setMethod(FormPanel.METHOD_POST) This will ensure that the form is submitted in a way that allows files to be uploaded. The example below doesn't really work, as there is no suitable server at ``nonexistent.com``. However, it does show how a file upload widget could be used within a FormPanel. """<import_from_stmt>pyjamas.ui.SimplePanel SimplePanel<import_from_stmt>pyjamas.ui.FormPanel FormPanel<import_from_stmt>pyjamas.ui.VerticalPanel VerticalPanel<import_from_stmt>pyjamas.ui.HorizontalPanel HorizontalPanel<import_from_stmt>pyjamas.ui.FileUpload FileUpload<import_from_stmt>pyjamas.ui.Label Label<import_from_stmt>pyjamas.ui.Button Button<class_stmt>FileUploadDemo(SimplePanel)<block_start><def_stmt>__init__ self<block_start>SimplePanel.__init__(self)<line_sep>self.form=FormPanel()<line_sep>self.form.setEncoding(FormPanel.ENCODING_MULTIPART)<line_sep>self.form.setMethod(FormPanel.METHOD_POST)<line_sep>self.form.setAction("http://nonexistent.com")<line_sep>self.form.setTarget("results")<line_sep>vPanel=VerticalPanel()<line_sep>hPanel=HorizontalPanel()<line_sep>hPanel.setSpacing(5)<line_sep>hPanel.add(Label("Upload file:"))<line_sep>self.field=FileUpload()<line_sep>self.field.setName("file")<line_sep>hPanel.add(self.field)<line_sep>hPanel.add(Button("Submit" getattr(self "onBtnClick")))<line_sep>vPanel.add(hPanel)<line_sep>results=NamedFrame("results")<line_sep>vPanel.add(results)<line_sep>self.form.add(vPanel)<line_sep>self.add(self.form)<block_end><def_stmt>onBtnClick self event<block_start>self.form.submit()<block_end><block_end>
""" Clustering example using an SMS spam dataset with SciPy. Associated with the Real Python article Scientific Python: Using SciPy for Optimization Available at: https://realpython.com/python-scipy-cluster-optimize/ """<import_from_stmt>pathlib Path<import_stmt>numpy<as>np<import_from_stmt>scipy.cluster.vq whiten kmeans vq<line_sep>HERE=Path(__file__).parent<line_sep>data=HERE.joinpath("SMSSpamCollection").read_text().strip().split("\n")<line_sep>digit_counts=np.empty((len(data) 2) dtype=int)<for_stmt>i,line enumerate(data)<block_start>case,message=line.split("\t")<line_sep>num_digits=sum(c.isdigit()<for>c message)<line_sep>digit_counts[i 0]=0<if>case<eq>"ham"<else>1<line_sep>digit_counts[i 1]=num_digits<block_end>unique_counts=np.unique(digit_counts[: 1] return_counts=<true>)<line_sep>unique_counts=np.transpose(np.vstack(unique_counts))<line_sep>whitened_counts=whiten(unique_counts)<line_sep>codebook,_=kmeans(whitened_counts 3)<line_sep>codes,_=vq(whitened_counts codebook)<line_sep>ham_code=codes[0]<line_sep>spam_code=codes[-1]<line_sep>unknown_code=list(set(range(3))^set((ham_code spam_code)))[0]<line_sep>print("definitely ham:" unique_counts[codes<eq>ham_code][-1])<line_sep>print("definitely spam:" unique_counts[codes<eq>spam_code][-1])<line_sep>print("unknown:" unique_counts[codes<eq>unknown_code][-1])<line_sep>digits=digit_counts[: 1]<line_sep>predicted_hams=digits<eq>0<line_sep>predicted_spams=digits<g>20<line_sep>predicted_unknowns=np.logical_and(digits<g>0 digits<le>20)<line_sep>ham_cluster=digit_counts[predicted_hams]<line_sep>spam_cluster=digit_counts[predicted_spams]<line_sep>unknown_cluster=digit_counts[predicted_unknowns]<line_sep>print("hams:" np.unique(ham_cluster[: 0] return_counts=<true>))<line_sep>print("spams:" np.unique(spam_cluster[: 0] return_counts=<true>))<line_sep>print("unknowns:" np.unique(unknown_cluster[: 0] return_counts=<true>))<line_sep>
def reverse(string):
    """Return *string* with its characters in reverse order."""
    return string[::-1]


if __name__ == '__main__':
    # fix: guard the interactive part so importing this module does not
    # block on stdin or print anything.
    print('Gimmie some word')
    s = input()
    print(reverse(s))
# Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""
An implementation of a key manager that reads its key from the project's
configuration options.

This key manager implementation provides limited security, assuming that the
key remains secret. Using the volume encryption feature as an example,
encryption provides protection against a lost or stolen disk, assuming that
the configuration file that contains the key is not stored on the disk.
Encryption also protects the confidentiality of data as it is transmitted via
iSCSI from the compute host to the storage host (again assuming that an
attacker who intercepts the data does not know the secret key).

Because this implementation uses a single, fixed key, it proffers no
protection once that key is compromised. In particular, different volumes
encrypted with a key provided by this key manager actually share the same
encryption key so *any* volume can be decrypted once the fixed key is known.
"""

import binascii

from castellan.common.objects import symmetric_key as key
from castellan.key_manager import key_manager
from oslo_log import log as logging

import nova.conf
from nova import exception
from nova.i18n import _

CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)


class ConfKeyManager(key_manager.KeyManager):
    """This key manager implementation supports all the methods specified by
    the key manager interface. This implementation creates a single key in
    response to all invocations of create_key. Side effects (e.g., raising
    exceptions) for each method are handled as specified by the key manager
    interface.
    """

    def __init__(self, configuration):
        LOG.warning('This key manager is insecure and is not recommended '
                    'for production deployments')
        # fix: the base-class __init__ was previously invoked a second time
        # at the end of this method; the duplicate call has been removed.
        super(ConfKeyManager, self).__init__(configuration)

        # Fixed UUID under which the single configured key is registered.
        self.key_id = '00000000-0000-0000-0000-000000000000'
        self.conf = CONF if configuration is None else configuration
        if CONF.key_manager.fixed_key is None:
            raise ValueError(_('keymgr.fixed_key not defined'))
        self._hex_key = CONF.key_manager.fixed_key

    def _get_key(self):
        # Decode the hex-encoded configured key into a SymmetricKey object;
        # bit length is 8 * byte length.
        key_bytes = bytes(binascii.unhexlify(self._hex_key))
        return key.SymmetricKey('AES', len(key_bytes) * 8, key_bytes)

    def create_key(self, context, algorithm, length, **kwargs):
        """Creates a symmetric key.

        This implementation returns a UUID for the key read from the
        configuration file. A Forbidden exception is raised if the
        specified context is None.
        """
        if context is None:
            raise exception.Forbidden()

        return self.key_id

    def create_key_pair(self, context, **kwargs):
        raise NotImplementedError(
            "ConfKeyManager does not support asymmetric keys")

    def store(self, context, managed_object, **kwargs):
        """Stores (i.e., registers) a key with the key manager."""
        if context is None:
            raise exception.Forbidden()

        # Only the one fixed key can be "stored".
        if managed_object != self._get_key():
            raise exception.KeyManagerError(
                reason="cannot store arbitrary keys")

        return self.key_id

    def get(self, context, managed_object_id):
        """Retrieves the key identified by the specified id.

        This implementation returns the key that is associated with the
        specified UUID. A Forbidden exception is raised if the specified
        context is None; a KeyError is raised if the UUID is invalid.
        """
        if context is None:
            raise exception.Forbidden()

        if managed_object_id != self.key_id:
            raise KeyError(str(managed_object_id) + " != " + str(self.key_id))

        return self._get_key()

    def delete(self, context, managed_object_id):
        """Represents deleting the key.

        Because the ConfKeyManager has only one key, which is read from the
        configuration file, the key is not actually deleted when this is
        called.
        """
        if context is None:
            raise exception.Forbidden()

        if managed_object_id != self.key_id:
            raise exception.KeyManagerError(
                reason="cannot delete non-existent key")

        LOG.warning("Not deleting key %s", managed_object_id)
import json
import pickle
import os

from Giveme5W1H.extractor.candidate import Candidate
from Giveme5W1H.extractor.configuration import Configuration as Config


class Writer:
    """
    Helper to write pickle and json representations of documents.

    There is no way to convert a json back to a full document object.
    Use pickles instead.
    """

    def __init__(self):
        # Directory for preprocessed pickle files; must be set via
        # set_preprocessed_path() before the pickle helpers are used.
        self._preprocessedPath = None

    def _write_json(self, output_object):
        """Write one result dict to ``<outputPath>/<dId>.json``."""
        # fix: use a context manager so the handle is closed even on error.
        with open(self._outputPath + '/' + output_object['dId'] + '.json', 'w') as outfile:
            outfile.write(json.dumps(output_object, sort_keys=False, indent=2))

    def write_pickle(self, document):
        # deprecated
        with open(self.get_preprocessed_filepath(document.get_rawData()['dId']), 'wb') as f:
            # Pickle the 'data' document using the highest protocol available.
            pickle.dump(document, f, pickle.HIGHEST_PROTOCOL)

    def write_pickle_file(self, path, file):
        """Pickle ``file`` under ``<preprocessedPath>/<path>.pickle``, creating dirs."""
        fullpath = self._preprocessedPath + '/' + path + '.pickle'
        os.makedirs(os.path.dirname(fullpath), exist_ok=True)
        with open(fullpath, 'wb') as f:
            pickle.dump(file, f, pickle.HIGHEST_PROTOCOL)

    def get_preprocessed_filepath(self, id):
        # deprecated
        return self._preprocessedPath + '/' + id + '.pickle'

    def get_preprocessed_path(self):
        return self._preprocessedPath

    def set_preprocessed_path(self, preprocessed_path):
        self._preprocessedPath = preprocessed_path

    def setOutputPath(self, output_path):
        self._outputPath = output_path

    def generate_json(self, document):
        """
        Build the JSON-serializable result for a document.

        :param document: The parsed Document
        :type document: Document
        :return: dict reusing the document's raw json as a template
        """
        # Reuse the input json as template for the output json
        output = document.get_rawData()
        if output is None:
            output = {}

        # Check if there isn`t already a fiveWoneH literal
        five_w_one_h_literal = output.setdefault('fiveWoneH', {})

        # Save error flags(not under fiveWoneH, would break further code which expects there only questions)
        output.setdefault('fiveWoneH_Metadata',
                          {'process_errors': document.get_error_flags()})
        if Config.get()['fiveWoneH_enhancer_full']:
            output.setdefault('fiveWoneH_enhancer', document.get_enhancements())

        # Extract answers
        answers = document.get_answers()
        for question in answers:
            # check if question literal is there
            question_literal = five_w_one_h_literal.setdefault(question, {'extracted': []})

            # add a label, thats only there for the ui
            if Config.get()['label']:
                question_literal['label'] = question

            # check if extracted literal is there
            extracted_literal = question_literal.setdefault('extracted', [])
            for answer in answers[question]:
                if isinstance(answer, Candidate):
                    # answer was already refactored
                    awJson = answer.get_json()
                    # clean up json by skipping NULL entries
                    if awJson:
                        extracted_literal.append(awJson)
                else:
                    # fallback for none refactored extractors
                    candidate_json = {'score': answer[1], 'words': []}
                    for candidateWord in answer[0]:
                        # fix: previously appended to candidate_json['parts'],
                        # a key that does not exist in the dict created above
                        # (guaranteed KeyError).  NOTE(review): confirm with
                        # downstream consumers whether 'words' or 'parts' is
                        # the expected key name.
                        candidate_json['words'].append({
                            'text': candidateWord[0],
                            'nlpTag': candidateWord[1]
                        })
                    extracted_literal.append(candidate_json)
                if Config.get()['onlyTopCandidate']:
                    # stop after the first answer
                    break

        return output

    def write(self, document):
        if self._outputPath:
            a_json = self.generate_json(document)
            self._write_json(a_json)
        else:
            print("set a outputPath before writing")
#-------------------------------------------------------------------------------
# elftools: dwarf/aranges.py
#
# DWARF aranges section decoding (.debug_aranges)
#
# <NAME> (<EMAIL>)
# This code is in the public domain
#-------------------------------------------------------------------------------
import os
from collections import namedtuple
from ..common.utils import struct_parse
from bisect import bisect_right
import math

# An entry in the aranges table;
# begin_addr: The beginning address in the CU
# length: The length of the address range in this entry
# info_offset: The CU's offset into .debug_info
# see 6.1.2 in DWARF4 docs for explanation of the remaining fields
ARangeEntry = namedtuple('ARangeEntry',
                         'begin_addr length info_offset unit_length version address_size segment_size')


class ARanges(object):
    """ ARanges table in DWARF

        stream, size:
            A stream holding the .debug_aranges section, and its size

        structs:
            A DWARFStructs instance for parsing the data
    """
    def __init__(self, stream, size, structs):
        self.stream = stream
        self.size = size
        self.structs = structs

        # Get entries of aranges table in the form of ARangeEntry tuples
        self.entries = self._get_entries()

        # Sort entries by the beginning address so address lookups can use
        # binary search.
        self.entries.sort(key=lambda entry: entry.begin_addr)

        # Create list of keys (first addresses) for better searching
        self.keys = [entry.begin_addr for entry in self.entries]

    def cu_offset_at_addr(self, addr):
        """ Given an address, get the offset of the CU it belongs to, where
            'offset' refers to the offset in the .debug_info section.

            Returns None when the address falls outside every known range.
        """
        # bisect_right finds the first entry starting *after* addr; the
        # candidate containing addr is therefore the one just before it.
        tup = self.entries[bisect_right(self.keys, addr) - 1]
        if tup.begin_addr <= addr < tup.begin_addr + tup.length:
            return tup.info_offset
        else:
            return None

    #------ PRIVATE ------#

    def _get_entries(self):
        """ Populate self.entries with ARangeEntry tuples for each range of
            addresses.
        """
        self.stream.seek(0)
        entries = []
        offset = 0

        # one loop == one "set" == one CU
        while offset < self.size:
            aranges_header = struct_parse(self.structs.Dwarf_aranges_header,
                                          self.stream, offset)
            addr_size = self._get_addr_size_struct(aranges_header["address_size"])

            # No segmentation
            if aranges_header["segment_size"] == 0:
                # pad to nearest multiple of tuple size
                # (the (addr, length) tuples are aligned within the section)
                tuple_size = aranges_header["address_size"] * 2
                fp = self.stream.tell()
                seek_to = int(math.ceil(fp / float(tuple_size)) * tuple_size)
                self.stream.seek(seek_to)

                # entries in this set/CU: read (addr, length) pairs until the
                # terminating (0, 0) pair.
                addr = struct_parse(addr_size('addr'), self.stream)
                length = struct_parse(addr_size('length'), self.stream)
                while addr != 0 or length != 0:
                    # 'begin_addr length info_offset version address_size segment_size'
                    entries.append(
                        ARangeEntry(begin_addr=addr,
                                    length=length,
                                    info_offset=aranges_header["debug_info_offset"],
                                    unit_length=aranges_header["unit_length"],
                                    version=aranges_header["version"],
                                    address_size=aranges_header["address_size"],
                                    segment_size=aranges_header["segment_size"]))
                    addr = struct_parse(addr_size('addr'), self.stream)
                    length = struct_parse(addr_size('length'), self.stream)
            # Segmentation exists in executable
            elif aranges_header["segment_size"] != 0:
                raise NotImplementedError("Segmentation not implemented")

            # Advance to the next set: unit_length does not include the
            # initial length field itself.
            offset = (offset
                      + aranges_header.unit_length
                      + self.structs.initial_length_field_size())

        return entries

    def _get_addr_size_struct(self, addr_header_value):
        """ Given this set's header value (int) for the address size,
            get the Construct representation of that size
        """
        if addr_header_value == 4:
            return self.structs.Dwarf_uint32
        else:
            # Only 4- and 8-byte addresses are supported.
            assert addr_header_value == 8
            return self.structs.Dwarf_uint64
r""" The Tachyon Ray Tracer AUTHOR: - <NAME> """<line_sep>#***************************************************************************** # Copyright (C) 2006 <NAME> # # Distributed under the terms of the GNU General Public License (GPL) # as published by the Free Software Foundation; either version 2 of # the License, or (at your option) any later version. # http://www.gnu.org/licenses/ #***************************************************************************** <import_stmt>os<import_from_stmt>sage.cpython.string bytes_to_str<import_from_stmt>sage.misc.pager pager<import_from_stmt>sage.misc.temporary_file tmp_filename<import_from_stmt>sage.structure.sage_object SageObject<class_stmt>TachyonRT(SageObject)<block_start>""" The Tachyon Ray Tracer tachyon_rt(model, outfile='sage.png', verbose=1, block=True, extra_opts='') INPUT: - ``model`` - a string that describes a 3d model in the Tachyon modeling format. Type tachyon_rt.help() for a description of this format. - ``outfile`` - (default: 'sage.png') output filename; the extension of the filename determines the type. Supported types include: - ``tga`` - 24-bit (uncompressed) - ``bmp`` - 24-bit Windows BMP (uncompressed) - ``ppm`` - 24-bit PPM (uncompressed) - ``rgb`` - 24-bit SGI RGB (uncompressed) - ``png`` - 24-bit PNG (compressed, lossless) - ``verbose`` - integer; (default: 1) - ``0`` - silent - ``1`` - some output - ``2`` - very verbose output - ``block`` - bool (default: True); if False, run the rendering command in the background. - ``extra_opts`` - passed directly to tachyon command line. Use tachyon_rt.usage() to see some of the possibilities. OUTPUT: - Some text may be displayed onscreen. - The file outfile is created. EXAMPLES: .. automethod:: __call__ """<def_stmt>_repr_ self<block_start>""" Returns a brief description of this interface object (the Tachyon raytracer written by <NAME>). 
TESTS:: sage: from sage.interfaces.tachyon import TachyonRT sage: t = TachyonRT() sage: print(t.__repr__()) <NAME>'s Tachyon Ray Tracer """<line_sep><return>"<NAME>'s Tachyon Ray Tracer"<block_end><def_stmt>__call__ self model outfile='sage.png' verbose=1 extra_opts=''<block_start>""" This executes the tachyon program, given a scene file input. INPUT: - ``model`` -- string. The tachyon model. - ``outfile`` -- string, default ``'sage.png'``. The filename to save the model to. - ``verbose`` -- 0, 1, (default) or 2. The verbosity level. - ``extra_opts`` -- string (default: empty string). Extra options that will be appended to the tachyon commandline. EXAMPLES:: sage: from sage.interfaces.tachyon import TachyonRT sage: tgen = Tachyon() sage: tgen.texture('t1') sage: tgen.sphere((0,0,0),1,'t1') sage: tgen.str()[30:40] 'resolution' sage: t = TachyonRT() sage: import os sage: t(tgen.str(), outfile=os.devnull) tachyon ... Tachyon Parallel/Multiprocessor Ray Tracer... TESTS:: sage: from sage.env import SAGE_EXTCODE sage: filename = os.path.join(SAGE_EXTCODE, 'doctest', 'invalid', 'syntax_error.tachyon') sage: with open(filename, 'r') as f: ....: syntax_error = f.read() sage: t(syntax_error, outfile=os.devnull) Traceback (most recent call last): ... RuntimeError: Tachyon Parallel/Multiprocessor Ray Tracer... ... Parser failed due to an input file syntax error. Aborting render. 
"""<line_sep>modelfile=tmp_filename(ext='.dat')<with_stmt>open(modelfile 'w')<as>file<block_start>file.write(model)<block_end>cmd=['tachyon' modelfile]<line_sep>ext=outfile[-4:].lower()<if_stmt>ext<eq>'.png'<block_start>cmd<augadd>['-format' 'PNG']<block_end><elif_stmt>ext<eq>'.tga'<block_start>cmd<augadd>['-format' 'TARGA']<block_end><elif_stmt>ext<eq>'.bmp'<block_start>cmd<augadd>['-format' 'BMP']<block_end><elif_stmt>ext<eq>'.ppm'<block_start>cmd<augadd>['-format' 'PPM']<block_end><elif_stmt>ext<eq>'.rgb'<block_start>cmd<augadd>['-format' 'RGB']<block_end>cmd<augadd>['-o' outfile]<line_sep>cmd<augadd>extra_opts.split()<if_stmt>verbose<ge>2<block_start>cmd<augadd>['+V']<block_end><if_stmt>verbose<block_start>print(' '.join(cmd))<block_end><import_stmt>subprocess<line_sep>out=bytes_to_str(subprocess.check_output(cmd))<if_stmt>verbose<ge>1<block_start>print(out)<block_end><if_stmt>out.rstrip().endswith('Aborting render.')<block_start><raise>RuntimeError(out)<block_end><if_stmt>outfile<ne>os.devnull<and>os.stat(outfile).st_size<eq>0<block_start><raise>RuntimeError('tachyon did not abort but output file is empty')<block_end><block_end><def_stmt>usage self use_pager=<true><block_start>""" Returns the basic description of using the Tachyon raytracer (simply what is returned by running tachyon with no input). The output is paged unless use_pager=False. TESTS:: sage: from sage.interfaces.tachyon import TachyonRT sage: t = TachyonRT() sage: t.usage(use_pager=False) ... tachyon modelfile [options]... <BLANKLINE> Model file formats supported: filename.dat ... """<with_stmt>os.popen('tachyon')<as>f<block_start>r=f.read()<block_end><if_stmt>use_pager<block_start>pager()(r)<block_end><else_stmt><block_start>print(r)<block_end><block_end><def_stmt>help self use_pager=<true><block_start>""" Prints (pages) the help file written by <NAME> describing scene files for Tachyon. The output is paged unless use_pager=False. 
TESTS:: sage: from sage.interfaces.tachyon import TachyonRT sage: t = TachyonRT() sage: t.help(use_pager=False) This help, which was written by <NAME>, describes ... """<line_sep>s=r""" This help, which was written by <NAME>, describes how to create scene files. At the present time, scene description files are very simple. The parser can't handle multiple file scene descriptions, although they may be added in the future. Most of the objects and their scene description are closely related to the RAY API \emph{(See the API docs for additional info.)} \subsection{Basic Scene Requirements} Unlike some other ray tracers out there, RAY requires that you specify most of the scene parameters in the scene description file itself. If users would rather specify some of these parameters at the command line, then I may add that feature in the future. A scene description file contains keywords, and values associated or grouped with a keyword. All keywords can be in caps, lower case, or mixed case for the convenience of the user. File names and texture names are normally case-sensitive, although the behavior for file names is operating system-dependent. All values are either character strings, or floating point numbers. In some cases, the presence of one keyword will require additional keyword / value pairs. At the moment there are several keywords with values, that must appear in every scene description file. Every scene description file must begin with the {\bf BEGIN\_SCENE} keyword, and end with the {\bf END\_SCENE} keyword. All definitions and declarations of any kind must be inside the {\bf BEGIN\_SCENE}, {\bf END\_SCENE} pair. The {\bf RESOLUTION} keyword is followed by an x resolution and a y resolution in terms of pixels on each axis. There are currently no limits placed on the resolution of an output image other than the computer's available memory and reasonable execution time. 
An example of a simple scene description skeleton is show below: \begin{verbatim} BEGIN_SCENE RESOLUTION 1024 1024 ... ... Camera definition.. ... ... Other objects, etc.. ... END_SCENE \end{verbatim} \subsection{Camera and viewing parameters} One of the most important parts of any scene, is the camera position and orientation. Having a good angle on a scene can make the difference between an average looking scene and a strikingly interesting one. There may be multiple camera definitions in a scene file, but the last camera definition overrides all previous definitions. There are several parameters that control the camera in \RAY, {\bf PROJECTION}, {\bf ZOOM}, {\bf ASPECTRATIO}, {\bf ANTIALIASING}, {\bf CENTER}, {\bf RAYDEPTH}, {\bf VIEWDIR}, and {\bf UPDIR}. The first and last keywords required in the definition of a camera are the {\bf CAMERA} and {\bf END\_CAMERA} keywords. The {\bf PROJECTION} keyword is optional, the remaining camera keywords are required, and must be written in the sequence they are listed in the examples in this section. \subsubsection{Camera projection modes} The {\bf PROJECTION} keyword must be followed by one of the supported camera projection mode identifiers {\bf PERSPECTIVE}, {\bf PERSPECTIVE_DOF}, {\bf ORTHOGRAPHIC}, or {\bf FISHEYE}. The {\bf FISHEYE} projection mode requires two extra parameters {\bf FOCALLENGTH} and {\bf APERTURE} which precede the regular camera options. \begin{verbatim} Camera projection perspective_dof focallength 0.75 aperture 0.02 Zoom 0.666667 Aspectratio 1.000000 Antialiasing 128 Raydepth 30 Center 0.000000 0.000000 -2.000000 Viewdir -0.000000 -0.000000 2.000000 Updir 0.000000 1.000000 -0.000000 End_Camera \end{verbatim} \subsubsection{Common camera parameters} The {\bf ZOOM} parameter controls the camera in a way similar to a telephoto lens on a 35mm camera. A zoom value of 1.0 is standard, with a 90 degree field of view. 
By changing the zoom factor to 2.0, the relative size of any feature in the frame is twice as big, while the field of view is decreased slightly. The zoom effect is implemented as a scaling factor on the height and width of the image plane relative to the world. The {\bf ASPECTRATIO} parameter controls the aspect ratio of the resulting image. By using the aspect ratio parameter, one can produce images which look correct on any screen. Aspect ratio alters the relative width of the image plane, while keeping the height of the image plane constant. In general, most workstation displays have an aspect ratio of 1.0. To see what aspect ratio your display has, you can render a simple sphere, at a resolution of 512x512 and measure the ratio of its width to its height. The {\bf ANTIALIASING} parameter controls the maximum level of supersampling used to obtain higher image quality. The parameter given sets the number of additional rays to trace per-pixel to attain higher image quality. The {\bf RAYDEPTH} parameter tells RAY what the maximum level of reflections, refraction, or in general the maximum recursion depth to trace rays to. A value between 4 and 12 is usually good. A value of 1 will disable rendering of reflective or transmissive objects (they'll be black). The remaining three camera parameters are the most important, because they define the coordinate system of the camera, and its position in the scene. The {\bf CENTER} parameter is an X, Y, Z coordinate defining the center of the camera \emph{(also known as the Center of Projection)}. Once you have determined where the camera will be placed in the scene, you need to tell RAY what the camera should be looking at. The {\bf VIEWDIR} parameter is a vector indicating the direction the camera is facing. It may be useful for me to add a "Look At" type keyword in the future to make camera aiming easier. If people want or need the "Look At" style camera, let me know. 
The last parameter needed to completely define a camera is the "up" direction. The {\bf UPDIR} parameter is a vector which points in the direction of the "sky". I wrote the camera so that {\bf VIEWDIR} and {\bf UPDIR} don't have to be perpendicular, and there shouldn't be a need for a "right" vector although some other ray tracers require it. Here's a snippet of a camera definition: \begin{verbatim} CAMERA ZOOM 1.0 ASPECTRATIO 1.0 ANTIALIASING 0 RAYDEPTH 12 CENTER 0.0 0.0 2.0 VIEWDIR 0 0 -1 UPDIR 0 1 0 END_CAMERA \end{verbatim} \subsubsection{Viewing frustum} An optional {\bf FRUSTUM} parameter provides a means for rendering sub-images in a larger frame, and correct stereoscopic images. The {\bf FRUSTUM} keyword must be followed by four floating parameters, which indicate the top, bottom, left and right coordinates of the image plane in eye coordinates. When the projection mode is set to {\bf FISHEYE}, the frustum parameters correspond to spherical coordinates specified in radians. \begin{verbatim} CAMERA ZOOM 1.0 ASPECTRATIO 1.0 ANTIALIASING 0 RAYDEPTH 4 CENTER 0.0 0.0 -6.0 VIEWDIR 0.0 0.0 1.0 UPDIR 0.0 1.0 0.0 FRUSTUM -0.5 0.5 -0.5 0.5 END_CAMERA \end{verbatim} \subsection{Including Files} The {\bf INCLUDE} keyword is used anywhere after the camera description, and is immediately followed by a valid filename, for a file containing additional scene description information. The included file is opened, and processing continues as if it were part of the current file, until the end of the included file is reached. Parsing of the current file continues from where it left off prior to the included file. \subsection{Scene File Comments} The {\bf $\#$} keyword is used anywhere after the camera description, and will cause RAY to ignore all characters from the {\bf $\#$} to the end of the input line. The {\bf $\#$} character must be surrounded by whitespace in order to be recognized. A sequence such as {\bf $\#\#\#$} will not be recognized as a comment. 
\subsection{Lights} The most frequently used type of lights provided by RAY are positional point light sources. The lights are actually small spheres, which are visible. A point light is composed of three pieces of information, a center, a radius (since its a sphere), and a color. To define a light, simply write the {\bf LIGHT} keyword, followed by its {\bf CENTER} (a X, Y, Z coordinate), its {\bf RAD} (radius, a scalar), and its {\bf COLOR} (a Red Green Blue triple). The radius parameter will accept any value of 0.0 or greater. Lights of radius 0.0 will not be directly visible in the rendered scene, but contribute light to the scene normally. For a light, the color values range from 0.0 to 1.0, any values outside this range may yield unpredictable results. A simple light definition looks like this: \begin{verbatim} LIGHT CENTER 4.0 3.0 2.0 RAD 0.2 COLOR 0.5 0.5 0.5 \end{verbatim} This light would be gray colored if seen directly, and would be 50\% intensity in each RGB color component. RAY supports simple directional lighting, commonly used in CAD and scientific visualization programs for its performance advantages over positional lights. Directional lights cannot be seen directly in scenes rendered by \RAY, only their illumination contributes to the final image. \begin{verbatim} DIRECTIONAL_LIGHT DIRECTION 0.0 -1.0 0.0 COLOR 1.0 0.0 0.0 \end{verbatim} RAY supports spotlights, which are described very similarly to a point light, but they are attenuated by angle from the direction vector, based on a ``falloff start'' angle and ``falloff end''angle. Between the starting and ending angles, the illumination is attenuated linearly. The syntax for a spotlight description in a scene file is as follows. \begin{verbatim} SPOTLIGHT CENTER 0.0 3.0 17.0 RAD 0.2 DIRECTION 0.0 -1.0 0.0 FALLOFF_START 20.0 FALLOFF_END 45.0 COLOR 1.0 0.0 0.0 \end{verbatim} The lighting system implemented by RAY provides various levels of distance-based lighting attenuation. 
By default, a light is not attenuated by distance. If the \emph{attenuation} keywords is present immediately prior to the light's color, RAY will accept coefficients which are used to calculate distance-based attenuation, which is applied the light by multiplying with the resulting value. The attenuation factor is calculated from the equation $$ 1/(K_c + K_l d + k_q d^2) $$ This attenuation equation should be familiar to some as it is the same lighting attenuation equation used by OpenGL. The constant, linear, and quadratic terms are specified in a scene file as shown in the following example. \begin{verbatim} LIGHT CENTER -5.0 0.0 10.0 RAD 1.0 ATTENUATION CONSTANT 1.0 LINEAR 0.2 QUADRATIC 0.05 COLOR 1.0 0.0 0.0 \end{verbatim} \subsection{Atmospheric effects} RAY currently only implements one atmospheric effect, simple distance-based fog. \subsubsection{Fog} RAY provides a simple distance-based fog effect intended to provide functionality similar to that found in OpenGL, for compatibility with software that requires an OpenGL-like fog implementation. Much like OpenGL, RAY provides linear, exponential, and exponential-squared fog. \begin{verbatim} FOG LINEAR START 0.0 END 50.0 DENSITY 1.0 COLOR 1.0 1.0 1.0 \end{verbatim} \begin{verbatim} FOG EXP START 0.0 END 50.0 DENSITY 1.0 COLOR 1.0 1.0 1.0 \end{verbatim} \begin{verbatim} FOG EXP2 START 0.0 END 50.0 DENSITY 1.0 COLOR 1.0 1.0 1.0 \end{verbatim} \subsection{Objects} \subsubsection{Spheres} Spheres are the simplest object supported by RAY and they are also the fastest object to render. Spheres are defined as one would expect, with a {\bf CENTER}, {\bf RAD} (radius), and a texture. The texture may be defined along with the object as discussed earlier, or it may be declared and assigned a name. 
Here's a sphere definition using a previously defined "NitrogenAtom" texture: \begin{verbatim} SPHERE CENTER 26.4 27.4 -2.4 RAD 1.0 NitrogenAtom \end{verbatim} A sphere with an inline texture definition is declared like this: \begin{verbatim} Sphere center 1.0 0.0 10.0 Rad 1.0 Texture Ambient 0.2 Diffuse 0.8 Specular 0.0 Opacity 1.0 Color 1.0 0.0 0.5 TexFunc 0 \end{verbatim} Notice that in this example I used mixed case for the keywords, this is allowable... Review the section on textures if the texture definitions are confusing. \subsubsection{Triangles} Triangles are also fairly simple objects, constructed by listing the three vertices of the triangle, and its texture. The order of the vertices isn't important, the triangle object is "double sided", so the surface normal is always pointing back in the direction of the incident ray. The triangle vertices are listed as {\bf V1}, {\bf V2}, and {\bf V3} each one is an X, Y, Z coordinate. An example of a triangle is shown below: \begin{verbatim} TRI V0 0.0 -4.0 12.0 V1 4.0 -4.0 8.0 V2 -4.0 -4.0 8.0 TEXTURE AMBIENT 0.1 DIFFUSE 0.2 SPECULAR 0.7 OPACITY 1.0 COLOR 1.0 1.0 1.0 TEXFUNC 0 \end{verbatim} \subsubsection{Smoothed Triangles} Smoothed triangles are just like regular triangles, except that the surface normal for each of the three vertices is used to determine the surface normal across the triangle by linear interpolation. Smoothed triangles yield curved looking objects and have nice reflections. \begin{verbatim} STRI V0 1.4 0.0 2.4 V1 1.35 -0.37 2.4 V2 1.36 -0.32 2.45 N0 -0.9 -0.0 -0.4 N1 -0.8 0.23 -0.4 N2 -0.9 0.27 -0.15 TEXTURE AMBIENT 0.1 DIFFUSE 0.2 SPECULAR 0.7 OPACITY 1.0 COLOR 1.0 1.0 1.0 TEXFUNC 0 \end{verbatim} \subsubsection{Infinite Planes} Useful for things like desert floors, backgrounds, skies etc, the infinite plane is pretty easy to use. An infinite plane only consists of two pieces of information, the {\bf CENTER} of the plane, and a {\bf NORMAL} to the plane. 
The center of the plane is just any point on the plane such that the point combined with the surface normal define the equation for the plane. As with triangles, planes are double sided. Here is an example of an infinite plane: \begin{verbatim} PLANE CENTER 0.0 -5.0 0.0 NORMAL 0.0 1.0 0.0 TEXTURE AMBIENT 0.1 DIFFUSE 0.9 SPECULAR 0.0 OPACITY 1.0 COLOR 1.0 1.0 1.0 TEXFUNC 1 CENTER 0.0 -5.0 0.0 ROTATE 0. 0.0 0.0 SCALE 1.0 1.0 1.0 \end{verbatim} \subsubsection{Rings} Rings are a simple object, they are really a not-so-infinite plane. Rings are simply an infinite plane cut into a washer shaped ring, infinitely thing just like a plane. A ring only requires two more pieces of information than an infinite plane does, an inner and outer radius. Here's an example of a ring: \begin{verbatim} Ring Center 1.0 1.0 1.0 Normal 0.0 1.0 0.0 Inner 1.0 Outer 5.0 MyNewRedTexture \end{verbatim} \subsubsection{Infinite Cylinders} Infinite cylinders are quite simple. They are defined by a center, an axis, and a radius. An example of an infinite cylinder is: \begin{verbatim} Cylinder Center 0.0 0.0 0.0 Axis 0.0 1.0 0.0 Rad 1.0 SomeRandomTexture \end{verbatim} \subsubsection{Finite Cylinders} Finite cylinders are almost the same as infinite ones, but the center and length of the axis determine the extents of the cylinder. The finite cylinder is also really a shell, it doesn't have any caps. If you need to close off the ends of the cylinder, use two ring objects, with the inner radius set to 0.0 and the normal set to be the axis of the cylinder. Finite cylinders are built this way to enhance speed. \begin{verbatim} FCylinder Center 0.0 0.0 0.0 Axis 0.0 9.0 0.0 Rad 1.0 SomeRandomTexture \end{verbatim} This defines a finite cylinder with radius 1.0, going from 0.0 0.0 0.0, to 0.0 9.0 0.0 along the Y axis. The main difference between an infinite cylinder and a finite cylinder is in the interpretation of the {\bf AXIS} parameter. 
In the case of the infinite cylinder, the length of the axis vector is ignored. In the case of the finite cylinder, the axis parameter is used to determine the length of the overall cylinder. \subsubsection{Axis Aligned Boxes} Axis aligned boxes are fast, but of limited usefulness. As such, I'm not going to waste much time explaining 'em. An axis aligned box is defined by a {\bf MIN} point, and a {\bf MAX} point. The volume between the min and max points is the box. Here's a simple box: \begin{verbatim} BOX MIN -1.0 -1.0 -1.0 MAX 1.0 1.0 1.0 Boxtexture1 \end{verbatim} \subsubsection{Fractal Landscapes} Currently fractal landscapes are a built-in function. In the near future I'll allow the user to load an image map for use as a heightfield. Fractal landscapes are currently forced to be axis aligned. Any suggestion on how to make them more appealing to users is welcome. A fractal landscape is defined by its "resolution" which is the number of grid points along each axis, and by its scale and center. The "scale" is how large the landscape is along the X, and Y axes in world coordinates. Here's a simple landscape: \begin{verbatim} SCAPE RES 30 30 SCALE 80.0 80.0 CENTER 0.0 -4.0 20.0 TEXTURE AMBIENT 0.1 DIFFUSE 0.9 SPECULAR 0.0 OPACITY 1.0 COLOR 1.0 1.0 1.0 TEXFUNC 0 \end{verbatim} The landscape shown above generates a square landscape made of 1,800 triangles. When time permits, the heightfield code will be rewritten to be more general and to increase rendering speed. \subsubsection{Arbitrary Quadric Surfaces} Docs soon. I need to add these into the parser, must have forgotten before ;-) \subsubsection{Volume Rendered Scalar Voxels} These are a little trickier than the average object :-) These are likely to change substantially in the very near future so I'm not going to get too detailed yet. A volume rendered data set is described by its axis aligned bounding box, and its resolution along each axis. The final parameter is the voxel data file. 
If you are seriously interested in messing with these, get hold of me and I'll give you more info. Here's a quick example: \begin{verbatim} SCALARVOL MIN -1.0 -1.0 -0.4 MAX 1.0 1.0 0.4 DIM 256 256 100 FILE /cfs/johns/vol/engine.256x256x110 TEXTURE AMBIENT 1.0 DIFFUSE 0.0 SPECULAR 0.0 OPACITY 8.1 COLOR 1.0 1.0 1.0 TEXFUNC 0 \end{verbatim} \subsection{Texture and Color} \subsubsection{Simple Texture Characteristics} The surface textures applied to an object drastically alter its overall appearance, making textures and color one of the most important topics in this manual. As with many other renderers, textures can be declared and associated with a name so that they may be used over and over again in a scene definition with less typing. If a texture is only need once, or it is unique to a particular object in the scene, then it may be declared along with the object it is applied to, and does not need a name. The simplest texture definition is a solid color with no image mapping or procedural texture mapping. A solid color texture is defined by the {\bf AMBIENT}, {\bf DIFFUSE}, {\bf SPECULAR}, {\bf OPACITY} and {\bf COLOR} parameters. The {\bf AMBIENT} parameter defines the ambient lighting coefficient to be used when shading the object. Similarly, the {\bf DIFFUSE} parameter is the relative contribution of the diffuse shading to the surface appearance. The {\bf SPECULAR} parameter is the contribution from perfectly reflected rays, as if on a mirrored surface. {\bf OPACITY} defines how transparent a surface is. An {\bf OPACITY} value of 0.0 renders the object completely invisible. An {\bf OPACITY} value of 1.0 makes the object completely solid, and non-transmissive. In general, the values for the ambient, diffuse, and specular parameters should add up to 1.0, if they don't then pixels may be over or underexposed quite easily. These parameters function in a manner similar to that of other ray tracers. 
The {\bf COLOR} parameter is an RGB triple with each value ranging from 0.0 to 1.0 inclusive. If the RGB values stray from 0.0 to 1.0, results are undefined. In the case of solid textures, a final parameter, {\bf TEXFUNC} is set to zero (integer). \subsubsection{Texture Declaration and Aliasing} To define a simple texture for use on several objects in a scene, the {\bf TEXDEF} keyword is used. The {\bf TEXDEF} keyword is followed by a case sensitive texture name, which will subsequently be used while defining objects. If many objects in a scene use the same texture through texture definition, a significant amount of memory may be saved since only one copy of the texture is present in memory, and its shared by all of the objects. Here is an example of a solid texture definition: \begin{verbatim} TEXDEF MyNewRedTexture AMBIENT 0.1 DIFFUSE 0.9 SPECULAR 0.0 OPACITY 1.0 COLOR 1.0 0.0 0.0 TEXFUNC 0 \end{verbatim} When this texture is used in an object definition, it is referenced only by name. Be careful not to use one of the other keywords as a defined texture, this will probably cause the parser to explode, as I don't check for use of keywords as texture names. When a texture is declared within an object definition, it appears in an identical format to the {\bf TEXDEF} declaration, but the {\bf TEXTURE} keyword is used instead of {\bf TEXDEF}. If it is useful to have several names for the same texture (when you are too lazy to actually finish defining different variations of a wood texture for example, and just want to be approximately correct for example) aliases can be constructed using the {\bf TEXALIAS} keyword, along with the alias name, and the original name. An example of a texture alias is: \begin{verbatim} TEXALIAS MyNewestRedTexture MyNewRedTexture \end{verbatim} This line would alias MyNewestRedTexture to be the same thing as the previously declared MyNewRedTexture. Note that the source texture must be declared before any aliases that use it. 
\subsubsection{Image Maps and Procedural Textures} Image maps and procedural textures very useful in making realistic looking scenes. A good image map can do as much for the realism of a wooden table as any amount of sophisticated geometry or lighting. Image maps are made by wrapping an image on to an object in one of three ways, a spherical map, a cylindrical map, and a planar map. Procedural textures are used in a way similar to the image maps, but they are on the fly and do not use much memory compared to the image maps. The main disadvantage of the procedural maps is that they must be hard-coded into RAY when it is compiled. The syntax used for all texture maps is fairly simple to learn. The biggest problem with the way that the parser is written now is that the different mappings are selected by an integer, which is not very user friendly. I expect to rewrite this section of the parser sometime in the near future to alleviate this problem. When I rewrite the parser, I may also end up altering the parameters that are used to describe a texture map, and some of them may become optional rather than required. 
\begin{center} \begin{tabular}{|c|c|} \multicolumn{2}{c}{Texture Mapping Functions} \\ \hline {Value for TEXFUNC} & {Mapping and Texture Description}\\ \hline {0} & {No special texture, plain shading} \\ {1} & {3D checkerboard function, like a Rubik's cube} \\ {2} & {Grit Texture, randomized surface color} \\ {3} & {3D marble texture, uses object's base color} \\ {4} & {3D wood texture, light and dark brown, not very good yet} \\ {5} & {3D gradient noise function (can't remember what it look like} \\ {6} & {Don't remember} \\ {7} & {Cylindrical Image Map, requires ppm filename} \\ {8} & {Spherical Image Map, requires ppm filename} \\ {9} & {Planar Image Map, requires ppm filename} \\ \hline \end{tabular} \end{center} Here's an example of a sphere, with a spherical image map applied to its surface: \begin{verbatim} SPHERE CENTER 2.0 0.0 5.0 RAD 2.0 TEXTURE AMBIENT 0.4 DIFFUSE 0.8 SPECULAR 0.0 OPACITY 1.0 COLOR 1.0 1.0 1.0 TEXFUNC 7 /cfs/johns/imaps/fire644.ppm CENTER 2.0 0.0 5.0 ROTATE 0.0 0.0 0.0 SCALE 2.0 -2.0 1.0 \end{verbatim} Basically, the image maps require the center, rotate and scale parameters so that you can position the image map on the object properly. """<import_from_stmt>sage.misc.sagedoc format<line_sep>f=format(s)<line_sep>f=f.replace('{ ' '').replace('}' '').replace('{' '')<if_stmt>use_pager<block_start>pager()(f)<block_end><else_stmt><block_start>print(f)<block_end><block_end><block_end>tachyon_rt=TachyonRT()<line_sep>
# Re-export the scrobbler method handlers so callers can import them
# directly from this package (``from ...methods import Logging, WebSocket``).
from plugin.scrobbler.methods.s_logging import Logging
from plugin.scrobbler.methods.s_websocket import WebSocket

# Public API of this package.
__all__ = ['Logging', 'WebSocket']
class TextBoxBase(FocusWidget):
    """Shared base for single- and multi-line text-entry widgets.

    NOTE(review): the selection methods below use the legacy Internet
    Explorer ``document.selection`` / ``TextRange`` API
    (``createRange``, ``createTextRange``, ``uniqueID``) — presumably this
    is the IE-specific backend of the widget set; confirm against the
    sibling (W3C) implementation.
    """

    def getCursorPos(self):
        # Caret offset (in characters) within this widget's element.
        # Returns -1 when the document selection belongs to a different
        # element, and 0 on any DOM failure (e.g. no selection available).
        try:
            elem = self.getElement()
            tr = elem.document.selection.createRange()
            if tr.parentElement().uniqueID != elem.uniqueID:
                return -1
            # ``move`` returns the (negative) number of characters actually
            # moved when asked to jump far left of the start, i.e. minus the
            # caret offset; negate it to get the position.
            return -tr.move("character", -65535)
        except:
            return 0

    def getSelectionLength(self):
        # Number of characters currently selected inside this widget;
        # 0 when the selection is in another element or on any DOM failure.
        try:
            elem = self.getElement()
            tr = elem.document.selection.createRange()
            if tr.parentElement().uniqueID != elem.uniqueID:
                return 0
            # ``tr.text`` may be None/empty for a collapsed selection;
            # the and/or idiom maps that case to 0.
            return tr.text and len(tr.text) or 0
        except:
            return 0

    def setSelectionRange(self, pos, length):
        # Select ``length`` characters starting at character ``pos``.
        # Silently does nothing if the TextRange calls fail.
        try:
            elem = self.getElement()
            tr = elem.createTextRange()
            tr.collapse(True)
            tr.moveStart('character', pos)
            tr.moveEnd('character', length)
            tr.select()
        except:
            pass

    def getText(self):
        # Current widget text; empty string when the "value" attribute is
        # missing or None.
        return DOM.getAttribute(self.getElement(), "value") or ""

    def setText(self, text):
        # Replace the widget text by writing the element's "value" attribute.
        DOM.setAttribute(self.getElement(), "value", text)
# Dataset implementations.
from .dvd import DVD
from .gopro import GOPRO
from .reds import REDS
# Registry helpers.
from .build import build_dataset, list_datasets

# Export every public (non-underscore-prefixed) name defined above.
__all__ = [name for name in globals() if not name.startswith("_")]