text
stringlengths
2
99.9k
meta
dict
from ..fast_exp import fast_exp
import numpy as np


def test_fast_exp():
    """Verify fast_exp tracks np.exp on [-5, 0] to within 3e-3 mean absolute error,
    at both double and single precision."""
    xs = np.linspace(-5, 0, 5000, endpoint=True)
    # Ground truth computed at double precision.
    expected = np.exp(xs)

    # Approximation at double precision.
    approx_f64 = np.array([fast_exp['float64_t'](x) for x in xs])

    # Approximation at single precision (inputs and outputs are float32).
    xs_f32 = xs.astype('float32')
    approx_f32 = np.array([fast_exp['float32_t'](x) for x in xs_f32], dtype='float32')

    for approx in (approx_f64, approx_f32):
        mean_abs_err = np.abs(expected - approx).mean()
        assert mean_abs_err < 3e-3
{ "pile_set_name": "Github" }
/*
 * DO NOT EDIT.  THIS FILE IS GENERATED FROM e:/builds/moz2_slave/mozilla-1.9.1-win32-xulrunner/build/dom/public/idl/svg/nsIDOMSVGTextElement.idl
 */

/* xpidl-generated XPCOM header for the SVG <text> element DOM interface.
 * The interface declares no methods of its own: it only tags the type and
 * inherits everything from nsIDOMSVGTextPositioningElement. */

#ifndef __gen_nsIDOMSVGTextElement_h__
#define __gen_nsIDOMSVGTextElement_h__

#ifndef __gen_nsIDOMSVGTextPositionElem_h__
#include "nsIDOMSVGTextPositionElem.h"
#endif

/* For IDL files that don't want to include root IDL files. */
#ifndef NS_NO_VTABLE
#define NS_NO_VTABLE
#endif

/* starting interface:    nsIDOMSVGTextElement */
/* IID as a string and as a struct initializer; both must stay in sync. */
#define NS_IDOMSVGTEXTELEMENT_IID_STR "6d43b1b4-efb6-426d-9e65-4420c3e24688"

#define NS_IDOMSVGTEXTELEMENT_IID \
  {0x6d43b1b4, 0xefb6, 0x426d, \
    { 0x9e, 0x65, 0x44, 0x20, 0xc3, 0xe2, 0x46, 0x88 }}

class NS_NO_VTABLE NS_SCRIPTABLE nsIDOMSVGTextElement : public nsIDOMSVGTextPositioningElement {
 public:

  NS_DECLARE_STATIC_IID_ACCESSOR(NS_IDOMSVGTEXTELEMENT_IID)

};

  NS_DEFINE_STATIC_IID_ACCESSOR(nsIDOMSVGTextElement, NS_IDOMSVGTEXTELEMENT_IID)

/* Use this macro when declaring classes that implement this interface. */
#define NS_DECL_NSIDOMSVGTEXTELEMENT \
  /* no methods! */

/* Use this macro to declare functions that forward the behavior of this interface to another object. */
#define NS_FORWARD_NSIDOMSVGTEXTELEMENT(_to) \
  /* no methods! */

/* Use this macro to declare functions that forward the behavior of this interface to another object in a safe way. */
#define NS_FORWARD_SAFE_NSIDOMSVGTEXTELEMENT(_to) \
  /* no methods! */

#if 0
/* Use the code below as a template for the implementation class for this interface. */

/* Header file */
class nsDOMSVGTextElement : public nsIDOMSVGTextElement
{
public:
  NS_DECL_ISUPPORTS
  NS_DECL_NSIDOMSVGTEXTELEMENT

  nsDOMSVGTextElement();

private:
  ~nsDOMSVGTextElement();

protected:
  /* additional members */
};

/* Implementation file */
NS_IMPL_ISUPPORTS1(nsDOMSVGTextElement, nsIDOMSVGTextElement)

nsDOMSVGTextElement::nsDOMSVGTextElement()
{
  /* member initializers and constructor code */
}

nsDOMSVGTextElement::~nsDOMSVGTextElement()
{
  /* destructor code */
}

/* End of implementation class template. */
#endif

#endif /* __gen_nsIDOMSVGTextElement_h__ */
{ "pile_set_name": "Github" }
is-property =========== Tests whether a property of a JavaScript object can be accessed using the dot (.) notation or whether it must be enclosed in brackets (i.e. use x[" ... "]) Example ------- ```javascript var isProperty = require("is-property") console.log(isProperty("foo")) //Prints true console.log(isProperty("0")) //Prints false ``` Install ------- npm install is-property ### `require("is-property")(str)` Checks if str is a property * `str` is a string which we will test if it is a property or not **Returns** true or false depending on whether str is a property ## Credits (c) 2013 Mikola Lysenko. MIT License
{ "pile_set_name": "Github" }
// !$*UTF8*$! { archiveVersion = 1; classes = { }; objectVersion = 46; objects = { /* Begin PBXBuildFile section */ 0019E5D623A1960300ABB19D /* CardPartAttributedTextViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 0019E5D523A1960300ABB19D /* CardPartAttributedTextViewController.swift */; }; 0AC06A50F9EFC07A051126DA /* Pods_CardParts_Example.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = F639F94EB1911C332F07F7C5 /* Pods_CardParts_Example.framework */; }; 1964FC2A3640A8318B06FAE2 /* Pods_CardParts_Tests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 411FA9F08E0EF1AF5B1D8EA1 /* Pods_CardParts_Tests.framework */; }; 1B1E203D22F47A8700734EDA /* CardPartPillLabelCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1B1E203C22F47A8700734EDA /* CardPartPillLabelCardController.swift */; }; 1B1E204022F4ADE900734EDA /* CardPartRoundedStackViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1B1E203F22F4ADE900734EDA /* CardPartRoundedStackViewCardController.swift */; }; 1B1F9541231F3F4900085906 /* CardPartConfettiViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1B1F9540231F3F4900085906 /* CardPartConfettiViewCardController.swift */; }; 1B1F9543231FF58400085906 /* ConfettiViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1B1F9542231FF58300085906 /* ConfettiViewController.swift */; }; 1B5F65412333DA3D001B473F /* CardPartProgressBarViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1B5F65402333DA3D001B473F /* CardPartProgressBarViewCardController.swift */; }; 1B693BA1230DAE2900662382 /* CardPartIconLabelCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1B693BA0230DAE2900662382 /* CardPartIconLabelCardController.swift */; }; 1BC4B4E1236762DD00EE7A31 /* CardPartHistogramCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1BC4B4E0236762DD00EE7A31 /* CardPartHistogramCardController.swift */; }; 
1BCE6B782360AE26006B7325 /* CardPartRadioButtonCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1BCE6B772360AE26006B7325 /* CardPartRadioButtonCardController.swift */; }; 1BD1074022F22823000163D2 /* CardPartBorderViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1BD1073F22F22823000163D2 /* CardPartBorderViewController.swift */; }; 2012C74E23E39ED000F48EFA /* CardPartMultiSliderViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 2012C74D23E39ECF00F48EFA /* CardPartMultiSliderViewCardController.swift */; }; 20F68E7D2405EFC200395102 /* CardPartSwitchViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 20F68E7C2405EFC200395102 /* CardPartSwitchViewCardController.swift */; }; 5570914320D18B07004D8E5A /* CardPartOrientedViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5570914220D18B07004D8E5A /* CardPartOrientedViewCardController.swift */; }; 55809E2620DABBF3008BE0D2 /* CardPartCenteredViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 55809E2520DABBF3008BE0D2 /* CardPartCenteredViewCardController.swift */; }; 55AB80CF20B61E1700B5994B /* CardUtilsTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 55AB80CE20B61E1700B5994B /* CardUtilsTests.swift */; }; 607FACD61AFB9204008FA782 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACD51AFB9204008FA782 /* AppDelegate.swift */; }; 607FACD81AFB9204008FA782 /* MainViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 607FACD71AFB9204008FA782 /* MainViewController.swift */; }; 607FACDB1AFB9204008FA782 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 607FACD91AFB9204008FA782 /* Main.storyboard */; }; 607FACDD1AFB9204008FA782 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 607FACDC1AFB9204008FA782 /* Images.xcassets */; }; 607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */ = {isa = PBXBuildFile; fileRef = 
607FACDE1AFB9204008FA782 /* LaunchScreen.xib */; }; 61D470651FDA6D1B00F451F0 /* StateCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D470611FDA6D1A00F451F0 /* StateCardController.swift */; }; 61D470681FDA6D1B00F451F0 /* ReactiveCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D470641FDA6D1A00F451F0 /* ReactiveCardController.swift */; }; 61D470701FDA709B00F451F0 /* CardPartTableViewCellTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D470691FDA709B00F451F0 /* CardPartTableViewCellTests.swift */; }; 61D470711FDA709B00F451F0 /* CardPartImageViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D4706A1FDA709B00F451F0 /* CardPartImageViewTests.swift */; }; 61D470721FDA709B00F451F0 /* CardPartTitleViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D4706B1FDA709B00F451F0 /* CardPartTitleViewTests.swift */; }; 61D470731FDA709B00F451F0 /* CardPartSeparatorViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D4706C1FDA709B00F451F0 /* CardPartSeparatorViewTests.swift */; }; 61D470741FDA709B00F451F0 /* CardPartTextViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D4706D1FDA709B00F451F0 /* CardPartTextViewTests.swift */; }; 61D470751FDA709B00F451F0 /* CardPartButtonViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D4706E1FDA709B00F451F0 /* CardPartButtonViewTests.swift */; }; 61D470761FDA709B00F451F0 /* CardPartSpacerViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D4706F1FDA709B00F451F0 /* CardPartSpacerViewTests.swift */; }; 61D470851FDB08EE00F451F0 /* CardPartTitleDescriptionViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 61D470841FDB08EE00F451F0 /* CardPartTitleDescriptionViewTests.swift */; }; 7021D3D220B5FA8600F9CB80 /* ThemedCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7021D3D120B5FA8600F9CB80 /* ThemedCardController.swift */; }; 7051279B20B5D35900B8E635 /* CardPartBarViewCardController.swift in 
Sources */ = {isa = PBXBuildFile; fileRef = 7051279A20B5D35900B8E635 /* CardPartBarViewCardController.swift */; }; 7051279D20B5D3D900B8E635 /* CardPartPagedViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7051279C20B5D3D900B8E635 /* CardPartPagedViewCardController.swift */; }; 7051279F20B5E2ED00B8E635 /* CardPartTextFieldCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7051279E20B5E2ED00B8E635 /* CardPartTextFieldCardController.swift */; }; 705127A120B5E52600B8E635 /* CardPartSliderViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 705127A020B5E52600B8E635 /* CardPartSliderViewCardController.swift */; }; 70BB3BF420FF89A500F55D57 /* CardPartTitleDescriptionViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70BB3BF320FF89A500F55D57 /* CardPartTitleDescriptionViewCardController.swift */; }; 70F595CE20B5C4D700B6E688 /* ThemeViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70F595CD20B5C4D700B6E688 /* ThemeViewController.swift */; }; 70F595D020B5C56200B6E688 /* CardPartTextViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70F595CF20B5C56200B6E688 /* CardPartTextViewCardController.swift */; }; 70F595D220B5C66100B6E688 /* CardPartImageViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70F595D120B5C66100B6E688 /* CardPartImageViewCardController.swift */; }; 70F595D420B5C7B300B6E688 /* CardPartButtonViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70F595D320B5C7B300B6E688 /* CardPartButtonViewCardController.swift */; }; 70F595D620B5C87300B6E688 /* CardPartTitleViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70F595D520B5C87300B6E688 /* CardPartTitleViewCardController.swift */; }; 70F595D820B5C95700B6E688 /* CardPartSeparatorViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70F595D720B5C95700B6E688 /* CardPartSeparatorViewCardController.swift */; }; 
70F595DA20B5C9F200B6E688 /* CardPartStackViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70F595D920B5C9F200B6E688 /* CardPartStackViewCardController.swift */; }; 70F595DC20B5CAF800B6E688 /* CardPartTableViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70F595DB20B5CAF800B6E688 /* CardPartTableViewCardController.swift */; }; 70F595E020B5CDCE00B6E688 /* CardPartCollectionViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 70F595DF20B5CDCE00B6E688 /* CardPartCollectionViewCardController.swift */; }; 80F3C7662348F32300F5D271 /* CardPartMapViewCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 80F3C7652348F32300F5D271 /* CardPartMapViewCardController.swift */; }; 80F3C768234ADA7900F5D271 /* CardPartMapViewTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 80F3C767234ADA7900F5D271 /* CardPartMapViewTests.swift */; }; 9A33961F2410208D00167DD5 /* CardPartsBottomSheetTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9A33961E2410208D00167DD5 /* CardPartsBottomSheetTests.swift */; }; 9A9AE5C323FBA72D0006E1EC /* CardPartBottomSheetCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 9A9AE5C223FBA72D0006E1EC /* CardPartBottomSheetCardController.swift */; }; EB16E38A2396E8FD003F70A2 /* CardParthCustomMarginsCardController.swift in Sources */ = {isa = PBXBuildFile; fileRef = EB16E3892396E8FD003F70A2 /* CardParthCustomMarginsCardController.swift */; }; /* End PBXBuildFile section */ /* Begin PBXContainerItemProxy section */ 607FACE61AFB9204008FA782 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = 607FACC81AFB9204008FA782 /* Project object */; proxyType = 1; remoteGlobalIDString = 607FACCF1AFB9204008FA782; remoteInfo = CardParts; }; /* End PBXContainerItemProxy section */ /* Begin PBXFileReference section */ 0019E5D523A1960300ABB19D /* CardPartAttributedTextViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = CardPartAttributedTextViewController.swift; sourceTree = "<group>"; }; 1B1E203C22F47A8700734EDA /* CardPartPillLabelCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartPillLabelCardController.swift; sourceTree = "<group>"; }; 1B1E203F22F4ADE900734EDA /* CardPartRoundedStackViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartRoundedStackViewCardController.swift; sourceTree = "<group>"; }; 1B1F9540231F3F4900085906 /* CardPartConfettiViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartConfettiViewCardController.swift; sourceTree = "<group>"; }; 1B1F9542231FF58300085906 /* ConfettiViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConfettiViewController.swift; sourceTree = "<group>"; }; 1B5F65402333DA3D001B473F /* CardPartProgressBarViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartProgressBarViewCardController.swift; sourceTree = "<group>"; }; 1B693BA0230DAE2900662382 /* CardPartIconLabelCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartIconLabelCardController.swift; sourceTree = "<group>"; }; 1BC4B4E0236762DD00EE7A31 /* CardPartHistogramCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartHistogramCardController.swift; sourceTree = "<group>"; }; 1BCE6B772360AE26006B7325 /* CardPartRadioButtonCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartRadioButtonCardController.swift; sourceTree = "<group>"; }; 1BD1073F22F22823000163D2 /* CardPartBorderViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartBorderViewController.swift; sourceTree = "<group>"; }; 2012C74D23E39ECF00F48EFA /* 
CardPartMultiSliderViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartMultiSliderViewCardController.swift; sourceTree = "<group>"; }; 20F68E7C2405EFC200395102 /* CardPartSwitchViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartSwitchViewCardController.swift; sourceTree = "<group>"; }; 2BA2B86E3C2946F91C18390D /* README.md */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = "<group>"; }; 2C0391CB2C9A48987E449BE1 /* Pods-CardParts_Tests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-CardParts_Tests.debug.xcconfig"; path = "Pods/Target Support Files/Pods-CardParts_Tests/Pods-CardParts_Tests.debug.xcconfig"; sourceTree = "<group>"; }; 30570F5B2E2268AA7B34EEC0 /* LICENSE */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = LICENSE; path = ../LICENSE; sourceTree = "<group>"; }; 3274FA90B83B27385FE1EC09 /* Pods-CardParts_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-CardParts_Example.release.xcconfig"; path = "Pods/Target Support Files/Pods-CardParts_Example/Pods-CardParts_Example.release.xcconfig"; sourceTree = "<group>"; }; 40FADEDFCAAAB84F115A1C2B /* Pods-CardParts_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-CardParts_Example.debug.xcconfig"; path = "Pods/Target Support Files/Pods-CardParts_Example/Pods-CardParts_Example.debug.xcconfig"; sourceTree = "<group>"; }; 411FA9F08E0EF1AF5B1D8EA1 /* Pods_CardParts_Tests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_CardParts_Tests.framework; sourceTree = BUILT_PRODUCTS_DIR; }; 5570914220D18B07004D8E5A /* 
CardPartOrientedViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartOrientedViewCardController.swift; sourceTree = "<group>"; }; 55809E2520DABBF3008BE0D2 /* CardPartCenteredViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartCenteredViewCardController.swift; sourceTree = "<group>"; }; 55AB80CE20B61E1700B5994B /* CardUtilsTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardUtilsTests.swift; sourceTree = "<group>"; }; 58D33E4374206E3A72623089 /* Pods-CardParts_Example.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-CardParts_Example.release.xcconfig"; path = "Target Support Files/Pods-CardParts_Example/Pods-CardParts_Example.release.xcconfig"; sourceTree = "<group>"; }; 607FACD01AFB9204008FA782 /* CardParts_Example.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = CardParts_Example.app; sourceTree = BUILT_PRODUCTS_DIR; }; 607FACD41AFB9204008FA782 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; }; 607FACD51AFB9204008FA782 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; }; 607FACD71AFB9204008FA782 /* MainViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MainViewController.swift; sourceTree = "<group>"; }; 607FACDA1AFB9204008FA782 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; }; 607FACDC1AFB9204008FA782 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = "<group>"; }; 607FACDF1AFB9204008FA782 /* Base */ = {isa = 
PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/LaunchScreen.xib; sourceTree = "<group>"; }; 607FACE51AFB9204008FA782 /* CardParts_Tests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = CardParts_Tests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; 607FACEA1AFB9204008FA782 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; }; 61D470611FDA6D1A00F451F0 /* StateCardController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = StateCardController.swift; sourceTree = "<group>"; }; 61D470641FDA6D1A00F451F0 /* ReactiveCardController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ReactiveCardController.swift; sourceTree = "<group>"; }; 61D470691FDA709B00F451F0 /* CardPartTableViewCellTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CardPartTableViewCellTests.swift; sourceTree = "<group>"; }; 61D4706A1FDA709B00F451F0 /* CardPartImageViewTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CardPartImageViewTests.swift; sourceTree = "<group>"; }; 61D4706B1FDA709B00F451F0 /* CardPartTitleViewTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CardPartTitleViewTests.swift; sourceTree = "<group>"; }; 61D4706C1FDA709B00F451F0 /* CardPartSeparatorViewTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CardPartSeparatorViewTests.swift; sourceTree = "<group>"; }; 61D4706D1FDA709B00F451F0 /* CardPartTextViewTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CardPartTextViewTests.swift; sourceTree = "<group>"; }; 61D4706E1FDA709B00F451F0 /* CardPartButtonViewTests.swift */ = 
{isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CardPartButtonViewTests.swift; sourceTree = "<group>"; }; 61D4706F1FDA709B00F451F0 /* CardPartSpacerViewTests.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CardPartSpacerViewTests.swift; sourceTree = "<group>"; }; 61D470841FDB08EE00F451F0 /* CardPartTitleDescriptionViewTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartTitleDescriptionViewTests.swift; sourceTree = "<group>"; }; 7021D3D120B5FA8600F9CB80 /* ThemedCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ThemedCardController.swift; sourceTree = "<group>"; }; 7051279A20B5D35900B8E635 /* CardPartBarViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartBarViewCardController.swift; sourceTree = "<group>"; }; 7051279C20B5D3D900B8E635 /* CardPartPagedViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartPagedViewCardController.swift; sourceTree = "<group>"; }; 7051279E20B5E2ED00B8E635 /* CardPartTextFieldCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartTextFieldCardController.swift; sourceTree = "<group>"; }; 705127A020B5E52600B8E635 /* CardPartSliderViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartSliderViewCardController.swift; sourceTree = "<group>"; }; 70BB3BF320FF89A500F55D57 /* CardPartTitleDescriptionViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartTitleDescriptionViewCardController.swift; sourceTree = "<group>"; }; 70F595CD20B5C4D700B6E688 /* ThemeViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ThemeViewController.swift; sourceTree = "<group>"; }; 
70F595CF20B5C56200B6E688 /* CardPartTextViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartTextViewCardController.swift; sourceTree = "<group>"; }; 70F595D120B5C66100B6E688 /* CardPartImageViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartImageViewCardController.swift; sourceTree = "<group>"; }; 70F595D320B5C7B300B6E688 /* CardPartButtonViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartButtonViewCardController.swift; sourceTree = "<group>"; }; 70F595D520B5C87300B6E688 /* CardPartTitleViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartTitleViewCardController.swift; sourceTree = "<group>"; }; 70F595D720B5C95700B6E688 /* CardPartSeparatorViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartSeparatorViewCardController.swift; sourceTree = "<group>"; }; 70F595D920B5C9F200B6E688 /* CardPartStackViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartStackViewCardController.swift; sourceTree = "<group>"; }; 70F595DB20B5CAF800B6E688 /* CardPartTableViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartTableViewCardController.swift; sourceTree = "<group>"; }; 70F595DF20B5CDCE00B6E688 /* CardPartCollectionViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartCollectionViewCardController.swift; sourceTree = "<group>"; }; 80F3C7652348F32300F5D271 /* CardPartMapViewCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartMapViewCardController.swift; sourceTree = "<group>"; }; 80F3C767234ADA7900F5D271 /* CardPartMapViewTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; 
path = CardPartMapViewTests.swift; sourceTree = "<group>"; }; 9A33961E2410208D00167DD5 /* CardPartsBottomSheetTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartsBottomSheetTests.swift; sourceTree = "<group>"; }; 9A9AE5C223FBA72D0006E1EC /* CardPartBottomSheetCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardPartBottomSheetCardController.swift; sourceTree = "<group>"; }; B21177744B384387575CB614 /* Pods-CardParts_Tests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-CardParts_Tests.debug.xcconfig"; path = "Target Support Files/Pods-CardParts_Tests/Pods-CardParts_Tests.debug.xcconfig"; sourceTree = "<group>"; }; E768E0B2422010080EDB1A52 /* Pods-CardParts_Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-CardParts_Tests.release.xcconfig"; path = "Pods/Target Support Files/Pods-CardParts_Tests/Pods-CardParts_Tests.release.xcconfig"; sourceTree = "<group>"; }; EB16E3892396E8FD003F70A2 /* CardParthCustomMarginsCardController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CardParthCustomMarginsCardController.swift; sourceTree = "<group>"; }; ED96724FD3D3088F4E52A4B3 /* CardParts.podspec */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = CardParts.podspec; path = ../CardParts.podspec; sourceTree = "<group>"; xcLanguageSpecificationIdentifier = xcode.lang.ruby; }; EDB3B26F1BF024EF68A0F8CC /* Pods-CardParts_Tests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-CardParts_Tests.release.xcconfig"; path = "Target Support Files/Pods-CardParts_Tests/Pods-CardParts_Tests.release.xcconfig"; sourceTree = "<group>"; }; F639F94EB1911C332F07F7C5 /* Pods_CardParts_Example.framework */ = {isa = PBXFileReference; explicitFileType = 
wrapper.framework; includeInIndex = 0; path = Pods_CardParts_Example.framework; sourceTree = BUILT_PRODUCTS_DIR; }; FB125B7758C7A2765F5C9714 /* Pods-CardParts_Example.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-CardParts_Example.debug.xcconfig"; path = "Target Support Files/Pods-CardParts_Example/Pods-CardParts_Example.debug.xcconfig"; sourceTree = "<group>"; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ 607FACCD1AFB9204008FA782 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( 0AC06A50F9EFC07A051126DA /* Pods_CardParts_Example.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; 607FACE21AFB9204008FA782 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; files = ( 1964FC2A3640A8318B06FAE2 /* Pods_CardParts_Tests.framework in Frameworks */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXFrameworksBuildPhase section */ /* Begin PBXGroup section */ 607FACC71AFB9204008FA782 = { isa = PBXGroup; children = ( 607FACF51AFB993E008FA782 /* Podspec Metadata */, 607FACD21AFB9204008FA782 /* Example for CardParts */, 607FACE81AFB9204008FA782 /* Tests */, 607FACD11AFB9204008FA782 /* Products */, BABC66A8CBE4FFFB18E62FFE /* Pods */, 87F0B966E5030117E89059DF /* Frameworks */, ); sourceTree = "<group>"; }; 607FACD11AFB9204008FA782 /* Products */ = { isa = PBXGroup; children = ( 607FACD01AFB9204008FA782 /* CardParts_Example.app */, 607FACE51AFB9204008FA782 /* CardParts_Tests.xctest */, ); name = Products; sourceTree = "<group>"; }; 607FACD21AFB9204008FA782 /* Example for CardParts */ = { isa = PBXGroup; children = ( 607FACD51AFB9204008FA782 /* AppDelegate.swift */, 607FACD71AFB9204008FA782 /* MainViewController.swift */, 70F595CD20B5C4D700B6E688 /* ThemeViewController.swift */, 1B1F9542231FF58300085906 /* ConfettiViewController.swift */, 7065C70220B5C2C200ED5099 /* Type of 
CardParts */, 607FACD91AFB9204008FA782 /* Main.storyboard */, 607FACDC1AFB9204008FA782 /* Images.xcassets */, 607FACDE1AFB9204008FA782 /* LaunchScreen.xib */, 607FACD31AFB9204008FA782 /* Supporting Files */, ); name = "Example for CardParts"; path = CardParts; sourceTree = "<group>"; }; 607FACD31AFB9204008FA782 /* Supporting Files */ = { isa = PBXGroup; children = ( 607FACD41AFB9204008FA782 /* Info.plist */, ); name = "Supporting Files"; sourceTree = "<group>"; }; 607FACE81AFB9204008FA782 /* Tests */ = { isa = PBXGroup; children = ( 61D4706E1FDA709B00F451F0 /* CardPartButtonViewTests.swift */, 61D4706A1FDA709B00F451F0 /* CardPartImageViewTests.swift */, 9A33961E2410208D00167DD5 /* CardPartsBottomSheetTests.swift */, 80F3C767234ADA7900F5D271 /* CardPartMapViewTests.swift */, 61D4706C1FDA709B00F451F0 /* CardPartSeparatorViewTests.swift */, 61D4706F1FDA709B00F451F0 /* CardPartSpacerViewTests.swift */, 61D470691FDA709B00F451F0 /* CardPartTableViewCellTests.swift */, 61D4706D1FDA709B00F451F0 /* CardPartTextViewTests.swift */, 61D4706B1FDA709B00F451F0 /* CardPartTitleViewTests.swift */, 61D470841FDB08EE00F451F0 /* CardPartTitleDescriptionViewTests.swift */, 607FACE91AFB9204008FA782 /* Supporting Files */, 55AB80CE20B61E1700B5994B /* CardUtilsTests.swift */, ); path = Tests; sourceTree = "<group>"; }; 607FACE91AFB9204008FA782 /* Supporting Files */ = { isa = PBXGroup; children = ( 607FACEA1AFB9204008FA782 /* Info.plist */, ); name = "Supporting Files"; sourceTree = "<group>"; }; 607FACF51AFB993E008FA782 /* Podspec Metadata */ = { isa = PBXGroup; children = ( ED96724FD3D3088F4E52A4B3 /* CardParts.podspec */, 2BA2B86E3C2946F91C18390D /* README.md */, 30570F5B2E2268AA7B34EEC0 /* LICENSE */, ); name = "Podspec Metadata"; sourceTree = "<group>"; }; 7065C70220B5C2C200ED5099 /* Type of CardParts */ = { isa = PBXGroup; children = ( 61D470641FDA6D1A00F451F0 /* ReactiveCardController.swift */, 61D470611FDA6D1A00F451F0 /* StateCardController.swift */, 70F595CF20B5C56200B6E688 /* 
CardPartTextViewCardController.swift */, 0019E5D523A1960300ABB19D /* CardPartAttributedTextViewController.swift */, 70F595D120B5C66100B6E688 /* CardPartImageViewCardController.swift */, 70F595D320B5C7B300B6E688 /* CardPartButtonViewCardController.swift */, 70F595D520B5C87300B6E688 /* CardPartTitleViewCardController.swift */, 70F595D720B5C95700B6E688 /* CardPartSeparatorViewCardController.swift */, 70F595D920B5C9F200B6E688 /* CardPartStackViewCardController.swift */, 70F595DB20B5CAF800B6E688 /* CardPartTableViewCardController.swift */, 70F595DF20B5CDCE00B6E688 /* CardPartCollectionViewCardController.swift */, 7051279A20B5D35900B8E635 /* CardPartBarViewCardController.swift */, 7051279C20B5D3D900B8E635 /* CardPartPagedViewCardController.swift */, 7051279E20B5E2ED00B8E635 /* CardPartTextFieldCardController.swift */, 705127A020B5E52600B8E635 /* CardPartSliderViewCardController.swift */, 9A9AE5C223FBA72D0006E1EC /* CardPartBottomSheetCardController.swift */, 7021D3D120B5FA8600F9CB80 /* ThemedCardController.swift */, 5570914220D18B07004D8E5A /* CardPartOrientedViewCardController.swift */, 55809E2520DABBF3008BE0D2 /* CardPartCenteredViewCardController.swift */, 70BB3BF320FF89A500F55D57 /* CardPartTitleDescriptionViewCardController.swift */, 1BD1073F22F22823000163D2 /* CardPartBorderViewController.swift */, 1B1E203C22F47A8700734EDA /* CardPartPillLabelCardController.swift */, 1B1E203F22F4ADE900734EDA /* CardPartRoundedStackViewCardController.swift */, 1B693BA0230DAE2900662382 /* CardPartIconLabelCardController.swift */, 1B1F9540231F3F4900085906 /* CardPartConfettiViewCardController.swift */, 1B5F65402333DA3D001B473F /* CardPartProgressBarViewCardController.swift */, 80F3C7652348F32300F5D271 /* CardPartMapViewCardController.swift */, 1BCE6B772360AE26006B7325 /* CardPartRadioButtonCardController.swift */, 1BC4B4E0236762DD00EE7A31 /* CardPartHistogramCardController.swift */, EB16E3892396E8FD003F70A2 /* CardParthCustomMarginsCardController.swift */, 2012C74D23E39ECF00F48EFA /* 
CardPartMultiSliderViewCardController.swift */, 20F68E7C2405EFC200395102 /* CardPartSwitchViewCardController.swift */, ); name = "Type of CardParts"; sourceTree = "<group>"; }; 87F0B966E5030117E89059DF /* Frameworks */ = { isa = PBXGroup; children = ( F639F94EB1911C332F07F7C5 /* Pods_CardParts_Example.framework */, 411FA9F08E0EF1AF5B1D8EA1 /* Pods_CardParts_Tests.framework */, ); name = Frameworks; sourceTree = "<group>"; }; BABC66A8CBE4FFFB18E62FFE /* Pods */ = { isa = PBXGroup; children = ( FB125B7758C7A2765F5C9714 /* Pods-CardParts_Example.debug.xcconfig */, 58D33E4374206E3A72623089 /* Pods-CardParts_Example.release.xcconfig */, B21177744B384387575CB614 /* Pods-CardParts_Tests.debug.xcconfig */, EDB3B26F1BF024EF68A0F8CC /* Pods-CardParts_Tests.release.xcconfig */, 40FADEDFCAAAB84F115A1C2B /* Pods-CardParts_Example.debug.xcconfig */, 3274FA90B83B27385FE1EC09 /* Pods-CardParts_Example.release.xcconfig */, 2C0391CB2C9A48987E449BE1 /* Pods-CardParts_Tests.debug.xcconfig */, E768E0B2422010080EDB1A52 /* Pods-CardParts_Tests.release.xcconfig */, ); path = Pods; sourceTree = "<group>"; }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ 607FACCF1AFB9204008FA782 /* CardParts_Example */ = { isa = PBXNativeTarget; buildConfigurationList = 607FACEF1AFB9204008FA782 /* Build configuration list for PBXNativeTarget "CardParts_Example" */; buildPhases = ( BB601C2A9A2BAECC51F571D1 /* [CP] Check Pods Manifest.lock */, 607FACCC1AFB9204008FA782 /* Sources */, 607FACCD1AFB9204008FA782 /* Frameworks */, 607FACCE1AFB9204008FA782 /* Resources */, 2B9B7383D03B89214C749584 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); dependencies = ( ); name = CardParts_Example; productName = CardParts; productReference = 607FACD01AFB9204008FA782 /* CardParts_Example.app */; productType = "com.apple.product-type.application"; }; 607FACE41AFB9204008FA782 /* CardParts_Tests */ = { isa = PBXNativeTarget; buildConfigurationList = 607FACF21AFB9204008FA782 /* Build configuration list for 
PBXNativeTarget "CardParts_Tests" */; buildPhases = ( 90BAACBDEF50357EFD255920 /* [CP] Check Pods Manifest.lock */, 607FACE11AFB9204008FA782 /* Sources */, 607FACE21AFB9204008FA782 /* Frameworks */, 607FACE31AFB9204008FA782 /* Resources */, 12987F7420BBBE1513625EA6 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); dependencies = ( 607FACE71AFB9204008FA782 /* PBXTargetDependency */, ); name = CardParts_Tests; productName = Tests; productReference = 607FACE51AFB9204008FA782 /* CardParts_Tests.xctest */; productType = "com.apple.product-type.bundle.unit-test"; }; /* End PBXNativeTarget section */ /* Begin PBXProject section */ 607FACC81AFB9204008FA782 /* Project object */ = { isa = PBXProject; attributes = { LastSwiftUpdateCheck = 0830; LastUpgradeCheck = 1020; ORGANIZATIONNAME = CocoaPods; TargetAttributes = { 607FACCF1AFB9204008FA782 = { CreatedOnToolsVersion = 6.3.1; LastSwiftMigration = 1020; ProvisioningStyle = Manual; }; 607FACE41AFB9204008FA782 = { CreatedOnToolsVersion = 6.3.1; LastSwiftMigration = 1020; ProvisioningStyle = Manual; TestTargetID = 607FACCF1AFB9204008FA782; }; }; }; buildConfigurationList = 607FACCB1AFB9204008FA782 /* Build configuration list for PBXProject "CardParts" */; compatibilityVersion = "Xcode 3.2"; developmentRegion = en; hasScannedForEncodings = 0; knownRegions = ( en, Base, ); mainGroup = 607FACC71AFB9204008FA782; productRefGroup = 607FACD11AFB9204008FA782 /* Products */; projectDirPath = ""; projectRoot = ""; targets = ( 607FACCF1AFB9204008FA782 /* CardParts_Example */, 607FACE41AFB9204008FA782 /* CardParts_Tests */, ); }; /* End PBXProject section */ /* Begin PBXResourcesBuildPhase section */ 607FACCE1AFB9204008FA782 /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( 607FACDB1AFB9204008FA782 /* Main.storyboard in Resources */, 607FACE01AFB9204008FA782 /* LaunchScreen.xib in Resources */, 607FACDD1AFB9204008FA782 /* Images.xcassets in Resources */, ); runOnlyForDeploymentPostprocessing = 
0; }; 607FACE31AFB9204008FA782 /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ 12987F7420BBBE1513625EA6 /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputPaths = ( "${PODS_ROOT}/Target Support Files/Pods-CardParts_Tests/Pods-CardParts_Tests-frameworks.sh", "${BUILT_PRODUCTS_DIR}/CardParts/CardParts.framework", "${BUILT_PRODUCTS_DIR}/Differentiator/Differentiator.framework", "${BUILT_PRODUCTS_DIR}/RxCocoa/RxCocoa.framework", "${BUILT_PRODUCTS_DIR}/RxDataSources/RxDataSources.framework", "${BUILT_PRODUCTS_DIR}/RxGesture/RxGesture.framework", "${BUILT_PRODUCTS_DIR}/RxSwift/RxSwift.framework", ); name = "[CP] Embed Pods Frameworks"; outputPaths = ( "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/CardParts.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Differentiator.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/RxCocoa.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/RxDataSources.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/RxGesture.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/RxSwift.framework", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-CardParts_Tests/Pods-CardParts_Tests-frameworks.sh\"\n"; showEnvVarsInLog = 0; }; 2B9B7383D03B89214C749584 /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputPaths = ( "${PODS_ROOT}/Target Support Files/Pods-CardParts_Example/Pods-CardParts_Example-frameworks.sh", "${BUILT_PRODUCTS_DIR}/CardParts/CardParts.framework", "${BUILT_PRODUCTS_DIR}/Differentiator/Differentiator.framework", "${BUILT_PRODUCTS_DIR}/RxCocoa/RxCocoa.framework", "${BUILT_PRODUCTS_DIR}/RxDataSources/RxDataSources.framework", 
"${BUILT_PRODUCTS_DIR}/RxGesture/RxGesture.framework", "${BUILT_PRODUCTS_DIR}/RxSwift/RxSwift.framework", ); name = "[CP] Embed Pods Frameworks"; outputPaths = ( "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/CardParts.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/Differentiator.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/RxCocoa.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/RxDataSources.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/RxGesture.framework", "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/RxSwift.framework", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-CardParts_Example/Pods-CardParts_Example-frameworks.sh\"\n"; showEnvVarsInLog = 0; }; 90BAACBDEF50357EFD255920 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputFileListPaths = ( ); inputPaths = ( "${PODS_PODFILE_DIR_PATH}/Podfile.lock", "${PODS_ROOT}/Manifest.lock", ); name = "[CP] Check Pods Manifest.lock"; outputFileListPaths = ( ); outputPaths = ( "$(DERIVED_FILE_DIR)/Pods-CardParts_Tests-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. 
Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; BB601C2A9A2BAECC51F571D1 /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; files = ( ); inputFileListPaths = ( ); inputPaths = ( "${PODS_PODFILE_DIR_PATH}/Podfile.lock", "${PODS_ROOT}/Manifest.lock", ); name = "[CP] Check Pods Manifest.lock"; outputFileListPaths = ( ); outputPaths = ( "$(DERIVED_FILE_DIR)/Pods-CardParts_Example-checkManifestLockResult.txt", ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n"; showEnvVarsInLog = 0; }; /* End PBXShellScriptBuildPhase section */ /* Begin PBXSourcesBuildPhase section */ 607FACCC1AFB9204008FA782 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 607FACD81AFB9204008FA782 /* MainViewController.swift in Sources */, 0019E5D623A1960300ABB19D /* CardPartAttributedTextViewController.swift in Sources */, 20F68E7D2405EFC200395102 /* CardPartSwitchViewCardController.swift in Sources */, 70F595D220B5C66100B6E688 /* CardPartImageViewCardController.swift in Sources */, 9A9AE5C323FBA72D0006E1EC /* CardPartBottomSheetCardController.swift in Sources */, 61D470681FDA6D1B00F451F0 /* ReactiveCardController.swift in Sources */, 1B1E203D22F47A8700734EDA /* CardPartPillLabelCardController.swift in Sources */, 70F595CE20B5C4D700B6E688 /* ThemeViewController.swift in Sources */, 
61D470651FDA6D1B00F451F0 /* StateCardController.swift in Sources */, 7051279B20B5D35900B8E635 /* CardPartBarViewCardController.swift in Sources */, 1B1F9541231F3F4900085906 /* CardPartConfettiViewCardController.swift in Sources */, 705127A120B5E52600B8E635 /* CardPartSliderViewCardController.swift in Sources */, 607FACD61AFB9204008FA782 /* AppDelegate.swift in Sources */, 7021D3D220B5FA8600F9CB80 /* ThemedCardController.swift in Sources */, 70F595E020B5CDCE00B6E688 /* CardPartCollectionViewCardController.swift in Sources */, 1BCE6B782360AE26006B7325 /* CardPartRadioButtonCardController.swift in Sources */, 1BC4B4E1236762DD00EE7A31 /* CardPartHistogramCardController.swift in Sources */, 70F595D820B5C95700B6E688 /* CardPartSeparatorViewCardController.swift in Sources */, 7051279D20B5D3D900B8E635 /* CardPartPagedViewCardController.swift in Sources */, 70F595DC20B5CAF800B6E688 /* CardPartTableViewCardController.swift in Sources */, 7051279F20B5E2ED00B8E635 /* CardPartTextFieldCardController.swift in Sources */, EB16E38A2396E8FD003F70A2 /* CardParthCustomMarginsCardController.swift in Sources */, 2012C74E23E39ED000F48EFA /* CardPartMultiSliderViewCardController.swift in Sources */, 70BB3BF420FF89A500F55D57 /* CardPartTitleDescriptionViewCardController.swift in Sources */, 1B1E204022F4ADE900734EDA /* CardPartRoundedStackViewCardController.swift in Sources */, 1B693BA1230DAE2900662382 /* CardPartIconLabelCardController.swift in Sources */, 1B1F9543231FF58400085906 /* ConfettiViewController.swift in Sources */, 70F595D020B5C56200B6E688 /* CardPartTextViewCardController.swift in Sources */, 70F595D420B5C7B300B6E688 /* CardPartButtonViewCardController.swift in Sources */, 70F595D620B5C87300B6E688 /* CardPartTitleViewCardController.swift in Sources */, 70F595DA20B5C9F200B6E688 /* CardPartStackViewCardController.swift in Sources */, 5570914320D18B07004D8E5A /* CardPartOrientedViewCardController.swift in Sources */, 55809E2620DABBF3008BE0D2 /* 
CardPartCenteredViewCardController.swift in Sources */, 80F3C7662348F32300F5D271 /* CardPartMapViewCardController.swift in Sources */, 1B5F65412333DA3D001B473F /* CardPartProgressBarViewCardController.swift in Sources */, 1BD1074022F22823000163D2 /* CardPartBorderViewController.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; 607FACE11AFB9204008FA782 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( 80F3C768234ADA7900F5D271 /* CardPartMapViewTests.swift in Sources */, 61D470751FDA709B00F451F0 /* CardPartButtonViewTests.swift in Sources */, 61D470721FDA709B00F451F0 /* CardPartTitleViewTests.swift in Sources */, 55AB80CF20B61E1700B5994B /* CardUtilsTests.swift in Sources */, 9A33961F2410208D00167DD5 /* CardPartsBottomSheetTests.swift in Sources */, 61D470731FDA709B00F451F0 /* CardPartSeparatorViewTests.swift in Sources */, 61D470741FDA709B00F451F0 /* CardPartTextViewTests.swift in Sources */, 61D470711FDA709B00F451F0 /* CardPartImageViewTests.swift in Sources */, 61D470761FDA709B00F451F0 /* CardPartSpacerViewTests.swift in Sources */, 61D470701FDA709B00F451F0 /* CardPartTableViewCellTests.swift in Sources */, 61D470851FDB08EE00F451F0 /* CardPartTitleDescriptionViewTests.swift in Sources */, ); runOnlyForDeploymentPostprocessing = 0; }; /* End PBXSourcesBuildPhase section */ /* Begin PBXTargetDependency section */ 607FACE71AFB9204008FA782 /* PBXTargetDependency */ = { isa = PBXTargetDependency; target = 607FACCF1AFB9204008FA782 /* CardParts_Example */; targetProxy = 607FACE61AFB9204008FA782 /* PBXContainerItemProxy */; }; /* End PBXTargetDependency section */ /* Begin PBXVariantGroup section */ 607FACD91AFB9204008FA782 /* Main.storyboard */ = { isa = PBXVariantGroup; children = ( 607FACDA1AFB9204008FA782 /* Base */, ); name = Main.storyboard; sourceTree = "<group>"; }; 607FACDE1AFB9204008FA782 /* LaunchScreen.xib */ = { isa = PBXVariantGroup; children = ( 607FACDF1AFB9204008FA782 /* Base */, ); name = 
LaunchScreen.xib; sourceTree = "<group>"; }; /* End PBXVariantGroup section */ /* Begin XCBuildConfiguration section */ 607FACED1AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { ALWAYS_SEARCH_USER_PATHS = NO; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_DYNAMIC_NO_PIC = NO; GCC_NO_COMMON_BLOCKS = YES; GCC_OPTIMIZATION_LEVEL = 0; GCC_PREPROCESSOR_DEFINITIONS = ( "DEBUG=1", "$(inherited)", ); GCC_SYMBOLS_PRIVATE_EXTERN = NO; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 10.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 4.0; }; name = Debug; }; 607FACEE1AFB9204008FA782 /* Release */ = { isa = XCBuildConfiguration; buildSettings = { 
ALWAYS_SEARCH_USER_PATHS = NO; CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x"; CLANG_CXX_LIBRARY = "libc++"; CLANG_ENABLE_MODULES = YES; CLANG_ENABLE_OBJC_ARC = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; CLANG_WARN_BOOL_CONVERSION = YES; CLANG_WARN_COMMA = YES; CLANG_WARN_CONSTANT_CONVERSION = YES; CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; CLANG_WARN_EMPTY_BODY = YES; CLANG_WARN_ENUM_CONVERSION = YES; CLANG_WARN_INFINITE_RECURSION = YES; CLANG_WARN_INT_CONVERSION = YES; CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; CLANG_WARN_STRICT_PROTOTYPES = YES; CLANG_WARN_SUSPICIOUS_MOVE = YES; CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = gnu99; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; GCC_WARN_UNDECLARED_SELECTOR = YES; GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; IPHONEOS_DEPLOYMENT_TARGET = 10.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; SWIFT_VERSION = 4.0; VALIDATE_PRODUCT = YES; }; name = Release; }; 607FACF01AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = FB125B7758C7A2765F5C9714 /* Pods-CardParts_Example.debug.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_STYLE = Manual; DEVELOPMENT_TEAM = ""; ENABLE_BITCODE = NO; INFOPLIST_FILE = CardParts/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) 
@executable_path/Frameworks"; MODULE_NAME = ExampleApp; PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.demo.$(PRODUCT_NAME:rfc1034identifier)"; PRODUCT_NAME = "$(TARGET_NAME)"; PROVISIONING_PROFILE_SPECIFIER = ""; SWIFT_VERSION = 5.0; }; name = Debug; }; 607FACF11AFB9204008FA782 /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = 58D33E4374206E3A72623089 /* Pods-CardParts_Example.release.xcconfig */; buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CODE_SIGN_STYLE = Manual; DEVELOPMENT_TEAM = ""; ENABLE_BITCODE = NO; INFOPLIST_FILE = CardParts/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; MODULE_NAME = ExampleApp; PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.demo.$(PRODUCT_NAME:rfc1034identifier)"; PRODUCT_NAME = "$(TARGET_NAME)"; PROVISIONING_PROFILE_SPECIFIER = ""; SWIFT_VERSION = 5.0; }; name = Release; }; 607FACF31AFB9204008FA782 /* Debug */ = { isa = XCBuildConfiguration; baseConfigurationReference = B21177744B384387575CB614 /* Pods-CardParts_Tests.debug.xcconfig */; buildSettings = { CODE_SIGN_STYLE = Manual; DEVELOPMENT_TEAM = ""; GCC_PREPROCESSOR_DEFINITIONS = ( "DEBUG=1", "$(inherited)", ); INFOPLIST_FILE = Tests/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.$(PRODUCT_NAME:rfc1034identifier)"; PRODUCT_NAME = "$(TARGET_NAME)"; PROVISIONING_PROFILE_SPECIFIER = ""; SWIFT_VERSION = 5.0; }; name = Debug; }; 607FACF41AFB9204008FA782 /* Release */ = { isa = XCBuildConfiguration; baseConfigurationReference = EDB3B26F1BF024EF68A0F8CC /* Pods-CardParts_Tests.release.xcconfig */; buildSettings = { CODE_SIGN_STYLE = Manual; DEVELOPMENT_TEAM = ""; INFOPLIST_FILE = Tests/Info.plist; IPHONEOS_DEPLOYMENT_TARGET = 10.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks @loader_path/Frameworks"; 
PRODUCT_BUNDLE_IDENTIFIER = "org.cocoapods.$(PRODUCT_NAME:rfc1034identifier)"; PRODUCT_NAME = "$(TARGET_NAME)"; PROVISIONING_PROFILE_SPECIFIER = ""; SWIFT_VERSION = 5.0; }; name = Release; }; /* End XCBuildConfiguration section */ /* Begin XCConfigurationList section */ 607FACCB1AFB9204008FA782 /* Build configuration list for PBXProject "CardParts" */ = { isa = XCConfigurationList; buildConfigurations = ( 607FACED1AFB9204008FA782 /* Debug */, 607FACEE1AFB9204008FA782 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Debug; }; 607FACEF1AFB9204008FA782 /* Build configuration list for PBXNativeTarget "CardParts_Example" */ = { isa = XCConfigurationList; buildConfigurations = ( 607FACF01AFB9204008FA782 /* Debug */, 607FACF11AFB9204008FA782 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Debug; }; 607FACF21AFB9204008FA782 /* Build configuration list for PBXNativeTarget "CardParts_Tests" */ = { isa = XCConfigurationList; buildConfigurations = ( 607FACF31AFB9204008FA782 /* Debug */, 607FACF41AFB9204008FA782 /* Release */, ); defaultConfigurationIsVisible = 0; defaultConfigurationName = Debug; }; /* End XCConfigurationList section */ }; rootObject = 607FACC81AFB9204008FA782 /* Project object */; }
{ "pile_set_name": "Github" }
ZLib Module for AOLserver 4.5 Release 2.0

This is an AOLserver module that implements a Zlib interface. It allows
compressing/uncompressing Tcl strings, provides gzip file support, and
supports the Ns_Gzip AOLserver routine.

Compiling and Installing:

To compile this driver, you'll need to have the zlib library compiled and
installed. Zlib is installed by default on OS/X and can be easily added for
Linux and FreeBSD (e.g., the zlib-devel package). The makefile will produce
both a dynamic library and an AOLserver module.

libnszlib:

The library includes an implementation of an "ns_zlib" command to provide
compression capabilities to a Tcl interpreter. It can be used with the
"load" command, e.g., "load libnszlib.so". The library depends on the
AOLserver libnsd and Zlib libraries, so they need to be found in the
dynamic linker path as well.

nszlib.so:

The AOLserver module, "nszlib.so", includes calls to both add the "ns_zlib"
command to all interpreters for the virtual server and install an
"Ns_GzipProc" so that the "Ns_Gzip" function can be used to compress content,
e.g., at the end of an ADP request. See the Ns_Gzip(3) man page for details.

ns_zlib command:

ns_zlib compress data
  Returns the compressed string

ns_zlib uncompress data
  Uncompresses a previously compressed string

ns_zlib gzip data
  Returns the compressed string in gzip format; the string can be saved in a
  file with the extension .gz and gzip will be able to uncompress it

ns_zlib gzipfile file
  Compresses the specified file, creating a file with the same name but
  a .gz suffix appended

ns_zlib gunzip file
  Uncompresses a gzip file and returns the text

Authors:
  Vlad Seryakov vlad@crystalballinc.com
  Jim Davidson jgdavidson@aol.com
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2014 Davidlohr Bueso.
 *
 * Per-task cache of recently used VMAs, consulted before walking the
 * mm's VMA tree in find_vma() and friends.
 */
#include <linux/sched/signal.h>
#include <linux/sched/task.h>
#include <linux/mm.h>
#include <linux/vmacache.h>

/*
 * Hash based on the pmd of addr if configured with MMU, which provides a good
 * hit rate for workloads with spatial locality. Otherwise, use pages.
 */
#ifdef CONFIG_MMU
#define VMACACHE_SHIFT	PMD_SHIFT
#else
#define VMACACHE_SHIFT	PAGE_SHIFT
#endif
/* Slot index for @addr; VMACACHE_MASK keeps it within the slot array. */
#define VMACACHE_HASH(addr) ((addr >> VMACACHE_SHIFT) & VMACACHE_MASK)

/*
 * This task may be accessing a foreign mm via (for example)
 * get_user_pages()->find_vma().  The vmacache is task-local and this
 * task's vmacache pertains to a different mm (ie, its own).  There is
 * nothing we can do here.
 *
 * Also handle the case where a kernel thread has adopted this mm via
 * kthread_use_mm(). That kernel thread's vmacache is not applicable to this mm.
 */
static inline bool vmacache_valid_mm(struct mm_struct *mm)
{
	return current->mm == mm && !(current->flags & PF_KTHREAD);
}

/*
 * Record @newvma as the cached VMA for @addr, but only when the current
 * task's cache actually pertains to newvma's mm (see vmacache_valid_mm()).
 */
void vmacache_update(unsigned long addr, struct vm_area_struct *newvma)
{
	if (vmacache_valid_mm(newvma->vm_mm))
		current->vmacache.vmas[VMACACHE_HASH(addr)] = newvma;
}

/*
 * Return true when the current task's cache may be used for lookups in @mm.
 * A sequence-number mismatch means the mm's VMAs changed since the cache
 * was last populated; in that case the cache is flushed and resynced, and
 * the lookup must fall back to the VMA tree.
 */
static bool vmacache_valid(struct mm_struct *mm)
{
	struct task_struct *curr;

	if (!vmacache_valid_mm(mm))
		return false;

	curr = current;
	if (mm->vmacache_seqnum != curr->vmacache.seqnum) {
		/*
		 * First attempt will always be invalid, initialize
		 * the new cache for this task here.
		 */
		curr->vmacache.seqnum = mm->vmacache_seqnum;
		vmacache_flush(curr);
		return false;
	}
	return true;
}

/*
 * Look up the cached VMA containing @addr, or NULL on a cache miss.
 * Probes all VMACACHE_SIZE slots, starting at the hash slot for @addr and
 * wrapping around, so a VMA cached under a neighboring hash can still hit.
 */
struct vm_area_struct *vmacache_find(struct mm_struct *mm, unsigned long addr)
{
	int idx = VMACACHE_HASH(addr);
	int i;

	count_vm_vmacache_event(VMACACHE_FIND_CALLS);

	if (!vmacache_valid(mm))
		return NULL;

	for (i = 0; i < VMACACHE_SIZE; i++) {
		struct vm_area_struct *vma = current->vmacache.vmas[idx];

		if (vma) {
#ifdef CONFIG_DEBUG_VM_VMACACHE
			if (WARN_ON_ONCE(vma->vm_mm != mm))
				break;
#endif
			/* Hit requires vm_start <= addr < vm_end. */
			if (vma->vm_start <= addr && vma->vm_end > addr) {
				count_vm_vmacache_event(VMACACHE_FIND_HITS);
				return vma;
			}
		}
		if (++idx == VMACACHE_SIZE)
			idx = 0;
	}

	return NULL;
}

#ifndef CONFIG_MMU
/*
 * nommu variant: find a cached VMA whose bounds match @start/@end exactly,
 * rather than one merely containing an address.
 */
struct vm_area_struct *vmacache_find_exact(struct mm_struct *mm,
					   unsigned long start,
					   unsigned long end)
{
	int idx = VMACACHE_HASH(start);
	int i;

	count_vm_vmacache_event(VMACACHE_FIND_CALLS);

	if (!vmacache_valid(mm))
		return NULL;

	for (i = 0; i < VMACACHE_SIZE; i++) {
		struct vm_area_struct *vma = current->vmacache.vmas[idx];

		if (vma && vma->vm_start == start && vma->vm_end == end) {
			count_vm_vmacache_event(VMACACHE_FIND_HITS);
			return vma;
		}
		if (++idx == VMACACHE_SIZE)
			idx = 0;
	}

	return NULL;
}
#endif
{ "pile_set_name": "Github" }
# Seed-data bootstrap for the node-odata resource collections.
resources = require('node-odata').resources

# Load an array of plain objects from `path` and persist each one through the
# given odata resource model.  Saves are fire-and-forget (no completion
# callback), matching the original behavior.
initData = (model, path) ->
  require(path).forEach (item) ->
    data = new model(item)
    data.save()
  console.log "data init: #{path} import successful."

module.exports =
  # Import seed data only on first run, i.e. when no user documents exist;
  # otherwise leave the existing data untouched.
  import: ->
    resources.user.find().exec (err, users) ->
      # Fix: bail out on query failure instead of crashing on `users.length`
      # when `users` is undefined.
      if err
        console.error "data init failed: #{err}"
        return
      unless users.length
        initData(resources.user, "./system/user.json")
        initData(resources.article, "./article/article.json")
        initData(resources.category, "./article/category.json")
        initData(resources.board, "./board/board.json")
        initData(resources.gallery, "./photo/gallery.json")
{ "pile_set_name": "Github" }
// Copyright (C) 2008 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.caja.reporting;

import junit.framework.TestCase;

import java.util.List;
import java.util.ArrayList;
import java.io.IOException;

/**
 * Unit tests (JUnit 3 style) for {@code AbstractMessageQueue}: adding
 * messages with a default or overridden level, and level-based queries.
 *
 * @author ihab.awad@gmail.com
 */
public class AbstractMessageQueueTest extends TestCase {

  /**
   * A simple message type to use for testing.
   * Formats as {@code "<name> <part0>"}, i.e. its name followed by a space
   * and the first (and only) message part.
   */
  private static class TestMessageType implements MessageTypeInt {
    private final MessageLevel level;
    private final String name;

    public TestMessageType(MessageLevel level, String name) {
      this.level = level;
      this.name = name;
    }

    public void format(MessagePart[] parts, MessageContext context,
                       Appendable out)
        throws IOException {
      out.append(name);
      out.append(" ");
      parts[0].format(context, out);
    }

    public MessageLevel getLevel() { return level; }

    // All test messages take exactly one part; format() reads parts[0].
    public int getParamCount() { return 1; }

    public String name() { return name; }
  }

  /** Message type whose default level is ERROR. */
  private static final MessageTypeInt TEST_ERROR =
      new TestMessageType(MessageLevel.ERROR, "test error");
  /** Message type whose default level is WARNING. */
  private static final MessageTypeInt TEST_WARNING =
      new TestMessageType(MessageLevel.WARNING, "test warning");

  // Fresh fixtures per test; setUp() runs before each test method.
  private MessageContext mc;
  private MessageQueue mq;

  @Override
  public void setUp() {
    mc = new MessageContext();
    // Minimal concrete AbstractMessageQueue backed by a plain list.
    mq = new AbstractMessageQueue() {
      private List<Message> messages = new ArrayList<Message>();
      public List<Message> getMessages() { return messages; }
    };
  }

  @Override
  public void tearDown() {
    mc = null;
    mq = null;
  }

  /**
   * Adding a message without an explicit level uses the message type's own
   * level (WARNING here) and formats name + part.
   */
  public final void testAddMessageSimple() {
    assertEquals(0, mq.getMessages().size());
    mq.addMessage(TEST_WARNING, MessagePart.Factory.valueOf("test"));
    assertEquals(1, mq.getMessages().size());
    assertEquals(
        MessageLevel.WARNING,
        mq.getMessages().get(0).getMessageLevel());
    assertEquals(
        "test warning test",
        mq.getMessages().get(0).format(mc));
  }

  /**
   * An explicitly supplied level (ERROR) overrides the type's default level,
   * while the formatted text still comes from the type.
   */
  public final void testAddMessageWithLevel() {
    assertEquals(0, mq.getMessages().size());
    mq.addMessage(
        TEST_WARNING,
        MessageLevel.ERROR,
        MessagePart.Factory.valueOf("test"));
    assertEquals(1, mq.getMessages().size());
    assertEquals(
        MessageLevel.ERROR,
        mq.getMessages().get(0).getMessageLevel());
    assertEquals(
        "test warning test",
        mq.getMessages().get(0).format(mc));
  }

  /**
   * hasMessageAtLevel(ERROR) is false while only a WARNING is queued, and
   * becomes true once an ERROR-level message is added.
   */
  public final void testHasMessageAtLevel() {
    assertEquals(0, mq.getMessages().size());
    mq.addMessage(TEST_WARNING, MessagePart.Factory.valueOf("test"));
    assertEquals(1, mq.getMessages().size());
    assertFalse(mq.hasMessageAtLevel(MessageLevel.ERROR));
    mq.addMessage(TEST_ERROR, MessagePart.Factory.valueOf("test"));
    assertEquals(2, mq.getMessages().size());
    assertTrue(mq.hasMessageAtLevel(MessageLevel.ERROR));
  }
}
{ "pile_set_name": "Github" }
/*
 * mmap() flag: request that the mapping be placed in the low 2 GiB of the
 * process address space.  The value 0x40 matches the Linux x86-64 ABI
 * definition of MAP_32BIT.
 * NOTE(review): this flag is only meaningful on 64-bit x86 kernels —
 * confirm the intended target architecture for this definition.
 */
#define MAP_32BIT 0x40
{ "pile_set_name": "Github" }
/* SPDX-License-Identifier: GPL-2.0 */

/*
 * Tracepoints for ALSA PCM hw_params constraint refinement.  Each event
 * records one constraint rule's effect on a single hw parameter: the
 * parameter's value set before (prev) and after (curr) the rule ran.
 */
#undef TRACE_SYSTEM
#define TRACE_SYSTEM snd_pcm

#if !defined(_PCM_PARAMS_TRACE_H) || defined(TRACE_HEADER_MULTI_READ)
#define _PCM_PARAMS_TRACE_H

#include <linux/tracepoint.h>

/*
 * Map each SNDRV_PCM_HW_PARAM_* constant to its short name so that
 * __print_symbolic() can render the parameter type in trace output.
 */
#define HW_PARAM_ENTRY(param) {SNDRV_PCM_HW_PARAM_##param, #param}
#define hw_param_labels			\
	HW_PARAM_ENTRY(ACCESS),		\
	HW_PARAM_ENTRY(FORMAT),		\
	HW_PARAM_ENTRY(SUBFORMAT),	\
	HW_PARAM_ENTRY(SAMPLE_BITS),	\
	HW_PARAM_ENTRY(FRAME_BITS),	\
	HW_PARAM_ENTRY(CHANNELS),	\
	HW_PARAM_ENTRY(RATE),		\
	HW_PARAM_ENTRY(PERIOD_TIME),	\
	HW_PARAM_ENTRY(PERIOD_SIZE),	\
	HW_PARAM_ENTRY(PERIOD_BYTES),	\
	HW_PARAM_ENTRY(PERIODS),	\
	HW_PARAM_ENTRY(BUFFER_TIME),	\
	HW_PARAM_ENTRY(BUFFER_SIZE),	\
	HW_PARAM_ENTRY(BUFFER_BYTES),	\
	HW_PARAM_ENTRY(TICK_TIME)

/*
 * Refinement of a mask-typed parameter (e.g. ACCESS/FORMAT): snapshots the
 * 256-bit before/after masks.  The printk shows only the low 128 bits of
 * each mask, most-significant word first.
 */
TRACE_EVENT(hw_mask_param,
	TP_PROTO(struct snd_pcm_substream *substream, snd_pcm_hw_param_t type, int index, const struct snd_mask *prev, const struct snd_mask *curr),
	TP_ARGS(substream, type, index, prev, curr),
	TP_STRUCT__entry(
		__field(int, card)
		__field(int, device)
		__field(int, subdevice)
		__field(int, direction)
		__field(snd_pcm_hw_param_t, type)
		__field(int, index)
		__field(int, total)
		__array(__u32, prev_bits, 8)
		__array(__u32, curr_bits, 8)
	),
	TP_fast_assign(
		__entry->card = substream->pcm->card->number;
		__entry->device = substream->pcm->device;
		__entry->subdevice = substream->number;
		__entry->direction = substream->stream;
		__entry->type = type;
		__entry->index = index;
		__entry->total = substream->runtime->hw_constraints.rules_num;
		memcpy(__entry->prev_bits, prev->bits, sizeof(__u32) * 8);
		memcpy(__entry->curr_bits, curr->bits, sizeof(__u32) * 8);
	),
	TP_printk("pcmC%dD%d%s:%d %03d/%03d %s %08x%08x%08x%08x %08x%08x%08x%08x",
		  __entry->card,
		  __entry->device,
		  __entry->direction ? "c" : "p",
		  __entry->subdevice,
		  __entry->index,
		  __entry->total,
		  __print_symbolic(__entry->type, hw_param_labels),
		  __entry->prev_bits[3], __entry->prev_bits[2],
		  __entry->prev_bits[1], __entry->prev_bits[0],
		  __entry->curr_bits[3], __entry->curr_bits[2],
		  __entry->curr_bits[1], __entry->curr_bits[0]
	)
);

/*
 * Refinement of an interval-typed parameter (e.g. RATE/CHANNELS): snapshots
 * the before/after min/max bounds plus the open-endpoint, integer-only and
 * empty flags.  The printk renders each interval as
 * "<empty> <integer> [min max]" with '(' / ')' for open endpoints.
 */
TRACE_EVENT(hw_interval_param,
	TP_PROTO(struct snd_pcm_substream *substream, snd_pcm_hw_param_t type, int index, const struct snd_interval *prev, const struct snd_interval *curr),
	TP_ARGS(substream, type, index, prev, curr),
	TP_STRUCT__entry(
		__field(int, card)
		__field(int, device)
		__field(int, subdevice)
		__field(int, direction)
		__field(snd_pcm_hw_param_t, type)
		__field(int, index)
		__field(int, total)
		__field(unsigned int, prev_min)
		__field(unsigned int, prev_max)
		__field(unsigned int, prev_openmin)
		__field(unsigned int, prev_openmax)
		__field(unsigned int, prev_integer)
		__field(unsigned int, prev_empty)
		__field(unsigned int, curr_min)
		__field(unsigned int, curr_max)
		__field(unsigned int, curr_openmin)
		__field(unsigned int, curr_openmax)
		__field(unsigned int, curr_integer)
		__field(unsigned int, curr_empty)
	),
	TP_fast_assign(
		__entry->card = substream->pcm->card->number;
		__entry->device = substream->pcm->device;
		__entry->subdevice = substream->number;
		__entry->direction = substream->stream;
		__entry->type = type;
		__entry->index = index;
		__entry->total = substream->runtime->hw_constraints.rules_num;
		__entry->prev_min = prev->min;
		__entry->prev_max = prev->max;
		__entry->prev_openmin = prev->openmin;
		__entry->prev_openmax = prev->openmax;
		__entry->prev_integer = prev->integer;
		__entry->prev_empty = prev->empty;
		__entry->curr_min = curr->min;
		__entry->curr_max = curr->max;
		__entry->curr_openmin = curr->openmin;
		__entry->curr_openmax = curr->openmax;
		__entry->curr_integer = curr->integer;
		__entry->curr_empty = curr->empty;
	),
	TP_printk("pcmC%dD%d%s:%d %03d/%03d %s %d %d %s%u %u%s %d %d %s%u %u%s",
		  __entry->card,
		  __entry->device,
		  __entry->direction ? "c" : "p",
		  __entry->subdevice,
		  __entry->index,
		  __entry->total,
		  __print_symbolic(__entry->type, hw_param_labels),
		  __entry->prev_empty,
		  __entry->prev_integer,
		  __entry->prev_openmin ? "(" : "[",
		  __entry->prev_min,
		  __entry->prev_max,
		  __entry->prev_openmax ? ")" : "]",
		  __entry->curr_empty,
		  __entry->curr_integer,
		  __entry->curr_openmin ? "(" : "[",
		  __entry->curr_min,
		  __entry->curr_max,
		  __entry->curr_openmax ? ")" : "]"
	)
);

#endif /* _PCM_PARAMS_TRACE_H */

/* This part must be outside protection */
#undef TRACE_INCLUDE_PATH
#define TRACE_INCLUDE_PATH .
#undef TRACE_INCLUDE_FILE
#define TRACE_INCLUDE_FILE pcm_param_trace
#include <trace/define_trace.h>
{ "pile_set_name": "Github" }
// Jest specs for the flatten module. Each case pairs an input array with the
// expected result after removing exactly one level of nesting.
const flatten = require('.')

// [test name, input, expected] — names and fixtures identical to the originals.
const cases = [
  ['Test 1', [], []],
  ['Test 2', [1, 2, 3], [1, 2, 3]],
  [
    'Test 3',
    [[1, 2, 3], ['a', 'b', 'c'], [1, 2, 3]],
    [1, 2, 3, 'a', 'b', 'c', 1, 2, 3],
  ],
  [
    'Test 4',
    [[3, 4, 5], [[9, 9, 9]], ['a,b,c']],
    [3, 4, 5, [9, 9, 9], 'a,b,c'],
  ],
  [
    'Test 5',
    [[[3], [4], [5]], [9], [9], [8], [[1, 2, 3]]],
    [[3], [4], [5], 9, 9, 8, [1, 2, 3]],
  ],
]

cases.forEach(([name, input, expected]) => {
  test(name, () => {
    expect(flatten(input)).toEqual(expected)
  })
})
{ "pile_set_name": "Github" }
var _ = require('underscore'),
    select = require('CSSselect'),
    utils = require('../utils'),
    domEach = utils.domEach,
    isTag = utils.isTag;

// Get the descendants of each element in the current set of matched elements,
// filtered by a selector. Only element (tag) children are offered to the
// selector engine; matching semantics are delegated to CSSselect.
var find = exports.find = function(selector) {
  var elems = _.reduce(this, function(memo, elem) {
    return memo.concat(_.filter(elem.children, isTag));
  }, []);

  return this._make(select(selector, elems));
};

// Get the parent of each element in the current set of matched elements,
// optionally filtered by a selector.
var parent = exports.parent = function(selector) {
  var set = [];

  domEach(this, function(idx, elem) {
    var parentElem = elem.parent;
    // De-duplicate: several matched elements may share the same parent.
    if (parentElem && set.indexOf(parentElem) < 0) {
      set.push(parentElem);
    }
  });

  // Filter only when a selector argument was passed at all (checked via
  // arguments.length rather than truthiness).
  if (arguments.length) {
    set = filter.call(set, selector, this);
  }

  return this._make(set);
};

// Get the ancestors of each element in the current set of matched elements,
// optionally filtered by a selector. traverseParents walks upward from the
// immediate parent, so each element contributes its nearest ancestors first.
var parents = exports.parents = function(selector) {
  var parentNodes = [];

  // When multiple DOM elements are in the original set, the resulting set will
  // be in *reverse* order of the original elements as well, with duplicates
  // removed.
  this.toArray().reverse().forEach(function(elem) {
    traverseParents(this, elem.parent, selector, Infinity)
      .forEach(function(node) {
        if (parentNodes.indexOf(node) === -1) {
          parentNodes.push(node);
        }
      }
    );
  }, this);

  return this._make(parentNodes);
};

// For each element in the set, get the first element that matches the selector
// by testing the element itself and traversing up through its ancestors in the
// DOM tree.
var closest = exports.closest = function(selector) {
  var set = [];

  // Without a selector there is nothing to match against.
  if (!selector) {
    return this._make(set);
  }

  domEach(this, function(idx, elem) {
    // Limit of 1: only the nearest matching ancestor-or-self is wanted.
    var closestElem = traverseParents(this, elem, selector, 1)[0];

    // Do not add duplicate elements to the set
    if (closestElem && set.indexOf(closestElem) < 0) {
      set.push(closestElem);
    }
  }.bind(this));

  return this._make(set);
};

// Get the immediately following sibling element of each element in the set,
// optionally filtered by a selector.
var next = exports.next = function(selector) {
  if (!this[0]) { return this; }
  var elems = [];

  _.forEach(this, function(elem) {
    // Walk forward past non-element nodes (text, comments) to the first tag.
    while ((elem = elem.next)) {
      if (isTag(elem)) {
        elems.push(elem);
        return;
      }
    }
  });

  return selector ?
    filter.call(elems, selector, this) :
    this._make(elems);
};

// Get all following sibling elements of each element in the set, with
// duplicates removed, optionally filtered by a selector.
var nextAll = exports.nextAll = function(selector) {
  if (!this[0]) { return this; }
  var elems = [];

  _.forEach(this, function(elem) {
    while ((elem = elem.next)) {
      if (isTag(elem) && elems.indexOf(elem) === -1) {
        elems.push(elem);
      }
    }
  });

  return selector ?
    filter.call(elems, selector, this) :
    this._make(elems);
};

// Get all following siblings of each element up to, but not including, the
// boundary described by `selector` (a selector string, a cheerio selection, or
// a raw node). The result may optionally be filtered by `filterSelector`.
var nextUntil = exports.nextUntil = function(selector, filterSelector) {
  if (!this[0]) { return this; }
  var elems = [],
      untilNode,
      untilNodes;

  // Resolve the "until" boundary from the three accepted selector forms.
  if (typeof selector === 'string') {
    untilNode = select(selector, this.nextAll().toArray())[0];
  } else if (selector && selector.cheerio) {
    untilNodes = selector.toArray();
  } else if (selector) {
    untilNode = selector;
  }

  _.forEach(this, function(elem) {
    while ((elem = elem.next)) {
      // Keep collecting while the boundary has not been reached (or when no
      // boundary was resolved at all).
      if ((untilNode && elem !== untilNode) ||
          (untilNodes && untilNodes.indexOf(elem) === -1) ||
          (!untilNode && !untilNodes)) {
        if (isTag(elem) && elems.indexOf(elem) === -1) {
          elems.push(elem);
        }
      } else {
        break;
      }
    }
  });

  return filterSelector ?
    filter.call(elems, filterSelector, this) :
    this._make(elems);
};

// Get the immediately preceding sibling element of each element in the set,
// optionally filtered by a selector.
var prev = exports.prev = function(selector) {
  if (!this[0]) { return this; }
  var elems = [];

  _.forEach(this, function(elem) {
    // Walk backward past non-element nodes to the first tag.
    while ((elem = elem.prev)) {
      if (isTag(elem)) {
        elems.push(elem);
        return;
      }
    }
  });

  return selector ?
    filter.call(elems, selector, this) :
    this._make(elems);
};

// Get all preceding sibling elements of each element in the set, with
// duplicates removed, optionally filtered by a selector.
var prevAll = exports.prevAll = function(selector) {
  if (!this[0]) { return this; }
  var elems = [];

  _.forEach(this, function(elem) {
    while ((elem = elem.prev)) {
      if (isTag(elem) && elems.indexOf(elem) === -1) {
        elems.push(elem);
      }
    }
  });

  return selector ?
    filter.call(elems, selector, this) :
    this._make(elems);
};

// Get all preceding siblings of each element up to, but not including, the
// boundary described by `selector` (same forms as nextUntil), optionally
// filtered by `filterSelector`.
var prevUntil = exports.prevUntil = function(selector, filterSelector) {
  if (!this[0]) { return this; }
  var elems = [],
      untilNode,
      untilNodes;

  // Resolve the "until" boundary from the three accepted selector forms.
  if (typeof selector === 'string') {
    untilNode = select(selector, this.prevAll().toArray())[0];
  } else if (selector && selector.cheerio) {
    untilNodes = selector.toArray();
  } else if (selector) {
    untilNode = selector;
  }

  _.forEach(this, function(elem) {
    while ((elem = elem.prev)) {
      if ((untilNode && elem !== untilNode) ||
          (untilNodes && untilNodes.indexOf(elem) === -1) ||
          (!untilNode && !untilNodes)) {
        if (isTag(elem) && elems.indexOf(elem) === -1) {
          elems.push(elem);
        }
      } else {
        break;
      }
    }
  });

  return filterSelector ?
    filter.call(elems, filterSelector, this) :
    this._make(elems);
};

// Get the siblings of each element in the set, excluding the element itself,
// optionally filtered by a selector.
var siblings = exports.siblings = function(selector) {
  var parent = this.parent();

  // NOTE(review): this.parent() returns a cheerio selection, which is truthy
  // even when empty, so the siblingsAndMe() branch appears unreachable —
  // confirm whether `parent.length ? ... : ...` was intended.
  var elems = _.filter(
    parent ? parent.children() : this.siblingsAndMe(),
    function(elem) { return isTag(elem) && !this.is(elem); },
    this
  );

  if (selector !== undefined) {
    return filter.call(elems, selector, this);
  } else {
    return this._make(elems);
  }
};

// Get the element children of each element in the set. Accepts a selector, a
// numeric index (returning the single child at that position), or nothing.
var children = exports.children = function(selector) {
  var elems = _.reduce(this, function(memo, elem) {
    return memo.concat(_.filter(elem.children, isTag));
  }, []);

  if (selector === undefined) return this._make(elems);
  else if (_.isNumber(selector)) return this._make(elems[selector]);

  return filter.call(elems, selector, this);
};

// Get the children of each element in the set, including non-element nodes
// such as text and comments (no tag filtering, unlike children()).
var contents = exports.contents = function() {
  return this._make(_.reduce(this, function(all, elem) {
    all.push.apply(all, elem.children);
    return all;
  }, []));
};

// Iterate over the set, calling `fn` with (index, element) and `this` bound to
// a cheerio wrapper of the current element. Returning false from `fn` stops
// the iteration early.
var each = exports.each = function(fn) {
  var i = 0, len = this.length;
  while (i < len && fn.call(this._make(this[i]), i, this[i]) !== false) ++i;
  return this;
};

// Map each element through `fn` (called with (index, element) and `this` bound
// to the raw element). null/undefined results are skipped; array results are
// flattened one level via concat.
var map = exports.map = function(fn) {
  return this._make(_.reduce(this, function(memo, el, i) {
    var val = fn.call(el, i, el);
    return val == null ? memo : memo.concat(val);
  }, []));
};

// Reduce the set to elements matching `match`, which may be a selector string,
// a predicate function, a cheerio selection, or a raw node. `container`
// (optional) supplies the _make used to wrap the result; defaults to `this`.
var filter = exports.filter = function(match, container) {
  container = container || this;
  var make = _.bind(container._make, container);
  var filterFn;

  if (_.isString(match)) {
    filterFn = select.compile(match);
  } else if (_.isFunction(match)) {
    filterFn = function(el, i) {
      return match.call(make(el), i, el);
    };
  } else if (match.cheerio) {
    filterFn = match.is.bind(match);
  } else {
    // Fall back to identity comparison against a raw node.
    filterFn = function(el) {
      return match === el;
    };
  }

  return make(_.filter(this, filterFn));
};

// Get the first element of the set (no-op when the set has 0 or 1 elements).
var first = exports.first = function() {
  return this.length > 1 ? this._make(this[0]) : this;
};

// Get the last element of the set (no-op when the set has 0 or 1 elements).
var last = exports.last = function() {
  return this.length > 1 ? this._make(this[this.length - 1]) : this;
};

// Reduce the set of matched elements to the one at the specified index.
// Accepts a negative index, which counts back from the end of the set.
var eq = exports.eq = function(i) {
  i = +i;

  // Shortcut: index 0 of a set with at most one element is the set itself.
  if (i === 0 && this.length <= 1) {
    return this;
  }

  if (i < 0) {
    i += this.length;
  }

  var node = this[i];
  return node ? this._make(node) : this._make([]);
};

// Extract a subset of the matched elements, with the same argument semantics
// as Array.prototype.slice.
var slice = exports.slice = function() {
  var subset = Array.prototype.slice.apply(this, arguments);
  return this._make(subset);
};

// Walk up the ancestor chain starting at `elem`, collecting up to `limit`
// nodes that satisfy `selector` (or every ancestor when no selector is given).
function traverseParents(self, elem, selector, limit) {
  var matched = [];
  for (; elem && matched.length < limit; elem = elem.parent) {
    if (!selector || filter.call([elem], selector, self).length) {
      matched.push(elem);
    }
  }
  return matched;
}

// End the most recent filtering operation in the current chain and return the
// set of matched elements to its previous state.
var end = exports.end = function() {
  if (this.prevObject) {
    return this.prevObject;
  }
  return this._make([]);
};
{ "pile_set_name": "Github" }
# To learn about Buck see [Docs](https://buckbuild.com/). # To run your application with Buck: # - install Buck # - `npm start` - to start the packager # - `cd android` # - `keytool -genkey -v -keystore keystores/debug.keystore -storepass android -alias androiddebugkey -keypass android -dname "CN=Android Debug,O=Android,C=US"` # - `./gradlew :app:copyDownloadableDepsToLibs` - make all Gradle compile dependencies available to Buck # - `buck install -r android/app` - compile, install and run application # lib_deps = [] for jarfile in glob(['libs/*.jar']): name = 'jars__' + jarfile[jarfile.rindex('/') + 1: jarfile.rindex('.jar')] lib_deps.append(':' + name) prebuilt_jar( name = name, binary_jar = jarfile, ) for aarfile in glob(['libs/*.aar']): name = 'aars__' + aarfile[aarfile.rindex('/') + 1: aarfile.rindex('.aar')] lib_deps.append(':' + name) android_prebuilt_aar( name = name, aar = aarfile, ) android_library( name = "all-libs", exported_deps = lib_deps, ) android_library( name = "app-code", srcs = glob([ "src/main/java/**/*.java", ]), deps = [ ":all-libs", ":build_config", ":res", ], ) android_build_config( name = "build_config", package = "com.tutorialproject", ) android_resource( name = "res", package = "com.tutorialproject", res = "src/main/res", ) android_binary( name = "app", keystore = "//android/keystores:debug", manifest = "src/main/AndroidManifest.xml", package_type = "debug", deps = [ ":app-code", ], )
{ "pile_set_name": "Github" }
<component name="libraryTable"> <library name="Maven: com.sun.jersey:jersey-core:1.19.1"> <CLASSES> <root url="jar://$MAVEN_REPOSITORY$/com/sun/jersey/jersey-core/1.19.1/jersey-core-1.19.1.jar!/" /> </CLASSES> <JAVADOC> <root url="jar://$MAVEN_REPOSITORY$/com/sun/jersey/jersey-core/1.19.1/jersey-core-1.19.1-javadoc.jar!/" /> </JAVADOC> <SOURCES> <root url="jar://$MAVEN_REPOSITORY$/com/sun/jersey/jersey-core/1.19.1/jersey-core-1.19.1-sources.jar!/" /> </SOURCES> </library> </component>
{ "pile_set_name": "Github" }
/* * This file is generated by jOOQ. */ package stroom.meta.impl.db.jooq.tables; import java.util.Arrays; import java.util.List; import javax.annotation.processing.Generated; import org.jooq.Field; import org.jooq.ForeignKey; import org.jooq.Identity; import org.jooq.Index; import org.jooq.Name; import org.jooq.Record; import org.jooq.Row5; import org.jooq.Schema; import org.jooq.Table; import org.jooq.TableField; import org.jooq.UniqueKey; import org.jooq.impl.DSL; import org.jooq.impl.TableImpl; import stroom.meta.impl.db.jooq.Indexes; import stroom.meta.impl.db.jooq.Keys; import stroom.meta.impl.db.jooq.Stroom; import stroom.meta.impl.db.jooq.tables.records.MetaValRecord; /** * This class is generated by jOOQ. */ @Generated( value = { "http://www.jooq.org", "jOOQ version:3.12.3" }, comments = "This class is generated by jOOQ" ) @SuppressWarnings({ "all", "unchecked", "rawtypes" }) public class MetaVal extends TableImpl<MetaValRecord> { private static final long serialVersionUID = 452326025; /** * The reference instance of <code>stroom.meta_val</code> */ public static final MetaVal META_VAL = new MetaVal(); /** * The class holding records for this type */ @Override public Class<MetaValRecord> getRecordType() { return MetaValRecord.class; } /** * The column <code>stroom.meta_val.id</code>. */ public final TableField<MetaValRecord, Long> ID = createField(DSL.name("id"), org.jooq.impl.SQLDataType.BIGINT.nullable(false).identity(true), this, ""); /** * The column <code>stroom.meta_val.create_time</code>. */ public final TableField<MetaValRecord, Long> CREATE_TIME = createField(DSL.name("create_time"), org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, ""); /** * The column <code>stroom.meta_val.meta_id</code>. */ public final TableField<MetaValRecord, Long> META_ID = createField(DSL.name("meta_id"), org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, ""); /** * The column <code>stroom.meta_val.meta_key_id</code>. 
*/ public final TableField<MetaValRecord, Integer> META_KEY_ID = createField(DSL.name("meta_key_id"), org.jooq.impl.SQLDataType.INTEGER.nullable(false), this, ""); /** * The column <code>stroom.meta_val.val</code>. */ public final TableField<MetaValRecord, Long> VAL = createField(DSL.name("val"), org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, ""); /** * Create a <code>stroom.meta_val</code> table reference */ public MetaVal() { this(DSL.name("meta_val"), null); } /** * Create an aliased <code>stroom.meta_val</code> table reference */ public MetaVal(String alias) { this(DSL.name(alias), META_VAL); } /** * Create an aliased <code>stroom.meta_val</code> table reference */ public MetaVal(Name alias) { this(alias, META_VAL); } private MetaVal(Name alias, Table<MetaValRecord> aliased) { this(alias, aliased, null); } private MetaVal(Name alias, Table<MetaValRecord> aliased, Field<?>[] parameters) { super(alias, null, aliased, parameters, DSL.comment("")); } public <O extends Record> MetaVal(Table<O> child, ForeignKey<O, MetaValRecord> key) { super(child, key, META_VAL); } @Override public Schema getSchema() { return Stroom.STROOM; } @Override public List<Index> getIndexes() { return Arrays.<Index>asList(Indexes.META_VAL_META_VAL_CREATE_TIME, Indexes.META_VAL_META_VAL_META_ID, Indexes.META_VAL_PRIMARY); } @Override public Identity<MetaValRecord, Long> getIdentity() { return Keys.IDENTITY_META_VAL; } @Override public UniqueKey<MetaValRecord> getPrimaryKey() { return Keys.KEY_META_VAL_PRIMARY; } @Override public List<UniqueKey<MetaValRecord>> getKeys() { return Arrays.<UniqueKey<MetaValRecord>>asList(Keys.KEY_META_VAL_PRIMARY); } @Override public MetaVal as(String alias) { return new MetaVal(DSL.name(alias), this); } @Override public MetaVal as(Name alias) { return new MetaVal(alias, this); } /** * Rename this table */ @Override public MetaVal rename(String name) { return new MetaVal(DSL.name(name), null); } /** * Rename this table */ @Override public MetaVal 
rename(Name name) { return new MetaVal(name, null); } // ------------------------------------------------------------------------- // Row5 type methods // ------------------------------------------------------------------------- @Override public Row5<Long, Long, Long, Integer, Long> fieldsRow() { return (Row5) super.fieldsRow(); } }
{ "pile_set_name": "Github" }
[広告テキスト] Android用N64エミュレータ。 Mupen64Plus AEは現在ベータテスト中です。 [アプリの説明] このアプリケーションはROMファイルが付属していません! - あなたがそのROMを再生する前に、あなたはゲームを所有する必要があります! - 多くのパブリックドメインのゲーム/デモはwww.pdroms.deでご利用いただけます 99¢のバージョンはあなたが寄付したい場合のためのものです。 - このアプリは作者のウェブサイトから無料でダウンロードできます (広告なし!) - 寄付をご検討ください。 - 寄付は多くのオープンソースプロジェクトと開発者たちをサポートします Mupen64Plus、Android Edition (AE)はAndroid用N64エミュレーターです。現在ベータテスト中、すなわち開発段階であることを意味し、数多くのグラフィック、ラグやオーディオの不具合が残っています。www.paulscode.comを訪れてバグ修正やFAQはもちろん、開発とテストに参加してください。 ゲームのスクリーンショットに関する潜在的な著作権の問題を回避するために、私はスクリーンショットにはパブリックドメインのゲームやデモのみを使用することに決めました。アプリが再び市場から削除されるような場合は、paulscode.com上で将来のアップデートを無料で取得することができるようになります。 このアプリはGNU GPL v3でライセンスされ、すべてのソースコードは、GITリポジトリ https://github.com/mupen64plus-ae/mupen64plus-ae から入手できます。リポジトリへの書き込み権限をご希望の開発者は、サポートフォーラムで作者(Paul)に連絡を取ってください。  もし、プロジェクトのブランチに補助が必要な場合は、作者まで知らせていただければ、セットアップのお手伝いをします。 Xperia PLAYに最適化 - アナログ入力は、タッチパッドを介して可能です! パーミッション Internet - クラッシュレポートを送信するために使用されます。決してウィルス、スパイウェアやいかなる種類の悪意のあるソフトウェアをダウンロードすることはありません。いかなる個人情報も収集することはありません。通常のデータ・レートが適用されます。 Write External Storage - 記憶装置は、セーブデータとカスタム入力プロファイルを書き込むために使用されます Vibrate - デバイスが対応している場合、振動パックのエミュレーションとボタンのフィードバックに使用されます。 はじめに - 任意の内部ストレージ/SDカードにロムを転送してください。 - アプリからそれらを参照できます。 - ほとんどのデバイス上の標準のSDカードのディレクトリは、/mnt/sdcard です。 - ロムは.n64、.v64または.z64形式、必要に応じてzipファイルである必要があります。 ヘルプ、FAQ、バグ修正、ゲームのバックアップ手順などは: - http://www.paulscode.com
{ "pile_set_name": "Github" }
/* NSIS Modern User Interface Finish page (implemented using nsDialogs) */ ;-------------------------------- ;Page interface settings and variables !macro MUI_FINISHPAGE_INTERFACE !ifndef MUI_FINISHPAGE_INTERFACE !define MUI_FINISHPAGE_INTERFACE Var mui.FinishPage Var mui.FinishPage.Image Var mui.FinishPage.Image.Bitmap Var mui.FinishPage.Title Var mui.FinishPage.Title.Font Var mui.FinishPage.Text !endif !ifndef MUI_FINISHPAGE_NOREBOOTSUPPORT !ifndef MUI_FINISHPAGE_RETURNVALUE_VARIABLES !define MUI_FINISHPAGE_RETURNVALUE_VARIABLES Var mui.FinishPage.ReturnValue !endif !else ifdef MUI_FINISHPAGE_RUN | MUI_FINISHPAGE_SHOWREADME !ifndef MUI_FINISHPAGE_RETURNVALUE_VARIABLES !define MUI_FINISHPAGE_RETURNVALUE_VARIABLES Var mui.FinishPage.ReturnValue !endif !endif !ifdef MUI_FINISHPAGE_CANCEL_ENABLED !ifndef MUI_FINISHPAGE_CANCEL_ENABLED_VARIABLES !define MUI_FINISHPAGE_CANCEL_ENABLED_VARIABLES Var mui.FinishPage.DisableAbortWarning !endif !endif !ifdef MUI_FINISHPAGE_RUN !ifndef MUI_FINISHPAGE_RUN_VARIABLES !define MUI_FINISHPAGE_RUN_VARIABLES Var mui.FinishPage.Run !endif !endif !ifdef MUI_FINISHPAGE_SHOWREADME !ifndef MUI_FINISHPAGE_SHOREADME_VARAIBLES !define MUI_FINISHPAGE_SHOREADME_VARAIBLES Var mui.FinishPage.ShowReadme !endif !endif !ifdef MUI_FINISHPAGE_LINK !ifndef MUI_FINISHPAGE_LINK_VARIABLES !define MUI_FINISHPAGE_LINK_VARIABLES Var mui.FinishPage.Link !endif !endif !ifndef MUI_FINISHPAGE_NOREBOOTSUPPORT !ifndef MUI_FINISHPAGE_REBOOT_VARIABLES !define MUI_FINISHPAGE_REBOOT_VARIABLES Var mui.FinishPage.RebootNow Var mui.FinishPage.RebootLater !endif !endif !insertmacro MUI_DEFAULT MUI_${MUI_PAGE_UNINSTALLER_PREFIX}WELCOMEFINISHPAGE_BITMAP "${NSISDIR}\Contrib\Graphics\Wizard\win.bmp" !macroend ;-------------------------------- ;Interface initialization !macro MUI_FINISHPAGE_GUIINIT !ifndef MUI_${MUI_PAGE_UNINSTALLER_PREFIX}WELCOMEFINISHPAGE_GUINIT !define MUI_${MUI_PAGE_UNINSTALLER_PREFIX}WELCOMEFINISHPAGE_GUINIT Function 
${MUI_PAGE_UNINSTALLER_FUNCPREFIX}mui.FinishPage.GUIInit InitPluginsDir File "/oname=$PLUGINSDIR\modern-wizard.bmp" "${MUI_${MUI_PAGE_UNINSTALLER_PREFIX}WELCOMEFINISHPAGE_BITMAP}" !ifdef MUI_${MUI_PAGE_UNINSTALLER_PREFIX}PAGE_FUNCTION_GUIINIT Call "${MUI_${MUI_PAGE_UNINSTALLER_PREFIX}PAGE_FUNCTION_GUIINIT}" !endif !ifndef MUI_${MUI_PAGE_UNINSTALLER_PREFIX}FINISHPAGE_NOAUTOCLOSE SetAutoClose true !endif FunctionEnd !insertmacro MUI_SET MUI_${MUI_PAGE_UNINSTALLER_PREFIX}PAGE_FUNCTION_GUIINIT ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}mui.FinishPage.GUIInit !endif !macroend ;-------------------------------- ;Abort warning !macro MUI_FINISHPAGE_ABORTWARNING !ifdef MUI_FINISHPAGE_CANCEL_ENABLED !ifndef MUI_${MUI_PAGE_UNINSTALLER_PREFIX}FINISHPAGE_ABORTWARNING !define MUI_${MUI_PAGE_UNINSTALLER_PREFIX}FINISHPAGE_ABORTWARNING Function ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}mui.FinishPage.AbortWarning ${if} $mui.FinishPage.DisableAbortWarning == "1" Quit ${endif} !ifdef MUI_${MUI_PAGE_UNINSTALLER_PREFIX}PAGE_FUNCTION_ABORTWARNING Call ${MUI_${MUI_PAGE_UNINSTALLER_PREFIX}PAGE_FUNCTION_ABORTWARNING} !endif FunctionEnd !insertmacro MUI_SET MUI_${MUI_PAGE_UNINSTALLER_PREFIX}PAGE_FUNCTION_ABORTWARNING ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}mui.FinishPage.AbortWarning !endif !endif !macroend ;-------------------------------- ;Page declaration !macro MUI_PAGEDECLARATION_FINISH !insertmacro MUI_SET MUI_${MUI_PAGE_UNINSTALLER_PREFIX}FINISHPAGE "" !insertmacro MUI_FINISHPAGE_INTERFACE !insertmacro MUI_FINISHPAGE_GUIINIT !insertmacro MUI_FINISHPAGE_ABORTWARNING !insertmacro MUI_DEFAULT MUI_FINISHPAGE_TITLE "$(MUI_${MUI_PAGE_UNINSTALLER_PREFIX}TEXT_FINISH_INFO_TITLE)" !insertmacro MUI_DEFAULT MUI_FINISHPAGE_TEXT "$(MUI_${MUI_PAGE_UNINSTALLER_PREFIX}TEXT_FINISH_INFO_TEXT)" !insertmacro MUI_DEFAULT MUI_FINISHPAGE_BUTTON "$(MUI_BUTTONTEXT_FINISH)" !insertmacro MUI_DEFAULT MUI_FINISHPAGE_TEXT_REBOOT "$(MUI_${MUI_PAGE_UNINSTALLER_PREFIX}TEXT_FINISH_INFO_REBOOT)" !insertmacro MUI_DEFAULT 
MUI_FINISHPAGE_TEXT_REBOOTNOW "$(MUI_TEXT_FINISH_REBOOTNOW)" !insertmacro MUI_DEFAULT MUI_FINISHPAGE_TEXT_REBOOTLATER "$(MUI_TEXT_FINISH_REBOOTLATER)" !insertmacro MUI_DEFAULT MUI_FINISHPAGE_RUN_TEXT "$(MUI_TEXT_FINISH_RUN)" !insertmacro MUI_DEFAULT MUI_FINISHPAGE_SHOWREADME_TEXT "$(MUI_TEXT_FINISH_SHOWREADME)" !insertmacro MUI_DEFAULT MUI_FINISHPAGE_LINK_COLOR "000080" !insertmacro MUI_PAGE_FUNCTION_FULLWINDOW PageEx ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}custom PageCallbacks ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}mui.FinishPage.Pre_${MUI_UNIQUEID} \ ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}mui.FinishPage.Leave_${MUI_UNIQUEID} Caption " " PageExEnd !insertmacro MUI_FUNCTION_FINISHPAGE ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}mui.FinishPage.Pre_${MUI_UNIQUEID} \ ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}mui.FinishPage.Leave_${MUI_UNIQUEID} \ ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}mui.FinishPage.Link_${MUI_UNIQUEID} !insertmacro MUI_UNSET MUI_FINISHPAGE_TITLE !insertmacro MUI_UNSET MUI_FINISHPAGE_TITLE_3LINES !insertmacro MUI_UNSET MUI_FINISHPAGE_TEXT !insertmacro MUI_UNSET MUI_FINISHPAGE_TEXT_LARGE !insertmacro MUI_UNSET MUI_FINISHPAGE_BUTTON !insertmacro MUI_UNSET MUI_FINISHPAGE_CANCEL_ENABLED !insertmacro MUI_UNSET MUI_FINISHPAGE_TEXT_REBOOT !insertmacro MUI_UNSET MUI_FINISHPAGE_TEXT_REBOOTNOW !insertmacro MUI_UNSET MUI_FINISHPAGE_TEXT_REBOOTLATER !insertmacro MUI_UNSET MUI_FINISHPAGE_REBOOTLATER_DEFAULT !insertmacro MUI_UNSET MUI_FINISHPAGE_RUN !insertmacro MUI_UNSET MUI_FINISHPAGE_RUN_TEXT !insertmacro MUI_UNSET MUI_FINISHPAGE_RUN_PARAMETERS !insertmacro MUI_UNSET MUI_FINISHPAGE_RUN_NOTCHECKED !insertmacro MUI_UNSET MUI_FINISHPAGE_RUN_FUNCTION !insertmacro MUI_UNSET MUI_FINISHPAGE_SHOWREADME !insertmacro MUI_UNSET MUI_FINISHPAGE_SHOWREADME_TEXT !insertmacro MUI_UNSET MUI_FINISHPAGE_SHOWREADME_NOTCHECKED !insertmacro MUI_UNSET MUI_FINISHPAGE_SHOWREADME_FUNCTION !insertmacro MUI_UNSET MUI_FINISHPAGE_LINK !insertmacro MUI_UNSET MUI_FINISHPAGE_LINK_LOCATION !insertmacro MUI_UNSET 
MUI_FINISHPAGE_LINK_COLOR !insertmacro MUI_UNSET MUI_FINISHPAGE_NOREBOOTSUPPORT !insertmacro MUI_UNSET MUI_FINISHPAGE_ABORTWARNINGCHECK !insertmacro MUI_UNSET MUI_FINISHPAGE_CURFIELD_TOP !insertmacro MUI_UNSET MUI_FINISHPAGE_CURFIELD_BOTTOM !macroend !macro MUI_PAGE_FINISH !verbose push !verbose ${MUI_VERBOSE} !insertmacro MUI_PAGE_INIT !insertmacro MUI_PAGEDECLARATION_FINISH !verbose pop !macroend !macro MUI_UNPAGE_FINISH !verbose push !verbose ${MUI_VERBOSE} !insertmacro MUI_UNPAGE_INIT !insertmacro MUI_PAGEDECLARATION_FINISH !verbose pop !macroend ;-------------------------------- ;Page functions !macro MUI_FUNCTION_FINISHPAGE PRE LEAVE LINK !ifdef MUI_FINISHPAGE_LINK Function "${LINK}" ExecShell open "${MUI_FINISHPAGE_LINK_LOCATION}" FunctionEnd !endif Function "${PRE}" !insertmacro MUI_PAGE_FUNCTION_CUSTOM PRE ;Set text on Next button SendMessage $mui.Button.Next ${WM_SETTEXT} 0 "STR:${MUI_FINISHPAGE_BUTTON}" ;Enable cancel button if set in script !ifdef MUI_FINISHPAGE_CANCEL_ENABLED EnableWindow $mui.Button.Cancel 1 !endif ;Create dialog nsDialogs::Create 1044 Pop $mui.FinishPage nsDialogs::SetRTL $(^RTL) SetCtlColors $mui.FinishPage "" "${MUI_BGCOLOR}" ;Image control ${NSD_CreateBitmap} 0u 0u 109u 193u "" Pop $mui.FinishPage.Image !ifndef MUI_${MUI_PAGE_UNINSTALLER_PREFIX}WELCOMEFINISHPAGE_BITMAP_NOSTRETCH ${NSD_SetStretchedImage} $mui.FinishPage.Image $PLUGINSDIR\modern-wizard.bmp $mui.FinishPage.Image.Bitmap !else ${NSD_SetImage} $mui.FinishPage.Image $PLUGINSDIR\modern-wizard.bmp $mui.FinishPage.Image.Bitmap !endif ;Positiong of controls ;Title !ifndef MUI_FINISHPAGE_TITLE_3LINES !define MUI_FINISHPAGE_TITLE_HEIGHT 28 !else !define MUI_FINISHPAGE_TITLE_HEIGHT 38 !endif ;Text ;17 = 10 (top margin) + 7 (distance between texts) !define /math MUI_FINISHPAGE_TEXT_TOP 17 + ${MUI_FINISHPAGE_TITLE_HEIGHT} ;Height if space required for radio buttons or check boxes !ifndef MUI_FINISHPAGE_TEXT_LARGE !define MUI_FINISHPAGE_TEXT_HEIGHT_BUTTONS 40 !else !define 
MUI_FINISHPAGE_TEXT_HEIGHT_BUTTONS 60 !endif !define /math MUI_FINISHPAGE_TEXT_BOTTOM_BUTTONS ${MUI_FINISHPAGE_TEXT_TOP} + ${MUI_FINISHPAGE_TEXT_HEIGHT_BUTTONS} ;Positioning of radio buttons to ask for a reboot !ifndef MUI_FINISHPAGE_NOREBOOTSUPPORT !define /math MUI_FINISHPAGE_REBOOTNOW_TOP ${MUI_FINISHPAGE_TEXT_BOTTOM_BUTTONS} + 5 ;Distance between text and options ;25 = 10 (height of first radio button) + 15 (distance between buttons) !define /math MUI_FINISHPAGE_REBOOTLATER_TOP ${MUI_FINISHPAGE_REBOOTNOW_TOP} + 25 !endif ;Positioning of checkboxes !ifdef MUI_FINISHPAGE_RUN !define /math MUI_FINISHPAGE_RUN_TOP ${MUI_FINISHPAGE_TEXT_BOTTOM_BUTTONS} + 5 ;Distance between text and options !endif !ifdef MUI_FINISHPAGE_SHOWREADME !ifdef MUI_FINISHPAGE_RUN ;25 = 10 (height of run checkbox) + 10 (distance between checkboxes) !define /math MUI_FINISHPAGE_SHOWREADME_TOP ${MUI_FINISHPAGE_RUN_TOP} + 20 !else !define /math MUI_FINISHPAGE_SHOWREADME_TOP ${MUI_FINISHPAGE_TEXT_BOTTOM_BUTTONS} + 5 ;Distance between text and options !endif !endif !ifndef MUI_FINISHPAGE_RUN & MUI_FINISHPAGE_SHOWREADME ;Height if full space is available for text and link !ifndef MUI_FINISHPAGE_LINK !define MUI_FINISHPAGE_TEXT_HEIGHT 130 !else !define MUI_FINISHPAGE_TEXT_HEIGHT 120 !endif !endif !ifndef MUI_FINISHPAGE_NOREBOOTSUPPORT ${if} ${RebootFlag} ;Title text ${NSD_CreateLabel} 120u 10u 195u ${MUI_FINISHPAGE_TITLE_HEIGHT}u "${MUI_FINISHPAGE_TITLE}" Pop $mui.FinishPage.Title SetCtlColors $mui.FinishPage.Title "" "${MUI_BGCOLOR}" CreateFont $mui.FinishPage.Title.Font "$(^Font)" "12" "700" SendMessage $mui.FinishPage.Title ${WM_SETFONT} $mui.FinishPage.Title.Font 0 ;Finish text ${NSD_CreateLabel} 120u 45u 195u ${MUI_FINISHPAGE_TEXT_HEIGHT_BUTTONS}u "${MUI_FINISHPAGE_TEXT_REBOOT}" Pop $mui.FinishPage.Text SetCtlColors $mui.FinishPage.Text "" "${MUI_BGCOLOR}" ;Radio buttons for reboot page ${NSD_CreateRadioButton} 120u ${MUI_FINISHPAGE_REBOOTNOW_TOP}u 195u 10u "${MUI_FINISHPAGE_TEXT_REBOOTNOW}" 
Pop $mui.FinishPage.RebootNow SetCtlColors $mui.FinishPage.RebootNow "" "${MUI_BGCOLOR}" ${NSD_CreateRadioButton} 120u ${MUI_FINISHPAGE_REBOOTLATER_TOP}u 195u 10u "${MUI_FINISHPAGE_TEXT_REBOOTLATER}" Pop $mui.FinishPage.RebootLater SetCtlColors $mui.FinishPage.RebootLater "" "${MUI_BGCOLOR}" !ifndef MUI_FINISHPAGE_REBOOTLATER_DEFAULT SendMessage $mui.FinishPage.RebootNow ${BM_SETCHECK} ${BST_CHECKED} 0 !else SendMessage $mui.FinishPage.RebootLater ${BM_SETCHECK} ${BST_CHECKED} 0 !endif ${NSD_SetFocus} $mui.FinishPage.RebootNow ${else} !endif ;Title text ${NSD_CreateLabel} 120u 10u 195u ${MUI_FINISHPAGE_TITLE_HEIGHT}u "${MUI_FINISHPAGE_TITLE}" Pop $mui.FinishPage.Title SetCtlColors $mui.FinishPage.Title "" "${MUI_BGCOLOR}" CreateFont $mui.FinishPage.Title.Font "$(^Font)" "12" "700" SendMessage $mui.FinishPage.Title ${WM_SETFONT} $mui.FinishPage.Title.Font 0 ;Finish text !ifndef MUI_FINISHPAGE_RUN & MUI_FINISHPAGE_SHOWREADME ${NSD_CreateLabel} 120u ${MUI_FINISHPAGE_TEXT_TOP}u 195u ${MUI_FINISHPAGE_TEXT_HEIGHT}u "${MUI_FINISHPAGE_TEXT}" !else ${NSD_CreateLabel} 120u ${MUI_FINISHPAGE_TEXT_TOP}u 195u ${MUI_FINISHPAGE_TEXT_HEIGHT_BUTTONS}u "${MUI_FINISHPAGE_TEXT}" !endif Pop $mui.FinishPage.Text SetCtlColors $mui.FinishPage.Text "" "${MUI_BGCOLOR}" ;Checkboxes !ifdef MUI_FINISHPAGE_RUN ${NSD_CreateCheckbox} 120u ${MUI_FINISHPAGE_RUN_TOP}u 195u 10u "${MUI_FINISHPAGE_RUN_TEXT}" Pop $mui.FinishPage.Run SetCtlColors $mui.FinishPage.Run "" "${MUI_BGCOLOR}" !ifndef MUI_FINISHPAGE_RUN_NOTCHECKED SendMessage $mui.FinishPage.Run ${BM_SETCHECK} ${BST_CHECKED} 0 !endif ${NSD_SetFocus} $mui.FinishPage.Run !endif !ifdef MUI_FINISHPAGE_SHOWREADME ${NSD_CreateCheckbox} 120u ${MUI_FINISHPAGE_SHOWREADME_TOP}u 195u 10u "${MUI_FINISHPAGE_SHOWREADME_TEXT}" Pop $mui.FinishPage.ShowReadme SetCtlColors $mui.FinishPage.ShowReadme "" "${MUI_BGCOLOR}" !ifndef MUI_FINISHPAGE_SHOWREADME_NOTCHECKED SendMessage $mui.FinishPage.ShowReadme ${BM_SETCHECK} ${BST_CHECKED} 0 !endif !ifndef 
MUI_FINISHPAGE_RUN ${NSD_SetFocus} $mui.FinishPage.ShowReadme !endif !endif ;Link !ifdef MUI_FINISHPAGE_LINK ${NSD_CreateLink} 120u 175u 195u 10u "${MUI_FINISHPAGE_LINK}" Pop $mui.FinishPage.Link SetCtlColors $mui.FinishPage.Link "${MUI_FINISHPAGE_LINK_COLOR}" "${MUI_BGCOLOR}" ${NSD_OnClick} $mui.FinishPage.Link "${LINK}" !endif !ifndef MUI_FINISHPAGE_NOREBOOTSUPPORT ${endif} !endif !insertmacro MUI_PAGE_FUNCTION_CUSTOM SHOW !ifdef MUI_FINISHPAGE_CANCEL_ENABLED StrCpy $mui.FinishPage.DisableAbortWarning "1" !endif ;Show page Call ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}muiPageLoadFullWindow !insertmacro MUI_PAGE_FUNCTION_CUSTOM SHOW nsDialogs::Show Call ${MUI_PAGE_UNINSTALLER_FUNCPREFIX}muiPageUnloadFullWindow !ifdef MUI_FINISHPAGE_CANCEL_ENABLED StrCpy $mui.FinishPage.DisableAbortWarning "" !endif ;Delete image from memory ${NSD_FreeImage} $mui.FinishPage.Image.Bitmap !insertmacro MUI_UNSET MUI_FINISHPAGE_TITLE_HEIGHT !insertmacro MUI_UNSET MUI_FINISHPAGE_TEXT_TOP !insertmacro MUI_UNSET MUI_FINISHPAGE_TEXT_HEIGHT !insertmacro MUI_UNSET MUI_FINISHPAGE_TEXT_HEIGHT_BUTTONS !insertmacro MUI_UNSET MUI_FINISHPAGE_TEXT_BOTTOM_BUTTONS !insertmacro MUI_UNSET MUI_FINISHPAGE_REBOOTNOW_TOP !insertmacro MUI_UNSET MUI_FINISHPAGE_REBOOTLATER_TOP !insertmacro MUI_UNSET MUI_FINISHPAGE_RUN_TOP !insertmacro MUI_UNSET MUI_FINISHPAGE_SHOWREADME_TOP FunctionEnd Function "${LEAVE}" !insertmacro MUI_PAGE_FUNCTION_CUSTOM LEAVE !ifndef MUI_FINISHPAGE_NOREBOOTSUPPORT ;Check whether the user has chosen to reboot the computer ${if} ${RebootFlag} SendMessage $mui.FinishPage.RebootNow ${BM_GETCHECK} 0 0 $mui.FinishPage.ReturnValue ${if} $mui.FinishPage.ReturnValue = ${BST_CHECKED} Reboot ${else} Return ${endif} ${endif} !endif ;Run functions depending on checkbox state !ifdef MUI_FINISHPAGE_RUN SendMessage $mui.FinishPage.Run ${BM_GETCHECK} 0 0 $mui.FinishPage.ReturnValue ${if} $mui.FinishPage.ReturnValue = ${BST_CHECKED} !ifndef MUI_FINISHPAGE_RUN_FUNCTION !ifndef MUI_FINISHPAGE_RUN_PARAMETERS Exec 
"$\"${MUI_FINISHPAGE_RUN}$\"" !else Exec "$\"${MUI_FINISHPAGE_RUN}$\" ${MUI_FINISHPAGE_RUN_PARAMETERS}" !endif !else Call "${MUI_FINISHPAGE_RUN_FUNCTION}" !endif ${endif} !endif !ifdef MUI_FINISHPAGE_SHOWREADME SendMessage $mui.FinishPage.ShowReadme ${BM_GETCHECK} 0 0 $mui.FinishPage.ReturnValue ${if} $mui.FinishPage.ReturnValue = ${BST_CHECKED} !ifndef MUI_FINISHPAGE_SHOWREADME_FUNCTION ExecShell open "${MUI_FINISHPAGE_SHOWREADME}" !else Call "${MUI_FINISHPAGE_SHOWREADME_FUNCTION}" !endif ${endif} !endif FunctionEnd !macroend
{ "pile_set_name": "Github" }
# Auto-generated Paws request class: carries the arguments for the FSx
# DescribeBackups API call. Instances are built by Paws from named
# arguments; do not construct this class directly.
package Paws::FSX::DescribeBackups;
  use Moose;
  has BackupIds => (is => 'ro', isa => 'ArrayRef[Str|Undef]');
  has Filters => (is => 'ro', isa => 'ArrayRef[Paws::FSX::Filter]');
  has MaxResults => (is => 'ro', isa => 'Int');
  has NextToken => (is => 'ro', isa => 'Str');

  use MooseX::ClassAttribute;

  # Wire-level metadata used by Paws to dispatch the call and inflate the response.
  class_has _api_call => (isa => 'Str', is => 'ro', default => 'DescribeBackups');
  class_has _returns => (isa => 'Str', is => 'ro', default => 'Paws::FSX::DescribeBackupsResponse');
  class_has _result_key => (isa => 'Str', is => 'ro');
1;

### main pod documentation begin ###

=head1 NAME

Paws::FSX::DescribeBackups - Arguments for method DescribeBackups on L<Paws::FSX>

=head1 DESCRIPTION

This class represents the parameters used for calling the method DescribeBackups on the
L<Amazon FSx|Paws::FSX> service. Use the attributes of this class
as arguments to method DescribeBackups.

You shouldn't make instances of this class. Each attribute should be used as a named argument in the call to DescribeBackups.

=head1 SYNOPSIS

    my $fsx = Paws->service('FSX');
    my $DescribeBackupsResponse = $fsx->DescribeBackups(
      BackupIds => [
        'MyBackupId', ...    # min: 12, max: 128
      ],    # OPTIONAL
      Filters => [
        {
          Name => 'file-system-id',    # values: file-system-id, backup-type; OPTIONAL
          Values => [
            'MyFilterValue', ...       # min: 1, max: 128
          ],    # max: 20; OPTIONAL
        },
        ...
      ],    # OPTIONAL
      MaxResults => 1,               # OPTIONAL
      NextToken  => 'MyNextToken',   # OPTIONAL
    );

    # Results:
    my $Backups   = $DescribeBackupsResponse->Backups;
    my $NextToken = $DescribeBackupsResponse->NextToken;

    # Returns a L<Paws::FSX::DescribeBackupsResponse> object.

Values for attributes that are native types (Int, String, Float, etc) can be passed as-is (scalar values). Values for complex Types (objects) can be passed as a HashRef. The keys and values of the hashref will be used to instance the underlying object.
For the AWS API documentation, see L<https://docs.aws.amazon.com/goto/WebAPI/fsx/DescribeBackups>

=head1 ATTRIBUTES


=head2 BackupIds => ArrayRef[Str|Undef]

(Optional) IDs of the backups you want to retrieve (String). This
overrides any filters. If any IDs are not found, BackupNotFound will be
thrown.



=head2 Filters => ArrayRef[L<Paws::FSX::Filter>]

(Optional) Filters structure. Supported names are file-system-id and
backup-type.



=head2 MaxResults => Int

(Optional) Maximum number of backups to return in the response
(integer). This parameter value must be greater than 0. The number of
items that Amazon FSx returns is the minimum of the C<MaxResults>
parameter specified in the request and the service's internal maximum
number of items per page.



=head2 NextToken => Str

(Optional) Opaque pagination token returned from a previous
C<DescribeBackups> operation (String). If a token is present, the action
continues the list from where the returning call left off.




=head1 SEE ALSO

This class forms part of L<Paws>, documenting arguments for method DescribeBackups in L<Paws::FSX>

=head1 BUGS and CONTRIBUTIONS

The source code is located here: L<https://github.com/pplu/aws-sdk-perl>

Please report bugs to: L<https://github.com/pplu/aws-sdk-perl/issues>

=cut
{ "pile_set_name": "Github" }
CMAKE_COMPILER_2005 ------------------- Using the Visual Studio 2005 compiler from Microsoft Set to true when using the Visual Studio 2005 compiler from Microsoft.
{ "pile_set_name": "Github" }
// Copyright 2019 Google LLC.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

syntax = "proto3";

package google.api;

option go_package = "google.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig";
option java_multiple_files = true;
option java_outer_classname = "EndpointProto";
option java_package = "com.google.api";
option objc_class_prefix = "GAPI";

// `Endpoint` describes a network endpoint that serves a set of APIs.
// A service may expose any number of endpoints, and all endpoints share the
// same service configuration, such as quota configuration and monitoring
// configuration.
//
// Example service configuration:
//
//     name: library-example.googleapis.com
//     endpoints:
//       # Below entry makes 'google.example.library.v1.Library'
//       # API be served from endpoint address library-example.googleapis.com.
//       # It also allows HTTP OPTIONS calls to be passed to the backend, for
//       # it to decide whether the subsequent cross-origin request is
//       # allowed to proceed.
//     - name: library-example.googleapis.com
//       allow_cors: true
message Endpoint {
  // The canonical name of this endpoint.
  string name = 1;

  // DEPRECATED: This field is no longer supported. Instead of using aliases,
  // please specify multiple [google.api.Endpoint][google.api.Endpoint] for each of the intended
  // aliases.
  //
  // Additional names that this endpoint will be hosted on.
  repeated string aliases = 2 [deprecated = true];

  // The list of features enabled on this endpoint.
  // (Field number 3 is intentionally unused in this message.)
  repeated string features = 4;

  // The specification of an Internet routable address of API frontend that will
  // handle requests to this [API
  // Endpoint](https://cloud.google.com/apis/design/glossary). It should be
  // either a valid IPv4 address or a fully-qualified domain name. For example,
  // "8.8.8.8" or "myservice.appspot.com".
  string target = 101;

  // Allowing
  // [CORS](https://en.wikipedia.org/wiki/Cross-origin_resource_sharing), aka
  // cross-domain traffic, would allow the backends served from this endpoint to
  // receive and respond to HTTP OPTIONS requests. The response will be used by
  // the browser to determine whether the subsequent cross-origin request is
  // allowed to proceed.
  bool allow_cors = 5;
}
{ "pile_set_name": "Github" }
from dataflows import Flow, load, dump_to_path, dump_to_zip, printer, add_metadata from dataflows import sort_rows, filter_rows, find_replace, delete_fields, set_type, validate, unpivot {% if 'custom' in processing %} def my_custom_processing(row): # Do some modifications to the row here: # ... return row {% endif %} def {{slug}}(): flow = Flow( # Load inputs {% if input == 'file' %} load('{{input_url}}', format='{{format}}', {% if sheet %}sheet={{sheet}}{% endif %}), {% endif %} {% if input == 'remote' %} load('{{input_url}}', format='{{format}}', {% if sheet %}sheet={{sheet}}{% endif %}), {% endif %} {% if input == 'sql' %} load('{{input_url}}', table='{{input_db_table}}'), {% endif %} {% if input == 'other' %} {% endif %} # Process them (if necessary) {% if 'sort' in processing %} sort_rows('{field_name}'), # Key is a Python format string or a list of field names {% endif %} {% if 'filter' in processing %} filter_rows(), {% endif %} {% if 'find_replace' in processing %} find_replace([ dict(name='field_name', patterns=[ dict(find='re-pattern-to-find', replace='re-pattern-to-replace-with'), ]) ]), {% endif %} {% if 'delete_fields' in processing %} delete_fields(['field_name']), # Pass a list of field names to delete from the data {% endif %} {% if 'set_type' in processing %} set_type('field_name', type='number', constraints=dict(minimum=3)), # There are quite a few options you can use here # Take a look at https://frictionlessdata.io/specs/table-schema/ # Or you can simply use validate() here instead {% endif %} {% if 'unpivot' in processing %} unpivot(unpivot_fields, extra_keys, extra_value), # See documentation on the meaning of each of these parameters {% endif %} {% if 'custom' in processing %} my_custom_processing, {% endif %} # Save the results add_metadata(name='{{slug}}', title='''{{title}}'''), {% if output in ('print', 'print_n_pkg') %} printer(), {% endif %} {% if output == 'list' %} {% endif %} {% if output in ('dp_csv', 'print_n_pkg') %} 
dump_to_path('{{slug}}'), {% endif %} {% if output == 'dp_csv_zip' %} dump_to_zip('{{slug}}.zip'), {% endif %} {% if output == 'dp_json' %} dump_to_path('{{slug}}', force_format=True, format='json'), {% endif %} {% if output == 'dp_json_zip' %} dump_to_zip('{{slug}}.zip', force_format=True, format='json'), {% endif %} {% if output == 'sql' %} dump_to_sql('{{output_url}}', table='{{output_db_table}}') {% endif %} ) {% if output != 'list' %} flow.process() {% endif %} {% if output == 'list' %} data, *_ = flow.results() {% endif %} if __name__ == '__main__': {{slug}}()
{ "pile_set_name": "Github" }
#!/usr/bin/env bash
# Bats test-suite runner: collects test functions from the given .bats files
# (via bats-preprocess) and executes them with bats-exec-test, either
# serially or in parallel through GNU parallel.
#
# Options:
#   -c          count tests only, print the number and exit
#   -f FILTER   only run tests whose name matches FILTER (forwarded downstream)
#   -j N        run N jobs in parallel (requires GNU parallel)
#   -x          enable extended TAP syntax (forwarded downstream)
set -e

count_only_flag=''
extended_syntax_flag=''
filter=''
num_jobs=1
have_gnu_parallel=
flags=()

# Parse leading options; everything after the first non-option is a filename.
while [[ "$#" -ne 0 ]]; do
  case "$1" in
  -c)
    count_only_flag=1
    ;;
  -f)
    shift
    filter="$1"
    flags+=('-f' "$filter")
    ;;
  -j)
    shift
    num_jobs="$1"
    ;;
  -x)
    extended_syntax_flag='-x'
    flags+=('-x')
    ;;
  *)
    break
    ;;
  esac
  shift
done

# Parallel execution needs GNU parallel; fail early if -j was requested without it.
if ( type -p parallel &>/dev/null ); then
  have_gnu_parallel=1
elif [[ "$num_jobs" != 1 ]]; then
  printf 'bats: cannot execute "%s" jobs without GNU parallel\n' "$num_jobs" >&2
  exit 1
fi

# On interrupt, kill the whole process group so child test runners die too.
trap 'kill 0; exit 1' int

# Collect "<filename>\t<test name>" entries from every input file.
all_tests=()
for filename in "$@"; do
  if [[ ! -f "$filename" ]]; then
    printf 'bats: %s does not exist\n' "$filename" >&2
    exit 1
  fi

  test_names=()
  test_dupes=()
  # bats-preprocess emits one `bats_test_function <name>` line per test.
  while read -r line; do
    if [[ ! "$line" =~ ^bats_test_function\  ]]; then
      continue
    fi
    line="${line%$'\r'}"   # strip trailing CR from CRLF input
    line="${line#* }"      # drop the `bats_test_function ` prefix, keep the name
    all_tests+=( "$(printf "%s\t%s" "$filename" "$line")" )
    # Track duplicate test names within this file for the warning below.
    if [[ " ${test_names[*]} " == *" $line "* ]]; then
      test_dupes+=("$line")
      continue
    fi
    test_names+=("$line")
  done < <(BATS_TEST_FILTER="$filter" bats-preprocess "$filename")

  if [[ "${#test_dupes[@]}" -ne 0 ]]; then
    printf 'bats warning: duplicate test name(s) in %s: %s\n' "$filename" "${test_dupes[*]}" >&2
  fi
done

# -c: report the count and stop.
if [[ -n "$count_only_flag" ]]; then
  printf '%d\n' "${#all_tests[@]}"
  exit
fi

status=0
# TAP plan line.
printf '1..%d\n' "${#all_tests[@]}"

# No point on continuing if there's no tests.
if [[ "${#all_tests[@]}" == 0 ]]; then
  exit
fi

if [[ "$num_jobs" != 1 ]]; then
  # Only use GNU parallel when we want parallel execution -- there is a small
  # amount of overhead using it over a simple loop in the serial case.
  set -o pipefail
  printf '%s\n' "${all_tests[@]}" | grep -v '^$' | \
    parallel -qk -j "$num_jobs" --colsep="\t" -- bats-exec-test "${flags[@]}" '{1}' '{2}' '{#}' || status=1
else
  # Just do it serially.
  test_number=1
  while IFS=$'\t' read -r filename test_name; do
    bats-exec-test "${flags[@]}" "$filename" "$test_name" "$test_number" || status=1
    ((++test_number))
  done < <(printf '%s\n' "${all_tests[@]}" | grep -v '^$')
fi

# Non-zero if any single test failed.
exit "$status"
{ "pile_set_name": "Github" }
Test a specific SSA-building regression in which a back edge would not be split because it lay on a try/catch boundary.
{ "pile_set_name": "Github" }
<?xml version="1.0" ?> <pqevents> <pqevent object="pqClientMainWindow/menubar/menuSources/Alphabetical" command="activate" arguments="RTAnalyticSource" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/Accept" command="activate" arguments="" /> <pqevent object="pqClientMainWindow/menubar/menuFilters/Alphabetical" command="activate" arguments="Gradient" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/Accept" command="activate" arguments="" /> <pqevent object="pqClientMainWindow/representationToolbar/displayRepresentation/comboBox" command="activated" arguments="3D Glyphs" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/SearchBox/AdvancedButton" command="set_boolean" arguments="true" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/SelectOrientationVectors/ComboBox" command="activated" arguments="RTDataGradient" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/Orient/CheckBox" command="set_boolean" arguments="true" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/UseGlyphCullingAndLOD/CheckBox" command="set_boolean" arguments="true" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/ColorByLODIndex/CheckBox" command="set_boolean" arguments="true" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Add" command="activate" arguments="" /> <pqevent 
object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Add" command="activate" arguments="" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Table" command="setCurrent" arguments="0.0" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Table" command="edit" arguments="0.0" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Table/qt_scrollarea_viewport/1QExpandingLineEdit0" command="set_string" arguments="60" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Table" command="setCurrent" arguments="0.1" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Table" command="edit" arguments="0.1" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Table/qt_scrollarea_viewport/1QExpandingLineEdit0" command="set_string" arguments="0.5" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Table" command="setCurrent" arguments="1.0" /> <pqevent 
object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Table" command="edit" arguments="1.0" /> <pqevent object="pqClientMainWindow/propertiesDock/propertiesPanel/scrollArea/qt_scrollarea_viewport/scrollAreaWidgetContents/DisplayFrame/ProxyPanel/LODValues/ScalarValueList/Table/qt_scrollarea_viewport/1QExpandingLineEdit0" command="set_string" arguments="70" /> </pqevents>
{ "pile_set_name": "Github" }
from flask_restx import Resource
from sqlalchemy import func

from CTFd.api.v1.statistics import statistics_namespace
from CTFd.models import Submissions
from CTFd.utils.decorators import admins_only


@statistics_namespace.route("/submissions/<column>")
class SubmissionPropertyCounts(Resource):
    """Admin-only endpoint returning, for a given Submissions column,
    the count of submissions grouped by each distinct value."""

    @admins_only
    def get(self, column):
        # Reject anything that is not an actual column of the table.
        if column not in Submissions.__table__.columns.keys():
            return {"success": False, "errors": "That could not be found"}, 404

        attr = getattr(Submissions, column)
        grouped_counts = (
            Submissions.query.with_entities(attr, func.count(attr))
            .group_by(attr)
            .all()
        )
        return {"success": True, "data": dict(grouped_counts)}
{ "pile_set_name": "Github" }
//------------------------------------------------------------------------------------------------------- // Copyright (C) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE.txt file in the project root for full license information. //------------------------------------------------------------------------------------------------------- function test0(){ if((function () {;})) { } }; // generate profile test0(); test0(); test0(); test0(); // run JITted code runningJITtedCode = true; test0(); test0(); test0(); test0();
{ "pile_set_name": "Github" }
# p-finally [![Build Status](https://travis-ci.org/sindresorhus/p-finally.svg?branch=master)](https://travis-ci.org/sindresorhus/p-finally) > [`Promise#finally()`](https://github.com/tc39/proposal-promise-finally) [ponyfill](https://ponyfill.com) - Invoked when the promise is settled regardless of outcome Useful for cleanup. ## Install ``` $ npm install --save p-finally ``` ## Usage ```js const pFinally = require('p-finally'); const dir = createTempDir(); pFinally(write(dir), () => cleanup(dir)); ``` ## API ### pFinally(promise, [onFinally]) Returns a `Promise`. #### onFinally Type: `Function` Note: Throwing or returning a rejected promise will reject `promise` with the rejection reason. ## Related - [p-try](https://github.com/sindresorhus/p-try) - `Promise#try()` ponyfill - Starts a promise chain - [More…](https://github.com/sindresorhus/promise-fun) ## License MIT © [Sindre Sorhus](https://sindresorhus.com)
{ "pile_set_name": "Github" }
package types // Seccomp represents the config for a seccomp profile for syscall restriction. type Seccomp struct { DefaultAction Action `json:"defaultAction"` Architectures []Arch `json:"architectures"` Syscalls []*Syscall `json:"syscalls"` } // Arch used for additional architectures type Arch string // Additional architectures permitted to be used for system calls // By default only the native architecture of the kernel is permitted const ( ArchX86 Arch = "SCMP_ARCH_X86" ArchX86_64 Arch = "SCMP_ARCH_X86_64" ArchX32 Arch = "SCMP_ARCH_X32" ArchARM Arch = "SCMP_ARCH_ARM" ArchAARCH64 Arch = "SCMP_ARCH_AARCH64" ArchMIPS Arch = "SCMP_ARCH_MIPS" ArchMIPS64 Arch = "SCMP_ARCH_MIPS64" ArchMIPS64N32 Arch = "SCMP_ARCH_MIPS64N32" ArchMIPSEL Arch = "SCMP_ARCH_MIPSEL" ArchMIPSEL64 Arch = "SCMP_ARCH_MIPSEL64" ArchMIPSEL64N32 Arch = "SCMP_ARCH_MIPSEL64N32" ) // Action taken upon Seccomp rule match type Action string // Define actions for Seccomp rules const ( ActKill Action = "SCMP_ACT_KILL" ActTrap Action = "SCMP_ACT_TRAP" ActErrno Action = "SCMP_ACT_ERRNO" ActTrace Action = "SCMP_ACT_TRACE" ActAllow Action = "SCMP_ACT_ALLOW" ) // Operator used to match syscall arguments in Seccomp type Operator string // Define operators for syscall arguments in Seccomp const ( OpNotEqual Operator = "SCMP_CMP_NE" OpLessThan Operator = "SCMP_CMP_LT" OpLessEqual Operator = "SCMP_CMP_LE" OpEqualTo Operator = "SCMP_CMP_EQ" OpGreaterEqual Operator = "SCMP_CMP_GE" OpGreaterThan Operator = "SCMP_CMP_GT" OpMaskedEqual Operator = "SCMP_CMP_MASKED_EQ" ) // Arg used for matching specific syscall arguments in Seccomp type Arg struct { Index uint `json:"index"` Value uint64 `json:"value"` ValueTwo uint64 `json:"valueTwo"` Op Operator `json:"op"` } // Syscall is used to match a syscall in Seccomp type Syscall struct { Name string `json:"name"` Action Action `json:"action"` Args []*Arg `json:"args"` }
{ "pile_set_name": "Github" }
{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": { "collapsed": true, "deletable": true, "editable": true }, "outputs": [], "source": [ "# Let's see how to use DBT to:\n", "# 1: train a VGG-like network on CIFAR-10\n", "# 2: continue a train from the last iteration\n", "# 3: do TRANSFER LEARNING from the trained model to another model that will be able to classify CIFAR-100\n", "# 4: do FINE TUNING of the model trained on CIFAR-10 to solve the CIFAR-100 classification problem\n", "# 5: compare the train/validation/test performance of the models\n", "\n", "import pandas as pd\n", "import pprint\n", "import tensorflow as tf\n", "from dytb.inputs.predefined import Cifar10, Cifar100\n", "from dytb.train import train\n", "from dytb.models.predefined.VGG import VGG" ] }, { "cell_type": "code", "execution_count": 2, "metadata": { "collapsed": true, "deletable": true, "editable": true }, "outputs": [], "source": [ "# Instantiate the model\n", "vgg = VGG()" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "collapsed": true, "deletable": true, "editable": true }, "outputs": [], "source": [ "# Instantiate the CIFAR-10 input source\n", "cifar10 = Cifar10.Cifar10()" ] }, { "cell_type": "code", "execution_count": 4, "metadata": { "collapsed": false, "deletable": true, "editable": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Original training set size 50000. 
Augmented training set size: 100000\n", "<tf.Variable 'VGG/64/conv1/W:0' shape=(3, 3, 3, 64) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv1/b:0' shape=(64,) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv2/W:0' shape=(3, 3, 64, 64) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv2/b:0' shape=(64,) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv3/W:0' shape=(3, 3, 64, 128) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv3/b:0' shape=(128,) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv4/W:0' shape=(3, 3, 128, 128) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv4/b:0' shape=(128,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv5/W:0' shape=(3, 3, 128, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv5/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv6/W:0' shape=(3, 3, 256, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv6/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv7/W:0' shape=(3, 3, 256, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv7/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv8/W:0' shape=(3, 3, 256, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv8/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv9/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv9/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv10/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv10/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv11/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv11/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv12/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv12/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv13/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv13/b:0' shape=(512,) dtype=float32_ref>\n", 
"<tf.Variable 'VGG/fc/W:0' shape=(512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/fc/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/softmax_linear/W:0' shape=(512, 10) dtype=float32_ref>\n", "<tf.Variable 'VGG/softmax_linear/b:0' shape=(10,) dtype=float32_ref>\n", "Model VGG: trainable parameters: 14982474. Size: 59929.896 KB\n", "[!] No checkpoint file found\n", "2017-05-22 15:54:25.113484: step 0, loss = 2.5562 (18.3 examples/sec; 2.726 sec/batch)\n", "2017-05-22 15:54:31.518555: step 200, loss = 2.0407 (1772.2 examples/sec; 0.028 sec/batch)\n", "2017-05-22 15:54:38.177017: step 400, loss = 1.7758 (1668.4 examples/sec; 0.030 sec/batch)\n", "2017-05-22 15:54:44.512619: step 600, loss = 1.6580 (1730.2 examples/sec; 0.029 sec/batch)\n", "2017-05-22 15:54:51.121688: step 800, loss = 1.6563 (1893.3 examples/sec; 0.026 sec/batch)\n", "2017-05-22 15:54:57.477840: step 1000, loss = 1.6784 (1622.5 examples/sec; 0.031 sec/batch)\n", "2017-05-22 15:55:03.983570: step 1200, loss = 1.4601 (1897.1 examples/sec; 0.026 sec/batch)\n", "2017-05-22 15:55:10.386923: step 1400, loss = 1.2355 (1740.0 examples/sec; 0.029 sec/batch)\n", "2017-05-22 15:55:17.030403: step 1600, loss = 1.2162 (1677.3 examples/sec; 0.030 sec/batch)\n", "2017-05-22 15:55:23.402104: step 1800, loss = 1.0021 (1757.7 examples/sec; 0.028 sec/batch)\n", "2017-05-22 15:55:29.881070: step 2000, loss = 1.1827 (1824.0 examples/sec; 0.027 sec/batch)\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/model.ckpt-2000\n", "2017-05-22 15:55:47.020577 (1): train accuracy = 0.587 validation accuracy = 0.572\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from 
/data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n" ] } ], "source": [ "# 1: Train VGG on Cifar10 for an Epoch\n", "\n", "# Place the train process on GPU:0\n", "device = '/gpu:0'\n", "with tf.device(device):\n", " info = train(\n", " model=vgg,\n", " dataset=cifar10,\n", " hyperparameters={\n", " \"epochs\": 1,\n", " \"batch_size\": 50,\n", " \"regularizations\": {\n", " \"l2\": 1e-5,\n", " \"augmentation\": {\n", " \"name\": \"FlipLR\",\n", " \"fn\": tf.image.random_flip_left_right,\n", " # On average the training set size double appling this\n", " # transformation, thus factor=2\n", " \"factor\": 2,\n", " }\n", " },\n", " \"gd\": {\n", " \"optimizer\": tf.train.AdamOptimizer,\n", " \"args\": {\n", " \"learning_rate\": 1e-3,\n", " \"beta1\": 0.9,\n", " \"beta2\": 0.99,\n", " \"epsilon\": 1e-8\n", " }\n", " }\n", " },\n", " force_restart=True)" ] }, { "cell_type": "code", "execution_count": 5, "metadata": { "collapsed": false, "deletable": true, "editable": true }, "outputs": [ { "data": { "text/html": [ "<div>\n", "<table border=\"1\" class=\"dataframe\">\n", " <thead>\n", " <tr style=\"text-align: right;\">\n", " <th></th>\n", " <th>test</th>\n", " <th>train</th>\n", " <th>validation</th>\n", " </tr>\n", " </thead>\n", " <tbody>\n", " <tr>\n", " <th>accuracy</th>\n", " <td>0.5718</td>\n", " <td>0.59188</td>\n", " <td>0.5717</td>\n", " </tr>\n", " </tbody>\n", "</table>\n", 
"</div>" ], "text/plain": [ " test train validation\n", "accuracy 0.5718 0.59188 0.5717" ] }, "execution_count": 5, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Info contains every information related to the trained model.\n", "# We're interested in stats only, thus we extract only them from the info dict\n", "# Display the results in a table. Let's use a Pandas DataFrame for that\n", "\n", "# Extract the accuracyes measured in every set (train/validation/test)\n", "accuracies = {key: value[\"accuracy\"] for key, value in info[\"stats\"].items()}\n", "df = pd.DataFrame.from_records(accuracies, index=[\"accuracy\"])\n", "df" ] }, { "cell_type": "code", "execution_count": 6, "metadata": { "collapsed": false, "deletable": true, "editable": true, "scrolled": true }, "outputs": [ { "data": { "text/html": [ "<div>\n", "<table border=\"1\" class=\"dataframe\">\n", " <thead>\n", " <tr style=\"text-align: right;\">\n", " <th></th>\n", " <th>0</th>\n", " <th>1</th>\n", " <th>2</th>\n", " <th>3</th>\n", " <th>4</th>\n", " <th>5</th>\n", " <th>6</th>\n", " <th>7</th>\n", " <th>8</th>\n", " <th>9</th>\n", " </tr>\n", " </thead>\n", " <tbody>\n", " <tr>\n", " <th>0</th>\n", " <td>2945.0</td>\n", " <td>178.0</td>\n", " <td>75.0</td>\n", " <td>60.0</td>\n", " <td>6.0</td>\n", " <td>62.0</td>\n", " <td>31.0</td>\n", " <td>37.0</td>\n", " <td>1137.0</td>\n", " <td>423.0</td>\n", " </tr>\n", " <tr>\n", " <th>1</th>\n", " <td>12.0</td>\n", " <td>4019.0</td>\n", " <td>1.0</td>\n", " <td>16.0</td>\n", " <td>0.0</td>\n", " <td>8.0</td>\n", " <td>17.0</td>\n", " <td>6.0</td>\n", " <td>99.0</td>\n", " <td>838.0</td>\n", " </tr>\n", " <tr>\n", " <th>2</th>\n", " <td>1138.0</td>\n", " <td>28.0</td>\n", " <td>1146.0</td>\n", " <td>525.0</td>\n", " <td>265.0</td>\n", " <td>757.0</td>\n", " <td>597.0</td>\n", " <td>207.0</td>\n", " <td>127.0</td>\n", " <td>202.0</td>\n", " </tr>\n", " <tr>\n", " <th>3</th>\n", " <td>244.0</td>\n", " <td>23.0</td>\n", " <td>138.0</td>\n", 
" <td>1234.0</td>\n", " <td>60.0</td>\n", " <td>2354.0</td>\n", " <td>367.0</td>\n", " <td>118.0</td>\n", " <td>65.0</td>\n", " <td>340.0</td>\n", " </tr>\n", " <tr>\n", " <th>4</th>\n", " <td>302.0</td>\n", " <td>33.0</td>\n", " <td>422.0</td>\n", " <td>399.0</td>\n", " <td>1887.0</td>\n", " <td>516.0</td>\n", " <td>602.0</td>\n", " <td>611.0</td>\n", " <td>36.0</td>\n", " <td>233.0</td>\n", " </tr>\n", " <tr>\n", " <th>5</th>\n", " <td>140.0</td>\n", " <td>16.0</td>\n", " <td>109.0</td>\n", " <td>516.0</td>\n", " <td>75.0</td>\n", " <td>3598.0</td>\n", " <td>109.0</td>\n", " <td>161.0</td>\n", " <td>32.0</td>\n", " <td>228.0</td>\n", " </tr>\n", " <tr>\n", " <th>6</th>\n", " <td>64.0</td>\n", " <td>62.0</td>\n", " <td>115.0</td>\n", " <td>588.0</td>\n", " <td>106.0</td>\n", " <td>171.0</td>\n", " <td>3610.0</td>\n", " <td>24.0</td>\n", " <td>55.0</td>\n", " <td>245.0</td>\n", " </tr>\n", " <tr>\n", " <th>7</th>\n", " <td>115.0</td>\n", " <td>10.0</td>\n", " <td>47.0</td>\n", " <td>118.0</td>\n", " <td>167.0</td>\n", " <td>999.0</td>\n", " <td>36.0</td>\n", " <td>3006.0</td>\n", " <td>23.0</td>\n", " <td>530.0</td>\n", " </tr>\n", " <tr>\n", " <th>8</th>\n", " <td>494.0</td>\n", " <td>253.0</td>\n", " <td>14.0</td>\n", " <td>43.0</td>\n", " <td>1.0</td>\n", " <td>29.0</td>\n", " <td>14.0</td>\n", " <td>10.0</td>\n", " <td>3904.0</td>\n", " <td>229.0</td>\n", " </tr>\n", " <tr>\n", " <th>9</th>\n", " <td>63.0</td>\n", " <td>434.0</td>\n", " <td>3.0</td>\n", " <td>14.0</td>\n", " <td>0.0</td>\n", " <td>33.0</td>\n", " <td>4.0</td>\n", " <td>18.0</td>\n", " <td>88.0</td>\n", " <td>4331.0</td>\n", " </tr>\n", " </tbody>\n", "</table>\n", "</div>" ], "text/plain": [ " 0 1 2 3 4 5 6 7 8 \\\n", "0 2945.0 178.0 75.0 60.0 6.0 62.0 31.0 37.0 1137.0 \n", "1 12.0 4019.0 1.0 16.0 0.0 8.0 17.0 6.0 99.0 \n", "2 1138.0 28.0 1146.0 525.0 265.0 757.0 597.0 207.0 127.0 \n", "3 244.0 23.0 138.0 1234.0 60.0 2354.0 367.0 118.0 65.0 \n", "4 302.0 33.0 422.0 399.0 1887.0 516.0 602.0 
611.0 36.0 \n", "5 140.0 16.0 109.0 516.0 75.0 3598.0 109.0 161.0 32.0 \n", "6 64.0 62.0 115.0 588.0 106.0 171.0 3610.0 24.0 55.0 \n", "7 115.0 10.0 47.0 118.0 167.0 999.0 36.0 3006.0 23.0 \n", "8 494.0 253.0 14.0 43.0 1.0 29.0 14.0 10.0 3904.0 \n", "9 63.0 434.0 3.0 14.0 0.0 33.0 4.0 18.0 88.0 \n", "\n", " 9 \n", "0 423.0 \n", "1 838.0 \n", "2 202.0 \n", "3 340.0 \n", "4 233.0 \n", "5 228.0 \n", "6 245.0 \n", "7 530.0 \n", "8 229.0 \n", "9 4331.0 " ] }, "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Extract the confusion matrices \n", "confusion_matrices = {key: value[\"confusion_matrix\"] for key, value in info[\"stats\"].items()}\n", "# Display the confusione matrices for the training set\n", "df = pd.DataFrame(confusion_matrices[\"train\"])\n", "df" ] }, { "cell_type": "code", "execution_count": 7, "metadata": { "collapsed": false, "deletable": true, "editable": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Original training set size 50000. 
Augmented training set size: 100000\n", "<tf.Variable 'VGG/64/conv1/W:0' shape=(3, 3, 3, 64) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv1/b:0' shape=(64,) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv2/W:0' shape=(3, 3, 64, 64) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv2/b:0' shape=(64,) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv3/W:0' shape=(3, 3, 64, 128) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv3/b:0' shape=(128,) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv4/W:0' shape=(3, 3, 128, 128) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv4/b:0' shape=(128,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv5/W:0' shape=(3, 3, 128, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv5/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv6/W:0' shape=(3, 3, 256, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv6/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv7/W:0' shape=(3, 3, 256, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv7/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv8/W:0' shape=(3, 3, 256, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv8/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv9/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv9/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv10/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv10/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv11/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv11/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv12/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv12/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv13/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv13/b:0' shape=(512,) dtype=float32_ref>\n", 
"<tf.Variable 'VGG/fc/W:0' shape=(512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/fc/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/softmax_linear/W:0' shape=(512, 10) dtype=float32_ref>\n", "<tf.Variable 'VGG/softmax_linear/b:0' shape=(10,) dtype=float32_ref>\n", "Model VGG: trainable parameters: 14982474. Size: 59929.896 KB\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "2017-05-22 15:56:41.150636: step 2200, loss = 0.6995 (1489.9 examples/sec; 0.034 sec/batch)\n", "2017-05-22 15:56:47.728752: step 2400, loss = 1.2488 (1825.8 examples/sec; 0.027 sec/batch)\n", "2017-05-22 15:56:54.283833: step 2600, loss = 0.9503 (1493.5 examples/sec; 0.033 sec/batch)\n", "2017-05-22 15:57:00.739826: step 2800, loss = 0.8572 (1549.1 examples/sec; 0.032 sec/batch)\n", "2017-05-22 15:57:07.264069: step 3000, loss = 1.0171 (1491.0 examples/sec; 0.034 sec/batch)\n", "2017-05-22 15:57:13.645133: step 3200, loss = 0.7402 (1206.1 examples/sec; 0.041 sec/batch)\n", "2017-05-22 15:57:20.242200: step 3400, loss = 0.9686 (1741.3 examples/sec; 0.029 sec/batch)\n", "2017-05-22 15:57:26.501463: step 3600, loss = 0.9150 (1587.2 examples/sec; 0.032 sec/batch)\n", "2017-05-22 15:57:33.058041: step 3800, loss = 0.6963 (1597.9 examples/sec; 0.031 sec/batch)\n", "2017-05-22 15:57:39.353625: step 4000, loss = 1.1301 (1725.1 examples/sec; 0.029 sec/batch)\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/model.ckpt-4000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/model.ckpt-4000\n", "2017-05-22 15:57:57.462822 (2): train accuracy = 0.749 validation accuracy = 0.724\n", "INFO:tensorflow:Restoring parameters from 
/data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-4000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-4000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-4000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-4000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-4000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best/model.ckpt-4000\n" ] } ], "source": [ "# 2: train it again for another epoch\n", "# Note the `force_restart` parameter removed.\n", "# `epochs` is the TOTAL number of epoch for the trained model\n", "# Thus since we trained it before for a single epoch,\n", "# we set \"epochs\": 2 in order to train it for another epoch\n", "\n", "with tf.device(device):\n", " info = train(\n", " model=vgg,\n", " dataset=cifar10,\n", " hyperparameters={\n", " \"epochs\": 2,\n", " \"batch_size\": 50,\n", " \"regularizations\": {\n", " \"l2\": 1e-5,\n", " \"augmentation\": {\n", " \"name\": \"FlipLR\",\n", " \"fn\": tf.image.random_flip_left_right,\n", " \"factor\": 2,\n", " }\n", " },\n", " \"gd\": {\n", " \"optimizer\": tf.train.AdamOptimizer,\n", " \"args\": {\n", " \"learning_rate\": 1e-3,\n", " \"beta1\": 0.9,\n", " \"beta2\": 0.99,\n", " \"epsilon\": 1e-8\n", " }\n", " }\n", " })" ] }, { "cell_type": "code", "execution_count": 8, "metadata": { "collapsed": false, "deletable": true, "editable": true }, "outputs": [ { "data": { "text/html": [ "<div>\n", "<table border=\"1\" class=\"dataframe\">\n", " <thead>\n", " <tr style=\"text-align: right;\">\n", " <th></th>\n", " <th>test</th>\n", " <th>train</th>\n", " 
<th>validation</th>\n", " </tr>\n", " </thead>\n", " <tbody>\n", " <tr>\n", " <th>accuracy</th>\n", " <td>0.7241</td>\n", " <td>0.74684</td>\n", " <td>0.724</td>\n", " </tr>\n", " </tbody>\n", "</table>\n", "</div>" ], "text/plain": [ " test train validation\n", "accuracy 0.7241 0.74684 0.724" ] }, "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Display the results in a table. Let's use a Pandas DataFrame for that\n", "accuracies = {key: value[\"accuracy\"] for key, value in info[\"stats\"].items()}\n", "df = pd.DataFrame.from_records(accuracies, index=[\"accuracy\"])\n", "df" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "collapsed": true, "deletable": true, "editable": true }, "outputs": [], "source": [ "# Save last trained model info\n", "vggInfo = info" ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "collapsed": false, "deletable": true, "editable": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Original training set size 50000. 
Augmented training set size: 100000\n", "<tf.Variable 'VGG/64/conv1/W:0' shape=(3, 3, 3, 64) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv1/b:0' shape=(64,) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv2/W:0' shape=(3, 3, 64, 64) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv2/b:0' shape=(64,) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv3/W:0' shape=(3, 3, 64, 128) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv3/b:0' shape=(128,) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv4/W:0' shape=(3, 3, 128, 128) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv4/b:0' shape=(128,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv5/W:0' shape=(3, 3, 128, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv5/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv6/W:0' shape=(3, 3, 256, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv6/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv7/W:0' shape=(3, 3, 256, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv7/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv8/W:0' shape=(3, 3, 256, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv8/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv9/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv9/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv10/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv10/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv11/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv11/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv12/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv12/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv13/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv13/b:0' shape=(512,) dtype=float32_ref>\n", 
"<tf.Variable 'VGG/fc/W:0' shape=(512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/fc/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/softmax_linear/W:0' shape=(512, 100) dtype=float32_ref>\n", "<tf.Variable 'VGG/softmax_linear/b:0' shape=(100,) dtype=float32_ref>\n", "Model VGG: trainable parameters: 15028644. Size: 60114.576 KB\n", "[!] No checkpoint file found\n", "2017-05-22 15:58:46.456678: step 0, loss = 4.8550 (18.5 examples/sec; 2.706 sec/batch)\n", "2017-05-22 15:58:53.423647: step 200, loss = 4.6459 (1422.8 examples/sec; 0.035 sec/batch)\n", "2017-05-22 15:58:59.787396: step 400, loss = 4.6378 (1633.2 examples/sec; 0.031 sec/batch)\n", "2017-05-22 15:59:06.429601: step 600, loss = 4.6263 (1706.8 examples/sec; 0.029 sec/batch)\n", "2017-05-22 15:59:12.840329: step 800, loss = 4.6233 (1677.9 examples/sec; 0.030 sec/batch)\n", "2017-05-22 15:59:19.697066: step 1000, loss = 4.6179 (1469.4 examples/sec; 0.034 sec/batch)\n", "2017-05-22 15:59:26.260296: step 1200, loss = 4.6106 (1363.8 examples/sec; 0.037 sec/batch)\n", "2017-05-22 15:59:32.943655: step 1400, loss = 4.6101 (1664.8 examples/sec; 0.030 sec/batch)\n", "2017-05-22 15:59:39.250269: step 1600, loss = 4.6050 (1547.8 examples/sec; 0.032 sec/batch)\n", "2017-05-22 15:59:46.037072: step 1800, loss = 4.6037 (1602.8 examples/sec; 0.031 sec/batch)\n", "2017-05-22 15:59:52.438255: step 2000, loss = 4.6048 (1608.5 examples/sec; 0.031 sec/batch)\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/model.ckpt-2000\n", "2017-05-22 16:00:11.160246 (1): train accuracy = 0.010 validation accuracy = 0.010\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from 
/data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n" ] } ], "source": [ "# 3: TRANSFER LEARNING\n", "# Use the best model trained on Cifar10, to classify Cifar 100 images.\n", "# Thus we train ONLY the softmax linear scope (that has 100 neurons, now),\n", "# keeping constant any other previosly trained layer\n", "# We load the weights from the previous trained model, or better\n", "# DyTB saves the \"best\" model (w.r.t. 
a metric) in a separate folder\n", "# So we extract the info[\"paths\"][\"best\"] path, that's the path of the best\n", "# model trained so far.\n", "cifar100 = Cifar100.Cifar100()\n", "with tf.device(device):\n", " transferInfo = train(\n", " model=vgg,\n", " dataset=cifar100,\n", " hyperparameters={\n", " \"epochs\": 1,\n", " \"batch_size\": 50,\n", " \"regularizations\": {\n", " \"l2\": 1e-5,\n", " \"augmentation\": {\n", " \"name\": \"FlipLR\",\n", " \"fn\": tf.image.random_flip_left_right,\n", " \"factor\": 2,\n", " }\n", " },\n", " \"gd\": {\n", " \"optimizer\": tf.train.AdamOptimizer,\n", " \"args\": {\n", " \"learning_rate\": 1e-3,\n", " \"beta1\": 0.9,\n", " \"beta2\": 0.99,\n", " \"epsilon\": 1e-8\n", " }\n", " }\n", " },\n", " force_restart=True,\n", " surgery={\n", " \"checkpoint_path\": vggInfo[\"paths\"][\"best\"],\n", " \"exclude_scopes\": \"VGG/softmax_linear\",\n", " \"trainable_scopes\": \"VGG/softmax_linear\"\n", " })" ] }, { "cell_type": "code", "execution_count": 11, "metadata": { "collapsed": false, "deletable": true, "editable": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Original training set size 50000. 
Augmented training set size: 100000\n", "<tf.Variable 'VGG/64/conv1/W:0' shape=(3, 3, 3, 64) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv1/b:0' shape=(64,) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv2/W:0' shape=(3, 3, 64, 64) dtype=float32_ref>\n", "<tf.Variable 'VGG/64/conv2/b:0' shape=(64,) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv3/W:0' shape=(3, 3, 64, 128) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv3/b:0' shape=(128,) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv4/W:0' shape=(3, 3, 128, 128) dtype=float32_ref>\n", "<tf.Variable 'VGG/128/conv4/b:0' shape=(128,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv5/W:0' shape=(3, 3, 128, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv5/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv6/W:0' shape=(3, 3, 256, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv6/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv7/W:0' shape=(3, 3, 256, 256) dtype=float32_ref>\n", "<tf.Variable 'VGG/256/conv7/b:0' shape=(256,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv8/W:0' shape=(3, 3, 256, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv8/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv9/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv9/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv10/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512/conv10/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv11/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv11/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv12/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv12/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv13/W:0' shape=(3, 3, 512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/512b2/conv13/b:0' shape=(512,) dtype=float32_ref>\n", 
"<tf.Variable 'VGG/fc/W:0' shape=(512, 512) dtype=float32_ref>\n", "<tf.Variable 'VGG/fc/b:0' shape=(512,) dtype=float32_ref>\n", "<tf.Variable 'VGG/softmax_linear/W:0' shape=(512, 100) dtype=float32_ref>\n", "<tf.Variable 'VGG/softmax_linear/b:0' shape=(100,) dtype=float32_ref>\n", "Model VGG: trainable parameters: 15028644. Size: 60114.576 KB\n", "[!] No checkpoint file found\n", "2017-05-22 16:00:58.062810: step 0, loss = 4.7794 (23.6 examples/sec; 2.122 sec/batch)\n", "2017-05-22 16:01:04.976976: step 200, loss = 4.6412 (1874.0 examples/sec; 0.027 sec/batch)\n", "2017-05-22 16:01:11.427974: step 400, loss = 4.6381 (1594.2 examples/sec; 0.031 sec/batch)\n", "2017-05-22 16:01:18.114518: step 600, loss = 4.6311 (1877.8 examples/sec; 0.027 sec/batch)\n", "2017-05-22 16:01:24.520331: step 800, loss = 4.6238 (1550.1 examples/sec; 0.032 sec/batch)\n", "2017-05-22 16:01:31.066939: step 1000, loss = 4.6181 (1501.1 examples/sec; 0.033 sec/batch)\n", "2017-05-22 16:01:37.486925: step 1200, loss = 4.6126 (1623.5 examples/sec; 0.031 sec/batch)\n", "2017-05-22 16:01:43.917332: step 1400, loss = 4.6089 (1853.3 examples/sec; 0.027 sec/batch)\n", "2017-05-22 16:01:50.444527: step 1600, loss = 4.6070 (1363.7 examples/sec; 0.037 sec/batch)\n", "2017-05-22 16:01:56.757677: step 1800, loss = 4.6046 (1828.3 examples/sec; 0.027 sec/batch)\n", "2017-05-22 16:02:03.368083: step 2000, loss = 4.6055 (1511.6 examples/sec; 0.033 sec/batch)\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/model.ckpt-2000\n", "2017-05-22 16:02:21.476925 (1): train accuracy = 0.010 validation accuracy = 0.010\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from 
/data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n", "INFO:tensorflow:Restoring parameters from /data/pgaleone/dtb_work/examples/log/VGG/CIFAR-100_Adam_l2=1e-05_fliplr/best/model.ckpt-2000\n" ] } ], "source": [ "# 4: FINE TUNING:\n", "# Use the model pointed by vggInfo to fine tune the whole network\n", "# and tune it on Cifar100.\n", "# Let's retrain the whole network end-to-end, starting from the learned weights\n", "# Just remove the \"traiable_scopes\" section from the surgery parameter\n", "with tf.device(device):\n", " fineTuningInfo = train(\n", " model=vgg,\n", " dataset=cifar100,\n", " hyperparameters={\n", " \"epochs\": 1,\n", " \"batch_size\": 50,\n", " \"regularizations\": {\n", " \"l2\": 1e-5,\n", " \"augmentation\": {\n", " \"name\": \"FlipLR\",\n", " \"fn\": tf.image.random_flip_left_right,\n", " \"factor\": 2,\n", " }\n", " },\n", " \"gd\": {\n", " \"optimizer\": tf.train.AdamOptimizer,\n", " \"args\": {\n", " \"learning_rate\": 1e-3,\n", " \"beta1\": 0.9,\n", " \"beta2\": 0.99,\n", " \"epsilon\": 1e-8\n", " }\n", " }\n", " },\n", " force_restart=True,\n", " surgery={\n", " \"checkpoint_path\": vggInfo[\"paths\"][\"best\"],\n", " \"exclude_scopes\": \"VGG/softmax_linear\"\n", " })\n" ] }, { "cell_type": "code", "execution_count": 12, "metadata": { "collapsed": false, "deletable": true, "editable": true }, "outputs": [ { "data": { "text/html": [ "<div>\n", "<table border=\"1\" class=\"dataframe\">\n", " <thead>\n", " <tr style=\"text-align: right;\">\n", " <th></th>\n", " <th>test</th>\n", " 
<th>train</th>\n", " <th>validation</th>\n", " </tr>\n", " </thead>\n", " <tbody>\n", " <tr>\n", " <th>accuracy</th>\n", " <td>0.01</td>\n", " <td>0.01032</td>\n", " <td>0.01</td>\n", " </tr>\n", " </tbody>\n", "</table>\n", "</div>" ], "text/plain": [ " test train validation\n", "accuracy 0.01 0.01032 0.01" ] }, "execution_count": 12, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Compare the performance of Transfer learning and Fine Tuning\n", "accuracies = {key: value[\"accuracy\"] for key, value in transferInfo[\"stats\"].items()}\n", "df = pd.DataFrame.from_records(accuracies, index=[\"accuracy\"])\n", "df" ] }, { "cell_type": "code", "execution_count": 13, "metadata": { "collapsed": false, "deletable": true, "editable": true }, "outputs": [ { "data": { "text/html": [ "<div>\n", "<table border=\"1\" class=\"dataframe\">\n", " <thead>\n", " <tr style=\"text-align: right;\">\n", " <th></th>\n", " <th>test</th>\n", " <th>train</th>\n", " <th>validation</th>\n", " </tr>\n", " </thead>\n", " <tbody>\n", " <tr>\n", " <th>accuracy</th>\n", " <td>0.01</td>\n", " <td>0.0101</td>\n", " <td>0.01</td>\n", " </tr>\n", " </tbody>\n", "</table>\n", "</div>" ], "text/plain": [ " test train validation\n", "accuracy 0.01 0.0101 0.01" ] }, "execution_count": 13, "metadata": {}, "output_type": "execute_result" } ], "source": [ "accuracies = {key: value[\"accuracy\"] for key, value in fineTuningInfo[\"stats\"].items()}\n", "df = pd.DataFrame.from_records(accuracies, index=[\"accuracy\"])\n", "df" ] }, { "cell_type": "code", "execution_count": 14, "metadata": { "collapsed": false, "deletable": true, "editable": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "{ 'args': { 'batch_size': 50,\n", " 'checkpoint_path': '',\n", " 'comment': '',\n", " 'dataset': <dytb.inputs.predefined.Cifar10.Cifar10 object at 0x7f42e04a86a0>,\n", " 'epochs': 2,\n", " 'exclude_scopes': None,\n", " 'force_restart': False,\n", " 'gd': { 'args': { 'beta1': 
0.9,\n", " 'beta2': 0.99,\n", " 'epsilon': 1e-08,\n", " 'learning_rate': 0.001},\n", " 'optimizer': <class 'tensorflow.python.training.adam.AdamOptimizer'>},\n", " 'lr_decay': {'enabled': False, 'epochs': 25, 'factor': 0.1},\n", " 'model': <dytb.models.predefined.VGG.VGG object at 0x7f4289c98ef0>,\n", " 'regularizations': { 'augmentation': { 'factor': 2,\n", " 'fn': <function random_flip_left_right at 0x7f4289d5c7b8>,\n", " 'name': 'FlipLR'},\n", " 'l2': 1e-05},\n", " 'seed': None,\n", " 'trainable_scopes': None},\n", " 'paths': { 'best': '/data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr/best',\n", " 'current': '/data/pgaleone/dtb_work/examples',\n", " 'log': '/data/pgaleone/dtb_work/examples/log/VGG/CIFAR-10_Adam_l2=1e-05_fliplr'},\n", " 'stats': { 'test': { 'accuracy': 0.72409998625516891,\n", " 'confusion_matrix': array([[ 628., 7., 141., 10., 42., 2., 10., 58., 73.,\n", " 29.],\n", " [ 9., 861., 3., 0., 5., 2., 25., 5., 20.,\n", " 70.],\n", " [ 26., 0., 655., 29., 108., 54., 107., 16., 3.,\n", " 2.],\n", " [ 10., 5., 107., 364., 94., 194., 179., 26., 10.,\n", " 12.],\n", " [ 8., 0., 67., 14., 766., 10., 75., 57., 3.,\n", " 0.],\n", " [ 3., 0., 79., 119., 64., 657., 39., 35., 1.,\n", " 2.],\n", " [ 2., 0., 44., 23., 26., 3., 895., 3., 1.,\n", " 3.],\n", " [ 2., 0., 39., 27., 63., 95., 14., 757., 1.,\n", " 2.],\n", " [ 61., 21., 32., 6., 6., 4., 21., 4., 828.,\n", " 17.],\n", " [ 17., 72., 5., 8., 4., 0., 19., 33., 12.,\n", " 830.]])},\n", " 'train': { 'accuracy': 0.74683998459577561,\n", " 'confusion_matrix': array([[ 3.20800000e+03, 1.50000000e+01, 7.17000000e+02,\n", " 3.60000000e+01, 2.13000000e+02, 1.40000000e+01,\n", " 4.40000000e+01, 3.37000000e+02, 2.77000000e+02,\n", " 1.06000000e+02],\n", " [ 2.70000000e+01, 4.43800000e+03, 1.30000000e+01,\n", " 9.00000000e+00, 1.90000000e+01, 2.00000000e+00,\n", " 1.38000000e+02, 1.00000000e+01, 7.70000000e+01,\n", " 2.66000000e+02],\n", " [ 1.49000000e+02, 4.00000000e+00, 3.37000000e+03,\n", " 
1.65000000e+02, 5.16000000e+02, 1.84000000e+02,\n", " 5.26000000e+02, 8.60000000e+01, 3.30000000e+01,\n", " 5.00000000e+00],\n", " [ 4.30000000e+01, 2.00000000e+00, 4.88000000e+02,\n", " 1.95500000e+03, 4.33000000e+02, 1.08700000e+03,\n", " 8.02000000e+02, 7.80000000e+01, 4.20000000e+01,\n", " 3.00000000e+01],\n", " [ 2.80000000e+01, 0.00000000e+00, 2.98000000e+02,\n", " 7.20000000e+01, 3.93200000e+03, 6.40000000e+01,\n", " 3.22000000e+02, 2.93000000e+02, 5.00000000e+00,\n", " 4.00000000e+00],\n", " [ 5.00000000e+00, 0.00000000e+00, 3.10000000e+02,\n", " 6.19000000e+02, 3.21000000e+02, 3.36200000e+03,\n", " 2.34000000e+02, 1.62000000e+02, 5.00000000e+00,\n", " 1.20000000e+01],\n", " [ 8.00000000e+00, 6.00000000e+00, 1.94000000e+02,\n", " 1.13000000e+02, 1.28000000e+02, 2.10000000e+01,\n", " 4.54100000e+03, 3.00000000e+00, 1.70000000e+01,\n", " 4.00000000e+00],\n", " [ 1.40000000e+01, 0.00000000e+00, 1.75000000e+02,\n", " 1.22000000e+02, 3.33000000e+02, 3.60000000e+02,\n", " 4.20000000e+01, 3.89200000e+03, 1.00000000e+01,\n", " 1.60000000e+01],\n", " [ 2.30000000e+02, 7.80000000e+01, 1.45000000e+02,\n", " 3.40000000e+01, 8.00000000e+00, 5.00000000e+00,\n", " 8.50000000e+01, 2.20000000e+01, 4.32600000e+03,\n", " 4.60000000e+01],\n", " [ 9.30000000e+01, 2.57000000e+02, 2.40000000e+01,\n", " 4.70000000e+01, 2.10000000e+01, 1.40000000e+01,\n", " 6.10000000e+01, 1.01000000e+02, 4.20000000e+01,\n", " 4.35000000e+03]])},\n", " 'validation': { 'accuracy': 0.72399998486042028,\n", " 'confusion_matrix': array([[ 627., 7., 141., 10., 42., 2., 10., 59., 73.,\n", " 29.],\n", " [ 9., 861., 3., 0., 5., 2., 25., 5., 20.,\n", " 70.],\n", " [ 26., 0., 655., 29., 108., 54., 107., 16., 3.,\n", " 2.],\n", " [ 10., 5., 107., 363., 94., 194., 179., 26., 10.,\n", " 12.],\n", " [ 8., 0., 67., 14., 766., 10., 75., 57., 3.,\n", " 0.],\n", " [ 3., 0., 79., 119., 64., 658., 39., 35., 1.,\n", " 2.],\n", " [ 2., 0., 44., 23., 26., 3., 895., 3., 1.,\n", " 3.],\n", " [ 2., 0., 39., 27., 63., 95., 
14., 757., 1.,\n", " 2.],\n", " [ 61., 21., 32., 6., 6., 4., 21., 4., 828.,\n", " 17.],\n", " [ 17., 72., 5., 8., 4., 0., 19., 33., 12.,\n", " 830.]])}},\n", " 'steps': {'decay': 50000, 'epoch': 2000, 'log': 200, 'max': 4000}}\n" ] } ], "source": [ "# For completeness, lets see what a info object contains\n", "pprint.pprint(info, indent=4)" ] } ], "metadata": { "anaconda-cloud": {}, "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.1" } }, "nbformat": 4, "nbformat_minor": 1 }
{ "pile_set_name": "Github" }
Copyright (c) 2012-2016, Michael Bostock All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * The name Michael Bostock may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{ "pile_set_name": "Github" }
using Server.Items; using Server.Mobiles; using System; namespace Server.Engines.Quests { public class TheRightToolForTheJobQuest : BaseQuest { public override bool DoneOnce => true; /* The Right Tool for the Job */ public override object Title => 1077741; /* Create new scissors and hammers while inside Amelia's workshop. Try making scissors up to 45 skill, the switch to making hammers until 50 skill.<br><center>-----</center><br>Hello! I guess you're here to learn something about Tinkering, eh? You've come to the right place, as Tinkering is what I've dedicated my life to. <br><br>You'll need two things to get started: a supply of ingots and the right tools for the job. You can either buy ingots from the market, or go mine them yourself. As for tools, you can try making your own set of Tinker's Tools, or if you'd prefer to buy them, I have some for sale.<br><br>Working here in my shop will let me give you pointers as you go, so you'll be able to learn faster than anywhere else. Start off making scissors until you reach 45 tinkering skill, then switch to hammers until you've achieved 50. Once you've done that, come talk to me and I'll give you something for your hard work. */ public override object Description => 1077744; /* I’m disappointed that you aren’t interested in learning more about Tinkering. It’s really such a useful skill!<br><br> *Amelia smiles*<br><br>At least you know where to find me if you change your mind, since I rarely spend time outside of this shop. */ public override object Refuse => 1077745; /* Nice going! You're not quite at Apprentice Tinkering yet, though, so you better get back to work. Remember that the quickest way to learn is to make scissors up until 45 skill, and then switch to hammers. Also, don't forget that working here in my shop will let me give you tips so you can learn faster. */ public override object Uncomplete => 1077746; /* You've done it! Look at our brand new Apprentice Tinker! 
You've still got quite a lot to learn if you want to be a Grandmaster Tinker, but I believe you can do it! Just keep in mind that if you're tinkering just to practice and improve your skill, make items that are moderately difficult (60-80% success chance), and try to stick to ones that use less ingots. <br><br>Come here, my brand new Apprentice Tinker, I want to give you something special. I created this just for you, so I hope you like it. It's a set of Tinker's Tools that contains a bit of magic. These tools have more charges than any Tinker's Tools a Tinker can make. You can even use them to make a normal set of tools, so that way you won't ever find yourself stuck somewhere with no tools! */ public override object Complete => 1077748; public TheRightToolForTheJobQuest() : base() { AddObjective(new ApprenticeObjective(SkillName.Tinkering, 50, "Springs And Things Workshop", 1077742, 1077743)); // 1077742 By tinkering inside of Amelia’s workshop, she is able to give you advice. This helps you hone your Tinkering skill faster than normal. // 1077743 Since you’ve left Amelia’s workshop, she cannot give you advice. Your Tinkering learning potential is no longer enhanced. AddReward(new BaseReward(typeof(AmeliasToolbox), 1077749)); } public override bool CanOffer() { #region Scroll of Alacrity PlayerMobile pm = Owner as PlayerMobile; if (pm.AcceleratedStart > DateTime.UtcNow) { Owner.SendLocalizedMessage(1077951); // You are already under the effect of an accelerated skillgain scroll. return false; } #endregion else return Owner.Skills.Tinkering.Base < 50; } public override void OnCompleted() { Owner.SendLocalizedMessage(1077747, null, 0x23); // You have achieved the rank of Apprentice Tinker. Talk to Amelia Youngstone in New Haven to see what kind of reward she has waiting for you. 
Owner.PlaySound(CompleteSound); } public override void Serialize(GenericWriter writer) { base.Serialize(writer); writer.Write(0); // version } public override void Deserialize(GenericReader reader) { base.Deserialize(reader); int version = reader.ReadInt(); } } public class Amelia : MondainQuester { public override Type[] Quests => new Type[] { typeof(TheRightToolForTheJobQuest) }; public override void InitSBInfo() { SBInfos.Add(new SBTinker(this)); } [Constructable] public Amelia() : base("Amelia Youngstone", "The Tinkering Instructor") { SetSkill(SkillName.ArmsLore, 120.0, 120.0); SetSkill(SkillName.Blacksmith, 120.0, 120.0); SetSkill(SkillName.Magery, 120.0, 120.0); SetSkill(SkillName.Tactics, 120.0, 120.0); SetSkill(SkillName.Swords, 120.0, 120.0); SetSkill(SkillName.Tinkering, 120.0, 120.0); SetSkill(SkillName.Mining, 120.0, 120.0); } public Amelia(Serial serial) : base(serial) { } public override void Advertise() { Say(1078123); // Tinkering is very useful for a blacksmith. You can make your own tools. } public override void OnOfferFailed() { Say(1077772); // I cannot teach you, for you know all I can teach! } public override void InitBody() { Female = true; CantWalk = true; Race = Race.Human; base.InitBody(); } public override void InitOutfit() { AddItem(new Backpack()); AddItem(new Sandals()); AddItem(new ShortPants()); AddItem(new HalfApron(0x8AB)); AddItem(new Doublet()); } public override void Serialize(GenericWriter writer) { base.Serialize(writer); writer.Write(0); // version } public override void Deserialize(GenericReader reader) { base.Deserialize(reader); int version = reader.ReadInt(); } } }
{ "pile_set_name": "Github" }
<h1>Other page</h1>
{ "pile_set_name": "Github" }
<!-- - Copyright (C) 2004-2012, 2014 Internet Systems Consortium, Inc. ("ISC") - Copyright (C) 2000-2003 Internet Software Consortium. - - Permission to use, copy, modify, and/or distribute this software for any - purpose with or without fee is hereby granted, provided that the above - copyright notice and this permission notice appear in all copies. - - THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH - REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY - AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT, - INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM - LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE - OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR - PERFORMANCE OF THIS SOFTWARE. --> <!-- Id --> <html> <head> <meta http-equiv="Content-Type" content="text/html; charset=ISO-8859-1"> <title>nsupdate</title> <meta name="generator" content="DocBook XSL Stylesheets V1.71.1"> </head> <body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF"><div class="refentry" lang="en"> <a name="man.nsupdate"></a><div class="titlepage"></div> <div class="refnamediv"> <h2>Name</h2> <p><span class="application">nsupdate</span> &#8212; Dynamic DNS update utility</p> </div> <div class="refsynopsisdiv"> <h2>Synopsis</h2> <div class="cmdsynopsis"><p><code class="command">nsupdate</code> [<code class="option">-d</code>] [<code class="option">-D</code>] [[<code class="option">-g</code>] | [<code class="option">-o</code>] | [<code class="option">-l</code>] | [<code class="option">-y <em class="replaceable"><code>[<span class="optional">hmac:</span>]keyname:secret</code></em></code>] | [<code class="option">-k <em class="replaceable"><code>keyfile</code></em></code>]] [<code class="option">-t <em class="replaceable"><code>timeout</code></em></code>] [<code class="option">-u <em 
class="replaceable"><code>udptimeout</code></em></code>] [<code class="option">-r <em class="replaceable"><code>udpretries</code></em></code>] [<code class="option">-R <em class="replaceable"><code>randomdev</code></em></code>] [<code class="option">-v</code>] [<code class="option">-T</code>] [<code class="option">-P</code>] [<code class="option">-V</code>] [filename]</p></div> </div> <div class="refsect1" lang="en"> <a name="id2543485"></a><h2>DESCRIPTION</h2> <p><span><strong class="command">nsupdate</strong></span> is used to submit Dynamic DNS Update requests as defined in RFC 2136 to a name server. This allows resource records to be added or removed from a zone without manually editing the zone file. A single update request can contain requests to add or remove more than one resource record. </p> <p> Zones that are under dynamic control via <span><strong class="command">nsupdate</strong></span> or a DHCP server should not be edited by hand. Manual edits could conflict with dynamic updates and cause data to be lost. </p> <p> The resource records that are dynamically added or removed with <span><strong class="command">nsupdate</strong></span> have to be in the same zone. Requests are sent to the zone's master server. This is identified by the MNAME field of the zone's SOA record. </p> <p> The <code class="option">-d</code> option makes <span><strong class="command">nsupdate</strong></span> operate in debug mode. This provides tracing information about the update requests that are made and the replies received from the name server. </p> <p> The <code class="option">-D</code> option makes <span><strong class="command">nsupdate</strong></span> report additional debugging information to <code class="option">-d</code>. </p> <p> The <code class="option">-L</code> option with an integer argument of zero or higher sets the logging debug level. If zero, logging is disabled. </p> <p> Transaction signatures can be used to authenticate the Dynamic DNS updates. 
These use the TSIG resource record type described in RFC 2845 or the SIG(0) record described in RFC 2535 and RFC 2931 or GSS-TSIG as described in RFC 3645. TSIG relies on a shared secret that should only be known to <span><strong class="command">nsupdate</strong></span> and the name server. Currently, the only supported encryption algorithm for TSIG is HMAC-MD5, which is defined in RFC 2104. Once other algorithms are defined for TSIG, applications will need to ensure they select the appropriate algorithm as well as the key when authenticating each other. For instance, suitable <span class="type">key</span> and <span class="type">server</span> statements would be added to <code class="filename">/etc/named.conf</code> so that the name server can associate the appropriate secret key and algorithm with the IP address of the client application that will be using TSIG authentication. SIG(0) uses public key cryptography. To use a SIG(0) key, the public key must be stored in a KEY record in a zone served by the name server. <span><strong class="command">nsupdate</strong></span> does not read <code class="filename">/etc/named.conf</code>. </p> <p> GSS-TSIG uses Kerberos credentials. Standard GSS-TSIG mode is switched on with the <code class="option">-g</code> flag. A non-standards-compliant variant of GSS-TSIG used by Windows 2000 can be switched on with the <code class="option">-o</code> flag. </p> <p><span><strong class="command">nsupdate</strong></span> uses the <code class="option">-y</code> or <code class="option">-k</code> option to provide the shared secret needed to generate a TSIG record for authenticating Dynamic DNS update requests, default type HMAC-MD5. These options are mutually exclusive. 
</p> <p> When the <code class="option">-y</code> option is used, a signature is generated from [<span class="optional"><em class="parameter"><code>hmac:</code></em></span>]<em class="parameter"><code>keyname:secret.</code></em> <em class="parameter"><code>keyname</code></em> is the name of the key, and <em class="parameter"><code>secret</code></em> is the base64 encoded shared secret. <em class="parameter"><code>hmac</code></em> is the name of the key algorithm; valid choices are <code class="literal">hmac-md5</code>, <code class="literal">hmac-sha1</code>, <code class="literal">hmac-sha224</code>, <code class="literal">hmac-sha256</code>, <code class="literal">hmac-sha384</code>, or <code class="literal">hmac-sha512</code>. If <em class="parameter"><code>hmac</code></em> is not specified, the default is <code class="literal">hmac-md5</code>. NOTE: Use of the <code class="option">-y</code> option is discouraged because the shared secret is supplied as a command line argument in clear text. This may be visible in the output from <span class="citerefentry"><span class="refentrytitle">ps</span>(1)</span> or in a history file maintained by the user's shell. </p> <p> With the <code class="option">-k</code> option, <span><strong class="command">nsupdate</strong></span> reads the shared secret from the file <em class="parameter"><code>keyfile</code></em>. Keyfiles may be in two formats: a single file containing a <code class="filename">named.conf</code>-format <span><strong class="command">key</strong></span> statement, which may be generated automatically by <span><strong class="command">ddns-confgen</strong></span>, or a pair of files whose names are of the format <code class="filename">K{name}.+157.+{random}.key</code> and <code class="filename">K{name}.+157.+{random}.private</code>, which can be generated by <span><strong class="command">dnssec-keygen</strong></span>. 
The <code class="option">-k</code> may also be used to specify a SIG(0) key used to authenticate Dynamic DNS update requests. In this case, the key specified is not an HMAC-MD5 key. </p> <p> <span><strong class="command">nsupdate</strong></span> can be run in a local-host only mode using the <code class="option">-l</code> flag. This sets the server address to localhost (disabling the <span><strong class="command">server</strong></span> so that the server address cannot be overridden). Connections to the local server will use a TSIG key found in <code class="filename">/var/run/named/session.key</code>, which is automatically generated by <span><strong class="command">named</strong></span> if any local master zone has set <span><strong class="command">update-policy</strong></span> to <span><strong class="command">local</strong></span>. The location of this key file can be overridden with the <code class="option">-k</code> option. </p> <p> By default, <span><strong class="command">nsupdate</strong></span> uses UDP to send update requests to the name server unless they are too large to fit in a UDP request in which case TCP will be used. The <code class="option">-v</code> option makes <span><strong class="command">nsupdate</strong></span> use a TCP connection. This may be preferable when a batch of update requests is made. </p> <p> The <code class="option">-p</code> sets the default port number to use for connections to a name server. The default is 53. </p> <p> The <code class="option">-t</code> option sets the maximum time an update request can take before it is aborted. The default is 300 seconds. Zero can be used to disable the timeout. </p> <p> The <code class="option">-u</code> option sets the UDP retry interval. The default is 3 seconds. If zero, the interval will be computed from the timeout interval and number of UDP retries. </p> <p> The <code class="option">-r</code> option sets the number of UDP retries. The default is 3. 
If zero, only one update request will be made. </p> <p> The <code class="option">-R <em class="replaceable"><code>randomdev</code></em></code> option specifies a source of randomness. If the operating system does not provide a <code class="filename">/dev/random</code> or equivalent device, the default source of randomness is keyboard input. <code class="filename">randomdev</code> specifies the name of a character device or file containing random data to be used instead of the default. The special value <code class="filename">keyboard</code> indicates that keyboard input should be used. This option may be specified multiple times. </p> <p> Other types can be entered using "TYPEXXXXX" where "XXXXX" is the decimal value of the type with no leading zeros. The rdata, if present, will be parsed using the UNKNOWN rdata format, (&lt;backslash&gt; &lt;hash&gt; &lt;space&gt; &lt;length&gt; &lt;space&gt; &lt;hexstring&gt;). </p> <p> The <code class="option">-T</code> and <code class="option">-P</code> options print out lists of non-meta types for which the type-specific presentation formats are known. <code class="option">-T</code> prints out the list of IANA-assigned types. <code class="option">-P</code> prints out the list of private types specific to <span><strong class="command">named</strong></span>. These options may be combined. <span><strong class="command">nsupdate</strong></span> will exit after the lists are printed. </p> <p> The -V option causes <span><strong class="command">nsupdate</strong></span> to print the version number and exit. </p> </div> <div class="refsect1" lang="en"> <a name="id2543885"></a><h2>INPUT FORMAT</h2> <p><span><strong class="command">nsupdate</strong></span> reads input from <em class="parameter"><code>filename</code></em> or standard input. Each command is supplied on exactly one line of input. Some commands are for administrative purposes. The others are either update instructions or prerequisite checks on the contents of the zone. 
These checks set conditions that some name or set of resource records (RRset) either exists or is absent from the zone. These conditions must be met if the entire update request is to succeed. Updates will be rejected if the tests for the prerequisite conditions fail. </p> <p> Every update request consists of zero or more prerequisites and zero or more updates. This allows a suitably authenticated update request to proceed if some specified resource records are present or missing from the zone. A blank input line (or the <span><strong class="command">send</strong></span> command) causes the accumulated commands to be sent as one Dynamic DNS update request to the name server. </p> <p> The command formats and their meaning are as follows: </p> <div class="variablelist"><dl> <dt><span class="term"> <span><strong class="command">server</strong></span> {servername} [port] </span></dt> <dd><p> Sends all dynamic update requests to the name server <em class="parameter"><code>servername</code></em>. When no server statement is provided, <span><strong class="command">nsupdate</strong></span> will send updates to the master server of the correct zone. The MNAME field of that zone's SOA record will identify the master server for that zone. <em class="parameter"><code>port</code></em> is the port number on <em class="parameter"><code>servername</code></em> where the dynamic update requests get sent. If no port number is specified, the default DNS port number of 53 is used. </p></dd> <dt><span class="term"> <span><strong class="command">local</strong></span> {address} [port] </span></dt> <dd><p> Sends all dynamic update requests using the local <em class="parameter"><code>address</code></em>. When no local statement is provided, <span><strong class="command">nsupdate</strong></span> will send updates using an address and port chosen by the system. <em class="parameter"><code>port</code></em> can additionally be used to make requests come from a specific port. 
If no port number is specified, the system will assign one. </p></dd> <dt><span class="term"> <span><strong class="command">zone</strong></span> {zonename} </span></dt> <dd><p> Specifies that all updates are to be made to the zone <em class="parameter"><code>zonename</code></em>. If no <em class="parameter"><code>zone</code></em> statement is provided, <span><strong class="command">nsupdate</strong></span> will attempt to determine the correct zone to update based on the rest of the input. </p></dd> <dt><span class="term"> <span><strong class="command">class</strong></span> {classname} </span></dt> <dd><p> Specify the default class. If no <em class="parameter"><code>class</code></em> is specified, the default class is <em class="parameter"><code>IN</code></em>. </p></dd> <dt><span class="term"> <span><strong class="command">ttl</strong></span> {seconds} </span></dt> <dd><p> Specify the default time to live for records to be added. The value <em class="parameter"><code>none</code></em> will clear the default ttl. </p></dd> <dt><span class="term"> <span><strong class="command">key</strong></span> [hmac:] {keyname} {secret} </span></dt> <dd><p> Specifies that all updates are to be TSIG-signed using the <em class="parameter"><code>keyname</code></em> <em class="parameter"><code>secret</code></em> pair. If <em class="parameter"><code>hmac</code></em> is specified, then it sets the signing algorithm in use; the default is <code class="literal">hmac-md5</code>. The <span><strong class="command">key</strong></span> command overrides any key specified on the command line via <code class="option">-y</code> or <code class="option">-k</code>. </p></dd> <dt><span class="term"> <span><strong class="command">gsstsig</strong></span> </span></dt> <dd><p> Use GSS-TSIG to sign the update. This is equivalent to specifying <code class="option">-g</code> on the commandline. 
</p></dd> <dt><span class="term"> <span><strong class="command">oldgsstsig</strong></span> </span></dt> <dd><p> Use the Windows 2000 version of GSS-TSIG to sign the update. This is equivalent to specifying <code class="option">-o</code> on the commandline. </p></dd> <dt><span class="term"> <span><strong class="command">realm</strong></span> {[<span class="optional">realm_name</span>]} </span></dt> <dd><p> When using GSS-TSIG use <em class="parameter"><code>realm_name</code></em> rather than the default realm in <code class="filename">krb5.conf</code>. If no realm is specified the saved realm is cleared. </p></dd> <dt><span class="term"> <span><strong class="command">[<span class="optional">prereq</span>] nxdomain</strong></span> {domain-name} </span></dt> <dd><p> Requires that no resource record of any type exists with name <em class="parameter"><code>domain-name</code></em>. </p></dd> <dt><span class="term"> <span><strong class="command">[<span class="optional">prereq</span>] yxdomain</strong></span> {domain-name} </span></dt> <dd><p> Requires that <em class="parameter"><code>domain-name</code></em> exists (has at least one resource record, of any type). </p></dd> <dt><span class="term"> <span><strong class="command">[<span class="optional">prereq</span>] nxrrset</strong></span> {domain-name} [class] {type} </span></dt> <dd><p> Requires that no resource record exists of the specified <em class="parameter"><code>type</code></em>, <em class="parameter"><code>class</code></em> and <em class="parameter"><code>domain-name</code></em>. If <em class="parameter"><code>class</code></em> is omitted, IN (internet) is assumed. 
</p></dd> <dt><span class="term"> <span><strong class="command">[<span class="optional">prereq</span>] yxrrset</strong></span> {domain-name} [class] {type} </span></dt> <dd><p> This requires that a resource record of the specified <em class="parameter"><code>type</code></em>, <em class="parameter"><code>class</code></em> and <em class="parameter"><code>domain-name</code></em> must exist. If <em class="parameter"><code>class</code></em> is omitted, IN (internet) is assumed. </p></dd> <dt><span class="term"> <span><strong class="command">[<span class="optional">prereq</span>] yxrrset</strong></span> {domain-name} [class] {type} {data...} </span></dt> <dd><p> The <em class="parameter"><code>data</code></em> from each set of prerequisites of this form sharing a common <em class="parameter"><code>type</code></em>, <em class="parameter"><code>class</code></em>, and <em class="parameter"><code>domain-name</code></em> are combined to form a set of RRs. This set of RRs must exactly match the set of RRs existing in the zone at the given <em class="parameter"><code>type</code></em>, <em class="parameter"><code>class</code></em>, and <em class="parameter"><code>domain-name</code></em>. The <em class="parameter"><code>data</code></em> are written in the standard text representation of the resource record's RDATA. </p></dd> <dt><span class="term"> <span><strong class="command">[<span class="optional">update</span>] del[<span class="optional">ete</span>]</strong></span> {domain-name} [ttl] [class] [type [data...]] </span></dt> <dd><p> Deletes any resource records named <em class="parameter"><code>domain-name</code></em>. If <em class="parameter"><code>type</code></em> and <em class="parameter"><code>data</code></em> is provided, only matching resource records will be removed. The internet class is assumed if <em class="parameter"><code>class</code></em> is not supplied. The <em class="parameter"><code>ttl</code></em> is ignored, and is only allowed for compatibility. 
</p></dd> <dt><span class="term"> <span><strong class="command">[<span class="optional">update</span>] add</strong></span> {domain-name} {ttl} [class] {type} {data...} </span></dt> <dd><p> Adds a new resource record with the specified <em class="parameter"><code>ttl</code></em>, <em class="parameter"><code>class</code></em> and <em class="parameter"><code>data</code></em>. </p></dd> <dt><span class="term"> <span><strong class="command">show</strong></span> </span></dt> <dd><p> Displays the current message, containing all of the prerequisites and updates specified since the last send. </p></dd> <dt><span class="term"> <span><strong class="command">send</strong></span> </span></dt> <dd><p> Sends the current message. This is equivalent to entering a blank line. </p></dd> <dt><span class="term"> <span><strong class="command">answer</strong></span> </span></dt> <dd><p> Displays the answer. </p></dd> <dt><span class="term"> <span><strong class="command">debug</strong></span> </span></dt> <dd><p> Turn on debugging. </p></dd> <dt><span class="term"> <span><strong class="command">version</strong></span> </span></dt> <dd><p> Print version number. </p></dd> <dt><span class="term"> <span><strong class="command">help</strong></span> </span></dt> <dd><p> Print a list of commands. </p></dd> </dl></div> <p> </p> <p> Lines beginning with a semicolon are comments and are ignored. </p> </div> <div class="refsect1" lang="en"> <a name="id2544859"></a><h2>EXAMPLES</h2> <p> The examples below show how <span><strong class="command">nsupdate</strong></span> could be used to insert and delete resource records from the <span class="type">example.com</span> zone. Notice that the input in each example contains a trailing blank line so that a group of commands are sent as one dynamic update request to the master name server for <span class="type">example.com</span>. 
</p> <pre class="programlisting"> # nsupdate &gt; update delete oldhost.example.com A &gt; update add newhost.example.com 86400 A 172.16.1.1 &gt; send </pre> <p> </p> <p> Any A records for <span class="type">oldhost.example.com</span> are deleted. And an A record for <span class="type">newhost.example.com</span> with IP address 172.16.1.1 is added. The newly-added record has a 1 day TTL (86400 seconds). </p> <pre class="programlisting"> # nsupdate &gt; prereq nxdomain nickname.example.com &gt; update add nickname.example.com 86400 CNAME somehost.example.com &gt; send </pre> <p> </p> <p> The prerequisite condition gets the name server to check that there are no resource records of any type for <span class="type">nickname.example.com</span>. If there are, the update request fails. If this name does not exist, a CNAME for it is added. This ensures that when the CNAME is added, it cannot conflict with the long-standing rule in RFC 1034 that a name must not exist as any other record type if it exists as a CNAME. (The rule has been updated for DNSSEC in RFC 2535 to allow CNAMEs to have RRSIG, DNSKEY and NSEC records.) </p> </div> <div class="refsect1" lang="en"> <a name="id2544903"></a><h2>FILES</h2> <div class="variablelist"><dl> <dt><span class="term"><code class="constant">/etc/resolv.conf</code></span></dt> <dd><p> used to identify default name server </p></dd> <dt><span class="term"><code class="constant">/var/run/named/session.key</code></span></dt> <dd><p> sets the default TSIG key for use in local-only mode </p></dd> <dt><span class="term"><code class="constant">K{name}.+157.+{random}.key</code></span></dt> <dd><p> base-64 encoding of HMAC-MD5 key created by <span class="citerefentry"><span class="refentrytitle">dnssec-keygen</span>(8)</span>. 
</p></dd> <dt><span class="term"><code class="constant">K{name}.+157.+{random}.private</code></span></dt> <dd><p> base-64 encoding of HMAC-MD5 key created by <span class="citerefentry"><span class="refentrytitle">dnssec-keygen</span>(8)</span>. </p></dd> </dl></div> </div> <div class="refsect1" lang="en"> <a name="id2541982"></a><h2>SEE ALSO</h2> <p> <em class="citetitle">RFC 2136</em>, <em class="citetitle">RFC 3007</em>, <em class="citetitle">RFC 2104</em>, <em class="citetitle">RFC 2845</em>, <em class="citetitle">RFC 1034</em>, <em class="citetitle">RFC 2535</em>, <em class="citetitle">RFC 2931</em>, <span class="citerefentry"><span class="refentrytitle">named</span>(8)</span>, <span class="citerefentry"><span class="refentrytitle">ddns-confgen</span>(8)</span>, <span class="citerefentry"><span class="refentrytitle">dnssec-keygen</span>(8)</span>. </p> </div> <div class="refsect1" lang="en"> <a name="id2542040"></a><h2>BUGS</h2> <p> The TSIG key is redundantly stored in two separate files. This is a consequence of nsupdate using the DST library for its cryptographic operations, and may change in future releases. </p> </div> </div></body> </html>
{ "pile_set_name": "Github" }
<?xml version='1.0' encoding='utf-8'?> <section xmlns="https://code.dccouncil.us/schemas/dc-library" xmlns:codified="https://code.dccouncil.us/schemas/codified" xmlns:codify="https://code.dccouncil.us/schemas/codify" xmlns:xi="http://www.w3.org/2001/XInclude" containing-doc="D.C. Code"> <num>16-5004</num> <reason>Repealed</reason> <heading>Payment of fees.</heading> <text>Repealed.</text> <annotations> <annotation doc="D.C. Law 12-257" type="History">Apr. 20, 1999, D.C. Law 12-257, § 5, 46 DCR 1309</annotation> <annotation doc="D.C. Law 13-136" type="History">June 27, 2000, D.C. Law 13-136, § 203, 47 DCR 2850</annotation> <annotation type="Emergency Legislation">For temporary (90 day) repeal of this chapter, see § 3(b) of Adoption and Safe Families Compliance Emergency Amendment Act of 2001 (D.C. Act 14-65, June 6, 2001, 48 DCR 5721).</annotation> <annotation type="Emergency Legislation">For temporary (90 day) repeal of this chapter, see § 3(b) of the Adoption and Safe Families Compliance Congressional Review Emergency Amendment Act of 2000 (D.C. Act 13-451, November 7, 2000, 47 DCR 9399).</annotation> <annotation type="Emergency Legislation">For temporary (90-day) repeal of this chapter, see § 3(b) of the Adoption and Safe Families Compliance Emergency Amendment Act of 1999 (D.C. Act 13-383, July 24, 2000, 47 DCR 6700).</annotation> <annotation type="Emergency Legislation">For temporary repeal of chapter, see note to <cite path="§16-5001">§ 16-5001</cite>.</annotation> <annotation type="Temporary Legislation">Temporary addition of chapter: See note to <cite path="§16-5001">§ 16-5001</cite>.</annotation> <annotation type="Prior Codifications">1981 Ed., § 16-5004.</annotation> </annotations> </section>
{ "pile_set_name": "Github" }
// Copyright 2017 The TensorFlow Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// =============================================================================
#include "tensorflow/contrib/boosted_trees/lib/utils/examples_iterable.h"

namespace tensorflow {
namespace boosted_trees {
namespace utils {

using Iterator = ExamplesIterable::Iterator;

// Builds an iterable over the examples in [example_start, example_end).
// The constructor caches typed accessors for every feature column up front so
// that per-example iteration does no per-step tensor unpacking:
//   - dense float columns: a rank-2 float matrix view per column;
//   - sparse float/int columns: an index-range iterable (restricted to the
//     requested example range) plus a flat values vector per column.
// NOTE(review): assumes each sparse column's indices tensor is a rank-2 int64
// matrix and its values tensor is rank-1, as required by the accessors below.
ExamplesIterable::ExamplesIterable(
    const std::vector<Tensor>& dense_float_feature_columns,
    const std::vector<sparse::SparseTensor>& sparse_float_feature_columns,
    const std::vector<sparse::SparseTensor>& sparse_int_feature_columns,
    int64 example_start, int64 example_end)
    : example_start_(example_start), example_end_(example_end) {
  // Create dense float column values.
  dense_float_column_values_.reserve(dense_float_feature_columns.size());
  for (auto& dense_float_column : dense_float_feature_columns) {
    dense_float_column_values_.emplace_back(
        dense_float_column.template matrix<float>());
  }

  // Create sparse float column iterables and values. The iterable walks the
  // indices matrix over [example_start, example_end); values are kept in a
  // parallel vector at the same column position.
  sparse_float_column_iterables_.reserve(sparse_float_feature_columns.size());
  sparse_float_column_values_.reserve(sparse_float_feature_columns.size());
  for (auto& sparse_float_column : sparse_float_feature_columns) {
    sparse_float_column_iterables_.emplace_back(
        sparse_float_column.indices().template matrix<int64>(), example_start,
        example_end);
    sparse_float_column_values_.emplace_back(
        sparse_float_column.values().template vec<float>());
  }

  // Create sparse int column iterables and values (same layout as the sparse
  // float columns, but with int64 values).
  sparse_int_column_iterables_.reserve(sparse_int_feature_columns.size());
  sparse_int_column_values_.reserve(sparse_int_feature_columns.size());
  for (auto& sparse_int_column : sparse_int_feature_columns) {
    sparse_int_column_iterables_.emplace_back(
        sparse_int_column.indices().template matrix<int64>(), example_start,
        example_end);
    sparse_int_column_values_.emplace_back(
        sparse_int_column.values().template vec<int64>());
  }
}

// Positions a new iterator at `example_idx` within the parent iterable.
// One sparse-column iterator is created per sparse column, each starting at
// that column's begin(); the reusable `example_` struct is pre-sized to the
// column counts so advancing the iterator does not reallocate.
Iterator::Iterator(ExamplesIterable* iter, int64 example_idx)
    : iter_(iter), example_idx_(example_idx) {
  // Create sparse iterators, one per sparse float column.
  sparse_float_column_iterators_.reserve(
      iter->sparse_float_column_iterables_.size());
  for (auto& iterable : iter->sparse_float_column_iterables_) {
    sparse_float_column_iterators_.emplace_back(iterable.begin());
  }
  // ... and one per sparse int column.
  sparse_int_column_iterators_.reserve(
      iter->sparse_int_column_iterables_.size());
  for (auto& iterable : iter->sparse_int_column_iterables_) {
    sparse_int_column_iterators_.emplace_back(iterable.begin());
  }

  // Pre-size example features to match the number of columns of each kind.
  example_.dense_float_features.resize(
      iter_->dense_float_column_values_.size());
  example_.sparse_float_features.resize(
      iter_->sparse_float_column_values_.size());
  example_.sparse_int_features.resize(iter_->sparse_int_column_values_.size());
}

}  // namespace utils
}  // namespace boosted_trees
}  // namespace tensorflow
{ "pile_set_name": "Github" }
/** Unicode ranges used to compose the character class below. */
var rsAstralRange = '\\ud800-\\udfff';
var rsComboMarksRange = '\\u0300-\\u036f\\ufe20-\\ufe23';
var rsComboSymbolsRange = '\\u20d0-\\u20f0';
var rsVarRange = '\\ufe0e\\ufe0f';

/** The zero-width joiner code point. */
var rsZWJ = '\\u200d';

/**
 * Detects strings containing [zero-width joiners or code points from the
 * astral planes](http://eev.ee/blog/2015/09/12/dark-corners-of-unicode/).
 */
var reHasComplexSymbol = RegExp(
  '[' +
    [rsZWJ, rsAstralRange, rsComboMarksRange, rsComboSymbolsRange, rsVarRange].join('') +
  ']'
);

module.exports = reHasComplexSymbol;
{ "pile_set_name": "Github" }
{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# 宽度深度模型/wide and deep model" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 介绍\n", "\n", "在之前的代码里大家看到了如何用tensorflow自带的op来构建灵活的神经网络,这里用tf中的高级接口,用更简单的方式完成wide&deep模型。\n", "\n", "大家都知道google官方给出的典型wide&deep模型结构如下:\n", "![](https://img-blog.csdn.net/20170502135611349)\n", "\n", "更一般的拼接模型ctr预估结构可以如下:\n", "![](https://yxzf.github.io/images/deeplearning/dnn_ctr/embeding.png)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 导入工具库" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Using TensorFlow version 1.4.0\n", "\n", "Feature columns are: ['I1', 'I2', 'I3', 'I4', 'I5', 'I6', 'I7', 'I8', 'I9', 'I10', 'I11', 'I12', 'I13', 'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9', 'C10', 'C11', 'C12', 'C13', 'C14', 'C15', 'C16', 'C17', 'C18', 'C19', 'C20', 'C21', 'C22', 'C23', 'C24', 'C25', 'C26'] \n", "\n", "Columns and data as a dict: {'C19': 'f6a3e43b', 'C18': 'bd17c3da', 'C13': '7203f04e', 'C12': '79507c6b', 'C11': '77212bd7', 'C10': 'ceb10289', 'C17': '8efede7f', 'C16': '49013ffe', 'C15': '2c14c412', 'C14': '07d13a8f', 'I9': 475, 'I8': 17, 'I1': 0, 'I3': 1, 'I2': 127, 'I5': 1683, 'I4': 3, 'I7': 26, 'I6': 19, 'C9': 'a73ee510', 'C8': '0b153874', 'C3': '11c9d79e', 'C2': '8947f767', 'C1': '05db9164', 'C7': '18671b18', 'C6': 'fbad5c96', 'C5': '4cf72387', 'C4': '52a787c8', 'C22': 'ad3062eb', 'C23': 'c7dc6720', 'C20': 'a458ea53', 'C21': '35cd95c9', 'C26': '49d68486', 'C24': '3fdb382b', 'C25': '010f6491', 'I11': 9, 'I10': 0, 'I13': 3, 'I12': 0} \n", "\n" ] } ], "source": [ "from __future__ import absolute_import\n", "from __future__ import division\n", "from __future__ import print_function\n", "\n", "import time\n", "\n", "import tensorflow as tf\n", "\n", "tf.logging.set_verbosity(tf.logging.INFO)\n", "print(\"Using TensorFlow version %s\\n\" % (tf.__version__))\n", "\n", "# 
我们这里使用的是criteo数据集,X的部分包括13个连续值列和26个类别型值的列\n", "CONTINUOUS_COLUMNS = [\"I\"+str(i) for i in range(1,14)] # 1-13 inclusive\n", "CATEGORICAL_COLUMNS = [\"C\"+str(i) for i in range(1,27)] # 1-26 inclusive\n", "# 标签是clicked\n", "LABEL_COLUMN = [\"clicked\"]\n", "\n", "# 训练集由 label列 + 连续值列 + 离散值列 构成\n", "TRAIN_DATA_COLUMNS = LABEL_COLUMN + CONTINUOUS_COLUMNS + CATEGORICAL_COLUMNS\n", "#TEST_DATA_COLUMNS = CONTINUOUS_COLUMNS + CATEGORICAL_COLUMNS\n", "\n", "# 特征列就是 连续值列+离散值列\n", "FEATURE_COLUMNS = CONTINUOUS_COLUMNS + CATEGORICAL_COLUMNS\n", "\n", "# 输出一些信息\n", "print('Feature columns are: ', FEATURE_COLUMNS, '\\n')\n", "\n", "# 数据示例\n", "sample = [ 0, 127, 1, 3, 1683, 19, 26, 17, 475, 0, 9, 0, 3, \"05db9164\", \"8947f767\", \"11c9d79e\", \"52a787c8\", \"4cf72387\", \"fbad5c96\", \"18671b18\", \"0b153874\", \"a73ee510\", \"ceb10289\", \"77212bd7\", \"79507c6b\", \"7203f04e\", \"07d13a8f\", \"2c14c412\", \"49013ffe\", \"8efede7f\", \"bd17c3da\", \"f6a3e43b\", \"a458ea53\", \"35cd95c9\", \"ad3062eb\", \"c7dc6720\", \"3fdb382b\", \"010f6491\", \"49d68486\"]\n", "\n", "print('Columns and data as a dict: ', dict(zip(FEATURE_COLUMNS, sample)), '\\n')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 输入文件解析\n", "\n", "我们把数据送进`Reader`然后从文件里一次读一个batch \n", "\n", "对`_input_fn()`函数做了特殊的封装处理,使得它更适合不同类型的文件读取\n", "\n", "注意一下:这里的文件是直接通过tensorflow读取的,我们没有用pandas这种工具,也没有一次性把所有数据读入内存,这样对于非常大规模的数据文件训练,是合理的。" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### 关于input_fn函数\n", "\n", "这个函数定义了我们怎么读取数据用于训练和测试。这里的返回结果是一个pair对,第一个元素是列名到具体取值的映射字典,第二个元素是label的序列。\n", "\n", "抽象一下,大概是这么个东西 `map(column_name => [Tensor of values]) , [Tensor of labels])`\n", "\n", "举个例子就长这样:\n", "\n", " { \n", " 'age': [ 39, 50, 38, 53, 28, … ], \n", " 'marital_status': [ 'Married-civ-spouse', 'Never-married', 'Widowed', 'Widowed' … ],\n", " ...\n", " 'gender': ['Male', 'Female', 'Male', 'Male', 'Female',, … ], \n", " } , \n", " [ 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1]" ] }, { "cell_type": 
"markdown", "metadata": {}, "source": [ "### High-level structure of input functions for CSV-style data\n", "1. Queue file(s)\n", "2. Read a batch of data from the next file\n", "3. Create record defaults, generally 0 for continuous values, and \"\" for categorical. You can use named types if you prefer\n", "4. Decode the CSV and restructure it to be appropriate for the graph's input format\n", " * `zip()` column headers with the data\n", " * `pop()` off the label column(s)\n", " * Remove/pop any unneeded column(s)\n", " * Run `tf.expand_dims()` on categorical columns\n", " 5. Return the pair: `(feature_dict, label_array)`\n", " " ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "input function configured\n" ] } ], "source": [ "BATCH_SIZE = 2000\n", "\n", "def generate_input_fn(filename, batch_size=BATCH_SIZE):\n", " def _input_fn():\n", " filename_queue = tf.train.string_input_producer([filename])\n", " reader = tf.TextLineReader()\n", " # 只读batch_size行\n", " key, value = reader.read_up_to(filename_queue, num_records=batch_size)\n", " \n", " # 1个int型的label, 13个连续值, 26个字符串类型\n", " cont_defaults = [ [0] for i in range(1,14) ]\n", " cate_defaults = [ [\" \"] for i in range(1,27) ]\n", " label_defaults = [ [0] ]\n", " column_headers = TRAIN_DATA_COLUMNS\n", " \n", " # 第一列数据是label\n", " record_defaults = label_defaults + cont_defaults + cate_defaults\n", "\n", " # 解析读出的csv数据\n", " # 我们要手动把数据和header去zip在一起\n", " columns = tf.decode_csv(\n", " value, record_defaults=record_defaults)\n", " \n", " # 最终是列名到数据张量的映射字典\n", " all_columns = dict(zip(column_headers, columns))\n", " \n", " # 弹出和保存label标签\n", " labels = all_columns.pop(LABEL_COLUMN[0])\n", " \n", " # 其余列就是特征\n", " features = all_columns \n", "\n", " # 类别型的列我们要做一个类似one-hot的扩展操作\n", " for feature_name in CATEGORICAL_COLUMNS:\n", " features[feature_name] = tf.expand_dims(features[feature_name], -1)\n", "\n", " return features, 
labels\n", "\n", " return _input_fn\n", "\n", "print('input function configured')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 构建特征列\n", "这个部分我们来看一下用tensorflow的高级接口,如何方便地对特征进行处理" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "#### 稀疏列/Sparse Columns\n", "我们先构建稀疏列(针对类别型)\n", "\n", "对于所有类别取值都清楚的我们用`sparse_column_with_keys()`处理\n", "\n", "对于类别可能比较多,没办法枚举的可以试试用`sparse_column_with_hash_bucket()`处理这个映射" ] }, { "cell_type": "code", "execution_count": 3, "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Wide/Sparse columns configured\n" ] } ], "source": [ "# Sparse base columns.\n", "# C1 = tf.contrib.layers.sparse_column_with_hash_bucket('C1', hash_bucket_size=1000)\n", "# C2 = tf.contrib.layers.sparse_column_with_hash_bucket('C2', hash_bucket_size=1000)\n", "# C3 = tf.contrib.layers.sparse_column_with_hash_bucket('C3', hash_bucket_size=1000)\n", "# ...\n", "# Cn = tf.contrib.layers.sparse_column_with_hash_bucket('Cn', hash_bucket_size=1000)\n", "# wide_columns = [C1, C2, C3, ... , Cn]\n", "\n", "wide_columns = []\n", "for name in CATEGORICAL_COLUMNS:\n", " wide_columns.append(tf.contrib.layers.sparse_column_with_hash_bucket(\n", " name, hash_bucket_size=1000))\n", "\n", "print('Wide/Sparse columns configured')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "#### 连续值列/Continuous columns\n", "通过`real_valued_column()`设定连续值列" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "deep/continuous columns configured\n" ] } ], "source": [ "# Continuous base columns.\n", "# I1 = tf.contrib.layers.real_valued_column(\"I1\")\n", "# I2 = tf.contrib.layers.real_valued_column(\"I2\")\n", "# I3 = tf.contrib.layers.real_valued_column(\"I3\")\n", "# ...\n", "# In = tf.contrib.layers.real_valued_column(\"In\")\n", "# deep_columns = [I1, I2, I3, ... 
, In]\n", "\n", "deep_columns = []\n", "for name in CONTINUOUS_COLUMNS:\n", " deep_columns.append(tf.contrib.layers.real_valued_column(name))\n", "\n", "print('deep/continuous columns configured')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "#### 特征工程变换\n", "因为这是一份做过脱敏处理的数据,所以我们做下面的2个操作\n", " \n", "* **分桶/bucketizing** 对连续值离散化和分桶\n", "* **生成交叉特征/feature crossing** 对2列或者多列去构建交叉组合特征(注意只有离散的特征才能交叉,所以如果连续值特征要用这个处理,要先离散化) " ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Transformations complete\n" ] } ], "source": [ "# No known Transformations. Can add some if desired. \n", "# Examples from other datasets are shown below.\n", "\n", "# age_buckets = tf.contrib.layers.bucketized_column(age,\n", "# boundaries=[ 18, 25, 30, 35, 40, 45, 50, 55, 60, 65 ])\n", "# education_occupation = tf.contrib.layers.crossed_column([education, occupation], \n", "# hash_bucket_size=int(1e4))\n", "# age_race_occupation = tf.contrib.layers.crossed_column([age_buckets, race, occupation], \n", "# hash_bucket_size=int(1e6))\n", "# country_occupation = tf.contrib.layers.crossed_column([native_country, occupation], \n", "# hash_bucket_size=int(1e4))\n", "\n", "print('Transformations complete')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "### Group feature columns into 2 objects\n", "\n", "The wide columns are the sparse, categorical columns that we specified, as well as our hashed, bucket, and feature crossed columns. \n", "\n", "The deep columns are composed of embedded categorical columns along with the continuous real-valued columns. **Column embeddings** transform a sparse, categorical tensor into a low-dimensional and dense real-valued vector. The embedding values are also trained along with the rest of the model. 
For more information about embeddings, see the TensorFlow tutorial on [Vector Representations Words](https://www.tensorflow.org/tutorials/word2vec/), or [Word Embedding](https://en.wikipedia.org/wiki/Word_embedding) on Wikipedia.\n", "\n", "The higher the dimension of the embedding is, the more degrees of freedom the model will have to learn the representations of the features. We are starting with an 8-dimension embedding for simplicity, but later you can come back and increase the dimensionality if you wish.\n", "\n" ] }, { "cell_type": "code", "execution_count": 6, "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", 
"WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from 
\"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "WARNING:tensorflow:The default stddev value of initializer will change from \"1/sqrt(vocab_size)\" to \"1/sqrt(dimension)\" after 2017/02/25.\n", "wide and deep columns configured\n" ] } ], "source": [ "# Wide columns and deep columns.\n", "# wide_columns = [gender, race, native_country,\n", "# education, occupation, workclass,\n", "# marital_status, relationship,\n", "# age_buckets, education_occupation,\n", "# age_race_occupation, country_occupation]\n", "\n", "# deep_columns = [\n", "# tf.contrib.layers.embedding_column(workclass, dimension=8),\n", "# tf.contrib.layers.embedding_column(education, dimension=8),\n", "# tf.contrib.layers.embedding_column(marital_status, dimension=8),\n", "# tf.contrib.layers.embedding_column(gender, dimension=8),\n", "# tf.contrib.layers.embedding_column(relationship, dimension=8),\n", "# tf.contrib.layers.embedding_column(race, dimension=8),\n", "# tf.contrib.layers.embedding_column(native_country, dimension=8),\n", "# tf.contrib.layers.embedding_column(occupation, dimension=8),\n", "# age,\n", "# education_num,\n", "# capital_gain,\n", "# capital_loss,\n", "# hours_per_week,\n", "# ]\n", "\n", "# Embeddings for wide columns into deep columns\n", "for col in wide_columns:\n", " deep_columns.append(tf.contrib.layers.embedding_column(col, \n", " dimension=8))\n", "\n", "print('wide and deep columns configured')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 构建模型\n", "\n", "你可以根据实际情况构建“宽模型”、“深模型”、“深度宽度模型”\n", "\n", "* **Wide**: 相当于逻辑回归\n", "* **Deep**: 相当于多层感知器\n", "* **Wide & Deep**: 组合两种结构\n", "\n", "这里有2个参数`hidden_units` 或者 
`dnn_hidden_units`可以指定隐层的节点个数,比如`[12, 20, 15]`构建3层神经元个数分别为12、20、15的隐层。" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model directory = ./models/model_WIDE_AND_DEEP_1525425429\n", "WARNING:tensorflow:From <ipython-input-7-e9002d0430ed>:37: calling __init__ (from tensorflow.contrib.learn.python.learn.estimators.dnn_linear_combined) with fix_global_step_increment_bug=False is deprecated and will be removed after 2017-04-15.\n", "Instructions for updating:\n", "Please set fix_global_step_increment_bug=True and update training steps in your pipeline. See pydoc for details.\n", "INFO:tensorflow:Using config: {'_save_checkpoints_secs': None, '_num_ps_replicas': 0, '_keep_checkpoint_max': 5, '_task_type': None, '_is_chief': True, '_cluster_spec': <tensorflow.python.training.server_lib.ClusterSpec object at 0x9280ad0>, '_model_dir': './models/model_WIDE_AND_DEEP_1525425429', '_save_checkpoints_steps': 100, '_keep_checkpoint_every_n_hours': 10000, '_session_config': None, '_tf_random_seed': None, '_save_summary_steps': 100, '_environment': 'local', '_num_worker_replicas': 0, '_task_id': 0, '_log_step_count_steps': 100, '_tf_config': gpu_options {\n", " per_process_gpu_memory_fraction: 1.0\n", "}\n", ", '_evaluation_master': '', '_master': ''}\n", "estimator built\n" ] } ], "source": [ "def create_model_dir(model_type):\n", " # 返回类似这样的结果 models/model_WIDE_AND_DEEP_1493043407\n", " return './models/model_' + model_type + '_' + str(int(time.time()))\n", "\n", "# 指定模型文件夹\n", "def get_model(model_type, model_dir):\n", " print(\"Model directory = %s\" % model_dir)\n", " \n", " # 对checkpoint去做设定\n", " runconfig = tf.contrib.learn.RunConfig(\n", " save_checkpoints_secs=None,\n", " save_checkpoints_steps = 100,\n", " )\n", " \n", " m = None\n", " \n", " # 宽模型\n", " if model_type == 'WIDE':\n", " m = tf.contrib.learn.LinearClassifier(\n", " model_dir=model_dir, \n", " 
feature_columns=wide_columns)\n", "\n", " # 深度模型\n", " if model_type == 'DEEP':\n", " m = tf.contrib.learn.DNNClassifier(\n", " model_dir=model_dir,\n", " feature_columns=deep_columns,\n", " hidden_units=[100, 50, 25])\n", "\n", " # 宽度深度模型\n", " if model_type == 'WIDE_AND_DEEP':\n", " m = tf.contrib.learn.DNNLinearCombinedClassifier(\n", " model_dir=model_dir,\n", " linear_feature_columns=wide_columns,\n", " dnn_feature_columns=deep_columns,\n", " dnn_hidden_units=[100, 70, 50, 25],\n", " config=runconfig)\n", " \n", " print('estimator built')\n", " \n", " return m\n", " \n", "\n", "MODEL_TYPE = 'WIDE_AND_DEEP'\n", "model_dir = create_model_dir(model_type=MODEL_TYPE)\n", "m = get_model(model_type=MODEL_TYPE, model_dir=model_dir)" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "True" ] }, "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# 评估\n", "from tensorflow.contrib.learn.python.learn import evaluable\n", "isinstance(m, evaluable.Evaluable)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 拟合与模型训练\n", "\n", "执行`fit()`函数训练模型,可以试试不同的`train_steps`和`BATCH_SIZE`参数,会影响速度和结果" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# 训练文件与测试文件\n", "train_file = \"./criteo_data/criteo_train.txt\"\n", "eval_file = \"./criteo_data/criteo_test.txt\"" ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "scrolled": true }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. 
It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. 
It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Rank of input Tensor (1) should be the same as output_rank (2) for column. Will attempt to expand dims. It is highly recommended that you resize your input, as this behavior may change.\n", "WARNING:tensorflow:Casting <dtype: 'int32'> labels to bool.\n", "WARNING:tensorflow:Casting <dtype: 'int32'> labels to bool.\n", "INFO:tensorflow:Create CheckpointSaverHook.\n", "INFO:tensorflow:Saving checkpoints for 2 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:loss = 383.61926, step = 2\n", "INFO:tensorflow:Saving checkpoints for 104 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 23.1193\n", "INFO:tensorflow:loss = 0.52519834, step = 202 (7.250 sec)\n", "INFO:tensorflow:Saving checkpoints for 206 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 28.1098\n", "INFO:tensorflow:Saving checkpoints for 308 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8379\n", "INFO:tensorflow:loss = 0.52586925, step = 402 (5.846 sec)\n", "INFO:tensorflow:Saving checkpoints for 410 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.676\n", "INFO:tensorflow:Saving checkpoints for 512 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.5047\n", "INFO:tensorflow:loss = 0.50194484, step = 602 (5.955 sec)\n", "INFO:tensorflow:Saving checkpoints for 614 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8302\n", "INFO:tensorflow:Saving checkpoints for 716 into 
./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.3302\n", "INFO:tensorflow:loss = 0.53253466, step = 802 (6.003 sec)\n", "INFO:tensorflow:Saving checkpoints for 818 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.7999\n", "INFO:tensorflow:Saving checkpoints for 920 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8958\n", "INFO:tensorflow:loss = 0.5117367, step = 1002 (5.747 sec)\n", "INFO:tensorflow:Saving checkpoints for 1022 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8373\n", "INFO:tensorflow:Saving checkpoints for 1124 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.6937\n", "INFO:tensorflow:loss = 0.50721353, step = 1202 (5.966 sec)\n", "INFO:tensorflow:Saving checkpoints for 1226 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1806\n", "INFO:tensorflow:Saving checkpoints for 1328 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.6754\n", "INFO:tensorflow:loss = 0.5044991, step = 1402 (5.916 sec)\n", "INFO:tensorflow:Saving checkpoints for 1430 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8313\n", "INFO:tensorflow:Saving checkpoints for 1532 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7576\n", "INFO:tensorflow:loss = 0.5024924, step = 1602 (5.969 sec)\n", "INFO:tensorflow:Saving checkpoints for 1634 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8521\n", "INFO:tensorflow:Saving checkpoints for 1736 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.2324\n", "INFO:tensorflow:loss = 0.5241726, step = 1802 (5.717 sec)\n", 
"INFO:tensorflow:Saving checkpoints for 1838 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.0927\n", "INFO:tensorflow:Saving checkpoints for 1940 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9787\n", "INFO:tensorflow:loss = 0.52364856, step = 2002 (5.918 sec)\n", "INFO:tensorflow:Saving checkpoints for 2042 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.3599\n", "INFO:tensorflow:Saving checkpoints for 2144 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9638\n", "INFO:tensorflow:loss = 0.5026111, step = 2202 (5.891 sec)\n", "INFO:tensorflow:Saving checkpoints for 2246 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0174\n", "INFO:tensorflow:Saving checkpoints for 2348 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9178\n", "INFO:tensorflow:loss = 0.5019771, step = 2402 (5.738 sec)\n", "INFO:tensorflow:Saving checkpoints for 2450 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9129\n", "INFO:tensorflow:Saving checkpoints for 2552 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.285\n", "INFO:tensorflow:loss = 0.4845492, step = 2602 (5.912 sec)\n", "INFO:tensorflow:Saving checkpoints for 2654 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8518\n", "INFO:tensorflow:Saving checkpoints for 2756 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.0218\n", "INFO:tensorflow:loss = 0.51637685, step = 2802 (5.920 sec)\n", "INFO:tensorflow:Saving checkpoints for 2858 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0541\n", 
"INFO:tensorflow:Saving checkpoints for 2960 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9527\n", "INFO:tensorflow:loss = 0.50076705, step = 3002 (5.927 sec)\n", "INFO:tensorflow:Saving checkpoints for 3062 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8167\n", "INFO:tensorflow:Saving checkpoints for 3164 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.7644\n", "INFO:tensorflow:loss = 0.49627173, step = 3202 (5.769 sec)\n", "INFO:tensorflow:Saving checkpoints for 3266 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.4713\n", "INFO:tensorflow:Saving checkpoints for 3368 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9586\n", "INFO:tensorflow:loss = 0.4953499, step = 3402 (5.974 sec)\n", "INFO:tensorflow:Saving checkpoints for 3470 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.249\n", "INFO:tensorflow:Saving checkpoints for 3572 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.069\n", "INFO:tensorflow:loss = 0.49573877, step = 3602 (5.892 sec)\n", "INFO:tensorflow:Saving checkpoints for 3674 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.7109\n", "INFO:tensorflow:Saving checkpoints for 3776 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9269\n", "INFO:tensorflow:loss = 0.5173947, step = 3802 (5.770 sec)\n", "INFO:tensorflow:Saving checkpoints for 3878 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7293\n", "INFO:tensorflow:Saving checkpoints for 3980 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.3551\n", 
"INFO:tensorflow:loss = 0.5168913, step = 4002 (5.907 sec)\n", "INFO:tensorflow:Saving checkpoints for 4082 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.5056\n", "INFO:tensorflow:Saving checkpoints for 4184 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.5982\n", "INFO:tensorflow:loss = 0.4942948, step = 4202 (5.909 sec)\n", "INFO:tensorflow:Saving checkpoints for 4286 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9633\n", "INFO:tensorflow:Saving checkpoints for 4388 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1609\n", "INFO:tensorflow:loss = 0.49204993, step = 4402 (5.732 sec)\n", "INFO:tensorflow:Saving checkpoints for 4490 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.0157\n", "INFO:tensorflow:Saving checkpoints for 4592 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.5001\n", "INFO:tensorflow:loss = 0.47883478, step = 4602 (5.962 sec)\n", "INFO:tensorflow:Saving checkpoints for 4694 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7373\n", "INFO:tensorflow:Saving checkpoints for 4796 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.6698\n", "INFO:tensorflow:loss = 0.51010066, step = 4802 (5.964 sec)\n", "INFO:tensorflow:Saving checkpoints for 4898 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.7543\n", "INFO:tensorflow:Saving checkpoints for 5000 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8706\n", "INFO:tensorflow:loss = 0.496355, step = 5002 (5.952 sec)\n", "INFO:tensorflow:Saving checkpoints for 5102 into 
./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1737\n", "INFO:tensorflow:loss = 0.48928428, step = 5202 (5.082 sec)\n", "INFO:tensorflow:Saving checkpoints for 5204 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1487\n", "INFO:tensorflow:Saving checkpoints for 5306 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.471\n", "INFO:tensorflow:loss = 0.4903523, step = 5402 (5.954 sec)\n", "INFO:tensorflow:Saving checkpoints for 5408 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9119\n", "INFO:tensorflow:Saving checkpoints for 5510 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9811\n", "INFO:tensorflow:loss = 0.49182308, step = 5602 (5.744 sec)\n", "INFO:tensorflow:Saving checkpoints for 5612 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.6137\n", "INFO:tensorflow:Saving checkpoints for 5714 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.7582\n", "INFO:tensorflow:loss = 0.51414853, step = 5802 (5.969 sec)\n", "INFO:tensorflow:Saving checkpoints for 5816 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8651\n", "INFO:tensorflow:Saving checkpoints for 5918 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.7809\n", "INFO:tensorflow:loss = 0.5147529, step = 6002 (5.956 sec)\n", "INFO:tensorflow:Saving checkpoints for 6020 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0825\n", "INFO:tensorflow:Saving checkpoints for 6122 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.0911\n", "INFO:tensorflow:loss = 0.491095, step = 6202 (5.899 
sec)\n", "INFO:tensorflow:Saving checkpoints for 6224 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.835\n", "INFO:tensorflow:Saving checkpoints for 6326 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.4511\n", "INFO:tensorflow:loss = 0.4875968, step = 6402 (5.800 sec)\n", "INFO:tensorflow:Saving checkpoints for 6428 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.0431\n", "INFO:tensorflow:Saving checkpoints for 6530 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0124\n", "INFO:tensorflow:loss = 0.47473788, step = 6602 (5.911 sec)\n", "INFO:tensorflow:Saving checkpoints for 6632 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8711\n", "INFO:tensorflow:Saving checkpoints for 6734 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.5408\n", "INFO:tensorflow:loss = 0.50613326, step = 6802 (5.974 sec)\n", "INFO:tensorflow:Saving checkpoints for 6836 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.79\n", "INFO:tensorflow:Saving checkpoints for 6938 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9317\n", "INFO:tensorflow:loss = 0.49292383, step = 7002 (5.766 sec)\n", "INFO:tensorflow:Saving checkpoints for 7040 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.6444\n", "INFO:tensorflow:Saving checkpoints for 7142 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.7767\n", "INFO:tensorflow:loss = 0.4855942, step = 7202 (5.964 sec)\n", "INFO:tensorflow:Saving checkpoints for 7244 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0477\n", 
"INFO:tensorflow:Saving checkpoints for 7346 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8509\n", "INFO:tensorflow:loss = 0.4879568, step = 7402 (5.923 sec)\n", "INFO:tensorflow:Saving checkpoints for 7448 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9138\n", "INFO:tensorflow:Saving checkpoints for 7550 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.951\n", "INFO:tensorflow:loss = 0.48870382, step = 7602 (5.758 sec)\n", "INFO:tensorflow:Saving checkpoints for 7652 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7741\n", "INFO:tensorflow:Saving checkpoints for 7754 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1165\n", "INFO:tensorflow:loss = 0.51130104, step = 7802 (5.925 sec)\n", "INFO:tensorflow:Saving checkpoints for 7856 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.895\n", "INFO:tensorflow:Saving checkpoints for 7958 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9485\n", "INFO:tensorflow:loss = 0.5131527, step = 8002 (5.928 sec)\n", "INFO:tensorflow:Saving checkpoints for 8060 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8513\n", "INFO:tensorflow:Saving checkpoints for 8162 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.2702\n", "INFO:tensorflow:loss = 0.48932356, step = 8202 (5.903 sec)\n", "INFO:tensorflow:Saving checkpoints for 8264 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1738\n", "INFO:tensorflow:Saving checkpoints for 8366 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1566\n", 
"INFO:tensorflow:loss = 0.48442444, step = 8402 (5.708 sec)\n", "INFO:tensorflow:Saving checkpoints for 8468 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.1651\n", "INFO:tensorflow:Saving checkpoints for 8570 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1729\n", "INFO:tensorflow:loss = 0.47236505, step = 8602 (5.889 sec)\n", "INFO:tensorflow:Saving checkpoints for 8672 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.3996\n", "INFO:tensorflow:Saving checkpoints for 8774 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.2737\n", "INFO:tensorflow:loss = 0.5052763, step = 8802 (5.944 sec)\n", "INFO:tensorflow:Saving checkpoints for 8876 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9456\n", "INFO:tensorflow:Saving checkpoints for 8978 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8424\n", "INFO:tensorflow:loss = 0.49116126, step = 9002 (5.758 sec)\n", "INFO:tensorflow:Saving checkpoints for 9080 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.615\n", "INFO:tensorflow:Saving checkpoints for 9182 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.5405\n", "INFO:tensorflow:loss = 0.4835304, step = 9202 (5.983 sec)\n", "INFO:tensorflow:Saving checkpoints for 9284 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7099\n", "INFO:tensorflow:Saving checkpoints for 9386 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.586\n", "INFO:tensorflow:loss = 0.4846368, step = 9402 (5.897 sec)\n", "INFO:tensorflow:Saving checkpoints for 9488 into 
./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0763\n", "INFO:tensorflow:Saving checkpoints for 9590 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8913\n", "INFO:tensorflow:loss = 0.4866239, step = 9602 (5.927 sec)\n", "INFO:tensorflow:Saving checkpoints for 9692 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9176\n", "INFO:tensorflow:Saving checkpoints for 9794 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8555\n", "INFO:tensorflow:loss = 0.51006484, step = 9802 (5.754 sec)\n", "INFO:tensorflow:Saving checkpoints for 9896 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 31.9199\n", "INFO:tensorflow:Saving checkpoints for 9998 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0719\n", "INFO:tensorflow:loss = 0.51255614, step = 10002 (6.015 sec)\n", "INFO:tensorflow:Saving checkpoints for 10100 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0625\n", "INFO:tensorflow:Saving checkpoints for 10202 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8023\n", "INFO:tensorflow:loss = 0.48584107, step = 10202 (5.930 sec)\n", "INFO:tensorflow:Saving checkpoints for 10304 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.5218\n", "INFO:tensorflow:loss = 0.48140407, step = 10402 (5.125 sec)\n", "INFO:tensorflow:Saving checkpoints for 10406 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.6643\n", "INFO:tensorflow:Saving checkpoints for 10508 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9589\n", "INFO:tensorflow:loss = 0.47275436, step = 10602 
(5.871 sec)\n", "INFO:tensorflow:Saving checkpoints for 10610 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9231\n", "INFO:tensorflow:Saving checkpoints for 10712 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1866\n", "INFO:tensorflow:loss = 0.5033426, step = 10802 (5.726 sec)\n", "INFO:tensorflow:Saving checkpoints for 10814 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9069\n", "INFO:tensorflow:Saving checkpoints for 10916 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0843\n", "INFO:tensorflow:loss = 0.4893472, step = 11002 (5.930 sec)\n", "INFO:tensorflow:Saving checkpoints for 11018 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8252\n", "INFO:tensorflow:Saving checkpoints for 11120 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8465\n", "INFO:tensorflow:loss = 0.48183277, step = 11202 (5.936 sec)\n", "INFO:tensorflow:Saving checkpoints for 11222 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0827\n", "INFO:tensorflow:Saving checkpoints for 11324 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8579\n", "INFO:tensorflow:loss = 0.48213634, step = 11402 (5.755 sec)\n", "INFO:tensorflow:Saving checkpoints for 11426 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.733\n", "INFO:tensorflow:Saving checkpoints for 11528 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.2039\n", "INFO:tensorflow:loss = 0.48272803, step = 11602 (5.919 sec)\n", "INFO:tensorflow:Saving checkpoints for 11630 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", 
"INFO:tensorflow:global_step/sec: 32.9725\n", "INFO:tensorflow:Saving checkpoints for 11732 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.789\n", "INFO:tensorflow:loss = 0.506504, step = 11802 (5.931 sec)\n", "INFO:tensorflow:Saving checkpoints for 11834 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.24\n", "INFO:tensorflow:Saving checkpoints for 11936 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.5431\n", "INFO:tensorflow:loss = 0.51041526, step = 12002 (5.935 sec)\n", "INFO:tensorflow:Saving checkpoints for 12038 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.3221\n", "INFO:tensorflow:Saving checkpoints for 12140 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.7584\n", "INFO:tensorflow:loss = 0.4840238, step = 12202 (5.747 sec)\n", "INFO:tensorflow:Saving checkpoints for 12242 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.6937\n", "INFO:tensorflow:Saving checkpoints for 12344 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9127\n", "INFO:tensorflow:loss = 0.48007318, step = 12402 (5.950 sec)\n", "INFO:tensorflow:Saving checkpoints for 12446 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1066\n", "INFO:tensorflow:Saving checkpoints for 12548 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.0939\n", "INFO:tensorflow:loss = 0.4707959, step = 12602 (6.005 sec)\n", "INFO:tensorflow:Saving checkpoints for 12650 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.3398\n", "INFO:tensorflow:Saving checkpoints for 12752 into 
./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.6643\n", "INFO:tensorflow:loss = 0.50073105, step = 12802 (5.998 sec)\n", "INFO:tensorflow:Saving checkpoints for 12854 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1485\n", "INFO:tensorflow:Saving checkpoints for 12956 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.2198\n", "INFO:tensorflow:loss = 0.48709363, step = 13002 (5.706 sec)\n", "INFO:tensorflow:Saving checkpoints for 13058 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.6911\n", "INFO:tensorflow:Saving checkpoints for 13160 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.894\n", "INFO:tensorflow:loss = 0.47868958, step = 13202 (5.958 sec)\n", "INFO:tensorflow:Saving checkpoints for 13262 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0573\n", "INFO:tensorflow:Saving checkpoints for 13364 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7856\n", "INFO:tensorflow:loss = 0.4787089, step = 13402 (5.923 sec)\n", "INFO:tensorflow:Saving checkpoints for 13466 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.5471\n", "INFO:tensorflow:Saving checkpoints for 13568 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0263\n", "INFO:tensorflow:loss = 0.4809025, step = 13602 (5.705 sec)\n", "INFO:tensorflow:Saving checkpoints for 13670 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7781\n", "INFO:tensorflow:Saving checkpoints for 13772 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.9871\n", "INFO:tensorflow:loss = 0.50386125, step = 
13802 (5.930 sec)\n", "INFO:tensorflow:Saving checkpoints for 13874 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.986\n", "INFO:tensorflow:Saving checkpoints for 13976 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8169\n", "INFO:tensorflow:loss = 0.5093261, step = 14002 (5.930 sec)\n", "INFO:tensorflow:Saving checkpoints for 14078 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.668\n", "INFO:tensorflow:Saving checkpoints for 14180 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.005\n", "INFO:tensorflow:loss = 0.48381323, step = 14202 (5.775 sec)\n", "INFO:tensorflow:Saving checkpoints for 14282 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7084\n", "INFO:tensorflow:Saving checkpoints for 14384 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8872\n", "INFO:tensorflow:loss = 0.48138177, step = 14402 (5.949 sec)\n", "INFO:tensorflow:Saving checkpoints for 14486 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.2346\n", "INFO:tensorflow:Saving checkpoints for 14588 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.659\n", "INFO:tensorflow:loss = 0.4707154, step = 14602 (5.931 sec)\n", "INFO:tensorflow:Saving checkpoints for 14690 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.4108\n", "INFO:tensorflow:Saving checkpoints for 14792 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.6803\n", "INFO:tensorflow:loss = 0.502075, step = 14802 (5.819 sec)\n", "INFO:tensorflow:Saving checkpoints for 14894 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", 
"INFO:tensorflow:global_step/sec: 32.757\n", "INFO:tensorflow:Saving checkpoints for 14996 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.6987\n", "INFO:tensorflow:loss = 0.48955604, step = 15002 (5.961 sec)\n", "INFO:tensorflow:Saving checkpoints for 15098 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9673\n", "INFO:tensorflow:Saving checkpoints for 15200 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.2175\n", "INFO:tensorflow:loss = 0.4818753, step = 15202 (5.903 sec)\n", "INFO:tensorflow:Saving checkpoints for 15302 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.6938\n", "INFO:tensorflow:loss = 0.48233324, step = 15402 (5.121 sec)\n", "INFO:tensorflow:Saving checkpoints for 15404 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9636\n", "INFO:tensorflow:Saving checkpoints for 15506 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8218\n", "INFO:tensorflow:loss = 0.4845931, step = 15602 (5.931 sec)\n", "INFO:tensorflow:Saving checkpoints for 15608 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.4433\n", "INFO:tensorflow:Saving checkpoints for 15710 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8916\n", "INFO:tensorflow:loss = 0.50622445, step = 15802 (5.968 sec)\n", "INFO:tensorflow:Saving checkpoints for 15812 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0151\n", "INFO:tensorflow:Saving checkpoints for 15914 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.5828\n", "INFO:tensorflow:loss = 0.51010966, step = 16002 (5.775 sec)\n", "INFO:tensorflow:Saving checkpoints 
for 16016 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9331\n", "INFO:tensorflow:Saving checkpoints for 16118 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1875\n", "INFO:tensorflow:loss = 0.4836615, step = 16202 (5.902 sec)\n", "INFO:tensorflow:Saving checkpoints for 16220 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.8104\n", "INFO:tensorflow:Saving checkpoints for 16322 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8741\n", "INFO:tensorflow:loss = 0.48098013, step = 16402 (5.958 sec)\n", "INFO:tensorflow:Saving checkpoints for 16424 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8091\n", "INFO:tensorflow:Saving checkpoints for 16526 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.5486\n", "INFO:tensorflow:loss = 0.4695204, step = 16602 (5.969 sec)\n", "INFO:tensorflow:Saving checkpoints for 16628 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0424\n", "INFO:tensorflow:Saving checkpoints for 16730 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8442\n", "INFO:tensorflow:loss = 0.50164527, step = 16802 (5.755 sec)\n", "INFO:tensorflow:Saving checkpoints for 16832 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7707\n", "INFO:tensorflow:Saving checkpoints for 16934 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0926\n", "INFO:tensorflow:loss = 0.48918447, step = 17002 (5.921 sec)\n", "INFO:tensorflow:Saving checkpoints for 17036 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.2155\n", "INFO:tensorflow:Saving 
checkpoints for 17138 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.0074\n", "INFO:tensorflow:loss = 0.47913647, step = 17202 (5.906 sec)\n", "INFO:tensorflow:Saving checkpoints for 17240 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1414\n", "INFO:tensorflow:Saving checkpoints for 17342 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1936\n", "INFO:tensorflow:loss = 0.47959447, step = 17402 (5.718 sec)\n", "INFO:tensorflow:Saving checkpoints for 17444 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.6898\n", "INFO:tensorflow:Saving checkpoints for 17546 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8186\n", "INFO:tensorflow:loss = 0.48204988, step = 17602 (5.954 sec)\n", "INFO:tensorflow:Saving checkpoints for 17648 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1228\n", "INFO:tensorflow:Saving checkpoints for 17750 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.0315\n", "INFO:tensorflow:loss = 0.5047125, step = 17802 (5.994 sec)\n", "INFO:tensorflow:Saving checkpoints for 17852 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1292\n", "INFO:tensorflow:Saving checkpoints for 17954 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8264\n", "INFO:tensorflow:loss = 0.50951916, step = 18002 (5.747 sec)\n", "INFO:tensorflow:Saving checkpoints for 18056 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.1272\n", "INFO:tensorflow:Saving checkpoints for 18158 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8218\n", 
"INFO:tensorflow:loss = 0.48249555, step = 18202 (5.922 sec)\n", "INFO:tensorflow:Saving checkpoints for 18260 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8553\n", "INFO:tensorflow:Saving checkpoints for 18362 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9885\n", "INFO:tensorflow:loss = 0.47924712, step = 18402 (5.926 sec)\n", "INFO:tensorflow:Saving checkpoints for 18464 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.8848\n", "INFO:tensorflow:Saving checkpoints for 18566 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.395\n", "INFO:tensorflow:loss = 0.46886802, step = 18602 (5.893 sec)\n", "INFO:tensorflow:Saving checkpoints for 18668 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.2775\n", "INFO:tensorflow:Saving checkpoints for 18770 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.5929\n", "INFO:tensorflow:loss = 0.5005961, step = 18802 (5.667 sec)\n", "INFO:tensorflow:Saving checkpoints for 18872 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9238\n", "INFO:tensorflow:Saving checkpoints for 18974 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0456\n", "INFO:tensorflow:loss = 0.4847619, step = 19002 (5.913 sec)\n", "INFO:tensorflow:Saving checkpoints for 19076 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0182\n", "INFO:tensorflow:Saving checkpoints for 19178 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.9734\n", "INFO:tensorflow:loss = 0.47770432, step = 19202 (5.922 sec)\n", "INFO:tensorflow:Saving checkpoints for 19280 into 
./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.4214\n", "INFO:tensorflow:Saving checkpoints for 19382 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.6074\n", "INFO:tensorflow:loss = 0.4758588, step = 19402 (5.731 sec)\n", "INFO:tensorflow:Saving checkpoints for 19484 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.7778\n", "INFO:tensorflow:Saving checkpoints for 19586 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.1873\n", "INFO:tensorflow:loss = 0.48031697, step = 19602 (5.924 sec)\n", "INFO:tensorflow:Saving checkpoints for 19688 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 33.0876\n", "INFO:tensorflow:Saving checkpoints for 19790 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 35.0324\n", "INFO:tensorflow:loss = 0.5011429, step = 19802 (5.911 sec)\n", "INFO:tensorflow:Saving checkpoints for 19892 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 34.3502\n", "INFO:tensorflow:Saving checkpoints for 19994 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:global_step/sec: 32.73\n", "INFO:tensorflow:loss = 0.50794, step = 20002 (5.994 sec)\n", "INFO:tensorflow:Saving checkpoints for 20002 into ./models/model_WIDE_AND_DEEP_1525425429/model.ckpt.\n", "INFO:tensorflow:Loss for final step: 0.50794.\n", "fit done\n" ] } ], "source": [ "# This can be found with\n", "# wc -l train.csv\n", "train_sample_size = 2000000\n", "train_steps = train_sample_size/BATCH_SIZE*20\n", "\n", "m.fit(input_fn=generate_input_fn(train_file, BATCH_SIZE), steps=train_steps)\n", "\n", "print('fit done')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## 评估模型准确率\n", "评估准确率" ] }, { "cell_type": "code", "execution_count": 
null, "metadata": { "scrolled": true }, "outputs": [], "source": [ "eval_sample_size = 500000 # this can be found with a 'wc -l eval.csv'\n", "eval_steps = eval_sample_size/BATCH_SIZE\n", "\n", "results = m.evaluate(input_fn=generate_input_fn(eval_file), \n", " steps=eval_steps)\n", "print('evaluate done')\n", "\n", "print('Accuracy: %s' % results['accuracy'])\n", "print(results)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "进行预估" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def pred_fn():\n", " sample = [ 0, 127, 1, 3, 1683, 19, 26, 17, 475, 0, 9, 0, 3, \"05db9164\", \"8947f767\", \"11c9d79e\", \"52a787c8\", \"4cf72387\", \"fbad5c96\", \"18671b18\", \"0b153874\", \"a73ee510\", \"ceb10289\", \"77212bd7\", \"79507c6b\", \"7203f04e\", \"07d13a8f\", \"2c14c412\", \"49013ffe\", \"8efede7f\", \"bd17c3da\", \"f6a3e43b\", \"a458ea53\", \"35cd95c9\", \"ad3062eb\", \"c7dc6720\", \"3fdb382b\", \"010f6491\", \"49d68486\"]\n", " sample_dict = dict(zip(FEATURE_COLUMNS, sample))\n", " \n", " for feature_name in CATEGORICAL_COLUMNS:\n", " sample_dict[feature_name] = tf.expand_dims(sample_dict[feature_name], -1)\n", " \n", " for feature_name in CONTINUOUS_COLUMNS:\n", " sample_dict[feature_name] = tf.constant(sample_dict[feature_name], dtype=tf.int32)\n", " print(sample_dict)\n", "\n", " return sample_dict\n", "\n", "m.predict(input_fn=pred_fn)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 2", "language": "python", "name": "python2" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 2 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython2", "version": "2.7.14" } }, "nbformat": 4, "nbformat_minor": 2 }
{ "pile_set_name": "Github" }
package com.tinyspeck.debug { import com.bit101.components.HSlider; import com.bit101.components.Label; import com.bit101.components.PushButton; import com.tinyspeck.core.beacon.StageBeacon; import com.tinyspeck.engine.control.TSFrontController; import com.tinyspeck.engine.data.client.ConfirmationDialogVO; import com.tinyspeck.engine.event.TSEvent; import com.tinyspeck.engine.model.TSModelLocator; import com.tinyspeck.engine.physics.avatar.PhysicsParameter; import com.tinyspeck.engine.physics.avatar.PhysicsSettables; import com.tinyspeck.engine.physics.avatar.PhysicsSetting; import com.tinyspeck.engine.port.IMoveListener; import com.tinyspeck.engine.util.StringUtil; import com.tinyspeck.engine.util.TFUtil; import com.tinyspeck.engine.view.renderer.LocationRenderer; import com.tinyspeck.engine.view.ui.Checkbox; import com.tinyspeck.engine.view.util.StaticFilters; import flash.display.Sprite; import flash.events.Event; import flash.events.MouseEvent; import flash.system.System; import flash.text.TextField; import flash.text.TextFieldAutoSize; import flash.text.TextFormat; public class AdminDialogPhysicsPanel implements IMoveListener { private var sp:Sprite; private var form_sp:Sprite; private var model:TSModelLocator; private var settables:PhysicsSettables; private var parametersV:Vector.<PhysicsParameter>; private var btsV:Vector.<PushButton> = new Vector.<PushButton>(); private var master_setting:PhysicsSetting; private var adj_tf:TextField = new TextField(); private var adj_cb:Checkbox; private var values_tf:TextField = new TextField(); public function AdminDialogPhysicsPanel(){ // } public function init(sp:Sprite):void { this.sp = sp; model = TSModelLocator.instance; // get this now if we can if (model.worldModel.location && model.worldModel.location.physics_setting) { master_setting = model.worldModel.location.physics_setting.clone(); } settables = model.physicsModel.settables; parametersV = settables.parametersV; 
TSFrontController.instance.registerMoveListener(this); model.physicsModel.registerCBProp(onPcAdjustmentsChange, "pc_adjustments"); model.physicsModel.registerCBProp(onPcAdjustmentsChange, "ignore_pc_adjustments"); form_sp = new Sprite(); form_sp.visible = !model.moveModel.moving && model.worldModel.location && model.worldModel.location.physics_setting; sp.addChild(form_sp); var label:Label; var label2:Label; var slider:HSlider; var push_bt:PushButton; var each_h:int = 25; var param:PhysicsParameter; for (var i:int=parametersV.length-1;i>-1;i--) { param = parametersV[int(i)]; label = new Label(form_sp, 100, (i*each_h)); label.name = param.name+'_label'; label2 = new Label(form_sp, label.x+180, label.y); label2.name = param.name+'_label2'; push_bt = new PushButton(form_sp, label.x+202, label.y+16, 'RESET', function(e:Event):void { resetParamFormAndParams(e.currentTarget.name.replace('_bt', '')); }); push_bt.name = param.name+'_bt'; push_bt.height = 12; push_bt.width = 40; slider = new HSlider(form_sp, label.x, label.y+16, onParamSliderChange); slider.name = param.name; slider.height = 12; slider.width = 200; slider.backClick = true; } push_bt = new PushButton(form_sp, 160, 6+((parametersV.length)*each_h), 'CLIPBOARD', function():void { System.setClipboard(getCurrentPhysicsForClipboard()); }); push_bt.width = 60; push_bt.height = 14; push_bt = new PushButton(form_sp, push_bt.x+push_bt.width+5, push_bt.y, 'SAVE', saveCurrentPhysicsToLocation); push_bt.width = 60; push_bt.height = 14; resetParamFormAndParams(); constructPhysicsSettingsButtons(); adj_cb = new Checkbox({ x: 0, y: push_bt.y + push_bt.height + 5, checked: model.physicsModel.ignore_pc_adjustments, label: 'ignore pc physics adjustments for testing (not permanent)', name: 'adj_cb' }); adj_cb.addEventListener(TSEvent.CHANGED, function():void { model.physicsModel.ignore_pc_adjustments = adj_cb.checked; }); form_sp.addChild(adj_cb); TFUtil.prepTF(adj_tf, true); adj_tf.embedFonts = false; adj_tf.autoSize = 
TextFieldAutoSize.NONE; adj_tf.x = 0; adj_tf.y = adj_cb.y+adj_cb.h+5; form_sp.addChild(adj_tf); values_tf.defaultTextFormat = new TextFormat('Arial'); values_tf.x = 0; values_tf.y = adj_tf.y; values_tf.autoSize = TextFieldAutoSize.LEFT; values_tf.multiline = true; form_sp.addChild(values_tf); //let the debug class know about the text field PhysicsValueTracker.instance.value_tf = values_tf; informOfAdjustments(); CONFIG::debugging { StageBeacon.mouse_move_sig.add(stageMouseMove); } } CONFIG::debugging private function stageMouseMove(e:MouseEvent):void { if (!form_sp || !form_sp.visible) { Console.removePhysicsTrackedValue(' Location xy'); } const gameRenderer:LocationRenderer = TSFrontController.instance.getMainView().gameRenderer; try { Console.trackPhysicsValue(' Location xy', 'x:'+gameRenderer.getMouseXinMiddleground(), 'y:'+gameRenderer.getMouseYinMiddleground()); } catch(err:Error) {} } // IMoveListener funcs // ----------------------------------------------------------------- public function moveLocationHasChanged():void { } public function moveLocationAssetsAreReady():void { if (!sp) return; constructPhysicsSettingsButtons(); switchPhysicsSettingsByName(model.worldModel.location.tsid); informOfAdjustments(); } public function moveMoveStarted():void { if (!model.worldModel.location) return; if (!sp) return; form_sp.visible = false; } public function moveMoveEnded():void { if (!sp) return; form_sp.visible = true; master_setting = model.worldModel.location.physics_setting.clone(); } // ----------------------------------------------------------------- // END IMoveListener funcs private function onPcAdjustmentsChange(adjustments:Object):void { CONFIG::debugging { if (model.flashVarModel.benchmark_physics_adjustments) { Benchmark.addCheck('AdminDialogPhysicsPanel.onPcAdjustmentsChange:\n' + '\tadjustments:\n' + StringUtil.deepTrace(adjustments) + '\tmaster_setting:\n' + StringUtil.deepTrace(master_setting)); } } // a little delay here to make sure we go after 
PhysicsController's handler StageBeacon.waitForNextFrame(TSFrontController.instance.applyPhysicsSettingsFromAdminPanel, master_setting); informOfAdjustments(); } private var adjustments_are_in_play:Boolean; private function informOfAdjustments():void { adjustments_are_in_play = false; var txt:String = '<b>There are no pc adjustments affecting physics right now.</b>'; var changed_txt:String = ''; var adjustments:Object = model.physicsModel.pc_adjustments; for (var i:int; i<parametersV.length; i++) { var name:String = parametersV[int(i)].name; var type:String = parametersV[int(i)].type; if (adjustments && name in adjustments) { if (type == PhysicsParameter.TYPE_BOOL) { changed_txt+= ' * '+name+' set to '+(adjustments[name]=='1'?true:false)+'<br>'; } else { if (adjustments[name] != 1) changed_txt+= ' * '+name+' multiplied by '+adjustments[name]+'<br>'; } } } if (changed_txt) { if (model.worldModel.location.no_physics_adjustments) { txt = '<b>These pc adjustments would be affecting physics right now, but the location has no_physics_adjustments:true, so they are not:</b><br>'+changed_txt; } else { if (model.physicsModel.ignore_pc_adjustments) { txt = '<b>These pc adjustments would be affecting physics right now, but you\'re ignoring them:</b><br>'+changed_txt; } else { adjustments_are_in_play = true; txt = '<font color="#cc0000"><b>These pc adjustments are affecting physics right now:</b><br>'+changed_txt+'</font>'; } } } adj_tf.htmlText = '<font face="Arial">'+txt+'<font>'; adj_tf.width = 300; adj_tf.height = adj_tf.textHeight+4; values_tf.y = adj_tf.y+adj_tf.height+10; } private function constructPhysicsSettingsButtons():void { if (!sp) return; var loc_tsid:String = model.worldModel.location.tsid; var push_bt:PushButton; CONFIG::debugging { Console.warn('setting phys to '+loc_tsid); } var settingNamesA:Array = settables.getSettingNames(); var setting_name:String; for (var i:int = settingNamesA.length-1;i>-1;i--) { setting_name = settingNamesA[i]; push_bt = 
form_sp.getChildByName(setting_name) as PushButton; if (!push_bt) { push_bt = new PushButton(form_sp, 0, 0, setting_name.toUpperCase(), switchPhysicsSettingsFromButton); push_bt.name = setting_name; push_bt.width = 90; push_bt.height = 14; btsV.push(push_bt); } push_bt.y = 16+((settingNamesA.length-1-i)*17); } form_sp.visible = true; } // no param_name, and it resets them all private function resetParamFormAndParams(param_name:String = ''):void { if (!model.worldModel.pc.apo) return; if (!settables) return; if (!master_setting) return; var setting:PhysicsSetting = settables.getSettingByName(master_setting.name); var param:PhysicsParameter; var slider:HSlider; var label:Label; var label2:Label; for (var i:int=0;i<parametersV.length;i++) { param = parametersV[int(i)]; //Console.dir(param) if (param_name && param.name != param_name) continue; slider = HSlider(form_sp.getChildByName(param.name)); slider.minimum = param.min; slider.maximum = param.max; slider.value = setting[param.name]; label = Label(form_sp.getChildByName(param.name+'_label')); label.text = param.label+' ['+param.min +' to '+param.max+'] def: '+slider.value+''; label2 = Label(form_sp.getChildByName(param.name+'_label2')); label2.text = formatNumberForPropSliders(slider.value, param.name); setParamFromSlider(slider); } TSFrontController.instance.applyPhysicsSettingsFromAdminPanel(master_setting); } private function switchPhysicsSettingsFromButton(e:Event):void { switchPhysicsSettingsByName(PushButton(e.currentTarget).name); } private function switchPhysicsSettingsByName(n:String):void { var setting:PhysicsSetting = settables.getSettingByName(n); master_setting = setting.clone(); resetParamFormAndParams(); var push_bt:PushButton; var settingNamesA:Array = settables.getSettingNames(); var setting_name:String; for (var i:int;i<settingNamesA.length;i++) { setting_name = settingNamesA[int(i)]; push_bt = form_sp.getChildByName(setting_name) as PushButton; if (push_bt.name == n) { push_bt.filters = 
StaticFilters.black_GlowA; } else { push_bt.filters = null; } } } private function formatNumberForPropSliders(n:Number, name:String):String { return String(adjustPhysicsValue(n, name)); } private function adjustPhysicsValue(n:Number, name:String):Number { var param:PhysicsParameter = settables.getParameterByName(name); var type:String = (param) ? param.type : PhysicsParameter.TYPE_NUM; switch (type) { case PhysicsParameter.TYPE_NUM: if (parseInt(String(n)) != n) return Number(n.toFixed(3)); break; case PhysicsParameter.TYPE_INT: return Math.round(n); case PhysicsParameter.TYPE_BOOL: return (n == 0) ? 0 : 1; } return n; } private function onParamSliderChange(e:Event):void { setParamFromSlider(HSlider(e.currentTarget)); TSFrontController.instance.applyPhysicsSettingsFromAdminPanel(master_setting); } private function setParamFromSlider(slider:HSlider):void { master_setting[slider.name] = adjustPhysicsValue(slider.value, slider.name); Label(form_sp.getChildByName(slider.name+'_label2')).text = formatNumberForPropSliders(slider.value, slider.name); } private function getCurrentPhysicsForClipboard():String { var str:String = '{\r'; var param:PhysicsParameter; var slider:HSlider; for (var i:int=0;i<parametersV.length;i++) { param = parametersV[int(i)]; slider = HSlider(form_sp.getChildByName(param.name)); str+= '\t\''+param.name+'\': '+adjustPhysicsValue(slider.value, param.name); if (i<parametersV.length-1) str+=','; str+= '\r'; } return str+'}'; } private function getCurrentPhysicsForLocation():Object { var ob:Object = {}; var param:PhysicsParameter; var slider:HSlider; for (var i:int=0;i<parametersV.length;i++) { param = parametersV[int(i)]; slider = HSlider(form_sp.getChildByName(param.name)); ob[param.name] = adjustPhysicsValue(slider.value, param.name); } return ob; } private function saveCurrentPhysicsToLocationConfirmed(value:*):void { if (value === true) saveCurrentPhysicsToLocation(null, true); } private function saveCurrentPhysicsToLocation(ee:Event, 
proceed:Boolean = false):void { if (adjustments_are_in_play && !proceed) { var txt:String = 'To be clear, you currently have physics adjustments in play. These can be from buffs or what have you. '+ 'So before saving, you may want to uncheck the <b>ignore pc physics adjustments</b> checkbox in the physics panel to make sure '+ 'the settings you are saving are what you want.'; TSFrontController.instance.confirm(new ConfirmationDialogVO(saveCurrentPhysicsToLocationConfirmed, txt, [ {value: false, label: 'Don\'t Save'}, {value: true, label: 'Save'} ], false)); return; } var amf:Object = model.worldModel.location.AMF(); var physics:Object = getCurrentPhysicsForLocation(); TSFrontController.instance.saveLocation(amf, physics, function(success:Boolean):void { if (success) { // the save was succeful model.activityModel.growl_message = 'PHYSICS SAVED'; // make this permanent locally var setting:PhysicsSetting = settables.getSettingByName(model.worldModel.location.tsid); if (setting) { PhysicsSetting.updateFromAnonymous(physics, setting); master_setting = setting.clone(); switchPhysicsSettingsByName(model.worldModel.location.tsid); // update the physics too model.worldModel.physics_obs[model.worldModel.location.tsid] = physics; } else { CONFIG::debugging { Console.error('wtf'); } } } else { // revert ? } }); } } }
{ "pile_set_name": "Github" }
glob-parent [![Build Status](https://travis-ci.org/es128/glob-parent.svg)](https://travis-ci.org/es128/glob-parent) [![Coverage Status](https://img.shields.io/coveralls/es128/glob-parent.svg)](https://coveralls.io/r/es128/glob-parent?branch=master)
======

JavaScript module to extract the non-magic parent path from a glob string.

[![NPM](https://nodei.co/npm/glob-parent.png?downloads=true&downloadRank=true&stars=true)](https://nodei.co/npm/glob-parent/)
[![NPM](https://nodei.co/npm-dl/glob-parent.png?height=3&months=9)](https://nodei.co/npm-dl/glob-parent/)

Usage
-----
```sh
npm install glob-parent --save
```

```js
var globParent = require('glob-parent');

globParent('path/to/*.js'); // 'path/to'
globParent('/root/path/to/*.js'); // '/root/path/to'
globParent('/*.js'); // '/'
globParent('*.js'); // '.'
globParent('**/*.js'); // '.'

globParent('path/{to,from}'); // 'path'
globParent('path/!(to|from)'); // 'path'
globParent('path/?(to|from)'); // 'path'
globParent('path/+(to|from)'); // 'path'
globParent('path/*(to|from)'); // 'path'
globParent('path/@(to|from)'); // 'path'

globParent('path/**/*'); // 'path'

// if provided a non-glob path, returns the nearest dir
globParent('path/foo/bar.js'); // 'path/foo'
globParent('path/foo/'); // 'path/foo'
globParent('path/foo'); // 'path' (see issue #3 for details)
```

Change Log
----------
[See release notes page on GitHub](https://github.com/es128/glob-parent/releases)

License
-------
[ISC](https://raw.github.com/es128/glob-parent/master/LICENSE)
{ "pile_set_name": "Github" }
// // GTLang.h // GTKit // // Created on 14-8-27. // Tencent is pleased to support the open source community by making // Tencent GT (Version 2.4 and subsequent versions) available. // // Notwithstanding anything to the contrary herein, any previous version // of Tencent GT shall not be subject to the license hereunder. // All right, title, and interest, including all intellectual property rights, // in and to the previous version of Tencent GT (including any and all copies thereof) // shall be owned and retained by Tencent and subject to the license under the // Tencent GT End User License Agreement (http://gt.qq.com/wp-content/EULA_EN.html). // // Copyright (C) 2015 THL A29 Limited, a Tencent company. All rights reserved. // // Licensed under the MIT License (the "License"); you may not use this file // except in compliance with the License. You may obtain a copy of the License at // // http://opensource.org/licenses/MIT // // Unless required by applicable law or agreed to in writing, software distributed // under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR // CONDITIONS OF ANY KIND, either express or implied. See the License for the // specific language governing permissions and limitations under the License. 
// // #ifndef GT_DEBUG_DISABLE #import <Foundation/Foundation.h> #import "GTDebugDef.h" #import "GTList.h" #define M_GT_DSTRING(key)\ [[GTLang sharedInstance] getDString:key] //#define M_GT_SET_DSTRING(key,enString,zhString) func_setDstring(key,enString,zhString) //FOUNDATION_EXPORT void func_setDstring(NSString * key, NSString * stringEn, NSString * stringZh); #define M_GT_SET_DSTRING(key,enString,zhString)\ [self setDString:key forStringEn:stringEn forStringZh:stringZh] @interface GTLang : NSObject { NSString *_curLanguage; //语言,目前只支持zh(包括简体和繁体-统一为简体)和en GTList *_zhDStringList; GTList *_enDStringList; } M_GT_AS_SINGLETION(GTLang) @property (nonatomic, retain) NSString *curLanguage; @property (nonatomic, retain) GTList *zhDStringList; @property (nonatomic, retain) GTList *enDStringList; - (NSString *)getCurLanguage; - (NSString *)getDString:(NSString *)key; - (void)setDString:(NSString *)key forStringEn:(NSString *)enString forStringZh:(NSString *)zhString; @end #endif
{ "pile_set_name": "Github" }
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // This package is generated by client-gen with arguments: --clientset-name=release_1_5 --input=[api/v1,apps/v1beta1,authentication/v1beta1,authorization/v1beta1,autoscaling/v1,batch/v1,batch/v2alpha1,certificates/v1alpha1,extensions/v1beta1,policy/v1beta1,rbac/v1alpha1,storage/v1beta1] // This package has the automatically generated typed clients. package v1alpha1
{ "pile_set_name": "Github" }
using System.Reflection; using System.Runtime.CompilerServices; // Information about this assembly is defined by the following attributes. // Change them to the values specific to your project. [assembly: AssemblyTitle("iOSImmutableCollections")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("")] [assembly: AssemblyCopyright("Microsoft")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // The assembly version has the format "{Major}.{Minor}.{Build}.{Revision}". // The form "{Major}.{Minor}.*" will automatically update the build and revision, // and "{Major}.{Minor}.{Build}.*" will update just the revision. [assembly: AssemblyVersion("1.0.*")] // The following attributes are used to specify the signing key for the assembly, // if desired. See the Mono documentation for more information about signing. //[assembly: AssemblyDelaySign(false)] //[assembly: AssemblyKeyFile("")]
{ "pile_set_name": "Github" }
// -*- C++ -*- //===-- numeric -----------------------------------------------------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===----------------------------------------------------------------------===// #ifndef _TEST_SUPPORT_STDLIB_NUMERIC #define _TEST_SUPPORT_STDLIB_NUMERIC #include_next <numeric> #include <pstl/internal/pstl_config.h> #if _PSTL_EXECUTION_POLICIES_DEFINED // If <execution> has already been included, pull in implementations # include <pstl/internal/glue_numeric_impl.h> #else // Otherwise just pull in forward declarations # include <pstl/internal/glue_numeric_defs.h> # define _PSTL_NUMERIC_FORWARD_DECLARED 1 #endif #endif /* _TEST_SUPPORT_STDLIB_NUMERIC */
{ "pile_set_name": "Github" }
/* * File: skiplist-lock.h * Author: Vasileios Trigonakis <vasileios.trigonakis@epfl.ch> * Description: * skiplist-lock.h is part of ASCYLIB * * Copyright (c) 2014 Vasileios Trigonakis <vasileios.trigonakis@epfl.ch>, * Tudor David <tudor.david@epfl.ch> * Distributed Programming Lab (LPD), EPFL * * ASCYLIB is free software: you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation, version 2 * of the License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * */ #include <assert.h> #include <getopt.h> #include <limits.h> #include <pthread.h> #include <signal.h> #include <stdlib.h> #include <stdio.h> #include <sys/time.h> #include <time.h> #include <stdint.h> #include "common.h" #include <atomic_ops.h> #include "lock_if.h" #include "ssmem.h" #include "optik.h" extern unsigned int global_seed; extern __thread ssmem_allocator_t* alloc; typedef struct queue_node { skey_t key; sval_t val; struct queue_node* next; } queue_node_t; typedef ALIGNED(CACHE_LINE_SIZE) struct queue { queue_node_t* head; ptlock_t head_lock; uint8_t padding1[CACHE_LINE_SIZE - sizeof(queue_node_t*) - sizeof(ptlock_t)]; queue_node_t* tail; ptlock_t tail_lock; uint8_t padding2[CACHE_LINE_SIZE - sizeof(queue_node_t*) - sizeof(ptlock_t)]; } queue_t; int floor_log_2(unsigned int n); /* * Create a new node without setting its next fields. */ queue_node_t* queue_new_simple_node(skey_t key, sval_t val, int toplevel, int transactional); /* * Create a new node with its next field. * If next=NULL, then this create a tail node. */ queue_node_t *queue_new_node(skey_t key, sval_t val, queue_node_t *next); void queue_delete_node(queue_node_t* n); queue_t* queue_new(); void queue_delete(queue_t* qu); int queue_size(queue_t* cqu);
{ "pile_set_name": "Github" }
// Copyright 2016 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package ssa import ( "cmd/compile/internal/types" "fmt" "testing" ) func BenchmarkCopyElim1(b *testing.B) { benchmarkCopyElim(b, 1) } func BenchmarkCopyElim10(b *testing.B) { benchmarkCopyElim(b, 10) } func BenchmarkCopyElim100(b *testing.B) { benchmarkCopyElim(b, 100) } func BenchmarkCopyElim1000(b *testing.B) { benchmarkCopyElim(b, 1000) } func BenchmarkCopyElim10000(b *testing.B) { benchmarkCopyElim(b, 10000) } func BenchmarkCopyElim100000(b *testing.B) { benchmarkCopyElim(b, 100000) } func benchmarkCopyElim(b *testing.B, n int) { c := testConfig(b) values := make([]interface{}, 0, n+2) values = append(values, Valu("mem", OpInitMem, types.TypeMem, 0, nil)) last := "mem" for i := 0; i < n; i++ { name := fmt.Sprintf("copy%d", i) values = append(values, Valu(name, OpCopy, types.TypeMem, 0, nil, last)) last = name } values = append(values, Exit(last)) // Reverse values array to make it hard for i := 0; i < len(values)/2; i++ { values[i], values[len(values)-1-i] = values[len(values)-1-i], values[i] } for i := 0; i < b.N; i++ { fun := c.Fun("entry", Bloc("entry", values...)) Copyelim(fun.f) } }
{ "pile_set_name": "Github" }
/************************************************************* * * MathJax/jax/output/HTML-CSS/fonts/STIX/General/Regular/Hiragana.js * * Copyright (c) 2009-2017 The MathJax Consortium * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ MathJax.Hub.Insert( MathJax.OutputJax['HTML-CSS'].FONTDATA.FONTS['STIXGeneral'], { 0x306E: [661,41,901,37,840] // HIRAGANA LETTER NO } ); MathJax.Ajax.loadComplete(MathJax.OutputJax["HTML-CSS"].fontDir + "/General/Regular/Hiragana.js");
{ "pile_set_name": "Github" }
# Microservice

## Custom Resource Definition

The Microservice CRD is responsible for defining what components make up a Microservice.

## Controller

The Microservice controller is responsible for creating a set of VersionedMicroservices. These VersionedMicroservices depend on available releases, which are defined by the [ImagePolicy](./image-policy.md) and [ConfigPolicy](./config-policy.md).

For each new Release or Configuration, the Microservice will create a new VersionedMicroservice and annotate it appropriately. These annotations are important so we can use them in other parts of the system.

The Microservice Controller also takes care of deleting deprecated versions from the system. It does this by looking at which releases should be deployed and which ones are currently deployed. If there are versions available that shouldn't be released anymore, it will delete them.

![diagram](full-flow.png)
{ "pile_set_name": "Github" }
<?php /** * Validates arbitrary text according to the HTML spec. */ class HTMLPurifier_AttrDef_Text extends HTMLPurifier_AttrDef { /** * @param string $string * @param HTMLPurifier_Config $config * @param HTMLPurifier_Context $context * @return bool|string */ public function validate($string, $config, $context) { return $this->parseCDATA($string); } } // vim: et sw=4 sts=4
{ "pile_set_name": "Github" }
#include <check.h> /* Check unit test framework API. */ #include <stdlib.h> /* EXIT_SUCCESS, EXIT_FAILURE */ #include <unistd.h> /* pipe() */ #include <fcntl.h> /* fcntl() */ #include <err.h> /* err(), errx() */ #include <time.h> /* nanosleep() */ #include <stdio.h> /* snprintf */ #include <unistd.h> /* pipe */ #include <sys/types.h> #include <sys/socket.h> /* socket */ #include <sys/un.h> /* sockaddr_un */ #include "../src/negotiate.h" #include "../src/negotiate.c" /* struct definitions, static structures */ #include "../src/dgsh-conc.c" /* pi */ //#include "../src/dgsh-internal-api.h" /* chosen_mb */ struct dgsh_negotiation *fresh_mb; struct dgsh_edge *compact_edges; struct dgsh_edge **pointers_to_edges; int n_ptedges; int *args; /* Depending on whether a test triggers a failure or not, a different sequence * of actions may be needed to exit normally. * exit_state is the control variable for following the correct * sequence of actions. */ int exit_state = 0; void setup_concs(struct dgsh_negotiation *mb) { mb->n_concs = 2; mb->conc_array = (struct dgsh_conc *)malloc(sizeof(struct dgsh_conc) * mb->n_concs); mb->conc_array[0].pid = 2000; mb->conc_array[0].input_fds = 2; mb->conc_array[0].output_fds = 2; mb->conc_array[0].multiple_inputs = false; mb->conc_array[0].endpoint_pid = 102; mb->conc_array[0].n_proc_pids = 2; mb->conc_array[0].proc_pids = (int *)malloc(sizeof(int) * 2); mb->conc_array[0].proc_pids[0] = 100; mb->conc_array[0].proc_pids[1] = 101; mb->conc_array[1].pid = 2001; mb->conc_array[1].input_fds = 3; mb->conc_array[1].output_fds = 3; mb->conc_array[1].multiple_inputs = true; mb->conc_array[1].endpoint_pid = 103; mb->conc_array[1].n_proc_pids = 2; mb->conc_array[1].proc_pids = (int *)malloc(sizeof(int) * 2); mb->conc_array[1].proc_pids[0] = 100; mb->conc_array[1].proc_pids[1] = 101; } void setup_graph_solution(void) { chosen_mb->graph_solution = (struct dgsh_node_connections *)malloc( sizeof(struct dgsh_node_connections) * chosen_mb->n_nodes); struct 
dgsh_node_connections *graph_solution = chosen_mb->graph_solution; graph_solution[0].node_index = 0; graph_solution[0].n_edges_incoming = 2; graph_solution[0].edges_incoming = (struct dgsh_edge *)malloc( sizeof(struct dgsh_edge) * graph_solution[0].n_edges_incoming); memcpy(&graph_solution[0].edges_incoming[0], &chosen_mb->edge_array[0], sizeof(struct dgsh_edge)); memcpy(&graph_solution[0].edges_incoming[1], &chosen_mb->edge_array[2], sizeof(struct dgsh_edge)); graph_solution[0].n_edges_outgoing = 1; graph_solution[0].edges_outgoing = (struct dgsh_edge *)malloc( sizeof(struct dgsh_edge) * graph_solution[0].n_edges_outgoing); memcpy(&graph_solution[0].edges_outgoing[0], &chosen_mb->edge_array[4], sizeof(struct dgsh_edge)); graph_solution[1].node_index = 1; graph_solution[1].n_edges_incoming = 1; graph_solution[1].edges_incoming = (struct dgsh_edge *)malloc( sizeof(struct dgsh_edge) * graph_solution[1].n_edges_incoming); memcpy(&graph_solution[1].edges_incoming[0], &chosen_mb->edge_array[1], sizeof(struct dgsh_edge)); graph_solution[1].n_edges_outgoing = 2; graph_solution[1].edges_outgoing = (struct dgsh_edge *)malloc( sizeof(struct dgsh_edge) * graph_solution[1].n_edges_outgoing); memcpy(&graph_solution[1].edges_outgoing[0], &chosen_mb->edge_array[2], sizeof(struct dgsh_edge)); memcpy(&graph_solution[1].edges_outgoing[1], &chosen_mb->edge_array[3], sizeof(struct dgsh_edge)); graph_solution[2].node_index = 2; graph_solution[2].n_edges_incoming = 0; graph_solution[2].edges_incoming = NULL; graph_solution[2].n_edges_outgoing = 2; graph_solution[2].edges_outgoing = (struct dgsh_edge *)malloc( sizeof(struct dgsh_edge) * graph_solution[2].n_edges_outgoing); memcpy(&graph_solution[2].edges_outgoing[0], &chosen_mb->edge_array[0], sizeof(struct dgsh_edge)); memcpy(&graph_solution[2].edges_outgoing[1], &chosen_mb->edge_array[1], sizeof(struct dgsh_edge)); graph_solution[3].node_index = 3; graph_solution[3].n_edges_incoming = 2; graph_solution[3].edges_incoming = (struct 
dgsh_edge *)malloc( sizeof(struct dgsh_edge) * graph_solution[3].n_edges_incoming); memcpy(&graph_solution[0].edges_incoming[0], &chosen_mb->edge_array[3], sizeof(struct dgsh_edge)); memcpy(&graph_solution[0].edges_incoming[1], &chosen_mb->edge_array[4], sizeof(struct dgsh_edge)); graph_solution[3].n_edges_outgoing = 0; graph_solution[3].edges_outgoing = NULL; } void setup_chosen_mb(void) { struct dgsh_node *nodes; struct dgsh_edge *edges; int n_nodes; int n_edges; n_nodes = 4; nodes = (struct dgsh_node *)malloc(sizeof(struct dgsh_node) * n_nodes); nodes[0].pid = 100; nodes[0].index = 0; strcpy(nodes[0].name, "proc0"); nodes[0].requires_channels = 2; nodes[0].provides_channels = 1; nodes[0].dgsh_in = 1; nodes[0].dgsh_out = 1; nodes[1].pid = 101; nodes[1].index = 1; strcpy(nodes[1].name, "proc1"); nodes[1].requires_channels = 1; nodes[1].provides_channels = 2; nodes[1].dgsh_in = 1; nodes[1].dgsh_out = 1; /* dgsh OUT and not IN = initiator node. * This node could start the negotiation. * Fix. */ nodes[2].pid = 102; nodes[2].index = 2; strcpy(nodes[2].name, "proc2"); nodes[2].requires_channels = 0; nodes[2].provides_channels = 2; nodes[2].dgsh_in = 0; nodes[2].dgsh_out = 1; /* dgsh IN and not OUT = termination node. * This node couldn't start the negotiation. * Fix. 
*/ nodes[3].pid = 103; nodes[3].index = 3; strcpy(nodes[3].name, "proc3"); nodes[3].requires_channels = 2; nodes[3].provides_channels = 0; nodes[3].dgsh_in = 1; nodes[3].dgsh_out = 0; n_edges = 5; edges = (struct dgsh_edge *)malloc(sizeof(struct dgsh_edge) *n_edges); edges[0].from = 2; edges[0].to = 0; edges[0].instances = 0; edges[0].from_instances = 0; edges[0].to_instances = 0; edges[1].from = 2; edges[1].to = 1; edges[1].instances = 0; edges[1].from_instances = 0; edges[1].to_instances = 0; edges[2].from = 1; edges[2].to = 0; edges[2].instances = 0; edges[2].from_instances = 0; edges[2].to_instances = 0; edges[3].from = 1; edges[3].to = 3; edges[3].instances = 0; edges[3].from_instances = 0; edges[3].to_instances = 0; edges[4].from = 0; edges[4].to = 3; edges[4].instances = 0; edges[4].from_instances = 0; edges[4].to_instances = 0; double dgsh_version = 0.1; chosen_mb = (struct dgsh_negotiation *)malloc(sizeof(struct dgsh_negotiation)); chosen_mb->version = dgsh_version; chosen_mb->node_array = nodes; chosen_mb->n_nodes = n_nodes; chosen_mb->edge_array = edges; chosen_mb->n_edges = n_edges; chosen_mb->graph_solution = NULL; /* check_negotiation_round() */ chosen_mb->state = PS_NEGOTIATION; chosen_mb->initiator_pid = 103; /* Node 3 */ chosen_mb->origin_fd_direction = STDOUT_FILENO; chosen_mb->n_concs = 0; chosen_mb->conc_array = NULL; } /* Identical to chosen_mb except for the initiator field. 
*/ void setup_mb(struct dgsh_negotiation **mb) { struct dgsh_node *nodes; struct dgsh_edge *edges; int n_nodes; int n_edges; n_nodes = 4; nodes = (struct dgsh_node *)malloc(sizeof(struct dgsh_node) * n_nodes); nodes[0].pid = 100; nodes[0].index = 0; strcpy(nodes[0].name, "proc0"); nodes[0].requires_channels = 2; nodes[0].provides_channels = 1; nodes[0].dgsh_in = 1; nodes[0].dgsh_out = 1; nodes[1].pid = 101; nodes[1].index = 1; strcpy(nodes[1].name, "proc1"); nodes[1].requires_channels = 1; nodes[1].provides_channels = 2; nodes[1].dgsh_in = 1; nodes[1].dgsh_out = 1; nodes[2].pid = 102; nodes[2].index = 2; strcpy(nodes[2].name, "proc2"); nodes[2].requires_channels = 0; nodes[2].provides_channels = 2; nodes[2].dgsh_in = 0; nodes[2].dgsh_out = 1; nodes[3].pid = 103; nodes[3].index = 3; strcpy(nodes[3].name, "proc3"); nodes[3].requires_channels = 2; nodes[3].provides_channels = 0; nodes[3].dgsh_in = 1; nodes[3].dgsh_out = 0; n_edges = 5; edges = (struct dgsh_edge *)malloc(sizeof(struct dgsh_edge) *n_edges); edges[0].from = 2; edges[0].to = 0; edges[0].instances = 0; edges[0].from_instances = 0; edges[0].to_instances = 0; edges[1].from = 2; edges[1].to = 1; edges[1].instances = 0; edges[1].from_instances = 0; edges[1].to_instances = 0; edges[2].from = 1; edges[2].to = 0; edges[2].instances = 0; edges[2].from_instances = 0; edges[2].to_instances = 0; edges[3].from = 1; edges[3].to = 3; edges[3].instances = 0; edges[3].from_instances = 0; edges[3].to_instances = 0; edges[4].from = 0; edges[4].to = 3; edges[4].instances = 0; edges[4].from_instances = 0; edges[4].to_instances = 0; double dgsh_version = 0.1; struct dgsh_negotiation *temp_mb = (struct dgsh_negotiation *)malloc(sizeof(struct dgsh_negotiation)); temp_mb->version = dgsh_version; temp_mb->node_array = nodes; temp_mb->n_nodes = n_nodes; temp_mb->edge_array = edges; temp_mb->n_edges = n_edges; temp_mb->graph_solution = NULL; /* check_negotiation_round() */ temp_mb->state = PS_NEGOTIATION; temp_mb->initiator_pid = 
102; /* Node 2 */ temp_mb->origin_index = 2; temp_mb->origin_fd_direction = STDOUT_FILENO; temp_mb->n_concs = 0; temp_mb->conc_array = NULL; *mb = temp_mb; } void setup_pointers_to_edges(void) { n_ptedges = 2; pointers_to_edges = (struct dgsh_edge **)malloc(sizeof(struct dgsh_edge *) *n_ptedges); int i; for (i = 0; i < n_ptedges; i++) { pointers_to_edges[i] = (struct dgsh_edge *)malloc(sizeof(struct dgsh_edge)); pointers_to_edges[i]->from = i; pointers_to_edges[i]->to = 3; // the node. pointers_to_edges[i]->instances = 0; pointers_to_edges[i]->from_instances = 0; pointers_to_edges[i]->to_instances = 0; } } void setup_self_node(void) { /* fill in self_node */ memcpy(&self_node, &chosen_mb->node_array[3], sizeof(struct dgsh_node)); } void setup_self_node_io_side(void) { self_node_io_side.index = 3; self_node_io_side.fd_direction = 0; } /* establish_io_connections() */ void setup_pipe_fds(void) { /* fill in self_pipe_fds */ self_pipe_fds.n_input_fds = 2; self_pipe_fds.input_fds = (int *)malloc(sizeof(int) * self_pipe_fds.n_input_fds); self_pipe_fds.input_fds[0] = 3; self_pipe_fds.input_fds[1] = 4; self_pipe_fds.n_output_fds = 0; } void setup_args(void) { args = (int *)malloc(sizeof(int) * 3); args[0] = -1; args[1] = -1; args[2] = -1; } void setup(void) { setup_chosen_mb(); setup_self_node(); setup_self_node_io_side(); setup_pipe_fds(); setup_graph_solution(); } void setup_test_set_fds(void) { setup_chosen_mb(); setup_self_node(); } void setup_test_add_node(void) { setup_chosen_mb(); setup_self_node(); setup_self_node_io_side(); } void setup_test_lookup_dgsh_edge(void) { setup_chosen_mb(); } void setup_test_fill_dgsh_edge(void) { setup_chosen_mb(); setup_self_node(); setup_self_node_io_side(); } void setup_test_add_edge(void) { setup_chosen_mb(); } void setup_test_try_add_dgsh_edge(void) { setup_chosen_mb(); setup_self_node(); setup_self_node_io_side(); } void setup_test_try_add_dgsh_node(void) { setup_chosen_mb(); setup_self_node(); } void setup_test_fill_node(void) { 
/* setup_self_node() requires setup_chosen_mb() */ setup_chosen_mb(); setup_self_node(); } void setup_test_free_mb(void) { setup_chosen_mb(); } void setup_test_analyse_read(void) { setup_chosen_mb(); setup_mb(&fresh_mb); setup_self_node(); setup_self_node_io_side(); } /*void setup_test_point_io_direction(void) { setup_chosen_mb(); setup_self_node(); setup_self_node_io_side(); }*/ void setup_test_alloc_copy_graph_solution(void) { setup_mb(&fresh_mb); } void setup_test_alloc_copy_concs(void) { setup_mb(&fresh_mb); } void setup_test_alloc_copy_edges(void) { setup_mb(&fresh_mb); } void setup_test_alloc_copy_nodes(void) { setup_mb(&fresh_mb); } void setup_test_read_chunk(void) { setup_self_node_io_side(); } void setup_test_alloc_io_fds(void) { setup_chosen_mb(); setup_graph_solution(); setup_self_node(); } void setup_test_get_provided_fds_n(void) { setup_chosen_mb(); setup_graph_solution(); } void setup_test_get_expected_fds_n(void) { setup_chosen_mb(); setup_graph_solution(); } void setup_test_get_origin_pid(void) { setup_chosen_mb(); } void setup_test_read_input_fds(void) { setup_chosen_mb(); setup_graph_solution(); setup_self_node(); } void setup_test_read_graph_solution(void) { setup_mb(&fresh_mb); setup_chosen_mb(); setup_self_node_io_side(); } void setup_test_read_concs(void) { setup_mb(&fresh_mb); setup_chosen_mb(); setup_concs(chosen_mb); setup_self_node_io_side(); } void setup_test_write_graph_solution(void) { setup_chosen_mb(); setup_graph_solution(); setup_self_node_io_side(); } void setup_test_write_concs(void) { setup_chosen_mb(); setup_concs(chosen_mb); setup_self_node_io_side(); } void setup_test_read_message_block(void) { setup_chosen_mb(); setup_self_node_io_side(); } void setup_test_write_message_block(void) { setup_chosen_mb(); setup_self_node_io_side(); } void setup_test_make_compact_edge_array(void) { setup_pointers_to_edges(); } void setup_test_reallocate_edge_pointer_array(void) { setup_pointers_to_edges(); } /*void 
setup_test_assign_edge_instances(void) { setup_chosen_mb(); setup_pointers_to_edges(); } void setup_test_eval_constraints(void) { setup_args(); } */ void setup_test_move(void) { setup_pointers_to_edges(); } void setup_test_satisfy_io_constraints(void) { setup_chosen_mb(); setup_pointers_to_edges(); setup_args(); } void setup_test_dry_match_io_constraints(void) { setup_chosen_mb(); setup_graph_solution(); setup_pointers_to_edges(); setup_args(); } void setup_test_node_match_constraints(void) { setup_chosen_mb(); } void setup_test_free_graph_solution(void) { setup_chosen_mb(); setup_graph_solution(); } void setup_test_solve_graph(void) { setup_chosen_mb(); setup_graph_solution(); setup_pointers_to_edges(); setup_args(); } void setup_test_calculate_conc_fds(void) { setup_chosen_mb(); setup_graph_solution(); setup_concs(chosen_mb); } void setup_test_write_output_fds(void) { setup_chosen_mb(); /* For setting up graph_solution. */ setup_graph_solution(); setup_self_node(); } void setup_test_set_dispatcher(void) { setup_chosen_mb(); setup_self_node_io_side(); } void setup_test_establish_io_connections(void) { setup_pipe_fds(); setup_chosen_mb(); setup_self_node(); } void setup_pi(void) { pi = (struct portinfo *)calloc(5, sizeof(struct portinfo)); pi[0].pid = 101; pi[0].seen = false; pi[0].written = true; pi[1].pid = 100; pi[1].seen = true; pi[1].written = false; pi[3].pid = 103; pi[3].seen = true; pi[3].written = true; } void setup_test_is_ready(void) { setup_pi(); setup_chosen_mb(); } void setup_test_set_io_channels(void) { setup_pi(); setup_chosen_mb(); } void retire_pointers_to_edges(void) { int i; for (i = 0; i < n_ptedges; i++) free(pointers_to_edges[i]); free(pointers_to_edges); } void retire_graph_solution(struct dgsh_node_connections *graph_solution, int node_index) { int i; for (i = 0; i <= node_index; i++) { if (graph_solution[i].n_edges_incoming) free(graph_solution[i].edges_incoming); if (graph_solution[i].n_edges_outgoing) 
free(graph_solution[i].edges_outgoing); } free(graph_solution); } void retire_concs(struct dgsh_negotiation *mb) { int i; for (i = 0; i < mb->n_concs; i++) free(mb->conc_array[i].proc_pids); free(mb->conc_array); } void retire_chosen_mb(void) { free(chosen_mb->node_array); free(chosen_mb->edge_array); free(chosen_mb); } void retire_mb(struct dgsh_negotiation *mb) { free(mb->node_array); free(mb->edge_array); free(mb); } /* establish_io_connections() */ void retire_pipe_fds(void) { if (self_pipe_fds.n_input_fds > 0) free(self_pipe_fds.input_fds); if (self_pipe_fds.n_output_fds > 0) free(self_pipe_fds.output_fds); /* What about self_pipe_fds.input_fds? */ } void retire_args(void) { free(args); } void retire(void) { retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); retire_chosen_mb(); retire_pipe_fds(); } void retire_test_set_fds(void) { retire_chosen_mb(); } void retire_test_construct_message_block(void) { retire_chosen_mb(); } void retire_test_add_node(void) { retire_chosen_mb(); } void retire_test_lookup_dgsh_edge(void) { retire_chosen_mb(); } void retire_test_fill_dgsh_edge(void) { retire_chosen_mb(); } void retire_test_add_edge(void) { retire_chosen_mb(); } void retire_test_try_add_dgsh_edge(void) { retire_chosen_mb(); } void retire_test_try_add_dgsh_node(void) { retire_chosen_mb(); } void retire_test_analyse_read(void) { if (exit_state == 1) { retire_mb(fresh_mb); exit_state = 0; } else retire_chosen_mb(); } /*void retire_test_point_io_direction(void) { retire_chosen_mb(); }*/ void retire_test_alloc_copy_graph_solution(void) { retire_mb(fresh_mb); } void retire_test_alloc_copy_concs(void) { retire_mb(fresh_mb); } void retire_test_alloc_copy_edges(void) { retire_mb(fresh_mb); } void retire_test_alloc_copy_nodes(void) { retire_mb(fresh_mb); } void retire_test_alloc_io_fds(void) { retire_pipe_fds(); retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); retire_chosen_mb(); } void retire_test_get_provided_fds_n(void) { 
retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); retire_chosen_mb(); } void retire_test_get_expected_fds_n(void) { retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); retire_chosen_mb(); } void retire_test_read_input_fds(void) { retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); retire_chosen_mb(); } void retire_test_get_origin_pid(void) { retire_chosen_mb(); } void retire_test_read_message_block(void) { retire_chosen_mb(); } void retire_test_write_message_block(void) { retire_chosen_mb(); } void retire_test_read_graph_solution(void) { retire_chosen_mb(); retire_mb(fresh_mb); } void retire_test_read_concs(void) { retire_concs(chosen_mb); retire_chosen_mb(); retire_mb(fresh_mb); } void retire_test_write_graph_solution(void) { retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); retire_chosen_mb(); } void retire_test_write_concs(void) { retire_concs(chosen_mb); retire_chosen_mb(); } void retire_test_make_compact_edge_array(void) { free(compact_edges); retire_pointers_to_edges(); } void retire_test_reallocate_edge_pointer_array(void) { retire_pointers_to_edges(); } /*void retire_test_assign_edge_instances(void) { retire_chosen_mb(); retire_pointers_to_edges(); } void retire_test_eval_constraints(void) { retire_args(); } */ void retire_test_move(void) { retire_pointers_to_edges(); } void retire_test_satisfy_io_constraints(void) { retire_chosen_mb(); retire_pointers_to_edges(); retire_args(); } void retire_test_dry_match_io_constraints(void) { retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); retire_chosen_mb(); retire_pointers_to_edges(); retire_args(); } void retire_test_node_match_constraints(void) { retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); retire_chosen_mb(); } void retire_test_free_graph_solution(void) { retire_chosen_mb(); } void retire_test_solve_graph(void) { /* Are the other data structures handled correctly? 
* They could be deallocated above our feet. */ if (!exit_state) retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); else exit_state = 0; retire_chosen_mb(); retire_pointers_to_edges(); retire_args(); } void retire_test_calculate_conc_fds(void) { retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); retire_concs(chosen_mb); retire_chosen_mb(); } void retire_test_write_output_fds(void) { retire_graph_solution(chosen_mb->graph_solution, chosen_mb->n_nodes - 1); } void retire_test_set_dispatcher(void) { retire_chosen_mb(); } void retire_test_establish_io_connections(void) { /* See setup_test_establish_io_connections() */ retire_chosen_mb(); retire_pipe_fds(); } void retire_pi(void) { free(pi); } void retire_test_is_ready(void) { retire_pi(); retire_chosen_mb(); } void retire_test_set_io_channels(void) { retire_concs(chosen_mb); retire_chosen_mb(); retire_pi(); } START_TEST(test_solve_graph) { DPRINTF(4, "%s", __func__); /* A normal case with fixed, tight constraints. */ ck_assert_int_eq(solve_graph(), OP_SUCCESS); struct dgsh_node_connections *graph_solution = chosen_mb->graph_solution; ck_assert_int_eq(graph_solution[3].n_edges_incoming, 2); ck_assert_int_eq(graph_solution[3].n_edges_outgoing, 0); ck_assert_int_eq(chosen_mb->edge_array[3].instances, 1); ck_assert_int_eq(chosen_mb->edge_array[4].instances, 1); ck_assert_int_eq(graph_solution[3].edges_incoming[0].instances, 1); ck_assert_int_eq(graph_solution[0].edges_outgoing[0].instances, 1); ck_assert_int_eq(graph_solution[3].edges_incoming[1].instances, 1); ck_assert_int_eq(graph_solution[1].edges_outgoing[1].instances, 1); ck_assert_int_eq((long int)graph_solution[3].edges_outgoing, 0); retire_test_solve_graph(); /* An impossible case. */ setup_test_solve_graph(); chosen_mb->node_array[3].requires_channels = 1; ck_assert_int_eq(solve_graph(), OP_ERROR); exit_state = 1; retire_test_solve_graph(); /* Relaxing our target node's constraint. 
*/ setup_test_solve_graph(); chosen_mb->node_array[3].requires_channels = -1; ck_assert_int_eq(solve_graph(), OP_SUCCESS); graph_solution = chosen_mb->graph_solution; ck_assert_int_eq(graph_solution[3].n_edges_incoming, 2); ck_assert_int_eq(graph_solution[3].n_edges_outgoing, 0); /* Pair edges still have tight constraints. */ ck_assert_int_eq(chosen_mb->edge_array[3].instances, 1); ck_assert_int_eq(chosen_mb->edge_array[4].instances, 1); ck_assert_int_eq(graph_solution[3].edges_incoming[0].instances, 1); ck_assert_int_eq(graph_solution[0].edges_outgoing[0].instances, 1); ck_assert_int_eq(graph_solution[3].edges_incoming[1].instances, 1); ck_assert_int_eq(graph_solution[1].edges_outgoing[1].instances, 1); ck_assert_int_eq((long int)graph_solution[3].edges_outgoing, 0); retire_test_solve_graph(); /* Relaxing also pair nodes' constraints. */ setup_test_solve_graph(); chosen_mb->node_array[3].requires_channels = -1; chosen_mb->node_array[0].provides_channels = -1; chosen_mb->node_array[1].provides_channels = -1; ck_assert_int_eq(solve_graph(), OP_SUCCESS); graph_solution = chosen_mb->graph_solution; ck_assert_int_eq(graph_solution[3].n_edges_incoming, 2); ck_assert_int_eq(graph_solution[3].n_edges_outgoing, 0); /* Flexible both sides: instances previously set to 5 */ ck_assert_int_eq(chosen_mb->edge_array[3].instances, 1); ck_assert_int_eq(chosen_mb->edge_array[4].instances, 1); ck_assert_int_eq(graph_solution[3].edges_incoming[0].instances, 1); ck_assert_int_eq(graph_solution[0].edges_outgoing[0].instances, 1); ck_assert_int_eq(graph_solution[3].edges_incoming[1].instances, 1); ck_assert_int_eq(graph_solution[1].edges_outgoing[1].instances, 1); ck_assert_int_eq((long int)graph_solution[3].edges_outgoing, 0); /* Collateral impact. 
Node 1 (flex) -> Node 0 (tight) */ ck_assert_int_eq(chosen_mb->edge_array[2].instances, 1); ck_assert_int_eq(graph_solution[1].edges_outgoing[0].instances, 1); } END_TEST START_TEST(test_calculate_conc_fds) { DPRINTF(4, "%s()", __func__); chosen_mb->conc_array[0].input_fds = -1; chosen_mb->conc_array[0].output_fds = -1; chosen_mb->conc_array[1].input_fds = -1; chosen_mb->conc_array[1].output_fds = -1; struct dgsh_node_connections *graph_solution = chosen_mb->graph_solution; graph_solution[0].edges_incoming[0].instances = 1; graph_solution[0].edges_outgoing[0].instances = 1; graph_solution[1].edges_incoming[0].instances = 1; graph_solution[1].edges_outgoing[0].instances = 1; /* endpoint for conc with pid 2001*/ graph_solution[3].edges_incoming[0].instances = 1; graph_solution[3].edges_incoming[1].instances = 1; /* endpoint for conc with pid 2000*/ graph_solution[2].edges_outgoing[0].instances = 1; graph_solution[2].edges_outgoing[1].instances = 1; ck_assert_int_eq(calculate_conc_fds(), OP_SUCCESS); } END_TEST START_TEST(test_free_graph_solution) { ck_assert_int_eq(free_graph_solution(3), OP_SUCCESS); /* Invalid node indexes, the function's argument, are checked by assertion. */ } END_TEST START_TEST(test_establish_io_connections) { /* Should be in the solution propagation test suite. */ /* The test case contains an arrangement of 0 fds and another of >0 fds. 
*/ int *input_fds = NULL; int n_input_fds = 2; int *output_fds = NULL; int n_output_fds = 0; int fd[2]; if (pipe(fd) == -1) { /* fd pair: 4 -- 5 */ perror("pipe open failed"); exit(1); } self_pipe_fds.input_fds[0] = fd[0]; DPRINTF(4, "%s: Opened pipe pair: input_fds[0]: %d, output: %d", __func__, fd[0], fd[1]); ck_assert_int_eq(establish_io_connections(NULL, NULL, NULL, NULL), OP_SUCCESS); /* Freed */ ck_assert_int_eq(self_pipe_fds.n_input_fds, 0); ck_assert_int_eq(self_pipe_fds.n_output_fds, 0); close(fd[1]); retire_test_establish_io_connections(); setup_test_establish_io_connections(); if (pipe(fd) == -1) { /* fd pair: 4 -- 5 */ perror("pipe open failed"); exit(1); } self_pipe_fds.input_fds[0] = fd[0]; DPRINTF(4, "%s: Opened pipe pair: input_fds[0]: %d, output: %d", __func__, fd[0], fd[1]); self_pipe_fds.input_fds[1] = 6; ck_assert_int_eq(establish_io_connections(&input_fds, &n_input_fds, &output_fds, &n_output_fds), OP_SUCCESS); ck_assert_int_eq(n_input_fds, 2); ck_assert_int_eq(input_fds[0], 0); ck_assert_int_eq(input_fds[1], 6); ck_assert_int_eq(n_output_fds, 0); close(fd[1]); } END_TEST struct dgsh_edge **edges_in; int n_edges_in; struct dgsh_edge **edges_out; int n_edges_out; void retire_dmic(void) { retire(); if (n_edges_in) free(edges_in); if (n_edges_out) free(edges_out); } START_TEST(test_node_match_constraints) { DPRINTF(4, "%s()\n", __func__); /* Default topology; take a look at setup_chosen_mb() */ chosen_mb->node_array[3].requires_channels = 2; ck_assert_int_eq(node_match_constraints(), OP_SUCCESS); struct dgsh_node_connections *graph_solution = chosen_mb->graph_solution; ck_assert_int_eq(graph_solution[0].node_index, 0); ck_assert_int_eq(graph_solution[0].n_edges_incoming, 2); ck_assert_int_eq(graph_solution[0].n_instances_incoming_free, 0); ck_assert_int_eq(graph_solution[0].n_edges_outgoing, 1); ck_assert_int_eq(graph_solution[0].n_instances_outgoing_free, 0); ck_assert_int_eq(graph_solution[1].node_index, 1); 
ck_assert_int_eq(graph_solution[1].n_edges_incoming, 1); ck_assert_int_eq(graph_solution[1].n_instances_incoming_free, 0); ck_assert_int_eq(graph_solution[1].n_edges_outgoing, 2); ck_assert_int_eq(graph_solution[1].n_instances_outgoing_free, 0); ck_assert_int_eq(graph_solution[2].node_index, 2); ck_assert_int_eq(graph_solution[2].n_edges_incoming, 0); ck_assert_int_eq(graph_solution[2].n_instances_incoming_free, 0); ck_assert_int_eq(graph_solution[2].n_edges_outgoing, 2); ck_assert_int_eq(graph_solution[2].n_instances_outgoing_free, 0); ck_assert_int_eq(graph_solution[3].node_index, 3); ck_assert_int_eq(graph_solution[3].n_edges_incoming, 2); ck_assert_int_eq(graph_solution[3].n_instances_incoming_free, 0); ck_assert_int_eq(graph_solution[3].n_edges_outgoing, 0); ck_assert_int_eq(graph_solution[3].n_instances_outgoing_free, 0); } END_TEST START_TEST(test_dry_match_io_constraints) { DPRINTF(4, "%s", __func__); struct dgsh_node_connections *graph_solution = chosen_mb->graph_solution; /* A normal case with fixed, tight constraints. */ struct dgsh_node_connections *current_connections = &graph_solution[3]; current_connections->n_edges_incoming = 0; current_connections->n_edges_outgoing = 0; ck_assert_int_eq(dry_match_io_constraints(&chosen_mb->node_array[3], current_connections, &edges_in, &edges_out), OP_SUCCESS); /* Hard coded. Observe the topology of the prototype solution in setup(). 
*/ ck_assert_int_eq(current_connections->n_edges_incoming, 2); ck_assert_int_eq(current_connections->n_edges_outgoing, 0); /* A case not matching at first sight; match result will * be decided in cross_match_constraints() */ current_connections->n_edges_incoming = 0; current_connections->n_edges_outgoing = 0; chosen_mb->node_array[3].requires_channels = 3; ck_assert_int_eq(dry_match_io_constraints(&chosen_mb->node_array[3], current_connections, &edges_in, &edges_out), OP_SUCCESS); ck_assert_int_eq(current_connections->n_edges_incoming, 2); ck_assert_int_eq(current_connections->n_edges_outgoing, 0); /* Relaxing our target node's constraint. */ current_connections->n_edges_incoming = 0; current_connections->n_edges_outgoing = 0; chosen_mb->node_array[3].requires_channels = -1; ck_assert_int_eq(dry_match_io_constraints(&chosen_mb->node_array[3], current_connections, &edges_in, &edges_out), OP_SUCCESS); ck_assert_int_eq(current_connections->n_edges_incoming, 2); ck_assert_int_eq(current_connections->n_edges_outgoing, 0); ck_assert_int_eq(chosen_mb->edge_array[3].to_instances, -1); ck_assert_int_eq(chosen_mb->edge_array[4].to_instances, -1); } END_TEST START_TEST(test_satisfy_io_constraints) { /* To be concise, when changing the second argument that mirrors * the channel constraint of the node under evaluation, we should * also change it in the node array, but it does not matter since it * is the pair nodes that we are interested in. */ int free_instances = 0; /* Fixed constraint both sides, just satisfy. */ ck_assert_int_eq(satisfy_io_constraints(&free_instances, 2, pointers_to_edges, 2, true), OP_SUCCESS); ck_assert_int_eq(free_instances, 0); /* Fixed constraint both sides, not matching at * first sight, but will leave it to cross_match_constraints() * to decide */ ck_assert_int_eq(satisfy_io_constraints(&free_instances, 1, pointers_to_edges, 2, true), OP_SUCCESS); ck_assert_int_eq(free_instances, 0); /* Fixed constraint bith sides, plenty. 
*/ ck_assert_int_eq(satisfy_io_constraints(&free_instances, 5, pointers_to_edges, 2, true), OP_SUCCESS); ck_assert_int_eq(free_instances, 0); /* Fixed constraint node, flexible pair, just one. */ chosen_mb->node_array[0].provides_channels = -1; ck_assert_int_eq(satisfy_io_constraints(&free_instances, 2, pointers_to_edges, 2, true), OP_SUCCESS); ck_assert_int_eq(free_instances, 0); /* Fixed constraint node, flexible pair, * cross_match_constraints() will decide */ chosen_mb->node_array[0].provides_channels = -1; ck_assert_int_eq(satisfy_io_constraints(&free_instances, 1, pointers_to_edges, 2, true), OP_SUCCESS); ck_assert_int_eq(free_instances, 0); retire_test_satisfy_io_constraints(); /* Expand the semantics of remaining_free_channels to fixed constraints as in this case. */ /* Fixed constraint node, flexible pair, plenty. */ setup_test_satisfy_io_constraints(); chosen_mb->node_array[0].provides_channels = -1; ck_assert_int_eq(satisfy_io_constraints(&free_instances, 5, pointers_to_edges, 2, true), OP_SUCCESS); ck_assert_int_eq(free_instances, 0); free_instances = 0; retire_test_satisfy_io_constraints(); /* Flexible constraint both sides */ setup_test_satisfy_io_constraints(); chosen_mb->node_array[0].provides_channels = -1; ck_assert_int_eq(satisfy_io_constraints(&free_instances, -1, pointers_to_edges, 2, 1), OP_SUCCESS); ck_assert_int_eq(free_instances, -1); free_instances = 0; } END_TEST START_TEST(test_move) { int diff = 1; bool is_edge_incoming = true; DPRINTF(4, "%s()", __func__); pointers_to_edges[0]->from_instances = 1; pointers_to_edges[0]->to_instances = 2; pointers_to_edges[1]->from_instances = 2; pointers_to_edges[1]->to_instances = 1; ck_assert_int_eq(move(pointers_to_edges, n_ptedges, diff, is_edge_incoming), OP_SUCCESS); ck_assert_int_eq(pointers_to_edges[0]->from_instances, 1); ck_assert_int_eq(pointers_to_edges[0]->to_instances, 2); ck_assert_int_eq(pointers_to_edges[1]->from_instances, 2); ck_assert_int_eq(pointers_to_edges[1]->to_instances, 2); } 
END_TEST START_TEST(test_record_move_flexible) { /* Successful increase move */ int diff = 1; int index = -1; int to_move_index = 2; int instances = 0; int to_move = 2; record_move_flexible(&diff, &index, to_move_index, &instances, to_move); ck_assert_int_eq(diff, 0); ck_assert_int_eq(index, to_move_index); ck_assert_int_eq(instances, 1); /* Can't subtract instances from size 1 */ diff = -1; index = -1; to_move_index = 2; instances = 0; to_move = 1; record_move_flexible(&diff, &index, to_move_index, &instances, to_move); ck_assert_int_eq(diff, -1); ck_assert_int_eq(index, -1); ck_assert_int_eq(instances, 0); /* Successful decrease. diff greater than to_move */ diff = -3; index = -1; to_move_index = 2; instances = 0; to_move = 2; record_move_flexible(&diff, &index, to_move_index, &instances, to_move); ck_assert_int_eq(diff, -2); ck_assert_int_eq(index, to_move_index); ck_assert_int_eq(instances, -1); /* Successful decrease. diff smaller than to_move */ diff = -2; index = -1; to_move_index = 2; instances = 0; to_move = 4; record_move_flexible(&diff, &index, to_move_index, &instances, to_move); ck_assert_int_eq(diff, 0); ck_assert_int_eq(index, to_move_index); ck_assert_int_eq(instances, -2); } END_TEST START_TEST(test_record_move_unbalanced) { /* Successful increase move */ int diff = 1; int index = -1; int to_move_index = 2; int instances = 0; int to_move = 2; int pair = 3; record_move_unbalanced(&diff, &index, to_move_index, &instances, to_move, pair); ck_assert_int_eq(diff, 0); ck_assert_int_eq(index, to_move_index); ck_assert_int_eq(instances, 1); /* Successful decrease. diff greater than to_move - pair */ diff = -3; index = -1; to_move_index = 2; instances = 0; to_move = 2; pair = 1; record_move_unbalanced(&diff, &index, to_move_index, &instances, to_move, pair); ck_assert_int_eq(diff, -2); ck_assert_int_eq(index, to_move_index); ck_assert_int_eq(instances, -1); /* Successful decrease. 
diff smaller than to_move - pair */ diff = -2; index = -1; to_move_index = 2; instances = 0; to_move = 4; pair = 1; record_move_unbalanced(&diff, &index, to_move_index, &instances, to_move, pair); ck_assert_int_eq(diff, 0); ck_assert_int_eq(index, to_move_index); ck_assert_int_eq(instances, -2); } END_TEST START_TEST(test_reallocate_edge_pointer_array) { ck_assert_int_eq(reallocate_edge_pointer_array(NULL, 1), OP_ERROR); ck_assert_int_eq(reallocate_edge_pointer_array(&pointers_to_edges, -2), OP_ERROR); ck_assert_int_eq(reallocate_edge_pointer_array(&pointers_to_edges, 0), OP_ERROR); /* Not incresing the value of n_ptedges to not perplex freeing * pointers_to_edges because reallocation only accounts for * struct dgsh_edge *. */ ck_assert_int_eq(reallocate_edge_pointer_array(&pointers_to_edges, n_ptedges + 1), OP_SUCCESS); } END_TEST START_TEST(test_make_compact_edge_array) { ck_assert_int_eq(make_compact_edge_array(NULL, 2, pointers_to_edges), OP_ERROR); ck_assert_int_eq(make_compact_edge_array(&compact_edges, -2, pointers_to_edges), OP_ERROR); ck_assert_int_eq(make_compact_edge_array(&compact_edges, 0, pointers_to_edges), OP_ERROR); ck_assert_int_eq(make_compact_edge_array(&compact_edges, n_ptedges, NULL), OP_ERROR); struct dgsh_edge *p = pointers_to_edges[0]; pointers_to_edges[0] = NULL; ck_assert_int_eq(make_compact_edge_array(&compact_edges, n_ptedges, pointers_to_edges), OP_ERROR); pointers_to_edges[0] = p; ck_assert_int_eq(make_compact_edge_array(&compact_edges, n_ptedges, pointers_to_edges), OP_SUCCESS); } END_TEST START_TEST(test_write_output_fds) { int write_fd = 1; int *output_fds; /* 0 outgoing edges for node 3, so no action really. */ ck_assert_int_eq(write_output_fds(write_fd, output_fds, 0), OP_SUCCESS); /* Switch to node 2 that has 2 outgoing edges. 
*/
	memcpy(&self_node, &chosen_mb->node_array[2], sizeof(struct dgsh_node));
	output_fds = (int *)malloc(sizeof(int) * 2);
	ck_assert_int_eq(write_output_fds(write_fd, output_fds, DGSH_HANDLE_ERROR), OP_SUCCESS);
	free(output_fds);
	/* Incomplete testing since socket descriptors have not yet been setup.
	 * This will happen through the shell. */
}
END_TEST

/* set_dispatcher(): the dispatcher should be recorded as node 3's
 * input side. */
START_TEST(test_set_dispatcher)
{
	set_dispatcher();
	ck_assert_int_eq(chosen_mb->origin_index, 3);
	ck_assert_int_eq(chosen_mb->origin_fd_direction, 0); /* The input side */
}
END_TEST

/* alloc_node_connections(): argument validation plus one successful
 * allocation (freed here to avoid leaking). */
START_TEST(test_alloc_node_connections)
{
	struct dgsh_edge *test;
	/* It is assumed that negative number of edges have already
	 * been checked. See e.g. read_graph_solution().
	 */
	ck_assert_int_eq(alloc_node_connections(NULL, 2, 1, 2), OP_ERROR);
	ck_assert_int_eq(alloc_node_connections(&test, 1, 2, 2), OP_ERROR);
	ck_assert_int_eq(alloc_node_connections(&test, 1, -1, 2), OP_ERROR);
	ck_assert_int_eq(alloc_node_connections(&test, 1, 1, -2), OP_ERROR);
	ck_assert_int_eq(alloc_node_connections(&test, 1, 1, 2), OP_SUCCESS);
	free(test);
}
END_TEST

/* write_concs(): fork a child that reads back the concentrator array
 * the parent writes down a pipe.
 * NOTE(review): buf_size and i are unused; the child does not exit()
 * after reading, and a fork() failure (pid == -1) takes the child
 * branch because of the `pid <= 0` test — this matches the sibling
 * fork-based tests in this file, but confirm it is intentional. */
START_TEST(test_write_concs)
{
	int fd[2];
	int buf_size = getpagesize();
	int pid;
	int i;
	int n_concs = chosen_mb->n_concs;
	int concs_size = sizeof(struct dgsh_conc) * n_concs;
	struct dgsh_conc *conc_array = (struct dgsh_conc *)malloc(concs_size);
	if (pipe(fd) == -1) {
		perror("pipe open failed");
		exit(1);
	}
	DPRINTF(4, "%s()...", __func__);
	DPRINTF(4, "Opened pipe pair %d - %d.", fd[0], fd[1]);
	pid = fork();
	if (pid <= 0) {
		/* Child: consume the array the parent writes. */
		int rsize = -1;
		DPRINTF(4, "Child speaking with pid %d.", (int)getpid());
		close(fd[1]);
		DPRINTF(4, "Child reads concs of size %d.", concs_size);
		rsize = read(fd[0], conc_array, concs_size);
		if (rsize == -1) {
			DPRINTF(4, "Write concs failed.");
			exit(1);
		}
		DPRINTF(4, "Child: closes fd %d.", fd[0]);
		close(fd[0]);
		DPRINTF(4, "Child with pid %d exits.", (int)getpid());
	} else {
		/* Parent: the call under test. */
		DPRINTF(4, "Parent speaking with pid %d.", (int)getpid());
		ck_assert_int_eq(write_concs(fd[1]), OP_SUCCESS);
	}
}
END_TEST /* Incomplete? */ START_TEST(test_write_graph_solution) { int fd[2]; int buf_size = getpagesize(); int pid; int i; int n_nodes = chosen_mb->n_nodes; int graph_solution_size = sizeof(struct dgsh_node_connections) * n_nodes; struct dgsh_node_connections *graph_solution = (struct dgsh_node_connections *)malloc(graph_solution_size); if (pipe(fd) == -1) { perror("pipe open failed"); exit(1); } DPRINTF(4, "%s()...", __func__); DPRINTF(4, "Opened pipe pair %d - %d.", fd[0], fd[1]); pid = fork(); if (pid <= 0) { int rsize = -1; DPRINTF(4, "Child speaking with pid %d.", (int)getpid()); close(fd[1]); DPRINTF(4, "Child reads graph solution of size %d.", graph_solution_size); rsize = read(fd[0], graph_solution, graph_solution_size); if (rsize == -1) { DPRINTF(4, "Write graph solution failed."); exit(1); } for (i = 0; i < chosen_mb->n_nodes; i++) { struct dgsh_node_connections *nc = &graph_solution[i]; int in_edges_size = sizeof(struct dgsh_edge) * nc->n_edges_incoming; int out_edges_size = sizeof(struct dgsh_edge) * nc->n_edges_outgoing; if ((in_edges_size > buf_size) || (out_edges_size > buf_size)) { DPRINTF(4, "Dgsh negotiation graph solution for node at index %d: incoming connections of size %d or outgoing connections of size %d do not fit to buffer of size %d.\n", nc->node_index, in_edges_size, out_edges_size, buf_size); exit(1); } DPRINTF(4, "Child reads incoming edges of node %d in fd %d. Total size: %d", i, fd[1], in_edges_size); /* Transmit a node's incoming connections. */ rsize = read(fd[0], nc->edges_incoming, in_edges_size); if (rsize == -1) { DPRINTF(4, "Read edges incoming failed."); exit(1); } DPRINTF(4, "Child reads outgoing edges of node %d in fd %d. Total size: %d", i, fd[1], out_edges_size); /* Transmit a node's outgoing connections. 
*/ rsize = read(fd[0], nc->edges_outgoing, out_edges_size); if (rsize == -1) { DPRINTF(4, "Read edges outgoing failed."); exit(1); } } DPRINTF(4, "Child: closes fd %d.", fd[0]); close(fd[0]); DPRINTF(4, "Child with pid %d exits.", (int)getpid()); } else { DPRINTF(4, "Parent speaking with pid %d.", (int)getpid()); ck_assert_int_eq(write_graph_solution(fd[1]), OP_SUCCESS); } } END_TEST /* Incomplete? */ START_TEST(test_write_message_block) { int fd[2]; int pid; DPRINTF(4, "%s()", __func__); if(pipe(fd) == -1){ perror("pipe open failed"); exit(1); } DPRINTF(4, "Opened pipe pair %d - %d.", fd[0], fd[1]); pid = fork(); if (pid <= 0) { DPRINTF(4, "Child speaking with pid %d.", (int)getpid()); struct dgsh_negotiation *test_mb = (struct dgsh_negotiation *) malloc(sizeof(struct dgsh_negotiation)); int mb_struct_size = sizeof(struct dgsh_negotiation); int i = 0; int rsize = -1; close(fd[1]); DPRINTF(4, "Child reads message block structure of size %d.", mb_struct_size); rsize = read(fd[0], test_mb, mb_struct_size); if (rsize == -1) { DPRINTF(4, "Read message block failed."); exit(1); } int n_nodes = test_mb->n_nodes; int n_edges = test_mb->n_edges; int mb_nodes_size = sizeof(struct dgsh_node) * n_nodes; test_mb->node_array = (struct dgsh_node *)malloc(mb_nodes_size); int mb_edges_size = sizeof(struct dgsh_edge) * n_edges; test_mb->edge_array = (struct dgsh_edge *)malloc(mb_edges_size); DPRINTF(4, "Child reads message block node array of size %d.", mb_nodes_size); rsize = read(fd[0], test_mb->node_array, mb_nodes_size); if (rsize == -1) { DPRINTF(4, "Read node array failed."); exit(1); } DPRINTF(4, "Child reads message block edge array of size %d.", mb_edges_size); rsize = read(fd[0], test_mb->edge_array, mb_edges_size); if (rsize == -1) { DPRINTF(4, "Read edge array failed."); exit(1); } for (i = 0; i < test_mb->n_edges; i++) { struct dgsh_edge *e = &test_mb->edge_array[i]; DPRINTF(4, "Edge from: %d, to: %d", e->from, e->to); } DPRINTF(4, "Child: closes fd %d.", fd[0]); 
close(fd[0]); DPRINTF(4, "Child with pid %d exits.", (int)getpid()); retire_mb(test_mb); } else { DPRINTF(4, "Parent speaking with pid %d.", (int)getpid()); ck_assert_int_eq(write_message_block(fd[1]), OP_SUCCESS); } } END_TEST /* Incomplete? */ START_TEST(test_read_message_block) { int fd[2]; int pid; DPRINTF(4, "%s()", __func__); if(pipe(fd) == -1){ perror("pipe open failed"); exit(1); } DPRINTF(4, "Opened pipe pair %d - %d.", fd[0], fd[1]); pid = fork(); if (pid <= 0) { DPRINTF(4, "Child speaking with pid %d.", (int)getpid()); struct dgsh_negotiation *test_mb; setup_mb(&test_mb); int n_nodes = test_mb->n_nodes; int n_edges = test_mb->n_edges; int mb_struct_size = sizeof(struct dgsh_negotiation); int mb_nodes_size = sizeof(struct dgsh_node) * n_nodes; int mb_edges_size = sizeof(struct dgsh_edge) * n_edges; int i = 0; int wsize = -1; struct timespec tm; tm.tv_sec = 0; tm.tv_nsec = 1000000; close(fd[0]); DPRINTF(4, "Child writes message block structure of size %d.", mb_struct_size); wsize = write(fd[1], test_mb, mb_struct_size); if (wsize == -1) { DPRINTF(4, "Write message block structure failed."); exit(1); } /* Sleep for 1 millisecond to write-read orderly. * Why do we need this? * Shouldn't the write block by deafult? */ nanosleep(&tm, NULL); DPRINTF(4, "Child writes message block node array of size %d.", mb_nodes_size); wsize = write(fd[1], test_mb->node_array, mb_nodes_size); if (wsize == -1) { DPRINTF(4, "Write message block node array failed."); exit(1); } /* Sleep for 1 millisecond before the next operation. 
*/ nanosleep(&tm, NULL); DPRINTF(4, "Child writes message block edge array of size %d.", mb_edges_size); for (i = 0; i < test_mb->n_edges; i++) { struct dgsh_edge *e = &test_mb->edge_array[i]; DPRINTF(4, "Edge from: %d, to: %d", e->from, e->to); } wsize = write(fd[1], test_mb->edge_array, mb_edges_size); if (wsize == -1) { DPRINTF(4, "Write message block edge array failed."); exit(1); } DPRINTF(4, "Child: closes fd %d.", fd[1]); close(fd[1]); DPRINTF(4, "Child with pid %d exits.", (int)getpid()); retire_mb(test_mb); } else { DPRINTF(4, "Parent speaking with pid %d.", (int)getpid()); ck_assert_int_eq(read_message_block(fd[0], &fresh_mb), OP_SUCCESS); } } END_TEST /* Incomplete? */ START_TEST(test_read_graph_solution) { int fd[2]; int pid; int i; int n_nodes = fresh_mb->n_nodes; int buf_size = getpagesize(); int graph_solution_size = sizeof(struct dgsh_node_connections) * n_nodes; struct timespec tm; tm.tv_sec = 0; tm.tv_nsec = 1000000; DPRINTF(4, "%s()", __func__); if(pipe(fd) == -1){ perror("pipe open failed"); exit(1); } DPRINTF(4, "Opened pipe pair %d - %d.", fd[0], fd[1]); pid = fork(); if (pid <= 0) { int wsize = -1; DPRINTF(4, "Child speaking with pid %d.", (int)getpid()); setup_graph_solution(); struct dgsh_node_connections *graph_solution = chosen_mb->graph_solution; close(fd[0]); DPRINTF(4, "Child writes graph solution of size %d.", graph_solution_size); wsize = write(fd[1], graph_solution, graph_solution_size); if (wsize == -1) { DPRINTF(4, "Write graph solution failed."); exit(1); } /* Sleep for 1 millisecond before the next operation. 
*/ nanosleep(&tm, NULL); for (i = 0; i < chosen_mb->n_nodes; i++) { struct dgsh_node_connections *nc = &graph_solution[i]; int in_edges_size = sizeof(struct dgsh_edge) * nc->n_edges_incoming; int out_edges_size = sizeof(struct dgsh_edge) * nc->n_edges_outgoing; int wsize = -1; if ((in_edges_size > buf_size) || (out_edges_size > buf_size)) { DPRINTF(4, "Dgsh negotiation graph solution for node at index %d: incoming connections of size %d or outgoing connections of size %d do not fit to buffer of size %d.\n", nc->node_index, in_edges_size, out_edges_size, buf_size); exit(1); } DPRINTF(4, "Child writes incoming edges of node %d in fd %d. Total size: %d", i, fd[1], in_edges_size); /* Transmit a node's incoming connections. */ wsize = write(fd[1], nc->edges_incoming, in_edges_size); if (wsize == -1) { DPRINTF(4, "Write edges incoming failed."); exit(1); } /* Sleep for 1 millisecond before the next operation. */ nanosleep(&tm, NULL); DPRINTF(4, "Child writes outgoing edges of node %d in fd %d. Total size: %d", i, fd[1], out_edges_size); /* Transmit a node's outgoing connections. */ wsize = write(fd[1], nc->edges_outgoing, out_edges_size); if (wsize == -1) { DPRINTF(4, "Write edges outgoing failed."); exit(1); } /* Sleep for 1 millisecond before the next operation. 
*/
			nanosleep(&tm, NULL);
		}
		DPRINTF(4, "Child: closes fd %d.", fd[1]);
		close(fd[1]);
		DPRINTF(4, "Child with pid %d exits.", (int)getpid());
		retire_graph_solution(graph_solution, chosen_mb->n_nodes - 1);
	} else {
		/* Parent: the call under test. */
		DPRINTF(4, "Parent speaking with pid %d.", (int)getpid());
		ck_assert_int_eq(read_graph_solution(fd[0], fresh_mb), OP_SUCCESS);
	}
}
END_TEST

/* read_concs(): the child writes the concentrator array down a pipe;
 * the parent's read_concs() must consume it successfully.
 * NOTE(review): i and buf_size are unused; the child neither closes
 * fd[1] nor exits after writing, and fork() failure takes the child
 * branch (`pid <= 0`) — confirm against the sibling tests' intent. */
START_TEST(test_read_concs)
{
	int fd[2];
	int pid;
	int i;
	int n_concs = fresh_mb->n_concs;
	int buf_size = getpagesize();
	int concs_size = sizeof(struct dgsh_conc) * n_concs;
	DPRINTF(4, "%s()", __func__);
	if(pipe(fd) == -1){
		perror("pipe open failed");
		exit(1);
	}
	DPRINTF(4, "Opened pipe pair %d - %d.", fd[0], fd[1]);
	pid = fork();
	if (pid <= 0) {
		/* Child: produce the byte stream the parent will parse. */
		int wsize = -1;
		DPRINTF(4, "Child speaking with pid %d.", (int)getpid());
		setup_graph_solution();
		struct dgsh_conc *concs = chosen_mb->conc_array;
		close(fd[0]);
		DPRINTF(4, "Child writes concs of size %d.", concs_size);
		wsize = write(fd[1], concs, concs_size);
		if (wsize == -1) {
			DPRINTF(4, "Write concs failed.");
			exit(1);
		}
	} else {
		/* Parent: the call under test. */
		DPRINTF(4, "Parent speaking with pid %d.", (int)getpid());
		ck_assert_int_eq(read_concs(fd[0], fresh_mb), OP_SUCCESS);
	}
}
END_TEST

/* alloc_fds(): n_fds == 0 leaves the pointer NULL; n_fds > 0 allocates. */
START_TEST(test_alloc_fds)
{
	int *fds = NULL;
	int n_fds = 0;
	ck_assert_int_eq(alloc_fds(&fds, n_fds), OP_SUCCESS);
	ck_assert_int_eq((int)(long)fds, 0);
	n_fds = 2;
	ck_assert_int_eq(alloc_fds(&fds, n_fds), OP_SUCCESS);
	ck_assert_int_ne((int)(long)fds, 0);
	free(fds);
}
END_TEST

/* alloc_io_fds(): with node 3's two incoming edges given one instance
 * each, this process should expect two input fds and no output fds. */
START_TEST(test_alloc_io_fds)
{
	/* By default initialised to 0.
See setup_chosen_mb() */ chosen_mb->graph_solution[3].edges_incoming[0].instances = 1; chosen_mb->graph_solution[3].edges_incoming[1].instances = 1; ck_assert_int_eq(alloc_io_fds(), OP_SUCCESS); ck_assert_int_eq(self_pipe_fds.n_input_fds, 2); ck_assert_int_eq(self_pipe_fds.n_output_fds, 0); } END_TEST START_TEST(test_get_origin_pid) { chosen_mb->origin_index = 3; ck_assert_int_eq(get_origin_pid(chosen_mb), 103); chosen_mb->origin_index = 1; ck_assert_int_eq(get_origin_pid(chosen_mb), 101); } END_TEST START_TEST(test_get_expected_fds_n) { struct dgsh_node_connections *graph_solution = chosen_mb->graph_solution; graph_solution[0].edges_incoming[0].instances = 1; graph_solution[1].edges_incoming[0].instances = 1; graph_solution[3].edges_incoming[0].instances = 1; graph_solution[3].edges_incoming[1].instances = 1; ck_assert_int_eq(get_expected_fds_n(chosen_mb, 103), 2); ck_assert_int_eq(get_expected_fds_n(chosen_mb, 100), 1); ck_assert_int_eq(get_expected_fds_n(chosen_mb, 101), 1); ck_assert_int_eq(get_expected_fds_n(chosen_mb, 102), 0); } END_TEST START_TEST(test_get_provided_fds_n) { struct dgsh_node_connections *graph_solution = chosen_mb->graph_solution; graph_solution[0].edges_outgoing[0].instances = 1; graph_solution[1].edges_outgoing[0].instances = 1; graph_solution[1].edges_outgoing[1].instances = 1; graph_solution[2].edges_outgoing[0].instances = 1; graph_solution[2].edges_outgoing[1].instances = 1; ck_assert_int_eq(get_provided_fds_n(chosen_mb, 103), 0); ck_assert_int_eq(get_provided_fds_n(chosen_mb, 100), 1); ck_assert_int_eq(get_provided_fds_n(chosen_mb, 101), 2); ck_assert_int_eq(get_provided_fds_n(chosen_mb, 102), 2); } END_TEST START_TEST (test_read_write_fd) { int pipefd[2]; int sock, rsock; int readfd; char msg[] = "hello"; char buff[20]; int n; pid_t pid; socklen_t len; struct sockaddr_un local, remote; if (pipe(pipefd) == -1) err(1, "pipe"); local.sun_family = AF_UNIX; snprintf(local.sun_path, sizeof(local.sun_path), "/tmp/conc-%d", getpid()); len = 
strlen(local.sun_path) + 1 + sizeof(local.sun_family); switch ((pid = fork())) { case 0: /* Child: connect, pass fd and write test message */ if ((sock = socket(AF_UNIX, SOCK_STREAM, 0)) == -1) err(1, "socket"); sleep(1); if (connect(sock, (struct sockaddr *)&local, len) == -1) err(1, "connect %s", local.sun_path); write_fd(sock, pipefd[STDIN_FILENO]); close(sock); // Should wait for data to be transmitted close(pipefd[STDIN_FILENO]); sleep(1); if (write(pipefd[STDOUT_FILENO], msg, sizeof(msg)) <= 0) err(1, "write"); exit(0); default: /* Parent: accept connection, read fd and read test message */ close(pipefd[STDIN_FILENO]); close(pipefd[STDOUT_FILENO]); if ((sock = socket(AF_UNIX, SOCK_STREAM, 0)) == -1) err(1, "socket"); if (bind(sock, (struct sockaddr *)&local, len) == -1) err(1, "bind %s", local.sun_path); if (listen(sock, 5) == -1) err(1, "listen"); rsock = accept(sock, (struct sockaddr *)&remote, &len); readfd = read_fd(rsock); if ((n = read(readfd, buff, sizeof(buff))) == -1) err(1, "read"); (void)unlink(local.sun_path); ck_assert_int_eq(n, sizeof(msg)); ck_assert_str_eq(msg, buff); break; case -1: /* Error */ err(1, "fork"); } } END_TEST /* Incomplete? */ START_TEST(test_read_input_fds) { int sockets[2]; int fd, ping; struct msghdr msg; struct iovec vec[1]; union fdmsg cmsg; struct cmsghdr *h; int wsize = -1; memset(&msg, 0, sizeof(struct msghdr)); DPRINTF(4, "%s()...pid %d", __func__, (int)getpid()); if (socketpair(AF_UNIX, SOCK_DGRAM, 0, sockets) < 0) { perror("Error opening stream socket pair. 
Exiting now."); exit(1); } DPRINTF(4, "Opened socket pair %d - %d.", sockets[0], sockets[1]); fd = open("unit-test-dgsh", O_CREAT | O_RDWR, 0660); wsize = write(fd, "Unit testing dgsh...", 21); if (wsize == -1) { DPRINTF(4, "Write to 'unit-test-dgsh' failed."); exit(1); } close(fd); fd = open("unit-test-dgsh", O_RDONLY); if (fd < 0) { perror("Failed to open file test-dgsh for reading."); exit(1); } int pid = fork(); if (pid <= 0) { DPRINTF(4, "Child speaking with pid %d.", (int)getpid()); DPRINTF(4, "Child closes socket %d.", sockets[1]); close(sockets[1]); vec[0].iov_base = &ping; vec[0].iov_len = 1; msg.msg_iov = vec; msg.msg_iovlen = 1; msg.msg_name = 0; msg.msg_namelen = 0; msg.msg_control = cmsg.buf; msg.msg_controllen = sizeof(union fdmsg); msg.msg_flags = 0; h = CMSG_FIRSTHDR(&msg); h->cmsg_level = SOL_SOCKET; h->cmsg_type = SCM_RIGHTS; h->cmsg_len = CMSG_LEN(sizeof(int)); *((int*)CMSG_DATA(h)) = fd; DPRINTF(4, "Child goes sendmsg()"); if((sendmsg(sockets[0], &msg, 0)) < 0){ perror("sendmsg()"); exit(EXIT_FAILURE); } DPRINTF(4, "Child: closes fd %d.", fd); close(fd); DPRINTF(4, "Child with pid %d exits.", (int)getpid()); } else { struct dgsh_node_connections *graph_solution = chosen_mb->graph_solution; memcpy(&self_node, &chosen_mb->node_array[1], sizeof(struct dgsh_node)); /* Edges have been setup with 0 instances. * See setup_chosen_mb(). * Edges then copied to graph_solution. * See setup_graph_solution(). */ graph_solution[1].edges_incoming[0].instances = 1; int *input_fds = (int *)malloc(sizeof(int)); input_fds[0] = -1; DPRINTF(4, "Parent speaking with pid %d.", (int)getpid()); ck_assert_int_eq(read_input_fds(sockets[1], input_fds), OP_SUCCESS); ck_assert_int_ge(input_fds[0], 3); /* Hard-coded ceiling to check whether some weird * error has caused some random number to slip in. */ ck_assert_int_le(input_fds[0], 20); free(input_fds); DPRINTF(4, "Parent with pid %d exits.", (int)getpid()); } close(sockets[0]); close(sockets[1]); } END_TEST /* Incomplete? 
*/ START_TEST(test_read_chunk) { /* Requires setting up of I/O multiplexing (a bash shell extension) * in order to be able to support bidirectional negotiation. * Without it we cannot test this function because it tries to read * repeatedly from stdin and then stdout until it manages. * We cannot feed it; writing to stdin or stdout * ends up in the unit test file output). * The good news is that the core of this function is the call to * call read, which has been successfully tested. * Then there is variable checking to determine the exit code. */ int fd[2]; int wsize = -1; DPRINTF(4, "%s()...", __func__); if(pipe(fd) == -1){ perror("pipe open failed"); exit(1); } /* Broken pipe error if we close the other side. */ //close(fd[0]); wsize = write(fd[1], "test-in", 9); if (wsize == -1) { DPRINTF(4, "Write to 'test-in' failed."); exit(1); } char buf[32]; int read_fd = -1; int bytes_read = -1; ck_assert_int_eq(read_chunk(fd[0], buf, 32, &bytes_read, 5), OP_SUCCESS); ck_assert_int_eq(bytes_read, 9); close(fd[0]); close(fd[1]); } END_TEST START_TEST(test_call_read) { int fd[2]; int wsize = -1; DPRINTF(4, "%s()...", __func__); if(pipe(fd) == -1){ perror("pipe open failed"); exit(1); } if ((wsize = write(fd[1], "test", 5)) == -1) { DPRINTF(4, "Write to 'test' failed.\n"); exit(1); } char buf[32]; int bytes_read = -1; int error_code = -1; ck_assert_int_eq(call_read(fd[0], buf, 32, &bytes_read, &error_code), OP_SUCCESS); ck_assert_int_eq(bytes_read, 5); ck_assert_int_eq(error_code, 0); close(fd[0]); close(fd[1]); } END_TEST START_TEST(test_alloc_copy_mb) { const int size = sizeof(struct dgsh_negotiation); char buf[512]; struct dgsh_negotiation *mb; ck_assert_int_eq(alloc_copy_mb(&mb, buf, 86, 512), OP_ERROR); char buf2[32]; ck_assert_int_eq(alloc_copy_mb(&mb, buf2, size, 32), OP_ERROR); ck_assert_int_eq(alloc_copy_mb(&mb, buf, size, 512), OP_SUCCESS); free(mb); } END_TEST START_TEST(test_alloc_copy_proc_pids) { struct dgsh_conc c; c.n_proc_pids = 2; const int size = 
sizeof(int) * c.n_proc_pids; int pids[2] = {101, 103}; char buf[512]; memcpy(buf, pids, size); ck_assert_int_eq(alloc_copy_proc_pids(&c, buf, 86, 512), OP_ERROR); char buf2[8]; ck_assert_int_eq(alloc_copy_proc_pids(&c, buf2, size, 4), OP_ERROR); ck_assert_int_eq(alloc_copy_proc_pids(&c, buf, size, 512), OP_SUCCESS); } END_TEST START_TEST(test_alloc_copy_concs) { const int n_concs = fresh_mb->n_concs = 1; const int size = sizeof(struct dgsh_conc) * n_concs; struct dgsh_conc c; char buf[512]; memcpy(buf, &c, size); ck_assert_int_eq(alloc_copy_concs(fresh_mb, buf, 86, 512), OP_ERROR); char buf2[8]; ck_assert_int_eq(alloc_copy_concs(fresh_mb, buf2, size, 8), OP_ERROR); ck_assert_int_eq(alloc_copy_concs(fresh_mb, buf, size, 512), OP_SUCCESS); } END_TEST START_TEST(test_alloc_copy_nodes) { const int size = sizeof(struct dgsh_node) * fresh_mb->n_nodes; char buf[512]; ck_assert_int_eq(alloc_copy_nodes(fresh_mb, buf, 86, 512), OP_ERROR); char buf2[32]; ck_assert_int_eq(alloc_copy_nodes(fresh_mb, buf2, size, 32), OP_ERROR); free(fresh_mb->node_array); /* to avoid memory leak */ ck_assert_int_eq(alloc_copy_nodes(fresh_mb, buf, size, 512), OP_SUCCESS); } END_TEST START_TEST(test_alloc_copy_edges) { const int size = sizeof(struct dgsh_edge) * fresh_mb->n_edges; char buf[256]; ck_assert_int_eq(alloc_copy_edges(fresh_mb, buf, 86, 256), OP_ERROR); char buf2[32]; ck_assert_int_eq(alloc_copy_edges(fresh_mb, buf2, size, 32), OP_ERROR); free(fresh_mb->edge_array); /* to avoid memory leak */ ck_assert_int_eq(alloc_copy_edges(fresh_mb, buf, size, 256), OP_SUCCESS); } END_TEST START_TEST(test_alloc_copy_graph_solution) { const int size = sizeof(struct dgsh_node_connections) * fresh_mb->n_nodes; char buf[256]; ck_assert_int_eq(alloc_copy_graph_solution(fresh_mb, buf, 86, 256), OP_ERROR); char buf2[32]; ck_assert_int_eq(alloc_copy_edges(fresh_mb, buf2, size, 32), OP_ERROR); /* Free to avoid memory leak. * About the free() see how graph_solution is malloc'ed * at alloc_copy_graph_solution. 
*/ free(fresh_mb->graph_solution); ck_assert_int_eq(alloc_copy_graph_solution(fresh_mb, buf, size, 256), OP_SUCCESS); free(fresh_mb->graph_solution); /* Easier to handle here. */ } END_TEST START_TEST(test_check_read) { ck_assert_int_eq(check_read(512, 1024, 256), OP_ERROR); ck_assert_int_eq(check_read(512, 256, 512), OP_ERROR); ck_assert_int_eq(check_read(512, 1024, 512), OP_SUCCESS); } END_TEST /* *START_TEST(test_point_io_direction) { ck_assert_int_eq(point_io_direction(STDOUT_FILENO), STDIN_FILENO); memcpy(&self_node, &chosen_mb->node_array[2], sizeof(struct dgsh_node)); ck_assert_int_eq(point_io_direction(STDIN_FILENO), STDOUT_FILENO); } END_TEST */ START_TEST(test_analyse_read) { DPRINTF(4, "%s()", __func__); /* error state flag seen; terminal process such as node 3 * which is the current node leave. */ bool should_transmit_mb = false; int serialno_ntimes_same = 0; int run_ntimes_same = 0; int error_ntimes_same = 0; int draw_exit_ntimes_same = 0; fresh_mb->state = PS_ERROR; fresh_mb->is_error_confirmed = true; ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); ck_assert_int_eq(chosen_mb->state, PS_ERROR); ck_assert_int_eq(error_ntimes_same, 1); ck_assert_int_eq((long int)chosen_mb, (long int)fresh_mb); retire_test_analyse_read(); setup_test_analyse_read(); /* error state flag seen; non-terminal process such as node 1 * which is the current node leave when they see it the second time. * This is the first. 
*/ memcpy(&self_node, &chosen_mb->node_array[1], sizeof(struct dgsh_node)); error_ntimes_same = 0; fresh_mb->state = PS_ERROR; fresh_mb->is_error_confirmed = true; ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); ck_assert_int_eq(chosen_mb->state, PS_ERROR); ck_assert_int_eq(error_ntimes_same, 1); ck_assert_int_eq((long int)chosen_mb, (long int)fresh_mb); retire_test_analyse_read(); setup_test_analyse_read(); /* error state flag seen; non-terminal process such as node 1 * which is the current node have to leave when they see it * the second time. This is the second. * Before leaving they have to pass the block. */ memcpy(&self_node, &chosen_mb->node_array[1], sizeof(struct dgsh_node)); error_ntimes_same = 1; fresh_mb->state = PS_ERROR; fresh_mb->is_error_confirmed = true; ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); ck_assert_int_eq(chosen_mb->state, PS_ERROR); ck_assert_int_eq(error_ntimes_same, 2); ck_assert_int_eq((long int)chosen_mb, (long int)fresh_mb); retire_test_analyse_read(); setup_test_analyse_read(); /* All processes have to pass the block first except * for the ones who passed the block the last time * before finding a solution. 
*/ error_ntimes_same = 1; memcpy(&self_node, &chosen_mb->node_array[1], sizeof(struct dgsh_node)); fresh_mb->state = PS_ERROR; fresh_mb->is_error_confirmed = true; ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); ck_assert_int_eq(chosen_mb->state, PS_ERROR); ck_assert_int_eq(error_ntimes_same, 2); ck_assert_int_eq((long int)chosen_mb, (long int)fresh_mb); retire_test_analyse_read(); setup_test_analyse_read(); /* run state flag seen; terminal process such as node 3 * which is the current node leave. */ run_ntimes_same = 0; error_ntimes_same = 0; fresh_mb->state = PS_RUN; ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); ck_assert_int_eq(chosen_mb->state, PS_RUN); ck_assert_int_eq(run_ntimes_same, 1); ck_assert_int_eq((long int)chosen_mb, (long int)fresh_mb); retire_test_analyse_read(); setup_test_analyse_read(); /* run state flag seen; non-terminal process such as node 1 * which is the current node leave when they see it the second time. * This is the first. */ memcpy(&self_node, &chosen_mb->node_array[1], sizeof(struct dgsh_node)); run_ntimes_same = 0; fresh_mb->state = PS_RUN; ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); ck_assert_int_eq(chosen_mb->state, PS_RUN); ck_assert_int_eq(run_ntimes_same, 1); ck_assert_int_eq((long int)chosen_mb, (long int)fresh_mb); retire_test_analyse_read(); setup_test_analyse_read(); /* run state flag seen; non-terminal process such as node 1 * which is the current node have to leave when they see it * the second time. This is the second. 
* Before leaving they have to pass the block. */ memcpy(&self_node, &chosen_mb->node_array[1], sizeof(struct dgsh_node)); run_ntimes_same = 1; fresh_mb->state = PS_RUN; ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); ck_assert_int_eq(chosen_mb->state, PS_RUN); ck_assert_int_eq(run_ntimes_same, 2); ck_assert_int_eq((long int)chosen_mb, (long int)fresh_mb); retire_test_analyse_read(); setup_test_analyse_read(); /* When they are to leave they pass the block first except * if they are the ones who passed the block the last time * before finding a solution. */ memcpy(&self_node, &chosen_mb->node_array[1], sizeof(struct dgsh_node)); should_transmit_mb = false; fresh_mb->state = PS_RUN; run_ntimes_same = 1; ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); ck_assert_int_eq(chosen_mb->state, PS_RUN); ck_assert_int_eq(run_ntimes_same, 2); ck_assert_int_eq((long int)chosen_mb, (long int)fresh_mb); retire_test_analyse_read(); /* Negotiation state */ setup_test_analyse_read(); run_ntimes_same = 0; error_ntimes_same = 0; /* set_dispatcher() */ self_node_io_side.index = 3; self_node_io_side.fd_direction = STDIN_FILENO; fresh_mb->initiator_pid = 110; /* Younger than chosen_mb. 
*/ ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); retire_test_analyse_read(); setup_test_analyse_read(); should_transmit_mb = false; memcpy(&self_node, &chosen_mb->node_array[3], sizeof(struct dgsh_node)); /* set_dispatcher() */ self_node_io_side.index = 3; self_node_io_side.fd_direction = STDIN_FILENO; fresh_mb->initiator_pid = 103; /* Same initiator */ ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); ck_assert_int_eq((long int)chosen_mb, (long int)fresh_mb); retire_test_analyse_read(); setup_test_analyse_read(); memcpy(&self_node, &chosen_mb->node_array[0], sizeof(struct dgsh_node)); /* set_dispatcher() */ self_node_io_side.index = 0; self_node_io_side.fd_direction = STDOUT_FILENO; chosen_mb->origin_index = 3; chosen_mb->origin_fd_direction = STDIN_FILENO; fresh_mb->initiator_pid = 103; /* Same initiator */ ck_assert_int_eq(analyse_read(fresh_mb, &run_ntimes_same, &error_ntimes_same, &draw_exit_ntimes_same, self_node.name, self_node.pid, &self_node.requires_channels, &self_node.provides_channels), OP_SUCCESS); } END_TEST START_TEST(test_free_mb) { free_mb(chosen_mb); } END_TEST START_TEST(test_fill_node) { /* self node is node at index 3 of chosen_mb */ fill_node("test", 1003, NULL, NULL); ck_assert_int_eq(strcmp(self_node.name, "test"), 0); ck_assert_int_eq(self_node.pid, 1003); ck_assert_int_eq(self_node.requires_channels, 1); ck_assert_int_eq(self_node.provides_channels, 0); int n_input_fds = 1; int n_output_fds = 1; fill_node("test", 1003, &n_input_fds, &n_output_fds); ck_assert_int_eq(self_node.requires_channels, 1); ck_assert_int_eq(self_node.provides_channels, 1); } END_TEST START_TEST(test_try_add_dgsh_node) { int n_input_fds = 1; int 
n_output_fds = 1; ck_assert_int_eq(try_add_dgsh_node("proc3", 103, &n_input_fds, &n_output_fds), OP_EXISTS); ck_assert_int_eq(chosen_mb->n_nodes, 4); ck_assert_int_eq(self_node_io_side.index, 0); ck_assert_int_eq(self_node.index, 3); ck_assert_int_eq(try_add_dgsh_node("proc4", 104, &n_input_fds, &n_output_fds), OP_SUCCESS); ck_assert_int_eq(chosen_mb->n_nodes, 5); ck_assert_int_eq(self_node_io_side.index, 4); ck_assert_int_eq(self_node.index, 4); } END_TEST START_TEST(test_try_add_dgsh_edge) { /* Better in a setup function. */ chosen_mb->origin_fd_direction = STDOUT_FILENO; chosen_mb->origin_index = 0; /* self_node_io_side should also be set; it is set in setup */ ck_assert_int_eq(try_add_dgsh_edge(), OP_EXISTS); /* New edge: from new node to existing */ struct dgsh_node new; new.index = 4; new.pid = 104; memcpy(new.name, "proc4", 6); new.requires_channels = 1; new.provides_channels = 1; new.dgsh_in = 1; new.dgsh_out = 1; /* Better in a setup function. */ chosen_mb->origin_fd_direction = STDOUT_FILENO; chosen_mb->origin_index = new.index; /* self_node_io_side should also be set; it is set in setup */ memcpy(&self_node, &new, sizeof(struct dgsh_node)); chosen_mb->n_nodes++; chosen_mb->node_array = realloc(chosen_mb->node_array, sizeof(struct dgsh_node) * chosen_mb->n_nodes); memcpy(&chosen_mb->node_array[chosen_mb->n_nodes - 1], &new, sizeof(struct dgsh_node)); ck_assert_int_eq(try_add_dgsh_edge(), OP_SUCCESS); /* New edge: from existing to new node */ /* Better in a setup function. 
*/ chosen_mb->origin_fd_direction = STDOUT_FILENO; chosen_mb->origin_index = 0; /* self_node_io_side should also be set; it is set in setup */ self_node_io_side.index = new.index; self_node_io_side.fd_direction = STDIN_FILENO; ck_assert_int_eq(try_add_dgsh_edge(), OP_SUCCESS); /* NOOP: message block created just now */ chosen_mb->origin_index = -1; ck_assert_int_eq(try_add_dgsh_edge(), OP_NOOP); } END_TEST START_TEST(test_add_edge) { struct dgsh_edge new; new.from = 2; new.to = 3; new.instances = 0; ck_assert_int_eq(add_edge(&new), OP_SUCCESS); ck_assert_int_eq(chosen_mb->n_edges, 6); } END_TEST START_TEST(test_fill_dgsh_edge) { struct dgsh_edge new; /* STDIN -> STDOUT */ /* Better in a setup function. */ chosen_mb->origin_fd_direction = STDOUT_FILENO; chosen_mb->origin_index = 0; /* self_node_io_side should also be set; it is set in setup */ ck_assert_int_eq(fill_dgsh_edge(&new), OP_SUCCESS); /* Impossible case. No such origin. */ chosen_mb->origin_index = 7; ck_assert_int_eq(fill_dgsh_edge(&new), OP_ERROR); /* STDOUT -> STDIN */ chosen_mb->origin_fd_direction = STDIN_FILENO; chosen_mb->origin_index = 3; memcpy(&self_node, &chosen_mb->node_array[0], sizeof(struct dgsh_node)); self_node_io_side.fd_direction = STDOUT_FILENO; self_node_io_side.index = 0; /* self_node_io_side should also be set; it is set in setup */ ck_assert_int_eq(fill_dgsh_edge(&new), OP_SUCCESS); } END_TEST START_TEST(test_lookup_dgsh_edge) { struct dgsh_edge new; new.from = 2; new.to = 3; ck_assert_int_eq(lookup_dgsh_edge(&new), OP_CREATE); ck_assert_int_eq(lookup_dgsh_edge(&chosen_mb->edge_array[4]), OP_EXISTS); } END_TEST START_TEST(test_add_node) { struct dgsh_node new; new.pid = 104; memcpy(new.name, "proc4", 6); new.requires_channels = 1; new.provides_channels = 1; memcpy(&self_node, &new, sizeof(struct dgsh_node)); ck_assert_int_eq(add_node(), OP_SUCCESS); ck_assert_int_eq(chosen_mb->n_nodes, 5); ck_assert_int_eq(self_node_io_side.index, 4); ck_assert_int_eq(self_node.index, 4); } END_TEST 
/*
 * Remaining unit tests for the dgsh negotiation library, followed by the
 * Check suite definitions ("Connect", "Solve", "Broadcast", "Concentrator")
 * and the test-runner entry point.
 * NOTE(review): these tests mutate shared global fixtures (chosen_mb,
 * self_node, self_node_io_side, and statics from dgsh-conc.c such as pid,
 * nfd, multiple_inputs, noinput) -- statement order is significant.
 */

/* construct_message_block() must produce a fresh, empty negotiation block
 * initiated by the given pid, with no nodes and no origin yet. */
START_TEST(test_construct_message_block)
{
	DPRINTF(4, "%s()", __func__);
	int pid = 7;
	const char tool_name[10] = "test";
	ck_assert_int_eq(construct_message_block(tool_name, pid), OP_SUCCESS);
	ck_assert_int_eq(chosen_mb->version, 1);
	/* node_array starts out NULL (compared as integer 0) */
	ck_assert_int_eq((long)chosen_mb->node_array, 0);
	ck_assert_int_eq(chosen_mb->n_nodes, 0);
	ck_assert_int_eq(chosen_mb->initiator_pid, pid);
	ck_assert_int_eq(chosen_mb->state, PS_NEGOTIATION);
	ck_assert_int_eq(chosen_mb->origin_index, -1);
	ck_assert_int_eq(chosen_mb->origin_fd_direction, -1);
	free(chosen_mb);
}
END_TEST

/* get_env_var() must read an integer value out of the named
 * environment variable. */
START_TEST(test_get_env_var)
{
	DPRINTF(4, "%s()...", __func__);
	int value = -1;
	putenv("DGSH_IN=0");
	get_env_var("DGSH_IN", &value);
	ck_assert_int_eq(value, 0);
	value = -1;
	putenv("DGSH_OUT=1");
	get_env_var("DGSH_OUT", &value);
	ck_assert_int_eq(value, 1);
}
END_TEST

/* get_environment_vars() must populate self_node's dgsh_in/dgsh_out
 * flags from DGSH_IN / DGSH_OUT. */
START_TEST(test_get_environment_vars)
{
	DPRINTF(4, "%s()...", __func__);
	putenv("DGSH_IN=0");
	putenv("DGSH_OUT=1");
	get_environment_vars();
	ck_assert_int_eq(self_node.dgsh_in, 0);
	ck_assert_int_eq(self_node.dgsh_out, 1);
}
END_TEST

/* validate_input() accepts NULL channel pointers and values >= -1;
 * it rejects a NULL tool name and channel counts below -1. */
START_TEST(test_validate_input)
{
	int i = 0;
	int o = 0;
	/* NULL tool name is an error regardless of channels */
	ck_assert_int_eq(validate_input(&i, &o, NULL), OP_ERROR);
	ck_assert_int_eq(validate_input(NULL, &o, "test"), OP_SUCCESS);
	ck_assert_int_eq(validate_input(&i, NULL, "test"), OP_SUCCESS);
	ck_assert_int_eq(validate_input(NULL, NULL, "test"), OP_SUCCESS);
	ck_assert_int_eq(validate_input(&i, &o, "test"), OP_SUCCESS);
	i = 0;
	o = 1;
	ck_assert_int_eq(validate_input(&i, &o, "test"), OP_SUCCESS);
	i = 1;
	o = 0;
	ck_assert_int_eq(validate_input(&i, &o, "test"), OP_SUCCESS);
	/* -1 means "flexible" and is allowed */
	i = -1;
	o = -1;
	ck_assert_int_eq(validate_input(&i, &o, "test"), OP_SUCCESS);
	/* anything below -1 is invalid */
	i = -2;
	o = -1;
	ck_assert_int_eq(validate_input(&i, &o, "test"), OP_ERROR);
	i = -1;
	o = -2;
	ck_assert_int_eq(validate_input(&i, &o, "test"), OP_ERROR);
	i = 1000;
	o = 1000;
	ck_assert_int_eq(validate_input(&i, &o, "test"), OP_SUCCESS);
}
END_TEST

/* set_fds() must register two fds and point the I/O side to STDIN for a
 * terminal node, STDOUT for a non-terminal one. */
START_TEST(test_set_fds)
{
	/* For node 3 which is a terminal node */
	fd_set read_fds, write_fds;
	ck_assert_int_eq(set_fds(&read_fds, &write_fds, 0), 2);
	ck_assert_int_eq(self_node_io_side.fd_direction, STDIN_FILENO);
	ck_assert_int_eq(set_fds(&read_fds, &write_fds, 1), 2);
	ck_assert_int_eq(self_node_io_side.fd_direction, STDIN_FILENO);
	/* Make node 1 self node, which is a non terminal node */
	memcpy(&self_node, &chosen_mb->node_array[1], sizeof(struct dgsh_node));
	ck_assert_int_eq(set_fds(&read_fds, &write_fds, 0), 2);
	ck_assert_int_eq(self_node_io_side.fd_direction, STDOUT_FILENO);
	ck_assert_int_eq(set_fds(&read_fds, &write_fds, 1), 2);
}
END_TEST

/* Smoke test of the top-level negotiation entry point. */
START_TEST(test_dgsh_negotiate)
{
	int *input_fds;
	int n_input_fds = 0;
	int *output_fds;
	int n_output_fds = 0;
	ck_assert_int_eq(dgsh_negotiate(0, "test", &n_input_fds, &n_output_fds, &input_fds, &output_fds), 0);
}
END_TEST

/* Suite conc */

/* is_ready() must report ready only for node 3 when the block is in
 * PS_RUN, without touching the per-process bookkeeping flags in pi[]. */
START_TEST(test_is_ready)
{
	chosen_mb->state = PS_RUN;
	ck_assert_int_eq(is_ready(3, chosen_mb), true);
	ck_assert_int_eq(is_ready(1, chosen_mb), false);
	ck_assert_int_eq(is_ready(0, chosen_mb), false);
	ck_assert_int_eq(pi[0].seen, false);
	ck_assert_int_eq(pi[1].written, false);
	ck_assert_int_eq(pi[1].run_ready, false);
}
END_TEST

/* next_fd() must walk the concentrator's fd ring correctly for the
 * multiple-input, single-input, and no-input configurations, setting
 * the restore-origin flag where a wrap-around happens. */
START_TEST (test_next_fd)
{
	multiple_inputs = true;
	nfd = 5;
	bool ro = false; /* restore origin */
	ck_assert_int_eq(next_fd(0, &ro), 1);
	ck_assert_int_eq(ro, false);
	ck_assert_int_eq(next_fd(1, &ro), 0);
	ck_assert_int_eq(ro, false);
	ck_assert_int_eq(next_fd(4, &ro), 4);
	ck_assert_int_eq(ro, true);
	ro = false;
	ck_assert_int_eq(next_fd(3, &ro), 3);
	ck_assert_int_eq(ro, true);
	multiple_inputs = false;
	noinput = false;
	ro = false;
	ck_assert_int_eq(next_fd(0, &ro), 1);
	ck_assert_int_eq(ro, false);
	ck_assert_int_eq(next_fd(1, &ro), 3);
	ck_assert_int_eq(ro, true);
	ro = false;
	ck_assert_int_eq(next_fd(3, &ro), 4);
	ck_assert_int_eq(ro, true);
	ro = false;
	ck_assert_int_eq(next_fd(4, &ro), 0);
	ck_assert_int_eq(ro, false);
	noinput = true;
	ro = false;
	ck_assert_int_eq(next_fd(1, &ro), 3);
	ck_assert_int_eq(ro, false);
	ck_assert_int_eq(next_fd(3, &ro), 4);
	ck_assert_int_eq(ro, false);
	ck_assert_int_eq(next_fd(4, &ro), 1);
	ck_assert_int_eq(ro, false);
}
END_TEST

/* set_io_channels() must register this concentrator once in the message
 * block's conc_array, be idempotent for an already-registered pid, and
 * append a new entry for a different pid. */
START_TEST(test_set_io_channels)
{
	pid = 2000; /* static in dgsh-conc.c */
	nfd = 4; /* ditto */
	multiple_inputs = false; /* ditto */
	ck_assert_int_eq(set_io_channels(chosen_mb), 0);
	ck_assert_int_eq(chosen_mb->n_concs, 1);
	ck_assert_int_eq(chosen_mb->conc_array[0].pid, 2000);
	ck_assert_int_eq(chosen_mb->conc_array[0].input_fds, -1);
	ck_assert_int_eq(chosen_mb->conc_array[0].output_fds, -1);
	ck_assert_int_eq(chosen_mb->conc_array[0].multiple_inputs, false);
	ck_assert_int_eq(chosen_mb->conc_array[0].n_proc_pids, 2);
	ck_assert_int_eq(chosen_mb->conc_array[0].proc_pids[0], 100);
	ck_assert_int_eq(chosen_mb->conc_array[0].proc_pids[1], 103);
	/* Exists with channels set: keep as it is */
	ck_assert_int_eq(set_io_channels(chosen_mb), 0);
	ck_assert_int_eq(chosen_mb->n_concs, 1);
	ck_assert_int_eq(chosen_mb->conc_array[0].pid, 2000);
	ck_assert_int_eq(chosen_mb->conc_array[0].input_fds, -1);
	ck_assert_int_eq(chosen_mb->conc_array[0].output_fds, -1);
	/* Not exists: set channels (same pi, same channels as before */
	pid = 2001; /* static in dgsh-conc.c */
	multiple_inputs = true;
	ck_assert_int_eq(set_io_channels(chosen_mb), 0);
	ck_assert_int_eq(chosen_mb->n_concs, 2);
	DPRINTF(4, "%d", chosen_mb->conc_array[0].pid);
	ck_assert_int_eq(chosen_mb->conc_array[1].pid, 2001);
	ck_assert_int_eq(chosen_mb->conc_array[1].input_fds, -1);
	ck_assert_int_eq(chosen_mb->conc_array[1].output_fds, -1);
	ck_assert_int_eq(chosen_mb->conc_array[1].multiple_inputs, true);
	ck_assert_int_eq(chosen_mb->conc_array[1].n_proc_pids, 2);
	ck_assert_int_eq(chosen_mb->conc_array[1].proc_pids[0], 101);
	ck_assert_int_eq(chosen_mb->conc_array[1].proc_pids[1], 103);
}
END_TEST

/* Suite "Connect": fd passing and I/O connection establishment tests. */
Suite *
suite_connect(void)
{
	Suite *s = suite_create("Connect");
	TCase *tc_aiof = tcase_create("alloc io fds");
	tcase_add_checked_fixture(tc_aiof, setup_test_alloc_io_fds, retire_test_alloc_io_fds);
	tcase_add_test(tc_aiof, test_alloc_io_fds);
	suite_add_tcase(s, tc_aiof);
	TCase *tc_af = tcase_create("alloc fds");
	tcase_add_checked_fixture(tc_af, NULL, NULL);
	tcase_add_test(tc_af, test_alloc_fds);
	suite_add_tcase(s, tc_af);
	TCase *tc_trw = tcase_create("test read/write fd");
	tcase_add_checked_fixture(tc_trw, NULL, NULL);
	tcase_add_test(tc_trw, test_read_write_fd);
	suite_add_tcase(s, tc_trw);
	TCase *tc_rif = tcase_create("read input fds");
	tcase_add_checked_fixture(tc_rif, setup_test_read_input_fds, retire_test_read_input_fds);
	tcase_add_test(tc_rif, test_read_input_fds);
	suite_add_tcase(s, tc_rif);
	TCase *tc_gop = tcase_create("get origin pid");
	tcase_add_checked_fixture(tc_gop, setup_test_get_origin_pid, retire_test_get_origin_pid);
	tcase_add_test(tc_gop, test_get_origin_pid);
	suite_add_tcase(s, tc_gop);
	TCase *tc_gefn = tcase_create("get expected fds number");
	tcase_add_checked_fixture(tc_gefn, setup_test_get_expected_fds_n, retire_test_get_expected_fds_n);
	tcase_add_test(tc_gefn, test_get_expected_fds_n);
	suite_add_tcase(s, tc_gefn);
	TCase *tc_gpfn = tcase_create("get provided fds number");
	tcase_add_checked_fixture(tc_gpfn, setup_test_get_provided_fds_n, retire_test_get_provided_fds_n);
	tcase_add_test(tc_gpfn, test_get_provided_fds_n);
	suite_add_tcase(s, tc_gpfn);
	TCase *tc_eic = tcase_create("establish io connections");
	tcase_add_checked_fixture(tc_eic, setup_test_establish_io_connections, retire_test_establish_io_connections);
	tcase_add_test(tc_eic, test_establish_io_connections);
	suite_add_tcase(s, tc_eic);
	TCase *tc_anc = tcase_create("alloc node connections");
	tcase_add_checked_fixture(tc_anc, NULL, NULL);
	tcase_add_test(tc_anc, test_alloc_node_connections);
	suite_add_tcase(s, tc_anc);
	TCase *tc_sd = tcase_create("set dispatcher");
	tcase_add_checked_fixture(tc_sd, setup_test_set_dispatcher, retire_test_set_dispatcher);
	tcase_add_test(tc_sd, test_set_dispatcher);
	suite_add_tcase(s, tc_sd);
	/* Need to also simulate sendmsg; make sure it works.
	 */
	TCase *tc_awof = tcase_create("write output fds");
	tcase_add_checked_fixture(tc_awof, setup_test_write_output_fds, retire_test_write_output_fds);
	tcase_add_test(tc_awof, test_write_output_fds);
	suite_add_tcase(s, tc_awof);
	return s;
}

/* Suite "Solve": graph solution and I/O constraint satisfaction tests. */
Suite *
suite_solve(void)
{
	Suite *s = suite_create("Solve");
	TCase *tc_wc = tcase_create("write concs");
	tcase_add_checked_fixture(tc_wc, setup_test_write_concs, retire_test_write_concs);
	tcase_add_test(tc_wc, test_write_concs);
	suite_add_tcase(s, tc_wc);
	TCase *tc_rc = tcase_create("read concs");
	tcase_add_checked_fixture(tc_rc, setup_test_read_concs, retire_test_read_concs);
	tcase_add_test(tc_rc, test_read_concs);
	suite_add_tcase(s, tc_rc);
	TCase *tc_rgs = tcase_create("read graph solution");
	tcase_add_checked_fixture(tc_rgs, setup_test_read_graph_solution, retire_test_read_graph_solution);
	tcase_add_test(tc_rgs, test_read_graph_solution);
	suite_add_tcase(s, tc_rgs);
	TCase *tc_wgs = tcase_create("write graph solution");
	tcase_add_checked_fixture(tc_wgs, setup_test_write_graph_solution, retire_test_write_graph_solution);
	tcase_add_test(tc_wgs, test_write_graph_solution);
	suite_add_tcase(s, tc_wgs);
	TCase *tc_ssg = tcase_create("solve dgsh graph");
	tcase_add_checked_fixture(tc_ssg, setup_test_solve_graph, retire_test_solve_graph);
	tcase_add_test(tc_ssg, test_solve_graph);
	suite_add_tcase(s, tc_ssg);
	TCase *tc_ccf = tcase_create("calculate conc fds");
	tcase_add_checked_fixture(tc_ccf, setup_test_calculate_conc_fds, retire_test_calculate_conc_fds);
	tcase_add_test(tc_ccf, test_calculate_conc_fds);
	suite_add_tcase(s, tc_ccf);
	TCase *tc_fgs = tcase_create("free graph solution");
	tcase_add_checked_fixture(tc_fgs, setup_test_free_graph_solution, retire_test_free_graph_solution);
	tcase_add_test(tc_fgs, test_free_graph_solution);
	suite_add_tcase(s, tc_fgs);
	TCase *tc_nmc = tcase_create("node match constraints");
	tcase_add_checked_fixture(tc_nmc, setup_test_node_match_constraints, retire_test_node_match_constraints);
	tcase_add_test(tc_nmc, test_node_match_constraints);
	suite_add_tcase(s, tc_nmc);
	TCase *tc_dmic = tcase_create("dry match io constraints");
	tcase_add_checked_fixture(tc_dmic, setup_test_dry_match_io_constraints, retire_test_dry_match_io_constraints);
	tcase_add_test(tc_dmic, test_dry_match_io_constraints);
	suite_add_tcase(s, tc_dmic);
	TCase *tc_sic = tcase_create("satisfy io constraints");
	tcase_add_checked_fixture(tc_sic, setup_test_satisfy_io_constraints, retire_test_satisfy_io_constraints);
	tcase_add_test(tc_sic, test_satisfy_io_constraints);
	suite_add_tcase(s, tc_sic);
	TCase *tc_mov = tcase_create("move");
	tcase_add_checked_fixture(tc_mov, setup_test_move, retire_test_move);
	tcase_add_test(tc_mov, test_move);
	suite_add_tcase(s, tc_mov);
	TCase *tc_rmf = tcase_create("record move flexible");
	tcase_add_checked_fixture(tc_rmf, NULL, NULL);
	tcase_add_test(tc_rmf, test_record_move_flexible);
	suite_add_tcase(s, tc_rmf);
	TCase *tc_rmu = tcase_create("record move unbalanced");
	tcase_add_checked_fixture(tc_rmu, NULL, NULL);
	tcase_add_test(tc_rmu, test_record_move_unbalanced);
	suite_add_tcase(s, tc_rmu);
	/* Disabled test cases kept for reference:
	TCase *tc_ec = tcase_create("evaluate constraints");
	tcase_add_checked_fixture(tc_ec, setup_test_eval_constraints, retire_test_eval_constraints);
	tcase_add_test(tc_ec, test_eval_constraints);
	suite_add_tcase(s, tc_ec);
	*TCase *tc_aei = tcase_create("assign edge instances");
	tcase_add_checked_fixture(tc_aei, setup_test_assign_edge_instances, retire_test_assign_edge_instances);
	tcase_add_test(tc_aei, test_assign_edge_instances);
	suite_add_tcase(s, tc_aei);
	*/
	TCase *tc_repa = tcase_create("reallocate edge pointer array");
	tcase_add_checked_fixture(tc_repa, setup_test_reallocate_edge_pointer_array, retire_test_reallocate_edge_pointer_array);
	tcase_add_test(tc_repa, test_reallocate_edge_pointer_array);
	suite_add_tcase(s, tc_repa);
	TCase *tc_mcea = tcase_create("make compact edge array");
	tcase_add_checked_fixture(tc_mcea, setup_test_make_compact_edge_array, retire_test_make_compact_edge_array);
	tcase_add_test(tc_mcea, test_make_compact_edge_array);
	suite_add_tcase(s, tc_mcea);
	return s;
}

/* Suite "Broadcast": message-block (de)serialisation and negotiation
 * bookkeeping tests. */
Suite *
suite_broadcast(void)
{
	Suite *s = suite_create("Broadcast");
	TCase *tc_wmb = tcase_create("write message block");
	tcase_add_checked_fixture(tc_wmb, setup_test_write_message_block, retire_test_write_message_block);
	tcase_add_test(tc_wmb, test_write_message_block);
	suite_add_tcase(s, tc_wmb);
	TCase *tc_trm = tcase_create("read message block");
	tcase_add_checked_fixture(tc_trm, setup_test_read_message_block, retire_test_read_message_block);
	tcase_add_test(tc_trm, test_read_message_block);
	suite_add_tcase(s, tc_trm);
	TCase *tc_trc = tcase_create("read chunk");
	tcase_add_checked_fixture(tc_trc, setup_test_read_chunk, NULL);
	tcase_add_test(tc_trc, test_read_chunk);
	suite_add_tcase(s, tc_trc);
	TCase *tc_clr = tcase_create("call read");
	tcase_add_checked_fixture(tc_clr, NULL, NULL);
	tcase_add_test(tc_clr, test_call_read);
	suite_add_tcase(s, tc_clr);
	TCase *tc_acm = tcase_create("alloc copy message block");
	tcase_add_checked_fixture(tc_acm, NULL, NULL);
	tcase_add_test(tc_acm, test_alloc_copy_mb);
	suite_add_tcase(s, tc_acm);
	TCase *tc_acn = tcase_create("alloc copy nodes");
	tcase_add_checked_fixture(tc_acn, setup_test_alloc_copy_nodes, retire_test_alloc_copy_nodes);
	tcase_add_test(tc_acn, test_alloc_copy_nodes);
	suite_add_tcase(s, tc_acn);
	TCase *tc_ace = tcase_create("alloc copy edges");
	tcase_add_checked_fixture(tc_ace, setup_test_alloc_copy_edges, retire_test_alloc_copy_edges);
	tcase_add_test(tc_ace, test_alloc_copy_edges);
	suite_add_tcase(s, tc_ace);
	TCase *tc_acg = tcase_create("alloc copy graph solution");
	tcase_add_checked_fixture(tc_acg, setup_test_alloc_copy_graph_solution, retire_test_alloc_copy_graph_solution);
	tcase_add_test(tc_acg, test_alloc_copy_graph_solution);
	suite_add_tcase(s, tc_acg);
	TCase *tc_acc = tcase_create("alloc copy concs");
	tcase_add_checked_fixture(tc_acc, setup_test_alloc_copy_concs, retire_test_alloc_copy_concs);
	tcase_add_test(tc_acc, test_alloc_copy_concs);
	suite_add_tcase(s, tc_acc);
	TCase *tc_acp = tcase_create("alloc copy proc pids");
	tcase_add_test(tc_acp, test_alloc_copy_proc_pids);
	suite_add_tcase(s, tc_acp);
	TCase *tc_cr = tcase_create("check read");
	tcase_add_checked_fixture(tc_cr, NULL, NULL);
	tcase_add_test(tc_cr, test_check_read);
	suite_add_tcase(s, tc_cr);
	/* Disabled test case kept for reference:
	*TCase *tc_pid = tcase_create("point io direction");
	tcase_add_checked_fixture(tc_pid, setup_test_point_io_direction, retire_test_point_io_direction);
	tcase_add_test(tc_pid, test_point_io_direction);
	suite_add_tcase(s, tc_pid);
	*/
	TCase *tc_ar = tcase_create("analyse read");
	tcase_add_checked_fixture(tc_ar, setup_test_analyse_read, retire_test_analyse_read);
	tcase_add_test(tc_ar, test_analyse_read);
	suite_add_tcase(s, tc_ar);
	TCase *tc_fm = tcase_create("free message block");
	tcase_add_checked_fixture(tc_fm, setup_test_free_mb, NULL);
	tcase_add_test(tc_fm, test_free_mb);
	suite_add_tcase(s, tc_fm);
	TCase *tc_fsn = tcase_create("fill dgsh node");
	tcase_add_checked_fixture(tc_fsn, setup_test_fill_node, NULL);
	tcase_add_test(tc_fsn, test_fill_node);
	suite_add_tcase(s, tc_fsn);
	TCase *tc_tasn = tcase_create("try add dgsh node");
	tcase_add_checked_fixture(tc_tasn, setup_test_try_add_dgsh_node, retire_test_try_add_dgsh_node);
	tcase_add_test(tc_tasn, test_try_add_dgsh_node);
	suite_add_tcase(s, tc_tasn);
	TCase *tc_tase = tcase_create("try add dgsh edge");
	tcase_add_checked_fixture(tc_tase, setup_test_try_add_dgsh_edge, retire_test_try_add_dgsh_edge);
	tcase_add_test(tc_tase, test_try_add_dgsh_edge);
	suite_add_tcase(s, tc_tase);
	TCase *tc_ae = tcase_create("add edge");
	tcase_add_checked_fixture(tc_ae, setup_test_add_edge, retire_test_add_edge);
	tcase_add_test(tc_ae, test_add_edge);
	suite_add_tcase(s, tc_ae);
	TCase *tc_fse = tcase_create("fill dgsh edge");
	tcase_add_checked_fixture(tc_fse, setup_test_fill_dgsh_edge, retire_test_fill_dgsh_edge);
	tcase_add_test(tc_fse, test_fill_dgsh_edge);
	suite_add_tcase(s, tc_fse);
	TCase *tc_lse = tcase_create("lookup dgsh edge");
	tcase_add_checked_fixture(tc_lse, setup_test_lookup_dgsh_edge, retire_test_lookup_dgsh_edge);
	tcase_add_test(tc_lse, test_lookup_dgsh_edge);
	suite_add_tcase(s, tc_lse);
	TCase *tc_an = tcase_create("add node");
	tcase_add_checked_fixture(tc_an, setup_test_add_node, retire_test_add_node);
	tcase_add_test(tc_an, test_add_node);
	suite_add_tcase(s, tc_an);
	TCase *tc_cnmb = tcase_create("construct message block");
	tcase_add_checked_fixture(tc_cnmb, NULL, NULL);
	tcase_add_test(tc_cnmb, test_construct_message_block);
	suite_add_tcase(s, tc_cnmb);
	TCase *tc_gev = tcase_create("get environment variable");
	tcase_add_checked_fixture(tc_gev, NULL, NULL);
	tcase_add_test(tc_gev, test_get_env_var);
	suite_add_tcase(s, tc_gev);
	TCase *tc_gevs = tcase_create("get environment variables");
	tcase_add_checked_fixture(tc_gevs, NULL, NULL);
	tcase_add_test(tc_gevs, test_get_environment_vars);
	suite_add_tcase(s, tc_gevs);
	TCase *tc_vi = tcase_create("validate input");
	tcase_add_checked_fixture(tc_vi, NULL, NULL);
	tcase_add_test(tc_vi, test_validate_input);
	suite_add_tcase(s, tc_vi);
	TCase *tc_sf = tcase_create("set fds");
	tcase_add_checked_fixture(tc_sf, setup_test_set_fds, retire_test_set_fds);
	tcase_add_test(tc_sf, test_set_fds);
	suite_add_tcase(s, tc_sf);
	TCase *tc_sn = tcase_create("dgsh negotiate");
	tcase_add_checked_fixture(tc_sn, setup, retire);
	tcase_add_test(tc_sn, test_dgsh_negotiate);
	suite_add_tcase(s, tc_sn);
	return s;
}

/* Suite "Concentrator": dgsh-conc fd routing tests.
 * NOTE(review): tc_si is created but never populated or registered --
 * looks like a leftover; confirm before removing. */
Suite *
suite_conc(void)
{
	Suite *s = suite_create("Concentrator");
	TCase *tc_tn = tcase_create("test next_fd");
	TCase *tc_ir = tcase_create("test is_ready");
	TCase *tc_si = tcase_create("set io");
	TCase *tc_sich = tcase_create("set io channels");
	tcase_add_checked_fixture(tc_tn, NULL, NULL);
	tcase_add_test(tc_tn, test_next_fd);
	suite_add_tcase(s, tc_tn);
	tcase_add_checked_fixture(tc_ir, setup_test_is_ready, retire_test_is_ready);
	tcase_add_test(tc_ir, test_is_ready);
	suite_add_tcase(s, tc_ir);
	tcase_add_checked_fixture(tc_sich, setup_test_set_io_channels, retire_test_set_io_channels);
	tcase_add_test(tc_sich, test_set_io_channels);
	suite_add_tcase(s, tc_sich);
	return s;
}

/* Run one suite; returns 1 when every test passed, 0 otherwise
 * (note the inverted sense relative to "number failed"). */
int
run_suite(Suite *s)
{
	int number_failed;
	SRunner *sr = srunner_create(s);
	srunner_run_all(sr, CK_VERBOSE);
	number_failed = srunner_ntests_failed(sr);
	srunner_free(sr);
	return (number_failed == 0) ? 1 : 0;
}

int
run_suite_connect(void)
{
	Suite *s = suite_connect();
	return run_suite(s);
}

int
run_suite_solve(void)
{
	Suite *s = suite_solve();
	return run_suite(s);
}

int
run_suite_broadcast(void)
{
	Suite *s = suite_broadcast();
	return run_suite(s);
}

int
run_suite_conc(void)
{
	Suite *s = suite_conc();
	return run_suite(s);
}

/* Output is not appropriate; only pass fail. */
/* NOTE(review): the failed_* variables actually hold run_suite()'s
 * success flag (1 == all tests passed), so the && below correctly
 * yields EXIT_SUCCESS only when all four suites pass -- the naming
 * is merely misleading. */
int
main()
{
	int failed_neg, failed_sol, failed_conn, failed_conc;
	failed_neg = run_suite_broadcast();
	failed_sol = run_suite_solve();
	failed_conn = run_suite_connect();
	failed_conc = run_suite_conc();
	return (failed_neg && failed_sol && failed_conn && failed_conc) ? EXIT_SUCCESS : EXIT_FAILURE;
}
{ "pile_set_name": "Github" }
//
//     Generated by class-dump 3.5 (64 bit).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//

// Reverse-engineered (class-dump) declaration of an account-registration
// view controller.  This header reflects only what is recoverable from the
// compiled binary: ivar layout and method selectors.  Property attributes,
// nullability, and ownership qualifiers are NOT recoverable from the dump.

#import "WCAccountBaseViewController.h"

@interface WCAccountRegByOldPhoneViewController : WCAccountBaseViewController
{
    // Delegate stored as a raw ivar; ownership (weak vs. strong) cannot be
    // determined from the dump -- TODO confirm before relying on lifetime.
    id <WCAccountRegByOldPhoneViewControllerDelegate> m_delegate;
}

// ARC-synthesized C++-style destructor emitted into the binary.
- (void).cxx_destruct;
// Refreshes the controller's table view content.
- (void)reloadTableView;
// Configures the navigation bar (presumably called during view setup --
// NOTE(review): exact call site not visible in this dump).
- (void)initNavigationBar;
// Sets the delegate ivar above; arg1 is the untyped dump name for the
// delegate parameter.
- (void)setDelegate:(id)arg1;
- (void)viewDidLoad;
- (id)init;

@end
{ "pile_set_name": "Github" }
// RUN: %clang_cc1 -triple %itanium_abi_triple -emit-llvm -o - %s | FileCheck %s struct D; struct B { virtual D& operator = (const D&); }; struct D : B { D(); virtual void a(); }; void D::a() {} // CHECK: @_ZTV1D = {{.*}} @_ZN1DaSERKS_ // CHECK: define linkonce_odr {{.*}} @_ZN1DaSERKS_
{ "pile_set_name": "Github" }
-- Protecting from addons since we use this in secure code.
local cos = math.cos;
local sin = math.sin;
local atan2 = math.atan2;
local sqrt = math.sqrt;

-- Free-function 2D vector math.  All functions take/return unpacked
-- (x, y) pairs via Lua multiple return values rather than tables.

-- Returns (x, y) scaled by scalar.
function Vector2D_ScaleBy(scalar, x, y)
	return x * scalar, y * scalar;
end

-- Returns (x, y) divided componentwise by divisor.
-- NOTE(review): no guard against divisor == 0; the result is inf/nan.
function Vector2D_DivideBy(divisor, x, y)
	return x / divisor, y / divisor;
end

-- Componentwise sum of two vectors.
function Vector2D_Add(leftX, leftY, rightX, rightY)
	return leftX + rightX, leftY + rightY;
end

-- Componentwise difference (left - right).
function Vector2D_Subtract(leftX, leftY, rightX, rightY)
	return leftX - rightX, leftY - rightY;
end

-- 2D cross product.  Returns a SINGLE scalar (the z-component of the
-- 3D cross product), not a vector.
function Vector2D_Cross(leftX, leftY, rightX, rightY)
	return leftX * rightY - leftY * rightX;
end

-- Dot product; returns a single scalar.
function Vector2D_Dot(leftX, leftY, rightX, rightY)
	return leftX * rightX + leftY * rightY;
end

-- Squared length (avoids the sqrt; useful for comparisons).
function Vector2D_GetLengthSquared(x, y)
	return Vector2D_GetLengthSquared_impl ~= nil and Vector2D_GetLengthSquared_impl or Vector2D_Dot(x, y, x, y);
end

-- Euclidean length.
function Vector2D_GetLength(x, y)
	return sqrt(Vector2D_GetLengthSquared(x, y));
end

-- Unit vector in the direction of (x, y).
-- NOTE(review): for the zero vector this divides by zero (nan, nan).
function Vector2D_Normalize(x, y)
	return Vector2D_DivideBy(Vector2D_GetLength(x, y), x, y);
end

-- Signed angle (radians) from the left vector to the right vector,
-- computed as atan2(cross, dot).
function Vector2D_CalculateAngleBetween(leftX, leftY, rightX, rightY)
	return atan2(Vector2D_Cross(leftX, leftY, rightX, rightY), Vector2D_Dot(leftX, leftY, rightX, rightY));
end

-- Rotates the direction (x, y) counter-clockwise by rotationRadians
-- using the standard 2D rotation matrix.
function Vector2D_RotateDirection(rotationRadians, x, y)
	local cosValue = cos(rotationRadians);
	local sinValue = sin(rotationRadians);
	return x * cosValue - y * sinValue, x * sinValue + y * cosValue;
end

-- Object-style wrapper over the free functions above.
Vector2DMixin = {};

-- Factory: builds a mixin-backed vector object with fields x and y.
function CreateVector2D(x, y)
	local vector = CreateFromMixins(Vector2DMixin);
	vector:OnLoad(x, y);
	return vector;
end

-- nil-safe equality: compares componentwise when both are non-nil,
-- otherwise falls back to identity (so nil == nil is true).
function AreVector2DEqual(left, right)
	if left and right then
		return left:IsEqualTo(right);
	end
	return left == right;
end

-- Initializer invoked by CreateVector2D.
function Vector2DMixin:OnLoad(x, y)
	self:SetXY(x, y);
end

-- Exact componentwise equality (no epsilon tolerance).
function Vector2DMixin:IsEqualTo(otherVector)
	return self.x == otherVector.x and self.y == otherVector.y;
end

-- Returns the components as two values.
function Vector2DMixin:GetXY()
	return self.x, self.y;
end

-- Sets both components.
function Vector2DMixin:SetXY(x, y)
	self.x = x;
	self.y = y;
end

-- In-place scale.
function Vector2DMixin:ScaleBy(scalar)
	self:SetXY(Vector2D_ScaleBy(scalar, self:GetXY()));
end

-- In-place componentwise division.
function Vector2DMixin:DivideBy(scalar)
	self:SetXY(Vector2D_DivideBy(scalar, self:GetXY()));
end

-- In-place addition of another vector object.
function Vector2DMixin:Add(other)
	self:SetXY(Vector2D_Add(self.x, self.y, other:GetXY()));
end

-- In-place subtraction of another vector object.
function Vector2DMixin:Subtract(other)
	self:SetXY(Vector2D_Subtract(self.x, self.y, other:GetXY()));
end

-- NOTE(review): looks like a bug.  Vector2D_Cross returns a single
-- scalar, so SetXY receives (scalar, nil) and this call clears self.y.
-- Compare Dot below, which correctly RETURNS the scalar instead of
-- storing it.  Confirm intended semantics before relying on this method.
function Vector2DMixin:Cross(other)
	self:SetXY(Vector2D_Cross(self.x, self.y, other:GetXY()));
end

-- Dot product with another vector object; returns a scalar.
function Vector2DMixin:Dot(other)
	return Vector2D_Dot(self.x, self.y, other:GetXY());
end

-- Squared length of this vector.
function Vector2DMixin:GetLengthSquared()
	return Vector2D_GetLengthSquared(self:GetXY());
end

-- Length of this vector.
function Vector2DMixin:GetLength()
	return Vector2D_GetLength(self:GetXY());
end

-- In-place normalization (see Vector2D_Normalize zero-length caveat).
function Vector2DMixin:Normalize()
	self:SetXY(Vector2D_Normalize(self:GetXY()));
end

-- In-place counter-clockwise rotation.
function Vector2DMixin:RotateDirection(rotationRadians)
	self:SetXY(Vector2D_RotateDirection(rotationRadians, self:GetXY()));
end

-- Returns a new, independent vector with the same components.
function Vector2DMixin:Clone()
	return CreateVector2D(self:GetXY());
end
{ "pile_set_name": "Github" }
<!-- 24dp white vector drawable: appears to be a "new document"/"add file"
     icon.  The first path draws a page silhouette with a plus sign cut out
     via the evenOdd fill rule; the second path is the folded page corner
     rendered at half alpha. -->
<vector xmlns:android="http://schemas.android.com/apk/res/android"
    android:width="24dp"
    android:height="24dp"
    android:viewportWidth="24"
    android:viewportHeight="24">
  <path
      android:pathData="M4,4C4,2.8954 4.8954,2 6,2H14V6C14,7.1046 14.8954,8 16,8H20V20C20,21.1046 19.1046,22 18,22H6C4.8954,22 4,21.1046 4,20V4ZM11,10H13V13H16V15H13V18H11V15H8V13H11V10Z"
      android:fillColor="#ffffff"
      android:fillType="evenOdd"/>
  <path
      android:pathData="M14,2L20,8H16C14.8954,8 14,7.1046 14,6V2Z"
      android:strokeAlpha="0.5"
      android:fillColor="#ffffff"
      android:fillAlpha="0.5"/>
</vector>
{ "pile_set_name": "Github" }
package de.metas.inoutcandidate.modelvalidator;

import org.adempiere.ad.modelvalidator.annotations.ModelChange;
import org.adempiere.ad.modelvalidator.annotations.Validator;
import org.adempiere.model.InterfaceWrapperHelper;
import org.compiere.model.ModelValidator;

import de.metas.bpartner.BPartnerId;
import de.metas.bpartner.service.IBPartnerBL;
import de.metas.inoutcandidate.api.IShipmentSchedulePA;
import de.metas.inoutcandidate.invalidation.IShipmentScheduleInvalidateBL;
import de.metas.inoutcandidate.model.I_M_ShipmentSchedule;
import de.metas.interfaces.I_C_BPartner;
import de.metas.lang.SOTrx;
import de.metas.util.Services;

/**
 * Model interceptor for {@link I_C_BPartner} that keeps shipment schedules in
 * sync with the partner's {@code AllowConsolidateInOut} flag.
 */
@Validator(I_C_BPartner.class)
public class C_BPartner_ShipmentSchedule
{
	/**
	 * Fired after a business partner is created or its
	 * {@code AllowConsolidateInOut} column changes: computes the partner's
	 * effective consolidate-flag (for the SALES side), then streams all
	 * unprocessed shipment schedules of that partner whose flag differs
	 * (note the negation in the query argument) and updates each one.
	 *
	 * @param bpartner the changed business partner record
	 */
	@ModelChange(timings = { ModelValidator.TYPE_AFTER_NEW, ModelValidator.TYPE_AFTER_CHANGE }, ifColumnsChanged = { I_C_BPartner.COLUMNNAME_AllowConsolidateInOut })
	public void inValidateScheds(final I_C_BPartner bpartner)
	{
		//
		// Services
		final IShipmentSchedulePA shipmentSchedulesRepo = Services.get(IShipmentSchedulePA.class);
		final IBPartnerBL bpartnerBL = Services.get(IBPartnerBL.class);

		final boolean isBPAllowConsolidateInOut = bpartnerBL.isAllowConsolidateInOutEffective(bpartner, SOTrx.SALES);

		final BPartnerId bpartnerId = BPartnerId.ofRepoId(bpartner.getC_BPartner_ID());
		// query for schedules whose current flag is the OPPOSITE of the new
		// effective value, i.e. exactly those that need updating
		shipmentSchedulesRepo
				.streamUnprocessedByPartnerIdAndAllowConsolidateInOut(bpartnerId, !isBPAllowConsolidateInOut)
				.forEach(sched -> setAllowConsolidateInOutAndSave(sched, isBPAllowConsolidateInOut));
	}

	/**
	 * Sets the given flag on one shipment schedule and saves it;
	 * no-op if the schedule already has that value.
	 *
	 * @param sched the shipment schedule to update
	 * @param allowConsolidateInOut the new flag value to apply
	 */
	private void setAllowConsolidateInOutAndSave(final I_M_ShipmentSchedule sched, final boolean allowConsolidateInOut)
	{
		if (sched.isAllowConsolidateInOut() == allowConsolidateInOut)
		{
			return;
		}

		sched.setAllowConsolidateInOut(allowConsolidateInOut);
		InterfaceWrapperHelper.saveRecord(sched);

		// note that we do not need to invalidate the current sched explicitly..
		// it will be updated as part of the segment, unless it has delivery rule force..
		// and if it has that rule, then the partner change makes no difference to it, anyways.
		Services.get(IShipmentScheduleInvalidateBL.class).notifySegmentChangedForShipmentSchedule(sched);
	}
}
{ "pile_set_name": "Github" }
/*!
@file
Forward declares `boost::hana::concat`.

@copyright Louis Dionne 2013-2017
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt)
 */

#ifndef BOOST_HANA_FWD_CONCAT_HPP
#define BOOST_HANA_FWD_CONCAT_HPP

#include <boost/hana/config.hpp>
#include <boost/hana/core/when.hpp>


BOOST_HANA_NAMESPACE_BEGIN
    //! Combine two monadic structures together.
    //! @ingroup group-MonadPlus
    //!
    //! Given two monadic structures, `concat` combines them together and
    //! returns a new monadic structure. The exact definition of `concat`
    //! will depend on the exact model of MonadPlus at hand, but for
    //! sequences it corresponds intuitively to simple concatenation.
    //!
    //! Also note that combination is not required to be commutative.
    //! In other words, there is no requirement that
    //! @code
    //!     concat(xs, ys) == concat(ys, xs)
    //! @endcode
    //! and indeed it does not hold in general.
    //!
    //!
    //! Signature
    //! ---------
    //! Given a `MonadPlus` `M`, the signature of `concat` is
    //! @f$ \mathtt{concat} : M(T) \times M(T) \to M(T) @f$.
    //!
    //! @param xs, ys
    //! Two monadic structures to combine together.
    //!
    //!
    //! Example
    //! -------
    //! @include example/concat.cpp
#ifdef BOOST_HANA_DOXYGEN_INVOKED
    constexpr auto concat = [](auto&& xs, auto&& ys) {
        return tag-dispatched;
    };
#else
    // Tag-dispatch customization point: models of MonadPlus specialize
    // concat_impl for their tag M.  The unspecialized form inherits from the
    // when<true> case, which (defined elsewhere) produces a compile error
    // for tags with no model.
    template <typename M, typename = void>
    struct concat_impl : concat_impl<M, when<true>> { };

    // Function-object front end; the operator() definition lives in the
    // non-fwd header and forwards to concat_impl<tag>::apply.
    struct concat_t {
        template <typename Xs, typename Ys>
        constexpr auto operator()(Xs&& xs, Ys&& ys) const;
    };

    constexpr concat_t concat{};
#endif
BOOST_HANA_NAMESPACE_END

#endif // !BOOST_HANA_FWD_CONCAT_HPP
{ "pile_set_name": "Github" }
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # GB2312 most frequently used character table # # Char to FreqOrder table , from hz6763 # 512 --> 0.79 -- 0.79 # 1024 --> 0.92 -- 0.13 # 2048 --> 0.98 -- 0.06 # 6768 --> 1.00 -- 0.02 # # Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 # Random Distribution Ration = 512 / (3755 - 512) = 0.157 # # Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 GB2312_TABLE_SIZE = 3760 GB2312_CHAR_TO_FREQ_ORDER = ( 1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, 2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, 2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, 1636,3112, 889,1286, 
953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, 1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, 1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, 2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, 3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, 1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, 2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, 2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, 1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, 3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, 1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, 2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, 1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, 3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835, 1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, 2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, 1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, 3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, 3678, 126, 
770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, 3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, 1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648, 3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, 2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, 1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, 1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, 4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, 3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, 3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, 1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, 2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, 1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, 1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, 3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, 3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, 4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, 3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, 
1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, 1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, 4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, 3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, 1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, 1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, 2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, 3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246, 4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, 3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, 2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, 2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, 2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, 2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414, 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, 
3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, 2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, 2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, 1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, 2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, 1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, 1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, 1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, 2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, 3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, 2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, 2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, 2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, 3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, 1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, 1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, 2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, 1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, 3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, 1228,2115,2219, 347,2223,2733, 
735,1518,3003,2355,3134,1764,3948,3329,1888,2424, 1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, 3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, 2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, 1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, 4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, 1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, 1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, 3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, 1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, 1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, 1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, 1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982, 3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, 4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, 3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, 2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, 2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, 1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, 3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, 
2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, 1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, 1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, 2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, 2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, 3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, 4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, 3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, 3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, 2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, 1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, 3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, 4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, 2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, 1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, 1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, 1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, 3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 
995,1569, 769, 1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, 1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, 2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, 2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, 2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, 1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, 1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, 2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, 1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, 1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, 2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, 2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, 3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, 1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, 4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, 3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, 1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, 3007,2474, 156, 902,1338,1740,2574, 537,2518, 
973,2282,2216,2433,1928, 138,2903, 1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, 4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, 1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, 2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, 1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, 1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, 3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, 2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, 1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, 1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, 1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, 3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, 2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, 3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, 3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, 3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, 2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, 786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020, 2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, 1129, 
875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, 1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, 3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, 3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881, 1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, 1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, 3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, 2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, 2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, 1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, 3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, 4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, 1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, 2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, 3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, 3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, 1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, 2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, 1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, 
386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, 1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, 1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, 1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, 1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, 1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, #last 512 )
{ "pile_set_name": "Github" }
<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <modelVersion>4.0.0</modelVersion> <parent> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-samples-protobuf</artifactId> <version>1.0-SNAPSHOT</version> <relativePath>../pom.xml</relativePath> </parent> <artifactId>protobuf-consumer</artifactId> <packaging>jar</packaging> <name>${project.artifactId}</name> <description>The demo consumer module of dubbo project</description> <properties> <skip_maven_deploy>true</skip_maven_deploy> <zookeeper.version>3.4.3</zookeeper.version> <dubbo.configcenter.zookeeper.version>2.7.4</dubbo.configcenter.zookeeper.version> </properties> <dependencies> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-metadata-report-zookeeper</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-serialization-protobuf</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-serialization-hessian2</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> 
<artifactId>dubbo-serialization-kryo</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-registry-multicast</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-registry-nacos</artifactId> </dependency> <dependency> <groupId>com.alibaba.nacos</groupId> <artifactId>nacos-client</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-registry-zookeeper</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-configcenter-zookeeper</artifactId> <version>${dubbo.configcenter.zookeeper.version}</version> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-configcenter-nacos</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-metadata-report-nacos</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-config-spring</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-rpc-dubbo</artifactId> </dependency> <dependency> <groupId>org.apache.dubbo</groupId> <artifactId>dubbo-remoting-netty4</artifactId> </dependency> </dependencies> </project>
{ "pile_set_name": "Github" }
{ "name": "xtend", "version": "4.0.1", "description": "extend like a boss", "keywords": [ "extend", "merge", "options", "opts", "object", "array" ], "author": { "name": "Raynos", "email": "raynos2@gmail.com" }, "repository": { "type": "git", "url": "git://github.com/Raynos/xtend.git" }, "main": "immutable", "scripts": { "test": "node test" }, "dependencies": {}, "devDependencies": { "tape": "~1.1.0" }, "homepage": "https://github.com/Raynos/xtend", "contributors": [ { "name": "Jake Verbaten" }, { "name": "Matt Esch" } ], "bugs": { "url": "https://github.com/Raynos/xtend/issues", "email": "raynos2@gmail.com" }, "license": "MIT", "testling": { "files": "test.js", "browsers": [ "ie/7..latest", "firefox/16..latest", "firefox/nightly", "chrome/22..latest", "chrome/canary", "opera/12..latest", "opera/next", "safari/5.1..latest", "ipad/6.0..latest", "iphone/6.0..latest" ] }, "engines": { "node": ">=0.4" }, "gitHead": "23dc302a89756da89c1897bc732a752317e35390", "_id": "xtend@4.0.1", "_shasum": "a5c6d532be656e23db820efb943a1f04998d63af", "_from": "xtend@>=4.0.0 <4.1.0", "_npmVersion": "2.14.1", "_nodeVersion": "0.10.32", "_npmUser": { "name": "raynos", "email": "raynos2@gmail.com" }, "dist": { "shasum": "a5c6d532be656e23db820efb943a1f04998d63af", "tarball": "http://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz" }, "maintainers": [ { "name": "raynos", "email": "raynos2@gmail.com" } ], "directories": {}, "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz", "readme": "ERROR: No README data found!" }
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="ko"> <head> <!-- Generated by javadoc (1.8.0_191) on Tue Mar 03 19:55:20 KST 2020 --> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> <title>Uses of Class kr.dogfoot.hwplib.writer.bodytext.paragraph.control.gso.ForControlOLE (HWP Library 1.0.1 API)</title> <meta name="date" content="2020-03-03"> <link rel="stylesheet" type="text/css" href="../../../../../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../../../../../script.js"></script> </head> <body> <script type="text/javascript"><!-- try { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class kr.dogfoot.hwplib.writer.bodytext.paragraph.control.gso.ForControlOLE (HWP Library 1.0.1 API)"; } } catch(err) { } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar.top"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.top.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../../../kr/dogfoot/hwplib/writer/bodytext/paragraph/control/gso/ForControlOLE.html" title="class in kr.dogfoot.hwplib.writer.bodytext.paragraph.control.gso">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> 
<li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../../../../index.html?kr/dogfoot/hwplib/writer/bodytext/paragraph/control/gso/class-use/ForControlOLE.html" target="_top">Frames</a></li> <li><a href="ForControlOLE.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h2 title="Uses of Class kr.dogfoot.hwplib.writer.bodytext.paragraph.control.gso.ForControlOLE" class="title">Uses of Class<br>kr.dogfoot.hwplib.writer.bodytext.paragraph.control.gso.ForControlOLE</h2> </div> <div class="classUseContainer">No usage of kr.dogfoot.hwplib.writer.bodytext.paragraph.control.gso.ForControlOLE</div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar.bottom"> <!-- --> </a> <div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div> <a name="navbar.bottom.firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../../../../../../overview-summary.html">Overview</a></li> <li><a href="../package-summary.html">Package</a></li> <li><a href="../../../../../../../../../kr/dogfoot/hwplib/writer/bodytext/paragraph/control/gso/ForControlOLE.html" title="class in kr.dogfoot.hwplib.writer.bodytext.paragraph.control.gso">Class</a></li> <li class="navBarCell1Rev">Use</li> <li><a href="../package-tree.html">Tree</a></li> <li><a href="../../../../../../../../../deprecated-list.html">Deprecated</a></li> <li><a 
href="../../../../../../../../../index-all.html">Index</a></li> <li><a href="../../../../../../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li>Prev</li> <li>Next</li> </ul> <ul class="navList"> <li><a href="../../../../../../../../../index.html?kr/dogfoot/hwplib/writer/bodytext/paragraph/control/gso/class-use/ForControlOLE.html" target="_top">Frames</a></li> <li><a href="ForControlOLE.html" target="_top">No&nbsp;Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../../../../../../allclasses-noframe.html">All&nbsp;Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip.navbar.bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2020. All rights reserved.</small></p> </body> </html>
{ "pile_set_name": "Github" }
// Copyright (c) 2012-2020 fo-dicom contributors.
// Licensed under the Microsoft Public License (MS-PL).

namespace Dicom.Media
{
#if NET35
    using System.Threading;
#else
    using System.Threading.Tasks;
#endif

    using Dicom.IO;

    /// <summary>Reports scanning progress: the directory currently being scanned and the running file count.</summary>
    public delegate void DicomScanProgressCallback(DicomFileScanner scanner, string directory, int count);

    /// <summary>Raised for each DICOM file successfully opened during the scan.</summary>
    public delegate void DicomScanFileFoundCallback(DicomFileScanner scanner, DicomFile file, string fileName);

    /// <summary>Raised once when the scan has finished (including after <see cref="DicomFileScanner.Stop"/>).</summary>
    public delegate void DicomScanCompleteCallback(DicomFileScanner scanner);

    /// <summary>
    /// Recursively scans a directory tree for DICOM files, raising <see cref="FileFound"/>
    /// for each file that can be opened and <see cref="Progress"/>/<see cref="Complete"/>
    /// as the scan proceeds. On non-NET35 builds the scan runs on a thread-pool task;
    /// NOTE(review): on NET35 the scan appears to run synchronously on the caller's
    /// thread — confirm this is intended.
    /// </summary>
    public class DicomFileScanner
    {
        #region Private Members

        // Search pattern passed to EnumerateFileNames; always null here (match all files).
        private readonly string _pattern;

        // Whether to descend into subdirectories; always true here.
        private readonly bool _recursive;

        // Cooperative cancellation flag, set by Stop() and polled by the scan loops.
        private bool _stop;

        // Number of files processed so far; reported via Progress.
        private int _count;

        #endregion

        #region Public Constructor

        /// <summary>
        /// Creates a scanner that matches all files, recurses into subdirectories,
        /// reports progress on every directory change and every 10 files.
        /// </summary>
        public DicomFileScanner()
        {
            _pattern = null;
            _recursive = true;
            ProgressOnDirectoryChange = true;
            ProgressFilesCount = 10;
        }

        #endregion

        /// <summary>Periodic progress notification (see <see cref="ProgressOnDirectoryChange"/> and <see cref="ProgressFilesCount"/>).</summary>
        public event DicomScanProgressCallback Progress;

        /// <summary>Raised for each DICOM file found and opened.</summary>
        public event DicomScanFileFoundCallback FileFound;

        /// <summary>Raised when the scan finishes.</summary>
        public event DicomScanCompleteCallback Complete;

        #region Public Properties

        /// <summary>When true, <see cref="Progress"/> fires each time a new directory is entered.</summary>
        public bool ProgressOnDirectoryChange { get; set; }

        /// <summary>Number of files between successive <see cref="Progress"/> notifications.</summary>
        public int ProgressFilesCount { get; set; }

        /// <summary>When true, files failing <see cref="DicomFile.HasValidHeader"/> are skipped before opening.</summary>
        public bool CheckForValidHeader { get; set; }

        #endregion

        #region Public Methods

        /// <summary>
        /// Starts scanning at <paramref name="directory"/>. Resets the stop flag and file count;
        /// asynchronous on platforms with Task support, otherwise runs inline.
        /// </summary>
        public void Start(string directory)
        {
            _stop = false;
            _count = 0;
#if NET35
            ScanProc(directory);
#else
            Task.Run(() => ScanProc(directory));
#endif
        }

        /// <summary>Requests cooperative cancellation; the scan stops at the next loop check.</summary>
        public void Stop()
        {
            _stop = true;
        }

        #endregion

        #region Private Methods

        // Entry point for the scan worker: walk the tree, then signal completion.
        private void ScanProc(string directory)
        {
            ScanDirectory(directory);

            Complete?.Invoke(this);
        }

        // Scans one directory: files first, then (if recursive) each subdirectory.
        // Exceptions (e.g. access denied) abort only this directory, best-effort.
        private void ScanDirectory(string path)
        {
            if (_stop) return;

            if (Progress != null && ProgressOnDirectoryChange) Progress(this, path, _count);

            try
            {
                var directory = IOManager.CreateDirectoryReference(path);
                var files = directory.EnumerateFileNames(_pattern);

                foreach (string file in files)
                {
                    if (_stop) return;

                    ScanFile(file);

                    _count++;

                    // Throttled progress: only every ProgressFilesCount files.
                    if ((_count % ProgressFilesCount) == 0 && Progress != null) Progress(this, path, _count);
                }

                if (!_recursive) return;

                var dirs = directory.EnumerateDirectoryNames();
                foreach (string dir in dirs)
                {
                    if (_stop) return;

                    ScanDirectory(dir);
                }
            }
            catch
            {
                // ignore exceptions?
            }
        }

        // Attempts to open a single file as DICOM; unreadable/invalid files are
        // silently skipped (deliberate best-effort behavior).
        private void ScanFile(string file)
        {
            try
            {
                if (CheckForValidHeader && !DicomFile.HasValidHeader(file)) return;

                var df = DicomFile.Open(file);

                FileFound?.Invoke(this, df, file);
            }
            catch
            {
                // ignore exceptions?
            }
        }

        #endregion
    }
}
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="generator" content="rustdoc"> <meta name="description" content="API documentation for the Rust `fabsf64` fn in crate `std`."> <meta name="keywords" content="rust, rustlang, rust-lang, fabsf64"> <title>std::intrinsics::fabsf64 - Rust</title> <link rel="stylesheet" type="text/css" href="../../rustdoc.css"> <link rel="stylesheet" type="text/css" href="../../main.css"> <link rel="shortcut icon" href="https://doc.rust-lang.org/favicon.ico"> </head> <body class="rustdoc"> <!--[if lte IE 8]> <div class="warning"> This old browser is unsupported and will most likely display funky things. </div> <![endif]--> <nav class="sidebar"> <a href='../../std/index.html'><img src='https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png' alt='' width='100'></a> <p class='location'><a href='../index.html'>std</a>::<wbr><a href='index.html'>intrinsics</a></p><script>window.sidebarCurrent = {name: 'fabsf64', ty: 'fn', relpath: ''};</script><script defer src="sidebar-items.js"></script> </nav> <nav class="sub"> <form class="search-form js-only"> <div class="search-container"> <input class="search-input" name="search" autocomplete="off" placeholder="Click or press ‘S’ to search, ‘?’ for more options…" type="search"> </div> </form> </nav> <section id='main' class="content fn"> <h1 class='fqn'><span class='in-band'>Function <a href='../index.html'>std</a>::<wbr><a href='index.html'>intrinsics</a>::<wbr><a class='fn' href=''>fabsf64</a></span><span class='out-of-band'><span id='render-detail'> <a id="toggle-all-docs" href="javascript:void(0)" title="collapse all docs"> [<span class='inner'>&#x2212;</span>] </a> </span><a id='src-4931' class='srclink' href='../../core/intrinsics/fn.fabsf64.html?gotosrc=4931' title='goto source code'>[src]</a></span></h1> <pre class='rust fn'>pub unsafe extern "rust-intrinsic" fn fabsf64(x: <a class='primitive' 
href='../primitive.f64.html'>f64</a>) -&gt; <a class='primitive' href='../primitive.f64.html'>f64</a></pre><div class='stability'><em class='stab unstable'>Unstable (<code>core_intrinsics</code> <a href="https://github.com/rust-lang/rust/issues/0">#0</a>)<p>: intrinsics are unlikely to ever be stabilized, instead they should be used through stabilized interfaces in the rest of the standard library</p> </em></div><div class='docblock'><p>Returns the absolute value of an <code>f64</code>.</p> </div></section> <section id='search' class="content hidden"></section> <section class="footer"></section> <aside id="help" class="hidden"> <div> <h1 class="hidden">Help</h1> <div class="shortcuts"> <h2>Keyboard Shortcuts</h2> <dl> <dt>?</dt> <dd>Show this help dialog</dd> <dt>S</dt> <dd>Focus the search field</dd> <dt>&larrb;</dt> <dd>Move up in search results</dd> <dt>&rarrb;</dt> <dd>Move down in search results</dd> <dt>&#9166;</dt> <dd>Go to active search result</dd> </dl> </div> <div class="infos"> <h2>Search Tricks</h2> <p> Prefix searches with a type followed by a colon (e.g. <code>fn:</code>) to restrict the search to a given type. </p> <p> Accepted types are: <code>fn</code>, <code>mod</code>, <code>struct</code>, <code>enum</code>, <code>trait</code>, <code>type</code>, <code>macro</code>, and <code>const</code>. </p> <p> Search functions by type signature (e.g. <code>vec -> usize</code>) </p> </div> </div> </aside> <script> window.rootPath = "../../"; window.currentCrate = "std"; window.playgroundUrl = "https://play.rust-lang.org/"; </script> <script src="../../jquery.js"></script> <script src="../../main.js"></script> <script src="../../playpen.js"></script> <script defer src="../../search-index.js"></script> </body> </html>
{ "pile_set_name": "Github" }
<!-- Stored in resources/views/layouts/app.blade.php --> <html> <head> <title>App Name - @yield('title')</title> </head> <body> @section('sidebar') *********This is the master sidebar.(from layout) *************<br> @show <div class="container"> @yield('content') </div> </body> </html>
{ "pile_set_name": "Github" }
// Package to work with VHD images // See https://technet.microsoft.com/en-us/virtualization/bb676673.aspx package vhd import ( "bytes" "encoding/binary" "encoding/hex" "fmt" "math" "os" "strconv" "time" "golang.org/x/text/encoding/unicode" "golang.org/x/text/transform" ) const VHD_COOKIE = "636f6e6563746978" // conectix const VHD_DYN_COOKIE = "6378737061727365" // cxsparse const VHD_CREATOR_APP = "676f2d766864" // go-vhd const VHD_CREATOR_HOST_OS = "5769326B" // Win2k const VHD_BLOCK_SIZE = 2 * 1024 * 1024 // 2MB const VHD_HEADER_SIZE = 512 const SECTOR_SIZE = 512 const FOURK_SECTOR_SIZE = 4096 const VHD_EXTRA_HEADER_SIZE = 1024 // A VDH file type VHD struct { Footer VHDHeader ExtraHeader VHDExtraHeader } // VHD Header type VHDHeader struct { Cookie [8]byte Features [4]byte FileFormatVersion [4]byte DataOffset [8]byte Timestamp [4]byte CreatorApplication [4]byte CreatorVersion [4]byte CreatorHostOS [4]byte OriginalSize [8]byte CurrentSize [8]byte DiskGeometry [4]byte DiskType [4]byte Checksum [4]byte UniqueId [16]byte SavedState [1]byte Reserved [427]byte } // VHD extra header, for dynamic and differential disks type VHDExtraHeader struct { Cookie [8]byte DataOffset [8]byte TableOffset [8]byte HeaderVersion [4]byte MaxTableEntries [4]byte BlockSize [4]byte Checksum [4]byte ParentUUID [16]byte ParentTimestamp [4]byte Reserved [4]byte ParentUnicodeName [512]byte ParentLocatorEntry1 [24]byte ParentLocatorEntry2 [24]byte ParentLocatorEntry3 [24]byte ParentLocatorEntry4 [24]byte ParentLocatorEntry5 [24]byte ParentLocatorEntry6 [24]byte ParentLocatorEntry7 [24]byte ParentLocatorEntry8 [24]byte Reserved2 [256]byte } // Options for the CreateSparseVHD function type VHDOptions struct { UUID string Timestamp int64 } /* * VHDExtraHeader methods */ func (header *VHDExtraHeader) CookieString() string { return string(header.Cookie[:]) } // Calculate and add the VHD dynamic/differential header checksum func (h *VHDExtraHeader) addChecksum() { buffer := new(bytes.Buffer) 
binary.Write(buffer, binary.BigEndian, h) checksum := 0 bb := buffer.Bytes() for counter := 0; counter < VHD_EXTRA_HEADER_SIZE; counter++ { checksum += int(bb[counter]) } binary.BigEndian.PutUint32(h.Checksum[:], uint32(^checksum)) } /* * VHDHeader methods */ func (h *VHDHeader) DiskTypeStr() (dt string) { switch h.DiskType[3] { case 0x00: dt = "None" case 0x01: dt = "Deprecated" case 0x02: dt = "Fixed" case 0x03: dt = "Dynamic" case 0x04: dt = "Differential" case 0x05: dt = "Reserved" case 0x06: dt = "Reserved" default: panic("Invalid disk type detected!") } return } // Return the timestamp of the header func (h *VHDHeader) TimestampTime() time.Time { tstamp := binary.BigEndian.Uint32(h.Timestamp[:]) return time.Unix(int64(946684800+tstamp), 0) } // Calculate and add the VHD header checksum func (h *VHDHeader) addChecksum() { buffer := new(bytes.Buffer) binary.Write(buffer, binary.BigEndian, h) checksum := 0 bb := buffer.Bytes() for counter := 0; counter < VHD_HEADER_SIZE; counter++ { checksum += int(bb[counter]) } binary.BigEndian.PutUint32(h.Checksum[:], uint32(^checksum)) } func CreateFixedHeader(size uint64, options *VHDOptions) VHDHeader { header := VHDHeader{} hexToField(VHD_COOKIE, header.Cookie[:]) hexToField("00000002", header.Features[:]) hexToField("00010000", header.FileFormatVersion[:]) hexToField("ffffffffffffffff", header.DataOffset[:]) // LOL Y2038 if options.Timestamp != 0 { binary.BigEndian.PutUint32(header.Timestamp[:], uint32(options.Timestamp)) } else { t := uint32(time.Now().Unix() - 946684800) binary.BigEndian.PutUint32(header.Timestamp[:], t) } hexToField(VHD_CREATOR_APP, header.CreatorApplication[:]) hexToField(VHD_CREATOR_HOST_OS, header.CreatorHostOS[:]) binary.BigEndian.PutUint64(header.OriginalSize[:], size) binary.BigEndian.PutUint64(header.CurrentSize[:], size) // total sectors = disk size / 512b sector size totalSectors := math.Floor(float64(size / 512)) // [C, H, S] geometry := calculateCHS(uint64(totalSectors)) 
binary.BigEndian.PutUint16(header.DiskGeometry[:2], uint16(geometry[0])) header.DiskGeometry[2] = uint8(geometry[1]) header.DiskGeometry[3] = uint8(geometry[2]) hexToField("00000002", header.DiskType[:]) // Fixed 0x00000002 hexToField("00000000", header.Checksum[:]) if options.UUID != "" { copy(header.UniqueId[:], uuidToBytes(options.UUID)) } else { copy(header.UniqueId[:], uuidgenBytes()) } header.addChecksum() return header } func RawToFixed(f *os.File, options *VHDOptions) { info, err := f.Stat() check(err) size := uint64(info.Size()) header := CreateFixedHeader(size, options) binary.Write(f, binary.BigEndian, header) } func VHDCreateSparse(size uint64, name string, options VHDOptions) VHD { header := VHDHeader{} hexToField(VHD_COOKIE, header.Cookie[:]) hexToField("00000002", header.Features[:]) hexToField("00010000", header.FileFormatVersion[:]) hexToField("0000000000000200", header.DataOffset[:]) // LOL Y2038 if options.Timestamp != 0 { binary.BigEndian.PutUint32(header.Timestamp[:], uint32(options.Timestamp)) } else { t := uint32(time.Now().Unix() - 946684800) binary.BigEndian.PutUint32(header.Timestamp[:], t) } hexToField(VHD_CREATOR_APP, header.CreatorApplication[:]) hexToField(VHD_CREATOR_HOST_OS, header.CreatorHostOS[:]) binary.BigEndian.PutUint64(header.OriginalSize[:], size) binary.BigEndian.PutUint64(header.CurrentSize[:], size) // total sectors = disk size / 512b sector size totalSectors := math.Floor(float64(size / 512)) // [C, H, S] geometry := calculateCHS(uint64(totalSectors)) binary.BigEndian.PutUint16(header.DiskGeometry[:2], uint16(geometry[0])) header.DiskGeometry[2] = uint8(geometry[1]) header.DiskGeometry[3] = uint8(geometry[2]) hexToField("00000003", header.DiskType[:]) // Sparse 0x00000003 hexToField("00000000", header.Checksum[:]) if options.UUID != "" { copy(header.UniqueId[:], uuidToBytes(options.UUID)) } else { copy(header.UniqueId[:], uuidgenBytes()) } header.addChecksum() // Fill the sparse header header2 := VHDExtraHeader{} 
hexToField(VHD_DYN_COOKIE, header2.Cookie[:]) hexToField("ffffffffffffffff", header2.DataOffset[:]) // header size + sparse header size binary.BigEndian.PutUint64(header2.TableOffset[:], uint64(VHD_EXTRA_HEADER_SIZE+VHD_HEADER_SIZE)) hexToField("00010000", header2.HeaderVersion[:]) maxTableSize := uint32(size / (VHD_BLOCK_SIZE)) binary.BigEndian.PutUint32(header2.MaxTableEntries[:], maxTableSize) binary.BigEndian.PutUint32(header2.BlockSize[:], VHD_BLOCK_SIZE) binary.BigEndian.PutUint32(header2.ParentTimestamp[:], uint32(0)) header2.addChecksum() f, err := os.Create(name) check(err) defer f.Close() binary.Write(f, binary.BigEndian, header) binary.Write(f, binary.BigEndian, header2) /* Write BAT entries The BAT is always extended to a sector (4K) boundary 1536 = 512 + 1024 (the VHD Header + VHD Sparse header size) */ for count := uint32(0); count < (FOURK_SECTOR_SIZE - 1536); count += 1 { f.Write([]byte{0xff}) } /* Windows creates 8K VHDs by default */ for i := 0; i < (FOURK_SECTOR_SIZE - VHD_HEADER_SIZE); i += 1 { f.Write([]byte{0x0}) } binary.Write(f, binary.BigEndian, header) return VHD{ Footer: header, ExtraHeader: header2, } } /* * VHD */ func FromFile(f *os.File) (vhd VHD) { vhd = VHD{} vhd.Footer = readVHDFooter(f) vhd.ExtraHeader = readVHDExtraHeader(f) return vhd } func (vhd *VHD) PrintInfo() { fmt.Println("\nVHD footer") fmt.Println("==========") vhd.PrintFooter() if vhd.Footer.DiskType[3] == 0x3 || vhd.Footer.DiskType[3] == 0x04 { fmt.Println("\nVHD sparse/differential header") fmt.Println("===============================") vhd.PrintExtraHeader() } } func (vhd *VHD) PrintExtraHeader() { header := vhd.ExtraHeader fmtField("Cookie", fmt.Sprintf("%s (%s)", hexs(header.Cookie[:]), header.CookieString())) fmtField("Data offset", hexs(header.DataOffset[:])) fmtField("Table offset", hexs(header.TableOffset[:])) fmtField("Header version", hexs(header.HeaderVersion[:])) fmtField("Max table entries", hexs(header.MaxTableEntries[:])) fmtField("Block size", 
hexs(header.BlockSize[:])) fmtField("Checksum", hexs(header.Checksum[:])) fmtField("Parent UUID", uuid(header.ParentUUID[:])) // Seconds since January 1, 1970 12:00:00 AM in UTC/GMT. // 946684800 = January 1, 2000 12:00:00 AM in UTC/GMT. tstamp := binary.BigEndian.Uint32(header.ParentTimestamp[:]) t := time.Unix(int64(946684800+tstamp), 0) fmtField("Parent timestamp", fmt.Sprintf("%s", t)) fmtField("Reserved", hexs(header.Reserved[:])) parentNameBytes, _, err := transform.Bytes( unicode.UTF16(unicode.BigEndian, unicode.IgnoreBOM).NewDecoder(), header.ParentUnicodeName[:], ) if err != nil { panic(err) } parentName := string(parentNameBytes) fmtField("Parent Name", parentName) // Parent locator entries ignored since it's a dynamic disk sum := 0 for _, b := range header.Reserved2 { sum += int(b) } fmtField("Reserved2", strconv.Itoa(sum)) } func (vhd *VHD) PrintFooter() { header := vhd.Footer //fmtField("Cookie", string(header.Cookie[:])) fmtField("Cookie", fmt.Sprintf("%s (%s)", hexs(header.Cookie[:]), string(header.Cookie[:]))) fmtField("Features", hexs(header.Features[:])) fmtField("File format version", hexs(header.FileFormatVersion[:])) dataOffset := binary.BigEndian.Uint64(header.DataOffset[:]) fmtField("Data offset", fmt.Sprintf("%s (%d bytes)", hexs(header.DataOffset[:]), dataOffset)) //// Seconds since January 1, 1970 12:00:00 AM in UTC/GMT. //// 946684800 = January 1, 2000 12:00:00 AM in UTC/GMT. 
t := time.Unix(int64(946684800+binary.BigEndian.Uint32(header.Timestamp[:])), 0) fmtField("Timestamp", fmt.Sprintf("%s", t)) fmtField("Creator application", string(header.CreatorApplication[:])) fmtField("Creator version", hexs(header.CreatorVersion[:])) fmtField("Creator OS", string(header.CreatorHostOS[:])) originalSize := binary.BigEndian.Uint64(header.OriginalSize[:]) fmtField("Original size", fmt.Sprintf("%s ( %d bytes )", hexs(header.OriginalSize[:]), originalSize)) currentSize := binary.BigEndian.Uint64(header.OriginalSize[:]) fmtField("Current size", fmt.Sprintf("%s ( %d bytes )", hexs(header.CurrentSize[:]), currentSize)) cilinders := int64(binary.BigEndian.Uint16(header.DiskGeometry[:2])) heads := int64(header.DiskGeometry[2]) sectors := int64(header.DiskGeometry[3]) dsize := cilinders * heads * sectors * 512 fmtField("Disk geometry", fmt.Sprintf("%s (c: %d, h: %d, s: %d) (%d bytes)", hexs(header.DiskGeometry[:]), cilinders, heads, sectors, dsize)) fmtField("Disk type", fmt.Sprintf("%s (%s)", hexs(header.DiskType[:]), header.DiskTypeStr())) fmtField("Checksum", hexs(header.Checksum[:])) fmtField("UUID", uuid(header.UniqueId[:])) fmtField("Saved state", fmt.Sprintf("%d", header.SavedState[0])) } /* Utility functions */ func calculateCHS(ts uint64) []uint { var sectorsPerTrack, heads, cylinderTimesHeads, cylinders float64 totalSectors := float64(ts) ret := make([]uint, 3) if totalSectors > 65535*16*255 { totalSectors = 65535 * 16 * 255 } if totalSectors >= 65535*16*63 { sectorsPerTrack = 255 heads = 16 cylinderTimesHeads = math.Floor(totalSectors / sectorsPerTrack) } else { sectorsPerTrack = 17 cylinderTimesHeads = math.Floor(totalSectors / sectorsPerTrack) heads = math.Floor((cylinderTimesHeads + 1023) / 1024) if heads < 4 { heads = 4 } if (cylinderTimesHeads >= (heads * 1024)) || heads > 16 { sectorsPerTrack = 31 heads = 16 cylinderTimesHeads = math.Floor(totalSectors / sectorsPerTrack) } if cylinderTimesHeads >= (heads * 1024) { sectorsPerTrack = 63 
heads = 16 cylinderTimesHeads = math.Floor(totalSectors / sectorsPerTrack) } } cylinders = cylinderTimesHeads / heads // This will floor the values ret[0] = uint(cylinders) ret[1] = uint(heads) ret[2] = uint(sectorsPerTrack) return ret } func hexToField(hexs string, field []byte) { h, err := hex.DecodeString(hexs) check(err) copy(field, h) } // Return the number of blocks in the disk, diskSize in bytes func getMaxTableEntries(diskSize uint64) uint64 { return diskSize * (2 * 1024 * 1024) // block size is 2M } func readVHDExtraHeader(f *os.File) (header VHDExtraHeader) { buff := make([]byte, 1024) _, err := f.ReadAt(buff, 512) check(err) binary.Read(bytes.NewBuffer(buff[:]), binary.BigEndian, &header) return header } func readVHDFooter(f *os.File) (header VHDHeader) { info, err := f.Stat() check(err) buff := make([]byte, 512) _, err = f.ReadAt(buff, info.Size()-512) check(err) binary.Read(bytes.NewBuffer(buff[:]), binary.BigEndian, &header) return header } func readVHDHeader(f *os.File) (header VHDHeader) { buff := make([]byte, 512) _, err := f.ReadAt(buff, 0) check(err) binary.Read(bytes.NewBuffer(buff[:]), binary.BigEndian, &header) return header }
{ "pile_set_name": "Github" }
/*
 * Copyright (C) 2007 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1.  Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in the
 *     documentation and/or other materials provided with the distribution.
 * 3.  Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *     its contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef JSHTMLSelectElementCustom_h
#define JSHTMLSelectElementCustom_h

#include "JSHTMLSelectElement.h"

namespace WebCore {

// Custom helper used by the JS bindings when script assigns to an indexed
// property of an HTMLSelectElement wrapper (e.g. `select[i] = option`);
// presumably implemented in the corresponding JSHTMLSelectElementCustom.cpp —
// verify against the implementation file.
void selectIndexSetter(HTMLSelectElement*, JSC::ExecState*, unsigned index, JSC::JSValue);

}

#endif
{ "pile_set_name": "Github" }
/**
 * One page of paginated search results.
 */
export interface SearchResult<T> {
    // The items contained in the requested page.
    resultsOnPage: T[];
    // Total number of pages available for the query.
    numberOfPages: number;
}

/**
 * Enumeration specifying sorting order options.
 */
export enum SortingOrder {
    ASCENDING = 'ASCENDING',
    DESCENDING = 'DESCENDING',
}

/**
 * Parameters describing a paginated, sorted, filtered search request.
 */
export interface PageableSearch {
    // Page index to fetch.
    // NOTE(review): 0- vs 1-based indexing is not visible here — confirm
    // against the consuming server API.
    page: number;
    // Maximum number of results per page.
    pageSize: number;
    // Free-text filter applied to the search.
    searchTerm: string;
    // Direction in which sortedColumn is ordered.
    sortingOrder: SortingOrder;
    // Name of the column to sort by.
    sortedColumn: string;
}
{ "pile_set_name": "Github" }
/* //@HEADER // ************************************************************************ // // Kokkos v. 3.0 // Copyright (2020) National Technology & Engineering // Solutions of Sandia, LLC (NTESS). // // Under the terms of Contract DE-NA0003525 with NTESS, // the U.S. Government retains certain rights in this software. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // 3. Neither the name of the Corporation nor the names of the // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY NTESS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL NTESS OR THE // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Questions? Contact Christian R. 
Trott (crtrott@sandia.gov) // // ************************************************************************ //@HEADER */ #include <Kokkos_Core.hpp> #include <gtest/gtest.h> #include <cstdio> #include <PerfTest_Category.hpp> namespace Test { template <class ViewType> double fill_view(ViewType& a, typename ViewType::const_value_type& val, int repeat) { Kokkos::Timer timer; for (int i = 0; i < repeat; i++) { Kokkos::deep_copy(a, val); } Kokkos::fence(); return timer.seconds(); } template <class Layout> void run_fillview_tests123(int N, int R) { const int N1 = N; const int N2 = N1 * N1; const int N3 = N2 * N1; const int N4 = N2 * N2; const int N8 = N4 * N4; double time1, time2, time3, time_raw = 100000.0; { Kokkos::View<double*, Layout> a("A1", N8); time1 = fill_view(a, 1.1, R) / R; } { Kokkos::View<double**, Layout> a("A2", N4, N4); time2 = fill_view(a, 1.1, R) / R; } { Kokkos::View<double***, Layout> a("A3", N3, N3, N2); time3 = fill_view(a, 1.1, R) / R; } #if defined(KOKKOS_ENABLE_CUDA_LAMBDA) || !defined(KOKKOS_ENABLE_CUDA) { Kokkos::View<double*, Layout> a("A1", N8); double* a_ptr = a.data(); Kokkos::Timer timer; for (int r = 0; r < R; r++) { Kokkos::parallel_for( N8, KOKKOS_LAMBDA(const int& i) { a_ptr[i] = 1.1; }); } Kokkos::fence(); time_raw = timer.seconds() / R; } #endif double size = 1.0 * N8 * 8 / 1024 / 1024; printf(" Raw: %lf s %lf MB %lf GB/s\n", time_raw, size, size / 1024 / time_raw); printf(" Rank1: %lf s %lf MB %lf GB/s\n", time1, size, size / 1024 / time1); printf(" Rank2: %lf s %lf MB %lf GB/s\n", time2, size, size / 1024 / time2); printf(" Rank3: %lf s %lf MB %lf GB/s\n", time3, size, size / 1024 / time3); } template <class Layout> void run_fillview_tests45(int N, int R) { const int N1 = N; const int N2 = N1 * N1; const int N4 = N2 * N2; const int N8 = N4 * N4; double time4, time5, time_raw = 100000.0; { Kokkos::View<double****, Layout> a("A4", N2, N2, N2, N2); time4 = fill_view(a, 1.1, R) / R; } { Kokkos::View<double*****, Layout> a("A5", N2, N2, 
N1, N1, N2); time5 = fill_view(a, 1.1, R) / R; } #if defined(KOKKOS_ENABLE_CUDA_LAMBDA) || !defined(KOKKOS_ENABLE_CUDA) { Kokkos::View<double*, Layout> a("A1", N8); double* a_ptr = a.data(); Kokkos::Timer timer; for (int r = 0; r < R; r++) { Kokkos::parallel_for( N8, KOKKOS_LAMBDA(const int& i) { a_ptr[i] = 1.1; }); } Kokkos::fence(); time_raw = timer.seconds() / R; } #endif double size = 1.0 * N8 * 8 / 1024 / 1024; printf(" Raw: %lf s %lf MB %lf GB/s\n", time_raw, size, size / 1024 / time_raw); printf(" Rank4: %lf s %lf MB %lf GB/s\n", time4, size, size / 1024 / time4); printf(" Rank5: %lf s %lf MB %lf GB/s\n", time5, size, size / 1024 / time5); } template <class Layout> void run_fillview_tests6(int N, int R) { const int N1 = N; const int N2 = N1 * N1; const int N4 = N2 * N2; const int N8 = N4 * N4; double time6, time_raw = 100000.0; { Kokkos::View<double******, Layout> a("A6", N2, N1, N1, N1, N1, N2); time6 = fill_view(a, 1.1, R) / R; } #if defined(KOKKOS_ENABLE_CUDA_LAMBDA) || !defined(KOKKOS_ENABLE_CUDA) { Kokkos::View<double*, Layout> a("A1", N8); double* a_ptr = a.data(); Kokkos::Timer timer; for (int r = 0; r < R; r++) { Kokkos::parallel_for( N8, KOKKOS_LAMBDA(const int& i) { a_ptr[i] = 1.1; }); } Kokkos::fence(); time_raw = timer.seconds() / R; } #endif double size = 1.0 * N8 * 8 / 1024 / 1024; printf(" Raw: %lf s %lf MB %lf GB/s\n", time_raw, size, size / 1024 / time_raw); printf(" Rank6: %lf s %lf MB %lf GB/s\n", time6, size, size / 1024 / time6); } template <class Layout> void run_fillview_tests7(int N, int R) { const int N1 = N; const int N2 = N1 * N1; const int N4 = N2 * N2; const int N8 = N4 * N4; double time7, time_raw = 100000.0; { Kokkos::View<double*******, Layout> a("A7", N2, N1, N1, N1, N1, N1, N1); time7 = fill_view(a, 1.1, R) / R; } #if defined(KOKKOS_ENABLE_CUDA_LAMBDA) || !defined(KOKKOS_ENABLE_CUDA) { Kokkos::View<double*, Layout> a("A1", N8); double* a_ptr = a.data(); Kokkos::Timer timer; for (int r = 0; r < R; r++) { Kokkos::parallel_for( 
N8, KOKKOS_LAMBDA(const int& i) { a_ptr[i] = 1.1; }); } Kokkos::fence(); time_raw = timer.seconds() / R; } #endif double size = 1.0 * N8 * 8 / 1024 / 1024; printf(" Raw: %lf s %lf MB %lf GB/s\n", time_raw, size, size / 1024 / time_raw); printf(" Rank7: %lf s %lf MB %lf GB/s\n", time7, size, size / 1024 / time7); } template <class Layout> void run_fillview_tests8(int N, int R) { const int N1 = N; const int N2 = N1 * N1; const int N4 = N2 * N2; const int N8 = N4 * N4; double time8, time_raw = 100000.0; { Kokkos::View<double********, Layout> a("A8", N1, N1, N1, N1, N1, N1, N1, N1); time8 = fill_view(a, 1.1, R) / R; } #if defined(KOKKOS_ENABLE_CUDA_LAMBDA) || !defined(KOKKOS_ENABLE_CUDA) { Kokkos::View<double*, Layout> a("A1", N8); double* a_ptr = a.data(); Kokkos::Timer timer; for (int r = 0; r < R; r++) { Kokkos::parallel_for( N8, KOKKOS_LAMBDA(const int& i) { a_ptr[i] = 1.1; }); } Kokkos::fence(); time_raw = timer.seconds() / R; } #endif double size = 1.0 * N8 * 8 / 1024 / 1024; printf(" Raw: %lf s %lf MB %lf GB/s\n", time_raw, size, size / 1024 / time_raw); printf(" Rank8: %lf s %lf MB %lf GB/s\n", time8, size, size / 1024 / time8); } } // namespace Test
{ "pile_set_name": "Github" }
--sample-frequency=8000 --frame-length=25 # the default is 25 --low-freq=20 # the default. --high-freq=3700 # the default is zero meaning use the Nyquist (4k in this case). --num-ceps=23 # higher than the default which is 12. --snip-edges=false
{ "pile_set_name": "Github" }
;(function(exports) { // export the class if we are in a Node-like system. if (typeof module === 'object' && module.exports === exports) exports = module.exports = SemVer; // The debug function is excluded entirely from the minified version. // Note: this is the semver.org version of the spec that it implements // Not necessarily the package version of this code. exports.SEMVER_SPEC_VERSION = '2.0.0'; // The actual regexps go on exports.re var re = exports.re = []; var src = exports.src = []; var R = 0; // The following Regular Expressions can be used for tokenizing, // validating, and parsing SemVer version strings. // ## Numeric Identifier // A single `0`, or a non-zero digit followed by zero or more digits. var NUMERICIDENTIFIER = R++; src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'; var NUMERICIDENTIFIERLOOSE = R++; src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'; // ## Non-numeric Identifier // Zero or more digits, followed by a letter or hyphen, and then zero or // more letters, digits, or hyphens. var NONNUMERICIDENTIFIER = R++; src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'; // ## Main Version // Three dot-separated numeric identifiers. var MAINVERSION = R++; src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + '(' + src[NUMERICIDENTIFIER] + ')\\.' + '(' + src[NUMERICIDENTIFIER] + ')'; var MAINVERSIONLOOSE = R++; src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + '(' + src[NUMERICIDENTIFIERLOOSE] + ')'; // ## Pre-release Version Identifier // A numeric identifier, or a non-numeric identifier. var PRERELEASEIDENTIFIER = R++; src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + '|' + src[NONNUMERICIDENTIFIER] + ')'; var PRERELEASEIDENTIFIERLOOSE = R++; src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + '|' + src[NONNUMERICIDENTIFIER] + ')'; // ## Pre-release Version // Hyphen, followed by one or more dot-separated pre-release version // identifiers. 
var PRERELEASE = R++; src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'; var PRERELEASELOOSE = R++; src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'; // ## Build Metadata Identifier // Any combination of digits, letters, or hyphens. var BUILDIDENTIFIER = R++; src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'; // ## Build Metadata // Plus sign, followed by one or more period-separated build metadata // identifiers. var BUILD = R++; src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'; // ## Full Version String // A main version, followed optionally by a pre-release version and // build metadata. // Note that the only major, minor, patch, and pre-release sections of // the version string are capturing groups. The build metadata is not a // capturing group, because it should not ever be used in version // comparison. var FULL = R++; var FULLPLAIN = 'v?' + src[MAINVERSION] + src[PRERELEASE] + '?' + src[BUILD] + '?'; src[FULL] = '^' + FULLPLAIN + '$'; // like full, but allows v1.2.3 and =1.2.3, which people do sometimes. // also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty // common in the npm registry. var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + src[PRERELEASELOOSE] + '?' + src[BUILD] + '?'; var LOOSE = R++; src[LOOSE] = '^' + LOOSEPLAIN + '$'; var GTLT = R++; src[GTLT] = '((?:<|>)?=?)'; // Something like "2.*" or "1.2.x". // Note that "x.x" is a valid xRange identifer, meaning "any version" // Only the first item is strictly required. 
var XRANGEIDENTIFIERLOOSE = R++; src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'; var XRANGEIDENTIFIER = R++; src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'; var XRANGEPLAIN = R++; src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + '(?:(' + src[PRERELEASE] + ')' + ')?)?)?'; var XRANGEPLAINLOOSE = R++; src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + '(?:(' + src[PRERELEASELOOSE] + ')' + ')?)?)?'; // >=2.x, for example, means >=2.0.0-0 // <1.x would be the same as "<1.0.0-0", though. var XRANGE = R++; src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'; var XRANGELOOSE = R++; src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'; // Tilde ranges. // Meaning is "reasonably at or greater than" var LONETILDE = R++; src[LONETILDE] = '(?:~>?)'; var TILDETRIM = R++; src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'; re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g'); var tildeTrimReplace = '$1~'; var TILDE = R++; src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'; var TILDELOOSE = R++; src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'; // Caret ranges. 
// Meaning is "at least and backwards compatible with" var LONECARET = R++; src[LONECARET] = '(?:\\^)'; var CARETTRIM = R++; src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'; re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g'); var caretTrimReplace = '$1^'; var CARET = R++; src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'; var CARETLOOSE = R++; src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'; // A simple gt/lt/eq thing, or just "" to indicate "any version" var COMPARATORLOOSE = R++; src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'; var COMPARATOR = R++; src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'; // An expression to strip any whitespace between the gtlt and the thing // it modifies, so that `> 1.2.3` ==> `>1.2.3` var COMPARATORTRIM = R++; src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'; // this one has to use the /g flag re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g'); var comparatorTrimReplace = '$1$2$3'; // Something like `1.2.3 - 1.2.4` // Note that these all use the loose form, because they'll be // checked against either the strict or loose comparator form // later. var HYPHENRANGE = R++; src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + '\\s+-\\s+' + '(' + src[XRANGEPLAIN] + ')' + '\\s*$'; var HYPHENRANGELOOSE = R++; src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + '\\s+-\\s+' + '(' + src[XRANGEPLAINLOOSE] + ')' + '\\s*$'; // Star ranges basically just allow anything at all. var STAR = R++; src[STAR] = '(<|>)?=?\\s*\\*'; // Compile to actual regexp objects. // All are flag-free, unless they were created above with a flag. for (var i = 0; i < R; i++) { ; if (!re[i]) re[i] = new RegExp(src[i]); } exports.parse = parse; function parse(version, loose) { var r = loose ? re[LOOSE] : re[FULL]; return (r.test(version)) ? 
new SemVer(version, loose) : null; } exports.valid = valid; function valid(version, loose) { var v = parse(version, loose); return v ? v.version : null; } exports.clean = clean; function clean(version, loose) { var s = parse(version, loose); return s ? s.version : null; } exports.SemVer = SemVer; function SemVer(version, loose) { if (version instanceof SemVer) { if (version.loose === loose) return version; else version = version.version; } if (!(this instanceof SemVer)) return new SemVer(version, loose); ; this.loose = loose; var m = version.trim().match(loose ? re[LOOSE] : re[FULL]); if (!m) throw new TypeError('Invalid Version: ' + version); this.raw = version; // these are actually numbers this.major = +m[1]; this.minor = +m[2]; this.patch = +m[3]; // numberify any prerelease numeric ids if (!m[4]) this.prerelease = []; else this.prerelease = m[4].split('.').map(function(id) { return (/^[0-9]+$/.test(id)) ? +id : id; }); this.build = m[5] ? m[5].split('.') : []; this.format(); } SemVer.prototype.format = function() { this.version = this.major + '.' + this.minor + '.' 
+ this.patch; if (this.prerelease.length) this.version += '-' + this.prerelease.join('.'); return this.version; }; SemVer.prototype.inspect = function() { return '<SemVer "' + this + '">'; }; SemVer.prototype.toString = function() { return this.version; }; SemVer.prototype.compare = function(other) { ; if (!(other instanceof SemVer)) other = new SemVer(other, this.loose); return this.compareMain(other) || this.comparePre(other); }; SemVer.prototype.compareMain = function(other) { if (!(other instanceof SemVer)) other = new SemVer(other, this.loose); return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch); }; SemVer.prototype.comparePre = function(other) { if (!(other instanceof SemVer)) other = new SemVer(other, this.loose); // NOT having a prerelease is > having one if (this.prerelease.length && !other.prerelease.length) return -1; else if (!this.prerelease.length && other.prerelease.length) return 1; else if (!this.prerelease.lenth && !other.prerelease.length) return 0; var i = 0; do { var a = this.prerelease[i]; var b = other.prerelease[i]; ; if (a === undefined && b === undefined) return 0; else if (b === undefined) return 1; else if (a === undefined) return -1; else if (a === b) continue; else return compareIdentifiers(a, b); } while (++i); }; SemVer.prototype.inc = function(release) { switch (release) { case 'major': this.major++; this.minor = -1; case 'minor': this.minor++; this.patch = -1; case 'patch': this.patch++; this.prerelease = []; break; case 'prerelease': if (this.prerelease.length === 0) this.prerelease = [0]; else { var i = this.prerelease.length; while (--i >= 0) { if (typeof this.prerelease[i] === 'number') { this.prerelease[i]++; i = -2; } } if (i === -1) // didn't increment anything this.prerelease.push(0); } break; default: throw new Error('invalid increment argument: ' + release); } this.format(); return this; }; exports.inc = inc; function inc(version, 
release, loose) { try { return new SemVer(version, loose).inc(release).version; } catch (er) { return null; } } exports.compareIdentifiers = compareIdentifiers; var numeric = /^[0-9]+$/; function compareIdentifiers(a, b) { var anum = numeric.test(a); var bnum = numeric.test(b); if (anum && bnum) { a = +a; b = +b; } return (anum && !bnum) ? -1 : (bnum && !anum) ? 1 : a < b ? -1 : a > b ? 1 : 0; } exports.rcompareIdentifiers = rcompareIdentifiers; function rcompareIdentifiers(a, b) { return compareIdentifiers(b, a); } exports.compare = compare; function compare(a, b, loose) { return new SemVer(a, loose).compare(b); } exports.compareLoose = compareLoose; function compareLoose(a, b) { return compare(a, b, true); } exports.rcompare = rcompare; function rcompare(a, b, loose) { return compare(b, a, loose); } exports.sort = sort; function sort(list, loose) { return list.sort(function(a, b) { return exports.compare(a, b, loose); }); } exports.rsort = rsort; function rsort(list, loose) { return list.sort(function(a, b) { return exports.rcompare(a, b, loose); }); } exports.gt = gt; function gt(a, b, loose) { return compare(a, b, loose) > 0; } exports.lt = lt; function lt(a, b, loose) { return compare(a, b, loose) < 0; } exports.eq = eq; function eq(a, b, loose) { return compare(a, b, loose) === 0; } exports.neq = neq; function neq(a, b, loose) { return compare(a, b, loose) !== 0; } exports.gte = gte; function gte(a, b, loose) { return compare(a, b, loose) >= 0; } exports.lte = lte; function lte(a, b, loose) { return compare(a, b, loose) <= 0; } exports.cmp = cmp; function cmp(a, op, b, loose) { var ret; switch (op) { case '===': ret = a === b; break; case '!==': ret = a !== b; break; case '': case '=': case '==': ret = eq(a, b, loose); break; case '!=': ret = neq(a, b, loose); break; case '>': ret = gt(a, b, loose); break; case '>=': ret = gte(a, b, loose); break; case '<': ret = lt(a, b, loose); break; case '<=': ret = lte(a, b, loose); break; default: throw new 
TypeError('Invalid operator: ' + op); } return ret; } exports.Comparator = Comparator; function Comparator(comp, loose) { if (comp instanceof Comparator) { if (comp.loose === loose) return comp; else comp = comp.value; } if (!(this instanceof Comparator)) return new Comparator(comp, loose); ; this.loose = loose; this.parse(comp); if (this.semver === ANY) this.value = ''; else this.value = this.operator + this.semver.version; } var ANY = {}; Comparator.prototype.parse = function(comp) { var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; var m = comp.match(r); if (!m) throw new TypeError('Invalid comparator: ' + comp); this.operator = m[1]; // if it literally is just '>' or '' then allow anything. if (!m[2]) this.semver = ANY; else { this.semver = new SemVer(m[2], this.loose); // <1.2.3-rc DOES allow 1.2.3-beta (has prerelease) // >=1.2.3 DOES NOT allow 1.2.3-beta // <=1.2.3 DOES allow 1.2.3-beta // However, <1.2.3 does NOT allow 1.2.3-beta, // even though `1.2.3-beta < 1.2.3` // The assumption is that the 1.2.3 version has something you // *don't* want, so we push the prerelease down to the minimum. if (this.operator === '<' && !this.semver.prerelease.length) { this.semver.prerelease = ['0']; this.semver.format(); } } }; Comparator.prototype.inspect = function() { return '<SemVer Comparator "' + this + '">'; }; Comparator.prototype.toString = function() { return this.value; }; Comparator.prototype.test = function(version) { ; return (this.semver === ANY) ? 
true : cmp(version, this.operator, this.semver, this.loose); }; exports.Range = Range; function Range(range, loose) { if ((range instanceof Range) && range.loose === loose) return range; if (!(this instanceof Range)) return new Range(range, loose); this.loose = loose; // First, split based on boolean or || this.raw = range; this.set = range.split(/\s*\|\|\s*/).map(function(range) { return this.parseRange(range.trim()); }, this).filter(function(c) { // throw out any that are not relevant for whatever reason return c.length; }); if (!this.set.length) { throw new TypeError('Invalid SemVer Range: ' + range); } this.format(); } Range.prototype.inspect = function() { return '<SemVer Range "' + this.range + '">'; }; Range.prototype.format = function() { this.range = this.set.map(function(comps) { return comps.join(' ').trim(); }).join('||').trim(); return this.range; }; Range.prototype.toString = function() { return this.range; }; Range.prototype.parseRange = function(range) { var loose = this.loose; range = range.trim(); ; // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]; range = range.replace(hr, hyphenReplace); ; // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace); ; // `~ 1.2.3` => `~1.2.3` range = range.replace(re[TILDETRIM], tildeTrimReplace); // `^ 1.2.3` => `^1.2.3` range = range.replace(re[CARETTRIM], caretTrimReplace); // normalize spaces range = range.split(/\s+/).join(' '); // At this point, the range is completely trimmed and // ready to be split into comparators. var compRe = loose ? 
re[COMPARATORLOOSE] : re[COMPARATOR]; var set = range.split(' ').map(function(comp) { return parseComparator(comp, loose); }).join(' ').split(/\s+/); if (this.loose) { // in loose mode, throw out any that are not valid comparators set = set.filter(function(comp) { return !!comp.match(compRe); }); } set = set.map(function(comp) { return new Comparator(comp, loose); }); return set; }; // Mostly just for testing and legacy API reasons exports.toComparators = toComparators; function toComparators(range, loose) { return new Range(range, loose).set.map(function(comp) { return comp.map(function(c) { return c.value; }).join(' ').trim().split(' '); }); } // comprised of xranges, tildes, stars, and gtlt's at this point. // already replaced the hyphen ranges // turn into a set of JUST comparators. function parseComparator(comp, loose) { ; comp = replaceCarets(comp, loose); ; comp = replaceTildes(comp, loose); ; comp = replaceXRanges(comp, loose); ; comp = replaceStars(comp, loose); ; return comp; } function isX(id) { return !id || id.toLowerCase() === 'x' || id === '*'; } // ~, ~> --> * (any, kinda silly) // ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 // ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 // ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 function replaceTildes(comp, loose) { return comp.trim().split(/\s+/).map(function(comp) { return replaceTilde(comp, loose); }).join(' '); } function replaceTilde(comp, loose) { var r = loose ? re[TILDELOOSE] : re[TILDE]; return comp.replace(r, function(_, M, m, p, pr) { ; var ret; if (isX(M)) ret = ''; else if (isX(m)) ret = '>=' + M + '.0.0-0 <' + (+M + 1) + '.0.0-0'; else if (isX(p)) // ~1.2 == >=1.2.0- <1.3.0- ret = '>=' + M + '.' + m + '.0-0 <' + M + '.' + (+m + 1) + '.0-0'; else if (pr) { ; if (pr.charAt(0) !== '-') pr = '-' + pr; ret = '>=' + M + '.' + m + '.' + p + pr + ' <' + M + '.' 
+ (+m + 1) + '.0-0'; } else // ~1.2.3 == >=1.2.3-0 <1.3.0-0 ret = '>=' + M + '.' + m + '.' + p + '-0' + ' <' + M + '.' + (+m + 1) + '.0-0'; ; return ret; }); } // ^ --> * (any, kinda silly) // ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 // ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 // ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 // ^1.2.3 --> >=1.2.3 <2.0.0 // ^1.2.0 --> >=1.2.0 <2.0.0 function replaceCarets(comp, loose) { return comp.trim().split(/\s+/).map(function(comp) { return replaceCaret(comp, loose); }).join(' '); } function replaceCaret(comp, loose) { var r = loose ? re[CARETLOOSE] : re[CARET]; return comp.replace(r, function(_, M, m, p, pr) { ; var ret; if (isX(M)) ret = ''; else if (isX(m)) ret = '>=' + M + '.0.0-0 <' + (+M + 1) + '.0.0-0'; else if (isX(p)) if (M === '0') ret = '>=' + M + '.' + m + '.0-0 <' + M + '.' + (+m + 1) + '.0-0'; else ret = '>=' + M + '.' + m + '.0-0 <' + (+M + 1) + '.0.0-0'; else if (pr) { ; if (pr.charAt(0) !== '-') pr = '-' + pr; if (M === '0') if (m === '0') ret = '=' + M + '.' + m + '.' + p + pr; else ret = '>=' + M + '.' + m + '.' + p + pr + ' <' + M + '.' + (+m + 1) + '.0-0'; else ret = '>=' + M + '.' + m + '.' + p + pr + ' <' + (+M + 1) + '.0.0-0'; } else if (M === '0') if (m === '0') ret = '=' + M + '.' + m + '.' + p; else ret = '>=' + M + '.' + m + '.' + p + '-0' + ' <' + M + '.' + (+m + 1) + '.0-0'; else ret = '>=' + M + '.' + m + '.' + p + '-0' + ' <' + (+M + 1) + '.0.0-0'; ; return ret; }); } function replaceXRanges(comp, loose) { ; return comp.split(/\s+/).map(function(comp) { return replaceXRange(comp, loose); }).join(' '); } function replaceXRange(comp, loose) { comp = comp.trim(); var r = loose ? 
re[XRANGELOOSE] : re[XRANGE]; return comp.replace(r, function(ret, gtlt, M, m, p, pr) { ; var xM = isX(M); var xm = xM || isX(m); var xp = xm || isX(p); var anyX = xp; if (gtlt === '=' && anyX) gtlt = ''; if (gtlt && anyX) { // replace X with 0, and then append the -0 min-prerelease if (xM) M = 0; if (xm) m = 0; if (xp) p = 0; if (gtlt === '>') { // >1 => >=2.0.0-0 // >1.2 => >=1.3.0-0 // >1.2.3 => >= 1.2.4-0 gtlt = '>='; if (xM) { // no change } else if (xm) { M = +M + 1; m = 0; p = 0; } else if (xp) { m = +m + 1; p = 0; } } ret = gtlt + M + '.' + m + '.' + p + '-0'; } else if (xM) { // allow any ret = '*'; } else if (xm) { // append '-0' onto the version, otherwise // '1.x.x' matches '2.0.0-beta', since the tag // *lowers* the version value ret = '>=' + M + '.0.0-0 <' + (+M + 1) + '.0.0-0'; } else if (xp) { ret = '>=' + M + '.' + m + '.0-0 <' + M + '.' + (+m + 1) + '.0-0'; } ; return ret; }); } // Because * is AND-ed with everything else in the comparator, // and '' means "any version", just remove the *s entirely. function replaceStars(comp, loose) { ; // Looseness is ignored here. star is always as loose as it gets! return comp.trim().replace(re[STAR], ''); } // This function is passed to string.replace(re[HYPHENRANGE]) // M, m, patch, prerelease, build // 1.2 - 3.4.5 => >=1.2.0-0 <=3.4.5 // 1.2.3 - 3.4 => >=1.2.0-0 <3.5.0-0 Any 3.4.x will do // 1.2 - 3.4 => >=1.2.0-0 <3.5.0-0 function hyphenReplace($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr, tb) { if (isX(fM)) from = ''; else if (isX(fm)) from = '>=' + fM + '.0.0-0'; else if (isX(fp)) from = '>=' + fM + '.' + fm + '.0-0'; else from = '>=' + from; if (isX(tM)) to = ''; else if (isX(tm)) to = '<' + (+tM + 1) + '.0.0-0'; else if (isX(tp)) to = '<' + tM + '.' + (+tm + 1) + '.0-0'; else if (tpr) to = '<=' + tM + '.' + tm + '.' 
+ tp + '-' + tpr; else to = '<=' + to; return (from + ' ' + to).trim(); } // if ANY of the sets match ALL of its comparators, then pass Range.prototype.test = function(version) { if (!version) return false; for (var i = 0; i < this.set.length; i++) { if (testSet(this.set[i], version)) return true; } return false; }; function testSet(set, version) { for (var i = 0; i < set.length; i++) { if (!set[i].test(version)) return false; } return true; } exports.satisfies = satisfies; function satisfies(version, range, loose) { try { range = new Range(range, loose); } catch (er) { return false; } return range.test(version); } exports.maxSatisfying = maxSatisfying; function maxSatisfying(versions, range, loose) { return versions.filter(function(version) { return satisfies(version, range, loose); }).sort(function(a, b) { return rcompare(a, b, loose); })[0] || null; } exports.validRange = validRange; function validRange(range, loose) { try { // Return '*' instead of '' so that truthiness works. // This will throw if it's invalid anyway return new Range(range, loose).range || '*'; } catch (er) { return null; } } // Use the define() function if we're in AMD land if (typeof define === 'function' && define.amd) define(exports); })( typeof exports === 'object' ? exports : typeof define === 'function' && define.amd ? {} : semver = {} );
{ "pile_set_name": "Github" }
# IMPORTANT: The version must match the version of docker-compose.yml
---
version: '3'
# All additional integrations should be added following this format only.
services:
  # capa analyzer integration container for IntelOwl.
  capa:
    # Image tag tracks the main IntelOwl release version.
    image: intelowlproject/intelowl_capa:${INTELOWL_TAG_VERSION}
    container_name: intelowl_capa
    restart: unless-stopped
    # Port is reachable by linked containers only; not published on the host.
    expose:
      - "4002"
    env_file:
      - env_file_integrations
    volumes:
      # Shared log volume so this service's logs sit with the other
      # IntelOwl logs.
      - generic_logs:/var/log/intel_owl
    depends_on:
      - uwsgi
{ "pile_set_name": "Github" }
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Minimal smoke test for the SLF4J binding: emits one error-level message
 * and exits.
 * Created by 哓哓 on 2015/11/30 0030.
 */
public class TestLog {
    public static void main(String[] args) {
        final Logger logger = LoggerFactory.getLogger(TestLog.class);
        logger.error("**********Test*********");
    }
}
{ "pile_set_name": "Github" }
config PCRE_JIT_ENABLED
	bool
	depends on PACKAGE_libpcre && (arm || i386 || i686 || x86_64 || mips || mipsel || powerpc || sparc)
	default y if (arm || i686 || x86_64)
	prompt "Enable JIT compiler support"
	help
	  Enable JIT (Just-In-Time) compiler support.

	  Enabling this option can give about a 10x performance increase on JIT operations.
	  It can be desirable for e.g. high performance Apache mod_rewrite or HA-Proxy reqrep operations.

	  However, JIT should _only_ be enabled on architectures that are supported. Enabling JIT
	  on unsupported platforms will result in a compilation failure. A list of supported
	  architectures can be found here: https://pcre.org/original/doc/html/pcrejit.html#SEC3 .
{ "pile_set_name": "Github" }
#VRML_SIM R2020b utf8 WorldInfo { coordinateSystem "NUE" } Viewpoint { orientation 0.9201516338443356 0.3902998446880996 0.03141658749840674 5.65888 position -0.321062 0.718685 1.12281 } Background { skyColor [ 0.4 0.7 1 ] } PointLight { attenuation 0 0 1 intensity 0 } Robot { rotation 0 1 0 1.5708 children [ HingeJoint { jointParameters HingeJointParameters { axis 0 1 0 } device [ RotationalMotor { } ] endPoint Solid { children [ Gyro { lookupTable [ -1000 -1000 0 1000 1000 0 ] } ] physics Physics { } } } TestSuiteEmitter { } ] controller "gyro" } TestSuiteSupervisor { }
{ "pile_set_name": "Github" }
const arsenal = require('arsenal'); const async = require('async'); const metadata = require('../metadata/wrapper'); const logger = require('../utilities/logger'); const { loadCachedOverlay, managementDatabaseName, patchConfiguration, } = require('./configuration'); const { initManagementCredentials } = require('./credentials'); const { startWSManagementClient } = require('./push'); const { startPollingManagementClient } = require('./poll'); const { reshapeExceptionError } = arsenal.errorUtils; const { isManagementAgentUsed } = require('./agentClient'); const initRemoteManagementRetryDelay = 10000; const managementEndpointRoot = process.env.MANAGEMENT_ENDPOINT || 'https://api.zenko.io'; const managementEndpoint = `${managementEndpointRoot}/api/v1/instance`; const pushEndpointRoot = process.env.PUSH_ENDPOINT || 'https://push.api.zenko.io'; const pushEndpoint = `${pushEndpointRoot}/api/v1/instance`; function initManagementDatabase(log, callback) { // XXX choose proper owner names const md = new arsenal.models.BucketInfo(managementDatabaseName, 'owner', 'owner display name', new Date().toJSON()); metadata.createBucket(managementDatabaseName, md, log, error => { if (error) { if (error.BucketAlreadyExists) { log.info('created management database'); return callback(); } log.error('could not initialize management database', { error: reshapeExceptionError(error), method: 'initManagementDatabase' }); return callback(error); } log.info('initialized management database'); return callback(); }); } function startManagementListeners(instanceId, token) { const mode = process.env.MANAGEMENT_MODE || 'push'; if (mode === 'push') { const url = `${pushEndpoint}/${instanceId}/ws`; startWSManagementClient(url, token); } else { startPollingManagementClient(managementEndpoint, instanceId, token); } } /** * Initializes Orbit-based management by: * - creating the management database in metadata * - generating a key pair for credentials encryption * - generating an instance-unique ID * - 
getting an authentication token for the API * - loading and applying the latest cached overlay configuration * - starting a configuration update and metrics push background task * * @param {werelogs~Logger} log Request-scoped logger to be able to trace * initialization process * @param {function} callback Function to call once the overlay is loaded * (overlay) * * @returns {undefined} */ function initManagement(log, callback) { if ((process.env.REMOTE_MANAGEMENT_DISABLE && process.env.REMOTE_MANAGEMENT_DISABLE !== '0') || process.env.S3BACKEND === 'mem') { log.info('remote management disabled'); return; } /* Temporary check before to fully move to the process management agent. */ if (isManagementAgentUsed() ^ typeof callback === 'function') { let msg = 'misuse of initManagement function: '; msg += `MANAGEMENT_USE_AGENT: ${process.env.MANAGEMENT_USE_AGENT}`; msg += `, callback type: ${typeof callback}`; throw new Error(msg); } async.waterfall([ // eslint-disable-next-line arrow-body-style cb => { return isManagementAgentUsed() ? 
metadata.setup(cb) : cb(); }, cb => initManagementDatabase(log, cb), cb => metadata.getUUID(log, cb), (instanceId, cb) => initManagementCredentials( managementEndpoint, instanceId, log, cb), (instanceId, token, cb) => { if (!isManagementAgentUsed()) { cb(null, instanceId, token, {}); return; } loadCachedOverlay(log, (err, overlay) => cb(err, instanceId, token, overlay)); }, (instanceId, token, overlay, cb) => { if (!isManagementAgentUsed()) { cb(null, instanceId, token, overlay); return; } patchConfiguration(overlay, log, err => cb(err, instanceId, token, overlay)); }, ], (error, instanceId, token, overlay) => { if (error) { log.error('could not initialize remote management, retrying later', { error: reshapeExceptionError(error), method: 'initManagement' }); setTimeout(initManagement, initRemoteManagementRetryDelay, logger.newRequestLogger()); } else { log.info(`this deployment's Instance ID is ${instanceId}`); log.end('management init done'); startManagementListeners(instanceId, token); if (callback) { callback(overlay); } } }); } module.exports = { initManagement, initManagementDatabase, };
{ "pile_set_name": "Github" }
\ implements split-before, split-after and left-split
\ as described in 4.3 (Path resolution)
\
\ Each word takes a string ( addr len ) and a delimiter character and
\ returns two strings: the part to the right of the split point
\ ( addr-R len-R ) above the part to the left of it ( addr-L len-L ).
\ The header comments below state which side (if any) keeps the
\ delimiter itself.

\ delimiter returned in R-string  (scan forward for first occurrence)
: split-before ( addr len delim -- addr-R len-R addr-L len-L )
   0 rot dup >r 0 ?do ( str char cnt R: len <sys> )
   2 pick over + c@ 2 pick = if leave then 1+ loop
   nip 2dup + r> 2 pick - 2swap
;

\ delimiter returned in L-string  (scan backward for last occurrence)
: split-after ( addr len delim -- addr-R len-R addr-L len-L )
   over 1- rot dup >r 0 ?do ( str char cnt R: len <sys> )
   2 pick over + c@ 2 pick = if leave then 1- loop
   \ cnt < 0 means no delimiter found: whole string becomes the L part
   nip dup 0 >= if 1+ else drop r@ then
   2dup + r> 2 pick - 2swap
;

\ delimiter not returned in either part
: left-split ( addr len delim -- addr-R len-R addr-L len-L )
   0 rot dup >r 0 ?do ( str char cnt R: len <sys> )
   2 pick i + c@ 2 pick = if leave then 1+ loop
   \ skip over the delimiter; `dup if 1- then` avoids a negative length
   \ when the delimiter was not found
   nip 2dup + 1+ r> 2 pick - dup if 1- then 2swap
;

\ delimiter not returned [THIS FUNCTION IS NOT NEEDED]
\ builds on split-after, then trims the trailing delimiter from L if present
: right-split ( addr len delim -- addr-R len-R addr-L len-L )
   dup >r split-after
   dup if 2dup + 1- c@ r@ = if 1- then then
   r> drop
;
{ "pile_set_name": "Github" }
/* FS-Cache statistics
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#define FSCACHE_DEBUG_LEVEL THREAD
#include <linux/module.h>
#include <linux/proc_fs.h>
#include <linux/seq_file.h>
#include "internal.h"

/*
 * operation counters
 *
 * Bare atomic_t event tallies bumped from the FS-Cache fast paths and
 * read (racily, which is acceptable for statistics) only by the
 * /proc/fs/fscache/stats dump implemented later in this file.
 */
atomic_t fscache_n_op_pend;
atomic_t fscache_n_op_run;
atomic_t fscache_n_op_enqueue;
atomic_t fscache_n_op_requeue;
atomic_t fscache_n_op_deferred_release;
atomic_t fscache_n_op_initialised;
atomic_t fscache_n_op_release;
atomic_t fscache_n_op_gc;
atomic_t fscache_n_op_cancelled;
atomic_t fscache_n_op_rejected;

/* attribute-change operation outcomes */
atomic_t fscache_n_attr_changed;
atomic_t fscache_n_attr_changed_ok;
atomic_t fscache_n_attr_changed_nobufs;
atomic_t fscache_n_attr_changed_nomem;
atomic_t fscache_n_attr_changed_calls;

/* page allocation operation outcomes */
atomic_t fscache_n_allocs;
atomic_t fscache_n_allocs_ok;
atomic_t fscache_n_allocs_wait;
atomic_t fscache_n_allocs_nobufs;
atomic_t fscache_n_allocs_intr;
atomic_t fscache_n_allocs_object_dead;
atomic_t fscache_n_alloc_ops;
atomic_t fscache_n_alloc_op_waits;

/* page retrieval operation outcomes */
atomic_t fscache_n_retrievals;
atomic_t fscache_n_retrievals_ok;
atomic_t fscache_n_retrievals_wait;
atomic_t fscache_n_retrievals_nodata;
atomic_t fscache_n_retrievals_nobufs;
atomic_t fscache_n_retrievals_intr;
atomic_t fscache_n_retrievals_nomem;
atomic_t fscache_n_retrievals_object_dead;
atomic_t fscache_n_retrieval_ops;
atomic_t fscache_n_retrieval_op_waits;

/* page storage operation outcomes */
atomic_t fscache_n_stores;
atomic_t fscache_n_stores_ok;
atomic_t fscache_n_stores_again;
atomic_t fscache_n_stores_nobufs;
atomic_t fscache_n_stores_oom;
atomic_t fscache_n_store_ops;
atomic_t fscache_n_store_calls;
atomic_t fscache_n_store_pages;
atomic_t fscache_n_store_radix_deletes;
atomic_t fscache_n_store_pages_over_limit;

/* interactions with page reclaim (vmscan) */
atomic_t fscache_n_store_vmscan_not_storing;
atomic_t fscache_n_store_vmscan_gone;
atomic_t fscache_n_store_vmscan_busy;
atomic_t fscache_n_store_vmscan_cancelled;
atomic_t fscache_n_store_vmscan_wait;

atomic_t fscache_n_marks;
atomic_t fscache_n_uncaches;

/* cookie acquisition outcomes */
atomic_t fscache_n_acquires;
atomic_t fscache_n_acquires_null;
atomic_t fscache_n_acquires_no_cache;
atomic_t fscache_n_acquires_ok;
atomic_t fscache_n_acquires_nobufs;
atomic_t fscache_n_acquires_oom;

atomic_t fscache_n_invalidates;
atomic_t fscache_n_invalidates_run;

atomic_t fscache_n_updates;
atomic_t fscache_n_updates_null;
atomic_t fscache_n_updates_run;

atomic_t fscache_n_relinquishes;
atomic_t fscache_n_relinquishes_null;
atomic_t fscache_n_relinquishes_waitcrt;
atomic_t fscache_n_relinquishes_retire;

/* counts by cookie type */
atomic_t fscache_n_cookie_index;
atomic_t fscache_n_cookie_data;
atomic_t fscache_n_cookie_special;

/* backing-object lifecycle counters */
atomic_t fscache_n_object_alloc;
atomic_t fscache_n_object_no_alloc;
atomic_t fscache_n_object_lookups;
atomic_t fscache_n_object_lookups_negative;
atomic_t fscache_n_object_lookups_positive;
atomic_t fscache_n_object_lookups_timed_out;
atomic_t fscache_n_object_created;
atomic_t fscache_n_object_avail;
atomic_t fscache_n_object_dead;

/* auxiliary-data coherency check results */
atomic_t fscache_n_checkaux_none;
atomic_t fscache_n_checkaux_okay;
atomic_t fscache_n_checkaux_update;
atomic_t fscache_n_checkaux_obsolete;

/* calls dispatched into the cache backend's operations table */
atomic_t fscache_n_cop_alloc_object;
atomic_t fscache_n_cop_lookup_object;
atomic_t fscache_n_cop_lookup_complete;
atomic_t fscache_n_cop_grab_object;
atomic_t fscache_n_cop_invalidate_object;
atomic_t fscache_n_cop_update_object;
atomic_t fscache_n_cop_drop_object;
atomic_t fscache_n_cop_put_object;
atomic_t fscache_n_cop_sync_cache;
atomic_t fscache_n_cop_attr_changed;
atomic_t fscache_n_cop_read_or_alloc_page;
atomic_t fscache_n_cop_read_or_alloc_pages;
atomic_t fscache_n_cop_allocate_page;
atomic_t fscache_n_cop_allocate_pages;
atomic_t fscache_n_cop_write_page;
atomic_t fscache_n_cop_uncache_page;
atomic_t fscache_n_cop_dissociate_pages;

/* cache-level events reported by the backend */
atomic_t fscache_n_cache_no_space_reject;
atomic_t fscache_n_cache_stale_objects;
atomic_t fscache_n_cache_retired_objects;
atomic_t fscache_n_cache_culled_objects;

/*
 * display the general statistics
 *
 * Dumps every counter above as a labelled line; each value is an
 * independent racy snapshot, so lines need not be mutually consistent.
 */
static int fscache_stats_show(struct seq_file *m, void *v)
{
	seq_puts(m, "FS-Cache statistics\n");

	seq_printf(m, "Cookies: idx=%u dat=%u spc=%u\n",
		   atomic_read(&fscache_n_cookie_index),
		   atomic_read(&fscache_n_cookie_data),
		   atomic_read(&fscache_n_cookie_special));

	seq_printf(m, "Objects: alc=%u nal=%u avl=%u ded=%u\n",
		   atomic_read(&fscache_n_object_alloc),
		   atomic_read(&fscache_n_object_no_alloc),
		   atomic_read(&fscache_n_object_avail),
		   atomic_read(&fscache_n_object_dead));
	seq_printf(m, "ChkAux : non=%u ok=%u upd=%u obs=%u\n",
		   atomic_read(&fscache_n_checkaux_none),
		   atomic_read(&fscache_n_checkaux_okay),
		   atomic_read(&fscache_n_checkaux_update),
		   atomic_read(&fscache_n_checkaux_obsolete));

	seq_printf(m, "Pages : mrk=%u unc=%u\n",
		   atomic_read(&fscache_n_marks),
		   atomic_read(&fscache_n_uncaches));

	seq_printf(m, "Acquire: n=%u nul=%u noc=%u ok=%u nbf=%u"
		   " oom=%u\n",
		   atomic_read(&fscache_n_acquires),
		   atomic_read(&fscache_n_acquires_null),
		   atomic_read(&fscache_n_acquires_no_cache),
		   atomic_read(&fscache_n_acquires_ok),
		   atomic_read(&fscache_n_acquires_nobufs),
		   atomic_read(&fscache_n_acquires_oom));

	/* note: crt prints fscache_n_object_created, tmo the timed-out count */
	seq_printf(m, "Lookups: n=%u neg=%u pos=%u crt=%u tmo=%u\n",
		   atomic_read(&fscache_n_object_lookups),
		   atomic_read(&fscache_n_object_lookups_negative),
		   atomic_read(&fscache_n_object_lookups_positive),
		   atomic_read(&fscache_n_object_created),
		   atomic_read(&fscache_n_object_lookups_timed_out));

	seq_printf(m, "Invals : n=%u run=%u\n",
		   atomic_read(&fscache_n_invalidates),
		   atomic_read(&fscache_n_invalidates_run));

	seq_printf(m, "Updates: n=%u nul=%u run=%u\n",
		   atomic_read(&fscache_n_updates),
		   atomic_read(&fscache_n_updates_null),
		   atomic_read(&fscache_n_updates_run));

	seq_printf(m, "Relinqs: n=%u nul=%u wcr=%u rtr=%u\n",
		   atomic_read(&fscache_n_relinquishes),
		   atomic_read(&fscache_n_relinquishes_null),
		   atomic_read(&fscache_n_relinquishes_waitcrt),
		   atomic_read(&fscache_n_relinquishes_retire));

	seq_printf(m, "AttrChg: n=%u ok=%u nbf=%u oom=%u run=%u\n",
		   atomic_read(&fscache_n_attr_changed),
		   atomic_read(&fscache_n_attr_changed_ok),
		   atomic_read(&fscache_n_attr_changed_nobufs),
		   atomic_read(&fscache_n_attr_changed_nomem),
		   atomic_read(&fscache_n_attr_changed_calls));

	seq_printf(m, "Allocs : n=%u ok=%u wt=%u nbf=%u int=%u\n",
		   atomic_read(&fscache_n_allocs),
		   atomic_read(&fscache_n_allocs_ok),
		   atomic_read(&fscache_n_allocs_wait),
		   atomic_read(&fscache_n_allocs_nobufs),
		   atomic_read(&fscache_n_allocs_intr));
	seq_printf(m, "Allocs : ops=%u owt=%u abt=%u\n",
		   atomic_read(&fscache_n_alloc_ops),
		   atomic_read(&fscache_n_alloc_op_waits),
		   atomic_read(&fscache_n_allocs_object_dead));

	seq_printf(m, "Retrvls: n=%u ok=%u wt=%u nod=%u nbf=%u"
		   " int=%u oom=%u\n",
		   atomic_read(&fscache_n_retrievals),
		   atomic_read(&fscache_n_retrievals_ok),
		   atomic_read(&fscache_n_retrievals_wait),
		   atomic_read(&fscache_n_retrievals_nodata),
		   atomic_read(&fscache_n_retrievals_nobufs),
		   atomic_read(&fscache_n_retrievals_intr),
		   atomic_read(&fscache_n_retrievals_nomem));
	seq_printf(m, "Retrvls: ops=%u owt=%u abt=%u\n",
		   atomic_read(&fscache_n_retrieval_ops),
		   atomic_read(&fscache_n_retrieval_op_waits),
		   atomic_read(&fscache_n_retrievals_object_dead));

	seq_printf(m, "Stores : n=%u ok=%u agn=%u nbf=%u oom=%u\n",
		   atomic_read(&fscache_n_stores),
		   atomic_read(&fscache_n_stores_ok),
		   atomic_read(&fscache_n_stores_again),
		   atomic_read(&fscache_n_stores_nobufs),
		   atomic_read(&fscache_n_stores_oom));
	seq_printf(m, "Stores : ops=%u run=%u pgs=%u rxd=%u olm=%u\n",
		   atomic_read(&fscache_n_store_ops),
		   atomic_read(&fscache_n_store_calls),
		   atomic_read(&fscache_n_store_pages),
		   atomic_read(&fscache_n_store_radix_deletes),
		   atomic_read(&fscache_n_store_pages_over_limit));

	seq_printf(m, "VmScan : nos=%u gon=%u bsy=%u can=%u wt=%u\n",
		   atomic_read(&fscache_n_store_vmscan_not_storing),
		   atomic_read(&fscache_n_store_vmscan_gone),
		   atomic_read(&fscache_n_store_vmscan_busy),
		   atomic_read(&fscache_n_store_vmscan_cancelled),
		   atomic_read(&fscache_n_store_vmscan_wait));

	seq_printf(m, "Ops : pend=%u run=%u enq=%u can=%u rej=%u\n",
		   atomic_read(&fscache_n_op_pend),
		   atomic_read(&fscache_n_op_run),
		   atomic_read(&fscache_n_op_enqueue),
		   atomic_read(&fscache_n_op_cancelled),
		   atomic_read(&fscache_n_op_rejected));
	seq_printf(m, "Ops : ini=%u dfr=%u rel=%u gc=%u\n",
		   atomic_read(&fscache_n_op_initialised),
		   atomic_read(&fscache_n_op_deferred_release),
		   atomic_read(&fscache_n_op_release),
		   atomic_read(&fscache_n_op_gc));

	seq_printf(m, "CacheOp: alo=%d luo=%d luc=%d gro=%d\n",
		   atomic_read(&fscache_n_cop_alloc_object),
		   atomic_read(&fscache_n_cop_lookup_object),
		   atomic_read(&fscache_n_cop_lookup_complete),
		   atomic_read(&fscache_n_cop_grab_object));
	seq_printf(m, "CacheOp: inv=%d upo=%d dro=%d pto=%d atc=%d syn=%d\n",
		   atomic_read(&fscache_n_cop_invalidate_object),
		   atomic_read(&fscache_n_cop_update_object),
		   atomic_read(&fscache_n_cop_drop_object),
		   atomic_read(&fscache_n_cop_put_object),
		   atomic_read(&fscache_n_cop_attr_changed),
		   atomic_read(&fscache_n_cop_sync_cache));
	seq_printf(m, "CacheOp: rap=%d ras=%d alp=%d als=%d wrp=%d ucp=%d dsp=%d\n",
		   atomic_read(&fscache_n_cop_read_or_alloc_page),
		   atomic_read(&fscache_n_cop_read_or_alloc_pages),
		   atomic_read(&fscache_n_cop_allocate_page),
		   atomic_read(&fscache_n_cop_allocate_pages),
		   atomic_read(&fscache_n_cop_write_page),
		   atomic_read(&fscache_n_cop_uncache_page),
		   atomic_read(&fscache_n_cop_dissociate_pages));

	seq_printf(m, "CacheEv: nsp=%d stl=%d rtr=%d cul=%d\n",
		   atomic_read(&fscache_n_cache_no_space_reject),
		   atomic_read(&fscache_n_cache_stale_objects),
		   atomic_read(&fscache_n_cache_retired_objects),
		   atomic_read(&fscache_n_cache_culled_objects));
	return 0;
}

/*
 * open "/proc/fs/fscache/stats" allowing provision of a statistical summary
 */
static int fscache_stats_open(struct inode *inode, struct file *file)
{
	/* single_open(): the whole dump is produced by one show() call */
	return single_open(file, fscache_stats_show, NULL);
}

const struct file_operations fscache_stats_fops = {
	.open		= fscache_stats_open,
	.read		= seq_read,
	.llseek		= seq_lseek,
	.release	= single_release,
};
{ "pile_set_name": "Github" }
#include <vector>

#include "gtest/gtest.h"

#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/filler.hpp"
#include "caffe/layers/im2col_layer.hpp"
#include "caffe/util/im2col.hpp"

#include "caffe/test/test_caffe_main.hpp"

namespace caffe {

// Forward declare kernel functions (defined in the im2col .cu source) so
// the tests below can launch them directly with custom grid sizes.
template <typename Dtype>
__global__ void im2col_gpu_kernel(const int n, const Dtype* data_im,
    const int height, const int width, const int kernel_h, const int kernel_w,
    const int pad_h, const int pad_w,
    const int stride_h, const int stride_w,
    const int dilation_h, const int dilation_w,
    const int height_col, const int width_col,
    Dtype* data_col);

template <typename Dtype, int num_axes>
__global__ void im2col_nd_gpu_kernel(const int n, const Dtype* data_im,
    const int* im_shape, const int* col_shape,
    const int* kernel_shape, const int* pad, const int* stride,
    const int* dilation, Dtype* data_col);

// Fixture comparing the GPU im2col kernels against the CPU reference
// implementation, for both the 2-D and the N-D kernel variants.
template <typename Dtype>
class Im2colKernelTest : public GPUDeviceTest<Dtype> {
 protected:
  Im2colKernelTest()
        // big so launches > 1024 threads
      : blob_bottom_(new Blob<Dtype>(5, 500, 15, 15)),
        blob_kernel_shape_(new Blob<int>()),
        blob_stride_(new Blob<int>()),
        blob_pad_(new Blob<int>()),
        blob_dilation_(new Blob<int>()),
        blob_top_(new Blob<Dtype>()),
        blob_top_cpu_(new Blob<Dtype>()) {
    FillerParameter filler_param;
    GaussianFiller<Dtype> filler(filler_param);
    filler.Fill(this->blob_bottom_);
    // Shape the per-axis parameter blobs for 2 spatial axes.
    vector<int> dim_blob_shape(1, 2);
    blob_kernel_shape_->Reshape(dim_blob_shape);
    blob_stride_->Reshape(dim_blob_shape);
    blob_pad_->Reshape(dim_blob_shape);
    blob_dilation_->Reshape(dim_blob_shape);

    height_ = blob_bottom_->height();
    width_ = blob_bottom_->width();
    channels_ = blob_bottom_->channels();
    pad_ = 0;
    stride_ = 2;
    dilation_ = 3;
    kernel_size_ = 3;
    // Standard convolution output-size formula with dilation.
    height_col_ = (height_ + 2 * pad_ -
        (dilation_ * (kernel_size_ - 1) + 1)) / stride_ + 1;
    width_col_ = (width_ + 2 * pad_ -
        (dilation_ * (kernel_size_ - 1) + 1)) / stride_ + 1;

    // Same parameter value on both spatial axes.
    for (int i = 0; i < 2; ++i) {
      blob_kernel_shape_->mutable_cpu_data()[i] = kernel_size_;
      blob_stride_->mutable_cpu_data()[i] = stride_;
      blob_pad_->mutable_cpu_data()[i] = pad_;
      blob_dilation_->mutable_cpu_data()[i] = dilation_;
    }
  }

  virtual ~Im2colKernelTest() {
    delete blob_bottom_;
    delete blob_top_;
    delete blob_top_cpu_;
    delete blob_kernel_shape_;
    delete blob_stride_;
    delete blob_pad_;
    delete blob_dilation_;
  }

  // Per-axis kernel/stride/pad/dilation parameters for the N-D kernel.
  Blob<int>* const blob_kernel_shape_;
  Blob<int>* const blob_stride_;
  Blob<int>* const blob_pad_;
  Blob<int>* const blob_dilation_;
  Blob<Dtype>* const blob_bottom_;   // input image data
  Blob<Dtype>* const blob_top_;      // GPU im2col output
  Blob<Dtype>* const blob_top_cpu_;  // CPU reference output
  int height_;
  int width_;
  int channels_;
  int pad_;
  int stride_;
  int dilation_;
  int kernel_size_;
  int height_col_;
  int width_col_;
};

TYPED_TEST_CASE(Im2colKernelTest, TestDtypes);

TYPED_TEST(Im2colKernelTest, Test2D) {
  // Reshape the blobs to correct size for im2col output
  this->blob_top_->Reshape(this->blob_bottom_->num(),
      this->channels_ * this->kernel_size_ * this->kernel_size_,
      this->height_col_,
      this->width_col_);

  this->blob_top_cpu_->Reshape(this->blob_bottom_->num(),
      this->channels_ * this->kernel_size_ * this->kernel_size_,
      this->height_col_,
      this->width_col_);

  const TypeParam* bottom_data = this->blob_bottom_->gpu_data();
  TypeParam* top_data = this->blob_top_->mutable_gpu_data();
  TypeParam* cpu_data = this->blob_top_cpu_->mutable_cpu_data();

  // CPU Version
  for (int n = 0; n < this->blob_bottom_->num(); ++n) {
    im2col_cpu(this->blob_bottom_->cpu_data() + this->blob_bottom_->offset(n),
      this->channels_, this->height_, this->width_,
      this->kernel_size_, this->kernel_size_, this->pad_, this->pad_,
      this->stride_, this->stride_, this->dilation_, this->dilation_,
      cpu_data + this->blob_top_cpu_->offset(n));
  }

  // GPU version
  int num_kernels = this->channels_ * this->height_col_ * this->width_col_;
  int default_grid_dim = CAFFE_GET_BLOCKS(num_kernels);

  // Launch with different grid sizes: the kernel must produce the same
  // result regardless of how few blocks it is given.
  for (int grid_div = 2; grid_div <= 8; grid_div++) {
    for (int n = 0; n < this->blob_bottom_->num(); ++n) {
      int grid_dim = default_grid_dim/grid_div;
      // NOLINT_NEXT_LINE(whitespace/operators)
      im2col_gpu_kernel<TypeParam><<<grid_dim, CAFFE_CUDA_NUM_THREADS>>>(
        num_kernels, bottom_data + this->blob_bottom_->offset(n),
        this->height_, this->width_, this->kernel_size_, this->kernel_size_,
        this->pad_, this->pad_, this->stride_, this->stride_,
        this->dilation_, this->dilation_,
        this->height_col_, this->width_col_,
        top_data + this->blob_top_->offset(n));
      CUDA_POST_KERNEL_CHECK;
    }

    // Compare results against CPU version
    for (int i = 0; i < this->blob_top_->count(); ++i) {
      TypeParam cpuval = cpu_data[i];
      TypeParam gpuval = this->blob_top_->cpu_data()[i];
      EXPECT_EQ(cpuval, gpuval);
      if (cpuval != gpuval) {
        // Stop at the first mismatch to avoid flooding the test log.
        break;
      }
    }
  }
}

TYPED_TEST(Im2colKernelTest, TestND) {
  // Reshape the blobs to correct size for im2col output
  this->blob_top_->Reshape(this->blob_bottom_->num(),
      this->channels_ * this->kernel_size_ * this->kernel_size_,
      this->height_col_,
      this->width_col_);

  this->blob_top_cpu_->ReshapeLike(*this->blob_top_);

  const TypeParam* bottom_data_cpu = this->blob_bottom_->cpu_data();
  TypeParam* top_data_cpu = this->blob_top_cpu_->mutable_cpu_data();

  // CPU Version
  for (int n = 0; n < this->blob_bottom_->num(); ++n) {
    im2col_nd_cpu(bottom_data_cpu + this->blob_bottom_->offset(n), 2,
        this->blob_bottom_->shape().data() + 1,
        this->blob_top_cpu_->shape().data() + 1,
        this->blob_kernel_shape_->cpu_data(),
        this->blob_pad_->cpu_data(), this->blob_stride_->cpu_data(),
        this->blob_dilation_->cpu_data(),
        top_data_cpu + this->blob_top_cpu_->offset(n));
  }

  // GPU version
  int num_kernels = this->channels_ * this->height_col_ * this->width_col_;
  int default_grid_dim = CAFFE_GET_BLOCKS(num_kernels);
  const TypeParam* bottom_data_gpu = this->blob_bottom_->gpu_data();

  // Launch with different grid sizes (see Test2D for rationale).
  for (int grid_div = 2; grid_div <= 8; grid_div++) {
    for (int n = 0; n < this->blob_bottom_->num(); ++n) {
      const int grid_dim = default_grid_dim / grid_div;
      TypeParam* top_data_gpu = this->blob_top_->mutable_gpu_data();
      // NOLINT_NEXT_LINE(whitespace/operators)
      im2col_nd_gpu_kernel<TypeParam, 2><<<grid_dim, CAFFE_CUDA_NUM_THREADS>>>(
        num_kernels, bottom_data_gpu + this->blob_bottom_->offset(n),
        this->blob_bottom_->gpu_shape() + 1, this->blob_top_->gpu_shape() + 1,
        this->blob_kernel_shape_->gpu_data(), this->blob_pad_->gpu_data(),
        this->blob_stride_->gpu_data(), this->blob_dilation_->gpu_data(),
        top_data_gpu + this->blob_top_->offset(n));
      CUDA_POST_KERNEL_CHECK;
    }

    // Compare results against CPU version
    for (int i = 0; i < this->blob_top_->count(); ++i) {
      TypeParam cpuval = top_data_cpu[i];
      TypeParam gpuval = this->blob_top_->cpu_data()[i];
      EXPECT_EQ(cpuval, gpuval);
      if (cpuval != gpuval) {
        // Stop at the first mismatch to avoid flooding the test log.
        break;
      }
    }
  }
}

}  // namespace caffe
{ "pile_set_name": "Github" }
/*
Copyright 2017 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package config

import (
	"crypto/sha256"
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
	"time"
	"unicode/utf8"

	"github.com/golang/glog"
	"k8s.io/client-go/util/clock"
)

// NewFileSync returns a Sync that scans the given dir periodically for config data
func NewFileSync(dir string, period time.Duration) Sync {
	return newSync(newFileSyncSource(dir, period, clock.RealClock{}))
}

// newFileSyncSource returns a syncSource that scans the given dir periodically as determined by the specified clock
func newFileSyncSource(dir string, period time.Duration, clock clock.Clock) syncSource {
	return &kubeFileSyncSource{
		dir:     dir,
		clock:   clock,
		period:  period,
		channel: make(chan syncResult),
	}
}

// kubeFileSyncSource loads config data from the files directly under dir.
type kubeFileSyncSource struct {
	dir     string        // directory scanned for config files
	clock   clock.Clock   // injectable clock, enables deterministic tests
	period  time.Duration // interval between scans in Periodic mode
	channel chan syncResult
}

var _ syncSource = (*kubeFileSyncSource)(nil)

// Once performs a single synchronous scan of the directory.
func (syncSource *kubeFileSyncSource) Once() (syncResult, error) {
	return syncSource.load()
}

// Periodic starts a goroutine that scans the directory every period and
// publishes successful results on the returned channel; scan errors are
// logged and skipped (no result is sent for a failed scan).
func (syncSource *kubeFileSyncSource) Periodic() <-chan syncResult {
	// TODO: drive via inotify?
	go func() {
		ticker := syncSource.clock.Tick(syncSource.period)
		for {
			if result, err := syncSource.load(); err != nil {
				glog.Errorf("Error loading config from %s: %v", syncSource.dir, err)
			} else {
				syncSource.channel <- result
			}
			<-ticker
		}
	}()
	return syncSource.channel
}

// load reads every regular, non-hidden file directly under dir (no
// recursion) into a filename->contents map and derives a version string
// from a SHA-256 hash over the sorted walk of (filename NUL contents NUL)
// pairs, so the version changes whenever any file name or content does.
func (syncSource *kubeFileSyncSource) load() (syncResult, error) {
	hasher := sha256.New()
	data := map[string]string{}
	err := filepath.Walk(syncSource.dir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		// special case for the root
		if path == syncSource.dir {
			if info.IsDir() {
				return nil
			}
			return fmt.Errorf("config path %q is not a directory", path)
		}
		// don't recurse
		if info.IsDir() {
			return filepath.SkipDir
		}
		// skip hidden files
		filename := filepath.Base(path)
		if strings.HasPrefix(filename, ".") {
			return nil
		}

		filedata, err := ioutil.ReadFile(path)
		if err != nil {
			return err
		}
		// reject binary content: config values must be valid UTF-8
		if !utf8.Valid(filedata) {
			return fmt.Errorf("non-utf8 data in %s", path)
		}

		// Add data to version hash; NUL separators prevent ambiguity
		// between name and content boundaries.
		hasher.Write([]byte(filename))
		hasher.Write([]byte{0})
		hasher.Write(filedata)
		hasher.Write([]byte{0})

		// Add data to map
		data[filename] = string(filedata)
		return nil
	})
	if err != nil {
		return syncResult{}, err
	}

	// compute a version string from the hashed data; an empty directory
	// deliberately yields an empty version
	version := ""
	if len(data) > 0 {
		version = fmt.Sprintf("%x", hasher.Sum(nil))
	}

	return syncResult{Version: version, Data: data}, nil
}
{ "pile_set_name": "Github" }
package com.fasterxml.jackson.dataformat.avro.deser;

import java.io.IOException;

import com.fasterxml.jackson.core.TokenStreamContext;
import com.fasterxml.jackson.core.sym.FieldNameMatcher;
import com.fasterxml.jackson.core.JsonToken;

/**
 * We need to use a custom context to be able to carry along
 * Object and array records.
 */
public abstract class AvroReadContext extends TokenStreamContext
{
    /** Enclosing read context; null for the root context. */
    protected final AvroReadContext _parent;

    /** Avro type id associated with this context's value, possibly null. */
    protected final String _typeId;

    /** Token most recently produced within this context. */
    protected JsonToken _currToken;

    /** Arbitrary client value carried per {@link TokenStreamContext}. */
    protected Object _currentValue;

    /*
    /**********************************************************************
    /* Instance construction
    /**********************************************************************
     */

    public AvroReadContext(AvroReadContext parent, String typeId)
    {
        super();
        _parent = parent;
        _typeId = typeId;
    }

    /*
    /**********************************************************************
    /* Traversal
    /**********************************************************************
     */

    /** Advances to and returns the next token within this context. */
    public abstract JsonToken nextToken() throws IOException;

    /** Advances to the next field and returns its name, or null if none. */
    public abstract String nextFieldName() throws IOException;

    // @since 3.0
    public abstract int nextFieldName(FieldNameMatcher matcher) throws IOException;

    /** Skips the value the given parser is currently positioned at. */
    public abstract void skipValue(AvroParserImpl parser) throws IOException;

    // Default: unknown element count; array/map contexts may override.
    public long getRemainingElements() {
        return -1L;
    }

    @Override
    public Object getCurrentValue() {
        return _currentValue;
    }

    @Override
    public void setCurrentValue(Object v) {
        _currentValue = v;
    }

    /*
    /**********************************************************************
    /* Accessors
    /**********************************************************************
     */

    // Base contexts have no current name; record contexts override.
    @Override
    public String currentName() { return null; }

    public final JsonToken currentToken() {
        return _currToken;
    }

    @Override
    public final AvroReadContext getParent() { return _parent; }

    /** Appends a short description of this context for {@link #toString}. */
    protected abstract void appendDesc(StringBuilder sb);

    public String getTypeId() {
        return _typeId;
    }

    // !!! TODO: implement from here
    /**
     * @since 2.8.7
    public abstract boolean isEnd() { }
     */

    /*
    /**********************************************************************
    /* Helper methods
    /**********************************************************************
     */

    // Thrown by "missing schema" placeholder contexts.
    protected void _reportError() {
        throw new IllegalStateException("Can not read Avro input without specifying Schema");
    }

    /*
    /**********************************************************************
    /* Overridden standard methods
    /**********************************************************************
     */

    /**
     * Overridden to provide developer writeable "JsonPath" representation
     * of the context.
     */
    @Override
    public final String toString()
    {
        StringBuilder sb = new StringBuilder(64);
        appendDesc(sb);
        return sb.toString();
    }
}
{ "pile_set_name": "Github" }
<===> input.scss foo:nth-child(#{5 + "n"}) {a: b} <===> output.css foo:nth-child(5n) { a: b; }
{ "pile_set_name": "Github" }
VERSION 5.8 ; DIVIDERCHAR "/" ; BUSBITCHARS "[]" ; DESIGN RocketTile ; UNITS DISTANCE MICRONS 2000 ; DIEAREA ( 0 0 ) ( 1849840 1598800 ) ; ROW ROW_0 FreePDK45_38x28_10R_NP_162NW_34O 28000 28000 FS DO 31 BY 1 STEP 380 0 ; ROW ROW_1 FreePDK45_38x28_10R_NP_162NW_34O 28000 30800 N DO 31 BY 1 STEP 380 0 ; TRACKS X 190 DO 4868 STEP 380 LAYER metal1 ; TRACKS Y 140 DO 5710 STEP 280 LAYER metal1 ; TRACKS X 190 DO 4868 STEP 380 LAYER metal2 ; TRACKS Y 140 DO 5710 STEP 280 LAYER metal2 ; TRACKS X 190 DO 4868 STEP 380 LAYER metal3 ; TRACKS Y 140 DO 5710 STEP 280 LAYER metal3 ; TRACKS X 190 DO 3303 STEP 560 LAYER metal4 ; TRACKS Y 140 DO 2855 STEP 560 LAYER metal4 ; TRACKS X 190 DO 3303 STEP 560 LAYER metal5 ; TRACKS Y 140 DO 2855 STEP 560 LAYER metal5 ; TRACKS X 190 DO 3303 STEP 560 LAYER metal6 ; TRACKS Y 140 DO 2855 STEP 560 LAYER metal6 ; TRACKS X 190 DO 1157 STEP 1600 LAYER metal7 ; TRACKS Y 140 DO 1000 STEP 1600 LAYER metal7 ; TRACKS X 190 DO 1157 STEP 1600 LAYER metal8 ; TRACKS Y 140 DO 1000 STEP 1600 LAYER metal8 ; TRACKS X 190 DO 579 STEP 3200 LAYER metal9 ; TRACKS Y 140 DO 500 STEP 3200 LAYER metal9 ; TRACKS X 190 DO 579 STEP 3200 LAYER metal10 ; TRACKS Y 140 DO 500 STEP 3200 LAYER metal10 ; VIAS 9 ; - via1_960x340 + VIARULE Via1Array-0 + CUTSIZE 140 140 + LAYERS metal1 via1 metal2 + CUTSPACING 160 160 + ENCLOSURE 110 100 70 100 + ROWCOL 1 3 ; - via2_960x340 + VIARULE Via2Array-0 + CUTSIZE 140 140 + LAYERS metal2 via2 metal3 + CUTSPACING 180 180 + ENCLOSURE 70 100 90 70 + ROWCOL 1 3 ; - via3_960x340 + VIARULE Via3Array-0 + CUTSIZE 140 140 + LAYERS metal3 via3 metal4 + CUTSPACING 180 180 + ENCLOSURE 90 70 90 100 + ROWCOL 1 3 ; - via4_960x2800 + VIARULE Via4Array-0 + CUTSIZE 280 280 + LAYERS metal4 via4 metal5 + CUTSPACING 320 320 + ENCLOSURE 40 60 40 0 + ROWCOL 5 2 ; - via5_960x2800 + VIARULE Via5Array-0 + CUTSIZE 280 280 + LAYERS metal5 via5 metal6 + CUTSPACING 320 320 + ENCLOSURE 40 0 0 60 + ROWCOL 5 2 ; - via6_960x2800 + VIARULE Via6Array-0 + CUTSIZE 280 280 + 
LAYERS metal6 via6 metal7 + CUTSPACING 320 320 + ENCLOSURE 0 60 340 260 + ROWCOL 5 1 ; - via4_560x1860 + VIARULE Via4Array-0 + CUTSIZE 280 280 + LAYERS metal4 via4 metal5 + CUTSPACING 320 320 + ENCLOSURE 140 190 140 190 + ROWCOL 3 1 ; - via5_1860x1860 + VIARULE Via5Array-0 + CUTSIZE 280 280 + LAYERS metal5 via5 metal6 + CUTSPACING 320 320 + ENCLOSURE 190 190 190 190 + ROWCOL 3 3 ; - via6_1860x2800 + VIARULE Via6Array-0 + CUTSIZE 280 280 + LAYERS metal6 via6 metal7 + CUTSPACING 320 320 + ENCLOSURE 490 60 490 260 + ROWCOL 5 2 ; END VIAS COMPONENTS 4 ; - u1 BUF_X1 + PLACED ( 31040 28000 ) N ; - pad1 PAD + FIXED ( 809120 2000000 ) N ; END COMPONENTS PINS 1 ; - in1 + NET in1 + DIRECTION INPUT + USE SIGNAL + LAYER metal1 ( 100 0 ) ( 100 100 ) + FIXED ( 0 0 ) N ; END PINS SPECIALNETS 2 ; - VDD ( * VDD ) + USE POWER ; - VSS ( * VSS ) + USE GROUND ; END SPECIALNETS NETS 1 ; - in1 ( PIN in1 ) ( u1 A ) ; - n1 ( u1 Z ) ( pad1 DATA ) ; END NETS END DESIGN
{ "pile_set_name": "Github" }
package tss.tpm;

import tss.*;

// -----------This is an auto-generated file: do not edit

//>>>

/** This command is used to determine which commands require assertion of Physical
 *  Presence (PP) in addition to platformAuth/platformPolicy.
 */
public class TPM2_PP_Commands_REQUEST extends ReqStructure
{
    /** TPM_RH_PLATFORM+PP
     *  Auth Index: 1
     *  Auth Role: USER + Physical Presence
     */
    public TPM_HANDLE auth;

    /** List of commands to be added to those that will require that Physical Presence be asserted */
    public TPM_CC[] setList;

    /** List of commands that will no longer require that Physical Presence be asserted */
    public TPM_CC[] clearList;

    /** Default constructor: initializes the auth handle to an empty TPM_HANDLE. */
    public TPM2_PP_Commands_REQUEST() { auth = new TPM_HANDLE(); }

    /** @param _auth TPM_RH_PLATFORM+PP
     *         Auth Index: 1
     *         Auth Role: USER + Physical Presence
     *  @param _setList List of commands to be added to those that will require that Physical
     *         Presence be asserted
     *  @param _clearList List of commands that will no longer require that Physical Presence
     *         be asserted
     */
    public TPM2_PP_Commands_REQUEST(TPM_HANDLE _auth, TPM_CC[] _setList, TPM_CC[] _clearList)
    {
        auth = _auth;
        setList = _setList;
        clearList = _clearList;
    }

    /** TpmMarshaller method.
     *  Note that only the parameter area (setList/clearList) is written here; the
     *  auth handle is exposed separately via numHandles()/getHandles().
     */
    @Override
    public void toTpm(TpmBuffer buf)
    {
        buf.writeObjArr(setList);
        buf.writeObjArr(clearList);
    }

    /** TpmMarshaller method: unmarshals the parameter area (setList/clearList). */
    @Override
    public void initFromTpm(TpmBuffer buf)
    {
        setList = buf.readObjArr(TPM_CC.class);
        clearList = buf.readObjArr(TPM_CC.class);
    }

    /** @deprecated Use {@link #toBytes()} instead
     *  @return Wire (marshaled) representation of this object
     */
    public byte[] toTpm () { return toBytes(); }

    /** Static marshaling helper
     *  @param byteBuf Wire representation of the object
     *  @return New object constructed from its wire representation
     */
    public static TPM2_PP_Commands_REQUEST fromBytes (byte[] byteBuf)
    {
        return new TpmBuffer(byteBuf).createObj(TPM2_PP_Commands_REQUEST.class);
    }

    /** @deprecated Use {@link #fromBytes(byte[])} instead
     *  @param byteBuf Wire representation of the object
     *  @return New object constructed from its wire representation
     */
    public static TPM2_PP_Commands_REQUEST fromTpm (byte[] byteBuf)  { return fromBytes(byteBuf); }

    /** Static marshaling helper
     *  @param buf Wire representation of the object
     *  @return New object constructed from its wire representation
     */
    public static TPM2_PP_Commands_REQUEST fromTpm (TpmBuffer buf)
    {
        return buf.createObj(TPM2_PP_Commands_REQUEST.class);
    }

    @Override
    public String toString()
    {
        TpmStructurePrinter _p = new TpmStructurePrinter("TPM2_PP_Commands_REQUEST");
        toStringInternal(_p, 1);
        _p.endStruct();
        return _p.toString();
    }

    @Override
    public void toStringInternal(TpmStructurePrinter _p, int d)
    {
        _p.add(d, "TPM_HANDLE", "auth", auth);
        _p.add(d, "TPM_CC[]", "setList", setList);
        _p.add(d, "TPM_CC[]", "clearList", clearList);
    }

    /** One handle (auth) precedes the parameter area on the wire. */
    @Override
    public int numHandles() { return 1; }

    /** The single handle requires an authorization session. */
    @Override
    public int numAuthHandles() { return 1; }

    @Override
    public TPM_HANDLE[] getHandles() { return new TPM_HANDLE[] {auth}; }

    /** Session-encryption metadata: first parameter is a 4-byte-sized array of
     *  4-byte values (the TPM_CC list).
     */
    @Override
    public SessEncInfo sessEncInfo() { return new SessEncInfo(4, 4); }
}

//<<<
{ "pile_set_name": "Github" }
'use strict'; module.exports = function(app) { app.get('/', controller); app.get('/foo', controller); app.get('/hello', controller); app.get('/hello/other/world', controller); app.get('/world/12', controller); function* controller() { this.body = 'body'; } };
{ "pile_set_name": "Github" }
// [DataType(DataType.Date)] public DateTime ReleaseDate { get; set; }
{ "pile_set_name": "Github" }
// Truffle/mocha tests for the SafeMath library (checked uint256 arithmetic),
// exercised through the SafeMathMock contract which forwards each call to the
// corresponding SafeMath function. Overflow/underflow/zero-division are
// expected to throw, which assertJump verifies.
const { assertJump } = require('../helpers/assertJump');
const BigNumber = web3.BigNumber;
const SafeMathMock = artifacts.require('SafeMathMock');

require('chai')
  .use(require('chai-bignumber')(BigNumber))
  .should();

// NOTE: inner callbacks use `function ()` (not arrows) because mocha binds
// the shared test context to `this`.
contract('SafeMath', () => {
  // 2**256 - 1: the largest value representable by uint256.
  const MAX_UINT = new BigNumber('115792089237316195423570985008687907853269984665640564039457584007913129639935');

  beforeEach(async function () {
    // Fresh mock per test keeps the cases independent.
    this.safeMath = await SafeMathMock.new();
  });

  describe('add', function () {
    it('adds correctly', async function () {
      const a = new BigNumber(5678);
      const b = new BigNumber(1234);

      const result = await this.safeMath.add(a, b);
      result.should.be.bignumber.equal(a.plus(b));
    });

    it('throws an error on addition overflow', async function () {
      // MAX_UINT + 1 wraps past 2**256, so SafeMath must revert.
      const a = MAX_UINT;
      const b = new BigNumber(1);

      await assertJump(this.safeMath.add(a, b));
    });
  });

  describe('sub', function () {
    it('subtracts correctly', async function () {
      const a = new BigNumber(5678);
      const b = new BigNumber(1234);

      const result = await this.safeMath.sub(a, b);
      result.should.be.bignumber.equal(a.minus(b));
    });

    it('throws an error if subtraction result would be negative', async function () {
      // uint256 cannot represent negative values; SafeMath must revert.
      const a = new BigNumber(1234);
      const b = new BigNumber(5678);

      await assertJump(this.safeMath.sub(a, b));
    });
  });

  describe('mul', function () {
    it('multiplies correctly', async function () {
      const a = new BigNumber(1234);
      const b = new BigNumber(5678);

      const result = await this.safeMath.mul(a, b);
      result.should.be.bignumber.equal(a.times(b));
    });

    it('handles a zero product correctly', async function () {
      // Zero is special-cased in SafeMath.mul's overflow check.
      const a = new BigNumber(0);
      const b = new BigNumber(5678);

      const result = await this.safeMath.mul(a, b);
      result.should.be.bignumber.equal(a.times(b));
    });

    it('throws an error on multiplication overflow', async function () {
      const a = MAX_UINT;
      const b = new BigNumber(2);

      await assertJump(this.safeMath.mul(a, b));
    });
  });

  describe('div', function () {
    it('divides correctly', async function () {
      const a = new BigNumber(5678);
      const b = new BigNumber(5678);

      const result = await this.safeMath.div(a, b);
      result.should.be.bignumber.equal(a.div(b));
    });

    it('throws an error on zero division', async function () {
      const a = new BigNumber(5678);
      const b = new BigNumber(0);

      await assertJump(this.safeMath.div(a, b));
    });
  });
});
{ "pile_set_name": "Github" }
-- =======================================================================
--                                 Player 1
-- =======================================================================
-- The human player starts heavily restricted: only sentinels and
-- warehouses may be built in addition to the placed headquarters.
p1:forbid_buildings("all")
p1:allow_buildings {"frisians_sentinel","frisians_warehouse"}

hq = p1:place_building("frisians_headquarters", map.player_slots[1].starting_field, false, true)

-- Starting wares for the player's headquarters.
hq:set_wares {
   log = 40,
   brick = 50,
   clay = 20,
   granite = 40,
   water = 40,
   coal = 20,
   reed = 20,
   fruit = 10,
   fish = 10,
   meat = 10,
   smoked_fish = 10,
   smoked_meat = 10,
   bread_frisians = 10,
   ration = 24,
   iron = 2,
   iron_ore = 5,
   gold_ore = 1,
}

-- Starting workers for the player's headquarters.
hq:set_workers {
   frisians_woodcutter = 3,
   frisians_forester = 6,
   frisians_claydigger = 3,
   frisians_brickmaker = 2,
   frisians_builder = 10,
   frisians_blacksmith = 4,
   frisians_miner = 5,
   frisians_smelter = 2,
   frisians_smoker = 2,
   frisians_seamstress = 2,
   frisians_landlady = 3,
   frisians_berry_farmer = 3,
   frisians_fruit_collector = 3,
   frisians_beekeeper = 2,
   frisians_fisher = 3,
   frisians_hunter = 1,
   frisians_geologist = 2,
   frisians_farmer = 3,
   frisians_reed_farmer = 3,
   frisians_baker = 1,
   frisians_brewer = 1,
   frisians_trainer = 3,
}

-- Soldier count scales down with difficulty; `difficulty` (presumably 1-3,
-- set by the scenario framework -- confirm against the calling script)
-- indexes this table.
nr_soldiers = {25, 12, 4}
nr_soldiers = nr_soldiers[difficulty]
hq:set_soldiers({0,0,0,0}, nr_soldiers)

-- =======================================================================
--                                 Player 2
-- =======================================================================
-- The AI opponent gets its full economy chain but (notably) no warehouse
-- or training buildings.
p2:forbid_buildings("all")
p2:allow_buildings {
   "frisians_woodcutters_house",
   "frisians_foresters_house",
   "frisians_well",
   "frisians_reed_farm",
   "frisians_clay_pit",
   "frisians_brick_kiln",
   "frisians_fishers_house",
   "frisians_hunters_house",
   "frisians_quarry",
   "frisians_smokery",
   "frisians_tavern",
   "frisians_coalmine",
   "frisians_ironmine",
   "frisians_goldmine",
   "frisians_rockmine",
   "frisians_coalmine_deep",
   "frisians_ironmine_deep",
   "frisians_goldmine_deep",
   "frisians_rockmine_deep",
   "frisians_farm",
   "frisians_bakery",
   "frisians_brewery",
   "frisians_furnace",
   "frisians_blacksmithy",
   "frisians_mead_brewery",
   "frisians_honey_bread_bakery",
   "frisians_drinking_hall",
   "frisians_sentinel"}

hq2 = p2:place_building("frisians_headquarters", map.player_slots[2].starting_field, false, true)

-- The AI's stock is deliberately much larger than the player's.
hq2:set_wares {
   log = 50,
   brick = 200,
   clay = 30,
   granite = 200,
   reed = 200,
   water = 20,
   coal = 100,
   iron = 50,
}
hq2:set_workers {
   frisians_woodcutter = 10,
   frisians_forester = 20,
   frisians_brickmaker = 10,
   frisians_builder = 10,
   frisians_blacksmith = 5,
   frisians_miner_master = 10,
   frisians_smelter = 4,
   frisians_smoker = 10,
   frisians_landlady = 10,
   frisians_berry_farmer = 20,
   frisians_fruit_collector = 20,
   frisians_beekeeper = 10,
   frisians_fisher = 20,
   frisians_hunter = 4,
   frisians_geologist = 5,
   frisians_farmer = 20,
   frisians_reed_farmer = 10,
   frisians_baker_master = 5,
   frisians_brewer_master = 5,
   frisians_reindeer = 50,
}

-- AI soldiers scale UP with difficulty, and start partially trained
-- (second slot of the level table is 6).
nr_soldiers = {30, 50, 90}
nr_soldiers = nr_soldiers[difficulty]
hq2:set_soldiers({0,6,0,0}, nr_soldiers)
{ "pile_set_name": "Github" }
Trailing comment after braced namespace declaration ----- <?php namespace Foo {} // Comment ----- array( 0: Stmt_Namespace( name: Name( parts: array( 0: Foo ) ) stmts: array( ) ) 1: Stmt_Nop( comments: array( 0: // Comment ) ) )
{ "pile_set_name": "Github" }
name: Guess the Class type: game description: Reckon you know most of Java's renowned classes? Fill the gaps as quick as possible with the missing bits of code to test your knowledge by matching classes with their methods. section: 1 parent: memory
{ "pile_set_name": "Github" }
=pod =head1 NAME EVP_PKEY_decrypt_init, EVP_PKEY_decrypt - decrypt using a public key algorithm =head1 SYNOPSIS #include <openssl/evp.h> int EVP_PKEY_decrypt_init(EVP_PKEY_CTX *ctx); int EVP_PKEY_decrypt(EVP_PKEY_CTX *ctx, unsigned char *out, size_t *outlen, const unsigned char *in, size_t inlen); =head1 DESCRIPTION The EVP_PKEY_decrypt_init() function initializes a public key algorithm context using key B<pkey> for a decryption operation. The EVP_PKEY_decrypt() function performs a public key decryption operation using B<ctx>. The data to be decrypted is specified using the B<in> and B<inlen> parameters. If B<out> is B<NULL> then the maximum size of the output buffer is written to the B<outlen> parameter. If B<out> is not B<NULL> then before the call the B<outlen> parameter should contain the length of the B<out> buffer, if the call is successful the decrypted data is written to B<out> and the amount of data written to B<outlen>. =head1 NOTES After the call to EVP_PKEY_decrypt_init() algorithm specific control operations can be performed to set any appropriate parameters for the operation. The function EVP_PKEY_decrypt() can be called more than once on the same context if several operations are performed using the same parameters. =head1 RETURN VALUES EVP_PKEY_decrypt_init() and EVP_PKEY_decrypt() return 1 for success and 0 or a negative value for failure. In particular a return value of -2 indicates the operation is not supported by the public key algorithm. 
=head1 EXAMPLE

Decrypt data using OAEP (for RSA keys):

 #include <openssl/evp.h>
 #include <openssl/rsa.h>

 EVP_PKEY_CTX *ctx;
 unsigned char *out, *in;
 size_t outlen, inlen;
 EVP_PKEY *key;
 /* NB: assumes key in, inlen are already set up
  * and that key is an RSA private key
  */
 ctx = EVP_PKEY_CTX_new(key);
 if (!ctx)
        /* Error occurred */
 if (EVP_PKEY_decrypt_init(ctx) <= 0)
        /* Error */
 if (EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PKCS1_OAEP_PADDING) <= 0)
        /* Error */

 /* Determine buffer length */
 if (EVP_PKEY_decrypt(ctx, NULL, &outlen, in, inlen) <= 0)
        /* Error */

 out = OPENSSL_malloc(outlen);

 if (!out)
        /* malloc failure */

 if (EVP_PKEY_decrypt(ctx, out, &outlen, in, inlen) <= 0)
        /* Error */

 /* Decrypted data is outlen bytes written to buffer out */

=head1 SEE ALSO

L<EVP_PKEY_CTX_new(3)|EVP_PKEY_CTX_new(3)>,
L<EVP_PKEY_encrypt(3)|EVP_PKEY_encrypt(3)>,
L<EVP_PKEY_sign(3)|EVP_PKEY_sign(3)>,
L<EVP_PKEY_verify(3)|EVP_PKEY_verify(3)>,
L<EVP_PKEY_verify_recover(3)|EVP_PKEY_verify_recover(3)>,
L<EVP_PKEY_derive(3)|EVP_PKEY_derive(3)>

=head1 HISTORY

These functions were first added to OpenSSL 1.0.0.

=cut
{ "pile_set_name": "Github" }
/*
 * QEMU I/O channels sockets driver
 *
 * Copyright (c) 2015 Red Hat, Inc.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "qemu/osdep.h"
#include "qemu-common.h"
#include "qapi/error.h"
#include "qapi/qapi-visit-sockets.h"
#include "qemu/module.h"
#include "io/channel-socket.h"
#include "io/channel-watch.h"
#include "trace.h"
#include "qapi/clone-visitor.h"

/* Upper bound on FDs accepted/sent per recvmsg()/sendmsg() ancillary block */
#define SOCKET_MAX_FDS 16

/* Convert the cached local (bound) sockaddr into a SocketAddress.
 * Returns a newly allocated object the caller must free, or NULL on error. */
SocketAddress *
qio_channel_socket_get_local_address(QIOChannelSocket *ioc,
                                     Error **errp)
{
    return socket_sockaddr_to_address(&ioc->localAddr,
                                      ioc->localAddrLen,
                                      errp);
}

/* Convert the cached remote (peer) sockaddr into a SocketAddress.
 * Returns a newly allocated object the caller must free, or NULL on error. */
SocketAddress *
qio_channel_socket_get_remote_address(QIOChannelSocket *ioc,
                                      Error **errp)
{
    return socket_sockaddr_to_address(&ioc->remoteAddr,
                                      ioc->remoteAddrLen,
                                      errp);
}

/* Allocate an unconnected socket channel (fd == -1 until one of the
 * connect/listen/dgram/set_fd helpers attaches a descriptor). */
QIOChannelSocket *
qio_channel_socket_new(void)
{
    QIOChannelSocket *sioc;
    QIOChannel *ioc;

    sioc = QIO_CHANNEL_SOCKET(object_new(TYPE_QIO_CHANNEL_SOCKET));
    sioc->fd = -1;

    ioc = QIO_CHANNEL(sioc);
    qio_channel_set_feature(ioc, QIO_CHANNEL_FEATURE_SHUTDOWN);

#ifdef WIN32
    ioc->event = CreateEvent(NULL, FALSE, FALSE, NULL);
#endif

    trace_qio_channel_socket_new(sioc);

    return sioc;
}

/* Attach an existing descriptor to the channel and cache its local/remote
 * addresses. An unconnected fd (ENOTCONN from getpeername) is tolerated:
 * the remote address is simply zeroed. On failure the channel keeps fd == -1
 * and the caller retains ownership of (and must close) the descriptor. */
static int
qio_channel_socket_set_fd(QIOChannelSocket *sioc,
                          int fd,
                          Error **errp)
{
    if (sioc->fd != -1) {
        error_setg(errp, "Socket is already open");
        return -1;
    }

    sioc->fd = fd;
    sioc->remoteAddrLen = sizeof(sioc->remoteAddr);
    sioc->localAddrLen = sizeof(sioc->localAddr);

    if (getpeername(fd, (struct sockaddr *)&sioc->remoteAddr,
                    &sioc->remoteAddrLen) < 0) {
        if (errno == ENOTCONN) {
            memset(&sioc->remoteAddr, 0, sizeof(sioc->remoteAddr));
            sioc->remoteAddrLen = sizeof(sioc->remoteAddr);
        } else {
            error_setg_errno(errp, errno,
                             "Unable to query remote socket address");
            goto error;
        }
    }

    if (getsockname(fd, (struct sockaddr *)&sioc->localAddr,
                    &sioc->localAddrLen) < 0) {
        error_setg_errno(errp, errno,
                         "Unable to query local socket address");
        goto error;
    }

#ifndef WIN32
    /* Only UNIX domain sockets can carry SCM_RIGHTS FD passing */
    if (sioc->localAddr.ss_family == AF_UNIX) {
        QIOChannel *ioc = QIO_CHANNEL(sioc);
        qio_channel_set_feature(ioc, QIO_CHANNEL_FEATURE_FD_PASS);
    }
#endif /* WIN32 */

    return 0;

 error:
    sioc->fd = -1; /* Let the caller close FD on failure */
    return -1;
}

/* Wrap an existing descriptor in a new channel; the channel takes ownership
 * on success. On failure the caller still owns fd. */
QIOChannelSocket *
qio_channel_socket_new_fd(int fd,
                          Error **errp)
{
    QIOChannelSocket *ioc;

    ioc = qio_channel_socket_new();
    if (qio_channel_socket_set_fd(ioc, fd, errp) < 0) {
        object_unref(OBJECT(ioc));
        return NULL;
    }

    trace_qio_channel_socket_new_fd(ioc, fd);

    return ioc;
}

/* Synchronously connect to addr (may block in DNS resolution). */
int qio_channel_socket_connect_sync(QIOChannelSocket *ioc,
                                    SocketAddress *addr,
                                    Error **errp)
{
    int fd;

    trace_qio_channel_socket_connect_sync(ioc, addr);
    fd = socket_connect(addr, errp);
    if (fd < 0) {
        trace_qio_channel_socket_connect_fail(ioc);
        return -1;
    }

    trace_qio_channel_socket_connect_complete(ioc, fd);
    if (qio_channel_socket_set_fd(ioc, fd, errp) < 0) {
        close(fd);
        return -1;
    }

    return 0;
}

/* Thread-pool worker for the async connect: runs the sync variant and
 * records any error on the task. */
static void qio_channel_socket_connect_worker(QIOTask *task,
                                              gpointer opaque)
{
    QIOChannelSocket *ioc = QIO_CHANNEL_SOCKET(qio_task_get_source(task));
    SocketAddress *addr = opaque;
    Error *err = NULL;

    qio_channel_socket_connect_sync(ioc, addr, &err);

    qio_task_set_error(task, err);
}

/* Asynchronously connect; callback fires in context (or the default main
 * context when NULL) once the background worker finishes. */
void qio_channel_socket_connect_async(QIOChannelSocket *ioc,
                                      SocketAddress *addr,
                                      QIOTaskFunc callback,
                                      gpointer opaque,
                                      GDestroyNotify destroy,
                                      GMainContext *context)
{
    QIOTask *task = qio_task_new(
        OBJECT(ioc), callback, opaque, destroy);
    SocketAddress *addrCopy;

    /* Copy addr so the worker's lifetime is independent of the caller's */
    addrCopy = QAPI_CLONE(SocketAddress, addr);

    /* socket_connect() does a non-blocking connect(), but it
     * still blocks in DNS lookups, so we must use a thread */
    trace_qio_channel_socket_connect_async(ioc, addr);
    qio_task_run_in_thread(task,
                           qio_channel_socket_connect_worker,
                           addrCopy,
                           (GDestroyNotify)qapi_free_SocketAddress,
                           context);
}

/* Synchronously create a listening socket on addr with backlog num. */
int qio_channel_socket_listen_sync(QIOChannelSocket *ioc,
                                   SocketAddress *addr,
                                   int num,
                                   Error **errp)
{
    int fd;

    trace_qio_channel_socket_listen_sync(ioc, addr, num);
    fd = socket_listen(addr, num, errp);
    if (fd < 0) {
        trace_qio_channel_socket_listen_fail(ioc);
        return -1;
    }

    trace_qio_channel_socket_listen_complete(ioc, fd);
    if (qio_channel_socket_set_fd(ioc, fd, errp) < 0) {
        close(fd);
        return -1;
    }
    qio_channel_set_feature(QIO_CHANNEL(ioc), QIO_CHANNEL_FEATURE_LISTEN);

    return 0;
}

/* Arguments handed to the async listen worker thread. */
struct QIOChannelListenWorkerData {
    SocketAddress *addr;
    int num; /* amount of expected connections */
};

static void qio_channel_listen_worker_free(gpointer opaque)
{
    struct QIOChannelListenWorkerData *data = opaque;

    qapi_free_SocketAddress(data->addr);
    g_free(data);
}

/* Thread-pool worker for the async listen. */
static void qio_channel_socket_listen_worker(QIOTask *task,
                                             gpointer opaque)
{
    QIOChannelSocket *ioc = QIO_CHANNEL_SOCKET(qio_task_get_source(task));
    struct QIOChannelListenWorkerData *data = opaque;
    Error *err = NULL;

    qio_channel_socket_listen_sync(ioc, data->addr, data->num, &err);

    qio_task_set_error(task, err);
}

/* Asynchronously create a listening socket; see listen_sync for semantics. */
void qio_channel_socket_listen_async(QIOChannelSocket *ioc,
                                     SocketAddress *addr,
                                     int num,
                                     QIOTaskFunc callback,
                                     gpointer opaque,
                                     GDestroyNotify destroy,
                                     GMainContext *context)
{
    QIOTask *task = qio_task_new(
        OBJECT(ioc), callback, opaque, destroy);
    struct QIOChannelListenWorkerData *data;

    data = g_new0(struct QIOChannelListenWorkerData, 1);
    data->addr = QAPI_CLONE(SocketAddress, addr);
    data->num = num;

    /* socket_listen() blocks in DNS lookups, so we must use a thread */
    trace_qio_channel_socket_listen_async(ioc, addr, num);
    qio_task_run_in_thread(task,
                           qio_channel_socket_listen_worker,
                           data,
                           qio_channel_listen_worker_free,
                           context);
}

/* Synchronously create a datagram socket bound to localAddr and
 * (optionally) connected to remoteAddr. */
int qio_channel_socket_dgram_sync(QIOChannelSocket *ioc,
                                  SocketAddress *localAddr,
                                  SocketAddress *remoteAddr,
                                  Error **errp)
{
    int fd;

    trace_qio_channel_socket_dgram_sync(ioc, localAddr, remoteAddr);
    fd = socket_dgram(remoteAddr, localAddr, errp);
    if (fd < 0) {
        trace_qio_channel_socket_dgram_fail(ioc);
        return -1;
    }

    trace_qio_channel_socket_dgram_complete(ioc, fd);
    if (qio_channel_socket_set_fd(ioc, fd, errp) < 0) {
        close(fd);
        return -1;
    }

    return 0;
}

/* Arguments handed to the async dgram worker thread. */
struct QIOChannelSocketDGramWorkerData {
    SocketAddress *localAddr;
    SocketAddress *remoteAddr;
};

static void qio_channel_socket_dgram_worker_free(gpointer opaque)
{
    struct QIOChannelSocketDGramWorkerData *data = opaque;

    qapi_free_SocketAddress(data->localAddr);
    qapi_free_SocketAddress(data->remoteAddr);
    g_free(data);
}

static void qio_channel_socket_dgram_worker(QIOTask *task,
                                            gpointer opaque)
{
    QIOChannelSocket *ioc = QIO_CHANNEL_SOCKET(qio_task_get_source(task));
    struct QIOChannelSocketDGramWorkerData *data = opaque;
    Error *err = NULL;

    /* socket_dgram() blocks in DNS lookups, so we must use a thread */
    qio_channel_socket_dgram_sync(ioc, data->localAddr,
                                  data->remoteAddr, &err);

    qio_task_set_error(task, err);
}

/* Asynchronously create a datagram socket; see dgram_sync for semantics. */
void qio_channel_socket_dgram_async(QIOChannelSocket *ioc,
                                    SocketAddress *localAddr,
                                    SocketAddress *remoteAddr,
                                    QIOTaskFunc callback,
                                    gpointer opaque,
                                    GDestroyNotify destroy,
                                    GMainContext *context)
{
    QIOTask *task = qio_task_new(
        OBJECT(ioc), callback, opaque, destroy);
    struct QIOChannelSocketDGramWorkerData *data = g_new0(
        struct QIOChannelSocketDGramWorkerData, 1);

    data->localAddr = QAPI_CLONE(SocketAddress, localAddr);
    data->remoteAddr = QAPI_CLONE(SocketAddress, remoteAddr);

    trace_qio_channel_socket_dgram_async(ioc, localAddr, remoteAddr);
    qio_task_run_in_thread(task,
                           qio_channel_socket_dgram_worker,
                           data,
                           qio_channel_socket_dgram_worker_free,
                           context);
}

/* Accept one pending connection from a listening channel, returning a new
 * channel for the client, or NULL on error. EINTR is retried internally. */
QIOChannelSocket *
qio_channel_socket_accept(QIOChannelSocket *ioc,
                          Error **errp)
{
    QIOChannelSocket *cioc;

    cioc = qio_channel_socket_new();
    cioc->remoteAddrLen = sizeof(ioc->remoteAddr);
    cioc->localAddrLen = sizeof(ioc->localAddr);

 retry:
    trace_qio_channel_socket_accept(ioc);
    cioc->fd = qemu_accept(ioc->fd, (struct sockaddr *)&cioc->remoteAddr,
                           &cioc->remoteAddrLen);
    if (cioc->fd < 0) {
        if (errno == EINTR) {
            goto retry;
        }
        error_setg_errno(errp, errno, "Unable to accept connection");
        trace_qio_channel_socket_accept_fail(ioc);
        goto error;
    }

    if (getsockname(cioc->fd, (struct sockaddr *)&cioc->localAddr,
                    &cioc->localAddrLen) < 0) {
        error_setg_errno(errp, errno,
                         "Unable to query local socket address");
        goto error;
    }

#ifndef WIN32
    if (cioc->localAddr.ss_family == AF_UNIX) {
        QIOChannel *ioc_local = QIO_CHANNEL(cioc);
        qio_channel_set_feature(ioc_local, QIO_CHANNEL_FEATURE_FD_PASS);
    }
#endif /* WIN32 */

    trace_qio_channel_socket_accept_complete(ioc, cioc, cioc->fd);
    return cioc;

 error:
    object_unref(OBJECT(cioc));
    return NULL;
}

static void qio_channel_socket_init(Object *obj)
{
    QIOChannelSocket *ioc = QIO_CHANNEL_SOCKET(obj);
    ioc->fd = -1;
}

/* Finalizer: tear down any still-open descriptor, including cleanup of
 * listener resources (e.g. unlinking UNIX socket paths). */
static void qio_channel_socket_finalize(Object *obj)
{
    QIOChannelSocket *ioc = QIO_CHANNEL_SOCKET(obj);

    if (ioc->fd != -1) {
        QIOChannel *ioc_local = QIO_CHANNEL(ioc);
        if (qio_channel_has_feature(ioc_local, QIO_CHANNEL_FEATURE_LISTEN)) {
            Error *err = NULL;

            socket_listen_cleanup(ioc->fd, &err);
            if (err) {
                error_report_err(err);
                err = NULL;
            }
        }
#ifdef WIN32
        WSAEventSelect(ioc->fd, NULL, 0);
#endif
        closesocket(ioc->fd);
        ioc->fd = -1;
    }
}

#ifndef WIN32
/* Append any SCM_RIGHTS descriptors found in msg's ancillary data to *fds,
 * growing the array; *nfds is updated. Received FDs are switched to blocking
 * mode (O_NONBLOCK survives FD passing) and marked close-on-exec when the
 * platform could not do it atomically at recvmsg() time. */
static void qio_channel_socket_copy_fds(struct msghdr *msg,
                                        int **fds, size_t *nfds)
{
    struct cmsghdr *cmsg;

    *nfds = 0;
    *fds = NULL;

    for (cmsg = CMSG_FIRSTHDR(msg); cmsg; cmsg = CMSG_NXTHDR(msg, cmsg)) {
        int fd_size, i;
        int gotfds;

        if (cmsg->cmsg_len < CMSG_LEN(sizeof(int)) ||
            cmsg->cmsg_level != SOL_SOCKET ||
            cmsg->cmsg_type != SCM_RIGHTS) {
            continue;
        }

        fd_size = cmsg->cmsg_len - CMSG_LEN(0);

        if (!fd_size) {
            continue;
        }

        gotfds = fd_size / sizeof(int);
        *fds = g_renew(int, *fds, *nfds + gotfds);
        memcpy(*fds + *nfds, CMSG_DATA(cmsg), fd_size);

        for (i = 0; i < gotfds; i++) {
            int fd = (*fds)[*nfds + i];
            if (fd < 0) {
                continue;
            }

            /* O_NONBLOCK is preserved across SCM_RIGHTS so reset it */
            qemu_set_block(fd);

#ifndef MSG_CMSG_CLOEXEC
            qemu_set_cloexec(fd);
#endif
        }
        *nfds += gotfds;
    }
}

/* POSIX scatter read; optionally harvests passed FDs when fds/nfds are
 * non-NULL. Returns bytes read, QIO_CHANNEL_ERR_BLOCK when the socket
 * would block, or -1 with errp set. */
static ssize_t qio_channel_socket_readv(QIOChannel *ioc,
                                        const struct iovec *iov,
                                        size_t niov,
                                        int **fds,
                                        size_t *nfds,
                                        Error **errp)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    ssize_t ret;
    struct msghdr msg = { NULL, };
    char control[CMSG_SPACE(sizeof(int) * SOCKET_MAX_FDS)];
    int sflags = 0;

    memset(control, 0, CMSG_SPACE(sizeof(int) * SOCKET_MAX_FDS));

#ifdef MSG_CMSG_CLOEXEC
    sflags |= MSG_CMSG_CLOEXEC;
#endif

    msg.msg_iov = (struct iovec *)iov;
    msg.msg_iovlen = niov;
    if (fds && nfds) {
        msg.msg_control = control;
        msg.msg_controllen = sizeof(control);
    }

 retry:
    ret = recvmsg(sioc->fd, &msg, sflags);
    if (ret < 0) {
        if (errno == EAGAIN) {
            return QIO_CHANNEL_ERR_BLOCK;
        }
        if (errno == EINTR) {
            goto retry;
        }

        error_setg_errno(errp, errno,
                         "Unable to read from socket");
        return -1;
    }

    if (fds && nfds) {
        qio_channel_socket_copy_fds(&msg, fds, nfds);
    }

    return ret;
}

/* POSIX gather write; optionally passes up to SOCKET_MAX_FDS descriptors
 * as SCM_RIGHTS ancillary data. Same return convention as readv. */
static ssize_t qio_channel_socket_writev(QIOChannel *ioc,
                                         const struct iovec *iov,
                                         size_t niov,
                                         int *fds,
                                         size_t nfds,
                                         Error **errp)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    ssize_t ret;
    struct msghdr msg = { NULL, };
    char control[CMSG_SPACE(sizeof(int) * SOCKET_MAX_FDS)];
    size_t fdsize = sizeof(int) * nfds;
    struct cmsghdr *cmsg;

    memset(control, 0, CMSG_SPACE(sizeof(int) * SOCKET_MAX_FDS));

    msg.msg_iov = (struct iovec *)iov;
    msg.msg_iovlen = niov;

    if (nfds) {
        if (nfds > SOCKET_MAX_FDS) {
            error_setg_errno(errp, EINVAL,
                             "Only %d FDs can be sent, got %zu",
                             SOCKET_MAX_FDS, nfds);
            return -1;
        }

        msg.msg_control = control;
        msg.msg_controllen = CMSG_SPACE(sizeof(int) * nfds);

        cmsg = CMSG_FIRSTHDR(&msg);
        cmsg->cmsg_len = CMSG_LEN(fdsize);
        cmsg->cmsg_level = SOL_SOCKET;
        cmsg->cmsg_type = SCM_RIGHTS;
        memcpy(CMSG_DATA(cmsg), fds, fdsize);
    }

 retry:
    ret = sendmsg(sioc->fd, &msg, 0);
    if (ret <= 0) {
        if (errno == EAGAIN) {
            return QIO_CHANNEL_ERR_BLOCK;
        }
        if (errno == EINTR) {
            goto retry;
        }
        error_setg_errno(errp, errno,
                         "Unable to write to socket");
        return -1;
    }
    return ret;
}
#else /* WIN32 */
/* Win32 scatter read: no readv/recvmsg, so loop recv() per iovec element.
 * FD passing is unsupported on this platform. A short read of one element
 * terminates the loop early. */
static ssize_t qio_channel_socket_readv(QIOChannel *ioc,
                                        const struct iovec *iov,
                                        size_t niov,
                                        int **fds,
                                        size_t *nfds,
                                        Error **errp)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    ssize_t done = 0;
    ssize_t i;

    for (i = 0; i < niov; i++) {
        ssize_t ret;
    retry:
        ret = recv(sioc->fd,
                   iov[i].iov_base,
                   iov[i].iov_len,
                   0);
        if (ret < 0) {
            if (errno == EAGAIN) {
                if (done) {
                    return done;
                } else {
                    return QIO_CHANNEL_ERR_BLOCK;
                }
            } else if (errno == EINTR) {
                goto retry;
            } else {
                error_setg_errno(errp, errno,
                                 "Unable to read from socket");
                return -1;
            }
        }
        done += ret;
        if (ret < iov[i].iov_len) {
            return done;
        }
    }

    return done;
}

/* Win32 gather write: loop send() per iovec element (no sendmsg). */
static ssize_t qio_channel_socket_writev(QIOChannel *ioc,
                                         const struct iovec *iov,
                                         size_t niov,
                                         int *fds,
                                         size_t nfds,
                                         Error **errp)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    ssize_t done = 0;
    ssize_t i;

    for (i = 0; i < niov; i++) {
        ssize_t ret;
    retry:
        ret = send(sioc->fd,
                   iov[i].iov_base,
                   iov[i].iov_len,
                   0);
        if (ret < 0) {
            if (errno == EAGAIN) {
                if (done) {
                    return done;
                } else {
                    return QIO_CHANNEL_ERR_BLOCK;
                }
            } else if (errno == EINTR) {
                goto retry;
            } else {
                error_setg_errno(errp, errno,
                                 "Unable to write to socket");
                return -1;
            }
        }
        done += ret;
        if (ret < iov[i].iov_len) {
            return done;
        }
    }

    return done;
}
#endif /* WIN32 */

static int qio_channel_socket_set_blocking(QIOChannel *ioc,
                                           bool enabled,
                                           Error **errp)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);

    if (enabled) {
        qemu_set_block(sioc->fd);
    } else {
        qemu_set_nonblock(sioc->fd);
    }
    return 0;
}

/* "Delay enabled" means Nagle's algorithm on, i.e. TCP_NODELAY off. */
static void qio_channel_socket_set_delay(QIOChannel *ioc,
                                         bool enabled)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    int v = enabled ? 0 : 1;

    qemu_setsockopt(sioc->fd,
                    IPPROTO_TCP, TCP_NODELAY,
                    &v, sizeof(v));
}

static void qio_channel_socket_set_cork(QIOChannel *ioc,
                                        bool enabled)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    int v = enabled ? 1 : 0;

    socket_set_cork(sioc->fd, v);
}

/* Close the descriptor, running listener cleanup first if applicable.
 * fd is reset to -1 even when closesocket() fails, to avoid double close. */
static int qio_channel_socket_close(QIOChannel *ioc,
                                    Error **errp)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    int rc = 0;
    Error *err = NULL;

    if (sioc->fd != -1) {
#ifdef WIN32
        WSAEventSelect(sioc->fd, NULL, 0);
#endif
        if (qio_channel_has_feature(ioc, QIO_CHANNEL_FEATURE_LISTEN)) {
            socket_listen_cleanup(sioc->fd, errp);
        }

        if (closesocket(sioc->fd) < 0) {
            sioc->fd = -1;
            error_setg_errno(&err, errno, "Unable to close socket");
            error_propagate(errp, err);
            return -1;
        }
        sioc->fd = -1;
    }
    return rc;
}

/* Map the channel-level shutdown direction onto shutdown(2)'s SHUT_* flags. */
static int
qio_channel_socket_shutdown(QIOChannel *ioc,
                            QIOChannelShutdown how,
                            Error **errp)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    int sockhow;

    switch (how) {
    case QIO_CHANNEL_SHUTDOWN_READ:
        sockhow = SHUT_RD;
        break;
    case QIO_CHANNEL_SHUTDOWN_WRITE:
        sockhow = SHUT_WR;
        break;
    case QIO_CHANNEL_SHUTDOWN_BOTH:
    default:
        sockhow = SHUT_RDWR;
        break;
    }

    if (shutdown(sioc->fd, sockhow) < 0) {
        error_setg_errno(errp, errno,
                         "Unable to shutdown socket");
        return -1;
    }
    return 0;
}

static void qio_channel_socket_set_aio_fd_handler(QIOChannel *ioc,
                                                  AioContext *ctx,
                                                  IOHandler *io_read,
                                                  IOHandler *io_write,
                                                  void *opaque)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    aio_set_fd_handler(ctx, sioc->fd, false, io_read, io_write, NULL, opaque);
}

static GSource *qio_channel_socket_create_watch(QIOChannel *ioc,
                                                GIOCondition condition)
{
    QIOChannelSocket *sioc = QIO_CHANNEL_SOCKET(ioc);
    return qio_channel_create_socket_watch(ioc,
                                           sioc->fd,
                                           condition);
}

/* Wire up the QIOChannel vtable to the socket implementations above. */
static void qio_channel_socket_class_init(ObjectClass *klass,
                                          void *class_data G_GNUC_UNUSED)
{
    QIOChannelClass *ioc_klass = QIO_CHANNEL_CLASS(klass);

    ioc_klass->io_writev = qio_channel_socket_writev;
    ioc_klass->io_readv = qio_channel_socket_readv;
    ioc_klass->io_set_blocking = qio_channel_socket_set_blocking;
    ioc_klass->io_close = qio_channel_socket_close;
    ioc_klass->io_shutdown = qio_channel_socket_shutdown;
    ioc_klass->io_set_cork = qio_channel_socket_set_cork;
    ioc_klass->io_set_delay = qio_channel_socket_set_delay;
    ioc_klass->io_create_watch = qio_channel_socket_create_watch;
    ioc_klass->io_set_aio_fd_handler = qio_channel_socket_set_aio_fd_handler;
}

static const TypeInfo qio_channel_socket_info = {
    .parent = TYPE_QIO_CHANNEL,
    .name = TYPE_QIO_CHANNEL_SOCKET,
    .instance_size = sizeof(QIOChannelSocket),
    .instance_init = qio_channel_socket_init,
    .instance_finalize = qio_channel_socket_finalize,
    .class_init = qio_channel_socket_class_init,
};

static void qio_channel_socket_register_types(void)
{
    type_register_static(&qio_channel_socket_info);
}

type_init(qio_channel_socket_register_types);
# [Juwelo TV Deutschland GmbH](http://alexa.amazon.com/#skills/amzn1.ask.skill.c9ffc884-907d-4289-bb6d-50d52a7836fa) ![0 stars](../../images/ic_star_border_black_18dp_1x.png)![0 stars](../../images/ic_star_border_black_18dp_1x.png)![0 stars](../../images/ic_star_border_black_18dp_1x.png)![0 stars](../../images/ic_star_border_black_18dp_1x.png)![0 stars](../../images/ic_star_border_black_18dp_1x.png) 0 null *** ### Skill Details * **Invocation Name:** null * **Category:** null * **ID:** amzn1.ask.skill.c9ffc884-907d-4289-bb6d-50d52a7836fa * **ASIN:** B01MTTOUJF * **Author:** Juwelo TV Deutschland GmbH * **Release Date:** December 1, 2016 @ 02:05:47 * **In-App Purchasing:** No
{ "pile_set_name": "Github" }
// Copyright Aleksey Gurtovoy 2000-2004 // // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // Preprocessed version of "boost/mpl/greater.hpp" header // -- DO NOT modify by hand! namespace boost { namespace mpl { template< typename Tag1 , typename Tag2 , BOOST_MPL_AUX_NTTP_DECL(int, tag1_) = BOOST_MPL_AUX_MSVC_VALUE_WKND(Tag1)::value , BOOST_MPL_AUX_NTTP_DECL(int, tag2_) = BOOST_MPL_AUX_MSVC_VALUE_WKND(Tag2)::value > struct greater_impl : if_c< ( tag1_ > tag2_ ) , aux::cast2nd_impl< greater_impl< Tag1,Tag1 >,Tag1, Tag2 > , aux::cast1st_impl< greater_impl< Tag2,Tag2 >,Tag1, Tag2 > >::type { }; /// for Digital Mars C++/compilers with no CTPS/TTP support template<> struct greater_impl< na,na > { template< typename U1, typename U2 > struct apply { typedef apply type; BOOST_STATIC_CONSTANT(int, value = 0); }; }; template<> struct greater_impl< na,integral_c_tag > { template< typename U1, typename U2 > struct apply { typedef apply type; BOOST_STATIC_CONSTANT(int, value = 0); }; }; template<> struct greater_impl< integral_c_tag,na > { template< typename U1, typename U2 > struct apply { typedef apply type; BOOST_STATIC_CONSTANT(int, value = 0); }; }; template< typename T > struct greater_tag { typedef typename T::tag type; }; template< typename BOOST_MPL_AUX_NA_PARAM(N1) , typename BOOST_MPL_AUX_NA_PARAM(N2) > struct greater : aux::msvc_eti_base< typename apply_wrap2< greater_impl< typename greater_tag<N1>::type , typename greater_tag<N2>::type > , N1 , N2 >::type >::type { BOOST_MPL_AUX_LAMBDA_SUPPORT(2, greater, (N1, N2)) }; BOOST_MPL_AUX_NA_SPEC2(2, 2, greater) }} namespace boost { namespace mpl { template<> struct greater_impl< integral_c_tag,integral_c_tag > { template< typename N1, typename N2 > struct apply { BOOST_STATIC_CONSTANT(bool, value = ( BOOST_MPL_AUX_VALUE_WKND(N1)::value > BOOST_MPL_AUX_VALUE_WKND(N2)::value ) ); typedef bool_<value> type; }; }; }}
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 115fe534d60e58f4cb5c98ad37b98bbd timeCreated: 1530817834 licenseType: Pro MonoImporter: externalObjects: {} serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
{target:win} // // AggPas 2.4 RM3 Demo application // Note: Press F1 key on run to see more info about this demo // // Paths: src;src\ctrl;src\svg;src\util;src\platform\win;expat-wrap // program truetype_test ; uses Windows ,SysUtils , agg_basics , agg_platform_support , agg_color , agg_pixfmt , agg_pixfmt_rgb , agg_ctrl , agg_slider_ctrl , agg_cbox_ctrl , agg_rbox_ctrl , agg_rendering_buffer , agg_renderer_base , agg_renderer_scanline , agg_renderer_primitives , agg_rasterizer_scanline_aa , agg_scanline , agg_scanline_u , agg_scanline_bin , agg_render_scanlines , agg_trans_affine , agg_curves , agg_conv_curve , agg_conv_contour , agg_gamma_lut , agg_gamma_functions , agg_font_win32_tt , agg_font_cache_manager ; {$I agg_mode.inc } const flip_y = true; angle_step = 0.5; text_ : PChar = //'0123456789ABCDEFGHIJKLMNOPRSTUVWXYZabcdefghijklmnoprstuvwxyz ' + 'Anti-Grain Geometry is designed as a set of loosely coupled ' + 'algorithms and class templates united with a common idea, ' + 'so that all the components can be easily combined. Also, ' + 'the template based design allows you to replace any part of ' + 'the library without the necessity to modify a single byte in ' + 'the existing code. ' + 'AGG is designed keeping in mind extensibility and flexibility. ' + 'Basically I just wanted to create a toolkit that would allow me ' + '(and anyone else) to add new fancy algorithms very easily. ' + 'AGG does not dictate you any style of its use, you are free to ' + 'use any part of it. However, AGG is often associated with a tool ' + 'for rendering images in memory. That is not quite true, but it can ' + 'be a good starting point in studying. The tutorials describe the ' + 'use of AGG starting from the low level functionality that deals with ' + 'frame buffers and pixels. Then you will gradually understand how to ' + 'abstract different parts of the library and how to use them separately. 
' + 'Remember, the raster picture is often not the only thing you want to ' + 'obtain, you will probably want to print your graphics with highest ' + 'possible quality and in this case you can easily combine the "vectorial" ' + 'part of the library with some API like Windows GDI, having a common ' + 'external interface. If that API can render multi-polygons with non-zero ' + 'and even-odd filling rules it''s all you need to incorporate AGG into ' + 'your application. For example, Windows API PolyPolygon perfectly fits ' + 'these needs, except certain advanced things like gradient filling, ' + 'Gouraud shading, image transformations, and so on. Or, as an alternative, ' + 'you can use all AGG algorithms producing high resolution pixel images and ' + 'then to send the result to the printer as a pixel map.' + 'Below is a typical brief scheme of the AGG rendering pipeline. ' + 'Please note that any component between the Vertex Source ' + 'and Screen Output is not mandatory. It all depends on your ' + 'particular needs. For example, you can use your own rasterizer, ' + 'based on Windows API. In this case you won''t need the AGG rasterizer ' + 'and renderers. Or, if you need to draw only lines, you can use the ' + 'AGG outline rasterizer that has certain restrictions but works faster. ' + 'The number of possibilities is endless. ' + 'Vertex Source is some object that produces polygons or polylines as ' + 'a set of consecutive 2D vertices with commands like MoveTo, LineTo. ' + 'It can be a container or some other object that generates vertices ' + 'on demand. ' + 'Coordinate conversion pipeline consists of a number of coordinate ' + 'converters. It always works with vectorial data (X,Y) represented ' + 'as floating point numbers (double). For example, it can contain an ' + 'affine transformer, outline (stroke) generator, some marker ' + 'generator (like arrowheads/arrowtails), dashed lines generator, ' + 'and so on. 
The pipeline can have branches and you also can have ' + 'any number of different pipelines. You also can write your own ' + 'converter and include it into the pipeline. ' + 'Scanline Rasterizer converts vectorial data into a number of ' + 'horizontal scanlines. The scanlines usually (but not obligatory) ' + 'carry information about Anti-Aliasing as coverage values. ' + 'Renderers render scanlines, sorry for the tautology. The simplest ' + 'example is solid filling. The renderer just adds a color to the ' + 'scanline and writes the result into the rendering buffer. ' + 'More complex renderers can produce multi-color result, ' + 'like gradients, Gouraud shading, image transformations, ' + 'patterns, and so on. Rendering Buffer is a buffer in memory ' + 'that will be displayed afterwards. Usually but not obligatory ' + 'it contains pixels in format that fits your video system. ' + 'For example, 24 bits B-G-R, 32 bits B-G-R-A, or 15 ' + 'bits R-G-B-555 for Windows. But in general, there''re no ' + 'restrictions on pixel formats or color space if you write ' + 'your own low level class that supports that format. ' + 'Colors in AGG appear only in renderers, that is, when you ' + 'actually put some data to the rendering buffer. In general, ' + 'there''s no general purpose structure or class like color, ' + 'instead, AGG always operates with concrete color space. ' + 'There are plenty of color spaces in the world, like RGB, ' + 'HSV, CMYK, etc., and all of them have certain restrictions. ' + 'For example, the RGB color space is just a poor subset of ' + 'colors that a human eye can recognize. If you look at the full ' + 'CIE Chromaticity Diagram, you will see that the RGB triangle ' + 'is just a little part of it. ' + 'In other words there are plenty of colors in the real world ' + 'that cannot be reproduced with RGB, CMYK, HSV, etc. Any color ' + 'space except the one existing in Nature is restrictive. 
Thus, ' + 'it was decided not to introduce such an object like color in ' + 'order not to restrict the possibilities in advance. Instead, ' + 'there are objects that operate with concrete color spaces. ' + 'Currently there are agg::rgba and agg::rgba8 that operate ' + 'with the most popular RGB color space (strictly speaking there''s ' + 'RGB plus Alpha). The RGB color space is used with different ' + 'pixel formats, like 24-bit RGB or 32-bit RGBA with different ' + 'order of color components. But the common property of all of ' + 'them is that they are essentially RGB. Although, AGG doesn''t ' + 'explicitly support any other color spaces, there is at least ' + 'a potential possibility of adding them. It means that all ' + 'class and function templates that depend on the color type ' + 'are parameterized with the ColorT argument. ' + 'Basically, AGG operates with coordinates of the output device. ' + 'On your screen there are pixels. But unlike many other libraries ' + 'and APIs AGG initially supports Subpixel Accuracy. It means ' + 'that the coordinates are represented as doubles, where fractional ' + 'values actually take effect. AGG doesn''t have an embedded ' + 'conversion mechanism from world to screen coordinates in order ' + 'not to restrict your freedom. It''s very important where and when ' + 'you do that conversion, so, different applications can require ' + 'different approaches. AGG just provides you a transformer of ' + 'that kind, namely, that can convert your own view port to the ' + 'device one. And it''s your responsibility to include it into ' + 'the proper place of the pipeline. You can also write your ' + 'own very simple class that will allow you to operate with ' + 'millimeters, inches, or any other physical units. ' + 'Internally, the rasterizers use integer coordinates of the ' + 'format 24.8 bits, that is, 24 bits for the integer part and 8 ' + 'bits for the fractional one. 
In other words, all the internal ' + 'coordinates are multiplied by 256. If you intend to use AGG in ' + 'some embedded system that has inefficient floating point ' + 'processing, you still can use the rasterizers with their ' + 'integer interfaces. Although, you won''t be able to use the ' + 'floating point coordinate pipelines in this case. '; var text_flip : boolean; font_name : AnsiString; type the_application = object(platform_support ) m_ren_type : rbox_ctrl; m_height , m_width , m_weight , m_gamma : slider_ctrl; m_hinting , m_kerning , m_performance : cbox_ctrl; m_feng : font_engine_win32_tt_int32; m_fman : font_cache_manager; m_old_height : double; m_gamma_lut : gamma_lut; // Pipeline to process the vectors glyph paths (curves + contour) m_curves : conv_curve; m_contour : conv_contour; m_angle : double; constructor Construct(dc : HDC; format_ : pix_format_e; flip_y_ : boolean ); destructor Destruct; function draw_text( ras : rasterizer_scanline_aa_ptr; sl : scanline_ptr; ren_solid : renderer_scanline_aa_solid_ptr; ren_bin : renderer_scanline_bin_solid_ptr ) : unsigned; procedure on_draw; virtual; procedure on_key(x ,y : int; key ,flags : unsigned ); virtual; procedure on_ctrl_change; virtual; end; { CONSTRUCT } constructor the_application.Construct; begin inherited Construct(format_ ,flip_y_ ); m_ren_type.Construct (5.0 ,5.0 ,5.0 + 150.0 ,110.0 ,not flip_y_ ); m_height.Construct (160 ,10.0 ,640 - 5.0 ,18.0 ,not flip_y_ ); m_width.Construct (160 ,30.0 ,640 - 5.0 ,38.0 ,not flip_y_ ); m_weight.Construct (160 ,50.0 ,640 - 5.0 ,58.0 ,not flip_y_ ); m_gamma.Construct (260 ,70.0 ,640 - 5.0 ,78.0 ,not flip_y_ ); m_hinting.Construct (160 ,65.0 ,'Hinting' ,not flip_y_ ); m_kerning.Construct (160 ,80.0 ,'Kerning' ,not flip_y_ ); m_performance.Construct(160 ,95.0 ,'Test Performance' ,not flip_y_ ); m_feng.Construct(dc ); m_fman.Construct(@m_feng ); m_old_height:=0.0; m_gamma_lut.Construct_(8 ,16 ); m_curves.Construct (m_fman.path_adaptor ); m_contour.Construct 
(@m_curves ); m_ren_type.add_item ('Native Mono' ); m_ren_type.add_item ('Native Gray 8' ); m_ren_type.add_item ('AGG Outline' ); m_ren_type.add_item ('AGG Mono' ); m_ren_type.add_item ('AGG Gray 8' ); m_ren_type.cur_item_(1 ); add_ctrl(@m_ren_type ); m_ren_type.no_transform; m_height.label_('Font Height=%.2f' ); m_height.range_(8, 32); m_height.value_(18 ); m_height.num_steps_ (32 - 8 ); m_height.text_thickness_(1.5 ); add_ctrl(@m_height ); m_height.no_transform; m_width.label_('Font Width=%.2f' ); m_width.range_(8 ,32 ); m_width.value_(18 ); m_width.num_steps_ (32 - 8 ); m_width.text_thickness_(1.5 ); add_ctrl(@m_width ); m_width.no_transform; m_weight.label_('Font Weight=%.2f' ); m_weight.range_(-1 ,1 ); m_weight.text_thickness_(1.5 ); add_ctrl(@m_weight ); m_weight.no_transform; m_gamma.label_('Gamma=%.2f' ); m_gamma.range_(0.1 ,2.0 ); m_gamma.value_(1.0 ); m_gamma.text_thickness_(1.5 ); add_ctrl(@m_gamma ); m_gamma.no_transform; add_ctrl(@m_hinting ); m_hinting.status_(true ); m_hinting.no_transform; add_ctrl(@m_kerning ); m_kerning.status_(true ); m_kerning.no_transform; add_ctrl(@m_performance ); m_performance.no_transform; //m_curves.approximation_method_(curve_div ); //m_curves.approximation_scale_ (0.5 ); //m_curves.angle_tolerance_ (0.3 ); m_contour.auto_detect_orientation_(false ); end; { DESTRUCT } destructor the_application.Destruct; begin inherited Destruct; m_ren_type.Destruct; m_height.Destruct; m_width.Destruct; m_weight.Destruct; m_gamma.Destruct; m_hinting.Destruct; m_kerning.Destruct; m_performance.Destruct; m_feng.Destruct; m_fman.Destruct; m_gamma_lut.Destruct; m_curves.Destruct; m_contour.Destruct; end; { DRAW_TEXT } function the_application.draw_text; var gren : glyph_rendering; num_glyphs : unsigned; mtx : trans_affine; taw : trans_affine_skewing; tar : trans_affine_rotation; x ,y0 ,y : double; p : int8u_ptr; rgba : aggclr; glyph : glyph_cache_ptr; begin gren:=glyph_ren_native_mono; case m_ren_type._cur_item of 0 : 
gren:=glyph_ren_native_mono; 1 : gren:=glyph_ren_native_gray8; 2 : gren:=glyph_ren_outline; 3 : gren:=glyph_ren_agg_mono; 4 : gren:=glyph_ren_agg_gray8; end; num_glyphs:=0; m_contour.width_(-m_weight._value * m_height._value * 0.05 ); m_feng.hinting_(m_hinting._status ); m_feng.height_ (m_height._value ); // Font width in Windows is strange. MSDN says, // "specifies the average width", but there's no clue what // this "average width" means. It'd be logical to specify // the width with regard to the font height, like it's done in // FreeType. That is, width == height should mean the "natural", // not distorted glyphs. In Windows you have to specify // the absolute width, which is very stupid and hard to use // in practice. if m_width._value = m_height._value then m_feng.width_(0.0 ) else m_feng.width_(m_width._value / 2.4 ); // m_feng.italic_(true ); m_feng.flip_y_(text_flip ); mtx.Construct; if m_angle <> 0 then begin tar.Construct(deg2rad(m_angle ) ); mtx.multiply (@tar ); end; //taw.Construct(-0.3 ,0 ); mtx.multiply(@taw ); m_feng.transform_(@mtx ); if m_feng.create_font(@font_name[1 ] ,gren ) then begin m_fman.precache(unsigned(' ' ) ,127 ); x :=10.0; y0:=_height - m_height._value - 10.0; y :=y0; p :=@text_[0 ]; while p^ <> 0 do begin glyph:=m_fman.glyph(p^ ); if glyph <> NIL then begin if m_kerning._status then m_fman.add_kerning(@x ,@y ); if x >= _width - m_height._value then begin x :=10.0; y0:=y0 - m_height._value; if y0 <= 120 then break; y:=y0; end; m_fman.init_embedded_adaptors(glyph ,x ,y ); case glyph.data_type of glyph_data_mono : begin rgba.ConstrInt(0 ,0 ,0 ); ren_bin.color_(@rgba ); render_scanlines( m_fman.mono_adaptor , m_fman.mono_scanline , ren_bin ); end; glyph_data_gray8 : begin rgba.ConstrInt (0 ,0 ,0 ); ren_solid.color_(@rgba ); render_scanlines( m_fman.gray8_adaptor , m_fman.gray8_scanline , ren_solid ); end; glyph_data_outline : begin ras.reset; if Abs(m_weight._value ) <= 0.01 then // For the sake of efficiency skip the // contour 
converter if the weight is about zero. ras.add_path(@m_curves ) else ras.add_path(@m_contour ); rgba.ConstrInt (0 ,0 ,0 ); ren_solid.color_(@rgba ); render_scanlines(ras ,sl ,ren_solid ); end; end; // increment pen position x:=x + glyph.advance_x; y:=y + glyph.advance_y; inc(num_glyphs ); end; inc(ptrcomp(p ) ,sizeof(int8u ) ); end; end; result:=num_glyphs; end; { ON_DRAW } procedure the_application.on_draw; var pf : pixel_formats; rgba : aggclr; ren_base : renderer_base; ren_solid : renderer_scanline_aa_solid; ren_bin : renderer_scanline_bin_solid; sl : scanline_u8; ras : rasterizer_scanline_aa; gm_th : gamma_threshold; gm_no : gamma_none; gm_pw : gamma_power; begin // Initialize structures pixfmt_bgr24_gamma(pf ,rbuf_window ,@m_gamma_lut ); ren_base.Construct (@pf ); ren_solid.Construct(@ren_base ); ren_bin.Construct (@ren_base ); rgba.ConstrDbl(1 ,1 ,1 ); ren_base.clear(@rgba ); sl.Construct; ras.Construct; if m_height._value <> m_old_height then begin m_old_height:=m_height._value; m_width.value_(m_old_height ); end; // Setup Gamma if m_ren_type._cur_item = 3 then begin // When rendering in mono format, // Set threshold gamma = 0.5 gm_th.Construct(m_gamma._value / 2.0 ); m_feng.gamma_ (@gm_th ); end else begin gm_no.Construct; m_feng.gamma_(@gm_no ); m_gamma_lut.gamma_(m_gamma._value ); end; // Render the text draw_text(@ras ,@sl ,@ren_solid ,@ren_bin ); // Render the controls gm_pw.Construct(1.0 ); ras.gamma (@gm_pw ); render_ctrl(@ras ,@sl ,@ren_solid ,@m_ren_type ); render_ctrl(@ras ,@sl ,@ren_solid ,@m_height ); render_ctrl(@ras ,@sl ,@ren_solid ,@m_width ); render_ctrl(@ras ,@sl ,@ren_solid ,@m_weight ); render_ctrl(@ras ,@sl ,@ren_solid ,@m_gamma ); render_ctrl(@ras ,@sl ,@ren_solid ,@m_hinting ); render_ctrl(@ras ,@sl ,@ren_solid ,@m_kerning ); render_ctrl(@ras ,@sl ,@ren_solid ,@m_performance ); // Free AGG resources sl.Destruct; ras.Destruct; end; { ON_KEY } procedure the_application.on_key; begin if key = byte(' ' ) then begin text_flip:=not 
text_flip; force_redraw; end; if key = key_kp_minus then begin m_angle:=m_angle + angle_step; if m_angle > 360 then m_angle:=0; force_redraw; end; if key = key_kp_plus then begin m_angle:=m_angle - angle_step; if m_angle < 0 then m_angle:=360 - angle_step; force_redraw; end; if key = key_f1 then message_( 'This example demonstrates the use of the Win32 TrueType font engine with cache. '#13 + 'Cache can keep three types of data, vector path, Anti-Aliased scanline shape, '#13 + 'and monochrome scanline shape. In case of caching scanline shapes the speed '#13 + 'is pretty good and comparable with Windows hardware accelerated font rendering.'#13#13 + 'How to play with:'#13#13 + 'Press the spacebar to flip the text vertically.'#13#13 + 'Key Plus - Increase font angle (not for Natives)'#13 + 'Key Minus - Decrease font angle (not for Natives)' + #13#13'Note: F2 key saves current "screenshot" file in this demo''s directory. ' ); end; { ON_CTRL_CHANGE } procedure the_application.on_ctrl_change; var pf : pixel_formats; rgba : aggclr; ren_base : renderer_base; ren_solid : renderer_scanline_aa_solid; ren_bin : renderer_scanline_bin_solid; sl : scanline_u8; ras : rasterizer_scanline_aa; num_glyphs ,i : unsigned; t : double; buf : array[0..99 ] of char; begin if m_performance._status then begin pixfmt_bgr24_gamma(pf ,rbuf_window ,@m_gamma_lut ); ren_base.Construct (@pf ); ren_solid.Construct(@ren_base ); ren_bin.Construct (@ren_base ); rgba.ConstrDbl(1 ,1 ,1 ); ren_base.clear(@rgba ); sl.Construct; ras.Construct; num_glyphs:=0; start_timer; for i:=0 to 49 do inc(num_glyphs ,draw_text(@ras ,@sl ,@ren_solid ,@ren_bin ) ); t:=elapsed_time; sprintf(@buf[0 ] ,'Glyphs=%u, ' ,num_glyphs ); sprintf(@buf[StrLen(@buf ) ] ,'Time=%.3fms, ' ,t ); sprintf(@buf[StrLen(@buf ) ] ,'%.3f glyps/sec, ' ,(num_glyphs / t ) * 1000.0 ); sprintf(@buf[StrLen(@buf ) ] ,'%.3f microsecond/glyph' , (t / num_glyphs) * 1000.0); message_(@buf[0 ] ); m_performance.status_(false ); force_redraw; sl.Destruct; 
ras.Destruct; end; end; VAR app : the_application; dc : HDC; BEGIN text_flip:=false; font_name:='Arial'; {$IFDEF WIN32 } if ParamCount > 0 then font_name:=ParamStr(1 ); {$ENDIF } dc:=GetDC(0 ); app.Construct(dc ,pix_format_bgr24 ,flip_y ); app.caption_ ('AGG Example. Rendering TrueType Fonts with WinAPI (F1-Help)' ); if app.init(640 ,520 ,window_resize ) then app.run; app.Destruct; ReleaseDC(0 ,dc ); END.
{ "pile_set_name": "Github" }
// (C) Copyright 2009-2011 Frederic Bron. // // Use, modification and distribution are subject to the Boost Software License, // Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt). // // See http://www.boost.org/libs/type_traits for most recent version including documentation. #ifndef BOOST_TT_HAS_MINUS_ASSIGN_HPP_INCLUDED #define BOOST_TT_HAS_MINUS_ASSIGN_HPP_INCLUDED #define BOOST_TT_TRAIT_NAME has_minus_assign #define BOOST_TT_TRAIT_OP -= #define BOOST_TT_FORBIDDEN_IF\ (\ /* Lhs==pointer and Rhs==fundamental and Rhs!=integral */\ (\ ::boost::is_pointer< Lhs_noref >::value && \ ::boost::is_fundamental< Rhs_nocv >::value && \ (! ::boost::is_integral< Rhs_noref >::value )\ ) || \ /* Lhs==void* and Rhs==fundamental */\ (\ ::boost::is_pointer< Lhs_noref >::value && \ ::boost::is_void< Lhs_noptr >::value && \ ::boost::is_fundamental< Rhs_nocv >::value\ ) || \ /* Rhs==void* and Lhs==fundamental */\ (\ ::boost::is_pointer< Rhs_noref >::value && \ ::boost::is_void< Rhs_noptr >::value && \ ::boost::is_fundamental< Lhs_nocv >::value\ ) || \ /* Lhs=fundamental and Rhs=pointer */\ (\ ::boost::is_fundamental< Lhs_nocv >::value && \ ::boost::is_pointer< Rhs_noref >::value\ ) || \ /* Lhs==pointer and Rhs==pointer */\ (\ ::boost::is_pointer< Lhs_noref >::value && \ ::boost::is_pointer< Rhs_noref >::value\ ) || \ /* (Lhs==fundamental or Lhs==pointer) and (Rhs==fundamental or Rhs==pointer) and (Lhs==const) */\ (\ (\ ::boost::is_fundamental< Lhs_nocv >::value || \ ::boost::is_pointer< Lhs_noref >::value\ ) && \ (\ ::boost::is_fundamental< Rhs_nocv >::value || \ ::boost::is_pointer< Rhs_noref >::value\ ) && \ ::boost::is_const< Lhs_noref >::value\ )\ ) #include <boost/type_traits/detail/has_binary_operator.hpp> #undef BOOST_TT_TRAIT_NAME #undef BOOST_TT_TRAIT_OP #undef BOOST_TT_FORBIDDEN_IF #endif
{ "pile_set_name": "Github" }
// Copyright 2009 Google Inc. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Author: vladl@google.com (Vlad Losev) // // The Google C++ Testing Framework (Google Test) // // This file verifies Google Test event listeners receive events at the // right times. 
#include "gtest/gtest.h" #include <vector> using ::testing::AddGlobalTestEnvironment; using ::testing::Environment; using ::testing::InitGoogleTest; using ::testing::Test; using ::testing::TestCase; using ::testing::TestEventListener; using ::testing::TestInfo; using ::testing::TestPartResult; using ::testing::UnitTest; // Used by tests to register their events. std::vector<std::string>* g_events = NULL; namespace testing { namespace internal { class EventRecordingListener : public TestEventListener { public: explicit EventRecordingListener(const char* name) : name_(name) {} protected: virtual void OnTestProgramStart(const UnitTest& /*unit_test*/) { g_events->push_back(GetFullMethodName("OnTestProgramStart")); } virtual void OnTestIterationStart(const UnitTest& /*unit_test*/, int iteration) { Message message; message << GetFullMethodName("OnTestIterationStart") << "(" << iteration << ")"; g_events->push_back(message.GetString()); } virtual void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) { g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpStart")); } virtual void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) { g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpEnd")); } virtual void OnTestCaseStart(const TestCase& /*test_case*/) { g_events->push_back(GetFullMethodName("OnTestCaseStart")); } virtual void OnTestStart(const TestInfo& /*test_info*/) { g_events->push_back(GetFullMethodName("OnTestStart")); } virtual void OnTestPartResult(const TestPartResult& /*test_part_result*/) { g_events->push_back(GetFullMethodName("OnTestPartResult")); } virtual void OnTestEnd(const TestInfo& /*test_info*/) { g_events->push_back(GetFullMethodName("OnTestEnd")); } virtual void OnTestCaseEnd(const TestCase& /*test_case*/) { g_events->push_back(GetFullMethodName("OnTestCaseEnd")); } virtual void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) { g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownStart")); } virtual void 
OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) { g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownEnd")); } virtual void OnTestIterationEnd(const UnitTest& /*unit_test*/, int iteration) { Message message; message << GetFullMethodName("OnTestIterationEnd") << "(" << iteration << ")"; g_events->push_back(message.GetString()); } virtual void OnTestProgramEnd(const UnitTest& /*unit_test*/) { g_events->push_back(GetFullMethodName("OnTestProgramEnd")); } private: std::string GetFullMethodName(const char* name) { return name_ + "." + name; } std::string name_; }; class EnvironmentInvocationCatcher : public Environment { protected: virtual void SetUp() { g_events->push_back("Environment::SetUp"); } virtual void TearDown() { g_events->push_back("Environment::TearDown"); } }; class ListenerTest : public Test { protected: static void SetUpTestCase() { g_events->push_back("ListenerTest::SetUpTestCase"); } static void TearDownTestCase() { g_events->push_back("ListenerTest::TearDownTestCase"); } virtual void SetUp() { g_events->push_back("ListenerTest::SetUp"); } virtual void TearDown() { g_events->push_back("ListenerTest::TearDown"); } }; TEST_F(ListenerTest, DoesFoo) { // Test execution order within a test case is not guaranteed so we are not // recording the test name. g_events->push_back("ListenerTest::* Test Body"); SUCCEED(); // Triggers OnTestPartResult. } TEST_F(ListenerTest, DoesBar) { g_events->push_back("ListenerTest::* Test Body"); SUCCEED(); // Triggers OnTestPartResult. } } // namespace internal } // namespace testing using ::testing::internal::EnvironmentInvocationCatcher; using ::testing::internal::EventRecordingListener; void VerifyResults(const std::vector<std::string>& data, const char* const* expected_data, int expected_data_size) { const int actual_size = data.size(); // If the following assertion fails, a new entry will be appended to // data. Hence we save data.size() first. 
EXPECT_EQ(expected_data_size, actual_size); // Compares the common prefix. const int shorter_size = expected_data_size <= actual_size ? expected_data_size : actual_size; int i = 0; for (; i < shorter_size; ++i) { ASSERT_STREQ(expected_data[i], data[i].c_str()) << "at position " << i; } // Prints extra elements in the actual data. for (; i < actual_size; ++i) { printf(" Actual event #%d: %s\n", i, data[i].c_str()); } } int main(int argc, char **argv) { std::vector<std::string> events; g_events = &events; InitGoogleTest(&argc, argv); UnitTest::GetInstance()->listeners().Append( new EventRecordingListener("1st")); UnitTest::GetInstance()->listeners().Append( new EventRecordingListener("2nd")); AddGlobalTestEnvironment(new EnvironmentInvocationCatcher); GTEST_CHECK_(events.size() == 0) << "AddGlobalTestEnvironment should not generate any events itself."; ::testing::GTEST_FLAG(repeat) = 2; int ret_val = RUN_ALL_TESTS(); const char* const expected_events[] = { "1st.OnTestProgramStart", "2nd.OnTestProgramStart", "1st.OnTestIterationStart(0)", "2nd.OnTestIterationStart(0)", "1st.OnEnvironmentsSetUpStart", "2nd.OnEnvironmentsSetUpStart", "Environment::SetUp", "2nd.OnEnvironmentsSetUpEnd", "1st.OnEnvironmentsSetUpEnd", "1st.OnTestCaseStart", "2nd.OnTestCaseStart", "ListenerTest::SetUpTestCase", "1st.OnTestStart", "2nd.OnTestStart", "ListenerTest::SetUp", "ListenerTest::* Test Body", "1st.OnTestPartResult", "2nd.OnTestPartResult", "ListenerTest::TearDown", "2nd.OnTestEnd", "1st.OnTestEnd", "1st.OnTestStart", "2nd.OnTestStart", "ListenerTest::SetUp", "ListenerTest::* Test Body", "1st.OnTestPartResult", "2nd.OnTestPartResult", "ListenerTest::TearDown", "2nd.OnTestEnd", "1st.OnTestEnd", "ListenerTest::TearDownTestCase", "2nd.OnTestCaseEnd", "1st.OnTestCaseEnd", "1st.OnEnvironmentsTearDownStart", "2nd.OnEnvironmentsTearDownStart", "Environment::TearDown", "2nd.OnEnvironmentsTearDownEnd", "1st.OnEnvironmentsTearDownEnd", "2nd.OnTestIterationEnd(0)", "1st.OnTestIterationEnd(0)", 
"1st.OnTestIterationStart(1)", "2nd.OnTestIterationStart(1)", "1st.OnEnvironmentsSetUpStart", "2nd.OnEnvironmentsSetUpStart", "Environment::SetUp", "2nd.OnEnvironmentsSetUpEnd", "1st.OnEnvironmentsSetUpEnd", "1st.OnTestCaseStart", "2nd.OnTestCaseStart", "ListenerTest::SetUpTestCase", "1st.OnTestStart", "2nd.OnTestStart", "ListenerTest::SetUp", "ListenerTest::* Test Body", "1st.OnTestPartResult", "2nd.OnTestPartResult", "ListenerTest::TearDown", "2nd.OnTestEnd", "1st.OnTestEnd", "1st.OnTestStart", "2nd.OnTestStart", "ListenerTest::SetUp", "ListenerTest::* Test Body", "1st.OnTestPartResult", "2nd.OnTestPartResult", "ListenerTest::TearDown", "2nd.OnTestEnd", "1st.OnTestEnd", "ListenerTest::TearDownTestCase", "2nd.OnTestCaseEnd", "1st.OnTestCaseEnd", "1st.OnEnvironmentsTearDownStart", "2nd.OnEnvironmentsTearDownStart", "Environment::TearDown", "2nd.OnEnvironmentsTearDownEnd", "1st.OnEnvironmentsTearDownEnd", "2nd.OnTestIterationEnd(1)", "1st.OnTestIterationEnd(1)", "2nd.OnTestProgramEnd", "1st.OnTestProgramEnd" }; VerifyResults(events, expected_events, sizeof(expected_events)/sizeof(expected_events[0])); // We need to check manually for ad hoc test failures that happen after // RUN_ALL_TESTS finishes. if (UnitTest::GetInstance()->Failed()) ret_val = 1; return ret_val; }
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*-

__license__ = 'GPL 3'
__copyright__ = '2009, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'

import os
from optparse import OptionParser

from calibre.customize.conversion import OptionRecommendation, DummyReporter
from calibre.ebooks.conversion.plumber import Plumber
from calibre.customize.ui import plugin_for_catalog_format
from calibre.utils.logging import Log


def gui_convert(input, output, recommendations, notification=DummyReporter(),
        abort_after_input_dump=False, log=None, override_input_metadata=False):
    '''Run an ebook conversion on behalf of the GUI.

    :param input: path of the input document (or recipe placeholder)
    :param output: path the converted book is written to
    :param recommendations: iterable of ``(name, value, priority)`` option
        recommendations merged into the conversion pipeline
    :param notification: progress-report callable (default: no-op reporter)
    :param abort_after_input_dump: stop after the input plugin has produced
        its intermediate output (debugging aid)
    :param override_input_metadata: let externally supplied metadata override
        metadata read from the input document
    '''
    recommendations = list(recommendations)
    # Always convert verbosely from the GUI so the job log is useful.
    recommendations.append(('verbose', 2, OptionRecommendation.HIGH))

    if log is None:
        log = Log()

    plumber = Plumber(input, output, log, report_progress=notification,
            abort_after_input_dump=abort_after_input_dump,
            override_input_metadata=override_input_metadata)
    plumber.merge_ui_recommendations(recommendations)

    plumber.run()


def gui_convert_recipe(input, output, recommendations,
        notification=DummyReporter(), abort_after_input_dump=False, log=None,
        override_input_metadata=False):
    '''Convert a news recipe identified by its URN.

    The recipe input plugin reads the recipe to download from the
    ``CALIBRE_RECIPE_URN`` environment variable, so the URN is exported here
    and a placeholder file name is passed to :func:`gui_convert`.
    '''
    os.environ['CALIBRE_RECIPE_URN'] = input
    gui_convert('from-gui.recipe', output, recommendations,
            notification=notification,
            abort_after_input_dump=abort_after_input_dump, log=log,
            override_input_metadata=override_input_metadata)


def gui_convert_override(input, output, recommendations,
        notification=DummyReporter(), abort_after_input_dump=False, log=None):
    '''Same as :func:`gui_convert`, but always overriding input metadata.'''
    gui_convert(input, output, recommendations, notification=notification,
            abort_after_input_dump=abort_after_input_dump, log=log,
            override_input_metadata=True)


def gui_catalog(fmt, title, dbspec, ids, out_file_name, sync, fmt_options,
        connected_device, notification=DummyReporter(), log=None):
    '''Build a catalog of the calibre library in format *fmt*.

    :param fmt: catalog output format (used to look up the catalog plugin)
    :param title: title for the generated catalog
    :param dbspec: device/temp database mapping attached to the db instance
    :param ids: book ids to include in the catalog
    :param out_file_name: path the catalog is written to
    :param sync: whether the catalog is being generated for a device sync
    :param fmt_options: dict of plugin options; list values are flattened to
        comma-separated strings as the catalog plugins expect
    :return: the plugin's result — 0 on success, 1 if no catalog was built
    '''
    if log is None:
        log = Log()
    from calibre.library import db
    from calibre.utils.config import prefs
    prefs.refresh()
    # Rebinds the imported factory name to the database instance; callers of
    # this function never see the shadowed name.
    db = db(read_only=True)
    db.catalog_plugin_on_device_temp_mapping = dbspec

    # Create a minimal OptionParser that we can append to.
    parser = OptionParser()
    args = []
    parser.add_option("--verbose", action="store_true", dest="verbose",
            default=True)
    # BUG FIX: parse the empty local list instead of the process's real
    # sys.argv. The original called parser.parse_args() with no arguments,
    # which reads sys.argv and makes optparse abort on any option meant for
    # the hosting process; the freshly created ``args`` list was never used.
    opts, args = parser.parse_args(args)

    # Populate opts
    # opts.gui_search_text = something
    opts.catalog_title = title
    opts.connected_device = connected_device
    opts.ids = ids
    opts.search_text = None
    opts.sort_by = None
    opts.sync = sync

    # Copy the option dictionary onto opts; list values become
    # comma-separated strings.
    for option, value in fmt_options.items():
        if isinstance(value, list):
            setattr(opts, option, ','.join(value))
        else:
            setattr(opts, option, value)

    # Fetch and run the plugin for fmt.
    # Returns 0 if successful, 1 if no catalog built.
    plugin = plugin_for_catalog_format(fmt)
    return plugin.run(out_file_name, opts, db, notification=notification)
{ "pile_set_name": "Github" }
<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <!-- admin-extra.menu-top.html --> <!-- <li> <a href="#" style="background-image: url(img/ico/construction.png);"> FIRST ITEM </a> </li> -->
{ "pile_set_name": "Github" }
/*
 * This file is part of the TREZOR project, https://trezor.io/
 *
 * Copyright (c) SatoshiLabs
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

// Micropython bindings for NEM address helpers, exposed to embedded Python
// code as the `trezorcrypto.nem` module.

#include "py/objstr.h"

#include "embed/extmod/trezorobj.h"

#include "nem.h"

/// package: trezorcrypto.nem

/// def validate_address(address: str, network: int) -> bool:
///     '''
///     Validate a NEM address
///     '''
// Two-argument binding: `address` must expose a readable buffer
// (mp_get_buffer_raise raises otherwise); `network` is the NEM network id
// passed through to nem_validate_address().
STATIC mp_obj_t mod_trezorcrypto_nem_validate_address(mp_obj_t address,
                                                      mp_obj_t network) {
  mp_buffer_info_t addr;
  mp_get_buffer_raise(address, &addr, MP_BUFFER_READ);
  uint32_t n = trezor_obj_get_uint(network);
  return mp_obj_new_bool(nem_validate_address(addr.buf, n));
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(mod_trezorcrypto_nem_validate_address_obj,
                                 mod_trezorcrypto_nem_validate_address);

/// def compute_address(public_key: bytes, network: int) -> str:
///     '''
///     Compute a NEM address from a public key
///     '''
// Derives the base32 NEM address for `public_key` on `network`; raises
// ValueError when nem_get_address() reports failure.
STATIC mp_obj_t mod_trezorcrypto_nem_compute_address(mp_obj_t public_key,
                                                     mp_obj_t network) {
  mp_buffer_info_t p;
  mp_get_buffer_raise(public_key, &p, MP_BUFFER_READ);
  uint32_t n = trezor_obj_get_uint(network);
  char address[NEM_ADDRESS_SIZE + 1];  // + 1 for the 0 byte
  if (!nem_get_address(p.buf, n, address)) {
    mp_raise_ValueError(
        "Failed to compute a NEM address from provided public key");
  }
  // Return a str object built from the NUL-terminated C string.
  return mp_obj_new_str_of_type(&mp_type_str, (const uint8_t *)address,
                                strlen(address));
}
STATIC MP_DEFINE_CONST_FUN_OBJ_2(mod_trezorcrypto_nem_compute_address_obj,
                                 mod_trezorcrypto_nem_compute_address);

// objects definition
// Maps the Python-visible names onto the function objects above.
STATIC const mp_rom_map_elem_t mod_trezorcrypto_nem_globals_table[] = {
    {MP_ROM_QSTR(MP_QSTR_validate_address),
     MP_ROM_PTR(&mod_trezorcrypto_nem_validate_address_obj)},
    {MP_ROM_QSTR(MP_QSTR_compute_address),
     MP_ROM_PTR(&mod_trezorcrypto_nem_compute_address_obj)},
};
STATIC MP_DEFINE_CONST_DICT(mod_trezorcrypto_nem_globals,
                            mod_trezorcrypto_nem_globals_table);

// module definition
STATIC const mp_obj_module_t mod_trezorcrypto_nem_module = {
    .base = {&mp_type_module},
    .globals = (mp_obj_dict_t *)&mod_trezorcrypto_nem_globals,
};
{ "pile_set_name": "Github" }
# FPGA settings
# Target device: Xilinx Alveo U280 (Virtex UltraScale+ HBM part).
FPGA_PART = xcu280-fsvh2892-2L-e
FPGA_TOP = fpga
FPGA_ARCH = virtexuplus

# Files for synthesis
# Top-level and board-local RTL first, then shared PCIe library modules.
SYN_FILES = rtl/fpga.v
SYN_FILES += rtl/fpga_core.v
SYN_FILES += rtl/sync_reset.v
SYN_FILES += rtl/sync_signal.v
SYN_FILES += rtl/axi_ram.v
SYN_FILES += rtl/axis_register.v
SYN_FILES += lib/pcie/rtl/axis_arb_mux.v
SYN_FILES += lib/pcie/rtl/pcie_us_axil_master.v
SYN_FILES += lib/pcie/rtl/pcie_us_axi_dma.v
SYN_FILES += lib/pcie/rtl/pcie_us_axi_dma_rd.v
SYN_FILES += lib/pcie/rtl/pcie_us_axi_dma_wr.v
SYN_FILES += lib/pcie/rtl/pcie_tag_manager.v
SYN_FILES += lib/pcie/rtl/pcie_us_axi_master.v
SYN_FILES += lib/pcie/rtl/pcie_us_axi_master_rd.v
SYN_FILES += lib/pcie/rtl/pcie_us_axi_master_wr.v
SYN_FILES += lib/pcie/rtl/pcie_us_axis_cq_demux.v
SYN_FILES += lib/pcie/rtl/pcie_us_cfg.v
SYN_FILES += lib/pcie/rtl/pcie_us_msi.v
SYN_FILES += lib/pcie/rtl/arbiter.v
SYN_FILES += lib/pcie/rtl/priority_encoder.v
SYN_FILES += lib/pcie/rtl/pulse_merge.v

# XDC files
# Board pin/timing constraints.
XDC_FILES = fpga.xdc

# IP
# PCIe hard-block IP is generated from the TCL script rather than a
# checked-in XCI file.
#XCI_FILES =
IP_TCL_FILES = ip/pcie4c_uscale_plus_0.tcl

# Shared Vivado build rules (provides the $(FPGA_TOP).bit target).
include ../common/vivado.mk

# Generate a one-shot Vivado TCL script and program the attached device
# with the built bitstream over the hardware server.
program: $(FPGA_TOP).bit
	echo "open_hw" > program.tcl
	echo "connect_hw_server" >> program.tcl
	echo "open_hw_target" >> program.tcl
	echo "current_hw_device [lindex [get_hw_devices] 0]" >> program.tcl
	echo "refresh_hw_device -update_hw_probes false [current_hw_device]" >> program.tcl
	echo "set_property PROGRAM.FILE {$(FPGA_TOP).bit} [current_hw_device]" >> program.tcl
	echo "program_hw_devices [current_hw_device]" >> program.tcl
	echo "exit" >> program.tcl
	vivado -nojournal -nolog -mode batch -source program.tcl
{ "pile_set_name": "Github" }
//
//  GTLogBuffer.m
//  GTKit
//
//  Created on 13-3-7.
//  Tencent is pleased to support the open source community by making
//  Tencent GT (Version 2.4 and subsequent versions) available.
//
//  Notwithstanding anything to the contrary herein, any previous version
//  of Tencent GT shall not be subject to the license hereunder.
//  All right, title, and interest, including all intellectual property rights,
//  in and to the previous version of Tencent GT (including any and all copies thereof)
//  shall be owned and retained by Tencent and subject to the license under the
//  Tencent GT End User License Agreement (http://gt.qq.com/wp-content/EULA_EN.html).
//
//  Copyright (C) 2015 THL A29 Limited, a Tencent company. All rights reserved.
//
//  Licensed under the MIT License (the "License"); you may not use this file
//  except in compliance with the License. You may obtain a copy of the License at
//
//  http://opensource.org/licenses/MIT
//
//  Unless required by applicable law or agreed to in writing, software distributed
//  under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
//  CONDITIONS OF ANY KIND, either express or implied. See the License for the
//  specific language governing permissions and limitations under the License.
//
//

// NOTE: This file is manual reference counting (MRC); retain/release calls
// below are intentional and must not be removed under ARC assumptions.

#ifndef GT_DEBUG_DISABLE

#import "GTLogBuffer.h"
#import "GTConfig.h"
#import "GTLogConfig.h"

// Lifecycle state for a named per-file capture:
//   Clean       — delete the file's on-disk log
//   CleanStart  — delete, then begin capturing again
//   CleanEnd    — delete, flush what remains, then stop
//   Start       — capturing
//   End         — flush what remains, then stop
typedef enum {
    GTLogBufferClean = 0,
    GTLogBufferCleanStart,
    GTLogBufferCleanEnd,
    GTLogBufferStart,
    GTLogBufferEnd
} GTLogBufferFileOp;

@implementation NSString (FileSort)

// Sort comparator on file paths: orders by file modification time with the
// most recently modified file first.
- (NSComparisonResult)fileTimeCompare:(NSString *)other
{
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSDictionary *fileAttributes;

    fileAttributes = [fileManager attributesOfItemAtPath:self error:nil];
    NSDate *date1 = [fileAttributes fileModificationDate];

    fileAttributes = [fileManager attributesOfItemAtPath:other error:nil];
    NSDate *date2 = [fileAttributes fileModificationDate];

    // Comparing date2 against date1 yields the descending (newest-first) order.
    int result = [date2 compare:date1];
    return result;
}

@end

// One named log capture: accumulates lines in `buffer` until the saver thread
// swaps them into `tmpBuf` and writes them to <name>.log.
@implementation GTLogBufferSaveFile

@synthesize name = _name;
@synthesize buffer = _buffer;
@synthesize tmpBuf = _tmpBuf;
@synthesize fileOpType = _fileOpType;

// Designated initializer; `name` becomes the on-disk log file name.
- (id)initWithName:(NSString *)name
{
    self = [super init];
    if(self) {
        [self setName:name];
        _buffer = [[NSMutableString alloc] initWithCapacity:M_GT_KB];
    }
    return self;
}

- (void)dealloc
{
    // NOTE(review): _tmpBuf is not released here — if the tmpBuf property is
    // retain/copy this leaks the last swapped buffer; verify against
    // GTLogBuffer.h.
    [_name release];
    [_buffer release];
    [super dealloc];
}

// Append one log line (newline-prefixed) unless this capture has already been
// ended or scheduled for cleaning.
- (void)addBuffer:(NSString *)buffer
{
    if ((_fileOpType == GTLogBufferCleanEnd)
        || (_fileOpType == GTLogBufferClean)
        || (_fileOpType == GTLogBufferEnd)) {
        return;
    }

    @synchronized (_buffer) {
        [_buffer appendString:@"\n"];
        [_buffer appendString:buffer];
    }
}

// Empty the accumulation buffer in place.
- (void)clearBuffer
{
    [_buffer setString:@""];
}

// Replace the accumulation buffer with a fresh one (the old contents are
// expected to have been handed off via setTmpBuf: first).
- (void)newBuffer
{
    if (_buffer) {
        [_buffer release];
        _buffer = nil;
    }
    _buffer = [[NSMutableString alloc] initWithCapacity:M_GT_KB];
}

@end

// Sentinel meaning the rotating file index has not been determined yet.
#define M_GT_INDEX_INVALID 0xffff

// Buffered logger: accumulates lines in memory and flushes them from a
// background thread, either on a timer tick or when a buffer passes the size
// threshold. Writes a rotating set of numbered .log files plus any number of
// explicitly started named captures.
@implementation GTLogBuffer

@synthesize name = _name;

// `dirName` names the directory the rotating log files live under.
- (id)initWithName:(NSString *)dirName
{
    self = [super init];
    if(self) {
        [self setName:dirName];
        _buffer = [[NSMutableString alloc] initWithCapacity:M_GT_KB];
        _fileIndex = M_GT_INDEX_INVALID;
        _isSaving = NO;
        _timeCnt = 0;
        // 10-second repeating tick; saveBuffer only fires every second tick
        // (see timerCheck:), i.e. roughly every 20 seconds.
        _timer = [NSTimer scheduledTimerWithTimeInterval:10
                                                  target:self
                                                selector:@selector(timerCheck:)
                                                userInfo:nil
                                                 repeats:YES];
        // NOTE(review): a scheduled NSTimer retains its target, so this
        // object is kept alive (and dealloc never runs) until stopTimer is
        // called externally — confirm callers invalidate it.
        [_timer retain];
        _fileList = [[GTList alloc] init];
    }
    return self;
}

- (void)dealloc
{
    [self stopTimer];
    [_name release];
    [_buffer release];
    [_tmpBuf release];
    [_fileList release];
    [super dealloc];
}

// Invalidate and release the flush timer; safe to call repeatedly.
- (void)stopTimer
{
    if (_timer) {
        [_timer invalidate];
        [_timer release];
        _timer = nil;
    }
}

// A buffer is flushed early once it exceeds 16 KB.
- (BOOL)needSave:(NSMutableString *)buffer
{
    if ([buffer length] > 16 * M_GT_KB) {
        return YES;
    }
    return NO;
}

// Append `buffer` to the main log (when auto-save is enabled) and to every
// active named capture; trigger an immediate flush if any buffer has grown
// past the threshold.
- (void)addBuffer:(NSString *)buffer
{
    BOOL needSave = NO;  // local flag; intentionally shadows -needSave:'s name
    M_GT_LOG_SWITCH_CHECK;

    if ([[GTLogConfig sharedInstance] bufferAutoSave]) {
        @synchronized (_buffer) {
            [_buffer appendString:@"\n"];
            [_buffer appendString:buffer];
        }
    }

    for (int i = 0; i < [[_fileList keys] count]; i++) {
        NSString *key = [[_fileList keys] objectAtIndex:i];
        GTLogBufferSaveFile *obj = [_fileList objectForKey:key];
        [obj addBuffer:buffer];
        needSave = (needSave == YES)? YES : [self needSave:[obj buffer]];
    }

    needSave = (needSave == YES)? YES : [self needSave:_buffer];
    if (needSave == YES) {
        [self saveBuffer];
    }
}

// Schedule deletion of the named capture's on-disk file and drop any pending
// in-memory content for it.
- (void)cleanLog:(NSString *)fileName
{
    @synchronized (self) {
        GTLogBufferSaveFile *obj = [_fileList objectForKey:fileName];
        if (obj == nil) {
            obj = [[[GTLogBufferSaveFile alloc] initWithName:fileName] autorelease];
        }
        [obj setFileOpType:GTLogBufferClean];
        [obj clearBuffer];
        [_fileList setObject:obj forKey:fileName];
    }
}

// Begin (or resume) a named capture; a pending Clean becomes CleanStart so
// the old file is deleted before new content is written.
- (void)startLog:(NSString *)fileName
{
    @synchronized (self) {
        GTLogBufferSaveFile *obj = [_fileList objectForKey:fileName];
        if (obj == nil) {
            obj = [[[GTLogBufferSaveFile alloc] initWithName:fileName] autorelease];
            [obj setFileOpType:GTLogBufferStart];
            [_fileList setObject:obj forKey:fileName];
        } else {
            if ([obj fileOpType] == GTLogBufferClean) {
                [obj setFileOpType:GTLogBufferCleanStart];
            } else {
                [obj setFileOpType:GTLogBufferStart];
            }
        }
    }
}

// Stop a named capture; Clean/CleanStart collapse to CleanEnd so the delete
// still happens before the final flush.
- (void)endLog:(NSString *)fileName
{
    @synchronized (self) {
        GTLogBufferSaveFile *obj = [_fileList objectForKey:fileName];
        if (obj != nil) {
            if ([obj fileOpType] == GTLogBufferClean)
            {
                [obj setFileOpType:GTLogBufferCleanEnd];
            } else if ([obj fileOpType] == GTLogBufferCleanStart) {
                [obj setFileOpType:GTLogBufferCleanEnd];
            } else {
                [obj setFileOpType:GTLogBufferEnd];
            }
            [_fileList setObject:obj forKey:fileName];
        }
    }
}

#pragma mark -

// Timer callback: flush on every second tick (~20 s with the 10 s interval).
- (void)timerCheck:(id)sender
{
    _timeCnt++;
    if (_timeCnt > 1) {
        _timeCnt = 0;
        [self saveBuffer];
    }
}

// Drop finished captures (ended/cleaned with nothing left to write) from the
// file list; active ones are kept.
- (void)updateFileList
{
    for (int i = 0; i < [[_fileList keys] count]; i++) {
        NSString *key = [[_fileList keys] objectAtIndex:i];
        GTLogBufferSaveFile *obj = [_fileList objectForKey:key];
        switch ([obj fileOpType]) {
            case GTLogBufferClean:
            case GTLogBufferEnd:
            case GTLogBufferCleanEnd:
                if (([[obj buffer] length] == 0) && ([[obj tmpBuf] length] == 0)) {
                    [_fileList removeObjectForKey:key];
                }
                break;
            case GTLogBufferStart:
            case GTLogBufferCleanStart:
                break;
            default:
                break;
        }
    }
}

// Hand each capture's accumulated buffer to tmpBuf (for the saver thread)
// and give it a fresh accumulation buffer.
- (void)fileListSwitchBuffer
{
    for (int i = 0; i < [[_fileList keys] count]; i++) {
        NSString *key = [[_fileList keys] objectAtIndex:i];
        GTLogBufferSaveFile *obj = [_fileList objectForKey:key];
        [obj setTmpBuf:[obj buffer]];
        [obj newBuffer];
    }
}

// Swap all pending buffers aside and flush them on a background thread.
// _isSaving guards against overlapping flushes; it is reset by autoSave:.
- (void)saveBuffer
{
    if (_isSaving) {
        return;
    }

    // Skip the flush entirely when neither the main buffer nor any capture
    // has pending content.
    BOOL hasContent = NO;
    for (int i = 0; i < [[_fileList keys] count]; i++) {
        NSString *key = [[_fileList keys] objectAtIndex:i];
        GTLogBufferSaveFile *obj = [_fileList objectForKey:key];
        if ([[obj buffer] length] != 0) {
            hasContent = YES;
            break;
        }
    }
    if (hasContent == NO) {
        if ([_buffer length] == 0) {
            return;
        }
    }

    _isSaving = YES;

    // Take ownership of the current buffer for the saver thread and start a
    // fresh one for subsequent appends.
    _tmpBuf = _buffer;
    _buffer = [[NSMutableString alloc] initWithCapacity:M_GT_KB];

    [self fileListSwitchBuffer];
    [self updateFileList];

    NSThread *thread = [[[NSThread alloc] initWithTarget:self
                                                selector:@selector(autoSave:)
                                                  object:nil] autorelease];
    thread.name = [NSString stringWithFormat:@"%@_GTLogBufferAutoSave", _name];
    [thread start];
}

// Return full paths of all files with extension `type` directly inside
// `dirPath` (non-recursive).
- (NSArray *)getFilenamelistOfType:(NSString *)type fromDirPath:(NSString *)dirPath
{
    NSMutableArray *filenamelist = [NSMutableArray arrayWithCapacity:10];
    NSArray *tmplist = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:dirPath
                                                                           error:nil];
    for (NSString *filename in tmplist) {
        NSString *fullpath = [dirPath stringByAppendingPathComponent:filename];
        if ([self isFileExistAtPath:fullpath]) {
            if ([[filename pathExtension] isEqualToString:type]) {
                [filenamelist addObject:fullpath];
            }
        }
    }
    return filenamelist;
}

- (BOOL)isFileExistAtPath:(NSString*)fileFullPath
{
    BOOL isExist = NO;
    isExist = [[NSFileManager defaultManager] fileExistsAtPath:fileFullPath];
    return isExist;
}

// Directory holding this logger's rotating files; created on demand if it
// does not exist yet.
- (NSString *)getBufferDir
{
    NSString *filePath = [[GTConfig sharedInstance] pathForDirByCreated:M_GT_SYS_DIR
                                                               fileName:_name
                                                                 ofType:M_GT_FILE_TYPE_LOG];
    return filePath;
}

// Modification date of the file at `filePath`, or nil if unreadable.
-(NSDate *)getFileTime:(NSString *)filePath
{
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSDictionary *fileAttributes = [fileManager attributesOfItemAtPath:filePath error:nil];
    if (fileAttributes != nil) {
        return [fileAttributes objectForKey:NSFileModificationDate];
    }
    return nil;
}

// Size in bytes of the file at `filePath`, or 0 if unreadable.
- (unsigned long long)getFileSize:(NSString *)filePath
{
    NSFileManager *fileManager = [NSFileManager defaultManager];
    NSDictionary *fileAttributes = [fileManager attributesOfItemAtPath:filePath error:nil];
    if (fileAttributes != nil) {
        return [fileAttributes fileSize];
    }
    return 0;
}

// Index of the most recently modified numbered log file (file names are the
// index, e.g. "3.log"); 0 when the directory has no .log files yet.
- (NSUInteger )getFileIndex
{
    NSUInteger index = 0;

    NSString *logDir = [self getBufferDir];
    NSArray *fileList = [self getFilenamelistOfType:@"log" fromDirPath:logDir];
    if ([fileList count] == 0) {
        return index;
    }

    NSArray *sortedArray = [fileList sortedArrayUsingSelector:@selector(fileTimeCompare:)];
    NSString *fileName = [[[sortedArray objectAtIndex:0] lastPathComponent] stringByDeletingPathExtension];
    index = [fileName integerValue];
    return index;
}

// Path of the current rotating log file. Rotates to the next of 10 slots
// (deleting the slot's old file) once the current file exceeds 1 MB.
- (NSString *)getFilePath
{
    // Determine the latest file index lazily; start from 0 when none exist.
    if (_fileIndex == M_GT_INDEX_INVALID) {
        _fileIndex = [self getFileIndex];
    }

    NSString *sysDir = [[GTConfig sharedInstance] sysDirByCreated];
    NSString *filePath = [NSString stringWithFormat:@"%@%lu.log", sysDir, (unsigned long)_fileIndex];

    // Check whether the file has grown past the rotation threshold.
    unsigned long long fileSize = [self getFileSize:filePath];
    if (fileSize > M_GT_MB) {
        _fileIndex++;
        if (_fileIndex >= 10) {
            _fileIndex = 0;
        }
        filePath = [NSString stringWithFormat:@"%@%lu.log", sysDir, (unsigned long)_fileIndex];
        // Delete the slot's previous file, if any, before reusing it.
        [self delFile:filePath];
    }

    return filePath;
}

// Path of a named capture's log file; the directory is created on demand.
// (The trailing semicolon after the signature is harmless and kept as-is.)
- (NSString *)getFilePathByName:(NSString *)fileName;
{
    NSString *filePath = [[GTConfig sharedInstance] pathForDirByCreated:M_GT_LOG_COMMON_DIR
                                                               fileName:fileName
                                                                 ofType:M_GT_FILE_TYPE_LOG];
    return filePath;
}

#pragma mark - File

// Delete the file at `filePath` if it exists, logging on failure.
- (void)delFile:(NSString *)filePath
{
    if ([self isFileExistAtPath:filePath]) {
        NSError *error;
        if ([[NSFileManager defaultManager] removeItemAtPath:filePath error:&error] != YES) {
            NSLog(@"GT Unable to delete file:%@ error:%@", filePath, [error localizedDescription]);
        }
    }
}

// Append `buffer` to `filePath` as UTF-8 via stdio.
- (void)saveFile:(NSString *)filePath buffer:(NSMutableString *)buffer
{
    FILE *file = fopen([filePath UTF8String], "a+");
    if (file) {
        fprintf(file, "%s", [buffer UTF8String]);
        fflush(file);
        fclose(file);
    }
}

// Background-thread flush: write the swapped-aside main buffer to the
// rotating file, then service every named capture according to its state.
- (void)autoSave:(id)sender
{
    @autoreleasepool {
        NSString *filePath = [self getFilePath];
        [self saveFile:filePath buffer:_tmpBuf];

        for (int i = 0; i < [[_fileList keys] count]; i++) {
            NSString *key = [[_fileList keys] objectAtIndex:i];
            filePath = [self getFilePathByName:key];
            GTLogBufferSaveFile *obj = [_fileList objectForKey:key];
            switch ([obj fileOpType]) {
                case GTLogBufferClean:
                {
                    [self delFile:filePath];
                    break;
                }
                case GTLogBufferCleanStart:
                case GTLogBufferCleanEnd:
                {
                    // Delete the old file first, then write the final/initial
                    // swapped-out contents.
                    [self delFile:filePath];
                    [self saveFile:filePath buffer:[obj tmpBuf]];
                    break;
                }
                case GTLogBufferStart:
                {
                    [self saveFile:filePath buffer:[obj tmpBuf]];
                    break;
                }
                case GTLogBufferEnd:
                {
                    [self saveFile:filePath buffer:[obj tmpBuf]];
                    break;
                }
                default:
                    break;
            }
        }

        _isSaving = NO;
        // NOTE(review): _tmpBuf is released here but not set to nil; if
        // dealloc runs before the next saveBuffer swap, [_tmpBuf release]
        // there would over-release a dangling pointer — verify.
        [_tmpBuf release];
        // NOTE(review): _tmpFileList is never assigned in this file;
        // presumably nil here (releasing nil is a no-op) — verify against
        // GTLogBuffer.h.
        [_tmpFileList release];
    }
}

@end

#endif
{ "pile_set_name": "Github" }