text
stringlengths
2
9.79k
meta
dict
sentences_perturbed
int64
0
2
doc_stats
dict
zram-objs := zram_drv.o xvmalloc.o obj-$(CONFIG_ZRAM) += zram.o
{ "pile_set_name": "Github" }
null
null
var convert = require('./convert'), func = convert('take', require('../take')); func.placeholder = require('./placeholder'); module.exports = func;
{ "pile_set_name": "Github" }
null
null
import Foundation /// A Nimble matcher that succeeds when the actual sequence's last element /// is equal to the expected value. public func endWith<S: Sequence, T: Equatable>(_ endingElement: T) -> Predicate<S> where S.Iterator.Element == T { return Predicate.fromDeprecatedClosure { actualExpression, failureMessage in failureMessage.postfixMessage = "end with <\(endingElement)>" if let actualValue = try actualExpression.evaluate() { var actualGenerator = actualValue.makeIterator() var lastItem: T? var item: T? repeat { lastItem = item item = actualGenerator.next() } while(item != nil) return lastItem == endingElement } return false }.requireNonNil } /// A Nimble matcher that succeeds when the actual collection's last element /// is equal to the expected object. public func endWith(_ endingElement: Any) -> Predicate<NMBOrderedCollection> { return Predicate.fromDeprecatedClosure { actualExpression, failureMessage in failureMessage.postfixMessage = "end with <\(endingElement)>" guard let collection = try actualExpression.evaluate() else { return false } guard collection.count > 0 else { return false } #if os(Linux) guard let collectionValue = collection.object(at: collection.count - 1) as? NSObject else { return false } #else let collectionValue = collection.object(at: collection.count - 1) as AnyObject #endif return collectionValue.isEqual(endingElement) }.requireNonNil } /// A Nimble matcher that succeeds when the actual string contains the expected substring /// where the expected substring's location is the actual string's length minus the /// expected substring's length. 
public func endWith(_ endingSubstring: String) -> Predicate<String> { return Predicate.fromDeprecatedClosure { actualExpression, failureMessage in failureMessage.postfixMessage = "end with <\(endingSubstring)>" if let collection = try actualExpression.evaluate() { return collection.hasSuffix(endingSubstring) } return false }.requireNonNil } #if os(macOS) || os(iOS) || os(tvOS) || os(watchOS) extension NMBObjCMatcher { @objc public class func endWithMatcher(_ expected: Any) -> NMBObjCMatcher { return NMBObjCMatcher(canMatchNil: false) { actualExpression, failureMessage in let actual = try! actualExpression.evaluate() if (actual as? String) != nil { let expr = actualExpression.cast { $0 as? String } return try! endWith(expected as! String).matches(expr, failureMessage: failureMessage) } else { let expr = actualExpression.cast { $0 as? NMBOrderedCollection } return try! endWith(expected).matches(expr, failureMessage: failureMessage) } } } } #endif
{ "pile_set_name": "Github" }
null
null
Filter 1: ON PK Fc 26 Hz Gain 7.1 dB Q 1.50 Filter 2: ON PK Fc 3484 Hz Gain 8.5 dB Q 1.82 Filter 3: ON PK Fc 7544 Hz Gain 5.1 dB Q 4.00 Filter 4: ON PK Fc 9715 Hz Gain 10.1 dB Q 1.73 Filter 5: ON PK Fc 16758 Hz Gain -5.9 dB Q 0.09 Filter 6: ON PK Fc 130 Hz Gain -3.1 dB Q 1.37 Filter 7: ON PK Fc 715 Hz Gain -1.3 dB Q 0.26 Filter 8: ON PK Fc 1261 Hz Gain 3.0 dB Q 2.46 Filter 9: ON PK Fc 4780 Hz Gain 3.0 dB Q 1.68 Filter 10: ON PK Fc 5261 Hz Gain -7.0 dB Q 4.11
{ "pile_set_name": "Github" }
null
null
package com.grace.zhihunews.deliveryLayer; import android.content.Context; /** * Created by Administrator on 2016/9/1. */ /** * 数据交付层接口 */ public interface INewsDetailProvider { void getNewsDetail(int id); }
{ "pile_set_name": "Github" }
null
null
{ "CVE_data_meta": { "ASSIGNER": "security@google.com", "ID": "CVE-2018-17466", "STATE": "PUBLIC" }, "affects": { "vendor": { "vendor_data": [ { "product": { "product_data": [ { "product_name": "Chrome", "version": { "version_data": [ { "version_affected": "<", "version_value": " 70.0.3538.67" } ] } } ] }, "vendor_name": "Google" } ] } }, "data_format": "MITRE", "data_type": "CVE", "data_version": "4.0", "description": { "description_data": [ { "lang": "eng", "value": "Incorrect texture handling in Angle in Google Chrome prior to 70.0.3538.67 allowed a remote attacker to perform an out of bounds memory read via a crafted HTML page." } ] }, "problemtype": { "problemtype_data": [ { "description": [ { "lang": "eng", "value": "Out of bounds read" } ] } ] }, "references": { "reference_data": [ { "name": "[debian-lts-announce] 20181213 [SECURITY] [DLA 1605-1] firefox-esr security update", "refsource": "MLIST", "url": "https://lists.debian.org/debian-lts-announce/2018/12/msg00002.html" }, { "name": "RHSA-2018:3833", "refsource": "REDHAT", "url": "https://access.redhat.com/errata/RHSA-2018:3833" }, { "name": "RHSA-2018:3831", "refsource": "REDHAT", "url": "https://access.redhat.com/errata/RHSA-2018:3831" }, { "name": "DSA-4362", "refsource": "DEBIAN", "url": "https://www.debian.org/security/2019/dsa-4362" }, { "name": "DSA-4330", "refsource": "DEBIAN", "url": "https://www.debian.org/security/2018/dsa-4330" }, { "name": "USN-3844-1", "refsource": "UBUNTU", "url": "https://usn.ubuntu.com/3844-1/" }, { "name": "106168", "refsource": "BID", "url": "http://www.securityfocus.com/bid/106168" }, { "name": "RHSA-2019:0159", "refsource": "REDHAT", "url": "https://access.redhat.com/errata/RHSA-2019:0159" }, { "name": "RHSA-2018:3004", "refsource": "REDHAT", "url": "https://access.redhat.com/errata/RHSA-2018:3004" }, { "name": "DSA-4354", "refsource": "DEBIAN", "url": "https://www.debian.org/security/2018/dsa-4354" }, { "name": "GLSA-201811-10", "refsource": "GENTOO", "url": 
"https://security.gentoo.org/glsa/201811-10" }, { "name": "USN-3868-1", "refsource": "UBUNTU", "url": "https://usn.ubuntu.com/3868-1/" }, { "name": "https://crbug.com/880906", "refsource": "MISC", "url": "https://crbug.com/880906" }, { "name": "https://chromereleases.googleblog.com/2018/10/stable-channel-update-for-desktop.html", "refsource": "CONFIRM", "url": "https://chromereleases.googleblog.com/2018/10/stable-channel-update-for-desktop.html" }, { "name": "105666", "refsource": "BID", "url": "http://www.securityfocus.com/bid/105666" }, { "name": "RHSA-2019:0160", "refsource": "REDHAT", "url": "https://access.redhat.com/errata/RHSA-2019:0160" } ] } }
{ "pile_set_name": "Github" }
null
null
// Copyright Aleksey Gurtovoy 2000-2004 // // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // Preprocessed version of "boost/mpl/greater.hpp" header // -- DO NOT modify by hand! namespace boost { namespace mpl { template< typename Tag1 , typename Tag2 > struct greater_impl : if_c< ( BOOST_MPL_AUX_NESTED_VALUE_WKND(int, Tag1) > BOOST_MPL_AUX_NESTED_VALUE_WKND(int, Tag2) ) , aux::cast2nd_impl< greater_impl< Tag1,Tag1 >,Tag1, Tag2 > , aux::cast1st_impl< greater_impl< Tag2,Tag2 >,Tag1, Tag2 > >::type { }; /// for Digital Mars C++/compilers with no CTPS/TTP support template<> struct greater_impl< na,na > { template< typename U1, typename U2 > struct apply { typedef apply type; BOOST_STATIC_CONSTANT(int, value = 0); }; }; template<> struct greater_impl< na,integral_c_tag > { template< typename U1, typename U2 > struct apply { typedef apply type; BOOST_STATIC_CONSTANT(int, value = 0); }; }; template<> struct greater_impl< integral_c_tag,na > { template< typename U1, typename U2 > struct apply { typedef apply type; BOOST_STATIC_CONSTANT(int, value = 0); }; }; template< typename T > struct greater_tag { typedef typename T::tag type; }; template< typename BOOST_MPL_AUX_NA_PARAM(N1) , typename BOOST_MPL_AUX_NA_PARAM(N2) > struct greater : greater_impl< typename greater_tag<N1>::type , typename greater_tag<N2>::type >::template apply< N1,N2 >::type { BOOST_MPL_AUX_LAMBDA_SUPPORT(2, greater, (N1, N2)) }; BOOST_MPL_AUX_NA_SPEC2(2, 2, greater) }} namespace boost { namespace mpl { template<> struct greater_impl< integral_c_tag,integral_c_tag > { template< typename N1, typename N2 > struct apply : bool_< ( BOOST_MPL_AUX_VALUE_WKND(N1)::value > BOOST_MPL_AUX_VALUE_WKND(N2)::value ) > { }; }; }}
{ "pile_set_name": "Github" }
null
null
<!DOCTYPE html> <html> <!-- (C) by Michael Peter Christen, mc (at) yacy.net licensed under a Creative Commons Attribution 2.0 Generic License (CC-BY 2.0) --> <head> <title id="title"></title> <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" /> <!-- scary MS stuff ahead to prevent that IE runs in "IE=EmulateIE10" mode which cannot run our JS code for some reason --> <meta http-equiv="X-UA-Compatible" content="IE=edge"/> <!-- if you don't see the glyphicons in firefox and you opened this file from a local filesystem, then a CORS security setting in firefox caused that. No other browser is that strict and there is no work around. To see the glyphicons in firefox, open about:config and set security.fileuri.strict_origin_policy to ‘false.’ --> <link rel="shortcut icon" type="image/x-icon" href="/favicon.ico" /> <meta name="Content-Language" content="English, Englisch" /> <meta name="keywords" content="YaCy HTTP search engine spider indexer java network open free download Mac Windows Linux Software development" /> <meta name="description" content="Software HTTP Freeware Home Page" /> <meta name="copyright" content="Michael Christen et al." /> <!-- Ensure proper rendering and touch zooming on mobile devices --> <meta name="viewport" content="width=device-width, initial-scale=1"> <!-- Bootstrap core CSS --> <link href="../bootstrap/css/bootstrap.min.css" rel="stylesheet"> <script src="../bootstrap/js/jquery.min.js" type="text/javascript"></script> <script src="../bootstrap/js/bootstrap.min.js" type="text/javascript"></script> <script src="navigation.js" type="text/javascript"></script> <script src="../js/lib/underscore-min.js" type="text/javascript"></script> <script src="../js/lib/backbone-min.js" type="text/javascript"></script> <script src="../js/setup.js" type="text/javascript"></script> <!-- customization --> <script src="../js/yacysearch.js" type="text/javascript"></script> <!-- Custom styles for this template, i.e. 
navigation (move this to base.css) --> <link href="../css/bootstrap-base.css" rel="stylesheet"> <!-- HTML5 shim and Respond.js IE8 support of HTML5 elements and media queries --> <!--[if lt IE 9]> <script src="../bootstrap/js/html5shiv.js"></script> <script src="../bootstrap/js/respond.min.js"></script> <![endif]--> <!-- old css styles --> <link rel="stylesheet" type="text/css" media="all" href="../css/base.css" /> <link rel="stylesheet" type="text/css" media="screen" href="../css/style.css" /> <!--[if lt IE 6]> <link rel="stylesheet" type="text/css" media="screen" href="../css/oldie.css" /> <![endif]--> <!--[if lte IE 6.0]> <link rel="stylesheet" type="text/css" media="screen" href="../css/ie6.css" /> <![endif]--> <!--[if lte IE 7.0]> <link rel="stylesheet" type="text/css" media="screen" href="../css/ie7.css" /> <![endif]--> <script type="text/javascript"> function handleArrowKeys(evt) { evt = (evt) ? evt : ((window.event) ? event : null); if (evt) { switch (evt.keyCode) { case 9: case 33: window.location.href = document.getElementById("nextpage").href; break; case 34: window.location.href = document.getElementById("prevpage").href; break; case 40: } } } document.onkeydown = handleArrowKeys; </script> <script type="text/javascript" src="../bootstrap/js/typeahead.jquery.min.js"></script> <script type="text/javascript"> var suggestMatcher = function() { return function opensearch(q, cb) { $.getJSON(suggestUrl + "&q="+ q, function(data) { var parsed = []; for (var i = 0; i < data[0][1].length; i++) { var row = data[0][1][i]; if (row) { parsed[parsed.length] = { data: [row], value: row, result: row }; }; }; cb(parsed); }); }; }; $(document).ready(function() { $('#query').typeahead({hint:false,highlight:true,minLength:1}, { name: 'states', displayKey: 'value', source: suggestMatcher() }); }); </script> <style type="text/css">.twitter-typeahead {margin: 0px;padding: 0px;top:2px;}</style> <!-- fix for input window --> </head> <body id="yacysearch" 
onLoad="document.searchform.query.focus();"> <!-- top navigation --> <div class="navbar navbar-default navbar-fixed-top" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-collapse"> <span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </button> <a id="homepage" class="navbar-brand" href="http://yacy.net" style="position:absolute;top:-6px;display:inline;white-space:nowrap;"> <img id="logo" class="yacylogo" src="../images/YaCyLogo2011_60.png" alt="YaCy" style="height:auto; width:auto; max-width:200px; max-height:32px;vertical-align:middle"> </a> <span id="topmenu" style="position:absolute;top:12px;left:80px;display:inline;white-space:nowrap;font-size:2em;"></span> </div> <div class="navbar-collapse collapse"> <ul class="nav navbar-nav navbar-right"> <li id="header_help" class="dropdown"> <a href="#" data-toggle="dropdown" class="dropdown-toggle"><span class="glyphicon glyphicon-question-sign"></span></a> <ul class="dropdown-menu"> <li id="header_search"><a href="index.html?">Search Page</a></li> <li id="header_profile"><a href="about.html?">About This Page</a></li> <li id="header_usage"><a href="usage.html?">Usage of this page templates</a></li> <li id="header_tutorial"><a href="http://yacy.net/tutorials/">YaCy Tutorials</a></li> <li class="divider"></li> <li id="header_download"><a href="http://yacy.net" target="_blank"><i>external</i>&nbsp;&nbsp;&nbsp;Download YaCy</a></li> <li id="header_community"><a href="http://forum.yacy.de" target="_blank"><i>external</i>&nbsp;&nbsp;&nbsp;Community (Web Forums)</a></li> <li id="header_wiki"><a href="http://wiki.yacy.de" target="_blank"><i>external</i>&nbsp;&nbsp;&nbsp;Project Wiki</a></li> <li id="header_git"><a href="https://github.com/yacy/yacy_search_server/commits/master" target="_blank"><i>external</i>&nbsp;&nbsp;&nbsp;Git 
Repository</
{ "pile_set_name": "Github" }
null
null
<?php /** * Copyright © Magento, Inc. All rights reserved. * See COPYING.txt for license details. */ declare(strict_types=1); use Magento\Bundle\Model\Product\Price; use Magento\Catalog\Api\Data\ProductInterfaceFactory; use Magento\Catalog\Api\ProductRepositoryInterface; use Magento\Catalog\Model\Product\Attribute\Source\Status; use Magento\Catalog\Model\Product\Type; use Magento\Catalog\Model\Product\Type\AbstractType; use Magento\Catalog\Model\Product\Visibility; use Magento\Store\Api\WebsiteRepositoryInterface; use Magento\TestFramework\Bundle\Model\PrepareBundleLinks; use Magento\TestFramework\Helper\Bootstrap; use Magento\TestFramework\Workaround\Override\Fixture\Resolver; Resolver::getInstance()->requireDataFixture('Magento/Bundle/_files/multiple_products.php'); $objectManager = Bootstrap::getObjectManager(); /** @var PrepareBundleLinks $prepareBundleLinks */ $prepareBundleLinks = $objectManager->get(PrepareBundleLinks::class); /** @var ProductRepositoryInterface $productRepository */ $productRepository = $objectManager->create(ProductRepositoryInterface::class); /** @var ProductInterfaceFactory $productFactory */ $productFactory = $objectManager->get(ProductInterfaceFactory::class); /** @var WebsiteRepositoryInterface $websiteRepository */ $websiteRepository = $objectManager->get(WebsiteRepositoryInterface::class); $defaultWebsiteId = $websiteRepository->get('base')->getId(); $bundleProduct = $productFactory->create(); $bundleProduct->setTypeId(Type::TYPE_BUNDLE) ->setAttributeSetId($bundleProduct->getDefaultAttributeSetId()) ->setWebsiteIds([$defaultWebsiteId]) ->setName('Bundle Product') ->setSku('fixed_bundle_product_with_special_price') ->setVisibility(Visibility::VISIBILITY_BOTH) ->setStatus(Status::STATUS_ENABLED) ->setStockData( [ 'use_config_manage_stock' => 1, 'qty' => 100, 'is_qty_decimal' => 0, 'is_in_stock' => 1, ] ) ->setPriceView(1) ->setSkuType(1) ->setWeightType(1) ->setPriceType(Price::PRICE_TYPE_FIXED) ->setPrice(50.0) ->setSpecialPrice(80) 
->setShipmentType(AbstractType::SHIPMENT_TOGETHER); $bundleOptionsData = [ [ 'title' => 'Option 1', 'default_title' => 'Option 1', 'type' => 'radio', 'required' => 1, 'delete' => '', ], ]; $bundleSelectionsData = [ [ 'sku' => 'simple1', 'selection_qty' => 1, 'selection_price_value' => 10, 'selection_price_type' => 0, 'selection_can_change_qty' => 1, ], [ 'sku' => 'simple2', 'selection_qty' => 1, 'selection_price_value' => 25, 'selection_price_type' => 1, 'selection_can_change_qty' => 1, ], [ 'sku' => 'simple3', 'selection_qty' => 1, 'selection_price_value' => 25, 'selection_price_type' => 0, 'selection_can_change_qty' => 1, ], ]; $bundleProduct = $prepareBundleLinks->execute($bundleProduct, $bundleOptionsData, [$bundleSelectionsData]); $productRepository->save($bundleProduct);
{ "pile_set_name": "Github" }
null
null
dueboot ======= Based on https://github.com/neykov/armboot, and is a template for Arduino Due projects Compiling --------- Modify the Rakefile with your paths and ports, and then "rake burn" to upload to the Arduino. Structure --------- core.rs - sample program (blinks the led of the Arduino board) arduino.rs - extern stubs for the core Arduino libraries hardware/ - from a random Arduino IDE for OS X Credits ------- - armboot: https://github.com/neykov/armboot - zero.rs: https://github.com/pcwalton/zero.rs
{ "pile_set_name": "Github" }
null
null
#ifndef VCTRS_SLICE_ASSIGN_H #define VCTRS_SLICE_ASSIGN_H #include "owned.h" struct vec_assign_opts { bool assign_names; bool ignore_outer_names; struct vctrs_arg* x_arg; struct vctrs_arg* value_arg; }; SEXP vec_assign_opts(SEXP x, SEXP index, SEXP value, const struct vec_assign_opts* opts); SEXP vec_proxy_assign_opts(SEXP proxy, SEXP index, SEXP value, const enum vctrs_owned owned, const struct vec_assign_opts* opts); SEXP chr_assign(SEXP out, SEXP index, SEXP value, const enum vctrs_owned owned); SEXP list_assign(SEXP out, SEXP index, SEXP value, const enum vctrs_owned owned); SEXP df_assign(SEXP x, SEXP index, SEXP value, const enum vctrs_owned owned, const struct vec_assign_opts* opts); SEXP vec_assign_shaped(SEXP proxy, SEXP index, SEXP value, const enum vctrs_owned owned, const struct vec_assign_opts* opts); #endif
{ "pile_set_name": "Github" }
null
null
// // ParseEnabled.h // Monal // // Created by Anurodh Pokharel on 2/2/15. // Copyright (c) 2015 Monal.im. All rights reserved. // #import "XMPPParser.h" @interface ParseEnabled : XMPPParser /** supports resume on server */ @property (nonatomic, assign, readonly) BOOL resume; @property (nonatomic, copy, readonly) NSString *streamID; /** server's max resumption time */ @property (nonatomic, copy, readonly) NSNumber *max; /** where to reconnect to -- not implemented */ @property (nonatomic, copy, readonly) NSString *location; @end
{ "pile_set_name": "Github" }
null
null
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to license@magento.com so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magento.com for more information. * * @category Tests * @package Tests_Functional * @copyright Copyright (c) 2006-2020 Magento, Inc. (http://www.magento.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ namespace Mage\Admin\Test\Constraint; use Mage\Admin\Test\Fixture\User; use Magento\Mtf\Constraint\AbstractConstraint; use Mage\Admin\Test\Page\Adminhtml\UserIndex; /** * Asserts that user is present in User Grid. */ class AssertUserInGrid extends AbstractConstraint { /** * Constraint severeness. * * @var string */ protected $severeness = 'low'; /** * Asserts that user is present in User Grid. * * @param UserIndex $userIndex * @param User $user * @param User $customAdmin * @return void */ public function processAssert( UserIndex $userIndex, User $user, User $customAdmin = null ) { $adminUser = ($user->hasData('password') || $user->hasData('username')) ? $user : $customAdmin; $filter = ['username' => $adminUser->getUsername()]; $userIndex->open(); \PHPUnit_Framework_Assert::assertTrue( $userIndex->getUserGrid()->isRowVisible($filter), 'User with name \'' . $adminUser->getUsername() . '\' is absent in User grid.' ); } /** * Return string representation of object. * * @return string */ public function toString() { return 'User is present in Users grid.'; } }
{ "pile_set_name": "Github" }
null
null
{ "_from": "react", "_id": "react@0.14.9", "_inBundle": false, "_integrity": "sha1-uqhDTsZ4C96ZfNw4C3nNM7ljk98=", "_location": "/react", "_phantomChildren": {}, "_requested": { "type": "tag", "registry": true, "raw": "react", "name": "react", "escapedName": "react", "rawSpec": "", "saveSpec": null, "fetchSpec": "latest" }, "_requiredBy": [ "#USER", "/" ], "_resolved": "https://registry.npmjs.org/react/-/react-0.14.9.tgz", "_shasum": "baa8434ec6780bde997cdc380b79cd33b96393df", "_spec": "react", "_where": "/home/thomas/Code/dynamic-cdn-webpack-plugin/test/fixtures/multiple", "browserify": { "transform": [ "loose-envify" ] }, "bugs": { "url": "https://github.com/facebook/react/issues" }, "bundleDependencies": false, "dependencies": { "create-react-class": "^15.6.0", "fbjs": "^0.8.9", "loose-envify": "^1.1.0", "object-assign": "^4.1.0", "prop-types": "^15.5.10" }, "deprecated": false, "description": "React is a JavaScript library for building user interfaces.", "engines": { "node": ">=0.10.0" }, "files": [ "LICENSE", "PATENTS", "addons.js", "react.js", "addons/", "dist/", "lib/" ], "homepage": "https://facebook.github.io/react/", "keywords": [ "react" ], "license": "BSD-3-Clause", "main": "react.js", "name": "react", "repository": { "type": "git", "url": "git+https://github.com/facebook/react.git" }, "version": "0.14.9" }
{ "pile_set_name": "Github" }
null
null
name: "BigPanda" url: "https://bigpanda.io/"
{ "pile_set_name": "Github" }
null
null
# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # RAPPOR simulation library. Contains code for encoding simulated data and # creating the map used to encode and decode reports. library(glmnet) library(parallel) # mclapply SetOfStrings <- function(num_strings = 100) { # Generates a set of strings for simulation purposes. strs <- paste0("V_", as.character(1:num_strings)) strs } GetSampleProbs <- function(params) { # Generate different underlying distributions for simulations purposes. # Args: # - params: a list describing the shape of the true distribution: # c(num_strings, prop_nonzero_strings, decay_type, # rate_exponetial). nstrs <- params[[1]] nonzero <- params[[2]] decay <- params[[3]] expo <- params[[4]] background <- params[[5]] probs <- rep(0, nstrs) ind <- floor(nstrs * nonzero) if (decay == "Linear") { probs[1:ind] <- (ind:1) / sum(1:ind) } else if (decay == "Constant") { probs[1:ind] <- 1 / ind } else if (decay == "Exponential") { temp <- seq(0, nonzero, length.out = ind) temp <- exp(-temp * expo) temp <- temp + background temp <- temp / sum(temp) probs[1:ind] <- temp } else { stop('params[[4]] must be in c("Linear", "Exponenential", "Constant")') } probs } EncodeAll <- function(x, cohorts, map, params, num_cores = 1) { # Encodes the ground truth into RAPPOR reports. 
# # Args: # x: Observed strings for each report, Nx1 vector # cohort: Cohort assignment for each report, Nx1 vector # map: list of matrices encoding locations of hashes for each # string, for each cohort # params: System parameters # # Returns: # RAPPOR reports for each piece of data. p <- params$p q <- params$q f <- params$f k <- params$k qstar <- (1 - f / 2) * q + (f / 2) * p pstar <- (1 - f / 2) * p + (f / 2) * q candidates <- colnames(map[[1]]) if (!all(x %in% candidates)) { stop("Some strings are not in the map. set(X) - set(candidates): ", paste(setdiff(unique(x), candidates), collapse=" "), "\n") } bfs <- mapply(function(x, y) y[, x], x, map[cohorts], SIMPLIFY = FALSE, USE.NAMES = FALSE) reports <- mclapply(bfs, function(x) { noise <- sample(0:1, k, replace = TRUE, prob = c(1 - pstar, pstar)) ind <- which(x) noise[ind] <- sample(0:1, length(ind), replace = TRUE, prob = c(1 - qstar, qstar)) noise }, mc.cores = num_cores) reports } CreateMap <- function(strs, params, generate_pos = TRUE, basic = FALSE) { # Creates a list of 0/1 matrices corresponding to mapping between the strs and # Bloom filters for each instance of the RAPPOR. # Ex. for 3 strings, 2 instances, 1 hash function and Bloom filter of size 4, # the result could look this: # [[1]] # 1 0 0 0 # 0 1 0 0 # 0 0 0 1 # [[2]] # 0 1 0 0 # 0 0 0 1 # 0 0 1 0 # # Args: # strs: a vector of strings # params: a list of parameters in the following format: # (k, h, m, p, q, f). # generate_pos: Tells whether to generate an object storing the # positions of the nonzeros in the matrix # basic: Tells whether to use basic RAPPOR (only works if h=1). 
M <- length(strs) map_by_cohort <- list() k <- params$k h <- params$h m <- params$m for (i in 1:m) { if (basic && (h == 1) && (k == M)) { ones <- 1:M } else { ones <- sample(1:k, M * h, replace = TRUE) } cols <- rep(1:M, each = h) map_by_cohort[[i]] <- sparseMatrix(ones, cols, dims = c(k, M)) colnames(map_by_cohort[[i]]) <- strs } all_cohorts_map <- do.call("rBind", map_by_cohort) if (generate_pos) { map_pos <- t(apply(all_cohorts_map, 2, function(x) { ind <- which(x == 1) n <- length(ind) if (n < h * m) { ind <- c(ind, rep(NA, h * m - n)) } ind })) } else { map_pos <- NULL } list(map_by_cohort = map_by_cohort, all_cohorts_map = all_cohorts_map, map_pos = map_pos) } GetSample <- function(N, strs, probs) { # Sample for the strs population with distribution probs. sample(strs, N, replace = TRUE, prob = probs) } GetTrueBits <- function(samp, map, params) { # Convert sample generated by GetSample() to Bloom filters where mapping # is defined in map. # Output: # - reports: a matrix of size [num_instances x size] where each row # represents the number of times each bit in the Bloom filter # was set for a particular instance. # Note: reports[, 1] contains the same size for each instance. N <- length(samp) k <- params$k m <- params$m strs <- colnames(map[[1]]) reports <- matrix(0, m, k + 1) inst <- sample(1:m, N, replace = TRUE) for (i in 1:m) { tab <- table(samp[inst == i]) tab2 <- rep(0, length(strs)) tab2[match(names(tab), strs)] <- tab counts <- apply(map[[i]], 1, function(x) x * tab2) # cat(length(tab2), dim(map[[i]]), dim(counts), "\n") reports[i, ] <- c(sum(tab2), apply(counts, 2, sum)) } reports } GetNoisyBits <- function(truth, params) { # Applies RAPPOR to the Bloom filters. # Args: # - truth: a matrix generated by GetTrueBits(). k <- params$k p <- params$p q <- params$q f <- params$f rappors <- apply(truth, 1, function(x) { # The following samples considering 4 cases: # 1. Signal and we lie on
{ "pile_set_name": "Github" }
null
null
import re def parseDeviceId(id): match = re.search('(#|\\\\)vid_([a-f0-9]{4})&pid_([a-f0-9]{4})(&|#|\\\\)', id, re.IGNORECASE) return [int(match.group(i), 16) if match else None for i in [2, 3]]
{ "pile_set_name": "Github" }
null
null
# repeating [![Build Status](https://travis-ci.org/sindresorhus/repeating.svg?branch=master)](https://travis-ci.org/sindresorhus/repeating) > Repeat a string - fast ## Install ``` $ npm install --save repeating ``` ## Usage ```js const repeating = require('repeating'); repeating('unicorn ', 100); //=> 'unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn unicorn ' ``` ## Related - [repeating-cli](https://github.com/sindresorhus/repeating-cli) - CLI for this module ## License MIT © [Sindre Sorhus](https://sindresorhus.com)
{ "pile_set_name": "Github" }
null
null
/* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ #pragma once #include "SystemComponentFixture.h" #include <Tests/TestAssetCode/AnimGraphFactory.h> namespace EMotionFX { class Actor; class ActorInstance; class AnimGraph; class AnimGraphInstance; class AnimGraphMotionNode; class AnimGraphStateMachine; class AnimGraphStateTransition; class AnimGraphObject; class MotionSet; class AnimGraphTransitionConditionFixture : public SystemComponentFixture { public: void SetUp() override; void TearDown() override; virtual void AddNodesToAnimGraph() { } TwoMotionNodeAnimGraph* GetAnimGraph() const { return m_animGraph.get(); } AnimGraphInstance* GetAnimGraphInstance() const { return m_animGraphInstance; } protected: AnimGraphStateMachine* m_stateMachine = nullptr; AnimGraphInstance* m_animGraphInstance = nullptr; AnimGraphMotionNode* m_motionNodeA = nullptr; AnimGraphMotionNode* m_motionNodeB = nullptr; AnimGraphStateTransition* m_transition = nullptr; AZStd::unique_ptr<Actor> m_actor; AZStd::unique_ptr<TwoMotionNodeAnimGraph> m_animGraph; MotionSet* m_motionSet = nullptr; ActorInstance* m_actorInstance = nullptr; }; }
{ "pile_set_name": "Github" }
null
null
<?xml version="1.0" encoding="utf-8"?> <packages> <package id="System.Reactive" version="4.0.0-preview00001" targetFramework="net462" /> <package id="System.Reactive.Core" version="4.0.0-preview00001" targetFramework="net462" /> <package id="System.Reactive.Interfaces" version="4.0.0-preview00001" targetFramework="net462" /> <package id="System.Reactive.Linq" version="4.0.0-preview00001" targetFramework="net462" /> <package id="System.Reactive.PlatformServices" version="4.0.0-preview00001" targetFramework="net462" /> <package id="System.Reactive.Windows.Threading" version="4.0.0-preview00001" targetFramework="net462" /> <package id="WhiteTie" version="1.3.16" targetFramework="net461" developmentDependency="true" /> </packages>
{ "pile_set_name": "Github" }
null
null
// // TWTRAPIClient.h // // Copyright (c) 2015 Twitter. All rights reserved. // #import "TWTRDefines.h" NS_ASSUME_NONNULL_BEGIN FOUNDATION_EXPORT NSString * const TWTRTweetsNotLoadedKey; @class TWTRUser; @class TWTRTweet; @class TWTRAuthConfig; @class TWTRGuestSession; @protocol TWTRAuthSession; /** * @name Completion Block Types */ /** * Completion block called when the load user request succeeds or fails. * * @param user The Twitter User. * @param error Error that will be set if the API request failed. */ typedef void (^TWTRLoadUserCompletion)(TWTRUser * __twtr_nullable user, NSError * __twtr_nullable error); /** * Completion block called when the load Tweet request succeeds or fails. * * @param tweet The Twitter Tweet. * @param error Error that will be set if the API request failed. */ typedef void (^TWTRLoadTweetCompletion)(TWTRTweet * __twtr_nullable tweet, NSError * __twtr_nullable error); /** * Completion block called when the load Tweets request succeeds or fails. * * @param tweets Tweets that were successfully retrieved. * @param error Error that will be set if the API request failed. */ typedef void (^TWTRLoadTweetsCompletion)(NSArray * __twtr_nullable tweets, NSError * __twtr_nullable error); /** * Completion block called when the network request succeeds or fails. * * @param response Metadata associated with the response to a URL load request. * @param data Content data of the response. * @param connectionError Error object describing the network error that occurred. */ typedef void (^TWTRNetworkCompletion)(NSURLResponse * __twtr_nullable response, NSData * __twtr_nullable data, NSError * __twtr_nullable connectionError); /** * Completion block called when a JSON request to the Twitter API succeeds or fails. * * @param response Metadata associated with the response to a URL load request. * @param responseObject Content data of the response. * @param error Error object describing the network error that occurred. 
*/ typedef void (^TWTRJSONRequestCompletion)(NSURLResponse * __twtr_nullable response, id __twtr_nullable responseObject, NSError * __twtr_nullable error); /** * Client for consuming the Twitter REST API. Provides methods for common API requests, as well as the ability to create and send custom requests. */ @interface TWTRAPIClient : NSObject /** * @name Initialization */ - (instancetype)init __attribute__((unavailable(("Use one of the other `-init...` methods that allow you to provide signing parameters")))); /** * This method is deprecated since TwitterKit v1.4.0. To get an API client, use the one provided by the `Twitter` class. */ - (instancetype)initWithConsumerKey:(NSString *)consumerKey consumerSecret:(NSString *)consumerSecret __attribute__((deprecated)); /** * @name Making Requests */ /** * Returns a signed URL request. * * @param method Request method, GET, POST, PUT, DELETE, etc. * @param URL Request URL. This is the full Twitter API URL. E.g. https://api.twitter.com/1.1/statuses/user_timeline.json * @param parameters Request parameters. * @param error Error that will be set if there was an error signing the request. */ - (NSURLRequest *)URLRequestWithMethod:(NSString *)method URL:(NSString *)URLString parameters:(NSDictionary *)parameters error:(NSError **)error; /** * Sends a Twitter request. * * @param request The request that will be sent asynchronously. * @param completion Completion block to be called on response. Called on main queue. */ - (void)sendTwitterRequest:(NSURLRequest *)request completion:(TWTRNetworkCompletion)completion; /** * @name Common API Actions */ /** * Loads a Twitter User. * * @param userIDString The Twitter user ID of the desired user. * @param completion Completion block to be called on response. Called on main queue. */ - (void)loadUserWithID:(NSString *)userIDString completion:(TWTRLoadUserCompletion)completion; /** * Loads a single Tweet from the network or cache. * * @param tweetIDString The ID of the desired Tweet. 
* @param completion Completion bock to be called on response. Called on main queue. */ - (void)loadTweetWithID:(NSString *)tweetIDString completion:(TWTRLoadTweetCompletion)completion; /** * Loads a series of Tweets in a batch. The completion block will be passed an array of zero or more * Tweets that loaded successfully. If some Tweets fail to load the array will contain less Tweets than * number of requested IDs. If any Tweets fail to load, the IDs of the Tweets that did not load will * be provided in the userInfo dictionary property of the error parameter under `TWTRTweetsNotLoadedKey`. * * @param tweetIDStrings An array of Tweet IDs. * @param completion Completion block to be called on response. Called on main queue. */ - (void)loadTweetsWithIDs:(NSArray *)tweetIDStrings completion:(TWTRLoadTweetsCompletion)completion; @end NS_ASSUME_NONNULL_END
{ "pile_set_name": "Github" }
null
null
"""An observation wrapper that augments observations by pixel values.""" import collections import copy import numpy as np from gym import spaces from gym import ObservationWrapper STATE_KEY = 'state' class PixelObservationWrapper(ObservationWrapper): """Augment observations by pixel values.""" def __init__(self, env, pixels_only=True, render_kwargs=None, pixel_keys=('pixels', )): """Initializes a new pixel Wrapper. Args: env: The environment to wrap. pixels_only: If `True` (default), the original observation returned by the wrapped environment will be discarded, and a dictionary observation will only include pixels. If `False`, the observation dictionary will contain both the original observations and the pixel observations. render_kwargs: Optional `dict` containing keyword arguments passed to the `self.render` method. pixel_keys: Optional custom string specifying the pixel observation's key in the `OrderedDict` of observations. Defaults to 'pixels'. Raises: ValueError: If `env`'s observation spec is not compatible with the wrapper. Supported formats are a single array, or a dict of arrays. ValueError: If `env`'s observation already contains any of the specified `pixel_keys`. 
""" super(PixelObservationWrapper, self).__init__(env) if render_kwargs is None: render_kwargs = {} for key in pixel_keys: render_kwargs.setdefault(key, {}) render_mode = render_kwargs[key].pop('mode', 'rgb_array') assert render_mode == 'rgb_array', render_mode render_kwargs[key]['mode'] = 'rgb_array' wrapped_observation_space = env.observation_space if isinstance(wrapped_observation_space, spaces.Box): self._observation_is_dict = False invalid_keys = set([STATE_KEY]) elif isinstance(wrapped_observation_space, (spaces.Dict, collections.MutableMapping)): self._observation_is_dict = True invalid_keys = set(wrapped_observation_space.spaces.keys()) else: raise ValueError("Unsupported observation space structure.") if not pixels_only: # Make sure that now keys in the `pixel_keys` overlap with # `observation_keys` overlapping_keys = set(pixel_keys) & set(invalid_keys) if overlapping_keys: raise ValueError("Duplicate or reserved pixel keys {!r}." .format(overlapping_keys)) if pixels_only: self.observation_space = spaces.Dict() elif self._observation_is_dict: self.observation_space = copy.deepcopy(wrapped_observation_space) else: self.observation_space = spaces.Dict() self.observation_space.spaces[STATE_KEY] = wrapped_observation_space # Extend observation space with pixels. 
pixels_spaces = {} for pixel_key in pixel_keys: pixels = self.env.render(**render_kwargs[pixel_key]) if np.issubdtype(pixels.dtype, np.integer): low, high = (0, 255) elif np.issubdtype(pixels.dtype, np.float): low, high = (-float('inf'), float('inf')) else: raise TypeError(pixels.dtype) pixels_space = spaces.Box( shape=pixels.shape, low=low, high=high, dtype=pixels.dtype) pixels_spaces[pixel_key] = pixels_space self.observation_space.spaces.update(pixels_spaces) self._env = env self._pixels_only = pixels_only self._render_kwargs = render_kwargs self._pixel_keys = pixel_keys def observation(self, observation): pixel_observation = self._add_pixel_observation(observation) return pixel_observation def _add_pixel_observation(self, wrapped_observation): if self._pixels_only: observation = collections.OrderedDict() elif self._observation_is_dict: observation = type(wrapped_observation)(wrapped_observation) else: observation = collections.OrderedDict() observation[STATE_KEY] = wrapped_observation pixel_observations = { pixel_key: self.env.render(**self._render_kwargs[pixel_key]) for pixel_key in self._pixel_keys } observation.update(pixel_observations) return observation
{ "pile_set_name": "Github" }
null
null
apiVersion: v1 kind: Pod metadata: name: kube-controller-manager namespace: kube-system spec: containers: - command: - /bin/sh - -c - /usr/local/bin/kube-controller-manager --master=127.0.0.1:8080 --cluster-name=${INSTANCE_PREFIX} --cluster-cidr=${CLUSTER_IP_RANGE} --service-cluster-ip-range="${SERVICE_CLUSTER_IP_RANGE}" --allocate-node-cidrs=true --cloud-provider=gce --service-account-private-key-file=/srv/kubernetes/server.key --v=2 --root-ca-file=/srv/kubernetes/ca.crt 1>>/var/log/kube-controller-manager.log 2>&1 image: gcr.io/google_containers/kube-controller-manager:${KUBE_CONTROLLER_MANAGER_DOCKER_TAG} imagePullPolicy: IfNotPresent livenessProbe: httpGet: host: 127.0.0.1 path: /healthz port: 10252 scheme: HTTP initialDelaySeconds: 15 timeoutSeconds: 15 name: kube-controller-manager resources: limits: cpu: 200m requests: cpu: 200m volumeMounts: - mountPath: /srv/kubernetes name: srvkube readOnly: true - mountPath: /var/log/kube-controller-manager.log name: logfile - mountPath: /etc/ssl name: etcssl readOnly: true - mountPath: /usr/share/ca-certificates name: usrsharecacerts readOnly: true dnsPolicy: ClusterFirst hostNetwork: true restartPolicy: Always terminationGracePeriodSeconds: 30 volumes: - hostPath: path: /srv/kubernetes name: srvkube - hostPath: path: /var/log/kube-controller-manager.log name: logfile - hostPath: path: /etc/ssl name: etcssl - hostPath: path: /usr/share/ca-certificates name: usrsharecacerts
{ "pile_set_name": "Github" }
null
null
// Go support for Protocol Buffers - Google's data interchange format // // Copyright 2010 The Go Authors. All rights reserved. // https://github.com/golang/protobuf // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. package proto /* * Support for message sets. */ import ( "errors" ) // errNoMessageTypeID occurs when a protocol buffer does not have a message type ID. // A message type ID is required for storing a protocol buffer in a message set. 
var errNoMessageTypeID = errors.New("proto does not have a message type ID") // The first two types (_MessageSet_Item and messageSet) // model what the protocol compiler produces for the following protocol message: // message MessageSet { // repeated group Item = 1 { // required int32 type_id = 2; // required string message = 3; // }; // } // That is the MessageSet wire format. We can't use a proto to generate these // because that would introduce a circular dependency between it and this package. type _MessageSet_Item struct { TypeId *int32 `protobuf:"varint,2,req,name=type_id"` Message []byte `protobuf:"bytes,3,req,name=message"` } type messageSet struct { Item []*_MessageSet_Item `protobuf:"group,1,rep"` XXX_unrecognized []byte // TODO: caching? } // Make sure messageSet is a Message. var _ Message = (*messageSet)(nil) // messageTypeIder is an interface satisfied by a protocol buffer type // that may be stored in a MessageSet. type messageTypeIder interface { MessageTypeId() int32 } func (ms *messageSet) find(pb Message) *_MessageSet_Item { mti, ok := pb.(messageTypeIder) if !ok { return nil } id := mti.MessageTypeId() for _, item := range ms.Item { if *item.TypeId == id { return item } } return nil } func (ms *messageSet) Has(pb Message) bool { return ms.find(pb) != nil } func (ms *messageSet) Unmarshal(pb Message) error { if item := ms.find(pb); item != nil { return Unmarshal(item.Message, pb) } if _, ok := pb.(messageTypeIder); !ok { return errNoMessageTypeID } return nil // TODO: return error instead? 
} func (ms *messageSet) Marshal(pb Message) error { msg, err := Marshal(pb) if err != nil { return err } if item := ms.find(pb); item != nil { // reuse existing item item.Message = msg return nil } mti, ok := pb.(messageTypeIder) if !ok { return errNoMessageTypeID } mtid := mti.MessageTypeId() ms.Item = append(ms.Item, &_MessageSet_Item{ TypeId: &mtid, Message: msg, }) return nil } func (ms *messageSet) Reset() { *ms = messageSet{} } func (ms *messageSet) String() string { return CompactTextString(ms) } func (*messageSet) ProtoMessage() {} // Support for the message_set_wire_format message option. func skipVarint(buf []byte) []byte { i := 0 for ; buf[i]&0x80 != 0; i++ { } return buf[i+1:] } // unmarshalMessageSet decodes the extension map encoded in buf in the message set wire format. // It is called by Unmarshal methods on protocol buffer messages with the message_set_wire_format option. func unmarshalMessageSet(buf []byte, exts interface{}) error { var m map[int32]Extension switch exts := exts.(type) { case *XXX_InternalExtensions: m = exts.extensionsWrite() case map[int32]Extension: m = exts default: return errors.New("proto: not an extension map") } ms := new(messageSet) if err := Unmarshal(buf, ms); err != nil { return err } for _, item := range ms.Item { id := *item.TypeId msg := item.Message // Restore wire type and field number varint, plus length varint. // Be careful to preserve duplicate items. b := EncodeVarint(uint64(id)<<3 | WireBytes) if ext, ok := m[id]; ok { // Existing data; rip off the tag and length varint // so we join the new data correctly. // We can assume that ext.enc is set because we are unmarshaling. o := ext.enc[len(b):] // skip wire type and field number _, n := DecodeVarint(o) // calculate length of length varint o = o[n:] // skip length varint msg = append(o, msg...) // join old data and new data } b = append(b, EncodeVarint(uint64(len(msg)))...) b = append(b, msg...) m[id] = Extension{enc: b} } return nil }
{ "pile_set_name": "Github" }
null
null
/* * This file is part of the CmBacktrace Library. * * Copyright (c) 2016-2017, Armink, <armink.ztl@gmail.com> * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * 'Software'), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * Function: Initialize function and other general function. * Created on: 2016-12-15 */ #include "utils/debug/CmBacktrace/cm_backtrace.h" #include <stdbool.h> #include <string.h> #include <stdio.h> #if __STDC_VERSION__ < 199901L #error "must be C99 or higher. 
try to add '-std=c99' to compile parameters" #endif #if defined(__CC_ARM) #define SECTION_START(_name_) _name_##$$Base #define SECTION_END(_name_) _name_##$$Limit #define IMAGE_SECTION_START(_name_) Image$$##_name_##$$Base #define IMAGE_SECTION_END(_name_) Image$$##_name_##$$Limit #define CSTACK_BLOCK_START(_name_) SECTION_START(_name_) #define CSTACK_BLOCK_END(_name_) SECTION_END(_name_) #define CODE_SECTION_START(_name_) IMAGE_SECTION_START(_name_) #define CODE_SECTION_END(_name_) IMAGE_SECTION_END(_name_) extern const int CSTACK_BLOCK_START(CMB_CSTACK_BLOCK_NAME); extern const int CSTACK_BLOCK_END(CMB_CSTACK_BLOCK_NAME); extern const int CODE_SECTION_START(CMB_CODE_SECTION_NAME); extern const int CODE_SECTION_END(CMB_CODE_SECTION_NAME); #elif defined(__ICCARM__) #pragma section=CMB_CSTACK_BLOCK_NAME #pragma section=CMB_CODE_SECTION_NAME #elif defined(__GNUC__) extern const int CMB_CSTACK_BLOCK_START; extern const int CMB_CSTACK_BLOCK_END; extern const int CMB_CODE_SECTION_START; extern const int CMB_CODE_SECTION_END; #else #error "not supported compiler" #endif enum { PRINT_FIRMWARE_INFO, PRINT_ASSERT_ON_THREAD, PRINT_ASSERT_ON_HANDLER, PRINT_THREAD_STACK_INFO, PRINT_MAIN_STACK_INFO, PRINT_THREAD_STACK_OVERFLOW, PRINT_MAIN_STACK_OVERFLOW, PRINT_CALL_STACK_INFO, PRINT_CALL_STACK_ERR, PRINT_FAULT_ON_THREAD, PRINT_FAULT_ON_HANDLER, PRINT_REGS_TITLE, PRINT_HFSR_VECTBL, PRINT_MFSR_IACCVIOL, PRINT_MFSR_DACCVIOL, PRINT_MFSR_MUNSTKERR, PRINT_MFSR_MSTKERR, PRINT_MFSR_MLSPERR, PRINT_BFSR_IBUSERR, PRINT_BFSR_PRECISERR, PRINT_BFSR_IMPREISERR, PRINT_BFSR_UNSTKERR, PRINT_BFSR_STKERR, PRINT_BFSR_LSPERR, PRINT_UFSR_UNDEFINSTR, PRINT_UFSR_INVSTATE, PRINT_UFSR_INVPC, PRINT_UFSR_NOCP, PRINT_UFSR_UNALIGNED, PRINT_UFSR_DIVBYZERO0, PRINT_DFSR_HALTED, PRINT_DFSR_BKPT, PRINT_DFSR_DWTTRAP, PRINT_DFSR_VCATCH, PRINT_DFSR_EXTERNAL, PRINT_MMAR, PRINT_BFAR, }; static const char * const print_info[] = { #if (CMB_PRINT_LANGUAGE == CMB_PRINT_LANGUAGE_ENGLISH) [PRINT_FIRMWARE_INFO] = "Firmware 
name: %s, hardware version: %s, software version: %s", [PRINT_ASSERT_ON_THREAD] = "Assert on thread %s", [PRINT_ASSERT_ON_HANDLER] = "Assert on interrupt or bare metal(no OS) environment", [PRINT_THREAD_STACK_INFO] = "===== Thread stack information =====", [PRINT_MAIN_STACK_INFO] = "====== Main stack information ======", [PRINT_THREAD_STACK_OVERFLOW] = "Error: Thread stack(%08x) was overflow", [PRINT_MAIN_STACK_OVERFLOW] = "Error: Main stack(%08x) was overflow", [PRINT_CALL_STACK_INFO] = "Show more call stack info by run: addr2line -e %s%s -a -f %.*s", [PRINT_CALL_STACK_ERR] = "Dump call stack has an error", [PRINT_FAULT_ON_THREAD] = "Fault on thread %s", [PRINT_FAULT_ON_HANDLER] = "Fault on interrupt or bare metal(no OS) environment", [PRINT_REGS_TITLE] = "=================== Registers information ====================", [PRINT_HFSR_VECTBL] = "Hard fault is caused by failed vector fetch", [PRINT_MFSR_IACCVIOL] = "Memory management fault is caused by instruction access violation", [PRINT_MFSR_DACCVIOL] = "Memory management fault is caused by data access violation", [PRINT_MFSR_MUNSTKERR] = "Memory management fault is caused by unstacking error", [PRINT_MFSR_MSTKERR] = "Memory management fault is caused by stacking error", [PRINT_MFSR_MLSPERR] = "Memory management fault is caused by floating-point lazy state preservation", [PRINT_BFSR_IBUSERR] = "Bus fault is caused by instruction access violation", [PRINT_BFSR_PRECISERR] = "Bus fault is caused by precise data access violation", [PRINT_BFSR_IMPREISERR] = "Bus fault is caused by imprecise data access violation", [PRINT_BFSR_UNSTKERR] = "Bus fault is caused by unstacking error", [PRINT_BFSR_STKERR] = "Bus fault is caused by stacking error", [PRINT_BFSR_LSPERR] = "Bus fault is caused by floating-point lazy state preservation", [PRINT_UFSR_UNDEFINSTR] = "Usage fault is caused by attempts to execute an undefined instruction", [PRINT_UFSR_INVSTATE] = "Usage fault is caused by attempts to switch to an invalid state (e.g., 
ARM)", [PRINT_UFSR_INVPC] = "Usage fault is caused by attempts to do an exception with a bad value in the EXC_RETURN number", [PRINT_UFSR_NOCP] = "Usage fault is caused by attempts to execute a coprocessor instruction", [PRINT_UFSR_UNALIGNED] = "Usage fault is caused by indicates that an unaligned access fault has taken place", [
{ "pile_set_name": "Github" }
null
null
.form-component { display: flex; flex-direction: column; margin: var(--spacing-half) 0; & > *:not(:last-child) { margin-bottom: var(--spacing); } hr { width: 100%; border: none; height: 1px; border-bottom: var(--base-border); } }
{ "pile_set_name": "Github" }
null
null
<html> <head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> <title>WebGL Shader Common Function Conformance Tests</title> <link rel="stylesheet" href="../../../resources/js-test-style.css"/> <script src="../../../js/js-test-pre.js"></script> <script src="../../../js/webgl-test-utils.js"></script> <script src="../../../closure-library/closure/goog/base.js"></script> <script src="../../deqp-deps.js"></script> <script>goog.require('functional.gles3.es3fShaderCommonFunctionTests');</script> </head> <body> <div id="description"></div> <div id="console"></div> <canvas id="canvas" width="256" height="256"> </canvas> <script> var wtu = WebGLTestUtils; var gl = wtu.create3DContext('canvas', null, 2); functional.gles3.es3fShaderCommonFunctionTests.run(gl); </script> </body> </html>
{ "pile_set_name": "Github" }
null
null
%YAML 1.1 %TAG !u! tag:unity3d.com,2011: --- !u!29 &1 OcclusionCullingSettings: m_ObjectHideFlags: 0 serializedVersion: 2 m_OcclusionBakeSettings: smallestOccluder: 5 smallestHole: 0.25 backfaceThreshold: 100 m_SceneGUID: 00000000000000000000000000000000 m_OcclusionCullingData: {fileID: 0} --- !u!104 &2 RenderSettings: m_ObjectHideFlags: 0 serializedVersion: 8 m_Fog: 0 m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1} m_FogMode: 3 m_FogDensity: 0.01 m_LinearFogStart: 0 m_LinearFogEnd: 300 m_AmbientSkyColor: {r: 0.212, g: 0.227, b: 0.259, a: 1} m_AmbientEquatorColor: {r: 0.114, g: 0.125, b: 0.133, a: 1} m_AmbientGroundColor: {r: 0.047, g: 0.043, b: 0.035, a: 1} m_AmbientIntensity: 1 m_AmbientMode: 0 m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1} m_SkyboxMaterial: {fileID: 10304, guid: 0000000000000000f000000000000000, type: 0} m_HaloStrength: 0.5 m_FlareStrength: 1 m_FlareFadeSpeed: 3 m_HaloTexture: {fileID: 0} m_SpotCookie: {fileID: 10001, guid: 0000000000000000e000000000000000, type: 0} m_DefaultReflectionMode: 0 m_DefaultReflectionResolution: 128 m_ReflectionBounces: 1 m_ReflectionIntensity: 1 m_CustomReflection: {fileID: 0} m_Sun: {fileID: 0} m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1} --- !u!157 &3 LightmapSettings: m_ObjectHideFlags: 0 serializedVersion: 11 m_GIWorkflowMode: 1 m_GISettings: serializedVersion: 2 m_BounceScale: 1 m_IndirectOutputScale: 1 m_AlbedoBoost: 1 m_TemporalCoherenceThreshold: 1 m_EnvironmentLightingMode: 0 m_EnableBakedLightmaps: 1 m_EnableRealtimeLightmaps: 1 m_LightmapEditorSettings: serializedVersion: 9 m_Resolution: 2 m_BakeResolution: 40 m_TextureWidth: 1024 m_TextureHeight: 1024 m_AO: 0 m_AOMaxDistance: 1 m_CompAOExponent: 1 m_CompAOExponentDirect: 0 m_Padding: 2 m_LightmapParameters: {fileID: 0} m_LightmapsBakeMode: 1 m_TextureCompression: 1 m_FinalGather: 0 m_FinalGatherFiltering: 1 m_FinalGatherRayCount: 256 m_ReflectionCompression: 2 m_MixedBakeMode: 1 m_BakeBackend: 0 m_PVRSampling: 1 m_PVRDirectSampleCount: 32 
m_PVRSampleCount: 500 m_PVRBounces: 2 m_PVRFilterTypeDirect: 0 m_PVRFilterTypeIndirect: 0 m_PVRFilterTypeAO: 0 m_PVRFilteringMode: 0 m_PVRCulling: 1 m_PVRFilteringGaussRadiusDirect: 1 m_PVRFilteringGaussRadiusIndirect: 5 m_PVRFilteringGaussRadiusAO: 2 m_PVRFilteringAtrousPositionSigmaDirect: 0.5 m_PVRFilteringAtrousPositionSigmaIndirect: 2 m_PVRFilteringAtrousPositionSigmaAO: 1 m_LightingDataAsset: {fileID: 0} m_UseShadowmask: 0 --- !u!196 &4 NavMeshSettings: serializedVersion: 2 m_ObjectHideFlags: 0 m_BuildSettings: serializedVersion: 2 agentTypeID: 0 agentRadius: 0.5 agentHeight: 2 agentSlope: 45 agentClimb: 0.4 ledgeDropHeight: 0 maxJumpAcrossDistance: 0 minRegionArea: 2 manualCellSize: 0 cellSize: 0.16666667 manualTileSize: 0 tileSize: 256 accuratePlacement: 0 debug: m_Flags: 0 m_NavMeshData: {fileID: 0} --- !u!1 &137962126 GameObject: m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 0} serializedVersion: 5 m_Component: - component: {fileID: 137962127} - component: {fileID: 137962129} - component: {fileID: 137962128} m_Layer: 0 m_Name: Default m_TagString: Untagged m_Icon: {fileID: 0} m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 --- !u!4 &137962127 Transform: m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 137962126} m_LocalRotation: {x: -0, y: -0, z: -0, w: 1} m_LocalPosition: {x: 0.014, y: 0.361, z: -0.55} m_LocalScale: {x: 0.014285714, y: 0.014285714, z: 0.049999997} m_Children: [] m_Father: {fileID: 1541181469} m_RootOrder: 3 m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} --- !u!102 &137962128 TextMesh: serializedVersion: 3 m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 137962126} m_Text: Tap/Hold m_OffsetZ: 0 m_CharacterSize: 1 m_LineSpacing: 1 m_Anchor: 1 m_Alignment: 1 m_TabSize: 4 m_FontSize: 64 m_FontStyle: 0 m_RichText: 1 m_Font: {fileID: 10102, guid: 0000000000000000e000000000000000, type: 0} 
m_Color: serializedVersion: 2 rgba: 4286444430 --- !u!23 &137962129 MeshRenderer: m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 0} m_GameObject: {fileID: 137962126} m_Enabled: 1 m_CastShadows: 1 m_ReceiveShadows: 1 m_DynamicOccludee: 1 m_MotionVectors: 1 m_LightProbeUsage: 1 m_ReflectionProbeUsage: 1 m_Materials: - {fileID: 10100, guid: 0000000000000000e000000000000000, type: 0} m_StaticBatchInfo: firstSubMesh: 0 subMeshCount: 0 m_StaticBatchRoot: {fileID: 0} m_ProbeAnchor: {fileID: 0} m_LightProbeVolumeOverride: {fileID: 0} m_ScaleInLightmap: 1
{ "pile_set_name": "Github" }
null
null
<?xml version="1.0" encoding="ascii"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> <head> <title>plaintext</title> <link rel="stylesheet" href="epydoc.css" type="text/css" /> <script type="text/javascript" src="epydoc.js"></script> </head> <body bgcolor="white" text="black" link="blue" vlink="#204080" alink="#204080"> <h1 class="toc">Module plaintext</h1> <hr /> <h2 class="toc">Classes</h2> <a target="mainFrame" href="epydoc.markup.plaintext.ParsedPlaintextDocstring-class.html" >ParsedPlaintextDocstring</a><br /> <h2 class="toc">Functions</h2> <a target="mainFrame" href="epydoc.markup.plaintext-module.html#parse_docstring" >parse_docstring</a><br /><hr /> <span class="options">[<a href="javascript:void(0);" class="privatelink" onclick="toggle_private();">hide&nbsp;private</a>]</span> <script type="text/javascript"> <!-- // Private objects are initially displayed (because if // javascript is turned off then we want them to be // visible); but by default, we want to hide them. So hide // them unless we have a cookie that says to show them. checkCookie(); // --> </script> </body> </html>
{ "pile_set_name": "Github" }
null
null
<?include get_cfg_var("cartulary_conf").'/includes/env.php';?> <?include "$confroot/$templates/php_cgi_init.php"?> <? // Json header header("Cache-control: no-cache, must-revalidate"); header("Content-Type: application/json"); // Globals $jsondata = array(); //debug request //loggit(3, "DEBUG: ".print_r($_REQUEST, TRUE)); //Check that s3 is enabled if( !s3_is_enabled($uid) && !sys_s3_is_enabled() ) { //Log it loggit(2,"User didn't have s3 enabled for opml save: [$uid]."); $jsondata['status'] = "false"; $jsondata['description'] = "Configure s3 in the prefs to enable saving."; echo json_encode($jsondata); exit(1); } //Get the title $title = ""; if ( isset($_REQUEST['title']) ) { $title = $_REQUEST['title']; } //Opml type if(isset($_REQUEST['type']) && is_numeric($_REQUEST['type'])) { $type = $_REQUEST['type']; if( $type == 1 ) loggit(3, "DEBUG: RSS file from editor."); } else { $type = 0; } //Render the title? $rendertitle = TRUE; if ( isset($_REQUEST['rendertitle']) && $_REQUEST['rendertitle'] == "false" ) { $rendertitle = FALSE; } loggit(3, "DEBUG: [".$_REQUEST['rendertitle']."]"); //Get the redirect source $rhost = ""; if ( isset($_REQUEST['redirect']) && !empty($_REQUEST['redirect']) ) { $rhost = $_REQUEST['redirect']; if($rhost == $system_fqdn) { //Log it loggit(2,"User tried to set a document redirect to the system FQDN: [$uid|$rhost]."); $jsondata['status'] = "false"; $jsondata['description'] = "You can't use that host name as a redirect."; echo json_encode($jsondata); exit(1); } } //Get disqus bool $disqus = FALSE; if ( isset($_REQUEST['disqus']) && $_REQUEST['disqus'] == "true" ) { $disqus = TRUE; } //Get wysiwyg bool $wysiwyg = FALSE; if ( isset($_REQUEST['wysiwyg']) && $_REQUEST['wysiwyg'] == "true" ) { $wysiwyg = TRUE; } //Is this an article that was edited $aid = ""; if ( isset($_REQUEST['aid']) && $_REQUEST['aid'] != "false" ) { $aid = $_REQUEST['aid']; } //Do we need to overwrite the existing article $articleoverwrite = ""; if ( 
isset($_REQUEST['articleoverwrite']) && $_REQUEST['articleoverwrite'] != "false" ) { $articleoverwrite = $_REQUEST['articleoverwrite']; } //Get watched bool $watched = FALSE; if ( isset($_REQUEST['watched']) && $_REQUEST['watched'] == "true" ) { $watched = TRUE; } //Get locked bool $locked = FALSE; if ( isset($_REQUEST['locked']) && $_REQUEST['locked'] == "true" ) { $locked = TRUE; } //Get private bool $private = FALSE; if ( isset($_REQUEST['private']) && $_REQUEST['private'] == "true" ) { $private = TRUE; } //Get private token $privtoken = ""; if ( isset($_REQUEST['privtoken']) && !empty($_REQUEST['privtoken']) ) { $privtoken = $_REQUEST['privtoken']; } //Get variables if any $variables = []; if ( isset($_REQUEST['variables']) && !empty($_REQUEST['variables'])) { $variables = $_REQUEST['variables']; loggit(3, print_r($variables, TRUE)); } $templateid = ""; if ( isset($_REQUEST['templateid']) && !empty($_REQUEST['templateid'])) { $templateid = $_REQUEST['templateid']; loggit(3, "Template id: [$templateid]"); } //Get a template name if set $templatename = ""; if ( isset($_REQUEST['templatename']) && !empty($_REQUEST['templatename']) ) { $templatename = $_REQUEST['templatename']; loggit(3, "Template name: [$templatename]"); } //If the outline is private make sure we have a token if( $private && empty($privtoken) ) { $privtoken = time().random_gen(64); } //Make sure we have a filename to use if ( isset($_REQUEST['filename']) ) { $filename = $_REQUEST['filename']; } else { //Log it loggit(2,"No filename was set for this opml save."); $jsondata['status'] = "false"; $jsondata['description'] = "No filename given."; echo json_encode($jsondata); exit(1); }; //Do we have an old filename? 
If so, this is a file name change $oldfilename = ""; if ( isset($_REQUEST['oldfilename']) ) { $oldfilename = $_REQUEST['oldfilename']; }; //Get the opml data if ( isset($_REQUEST['opml']) ) { $opml = $_REQUEST['opml']; } else { //Log it loggit(2,"No opml data was set for this opml save."); $jsondata['status'] = "false"; $jsondata['description'] = "No opml data given."; echo json_encode($jsondata); exit(1); }; //We're going to need the S3 url of this file to continue $s3url = get_s3_url($uid, "/opml/", $filename); //Before we do anything we need to confirm that this user id has permissions to update this file. If its a new //file, we need to allow for that if( !empty($templateid) ) { if( !user_can_edit_recent_file_by_id($uid, $templateid) ) { //Log it loggit(2,"User: [$uid] tried to save variables for a template id they don't own."); $jsondata['status'] = "false"; $jsondata['description'] = "You don't have permission to work with this template."; echo json_encode($jsondata); exit(1); } } //Put the opml file in S3 $s3info = get_s3_info($uid); if(!$private) { $s3res = putInS3(gzencode($opml), $filename, $s3info['bucket']."/opml", $s3info['key'], $s3info['secret'], array( 'Content-Type' => 'text/xml', 'Content-Encoding' => 'gzip' ), $private); if(!$s3res) { loggit(2, "Could not create S3 file: [$filename] for user: [$uid]."); loggit(3, "Could not create S3 file: [$filename] for user: [$uid]."); //Log it $jsondata['status'] = "false"; $jsondata['description'] = "Error writing to S3."; echo json_encode($jsondata); exit(1); } else { loggit(1, "Wrote opml to S3 at url: [$s3url]."); } } else { //Delete the opml file from S3 if the outline is marked private since it's not even usable loggit(3, "Deleting private outline OPML in S3: [".$s3info['bucket']."/opml"." | ".$filename."]"); $s3res = deleteFromS3($filename, $s3info['bucket']."/opml", $s3info['key'], $s3info['secret']); } //Put the opml content in IPFS $opmlhash = add_content_to
{ "pile_set_name": "Github" }
null
null
<?xml version='1.0' encoding='utf-8'?> <section xmlns="https://code.dccouncil.us/schemas/dc-library" xmlns:codified="https://code.dccouncil.us/schemas/codified" xmlns:codify="https://code.dccouncil.us/schemas/codify" xmlns:xi="http://www.w3.org/2001/XInclude" containing-doc="D.C. Code"> <num>21-521</num> <heading>Detention of persons believed to be mentally ill; transportation and application to hospital.</heading> <text>An accredited officer or agent of the Department of Mental Health of the District of Columbia, or an officer authorized to make arrests in the District of Columbia, or a physician or qualified psychologist of the person in question, who has reason to believe that a person is mentally ill and, because of the illness, is likely to injure himself or others if he is not immediately detained may, without a warrant, take the person into custody, transport him to a public or private hospital, or to the Department, and make application for his admission thereto for purposes of emergency observation and diagnosis. The application shall reveal the circumstances under which the person was taken into custody and the reasons therefor.</text> <annotations> <annotation doc="Pub. L. 89-183" type="History">Sept. 14, 1965, 79 Stat. 753, Pub. L. 89-183, § 1</annotation> <annotation doc="Pub. L. 91-358" type="History">July 29, 1970, 84 Stat. 567, Pub. L. 91-358, title I, § 150(c)(2)</annotation> <annotation doc="D.C. Law 5-48" type="History">Feb. 24, 1984, D.C. Law 5-48,§ 11(a)(8), 30 DCR 5778</annotation> <annotation doc="D.C. Law 7-104" type="History">Apr. 30, 1988, D.C. Law 7-104, § 6(f), 35 DCR 147</annotation> <annotation doc="D.C. Law 14-56" type="History">Dec. 18, 2001, D.C. Law 14-56, § 116(g)(1), 48 DCR 7674</annotation> <annotation doc="D.C. Law 14-283" type="History">Apr. 4, 2003, D.C. 
Law 14-283, § 2(h), 50 DCR 917</annotation> <annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 2(h) of Mental Health Civil Commitment Congressional Review Emergency Act of 2003 (D.C. Act 15-41, March 24, 2003, 50 DCR 2784).</annotation> <annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 2(h) of Mental Health Civil Commitment Emergency Act of 2002 (D.C. Act 14-546, December 12, 2002, 50 DCR 199).</annotation> <annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 2(g) of Mental Health Commitment Congressional Review Emergency Act of 2002 (D.C. Act 14-350, April 24, 2002, 49 DCR 4417).</annotation> <annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 2(g) of Mental Health Commitment Emergency Amendment Act of 2002 (D.C. Act 14-265, January 30, 2002, 49 DCR 1450).</annotation> <annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 116(g)(1) of Mental Health Service Delivery Reform Congressional Review Emergency Act of 2001 (D.C. Act 14-144, October 23, 2001, 48 DCR 9947).</annotation> <annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 16(g)(1) of Department of Mental Health Establishment Congressional Review Emergency Amendment Act of 2001 (D.C. Act 14-101, July 23, 2001, 48 DCR 7123).</annotation> <annotation type="Emergency Legislation">For temporary (90 day) amendment of section, see § 16(g)(1) of Department of Mental Health Establishment Emergency Amendment Act of 2001 (D.C. Act 14-55, May 2, 2001, 48 DCR 4390).</annotation> <annotation type="Temporary Legislation">Section 5(b) of <cite doc="D.C. Law 14-131">D.C. Law 14-131</cite> provided that the act shall expire after 225 days of its having taken effect.</annotation> <annotation type="Temporary Legislation">Section 2(g) of <cite doc="D.C. Law 14-131">D.C. 
Law 14-131</cite>, in the first sentence, added “, or to the Department,” following “or private hospital”.</annotation> <annotation type="Temporary Legislation">Section 19(b) of <cite doc="D.C. Law 14-51">D.C. Law 14-51</cite> provided that the act shall expire after 225 days of its having taken effect.</annotation> <annotation type="Temporary Legislation">Section 16(g)(1) of <cite doc="D.C. Law 14-51">D.C. Law 14-51</cite> substituted “Department of Mental Health” for “Department of Human Services” in the first sentence.</annotation> <annotation type="Effect of Amendments"><cite doc="D.C. Law 14-283">D.C. Law 14-283</cite> added “, or to the Department” after “or private hospital”.</annotation> <annotation type="Effect of Amendments"><cite doc="D.C. Law 14-56">D.C. Law 14-56</cite> substituted “Department of Mental Health” for “Department of Human Services”.</annotation> <annotation type="Prior Codifications">1973 Ed., § 21-521.</annotation> <annotation type="Prior Codifications">1981 Ed., § 21-521.</annotation> <annotation type="Section References">This section is referenced in <cite path="§7-1203.03">§ 7-1203.03</cite>, <cite path="§16-2315">§ 16-2315</cite>, <cite path="§21-522">§ 21-522</cite>, and <cite path="§21-582">§ 21-582</cite>.</annotation> <annotation type="Cross References">St. 
Elizabeths Hospital, commitment of mentally ill persons, see §§ <cite path="§21-901">21-901</cite> et seq., <cite path="§44-901">44-901</cite> et seq.</annotation> <annotation type="Cross References">Release of dower, see <cite path="§19-107">§ 19-107</cite>a.</annotation> <annotation type="Cross References">Redemption from tax sale, removal of disability, see <cite path="§47-1304">§ 47-1304</cite>.</annotation> <annotation type="Cross References">Real estate leases, mentally ill person’s rights, see <cite path="§42-3222">§ 42-3222</cite> et seq.</annotation> <annotation type="Cross References">Property of mentally ill persons, see <cite path="§21-2001">§ 21-2001</cite> et seq.</annotation> <annotation type="Cross References">Physician or qualified psychologist related by blood or marriage to alleged mentally ill person, power to apply or certify mental status, see <cite path="§21-582">§ 21-582</cite>.</annotation> <annotation type="Cross References">Personal property schedule filing, persons under disability, see <cite path="§47-1601">§ 47-1601</cite>.</annotation> <annotation type="Cross References">Militia service exemption, see <cite path="§49-401">§ 49-401</cite>.</annotation> <annotation type="Cross References">Emergency disclosure of mental health information, see <cite path="§7-1203.03">§ 7-1203.03</cite>.</annotation> <annotation type="Cross References">Conservator, guardian in proceedings for appointment, see <cite path="§21-2041">§ 21-2041</cite> et seq.</annotation> <annotation type="Cross References">Condemnation of insanitary buildings, appointment of guardian ad litem, see <cite path="§6-909">§ 6-909</cite>.</annotation> <annotation type="Cross References">Annulment of marriage, <cite path="§46-404">§ 46-404</cite>.</annotation> </annotations> </section>
{ "pile_set_name": "Github" }
null
null
/** * Evaluation.java * It is to evaluate the elapsed time of each sameAs approach */ package com.samsung.scrc.wsg.k.eval; import com.samsung.scrc.wsg.k.sa.matcher.BeliefBasedMatcher; import com.samsung.scrc.wsg.k.sa.matcher.FullMatcher; import com.samsung.scrc.wsg.k.sa.matcher.MaxConfMatcher; import com.samsung.scrc.wsg.k.sa.matcher.One2OneMatcher; import com.samsung.scrc.wsg.k.sa.matcher.ThresholdMatcher; import com.samsung.scrc.wsg.k.sa.stat.Stat; /** * @author yuxie * * @date Apr 6, 2015 * */ public class Evaluation { /** * Evaluate pre-processing (<Entity, Language Count>) */ public void evalPreprocess() { long startTime = System.currentTimeMillis(); System.out.println("Preprocess Evaluation starts at: "+startTime); Stat.statLang(); long endTime = System.currentTimeMillis(); System.out.println("Preprocess Evaluation finishes at: "+endTime); System.out.println("Preprocess Elapsed Time: "+(endTime - startTime) / 1000.0+"s."); } /** * Evaluate full matcher approach */ public void evalFullMatcher() { long startTime = System.currentTimeMillis(); System.out.println("Full Matcher Evaluation starts at: "+startTime); FullMatcher matcher = new FullMatcher(); matcher.init(); matcher.match(); matcher.close(); long endTime = System.currentTimeMillis(); System.out.println("Full Matcher Evaluation finishes at: "+endTime); System.out.println("Full Matcher Elapsed Time: " + (endTime - startTime) / 1000.0 + "s."); } /** * Evaluate max confidence approach */ public void evalMaxConfMatcher() { long startTime = System.currentTimeMillis(); System.out.println("Max Confidence Matcher Evaluation starts at: " + startTime); Stat.statSA(); long interTime = System.currentTimeMillis(); System.out.println("Max Confidence Matcher Evaluation intermediates at: " + interTime); MaxConfMatcher matcher = new MaxConfMatcher(); matcher.init(); matcher.match(); matcher.close(); long endTime = System.currentTimeMillis(); System.out.println("Max Confidence Evaluation finishes at: " + endTime); 
System.out.println("Max Confidence Elapsed Time: " + (endTime - startTime) / 1000.0 + "s."); } /** * Evaluate threshold filtering approach */ public void evalThresholdMatcher() { long startTime = System.currentTimeMillis(); System.out.println("Threshold Matcher Evaluation starts at:\t" + startTime); ThresholdMatcher matcher = new ThresholdMatcher(); matcher.init(); matcher.match(); matcher.close(); long endTime = System.currentTimeMillis(); System.out.println("Threshold Evaluation finishes at:\t" + endTime); System.out.println("Threshold Elapsed Time:\t" + (endTime - startTime) / 1000.0 + "s."); } /** * Evaluate one-to-one mapping approach */ public void evalOne2OneMatcher() { long startTime = System.currentTimeMillis(); System.out.println("1-1 Only Matcher Evaluation starts at:\t" + startTime); One2OneMatcher matcher = new One2OneMatcher(); matcher.init(); matcher.match(); matcher.close(); long endTime = System.currentTimeMillis(); System.out.println("1-1 Only Evaluation finishes at:\t" + endTime); System.out.println("1-1 Only Elapsed Time:\t" + (endTime - startTime) / 1000.0 + "s."); } /** * Evaluate belief-base approach */ public void evalBeliefBasedMatcher() { long startTime = System.currentTimeMillis(); System.out.println("Belief-based Evaluation starts at:\t" + startTime); BeliefBasedMatcher matcher = new BeliefBasedMatcher(); matcher.init(); matcher.match(); matcher.close(); long endTime = System.currentTimeMillis(); System.out.println("Belief-based Evaluation finishes at:\t" + endTime); System.out.println("Belief-based Elapsed Time:\t" + (endTime - startTime) / 1000.0 + "s."); } /** * Control whole evaluation process */ public void eval() { evalPreprocess(); evalFullMatcher(); evalMaxConfMatcher(); evalThresholdMatcher(); evalOne2OneMatcher(); evalBeliefBasedMatcher(); } }
{ "pile_set_name": "Github" }
null
null
Seuss
{ "pile_set_name": "Github" }
null
null
{"threshold": 5, "split": ["jUEumBCZY6GRlXbB/uobM53gis/RldnMAfBAnkg=", "e3WhwYy6YUQGedMXkGnxJO6v0ov4cgteapL17wI="], "shares": 9} {"threshold": 5, "split": ["mrygQzFoasgDY23te4MGqTFXjpS/pMalQiN9Sks=", "XjpXSuBzyfRAbKj7hODzcf0cv0NZsXQDEQiIdtA="], "shares": 9} {"threshold": 5, "split": ["U64tceO4Ddtr4V6FMXSTJre4f6t4nPczWgtIkfo=", "t3hrQmsqonoBCl7T4f3bLqMShchtOtF3WesMGEs="], "shares": 9} {"threshold": 5, "split": ["TcOzpm1jNtwsj0cdiLfd6oVHLGMMKRt52t9kU4E=", "RqJ1WKlufadMFwFLbIjvBs82BH/yP8CAT6kyv3M="], "shares": 9}
{ "pile_set_name": "Github" }
null
null
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>CFBundleDevelopmentRegion</key> <string>en</string> <key>CFBundleExecutable</key> <string>$(EXECUTABLE_NAME)</string> <key>CFBundleIdentifier</key> <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string> <key>CFBundleInfoDictionaryVersion</key> <string>6.0</string> <key>CFBundleName</key> <string>$(PRODUCT_NAME)</string> <key>CFBundlePackageType</key> <string>FMWK</string> <key>CFBundleShortVersionString</key> <string>1.0</string> <key>CFBundleSignature</key> <string>????</string> <key>CFBundleVersion</key> <string>$(CURRENT_PROJECT_VERSION)</string> <key>NSHumanReadableCopyright</key> <string>Copyright © 2016 Aditya Vaidyam. All rights reserved.</string> <key>NSPrincipalClass</key> <string></string> </dict> </plist>
{ "pile_set_name": "Github" }
null
null
/* * Copyright 2014 Google Inc. * * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #include "include/core/SkTypes.h" #include "include/core/SkData.h" #include "include/core/SkFontMgr.h" #include "include/core/SkFontStyle.h" #include "include/core/SkPaint.h" #include "include/core/SkRefCnt.h" #include "include/core/SkStream.h" #include "include/core/SkString.h" #include "include/ports/SkFontMgr_android.h" #include "include/private/SkFixed.h" #include "include/private/SkTArray.h" #include "include/private/SkTDArray.h" #include "include/private/SkTemplates.h" #include "src/core/SkFontDescriptor.h" #include "src/core/SkOSFile.h" #include "src/core/SkTSearch.h" #include "src/core/SkTypefaceCache.h" #include "src/ports/SkFontHost_FreeType_common.h" #include "src/ports/SkFontMgr_android_parser.h" #include <algorithm> #include <limits> class SkData; class SkTypeface_Android : public SkTypeface_FreeType { public: SkTypeface_Android(const SkFontStyle& style, bool isFixedPitch, const SkString& familyName) : INHERITED(style, isFixedPitch) , fFamilyName(familyName) { } protected: void onGetFamilyName(SkString* familyName) const override { *familyName = fFamilyName; } SkString fFamilyName; private: using INHERITED = SkTypeface_FreeType; }; class SkTypeface_AndroidSystem : public SkTypeface_Android { public: SkTypeface_AndroidSystem(const SkString& pathName, const bool cacheFontFiles, int index, const SkFixed* axes, int axesCount, const SkFontStyle& style, bool isFixedPitch, const SkString& familyName, const SkTArray<SkLanguage, true>& lang, FontVariant variantStyle) : INHERITED(style, isFixedPitch, familyName) , fPathName(pathName) , fIndex(index) , fAxes(axes, axesCount) , fLang(lang) , fVariantStyle(variantStyle) , fFile(cacheFontFiles ? 
sk_fopen(fPathName.c_str(), kRead_SkFILE_Flag) : nullptr) { if (cacheFontFiles) { SkASSERT(fFile); } } std::unique_ptr<SkStreamAsset> makeStream() const { if (fFile) { sk_sp<SkData> data(SkData::MakeFromFILE(fFile)); return data ? std::make_unique<SkMemoryStream>(std::move(data)) : nullptr; } return SkStream::MakeFromFile(fPathName.c_str()); } void onGetFontDescriptor(SkFontDescriptor* desc, bool* serialize) const override { SkASSERT(desc); SkASSERT(serialize); desc->setFamilyName(fFamilyName.c_str()); desc->setStyle(this->fontStyle()); *serialize = false; } std::unique_ptr<SkStreamAsset> onOpenStream(int* ttcIndex) const override { *ttcIndex = fIndex; return this->makeStream(); } std::unique_ptr<SkFontData> onMakeFontData() const override { return std::make_unique<SkFontData>(this->makeStream(), fIndex, fAxes.begin(), fAxes.count()); } sk_sp<SkTypeface> onMakeClone(const SkFontArguments& args) const override { std::unique_ptr<SkFontData> data = this->cloneFontData(args); if (!data) { return nullptr; } return sk_make_sp<SkTypeface_AndroidSystem>(fPathName, fFile, fIndex, data->getAxis(), data->getAxisCount(), this->fontStyle(), this->isFixedPitch(), fFamilyName, fLang, fVariantStyle); } const SkString fPathName; int fIndex; const SkSTArray<4, SkFixed, true> fAxes; const SkSTArray<4, SkLanguage, true> fLang; const FontVariant fVariantStyle; SkAutoTCallVProc<FILE, sk_fclose> fFile; using INHERITED = SkTypeface_Android; }; class SkTypeface_AndroidStream : public SkTypeface_Android { public: SkTypeface_AndroidStream(std::unique_ptr<SkFontData> data, const SkFontStyle& style, bool isFixedPitch, const SkString& familyName) : INHERITED(style, isFixedPitch, familyName) , fData(std::move(data)) { } void onGetFontDescriptor(SkFontDescriptor* desc, bool* serialize) const override { SkASSERT(desc); SkASSERT(serialize); desc->setFamilyName(fFamilyName.c_str()); *serialize = true; } std::unique_ptr<SkStreamAsset> onOpenStream(int* ttcIndex) const override { *ttcIndex = 
fData->getIndex(); return fData->getStream()->duplicate(); } std::unique_ptr<SkFontData> onMakeFontData() const override { return std::make_unique<SkFontData>(*fData); } sk_sp<SkTypeface> onMakeClone(const SkFontArguments& args) const override { std::unique_ptr<SkFontData> data = this->cloneFontData(args); if (!data) { return nullptr; } return sk_make_sp<SkTypeface_AndroidStream>(std::move(data), this->fontStyle(), this->isFixedPitch(), fFamilyName); } private: const std::unique_ptr<const SkFontData> fData; using INHERITED = SkTypeface_Android; }; class SkFontStyleSet_Android : public SkFontStyleSet { typedef SkTypeface_FreeType::Scanner Scanner; public: explicit SkFontStyleSet_Android(const FontFamily& family, const Scanner& scanner, const bool cacheFontFiles) { const SkString* cannonicalFamilyName = nullptr; if (family.fNames.count() > 0) { cannonicalFamilyName = &family.fNames[0]; } fFallbackFor = family.fFallbackFor; // TODO? make this lazy for (int i = 0; i < family.fFonts.count(); ++i) { const FontFileInfo& fontFile = family.fFonts[i]; SkString pathName(family.fBasePath); pathName.append(fontFile.fFileName); std::unique_ptr<SkStreamAsset> stream = SkStream::MakeFromFile(pathName.c_str()); if (!stream) { SkDEBUGF("Requested font file %s does not exist or cannot be opened.\n", pathName.c_str()); continue; } const int ttcIndex = fontFile.fIndex; SkString familyName; SkFontStyle style; bool isFixedWidth; Scanner::AxisDefinitions axisDefinitions; if (!scanner.scanFont(stream.get(), ttcIndex, &familyName, &style, &isFixedWidth, &axisDefinitions)) { SkDEBUGF("Requested font file %s exists, but is not a valid font.\n", pathName.c_str());
{ "pile_set_name": "Github" }
null
null
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>CFBundleDevelopmentRegion</key> <string>English</string> <key>CFBundleExecutable</key> <string>${EXECUTABLE_NAME}</string> <key>CFBundleIdentifier</key> <string>com.yourcompany.openFrameworks</string> <key>CFBundleInfoDictionaryVersion</key> <string>6.0</string> <key>CFBundlePackageType</key> <string>APPL</string> <key>CFBundleSignature</key> <string>????</string> <key>CFBundleVersion</key> <string>1.0</string> </dict> </plist>
{ "pile_set_name": "Github" }
null
null
# coding: utf-8

"""
    Kubernetes

    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)

    OpenAPI spec version: v1.7.4

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""


from __future__ import absolute_import

import sys
import os
import re

# python 2 and python 3 compatibility library
from six import iteritems

from ..configuration import Configuration
from ..api_client import ApiClient


class AdmissionregistrationApi(object):
    """
    API for the admissionregistration.k8s.io group.

    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Use the explicitly supplied client when given; otherwise fall back
        # to (and lazily create) the shared client held by the global
        # Configuration singleton.
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client

    def get_api_group(self, **kwargs):
        """
        get information of a group

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.

        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_api_group(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: V1APIGroup
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Always unwrap the (data, status, headers) tuple to data only.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            # Async path: the *_with_http_info variant returns the thread.
            return self.get_api_group_with_http_info(**kwargs)
        else:
            (data) = self.get_api_group_with_http_info(**kwargs)
            return data

    def get_api_group_with_http_info(self, **kwargs):
        """
        get information of a group

        Same as get_api_group but also exposes HTTP status and headers when
        `_return_http_data_only` is not set.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.

        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_api_group_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :return: V1APIGroup
                 If the method is called asynchronously,
                 returns the request thread.
        """

        # This endpoint accepts no API parameters; only the generator's
        # control keywords are allowed.
        all_params = []
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() is captured as the working params dict; unexpected
        # keyword arguments are rejected, accepted ones are hoisted out of
        # 'kwargs' into the dict itself.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_api_group" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        resource_path = '/apis/admissionregistration.k8s.io/'.replace('{format}', 'json')
        path_params = {}

        query_params = {}

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])

        # Authentication setting
        auth_settings = ['BearerToken']

        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='V1APIGroup',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
{ "pile_set_name": "Github" }
null
null
#!/usr/bin/env python

# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Thin launcher for gyp that prefers the pylib/ copy bundled with this repo."""

import os
import sys

# Resolve the pylib/ directory that sits next to this script and put it at
# the front of the module search path, so the in-repo gyp package wins over
# any copy installed elsewhere on the system.
_REPO_PYLIB = os.path.join(os.path.dirname(sys.argv[0]), 'pylib')
sys.path.insert(0, _REPO_PYLIB)

import gyp

if __name__ == '__main__':
  sys.exit(gyp.script_main())
{ "pile_set_name": "Github" }
null
null
//
//  ActionSheetColors.swift
//  Sheeeeeeeeet
//
//  Created by Daniel Saidi on 2019-08-10.
//  Copyright © 2019 Daniel Saidi. All rights reserved.
//

import UIKit

/**
 Standard action sheet colors, expressed as aliases for system
 colors. Resolve a case with the `color` property.
 
 On iOS 13 and later each case maps to a semantic, adaptive
 system color; on earlier systems a fixed legacy color is used
 as a fallback.
 */
public enum ActionSheetColor: CaseIterable {
    
    case actionText
    case background
    case danger
    case disabledText
    case discreteText
    case overlay
    case separator
    case text
    case tint
}


// MARK: - Public Extensions

public extension ActionSheetColor {
    
    /**
     The resolved `UIColor` for this semantic color: adaptive
     on iOS 13+, a fixed legacy color otherwise.
     */
    var color: UIColor {
        guard #available(iOS 13.0, *) else { return fallbackColor }
        return modernColor
    }
}


// MARK: - Private Extensions

private extension ActionSheetColor {
    
    /// Shared dimming color used by both palettes for `overlay`.
    var dimmingColor: UIColor {
        UIColor.black.withAlphaComponent(0.6)
    }
    
    /// Adaptive colors from the iOS 13 semantic palette.
    @available(iOS 13.0, *)
    var modernColor: UIColor {
        switch self {
        case .actionText: return .systemBlue
        case .background: return .tertiarySystemBackground
        case .danger: return .systemRed
        case .disabledText, .discreteText: return .secondaryLabel
        case .overlay: return dimmingColor
        case .separator: return .separator
        case .text, .tint: return .label
        }
    }
    
    /// Fixed, non-adaptive colors for iOS 12 and earlier.
    var fallbackColor: UIColor {
        switch self {
        case .actionText: return .blue
        case .background: return .white
        case .danger: return .red
        case .disabledText, .discreteText: return .lightGray
        case .overlay: return dimmingColor
        case .separator: return .lightGray
        case .text, .tint: return .darkText
        }
    }
}
null
null
#!/bin/sh
# SPDX-License-Identifier: GPL-2.0
# Simple script to update the version of DTC carried by the Linux kernel
#
# This script assumes that the dtc and the linux git trees are in the
# same directory. After building dtc in the dtc directory, it copies the
# source files and generated source file(s) into the scripts/dtc directory
# in the kernel and creates a git commit updating them to the new
# version.
#
# Usage: from the top level Linux source tree, run:
# $ ./scripts/dtc/update-dtc-source.sh
#
# The script will change into the dtc tree, build and test dtc, copy the
# relevant files into the kernel tree and create a git commit. The commit
# message will need to be modified to reflect the version of DTC being
# imported
#
# TODO:
# This script is pretty basic, but it is seldom used so a few manual tasks
# aren't a big deal. If anyone is interested in making it more robust, the
# the following would be nice:
# * Actually fail to complete if any testcase fails.
#   - The dtc "make check" target needs to return a failure
# * Extract the version number from the dtc repo for the commit message
# * Build dtc in the kernel tree
# * run 'make check" on dtc built from the kernel tree

# -e: abort on any failing command; -v: echo commands as they run.
set -ev

# The dtc checkout is expected to live next to the kernel tree. All path
# expansions below are quoted so the script survives checkout paths that
# contain spaces.
DTC_UPSTREAM_PATH="`pwd`/../dtc"
DTC_LINUX_PATH="`pwd`/scripts/dtc"

# File lists are deliberately plain space-separated words; they are expanded
# UNQUOTED in the copy loops below so the shell splits them into file names.
DTC_SOURCE="checks.c data.c dtc.c dtc.h flattree.c fstree.c livetree.c srcpos.c \
		srcpos.h treesource.c util.c util.h version_gen.h yamltree.c Makefile.dtc \
		dtc-lexer.l dtc-parser.y"
LIBFDT_SOURCE="Makefile.libfdt fdt.c fdt.h fdt_addresses.c fdt_empty_tree.c \
		fdt_overlay.c fdt_ro.c fdt_rw.c fdt_strerror.c fdt_sw.c \
		fdt_wip.c libfdt.h libfdt_env.h libfdt_internal.h"

# Print the most recent upstream dtc version recorded in the kernel's own
# history of scripts/dtc; used to bound the upstream changelog below.
get_last_dtc_version() {
	git log --oneline scripts/dtc/ | grep 'upstream' | head -1 | sed -e 's/^.* \(.*\)/\1/'
}

last_dtc_ver=$(get_last_dtc_version)

# Build DTC
cd "$DTC_UPSTREAM_PATH"
make clean
make check
dtc_version=$(git describe HEAD)
dtc_log=$(git log --oneline "${last_dtc_ver}..")

# Copy the files into the Linux tree
cd "$DTC_LINUX_PATH"
for f in $DTC_SOURCE; do
	cp "${DTC_UPSTREAM_PATH}/${f}" "${f}"
	git add "${f}"
done
for f in $LIBFDT_SOURCE; do
	cp "${DTC_UPSTREAM_PATH}/libfdt/${f}" "libfdt/${f}"
	git add "libfdt/${f}"
done

# Rewrite the system-style includes to local quoted includes so libfdt
# builds inside the kernel tree.
sed -i -- 's/#include <libfdt_env.h>/#include "libfdt_env.h"/g' ./libfdt/libfdt.h
sed -i -- 's/#include <fdt.h>/#include "fdt.h"/g' ./libfdt/libfdt.h
git add ./libfdt/libfdt.h

commit_msg=$(cat << EOF
scripts/dtc: Update to upstream version ${dtc_version}

This adds the following commits from upstream:

${dtc_log}
EOF
)

# -e opens the editor so the message can be adjusted before committing.
git commit -e -v -s -m "${commit_msg}"
{ "pile_set_name": "Github" }
null
null
// VulcanizeDB
// Copyright © 2019 Vulcanize

// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.

// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.

package shared

// These types serve as very loose wrappers around a generic underlying interface{}.

// RawChainData is chain data as it arrives from its source, before any
// conversion; the concrete type is supplied elsewhere — confirm with callers.
type RawChainData interface{}

// ConvertedData is converted payload data tagged with a height.
// The concrete type underneath StreamedIPLDs should not be a pointer.
type ConvertedData interface {
	Height() int64
}

// CIDsForIndexing is an opaque carrier of CIDs destined for indexing;
// the concrete type is defined by the implementing pipeline.
type CIDsForIndexing interface{}

// CIDsForFetching is an opaque carrier of CIDs used when fetching.
type CIDsForFetching interface{}

// IPLDs is a height-tagged collection of IPLD data.
type IPLDs interface {
	Height() int64
}

// Gap is a [Start, Stop] range of missing data — presumably block
// heights; verify against the code that produces gaps.
type Gap struct {
	Start uint64
	Stop  uint64
}
null
null
package utils

import (
	"testing"
)

// TestCompareVersionVersion checks that CompareVersion reports the first
// version string ("v1.12.1") as greater than the second ("v1.11.2") and
// returns no error for well-formed input.
func TestCompareVersionVersion(t *testing.T) {
	newer, older := "v1.12.1", "v1.11.2"
	ok, err := CompareVersion(newer, older)
	if err != nil {
		t.Fatal(err)
	}
	if !ok {
		t.Fatal("Version comparison failed.")
	}
}
null
null
<section> <section> <title>Properties</title> <table> <thead> <tr> <td>Name</td> </tr> </thead> </table> </section> <section> <title>Methods</title> <table> <thead> <tr> <td>Name</td> </tr> </thead> </table> </section> </section>
{ "pile_set_name": "Github" }
null
null
StartChar: u1EE61 Encoding: 126561 126561 5990 Width: 436 Flags: HW LayerCount: 2 Fore Refer: 6051 126588 N 1 0 0 1 0 0 3 Refer: 194 -1 N 1 0 0 1 244 -239 2 EndChar
{ "pile_set_name": "Github" }
null
null
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head><link rel="apple-touch-icon" sizes="180x180" href="/glide/apple-touch-icon.png"><link rel="icon" type="image/png" sizes="32x32" href="/glide/favicon-32x32.png"><link rel="icon" type="image/png" sizes="16x16" href="/glide/favicon-16x16.png"><link rel="manifest" href="/glide/manifest.json"> <!-- Generated by javadoc (1.8.0_151) on Fri Aug 17 09:17:46 PDT 2018 --> <title>com.bumptech.glide.integration.volley (glide API)</title> <meta name="date" content="2018-08-17"> <link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style"> <script type="text/javascript" src="../../../../../script.js"></script> </head> <body> <h1 class="bar"><a href="../../../../../com/bumptech/glide/integration/volley/package-summary.html" target="classFrame">com.bumptech.glide.integration.volley</a></h1> <div class="indexContainer"> <h2 title="Interfaces">Interfaces</h2> <ul title="Interfaces"> <li><a href="VolleyRequestFactory.html" title="interface in com.bumptech.glide.integration.volley" target="classFrame"><span class="interfaceName">VolleyRequestFactory</span></a></li> </ul> <h2 title="Classes">Classes</h2> <ul title="Classes"> <li><a href="VolleyGlideModule.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyGlideModule</a></li> <li><a href="VolleyLibraryGlideModule.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyLibraryGlideModule</a></li> <li><a href="VolleyStreamFetcher.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyStreamFetcher</a></li> <li><a href="VolleyStreamFetcher.GlideRequest.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyStreamFetcher.GlideRequest</a></li> <li><a href="VolleyUrlLoader.html" title="class in com.bumptech.glide.integration.volley" 
target="classFrame">VolleyUrlLoader</a></li> <li><a href="VolleyUrlLoader.Factory.html" title="class in com.bumptech.glide.integration.volley" target="classFrame">VolleyUrlLoader.Factory</a></li> </ul> </div> </body> </html>
{ "pile_set_name": "Github" }
null
null
#!/bin/bash

# Package the Pulsestorm_Modulelist Magento extension into a tar archive
# under var/build/.

# Abort immediately if any step fails, so a bad archive is never produced.
set -e

# -p: succeed even when var/build already exists (repeat builds).
mkdir -p var/build

# OS X, prevent ._ AppleDouble files from being added to the archive.
export COPYFILE_DISABLE=true

tar -cvf var/build/Pulsestorm_Modulelist.tar \
    app/code/community/Pulsestorm/Modulelist \
    app/etc/modules/Pulsestorm_Modulelist.xml
{ "pile_set_name": "Github" }
null
null
/* (c) copyright 1988 by the Vrije Universiteit, Amsterdam, The Netherlands. See the copyright notice in the ACK home directory, in the file "Copyright". */ /* Module: SYSTEM Author: Ceriel J.H. Jacobs Version: $Id$ */ /* An implementation of the Modula-2 NEWPROCESS and TRANSFER facilities using the topsize, topsave, and topload facilities. For each coroutine, a proc structure is built. For the main routine, a static space is declared to save its stack. For the other coroutines, the user specifies this space. */ #include <unistd.h> #include "libm2.h" #include <m2_traps.h> #define MAXMAIN 2048 static struct proc mainproc[MAXMAIN / sizeof(struct proc) + 1]; static struct proc* curproc = 0; /* current coroutine */ extern char* MainLB; /* stack break of main routine */ void _SYSTEM__NEWPROCESS( int (*p)(void) /* coroutine procedure */, struct proc* a /* pointer to area for saved stack-frame */, unsigned int n /* size of this area */, struct proc** p1 /* where to leave coroutine descriptor, in this implementation the address of the area for saved stack-frame(s) */ ) { /* This procedure creates a new coroutine, but does not transfer control to it. The routine "topsize" will compute the stack break, which will be the local base of this routine. Notice that we can do this because we do not need the stack above this point for this coroutine. In Modula-2, coroutines must be level 0 procedures without parameters. */ char* brk = 0; unsigned sz = topsize(&brk); if (sz + sizeof(struct proc) > n) { /* not enough space */ TRP(M2_TOOLARGE); } a->size = n; a->proc = p; a->brk = brk; *p1 = a; if (topsave(brk, a + 1)) /* stack frame saved; now just return */ ; else { /* We get here through the first transfer to the coroutine created above. This also means that curproc is now set to this coroutine. We cannot trust the parameters anymore. Just call the coroutine procedure. 
*/ (*(curproc->proc))(); _cleanup(); _exit(0); } } void _SYSTEM__TRANSFER(struct proc** a, struct proc** b) { /* transfer from one coroutine to another, saving the current descriptor in the space indicated by "a", and transfering to the coroutine in descriptor "b". */ unsigned size; if (!curproc) { /* the current coroutine is the main process; initialize a coroutine descriptor for it ... */ mainproc[0].brk = MainLB; mainproc[0].size = sizeof(mainproc); curproc = &mainproc[0]; } *a = curproc; /* save current descriptor in "a" */ if (*b == curproc) { /* transfer to itself is a no-op */ return; } size = topsize(&(curproc->brk)); if (size + sizeof(struct proc) > curproc->size) { TRP(M2_TOOLARGE); } if (topsave(curproc->brk, curproc + 1)) { /* stack top saved. Now restore context of target coroutine */ curproc = *b; topload(curproc + 1); /* we never get here ... */ } /* but we do get here, when a transfer is done to the coroutine in "a". */ }
{ "pile_set_name": "Github" }
null
null
<?xml version="1.0" encoding="UTF-8"?> <dependenciesRoot> <dependency className="testLangWithRT.typesystem.TypesystemDescriptor"> <classNode dependClassName="jetbrains.mps.lang.typesystem.runtime.InferenceRule_Runtime" /> <classNode dependClassName="testLangWithRT.typesystem.typeof_Sout_InferenceRule" /> <classNode extendsClassName="jetbrains.mps.lang.typesystem.runtime.BaseHelginsDescriptor" /> </dependency> <dependency className="testLangWithRT.typesystem.typeof_Sout_InferenceRule"> <classNode dependClassName="jetbrains.mps.lang.typesystem.runtime.IsApplicableStatus" /> <classNode dependClassName="jetbrains.mps.smodel.adapter.structure.MetaAdapterFactory" /> <classNode dependClassName="jetbrains.mps.smodel.builder.SNodeBuilder" /> <classNode dependClassName="jetbrains.mps.typesystem.inference.EquationInfo" /> <classNode dependClassName="jetbrains.mps.typesystem.inference.TypeCheckingContext" /> <classNode dependClassName="org.jetbrains.mps.openapi.language.SAbstractConcept" /> <classNode dependClassName="org.jetbrains.mps.openapi.language.SConcept" /> <classNode dependClassName="org.jetbrains.mps.openapi.model.SNode" /> <classNode dependClassName="org.jetbrains.mps.openapi.persistence.PersistenceFacade" /> <classNode extendsClassName="jetbrains.mps.lang.typesystem.runtime.AbstractInferenceRule_Runtime" /> <classNode extendsClassName="jetbrains.mps.lang.typesystem.runtime.InferenceRule_Runtime" /> </dependency> </dependenciesRoot>
{ "pile_set_name": "Github" }
null
null
/*
 *  Copyright Beijing 58 Information Technology Co.,Ltd.
 *
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package com.bj58.oceanus.exchange.executors;

import com.bj58.oceanus.core.context.StatementContext;
import com.bj58.oceanus.exchange.executors.jdbc.BatchExecutor;
import com.bj58.oceanus.exchange.executors.jdbc.SimpleExecutor;

/**
 * Executor builder: selects the {@link Executor} used to run a statement.
 * Batch statements are dispatched to a shared {@link BatchExecutor},
 * everything else to a shared {@link SimpleExecutor}.
 *
 * @author Service Platform Architecture Team (spat@58.com)
 */
@SuppressWarnings("rawtypes")
public class ExecutorsBuilder {

	/** Shared executor for ordinary (non-batch) statements. */
	static final Executor DEFAULT_EXECUTOR = new SimpleExecutor();

	/** Shared executor for JDBC batch statements. */
	static final Executor BATCH_EXECUTOR = new BatchExecutor();

	/**
	 * Returns the executor matching the given statement context.
	 *
	 * @param context the statement about to be executed
	 * @return the batch executor when {@code context.isBatch()} holds,
	 *         otherwise the default (simple) executor
	 */
	public static Executor<?> build(StatementContext context) {
		return context.isBatch() ? BATCH_EXECUTOR : DEFAULT_EXECUTOR;
	}

}
{ "pile_set_name": "Github" }
null
null
#!/usr/bin/env perl
# Exercises RPerl's types() introspection on hash references: each print below
# must emit exactly the corresponding EXECUTE_SUCCESS line for the test to pass.

# [[[ PREPROCESSOR ]]]
# <<< EXECUTE_SUCCESS: "$VAR1 = {'number_hashref' => {'a' => 'number'}};" >>>
# <<< EXECUTE_SUCCESS: "$VAR1 = {'number_hashref' => {'a' => 'number','b' => 'number'}};" >>>
# <<< EXECUTE_SUCCESS: "$VAR1 = {'hashref' => {'a' => 'number','b' => 'integer'}};" >>>
# <<< EXECUTE_SUCCESS: "$VAR1 = {'number_hashref' => {'a' => 'number','b' => 'number','c' => 'number','d' => 'number','e' => 'number'}};" >>>
# <<< EXECUTE_SUCCESS: "$VAR1 = {'hashref' => {'a' => 'number','b' => 'number','c' => 'number','d' => 'integer','e' => 'number'}};" >>>

# [[[ HEADER ]]]
use RPerl;
use strict;
use warnings;
our $VERSION = 0.001_000;

# [[[ CRITICS ]]]
## no critic qw(ProhibitUselessNoCritic ProhibitMagicNumbers RequireCheckedSyscalls)  # USER DEFAULT 1: allow numeric values & print operator
## no critic qw(RequireInterpolationOfMetachars)  # USER DEFAULT 2: allow single-quoted control characters & sigils

# [[[ OPERATIONS ]]]

# Single-line Dumper output, matching the EXECUTE_SUCCESS strings above.
$Data::Dumper::Indent = 0;

# All values are numbers -> reported as a homogeneous 'number_hashref'.
my hashref $u = { a => 2.2 };
print Dumper( types($u) ) . "\n";

# Still homogeneous: two number values.
$u = { a => 2.2, b => 3.2 };
print Dumper( types($u) ) . "\n";

# Mixed number/integer values -> degrades to a plain 'hashref'.
$u = { a => 2.2, b => 3 };
print Dumper( types($u) ) . "\n";

# Five number values -> homogeneous 'number_hashref' again.
$u = { a => 2.2, b => 3.3, c => 5.5, d => 7.7, e => 9.9 };
print Dumper( types($u) ) . "\n";

# One integer among numbers -> mixed, so plain 'hashref'.
$u = { a => 2.2, b => 3.3, c => 5.5, d => 7, e => 9.9 };
print Dumper( types($u) ) . "\n";
{ "pile_set_name": "Github" }
null
null
package com.planet_ink.coffee_mud.Abilities.Spells; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2002-2020 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ public class Spell_Shatter extends Spell { @Override public String ID() { return "Spell_Shatter"; } private final static String localizedName = CMLib.lang().L("Shatter"); @Override public String name() { return localizedName; } @Override protected int canTargetCode() { return CAN_MOBS|CAN_ITEMS; } @Override public int abstractQuality() { return Ability.QUALITY_MALICIOUS; } @Override public int classificationCode() { return Ability.ACODE_SPELL|Ability.DOMAIN_ALTERATION; } public Item getItem(final MOB mobTarget) { final List<Item> goodPossibilities=new ArrayList<Item>(); final List<Item> possibilities=new ArrayList<Item>(); for(int i=0;i<mobTarget.numItems();i++) { final Item item=mobTarget.getItem(i); if((item!=null) &&(item.subjectToWearAndTear())) { if(item.amWearingAt(Wearable.IN_INVENTORY)) possibilities.add(item); else goodPossibilities.add(item); } } if(goodPossibilities.size()>0) return goodPossibilities.get(CMLib.dice().roll(1,goodPossibilities.size(),-1)); else if(possibilities.size()>0) return possibilities.get(CMLib.dice().roll(1,possibilities.size(),-1)); return null; } @Override public int castingQuality(final MOB mob, final Physical target) { if(mob!=null) { if((target instanceof MOB)&&(mob!=target)) { final Item I=getItem((MOB)target); if(I==null) return Ability.QUALITY_INDIFFERENT; } } return super.castingQuality(mob,target); } @Override public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel) { final MOB mobTarget=getTarget(mob,commands,givenTarget,true,false); Item target=null; if(mobTarget!=null) { target=getItem(mobTarget); if(target==null) return maliciousFizzle(mob,mobTarget,L("<S-NAME> attempt(s) a shattering spell at <T-NAMESELF>, but nothing happens.")); } if((target==null)&&(mobTarget!=null)) target=getTarget(mobTarget,mobTarget.location(),givenTarget,commands,Wearable.FILTER_ANY); else if((target==null)&&(mobTarget==null)) 
target=getTarget(mob,mob.location(),givenTarget,commands,Wearable.FILTER_UNWORNONLY); if(target==null) return false; Room R=CMLib.map().roomLocation(target); if(R==null) R=mob.location(); if(!super.invoke(mob,commands,givenTarget,auto,asLevel)) return false; final boolean success=proficiencyCheck(mob,0,auto); if(success) { final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?L("<T-NAME> starts vibrating!"):L("^S<S-NAME> utter(s) a shattering spell, causing <T-NAMESELF> to vibrate and resonate.^?")); final CMMsg msg2=CMClass.getMsg(mob,mobTarget,this,verbalCastCode(mob,target,auto),null); if((R.okMessage(mob,msg))&&((mobTarget==null)||(R.okMessage(mob,msg2)))) { R.send(mob,msg); if(mobTarget!=null) R.send(mob,msg2); if((msg.value()<=0)&&(msg2.value()<=0)) { int damage=100+adjustedLevel(mob,asLevel)-target.phyStats().level(); if(CMLib.flags().isABonusItems(target)) damage=(int)Math.round(CMath.div(damage,2.0)); switch(target.material()&RawMaterial.MATERIAL_MASK) { case RawMaterial.MATERIAL_PAPER: case RawMaterial.MATERIAL_CLOTH: case RawMaterial.MATERIAL_VEGETATION: case RawMaterial.MATERIAL_SYNTHETIC: case RawMaterial.MATERIAL_LEATHER: case RawMaterial.MATERIAL_FLESH: damage=(int)Math.round(CMath.div(damage,3.0)); break; case RawMaterial.MATERIAL_WOODEN: damage=(int)Math.round(CMath.div(damage,1.5)); break; case RawMaterial.MATERIAL_GLASS: case RawMaterial.MATERIAL_ROCK: damage=(int)Math.round(CMath.mul(damage,2.0)); break; case RawMaterial.MATERIAL_PRECIOUS: break; case RawMaterial.MATERIAL_ENERGY: case RawMaterial.MATERIAL_GAS: damage=0; break; } if((damage>0)&&(target.subjectToWearAndTear())) target.setUsesRemaining(target.usesRemaining()-damage); else { R.show(mob,target,CMMsg.MSG_OK_VISUAL,L("<T-NAME> seems otherwise unaffected.")); return true; } if(target.usesRemaining()>0) target.recoverPhyStats(); else { target.setUsesRemaining(100); if(mobTarget==null) R.show(mob,target,CMMsg.MSG_OK_VISUAL,L("<T-
{ "pile_set_name": "Github" }
null
null
<?xml version="1.0" encoding="UTF-8"?> <!-- *************************************************************************** Copyright (c) 2010 Qcadoo Limited Project: Qcadoo MES Version: 1.4 This file is part of Qcadoo. Qcadoo is free software; you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA *************************************************************************** --> <ribbonExtension plugin="basic" view="generalParameters" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://schema.qcadoo.org/modules/ribbonExtension" xsi:schemaLocation="http://schema.qcadoo.org/modules/ribbonExtension http://schema.qcadoo.org/modules/ribbonExtension.xsd"> <group name="parameters"> <bigButton name="dashboardParameters" icon="generateIcon24.png"> <script> <![CDATA[ this.addOnChangeListener({ onClick: function() { if(window.canClose()) { #{form}.performEvent('redirectToDashboardParameters', []); } } }); ]]> </script> </bigButton> </group> </ribbonExtension>
{ "pile_set_name": "Github" }
null
null
'label':'car' 'bounding box':(1535,420,1567,442) 'label':'car' 'bounding box':(1572,418,1601,446) 'label':'car' 'bounding box':(1549,423,1586,448) 'label':'car' 'bounding box':(1764,404,1920,463) 'label':'car' 'bounding box':(1933,404,2045,468) 'label':'car' 'bounding box':(1257,420,1294,456) 'label':'car' 'bounding box':(1269,411,1327,453) 'label':'car' 'bounding box':(1296,390,1435,464) 'label':'car' 'bounding box':(591,425,696,501) 'label':'car' 'bounding box':(72,414,266,448) 'label':'person' 'bounding box':(-2,497,122,777) 'label':'person' 'bounding box':(497,261,666,865) 'label':'person' 'bounding box':(333,177,730,928) 'label':'person' 'bounding box':(867,187,1176,1023) 'label':'person' 'bounding box':(661,108,1107,1022) 'label':'person' 'bounding box':(1943,649,2045,834) 'label':'person' 'bounding box':(1368,248,1569,815)
{ "pile_set_name": "Github" }
null
null
/** * Copyright 2011-2019 Asakusa Framework Team. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.asakusafw.testdriver; import java.io.File; import java.net.URISyntaxException; import java.text.MessageFormat; import java.util.ArrayList; import java.util.List; import java.util.function.UnaryOperator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.asakusafw.testdriver.core.DataModelSinkFactory; import com.asakusafw.testdriver.core.DataModelSource; import com.asakusafw.testdriver.core.DataModelSourceFactory; import com.asakusafw.testdriver.core.DifferenceSinkFactory; import com.asakusafw.testdriver.core.ModelTester; import com.asakusafw.testdriver.core.ModelVerifier; import com.asakusafw.testdriver.core.TestDataToolProvider; import com.asakusafw.testdriver.core.TestRule; import com.asakusafw.testdriver.core.VerifierFactory; import com.asakusafw.testdriver.core.VerifyRuleFactory; /** * An abstract super class of test driver outputs. * @since 0.2.0 * @version 0.7.0 * @param <T> the data model type */ public class DriverOutputBase<T> extends DriverInputBase<T> { private static final Logger LOG = LoggerFactory.getLogger(DriverOutputBase.class); private VerifierFactory verifier; private DataModelSinkFactory resultSink; private DifferenceSinkFactory differenceSink; private UnaryOperator<DataModelSource> resultFilter; /** * Creates a new instance. 
* @param callerClass the current context class * @param testTools the test data tools * @param name the original input name * @param modelType the data model type * @since 0.6.0 */ public DriverOutputBase(Class<?> callerClass, TestDataToolProvider testTools, String name, Class<T> modelType) { super(callerClass, testTools, name, modelType); } /** * Returns the verifier. * @return the verifier, or {@code null} if not defined * @since 0.2.3 */ public VerifierFactory getVerifier() { if (verifier == null) { return null; } else if (resultFilter == null) { return verifier; } else { return toVerifierFactory(verifier, resultFilter); } } /** * Sets the verify rule for this output. * @param verifier the verifier to set, or {@code null} to clear verifier * @since 0.2.3 */ protected final void setVerifier(VerifierFactory verifier) { if (LOG.isDebugEnabled()) { LOG.debug("Verifier: name={}, model={}, verifier={}", new Object[] { //$NON-NLS-1$ getName(), getModelType().getName(), verifier, }); } this.verifier = verifier; } /** * Returns the result data sink for this output. * @return the result data sink, or {@code null} if not defined * @since 0.2.3 */ public DataModelSinkFactory getResultSink() { return resultSink; } /** * Sets the result data sink for this output. * The specified object will save the actual result of this. * @param resultSink the result data sink to set, or {@code null} to clear the sink * @since 0.2.3 */ protected final void setResultSink(DataModelSinkFactory resultSink) { if (LOG.isDebugEnabled()) { LOG.debug("ResultSink: name={}, model={}, sink={}", new Object[] { //$NON-NLS-1$ getName(), getModelType().getName(), resultSink, }); } this.resultSink = resultSink; } /** * Returns the difference information sink for this output. * @return the difference information sink, or {@code null} if not defined * @since 0.2.3 */ public DifferenceSinkFactory getDifferenceSink() { return differenceSink; } /** * Sets the difference information sink for this output. 
* The specified object will save the difference from expected result of this. * @param differenceSink the difference sink to set, {@code null} to clear the sink * @since 0.2.3 */ protected final void setDifferenceSink(DifferenceSinkFactory differenceSink) { if (LOG.isDebugEnabled()) { LOG.debug("DifferenceSink: name={}, model={}, sink={}", new Object[] { //$NON-NLS-1$ getName(), getModelType().getName(), differenceSink, }); } this.differenceSink = differenceSink; } /** * Sets the data model source filter for actual results of this output. * @param filter the source filter * @since 0.7.0 */ protected final void setResultFilter(UnaryOperator<DataModelSource> filter) { this.resultFilter = filter; } /** * Converts an output path to {@link DataModelSinkFactory} to write to the path. * @param path the output path * @return the target sink factory * @since 0.6.0 */ protected final DataModelSinkFactory toDataModelSinkFactory(String path) { return getTestTools().getDataModelSinkFactory(toOutputUri(path)); } /** * Converts an output path to {@link DataModelSinkFactory} to write to the path. * @param path the output path * @return the target sink factory * @since 0.6.0 */ protected final DataModelSinkFactory toDataModelSinkFactory(File path) { return getTestTools().getDataModelSinkFactory(path.toURI()); } /** * Converts an output path to {@link DifferenceSinkFactory} to write to the path. * @param path the output path * @return the target sink factory * @since 0.6.0 */ protected final DifferenceSinkFactory toDifferenceSinkFactory(String path) { return getTestTools().getDifferenceSinkFactory(toOutputUri(path)); } /** * Converts an output path to {@link DifferenceSinkFactory} to write to the path. 
* @param path the output path * @return the target sink factory * @since 0.6.0 */ protected final DifferenceSinkFactory toDifferenceSinkFactory(File path) { return getTestTools().getDifferenceSinkFactory(path.toURI()); } /** * Converts {@link ModelVerifier} into {@link VerifyRuleFactory}. * @param rulePath the path which represents the verification rule description * @param extraRules the extra verification rules * @return the equivalent {@link VerifyRuleFactory} * @since 0.6.0 */ protected final VerifyRuleFactory toVerifyRuleFactory( String rulePath, List<? extends ModelTester<? super T>> extraRules) { try { TestDataToolProvider tools = getTestTools(); List<TestRule> fragments = new ArrayList<>(); for (ModelTester<? super T> tester : extraRules) { fragments
{ "pile_set_name": "Github" }
null
null
{ "word": "Leg", "definitions": [ "Each of the limbs on which a person or animal walks and stands.", "A leg of an animal or bird as food.", "A part of a garment covering a leg or part of a leg.", "(with reference to a ball, especially in golf) sufficient momentum to reach the desired point.", "(with reference to a product or idea) sustained popularity or success.", "Each of the supports of a chair, table, or other structure.", "A section or stage of a journey or process.", "A run made on a single tack.", "(in soccer and other sports) each of two games constituting a round of a competition.", "A section of a relay or other race done in stages.", "A single game in a darts match.", "A branch of a forked object.", "The half of the field (as divided lengthways through the pitch) away from which the batsman's feet are pointed when standing to receive the ball.", "A deferential gesture made by drawing back one leg and bending it while keeping the front leg straight." ], "parts-of-speech": "Noun" }
{ "pile_set_name": "Github" }
null
null
/*
Copyright 2015 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package runtime

import "k8s.io/apimachinery/pkg/runtime/schema"

// SetGroupVersionKind satisfies the ObjectKind interface for all objects that embed TypeMeta
func (obj *TypeMeta) SetGroupVersionKind(gvk schema.GroupVersionKind) {
	obj.APIVersion, obj.Kind = gvk.ToAPIVersionAndKind()
}

// GroupVersionKind satisfies the ObjectKind interface for all objects that embed TypeMeta
func (obj *TypeMeta) GroupVersionKind() schema.GroupVersionKind {
	return schema.FromAPIVersionAndKind(obj.APIVersion, obj.Kind)
}

// GetObjectKind satisfies the Object interface for all objects that embed
// TypeMeta: the TypeMeta acts as its own ObjectKind.
func (obj *TypeMeta) GetObjectKind() schema.ObjectKind { return obj }
{ "pile_set_name": "Github" }
null
null
"""Imported from the recipes section of the itertools documentation. All functions taken from the recipes section of the itertools library docs [1]_. Some backward-compatible usability improvements have been made. .. [1] http://docs.python.org/library/itertools.html#recipes """ from collections import deque from itertools import ( chain, combinations, count, cycle, groupby, islice, repeat, starmap, tee ) import operator from random import randrange, sample, choice from six import PY2 from six.moves import filter, filterfalse, map, range, zip, zip_longest __all__ = [ 'accumulate', 'all_equal', 'consume', 'dotproduct', 'first_true', 'flatten', 'grouper', 'iter_except', 'ncycles', 'nth', 'nth_combination', 'padnone', 'pairwise', 'partition', 'powerset', 'prepend', 'quantify', 'random_combination_with_replacement', 'random_combination', 'random_permutation', 'random_product', 'repeatfunc', 'roundrobin', 'tabulate', 'tail', 'take', 'unique_everseen', 'unique_justseen', ] def accumulate(iterable, func=operator.add): """ Return an iterator whose items are the accumulated results of a function (specified by the optional *func* argument) that takes two arguments. By default, returns accumulated sums with :func:`operator.add`. >>> list(accumulate([1, 2, 3, 4, 5])) # Running sum [1, 3, 6, 10, 15] >>> list(accumulate([1, 2, 3], func=operator.mul)) # Running product [1, 2, 6] >>> list(accumulate([0, 1, -1, 2, 3, 2], func=max)) # Running maximum [0, 1, 1, 2, 3, 3] This function is available in the ``itertools`` module for Python 3.2 and greater. """ it = iter(iterable) try: total = next(it) except StopIteration: return else: yield total for element in it: total = func(total, element) yield total def take(n, iterable): """Return first *n* items of the iterable as a list. >>> take(3, range(10)) [0, 1, 2] >>> take(5, range(3)) [0, 1, 2] Effectively a short replacement for ``next`` based iterator consumption when you want more than one item, but less than the whole iterator. 
""" return list(islice(iterable, n)) def tabulate(function, start=0): """Return an iterator over the results of ``func(start)``, ``func(start + 1)``, ``func(start + 2)``... *func* should be a function that accepts one integer argument. If *start* is not specified it defaults to 0. It will be incremented each time the iterator is advanced. >>> square = lambda x: x ** 2 >>> iterator = tabulate(square, -3) >>> take(4, iterator) [9, 4, 1, 0] """ return map(function, count(start)) def tail(n, iterable): """Return an iterator over the last *n* items of *iterable*. >>> t = tail(3, 'ABCDEFG') >>> list(t) ['E', 'F', 'G'] """ return iter(deque(iterable, maxlen=n)) def consume(iterator, n=None): """Advance *iterable* by *n* steps. If *n* is ``None``, consume it entirely. Efficiently exhausts an iterator without returning values. Defaults to consuming the whole iterator, but an optional second argument may be provided to limit consumption. >>> i = (x for x in range(10)) >>> next(i) 0 >>> consume(i, 3) >>> next(i) 4 >>> consume(i) >>> next(i) Traceback (most recent call last): File "<stdin>", line 1, in <module> StopIteration If the iterator has fewer items remaining than the provided limit, the whole iterator will be consumed. >>> i = (x for x in range(3)) >>> consume(i, 5) >>> next(i) Traceback (most recent call last): File "<stdin>", line 1, in <module> StopIteration """ # Use functions that consume iterators at C speed. if n is None: # feed the entire iterator into a zero-length deque deque(iterator, maxlen=0) else: # advance to the empty slice starting at position n next(islice(iterator, n, n), None) def nth(iterable, n, default=None): """Returns the nth item or a default value. >>> l = range(10) >>> nth(l, 3) 3 >>> nth(l, 20, "zebra") 'zebra' """ return next(islice(iterable, n, None), default) def all_equal(iterable): """ Returns ``True`` if all the elements are equal to each other. 
>>> all_equal('aaaa') True >>> all_equal('aaab') False """ g = groupby(iterable) return next(g, True) and not next(g, False) def quantify(iterable, pred=bool): """Return the how many times the predicate is true. >>> quantify([True, False, True]) 2 """ return sum(map(pred, iterable)) def padnone(iterable): """Returns the sequence of elements and then returns ``None`` indefinitely. >>> take(5, padnone(range(3))) [0, 1, 2, None, None] Useful for emulating the behavior of the built-in :func:`map` function. See also :func:`padded`. """ return chain(iterable, repeat(None)) def ncycles(iterable, n): """Returns the sequence elements *n* times >>> list(ncycles(["a", "b"], 3)) ['a', 'b', 'a', 'b', 'a', 'b'] """ return chain.from_iterable(repeat(tuple(iterable), n)) def dotproduct(vec1, vec2): """Returns the dot product of the two iterables. >>> dotproduct([10, 10], [20, 20]) 400 """ return sum(map(operator.mul, vec1, vec2)) def flatten(listOfLists): """Return an iterator flattening one level of nesting in a list of lists. >>> list(flatten([[0, 1], [2, 3]])) [0, 1, 2, 3] See also :func:`collapse`, which can flatten multiple levels of nesting. """ return chain.from_iterable(listOfLists) def repeatfunc(func, times=None, *args): """Call *func* with *args* repeatedly, returning an iterable over the results. If *times* is specified, the iterable will terminate after that many repetitions: >>> from operator import add >>> times = 4 >>> args = 3, 5 >>> list(repeatfunc(add, times, *args)) [8, 8, 8, 8] If *times* is ``None`` the iterable will not terminate: >>> from random import randrange >>> times = None >>> args = 1, 11 >>> take(6, repeatfunc(randrange, times, *args)) # doctest:+SKIP [2, 4, 8, 1, 8, 4] """ if times is None:
{ "pile_set_name": "Github" }
null
null
#include <stdlib.h> #include "buffer.h" #include "chunk.h" #include "cmark.h" #include "utf8.h" #include "render.h" #include "node.h" static CMARK_INLINE void S_cr(cmark_renderer *renderer) { if (renderer->need_cr < 1) { renderer->need_cr = 1; } } static CMARK_INLINE void S_blankline(cmark_renderer *renderer) { if (renderer->need_cr < 2) { renderer->need_cr = 2; } } static void S_out(cmark_renderer *renderer, const char *source, bool wrap, cmark_escaping escape) { int length = strlen(source); unsigned char nextc; int32_t c; int i = 0; int last_nonspace; int len; cmark_chunk remainder = cmark_chunk_literal(""); int k = renderer->buffer->size - 1; wrap = wrap && !renderer->no_linebreaks; if (renderer->in_tight_list_item && renderer->need_cr > 1) { renderer->need_cr = 1; } while (renderer->need_cr) { if (k < 0 || renderer->buffer->ptr[k] == '\n') { k -= 1; } else { cmark_strbuf_putc(renderer->buffer, '\n'); if (renderer->need_cr > 1) { cmark_strbuf_put(renderer->buffer, renderer->prefix->ptr, renderer->prefix->size); } } renderer->column = 0; renderer->last_breakable = 0; renderer->begin_line = true; renderer->begin_content = true; renderer->need_cr -= 1; } while (i < length) { if (renderer->begin_line) { cmark_strbuf_put(renderer->buffer, renderer->prefix->ptr, renderer->prefix->size); // note: this assumes prefix is ascii: renderer->column = renderer->prefix->size; } len = cmark_utf8proc_iterate((const uint8_t *)source + i, length - i, &c); if (len == -1) { // error condition return; // return without rendering rest of string } nextc = source[i + len]; if (c == 32 && wrap) { if (!renderer->begin_line) { last_nonspace = renderer->buffer->size; cmark_strbuf_putc(renderer->buffer, ' '); renderer->column += 1; renderer->begin_line = false; renderer->begin_content = false; // skip following spaces while (source[i + 1] == ' ') { i++; } // We don't allow breaks that make a digit the first character // because this causes problems with commonmark output. 
if (!cmark_isdigit(source[i + 1])) { renderer->last_breakable = last_nonspace; } } } else if (escape == LITERAL) { if (c == 10) { cmark_strbuf_putc(renderer->buffer, '\n'); renderer->column = 0; renderer->begin_line = true; renderer->begin_content = true; renderer->last_breakable = 0; } else { cmark_render_code_point(renderer, c); renderer->begin_line = false; // we don't set 'begin_content' to false til we've // finished parsing a digit. Reason: in commonmark // we need to escape a potential list marker after // a digit: renderer->begin_content = renderer->begin_content && cmark_isdigit(c) == 1; } } else { (renderer->outc)(renderer, escape, c, nextc); renderer->begin_line = false; renderer->begin_content = renderer->begin_content && cmark_isdigit(c) == 1; } // If adding the character went beyond width, look for an // earlier place where the line could be broken: if (renderer->width > 0 && renderer->column > renderer->width && !renderer->begin_line && renderer->last_breakable > 0) { // copy from last_breakable to remainder cmark_chunk_set_cstr(renderer->mem, &remainder, (char *)renderer->buffer->ptr + renderer->last_breakable + 1); // truncate at last_breakable cmark_strbuf_truncate(renderer->buffer, renderer->last_breakable); // add newline, prefix, and remainder cmark_strbuf_putc(renderer->buffer, '\n'); cmark_strbuf_put(renderer->buffer, renderer->prefix->ptr, renderer->prefix->size); cmark_strbuf_put(renderer->buffer, remainder.data, remainder.len); renderer->column = renderer->prefix->size + remainder.len; cmark_chunk_free(renderer->mem, &remainder); renderer->last_breakable = 0; renderer->begin_line = false; renderer->begin_content = false; } i += len; } } // Assumes no newlines, assumes ascii content: void cmark_render_ascii(cmark_renderer *renderer, const char *s) { int origsize = renderer->buffer->size; cmark_strbuf_puts(renderer->buffer, s); renderer->column += renderer->buffer->size - origsize; } void cmark_render_code_point(cmark_renderer *renderer, 
uint32_t c) { cmark_utf8proc_encode_char(c, renderer->buffer); renderer->column += 1; } char *cmark_render(cmark_node *root, int options, int width, void (*outc)(cmark_renderer *, cmark_escaping, int32_t, unsigned char), int (*render_node)(cmark_renderer *renderer, cmark_node *node, cmark_event_type ev_type, int options)) { cmark_mem *mem = cmark_node_mem(root); cmark_strbuf pref = CMARK_BUF_INIT(mem); cmark_strbuf buf = CMARK_BUF_INIT(mem); cmark_node *cur; cmark_event_type ev_type; char *result; cmark_iter *iter = cmark_iter_new(root); cmark_renderer renderer = {mem, &buf, &pref, 0, width, 0, 0, true, true, false, false, outc, S_cr, S_blankline, S_out}; while ((ev_type = cmark_iter_next(iter)) != CMARK_EVENT_DONE) { cur = cmark_iter_get_node(iter); if (!render_node(&renderer, cur, ev_type, options)) { // a false value causes us to skip processing // the node's contents. this is used for // autolinks. cmark_iter_reset(iter, cur, CMARK_EVENT_EXIT); } } // ensure final newline if (renderer.buffer->size == 0 || renderer.buffer->ptr[renderer.buffer->size - 1] != '\n') { cmark_strbuf_putc(renderer.buffer, '\n'); } result = (char *)cmark_strbuf_detach(renderer.buffer); cmark_iter_free(
{ "pile_set_name": "Github" }
null
null
// // ZZFDRQSuccessCell.m // ZZFLEXDemo // // Created by 李伯坤 on 2018/1/24. // Copyright © 2018年 李伯坤. All rights reserved. // #import "ZZFDRQSuccessCell.h" @implementation ZZFDRQSuccessCell + (CGFloat)viewHeightByDataModel:(id)dataModel { return 180; } - (void)setViewDataModel:(UIColor *)dataModel { [self setBackgroundColor:dataModel]; } - (instancetype)initWithFrame:(CGRect)frame { if (self = [super initWithFrame:frame]) { [self setBackgroundColor:[UIColor whiteColor]]; self.contentView.addLabel(1001) .text(@"请求成功").font([UIFont systemFontOfSize:15]) .masonry(^(UIView *senderView, MASConstraintMaker *make) { make.center.mas_equalTo(0); }); } return self; } @end
{ "pile_set_name": "Github" }
null
null
// Dstl (c) Crown Copyright 2017 package uk.gov.dstl.baleen.annotators.cleaners; import static org.junit.Assert.assertEquals; import org.apache.uima.analysis_engine.AnalysisEngine; import org.apache.uima.fit.factory.AnalysisEngineFactory; import org.apache.uima.fit.util.JCasUtil; import org.junit.Test; import uk.gov.dstl.baleen.annotators.testing.Annotations; import uk.gov.dstl.baleen.annotators.testing.AnnotatorTestBase; import uk.gov.dstl.baleen.types.common.Person; import uk.gov.dstl.baleen.types.semantic.ReferenceTarget; public class CorefCapitalisationAndApostropheTest extends AnnotatorTestBase { private static final String JAMES_UC = "JAMES"; private static final String JAMES = "James"; private static final String TEXT = "James went to London. JAMES has also been to Edinburgh."; @Test public void testNoExistingReferents() throws Exception { AnalysisEngine corefCapAE = AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class); jCas.setDocumentText(TEXT); Annotations.createPerson(jCas, 0, 5, JAMES); Annotations.createPerson(jCas, 22, 27, JAMES_UC); corefCapAE.process(jCas); assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size()); ReferenceTarget rt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0); Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0); Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1); assertEquals(rt, p1t.getReferent()); assertEquals(rt, p2t.getReferent()); } @Test public void testOneExistingReferent() throws Exception { AnalysisEngine corefCapAE = AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class); jCas.setDocumentText(TEXT); ReferenceTarget rt = Annotations.createReferenceTarget(jCas); Person p1 = Annotations.createPerson(jCas, 0, 5, JAMES); p1.setReferent(rt); Annotations.createPerson(jCas, 22, 27, JAMES_UC); corefCapAE.process(jCas); assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size()); ReferenceTarget rtt = JCasUtil.selectByIndex(jCas, 
ReferenceTarget.class, 0); Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0); Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1); assertEquals(rtt, p1t.getReferent()); assertEquals(rtt, p2t.getReferent()); } @Test public void testTwoExistingReferent() throws Exception { AnalysisEngine corefCapAE = AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class); jCas.setDocumentText(TEXT + " James has not been to Guatemala."); ReferenceTarget rt = Annotations.createReferenceTarget(jCas); Person p1 = Annotations.createPerson(jCas, 0, 5, JAMES); p1.setReferent(rt); Person p2 = Annotations.createPerson(jCas, 22, 27, JAMES_UC); p2.setReferent(rt); Annotations.createPerson(jCas, 56, 61, JAMES); corefCapAE.process(jCas); assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size()); ReferenceTarget rtt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0); Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0); Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1); Person p3t = JCasUtil.selectByIndex(jCas, Person.class, 2); assertEquals(rtt, p1t.getReferent()); assertEquals(rtt, p2t.getReferent()); assertEquals(rtt, p3t.getReferent()); } @Test public void testExistingReferentsMerge() throws Exception { AnalysisEngine corefCapAE = AnalysisEngineFactory.createEngine( CorefCapitalisationAndApostrophe.class, "mergeReferents", true); jCas.setDocumentText(TEXT); ReferenceTarget rt1 = Annotations.createReferenceTarget(jCas); ReferenceTarget rt2 = Annotations.createReferenceTarget(jCas); Person p1 = Annotations.createPerson(jCas, 0, 5, JAMES); p1.setReferent(rt1); Person p2 = Annotations.createPerson(jCas, 22, 27, JAMES_UC); p2.setReferent(rt2); corefCapAE.process(jCas); assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size()); ReferenceTarget rtt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0); Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0); Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1); 
assertEquals(rtt, p1t.getReferent()); assertEquals(rtt, p2t.getReferent()); } @Test public void testExistingReferentsNoMerge() throws Exception { AnalysisEngine corefCapAE = AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class); jCas.setDocumentText(TEXT); ReferenceTarget rt1 = Annotations.createReferenceTarget(jCas); ReferenceTarget rt2 = Annotations.createReferenceTarget(jCas); Person p1 = Annotations.createPerson(jCas, 0, 5, JAMES); p1.setReferent(rt1); Person p2 = Annotations.createPerson(jCas, 22, 27, JAMES_UC); p2.setReferent(rt2); corefCapAE.process(jCas); assertEquals(2, JCasUtil.select(jCas, ReferenceTarget.class).size()); ReferenceTarget rt1t = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0); ReferenceTarget rt2t = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 1); Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0); Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1); assertEquals(rt1t, p1t.getReferent()); assertEquals(rt2t, p2t.getReferent()); } @Test public void testMissingValue() throws Exception { AnalysisEngine corefCapAE = AnalysisEngineFactory.createEngine(CorefCapitalisationAndApostrophe.class); jCas.setDocumentText(TEXT); Person p1 = new Person(jCas); p1.setBegin(0); p1.setEnd(5); p1.addToIndexes(); Annotations.createPerson(jCas, 22, 27, JAMES_UC); corefCapAE.process(jCas); assertEquals(1, JCasUtil.select(jCas, ReferenceTarget.class).size()); ReferenceTarget rt = JCasUtil.selectByIndex(jCas, ReferenceTarget.class, 0); Person p1t = JCasUtil.selectByIndex(jCas, Person.class, 0); Person p2t = JCasUtil.selectByIndex(jCas, Person.class, 1); assertEquals(rt, p1t.getReferent()); assertEquals(rt, p2t.getReferent()); } @Test public void testApostropheS() throws Exception { AnalysisEngine corefCapAE = AnalysisEngineFactory.createEngine(CorefCap
{ "pile_set_name": "Github" }
null
null
// // CSLayoutSwitcherHostingView.m // CocoaSplit // // Created by Zakk on 3/5/17. // Copyright © 2017 Zakk. All rights reserved. // #import "CSLayoutSwitcherHostingView.h" #import <Quartz/Quartz.h> #import "CSPreviewGLLayer.h" #import "CSLayoutSwitcherWithPreviewWindowController.h" @implementation CSLayoutSwitcherHostingView @end
{ "pile_set_name": "Github" }
null
null
using CSharpGL; using System; using System.Collections.Generic; using System.Drawing; using System.Linq; using System.Text; namespace PointLight { /// <summary> /// Render a Cube with single color in modern opengl. /// </summary> public class LightPositionNode : PickableNode, IRenderable { private const string inPosition = "inPosition"; private const string projectionMat = "projectionMat"; private const string viewMat = "viewMat"; private const string modelMat = "modelMat"; private const string color = "color"; private const string vertexCode = @"#version 330 core in vec3 " + inPosition + @"; uniform mat4 " + projectionMat + @"; uniform mat4 " + viewMat + @"; uniform mat4 " + modelMat + @"; void main(void) { gl_Position = projectionMat * viewMat * modelMat * vec4(inPosition, 1.0); } "; private const string fragmentCode = @"#version 330 core uniform vec3 " + color + @" = vec3(1, 1, 1); layout(location = 0) out vec4 outColor; //out vec4 outColor; void main(void) { outColor = vec4(color, 1); } "; private CSharpGL.PointLight light; /// <summary> /// Render propeller in modern opengl. /// </summary> /// <returns></returns> public static LightPositionNode Create() { var vs = new VertexShader(vertexCode); var fs = new FragmentShader(fragmentCode); var provider = new ShaderArray(vs, fs); var map = new AttributeMap(); map.Add(inPosition, CubeModel.strPosition); var builder = new RenderMethodBuilder(provider, map, new PolygonModeSwitch(PolygonMode.Line), new LineWidthSwitch(3)); var node = new LightPositionNode(new CubeModel(), CubeModel.strPosition, builder); node.Initialize(); return node; } /// <summary> /// Render propeller in legacy opengl. 
/// </summary> private LightPositionNode(IBufferSource model, string positionNameInIBufferable, params RenderMethodBuilder[] builders) : base(model, positionNameInIBufferable, builders) { this.ModelSize = new vec3(1, 1, 1) * 0.3f; this.AutoRotate = true; } /// <summary> /// /// </summary> public bool AutoRotate { get; set; } private ThreeFlags enableRendering = ThreeFlags.BeforeChildren | ThreeFlags.Children | ThreeFlags.AfterChildren; /// <summary> /// Render before/after children? Render children? /// RenderAction cares about this property. Other actions, maybe, maybe not, your choice. /// </summary> public ThreeFlags EnableRendering { get { return this.enableRendering; } set { this.enableRendering = value; } } /// <summary> /// /// </summary> /// <param name="arg"></param> public void RenderBeforeChildren(RenderEventArgs arg) { if (!this.IsInitialized) { this.Initialize(); } if (this.AutoRotate) { float delta = 1; this.RotationAngle += delta * 31; var position = new vec3( (float)Math.Cos(this.RotationAngle / 5 * Math.PI / 180.0), (float)Math.Cos(this.RotationAngle / 50 * Math.PI / 180.0), (float)Math.Sin(this.RotationAngle / 5 * Math.PI / 180.0)) * 9; this.light.Position = position; this.WorldPosition = position; } ICamera camera = arg.Camera; mat4 projection = camera.GetProjectionMatrix(); mat4 view = camera.GetViewMatrix(); mat4 model = this.GetModelMatrix(); var method = this.RenderUnit.Methods[0]; // the only render unit in this node. 
ShaderProgram program = method.Program; program.SetUniform(projectionMat, projection); program.SetUniform(viewMat, view); program.SetUniform(modelMat, model); method.Render(); } public void RenderAfterChildren(RenderEventArgs arg) { } public void SetLight(CSharpGL.PointLight light) { this.light = light; } class CubeModel : IBufferSource { public vec3 ModelSize { get; private set; } public CubeModel() { this.ModelSize = new vec3(xLength * 2, yLength * 2, zLength * 2); } public const string strPosition = "position"; private VertexBuffer positionBuffer; private IDrawCommand drawCmd; #region IBufferable 成员 public IEnumerable<VertexBuffer> GetVertexAttribute(string bufferName) { if (bufferName == strPosition) { if (this.positionBuffer == null) { this.positionBuffer = positions.GenVertexBuffer(VBOConfig.Vec3, BufferUsage.StaticDraw); } yield return this.positionBuffer; } else { throw new ArgumentException(); } } public IEnumerable<IDrawCommand> GetDrawCommand() { if (this.drawCmd == null) { this.drawCmd = new DrawArraysCmd(DrawMode.TriangleStrip, positions.Length); } yield return this.drawCmd; } #endregion private const float xLength = 0.5f; private const float yLength = 0.5f; private const float zLength = 0.5f; /// <summary> /// four vertexes. /// </summary> private static readonly vec3[] positions = new vec3[] { new vec3(+xLength, +yLength, +zLength),// 0 new vec3(+xLength, -yLength, +zLength),// 1 new vec3(+xLength, +yLength, -zLength),// 2 new vec3(+xLength, -yLength, -zLength),// 3 new vec3(-xLength, -yLength, -zLength),// 4 new vec3(+xLength, -yLength, +zLength),// 5 new vec3(-xLength, -yLength, +zLength),// 6 new vec3(+xLength, +yLength, +zLength),// 7 new vec3(-xLength, +yLength, +zLength),// 8 new vec3(+xLength, +yLength, -zLength),// 9 new vec3(-xLength, +yLength, -zLength),// 10 new vec3(-xLength, -yLength, -zLength),// 11 new vec3(-xLength, +yLength, +zLength),// 12 new vec3(-xLength, -yLength, +zLength),// 13 }; } } }
{ "pile_set_name": "Github" }
null
null
/* * Copyright (c) Facebook, Inc. and its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.buck.android; import static com.facebook.buck.android.aapt.RDotTxtEntry.IdType.INT; import static com.facebook.buck.android.aapt.RDotTxtEntry.IdType.INT_ARRAY; import static com.facebook.buck.android.aapt.RDotTxtEntry.RType.ATTR; import static com.facebook.buck.android.aapt.RDotTxtEntry.RType.ID; import static com.facebook.buck.android.aapt.RDotTxtEntry.RType.STYLEABLE; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertThat; import com.facebook.buck.android.MergeAndroidResourcesStep.DuplicateResourceException; import com.facebook.buck.android.aapt.RDotTxtEntry; import com.facebook.buck.android.aapt.RDotTxtEntry.RType; import com.facebook.buck.android.aapt.RDotTxtEntryUtil; import com.facebook.buck.android.aapt.RDotTxtEntryUtil.FakeEntry; import com.facebook.buck.core.build.execution.context.ExecutionContext; import com.facebook.buck.core.model.BuildTarget; import com.facebook.buck.core.model.BuildTargetFactory; import com.facebook.buck.core.model.impl.BuildTargetPaths; import com.facebook.buck.core.rules.ActionGraphBuilder; import com.facebook.buck.core.rules.BuildRuleResolver; import com.facebook.buck.core.rules.SourcePathRuleFinder; import com.facebook.buck.core.rules.resolver.impl.TestActionGraphBuilder; import 
com.facebook.buck.core.sourcepath.FakeSourcePath; import com.facebook.buck.core.sourcepath.resolver.SourcePathResolverAdapter; import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem; import com.facebook.buck.step.StepExecutionResult; import com.facebook.buck.step.TestExecutionContext; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSortedSet; import com.google.common.collect.SortedSetMultimap; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.SortedSet; import java.util.stream.Collectors; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; import org.hamcrest.core.StringContains; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; public class MergeAndroidResourcesStepTest { @Rule public ExpectedException thrown = ExpectedException.none(); private List<RDotTxtEntry> createTestingFakesWithIds(List<RDotTxtEntry> ls) { return ls.stream().map(RDotTxtEntryUtil::matchId).collect(Collectors.toList()); } @Test public void testGenerateRDotJavaForMultipleSymbolsFiles() throws DuplicateResourceException { RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder(); // Merge everything into the same package space. 
String sharedPackageName = "com.facebook.abc"; entriesBuilder.add( new RDotTxtFile( sharedPackageName, "a-R.txt", ImmutableList.of( "int id a1 0x7f010001", "int id a2 0x7f010002", "int string a1 0x7f020001"))); entriesBuilder.add( new RDotTxtFile( sharedPackageName, "b-R.txt", ImmutableList.of( "int id b1 0x7f010001", "int id b2 0x7f010002", "int string a1 0x7f020001"))); entriesBuilder.add( new RDotTxtFile( sharedPackageName, "c-R.txt", ImmutableList.of("int attr c1 0x7f010001", "int[] styleable c1 { 0x7f010001 }"))); SortedSetMultimap<String, RDotTxtEntry> packageNameToResources = MergeAndroidResourcesStep.sortSymbols( entriesBuilder.buildFilePathToPackageNameSet(), Optional.empty(), ImmutableMap.of(), Optional.empty(), /* bannedDuplicateResourceTypes */ EnumSet.noneOf(RType.class), ImmutableSet.of(), entriesBuilder.getProjectFilesystem(), false); assertEquals(1, packageNameToResources.keySet().size()); SortedSet<RDotTxtEntry> resources = packageNameToResources.get(sharedPackageName); assertEquals(7, resources.size()); Set<String> uniqueEntries = new HashSet<>(); for (RDotTxtEntry resource : resources) { if (!resource.type.equals(STYLEABLE)) { assertFalse( "Duplicate ids should be fixed by renumerate=true; duplicate was: " + resource.idValue, uniqueEntries.contains(resource.idValue)); uniqueEntries.add(resource.idValue); } } assertEquals(6, uniqueEntries.size()); // All good, no need to further test whether we can write the Java file correctly... } @Test public void testGenerateRDotJavaForWithStyleables() throws DuplicateResourceException { RDotTxtEntryBuilder entriesBuilder = new RDotTxtEntryBuilder(); // Merge everything into the same package space. 
String sharedPackageName = "com.facebook.abc"; entriesBuilder.add( new RDotTxtFile( sharedPackageName, "a-R.txt", ImmutableList.of( "int attr android_layout 0x010100f2", "int attr buttonPanelSideLayout 0x7f01003a", "int attr listLayout 0x7f01003b", "int[] styleable AlertDialog { 0x7f01003a, 0x7f01003b, 0x010100f2 }", "int styleable AlertDialog_android_layout 2", "int styleable AlertDialog_buttonPanelSideLayout 0", "int styleable AlertDialog_multiChoiceItemLayout 1"))); entriesBuilder.add( new RDotTxtFile( sharedPackageName, "b-R.txt", ImmutableList.of( "int id a1 0x7f010001", "int id a2 0x7f010002", "int attr android_layout_gravity 0x7f078008", "int attr background 0x7f078009", "int attr backgroundSplit 0x7f078008", "int attr backgroundStacked 0x7f078010", "int attr layout_heightPercent 0x7f078012", "int[] styleable ActionBar { }", "int styleable ActionBar_background 10", "int styleable ActionBar_backgroundSplit 12", "int styleable ActionBar_backgroundStacked 11", "int[] styleable ActionBarLayout { 0x7f060008 }", "int styleable ActionBarLayout_android_layout 0",
{ "pile_set_name": "Github" }
null
null
/* * SPDX-License-Identifier: BSD-2-Clause * * Copyright 2018, embedded brains GmbH <rtems@embedded-brains.de> * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. */ #ifdef HAVE_CONFIG_H #include "config.h" #endif #include <rtems/posix/semaphore.h> const uint32_t _Configuration_POSIX_Maximum_named_semaphores;
{ "pile_set_name": "Github" }
null
null
import python import experimental.semmle.python.templates.Chevron from SSTISink s select s
{ "pile_set_name": "Github" }
null
null
#!/usr/bin/env python3 import os import sys import requests import argparse from os import path from datetime import datetime, timedelta from subprocess import check_call time_str = '%Y-%m-%dT%H:%M:%SZ' short_time_str = '%Y-%m-%d' args = None def report_ssim_rebuffer(curr_ts, days): start_ts = curr_ts - timedelta(days=days) curr_dir = path.dirname(path.abspath(__file__)) plot_src = path.join(curr_dir, 'plot_ssim_rebuffer.py') time_range = '{}_{}'.format(start_ts.strftime(short_time_str), curr_ts.strftime(short_time_str)) output_fig_name = time_range + '.png' output_fig = path.join(curr_dir, output_fig_name) # run plot_ssim_rebuffer.py cmd = [plot_src, args.yaml_settings, '-o', output_fig, '--from', start_ts.strftime(time_str), '--to', curr_ts.strftime(time_str)] sys.stderr.write(' '.join(cmd) + '\n') check_call(cmd) # upload output_fig to Google cloud storage bucket_folder = 'puffer-stanford-public/ssim-rebuffer-figs' cmd = 'gsutil cp {} gs://{}'.format(output_fig, bucket_folder) sys.stderr.write(cmd + '\n') check_call(cmd, shell=True) gs_url = ('https://storage.googleapis.com/{}/{}' .format(bucket_folder, output_fig_name)) # remove local output_fig os.remove(output_fig) # post output_fig to Zulip template = 'Performance of ongoing experiments over the past {}:\n' + gs_url if days == 1: content = template.format('day') elif days == 7: content = template.format('week') elif days == 14: content = template.format('two weeks') else: content = template.format('{} days'.format(days)) payload = [ ('type', 'stream'), ('to', 'puffer-notification'), ('subject', 'Daily Report'), ('content', content), ] response = requests.post( os.environ['ZULIP_URL'], data=payload, auth=(os.environ['ZULIP_BOT_EMAIL'], os.environ['ZULIP_BOT_TOKEN'])) if response.status_code == requests.codes.ok: print('Posted to Zulip successfully') else: print('Failed to post to Zulip') def main(): parser = argparse.ArgumentParser() parser.add_argument('yaml_settings') global args args = parser.parse_args() 
td = datetime.utcnow() curr_ts = datetime(td.year, td.month, td.day, td.hour, 0) # report the performance over the past day report_ssim_rebuffer(curr_ts, 1) # report the performance over the past week report_ssim_rebuffer(curr_ts, 7) # report the performance over the past two weeks report_ssim_rebuffer(curr_ts, 14) if __name__ == '__main__': main()
{ "pile_set_name": "Github" }
null
null
package com.glumes.openglbasicshape.draw.texture import android.content.Context import android.opengl.GLES20 import android.opengl.GLES30 import com.glumes.openglbasicshape.R import com.glumes.openglbasicshape.base.LogUtil import com.glumes.openglbasicshape.draw.BaseShape import com.glumes.openglbasicshape.utils.MatrixStateOnly import com.glumes.openglbasicshape.utils.ShaderHelper import com.glumes.openglbasicshape.utils.TextureHelper import io.reactivex.Observable import java.nio.ByteBuffer import java.nio.ByteOrder import java.util.concurrent.TimeUnit import javax.microedition.khronos.egl.EGLConfig import javax.microedition.khronos.opengles.GL10 /** * Created by glumes on 09/05/2018 */ class CubeTexture(context: Context) : BaseShape(context) { private val U_VIEW_MATRIX = "u_ViewMatrix" private val U_MODEL_MATRIX = "u_ModelMatrix" private val U_PROJECTION_MATRIX = "u_ProjectionMatrix" private val A_POSITION = "a_Position" private val A_TEXTURE_COORDINATE = "a_TextureCoordinates" private val U_TEXTURE_UNIT = "u_TextureUnit" private var uModelMatrixAttr: Int = 0 private var uViewMatrixAttr: Int = 0 private var uProjectionMatrixAttr: Int = 0 private var aPositionAttr: Int = 0 private var aTextureCoordinateAttr: Int = 0 private var uTextureUnitAttr: Int = 0 private var mTextureId: IntArray? 
= null var vertexFloatBuffer = ByteBuffer .allocateDirect(8 * 4) .order(ByteOrder.nativeOrder()) .asFloatBuffer() var textureFloagBuffer = ByteBuffer .allocateDirect(8 * 4) .order(ByteOrder.nativeOrder()) .asFloatBuffer() val CubeSize = 1.0f val HalfCubeSize = CubeSize / 2 var eyeX = 0.0f val eyeY = 0.0f var eyeZ = 2.0f val eyeDistance = 3.0f val lookX = 0.0f val lookY = 0.0f val lookZ = 0.0f val upX = 0.0f val upY = 1.0f val upZ = 0.0f private val mMatrixStateOnly: MatrixStateOnly init { LogUtil.d("cube texture") mProgram = ShaderHelper.buildProgram(mContext, R.raw.texture_vertex_shader, R.raw.texture_fragment_shader) GLES20.glUseProgram(mProgram) initVertexData() initTextureData() POSITION_COMPONENT_COUNT = 2 mMatrixStateOnly = MatrixStateOnly() } // 六个面的顶点,都是一样的坐标,通过变换矩阵来转换位置进行绘制。 private fun initVertexData() { val faceLeft = -CubeSize / 2 val faceRight = -faceLeft val faceTop = CubeSize / 2 val faceBottom = -faceTop val vertices = floatArrayOf( faceLeft, faceBottom, faceRight, faceBottom, faceLeft, faceTop, faceRight, faceTop ) vertexFloatBuffer.put(vertices) vertexFloatBuffer.position(0) } // 六个面的纹理坐标,都是一样的坐标,通过变换矩阵来转换位置进行绘制。 private fun initTextureData() { val texCoords = floatArrayOf( 0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f ) textureFloagBuffer.put(texCoords) textureFloagBuffer.position(0) } override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) 
{ super.onSurfaceCreated(gl, config) GLES20.glClearColor(0f, 0f, 0f, 1.0f) //打开深度检测 GLES30.glEnable(GLES30.GL_DEPTH_TEST) //打开背面剪裁,面剔除,优化显示速度 GLES30.glEnable(GLES30.GL_CULL_FACE) aPositionAttr = GLES20.glGetAttribLocation(mProgram, A_POSITION) uModelMatrixAttr = GLES20.glGetUniformLocation(mProgram, U_MODEL_MATRIX) uViewMatrixAttr = GLES20.glGetUniformLocation(mProgram, U_VIEW_MATRIX) uProjectionMatrixAttr = GLES20.glGetUniformLocation(mProgram, U_PROJECTION_MATRIX) aTextureCoordinateAttr = GLES20.glGetAttribLocation(mProgram, A_TEXTURE_COORDINATE) uTextureUnitAttr = GLES20.glGetUniformLocation(mProgram, U_TEXTURE_UNIT) mTextureId = TextureHelper.loadCubeTexture(mContext, TextureHelper.CUBE) GLES20.glUniform1i(uTextureUnitAttr, 0) } override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) { super.onSurfaceChanged(gl, width, height) GLES20.glViewport(0, 0, width, height) val ratio = width.toFloat() / height val left = -ratio val bottom = -1.0f val top = 1.0f val near = 1.0f val far = 12.0f Observable.interval(30, TimeUnit.MILLISECONDS) .subscribe { eyeX = eyeDistance * Math.sin((radian * num).toDouble()).toFloat() eyeZ = eyeDistance * Math.cos((radian * num).toDouble()).toFloat() num++ if (num > 360) { num = 0 } } mMatrixStateOnly.setCamera(eyeX, eyeY, eyeZ, lookX, lookY, lookZ, upX, upY, upZ) mMatrixStateOnly.setProjectFrustum(left, ratio, bottom, top, near, far) mMatrixStateOnly.setInitStack() mMatrixStateOnly.rotate(-30f, 0f, 0f, 1f) mMatrixStateOnly.scale(0.398f, 0.555f, 0f) mMatrixStateOnly.translate(0.5f, 0.8f, 0f) } var num = 0 var RotateNum = 360 val radian = (2 * Math.PI / RotateNum).toFloat() override fun onDrawFrame(gl: GL10?) 
{ super.onDrawFrame(gl) GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f) GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT or GLES20.GL_DEPTH_BUFFER_BIT) // GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT) vertexFloatBuffer.position(0) GLES20.glVertexAttribPointer(aPositionAttr, POSITION_COMPONENT_COUNT, GLES20.GL_FLOAT, false, 0, vertexFloatBuffer) GLES20.glEnableVertexAttribArray(aPositionAttr) textureFloagBuffer.position(0) GLES20.glVertexAttribPointer(aTextureCoordinateAttr, POSITION_COMPONENT_COUNT, GLES20.GL_FLOAT, false, 0, textureFloagBuffer) GLES20
{ "pile_set_name": "Github" }
null
null
/*
Copyright The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Code generated by client-gen. DO NOT EDIT.

package internalversion

import (
	rest "k8s.io/client-go/rest"
	"k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset/scheme"
)

// AppsInterface bundles the typed resource clients for the apps API group.
type AppsInterface interface {
	RESTClient() rest.Interface
	ControllerRevisionsGetter
	StatefulSetsGetter
}

// AppsClient is used to interact with features provided by the apps group.
type AppsClient struct {
	restClient rest.Interface
}

// ControllerRevisions returns a client for ControllerRevision resources in the given namespace.
func (c *AppsClient) ControllerRevisions(namespace string) ControllerRevisionInterface {
	return newControllerRevisions(c, namespace)
}

// StatefulSets returns a client for StatefulSet resources in the given namespace.
func (c *AppsClient) StatefulSets(namespace string) StatefulSetInterface {
	return newStatefulSets(c, namespace)
}

// NewForConfig creates a new AppsClient for the given config.
func NewForConfig(c *rest.Config) (*AppsClient, error) {
	// copy so the caller's config is never mutated by setConfigDefaults
	config := *c
	if err := setConfigDefaults(&config); err != nil {
		return nil, err
	}
	client, err := rest.RESTClientFor(&config)
	if err != nil {
		return nil, err
	}
	return &AppsClient{client}, nil
}

// NewForConfigOrDie creates a new AppsClient for the given config and
// panics if there is an error in the config.
func NewForConfigOrDie(c *rest.Config) *AppsClient {
	client, err := NewForConfig(c)
	if err != nil {
		panic(err)
	}
	return client
}

// New creates a new AppsClient for the given RESTClient.
func New(c rest.Interface) *AppsClient {
	return &AppsClient{c}
}

// setConfigDefaults fills in the API path, user agent, group/version,
// serializer and rate-limit defaults needed to talk to the apps group.
func setConfigDefaults(config *rest.Config) error {
	config.APIPath = "/apis"
	if config.UserAgent == "" {
		config.UserAgent = rest.DefaultKubernetesUserAgent()
	}
	// pin the client to the scheme's most-preferred version of the apps group
	if config.GroupVersion == nil || config.GroupVersion.Group != scheme.Scheme.PrioritizedVersionsForGroup("apps")[0].Group {
		gv := scheme.Scheme.PrioritizedVersionsForGroup("apps")[0]
		config.GroupVersion = &gv
	}
	config.NegotiatedSerializer = scheme.Codecs

	if config.QPS == 0 {
		config.QPS = 5
	}
	if config.Burst == 0 {
		config.Burst = 10
	}

	return nil
}

// RESTClient returns a RESTClient that is used to communicate
// with API server by this client implementation.
func (c *AppsClient) RESTClient() rest.Interface {
	if c == nil {
		return nil
	}
	return c.restClient
}
{ "pile_set_name": "Github" }
null
null
title: TCC-Transaction 源码分析 —— 调试环境搭建 date: 2018-02-01 tags: categories: TCC-Transaction permalink: TCC-Transaction/build-debugging-environment --- 摘要: 原创出处 http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/ 「芋道源码」欢迎转载,保留摘要,谢谢! **本文主要基于 TCC-Transaction 1.2.3.3 正式版** - [1. 依赖工具](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/) - [2. 源码拉取](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/) - [3. 初始化数据库](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/) - [4. 启动 capital 项目](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/) - [5. 启动 redpacket 项目](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/) - [6. 启动 order 项目](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/) - [666. 彩蛋](http://www.iocoder.cn/TCC-Transaction/build-debugging-environment/) ------- ![](http://www.iocoder.cn/images/common/wechat_mp_2018_05_18.jpg) > 🙂🙂🙂关注**微信公众号:【芋道源码】**有福利: > 1. RocketMQ / MyCAT / Sharding-JDBC **所有**源码分析文章列表 > 2. RocketMQ / MyCAT / Sharding-JDBC **中文注释源码 GitHub 地址** > 3. 您对于源码的疑问每条留言**都**将得到**认真**回复。**甚至不知道如何读源码也可以请教噢**。 > 4. **新的**源码解析文章**实时**收到通知。**每周更新一篇左右**。 > 5. **认真的**源码交流微信群。 ------- # 1. 依赖工具 * Maven * Git * JDK * MySQL * IntelliJ IDEA # 2. 源码拉取 从官方仓库 [https://github.com/changmingxie/tcc-transaction.git](https://github.com/changmingxie/tcc-transaction.git) `Fork` 出属于自己的仓库。为什么要 `Fork` ?既然开始阅读、调试源码,我们可能会写一些注释,有了自己的仓库,可以进行自由的提交。😈 使用 `IntelliJ IDEA` 从 `Fork` 出来的仓库拉取代码。拉取完成后,`Maven` 会下载依赖包,可能会花费一些时间,耐心等待下。 本文基于 `master-1.2.x` 分支。 # 3. 初始化数据库 官方提供了两个 Demo 项目例子: * tcc-transaction-dubbo-sample * tcc-transaction-http-sample 考虑到不是所有所有同学都使用过 Dubbo 服务化框架,我们以 tcc-transaction-http-sample 项目为例子。 > 旁白君,一定注意,是 tcc-transaction-http-sample 项目,千万千万千万不要用错了!!! 
打开 tcc-transaction-http-sample/src/main/dbscripts 目录,有四个 SQL 脚本文件: * `create_db_cap.sql` :tcc-transaction-http-capital 项目数据库初始化脚本。 * `create_db_ord.sql` :tcc-transaction-http-order 项目数据库初始化脚本。 * `create_db_red.sql` :tcc-transaction-http-redpacket 项目数据库初始化脚本。 * `create_db_tcc.sql` :tcc-transaction **底层**数据库初始化脚本。 笔者使用 Navicat 进行数据库脚本执行。使用方式为:Navicat 菜单 Connection -> Execute SQL File,选择脚本文件,逐个执行。 目前数据库脚本未使用 `USE` 语句选择对应数据库,每个脚本都需要进行添加。以 `create_db_cap.sql` 举例子: ```SQL CREATE DATABASE `tcc_cap` /*!40100 DEFAULT CHARACTER SET utf8 */; -- 新增 USE USE `tcc_cap`; ``` # 4. 启动 capital 项目 1. 修改项目下 `jdbc.properties` 文件,**填写成你的数据库地址**。 2. 使用 IDEA 配置 Tomcat 进行启动。这里要注意下: ```XML // appcontext-service-provider.xml <bean id="httpServer" class="org.springframework.remoting.support.SimpleHttpServerFactoryBean"> <property name="contexts"> <util:map> <entry key="/remoting/CapitalTradeOrderService" value-ref="capitalTradeOrderServiceExporter"/> <entry key="/remoting/CapitalAccountService" value-ref="capitalAccountServiceExporter"/> </util:map> </property> <property name="port" value="8081"/> </bean> ``` * 默认开启 8081 端口提供接口服务。所以配置 Tomcat 的端口不能再使用 8081,避免冲突。例如,笔者使用 18081。 3. 访问 `http://127.0.0.1:18081/`,看到 "hello tcc transacton http sample capital",代表项目启动完成。**`18081` 为你填写的 Tomcat 端口**。 # 5. 启动 redpacket 项目 同 tcc-transaction-http-capital 项目。 # 6. 启动 order 项目 1. 修改项目下 `jdbc.properties` 文件,**填写成你的数据库地址**。 2. 使用 IDEA 配置 Tomcat 进行启动。 3. 访问 `http://127.0.0.1:8080/`,看到 "sample 说明...",代表项目启动完成。**`8080` 为你填写的 Tomcat 端口**。 4. 点击 [商品列表链接] -> [购买] -> [支付],如果看到 "支付成功" 或者 "支付失败",恭喜你🎉,你已经成功搭建完你的调试环境。愉快的开始
{ "pile_set_name": "Github" }
null
null
<?xml version="1.0" encoding="utf-8"?> <FrameLayout xmlns:android="http://schemas.android.com/apk/res/android" xmlns:tools="http://schemas.android.com/tools" android:layout_width="match_parent" android:layout_height="match_parent" android:theme="@style/Base.Theme.OpenWith" android:paddingTop="8dp"> <EditText android:id="@+id/add_to_home_screen_title" android:layout_width="match_parent" android:layout_height="match_parent" android:layout_marginEnd="20dp" android:layout_marginStart="20dp" android:hint="@string/add_to_home_screen_dialog_hint" android:singleLine="true" android:imeOptions="actionGo" android:imeActionLabel="@string/add" android:inputType="textCapSentences" tools:ignore="Autofill" /> <com.tasomaniac.android.widget.DelayedProgressBar android:id="@+id/add_to_home_screen_progress" style="?android:progressBarStyleSmall" android:layout_width="wrap_content" android:layout_height="wrap_content" android:layout_gravity="center_vertical|end" android:layout_marginEnd="24dp" android:visibility="gone" /> </FrameLayout>
{ "pile_set_name": "Github" }
null
null
require 'rack'
require 'uri'

class Refraction
  # Per-request wrapper handed to the configured rules block. Records the
  # decision the rules make (rewrite / redirect / respond) and exposes the
  # rewritten URL components, each falling back to the incoming request.
  class Request < Rack::Request
    attr_reader :action, :status, :message

    def method; request_method; end
    def query; query_string; end

    ### actions

    # Stores the rewrite/redirect target: either a complete URL string, or
    # a hash of individual components (:scheme/:protocol, :host, :port,
    # :path, :query). Unspecified components keep the request's values.
    def set(options)
      if options.is_a?(String)
        @re_location = options
      else
        @re_scheme = options[:protocol] if options[:protocol] # :protocol is alias for :scheme
        @re_scheme = options[:scheme] if options[:scheme]
        @re_host = options[:host] if options[:host]
        @re_port = options[:port] if options[:port]
        @re_path = options[:path] if options[:path]
        @re_query = options[:query] if options[:query]
      end
    end

    # Rewrite the request in place and pass it downstream (no redirect).
    def rewrite!(options)
      @action = :rewrite
      set(options)
    end

    # Answer immediately with a 301 Moved Permanently to the given target.
    def permanent!(options)
      @action = :permanent
      @status = 301
      set(options)
      @message = "moved to #{@uri}" # NOTE(review): @uri is never assigned in this class, so this interpolates nil ("moved to ") — presumably `location` was intended; verify before changing
    end

    # Answer immediately with a 302 Found to the given target.
    def found!(options)
      @action = :found
      @status = 302
      set(options)
      @message = "moved to #{@uri}" # NOTE(review): same unassigned @uri as in permanent!
    end

    # Answer immediately with an arbitrary status/headers/body.
    def respond!(status, headers, content)
      @action = :respond
      @status = status
      @headers = headers
      @message = content
    end

    ### response

    # Rack response triple. For redirects (no explicit headers) a Location
    # header pointing at the rewritten URL is generated.
    def response
      headers = @headers || { 'Location' => location, 'Content-Type' => 'text/plain' }
      headers['Content-Length'] = message.length.to_s
      [status, headers, [message]]
    end

    # Full target URL: an explicitly set location string wins, otherwise
    # the URL is rebuilt from the (possibly overridden) components below.
    def location
      @re_location || url
    end

    # Each component prefers the rule-set override, then the original request.
    def scheme; @re_scheme || super; end
    def host; @re_host || super; end
    def path; @re_path || super; end
    def query_string; @re_query || super; end

    # Overridden port; or, when scheme/host changed, the scheme's default
    # port; otherwise the incoming request's port.
    def port
      @re_port || ((@re_scheme || @re_host) && default_port) || super
    end

    # Default port for the (possibly rewritten) scheme; nil for others.
    def default_port
      case scheme
      when "http" ; 80
      when "https" ; 443
      end
    end

    # host:port pair for HTTP_HOST; just the host when no port is known.
    def http_host
      self.port ? "#{self.host}:#{self.port}" : self.host
    end
  end ### class Request

  # Store the rules block; it is called once per request with a Request.
  def self.configure(&block)
    @rules = block
  end

  def self.rules
    @rules
  end

  def initialize(app)
    @app = app
  end

  # Instance-level accessor for the class-wide rules block.
  def rules
    self.class.rules
  end

  # Rack entry point: run the configured rules, then either answer
  # immediately (redirect/respond), forward a rewritten env downstream,
  # or pass the request through untouched.
  def call(env)
    if self.rules
      request = Request.new(env)

      self.rules.call(request)

      case request.action
      when :permanent, :found, :respond
        request.response
      when :rewrite
        # mutate the Rack env so downstream middleware/apps see the
        # rewritten request
        env["rack.url_scheme"] = request.scheme
        env["HTTP_HOST"] = request.http_host
        env["SERVER_NAME"] = request.host
        env["HTTP_PORT"] = request.port if request.port
        env["PATH_INFO"] = request.path
        env["QUERY_STRING"] = request.query
        env["REQUEST_URI"] = request.fullpath
        @app.call(env)
      else
        @app.call(env)
      end
    else
      @app.call(env)
    end
  end
end

# Rack version compatibility shim: Rack 1.0's Request lacks #path.
if Rack.release == "1.0"
  class Rack::Request
    def path
      script_name + path_info
    end
  end
end
null
null
%YAML 1.1 %TAG !u! tag:unity3d.com,2011: --- !u!74 &7400000 AnimationClip: m_ObjectHideFlags: 0 m_PrefabParentObject: {fileID: 0} m_PrefabInternal: {fileID: 0} m_Name: growToHeight180 serializedVersion: 6 m_Legacy: 0 m_Compressed: 0 m_UseHighQualityCurve: 1 m_RotationCurves: [] m_CompressedRotationCurves: [] m_PositionCurves: [] m_ScaleCurves: [] m_FloatCurves: - curve: serializedVersion: 2 m_Curve: - time: 0 value: 0 inSlope: 0 outSlope: 0 tangentMode: 1 - time: .333333343 value: 180 inSlope: -894.097351 outSlope: 800 tangentMode: 1 m_PreInfinity: 2 m_PostInfinity: 2 attribute: m_PreferredHeight path: classID: 114 script: {fileID: 1679637790, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3} m_PPtrCurves: [] m_SampleRate: 60 m_WrapMode: 0 m_Bounds: m_Center: {x: 0, y: 0, z: 0} m_Extent: {x: 0, y: 0, z: 0} m_ClipBindingConstant: genericBindings: - path: 0 attribute: 379034303 script: {fileID: 1679637790, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3} classID: 114 customType: 0 isPPtrCurve: 0 pptrCurveMapping: [] m_AnimationClipSettings: serializedVersion: 2 m_StartTime: 0 m_StopTime: .333333343 m_OrientationOffsetY: 0 m_Level: 0 m_CycleOffset: 0 m_LoopTime: 0 m_LoopBlend: 0 m_LoopBlendOrientation: 0 m_LoopBlendPositionY: 0 m_LoopBlendPositionXZ: 0 m_KeepOriginalOrientation: 0 m_KeepOriginalPositionY: 1 m_KeepOriginalPositionXZ: 0 m_HeightFromFeet: 0 m_Mirror: 0 m_EditorCurves: - curve: serializedVersion: 2 m_Curve: - time: 0 value: 0 inSlope: 0 outSlope: 0 tangentMode: 1 - time: .333333343 value: 180 inSlope: -894.097351 outSlope: 800 tangentMode: 1 m_PreInfinity: 2 m_PostInfinity: 2 attribute: m_PreferredHeight path: classID: 114 script: {fileID: 1679637790, guid: f5f67c52d1564df4a8936ccd202a3bd8, type: 3} m_EulerEditorCurves: [] m_HasGenericRootTransform: 0 m_HasMotionFloatCurves: 0 m_GenerateMotionCurves: 0 m_Events: []
{ "pile_set_name": "Github" }
null
null
import NTask from "../ntask.js";
import Template from "../templates/user.js";

// View class for the current user's account page: fetches the user's
// data, renders it into `body`, and wires up the cancel-account action.
class User extends NTask {
  constructor(body) {
    super();
    this.body = body;
  }

  render() {
    this.renderUserData();
  }

  addEventListener() {
    this.userCancelClick();
  }

  // Loads the authenticated user's data and injects the rendered
  // template into the container element; emits "error" on failure.
  renderUserData() {
    const opts = {
      method: "GET",
      url: `${this.URL}/user`,
      json: true,
      headers: {
        authorization: localStorage.getItem("token")
      }
    };
    this.request(opts, (err, resp, data) => {
      const failed = err || resp.status === 412;
      if (failed) {
        this.emit("error", err);
        return;
      }
      this.body.innerHTML = Template.render(data);
      this.addEventListener();
    });
  }

  // Confirms with the user, then issues a DELETE for the account and
  // emits "remove-account" on success or "remove-error" on failure.
  userCancelClick() {
    const button = this.body.querySelector("[data-remove-account]");
    button.addEventListener("click", (e) => {
      e.preventDefault();
      if (!confirm("This will cancel your account, are you sure?")) {
        return;
      }
      const opts = {
        method: "DELETE",
        url: `${this.URL}/user`,
        headers: {
          authorization: localStorage.getItem("token")
        }
      };
      this.request(opts, (err, resp, data) => {
        if (err || resp.status === 412) {
          this.emit("remove-error", err);
        } else {
          this.emit("remove-account");
        }
      });
    });
  }
}

module.exports = User;
{ "pile_set_name": "Github" }
null
null
--- title: UnionCaseInfo.GetFields Method (F#) description: UnionCaseInfo.GetFields Method (F#) keywords: visual f#, f#, functional programming author: dend manager: danielfe ms.date: 05/16/2016 ms.topic: language-reference ms.prod: visual-studio-dev14 ms.technology: devlang-fsharp ms.assetid: 1d3acc9a-0087-43ef-b07e-32da355b67bd --- # UnionCaseInfo.GetFields Method (F#) The fields associated with the case, represented by a `System.Reflection.PropertyInfo`. **Namespace/Module Path:** Microsoft.FSharp.Reflection **Assembly:** FSharp.Core (in FSharp.Core.dll) ## Syntax ```fsharp // Signature: member this.GetFields : unit -> PropertyInfo [] // Usage: unionCaseInfo.GetFields () ``` ## Return Value The fields associated with the case as an array of `System.Reflection.PropertyInfo`. ## Platforms Windows 8, Windows 7, Windows Server 2012, Windows Server 2008 R2 ## Version Information **F# Core Library Versions** Supported in: 2.0, 4.0, Portable ## See Also [Reflection.UnionCaseInfo Class &#40;F&#35;&#41;](Reflection.UnionCaseInfo-Class-%5BFSharp%5D.md) [Microsoft.FSharp.Reflection Namespace &#40;F&#35;&#41;](Microsoft.FSharp.Reflection-Namespace-%5BFSharp%5D.md)
{ "pile_set_name": "Github" }
null
null
using System.Security.Claims;
using GraphQL.Authorization;

namespace GraphQl.AspNetCore
{
    /// <summary>
    /// Per-request GraphQL user context that exposes the authenticated
    /// <see cref="ClaimsPrincipal"/> to GraphQL.Authorization checks.
    /// </summary>
    public class GraphQLUserContext : IProvideClaimsPrincipal
    {
        /// <summary>The current request's claims principal.</summary>
        public ClaimsPrincipal User { get; set; }
    }
}
{ "pile_set_name": "Github" }
null
null
//======= Copyright (c) Valve Corporation, All rights reserved. ===============
//
// Purpose: Allows Enums to be shown in the inspector as flags
//
//=============================================================================

using UnityEngine;
#if UNITY_EDITOR
using UnityEditor;
#endif

namespace Valve.VR.InteractionSystem
{
	//-------------------------------------------------------------------------
	// Marker attribute: tag a serialized enum field with [EnumFlags] to make
	// the inspector draw it as a multi-select bit mask instead of a single
	// value dropdown.
	public class EnumFlags : PropertyAttribute
	{
		public EnumFlags() { }
	}

#if UNITY_EDITOR
	//-------------------------------------------------------------------------
	// Editor-only drawer for fields tagged with [EnumFlags]: renders the enum
	// via EditorGUI.MaskField and writes the combined bits back.
	[CustomPropertyDrawer( typeof( EnumFlags ) )]
	public class EnumFlagsPropertyDrawer : PropertyDrawer
	{
		public override void OnGUI( Rect position, SerializedProperty property, GUIContent label )
		{
			property.intValue = EditorGUI.MaskField( position, label, property.intValue, property.enumNames );
		}
	}
#endif
}
{ "pile_set_name": "Github" }
null
null
/* * Copyright (C) 2016, Canon Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Canon Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

#pragma once

#include "CachedRawResourceClient.h"
#include "CachedResourceHandle.h"
#include "ResourceRequest.h"

namespace WebCore {

class CachedRawResource;
class Document;
class DocumentThreadableLoader;
class ResourceError;

// Issues and validates a CORS preflight request on behalf of a
// DocumentThreadableLoader; observes the preflight load via its private
// CachedRawResourceClient base.
class CrossOriginPreflightChecker final : private CachedRawResourceClient {
public:
    // One-shot static helper that performs the whole preflight for the
    // given loader/request pair.
    static void doPreflight(DocumentThreadableLoader&, ResourceRequest&&);

    CrossOriginPreflightChecker(DocumentThreadableLoader&, ResourceRequest&&);
    ~CrossOriginPreflightChecker();

    void startPreflight();
    void setDefersLoading(bool);

private:
    // CachedRawResourceClient overrides: invoked when the preflight load
    // finishes or is redirected.
    void notifyFinished(CachedResource&) final;
    void redirectReceived(CachedResource&, ResourceRequest&&, const ResourceResponse&, CompletionHandler<void(ResourceRequest&&)>&&) final;

    static void handleLoadingFailure(DocumentThreadableLoader&, unsigned long, const ResourceError&);
    static void validatePreflightResponse(DocumentThreadableLoader&, ResourceRequest&&, unsigned long, const ResourceResponse&);

    DocumentThreadableLoader& m_loader;
    CachedResourceHandle<CachedRawResource> m_resource; // in-flight preflight resource, if any
    ResourceRequest m_request; // request passed at construction — presumably the request being preflighted; verify against the .cpp
};

} // namespace WebCore
{ "pile_set_name": "Github" }
null
null
import tensorflow as tf

from models.stacked_bidirectional import StackedBidirectional


class LstmStackedBidirectional(StackedBidirectional):
    # StackedBidirectional variant whose atomic recurrent unit is an LSTM
    # cell; only get_cell() is specialized, everything else is inherited.

    def __init__(self, review_summary_file, checkpointer, num_layers, attention=False):
        """
        All arguments are forwarded unchanged to StackedBidirectional.

        :param review_summary_file: data file argument for the base class
        :param checkpointer: checkpointing helper for the base class
        :param num_layers: presumably the number of stacked layers — see
            StackedBidirectional for the authoritative meaning
        :param attention: whether to enable the attention mechanism
        """
        super(LstmStackedBidirectional, self).__init__(review_summary_file, checkpointer, num_layers, attention)

    def get_cell(self):
        """
        Return the atomic RNN cell type used for this model.

        The cell size comes from self.memory_dim, which is set by the base
        class (not visible in this file).

        :return: The atomic RNN Cell (an LSTMCell)
        """
        return tf.nn.rnn_cell.LSTMCell(self.memory_dim)
{ "pile_set_name": "Github" }
null
null
<?xml version='1.0' encoding='utf-8'?> <testproblem> <name>Standing free surface wave in channel using P1DG-P2.</name> <owner userid="skramer"/> <tags>flml</tags> <problem_definition length="short" nprocs="1"> <command_line>fluidity -v3 -l standing_wave.flml</command_line> </problem_definition> <variables> <variable name="solvers_converged" language="python">import os files = os.listdir("./") solvers_converged = not "matrixdump" in files and not "matrixdump.info" in files</variable> <variable name="fs_left" language="python"> import h5py import numpy as np f = h5py.File('standing_wave.detectors.h5part', 'r') lt_id = f.attrs['Left%ids'][0] - 1 fsl = np.array([f['/Step#{}/Water%FreeSurface'.format(i)][lt_id] for i in range(len(f))]) t = np.array([f['/Step#{}'.format(i)].attrs['time'][0] for i in range(len(f))]) fs_left={} fs_left['min']=fsl.min() fs_left['min_time']=t[fsl.argmin()] fs_left['end_value']=fsl[-1]</variable> <variable name="fs_right" language="python"> import h5py import numpy as np f = h5py.File('standing_wave.detectors.h5part', 'r') rt_id = f.attrs['Right%ids'][0] - 1 fsr = np.array([f['/Step#{}/Water%FreeSurface'.format(i)][rt_id] for i in range(len(f))]) t = np.array([f['/Step#{}'.format(i)].attrs['time'][0] for i in range(len(f))]) fs_right={} fs_right['max']=fsr.max() fs_right['max_time']=t[fsr.argmax()] fs_right['end_value']=fsr[-1]</variable> <variable name="fs_integral_max" language="python">import fluidity_tools stat=fluidity_tools.stat_parser('standing_wave.stat') fsi=stat['Water']['FreeSurface']['integral'] fs_integral_max=abs(fsi).max()<comment>Horizontal integral of free surface should be zero</comment></variable> <variable name="fs_integral_range" language="python">import fluidity_tools stat=fluidity_tools.stat_parser('standing_wave.stat') fsi=stat['Water']['FreeSurface']['integral'] fs_integral_range=fsi.max()-fsi.min()<comment>Horizontal integral of free surface should be zero</comment></variable> </variables> <pass_tests> <test 
name="Solvers converged" language="python">assert(solvers_converged)</test> <test name="fs_left_min" language="python">assert(fs_left['min']&lt;-0.95)</test> <test name="fs_left_min_time" language="python">assert abs(fs_left['min_time']-1e4)&lt;400</test> <test name="fs_end_value" language="python">assert abs(fs_left['end_value']-1.0)&lt;0.2</test> <test name="fs_right_max" language="python">assert(fs_right['max']&gt;-0.95)</test> <test name="fs_right_max_time" language="python">assert abs(fs_right['max_time']-1e4)&lt;200</test> <test name="fs_end_value" language="python">assert abs(fs_right['end_value']+1.0)&lt;0.2</test> <test name="fs_integral_max_zero" language="python">area=1e6*1e5 assert fs_integral_max&lt;area*1e-5</test> <test name="fs_integral_range_zero" language="python">assert fs_integral_range&lt;1.0</test> </pass_tests> <warn_tests/> </testproblem>
{ "pile_set_name": "Github" }
null
null
/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package util import ( "bytes" "errors" "fmt" "io" "net/url" "os" "strconv" "strings" "time" jsonpatch "github.com/evanphx/json-patch" "github.com/spf13/cobra" "github.com/spf13/pflag" apierrors "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/api/meta" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/runtime" utilerrors "k8s.io/apimachinery/pkg/util/errors" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/apimachinery/pkg/util/yaml" "k8s.io/cli-runtime/pkg/genericclioptions" "k8s.io/cli-runtime/pkg/resource" "k8s.io/client-go/dynamic" "k8s.io/client-go/rest" "k8s.io/client-go/scale" "k8s.io/client-go/tools/clientcmd" "k8s.io/klog" utilexec "k8s.io/utils/exec" ) const ( ApplyAnnotationsFlag = "save-config" DefaultErrorExitCode = 1 ) type debugError interface { DebugError() (msg string, args []interface{}) } // AddSourceToErr adds handleResourcePrefix and source string to error message. // verb is the string like "creating", "deleting" etc. // source is the filename or URL to the template file(*.json or *.yaml), or stdin to use to handle the resource. 
func AddSourceToErr(verb string, source string, err error) error {
	if source != "" {
		// Preserve API status errors (so callers can still inspect the
		// status) while prefixing the message with the verb and source.
		if statusError, ok := err.(apierrors.APIStatus); ok {
			status := statusError.Status()
			status.Message = fmt.Sprintf("error when %s %q: %v", verb, source, status.Message)
			return &apierrors.StatusError{ErrStatus: status}
		}
		return fmt.Errorf("error when %s %q: %v", verb, source, err)
	}
	// no source known: return the error unchanged
	return err
}

var fatalErrHandler = fatal

// BehaviorOnFatal allows you to override the default behavior when a fatal
// error occurs, which is to call os.Exit(code). You can pass 'panic' as a function
// here if you prefer the panic() over os.Exit(1).
func BehaviorOnFatal(f func(string, int)) {
	fatalErrHandler = f
}

// DefaultBehaviorOnFatal allows you to undo any previous override. Useful in
// tests.
func DefaultBehaviorOnFatal() {
	fatalErrHandler = fatal
}

// fatal prints the message (if provided) and then exits. If V(2) or greater,
// klog.Fatal is invoked for extended information.
func fatal(msg string, code int) {
	if klog.V(2) {
		klog.FatalDepth(2, msg)
	}
	if len(msg) > 0 {
		// add newline if needed
		if !strings.HasSuffix(msg, "\n") {
			msg += "\n"
		}
		fmt.Fprint(os.Stderr, msg)
	}
	os.Exit(code)
}

// ErrExit may be passed to CheckError to instruct it to output nothing but exit with
// status code 1.
var ErrExit = fmt.Errorf("exit")

// CheckErr prints a user friendly error to STDERR and exits with a non-zero
// exit code. Unrecognized errors will be printed with an "error: " prefix.
//
// This method is generic to the command in use and may be used by non-Kubectl
// commands.
func CheckErr(err error) {
	checkErr(err, fatalErrHandler)
}

// CheckDiffErr prints a user friendly error to STDERR and exits with a
// non-zero and non-one exit code. Unrecognized errors will be printed
// with an "error: " prefix.
//
// This method is meant specifically for `kubectl diff` and may be used
// by other commands.
func CheckDiffErr(err error) { checkErr(err, func(msg string, code int) { fatalErrHandler(msg, code+1) }) } // checkErr formats a given error as a string and calls the passed handleErr // func with that string and an kubectl exit code. func checkErr(err error, handleErr func(string, int)) { // unwrap aggregates of 1 if agg, ok := err.(utilerrors.Aggregate); ok && len(agg.Errors()) == 1 { err = agg.Errors()[0] } if err == nil { return } switch { case err == ErrExit: handleErr("", DefaultErrorExitCode) case apierrors.IsInvalid(err): details := err.(*apierrors.StatusError).Status().Details s := "The request is invalid" if details == nil { handleErr(s, DefaultErrorExitCode) return } if len(details.Kind) != 0 || len(details.Name) != 0 { s = fmt.Sprintf("The %s %q is invalid", details.Kind, details.Name) } if len(details.Causes) > 0 { errs := statusCausesToAggrError(details.Causes) handleErr(MultilineError(s+": ", errs), DefaultErrorExitCode) } else { handleErr(s, DefaultErrorExitCode) } case clientcmd.IsConfigurationInvalid(err): handleErr(MultilineError("Error in configuration: ", err), DefaultErrorExitCode) default: switch err := err.(type) { case *meta.NoResourceMatchError: switch { case len(err.PartialResource.Group) > 0 && len(err.PartialResource.Version) > 0: handleErr(fmt.Sprintf("the server doesn't have a resource type %q in group %q and version %q", err.PartialResource.Resource, err.PartialResource.Group, err.PartialResource.Version), DefaultErrorExitCode) case len(err.PartialResource.Group) > 0: handleErr(fmt.Sprintf("the server doesn't have a resource type %q in group %q", err.PartialResource.Resource, err.PartialResource.Group), DefaultErrorExitCode) case len(err.PartialResource.Version) > 0: handleErr(fmt.Sprintf("the server doesn't have a resource type %q in version %q", err.PartialResource.Resource, err.PartialResource.Version), DefaultErrorExitCode) default: handleErr(fmt.Sprintf("the server doesn't have a resource type %q", 
err.PartialResource.Resource), DefaultErrorExitCode) } case utilerrors.Aggregate: handleErr(MultipleErrors(``, err.Errors()), DefaultErrorExitCode) case utilexec.ExitError: handleErr(err.Error(), err.ExitStatus()) default: // for any other error type msg, ok := StandardErrorMessage(err) if !ok { msg = err.Error() if !strings.HasPrefix(msg, "error: ") { msg = fmt.Sprintf("error: %s", msg) } } handleErr(msg, DefaultErrorExitCode) } } } func statusCausesToAggrError(scs []metav1.
{ "pile_set_name": "Github" }
null
null
<?xml version="1.0"?> <Document style="helpdocument"> <p style="htitle">Obair</p> <p style="hp">Solairidh daoine a bhios a' fuireach ann an còmhnaidh obair. Chleachdaidh a' mhòrchuid a raointean obair ach an obraich iad. Chan eil cuid a raointean a leithid carraighean feumach air obair ach fhad 's a thèid an togail.</p> <p style="hp">'S urrainn dha mhargaidean obair a sholar nuair a bhios tu air margaid a thogail am fagas an raoin a bhios feumach air obair. Feumaidh a' mhargaid an obair fhaighinn o àite air choireigin co-dhiù 's tha dà dhòigh ann: air an dàrna làimh, ma tha còmhnaidh am fagas na margaid, solairidh e obair dhan mhargaid an coimeas ris an t-sluagh a bhios a' fuireach ann is na h-àireamh de mhargaidean a cho-roinneas an obair. 'S e giùlan a tha san dòigh eile. Ma tha barrachd obrach aig margaid na tha i feumach air, cuiridh i cuid dhiubh air a' ghiùlan. Gheibh margaid sam bith a bhios ceangailte ris a' ghiùlan traca, rathaid no rèile greim air an obair ma bhios i feumach oirre.</p> <p style="hp">Ma dh'fhàsas magaid "làn" de dh'obair cha ghabh i rithe tuilleadh o chòmhnaidhean. Bidh daoine gun chosnadh ann nuair a thachras seo is gun mhargaid nach eil làn am fagas.</p> <p style="hp">Cuiridh ionad-slàinte am fagas còmhnaidh ris an uiread de dh'obair a thèid a sholar do mhagaidean ionadail.</p> <p style="hsubtitle">Faic cuideachd:</p> <li><a href="commodities">Bathar-amh</a></li> <li><a href="residential">Còmhnaidhean</a></li> <li><a href="transport">Giùlan</a></li> <li><a href="market">Margaid</a></li> </Document>
{ "pile_set_name": "Github" }
null
null
name=Sewers of Estark image=https://magiccards.info/scans/en/mbp/2.jpg value=2.781 rarity=R type=Instant cost={2}{B}{B} ability=Cast SN with AI only during combat. timing=pump oracle=Choose target creature. If it's attacking, it can't be blocked this turn. If it's blocking, prevent all combat damage that would be dealt this combat by it and each creature it's blocking. requires_groovy_code
{ "pile_set_name": "Github" }
null
null
/* dporfsx.f -- translated by f2c (version 20061008). You must link the resulting object file with libf2c: on Microsoft Windows system, link with libf2c.lib; on Linux or Unix systems, link with .../path/to/libf2c.a -lm or, if you install libf2c.a in a standard place, with -lf2c -lm -- in that order, at the end of the command line, as in cc *.o -lf2c -lm Source for libf2c is in /netlib/f2c/libf2c.zip, e.g., http://www.netlib.org/f2c/libf2c.zip */ #include "f2c.h" #include "blaswrap.h" /* Table of constant values */ static integer c_n1 = -1; static integer c__0 = 0; static integer c__1 = 1; /* Subroutine */ int dporfsx_(char *uplo, char *equed, integer *n, integer * nrhs, doublereal *a, integer *lda, doublereal *af, integer *ldaf, doublereal *s, doublereal *b, integer *ldb, doublereal *x, integer * ldx, doublereal *rcond, doublereal *berr, integer *n_err_bnds__, doublereal *err_bnds_norm__, doublereal *err_bnds_comp__, integer * nparams, doublereal *params, doublereal *work, integer *iwork, integer *info) { /* System generated locals */ integer a_dim1, a_offset, af_dim1, af_offset, b_dim1, b_offset, x_dim1, x_offset, err_bnds_norm_dim1, err_bnds_norm_offset, err_bnds_comp_dim1, err_bnds_comp_offset, i__1; doublereal d__1, d__2; /* Builtin functions */ double sqrt(doublereal); /* Local variables */ doublereal illrcond_thresh__, unstable_thresh__, err_lbnd__; integer ref_type__, j; doublereal rcond_tmp__; integer prec_type__; extern doublereal dla_porcond__(char *, integer *, doublereal *, integer * , doublereal *, integer *, integer *, doublereal *, integer *, doublereal *, integer *, ftnlen); doublereal cwise_wrong__; extern /* Subroutine */ int dla_porfsx_extended__(integer *, char *, integer *, integer *, doublereal *, integer *, doublereal *, integer *, logical *, doublereal *, doublereal *, integer *, doublereal *, integer *, doublereal *, integer *, doublereal *, doublereal *, doublereal *, doublereal *, doublereal *, doublereal *, doublereal *, integer *, 
doublereal *, doublereal *, logical *, integer *, ftnlen); char norm[1]; logical ignore_cwise__; extern logical lsame_(char *, char *); doublereal anorm; logical rcequ; extern doublereal dlamch_(char *); extern /* Subroutine */ int xerbla_(char *, integer *), dpocon_( char *, integer *, doublereal *, integer *, doublereal *, doublereal *, doublereal *, integer *, integer *); extern doublereal dlansy_(char *, char *, integer *, doublereal *, integer *, doublereal *); extern integer ilaprec_(char *); integer ithresh, n_norms__; doublereal rthresh; /* -- LAPACK routine (version 3.2.1) -- */ /* -- Contributed by James Demmel, Deaglan Halligan, Yozo Hida and -- */ /* -- Jason Riedy of Univ. of California Berkeley. -- */ /* -- April 2009 -- */ /* -- LAPACK is a software package provided by Univ. of Tennessee, -- */ /* -- Univ. of California Berkeley and NAG Ltd. -- */ /* .. */ /* .. Scalar Arguments .. */ /* .. */ /* .. Array Arguments .. */ /* .. */ /* Purpose */ /* ======= */ /* DPORFSX improves the computed solution to a system of linear */ /* equations when the coefficient matrix is symmetric positive */ /* definite, and provides error bounds and backward error estimates */ /* for the solution. In addition to normwise error bound, the code */ /* provides maximum componentwise error bound if possible. See */ /* comments for ERR_BNDS_NORM and ERR_BNDS_COMP for details of the */ /* error bounds. */ /* The original system of linear equations may have been equilibrated */ /* before calling this routine, as described by arguments EQUED and S */ /* below. In this case, the solution and error bounds returned are */ /* for the original unequilibrated system. */ /* Arguments */ /* ========= */ /* Some optional parameters are bundled in the PARAMS array. These */ /* settings determine how refinement is performed, but often the */ /* defaults are acceptable. 
If the defaults are acceptable, users */ /* can pass NPARAMS = 0 which prevents the source code from accessing */ /* the PARAMS argument. */ /* UPLO (input) CHARACTER*1 */ /* = 'U': Upper triangle of A is stored; */ /* = 'L': Lower triangle of A is stored. */ /* EQUED (input) CHARACTER*1 */ /* Specifies the form of equilibration that was done to A */ /* before calling this routine. This is needed to compute */ /* the solution and error bounds correctly. */ /* = 'N': No equilibration */ /* = 'Y': Both row and column equilibration, i.e., A has been */ /* replaced by diag(S) * A * diag(S). */ /* The right hand side B has been changed accordingly. */ /* N (input) INTEGER */ /* The order of the matrix A. N >= 0. */ /* NRHS (input) INTEGER */ /* The number of right hand sides, i.e., the number of columns */ /* of the matrices B and X. NRHS >= 0. */ /* A (input) DOUBLE PRECISION array, dimension (LDA,N) */ /* The symmetric matrix A. If UPLO = 'U', the leading N-by-N */ /* upper triangular part of A contains the upper triangular part */ /* of the matrix A, and the strictly lower triangular part of A */ /* is not referenced. If UPLO = 'L', the leading N-by-N lower */ /* triangular part of A contains the lower triangular part of */ /* the matrix A, and the strictly upper triangular part of A is */ /* not referenced. */ /* LDA (input) INTEGER */ /* The leading dimension of the array A. LDA >= max(1,N). */ /* AF (input) DOUBLE PRECISION array, dimension (LDAF,N) */ /* The triangular factor U or L from the Cholesky factorization */ /* A = U**T*U or A = L*L**T, as computed by DPOTRF. */ /* LDAF (input) INTEGER */ /* The leading dimension of the array AF. LDAF >= max(1,N). */ /* S (input or output) DOUBLE PRECISION array, dimension (N) */ /* The row scale factors for A. If EQUED = 'Y', A is multiplied on */ /* the left and right by diag(S). S is an input argument if FACT = */ /* 'F'; otherwise, S is an output argument. 
If FACT = 'F' and EQUED */ /* = 'Y', each element of S must be positive. If S is output, each */ /* element of S is a power of the radix. If S is input, each element */ /* of S should be a power of the radix to ensure a reliable solution */ /* and error estimates. Scaling by powers of the radix does not cause */ /* rounding errors unless the result underflows or overflows. */ /* Rounding errors during scaling lead to refining with
{ "pile_set_name": "Github" }
null
null
<?xml version="1.0" encoding="utf-8"?> <root> <!-- Microsoft ResX Schema Version 2.0 The primary goals of this format is to allow a simple XML format that is mostly human readable. The generation and parsing of the various data types are done through the TypeConverter classes associated with the data types. Example: ... ado.net/XML headers & schema ... <resheader name="resmimetype">text/microsoft-resx</resheader> <resheader name="version">2.0</resheader> <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader> <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader> <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data> <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data> <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64"> <value>[base64 mime encoded serialized .NET Framework object]</value> </data> <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64"> <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value> <comment>This is a comment</comment> </data> There are any number of "resheader" rows that contain simple name/value pairs. Each data row contains a name, and value. The row also contains a type or mimetype. Type corresponds to a .NET class that support text/value conversion through the TypeConverter architecture. Classes that don't support this are serialized and stored with the mimetype set. The mimetype is used for serialized objects, and tells the ResXResourceReader how to depersist the object. This is currently not extensible. For a given mimetype the value must be set accordingly: Note - application/x-microsoft.net.object.binary.base64 is the format that the ResXResourceWriter will generate, however the reader can read any of the formats listed below. 
mimetype: application/x-microsoft.net.object.binary.base64 value : The object must be serialized with : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.soap.base64 value : The object must be serialized with : System.Runtime.Serialization.Formatters.Soap.SoapFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.bytearray.base64 value : The object must be serialized into a byte array : using a System.ComponentModel.TypeConverter : and then encoded with base64 encoding. --> <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata"> <xsd:import namespace="http://www.w3.org/XML/1998/namespace" /> <xsd:element name="root" msdata:IsDataSet="true"> <xsd:complexType> <xsd:choice maxOccurs="unbounded"> <xsd:element name="metadata"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" /> </xsd:sequence> <xsd:attribute name="name" use="required" type="xsd:string" /> <xsd:attribute name="type" type="xsd:string" /> <xsd:attribute name="mimetype" type="xsd:string" /> <xsd:attribute ref="xml:space" /> </xsd:complexType> </xsd:element> <xsd:element name="assembly"> <xsd:complexType> <xsd:attribute name="alias" type="xsd:string" /> <xsd:attribute name="name" type="xsd:string" /> </xsd:complexType> </xsd:element> <xsd:element name="data"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" /> <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" /> <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" /> <xsd:attribute ref="xml:space" /> </xsd:complexType> </xsd:element> 
<xsd:element name="resheader"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required" /> </xsd:complexType> </xsd:element> </xsd:choice> </xsd:complexType> </xsd:element> </xsd:schema> <resheader name="resmimetype"> <value>text/microsoft-resx</value> </resheader> <resheader name="version"> <value>2.0</value> </resheader> <resheader name="reader"> <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> <resheader name="writer"> <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> <data name="SplitButtonSecondaryButtonName" xml:space="preserve"> <value>Máis opcións</value> <comment>Automation name for the secondary button.</comment> </data> </root>
{ "pile_set_name": "Github" }
null
null
class GenerateConsoleViewTask < Rake::TaskLib attr_accessor :layout, :views def initialize(name) yield self if block_given? define(name) end def define(name) task name => [:environment] do views.each_pair do |view_path, file| File.open(File.join(Rails.root, 'public', file), 'w') do |f| f.write(render(view_path)) end end end end protected def render(template) view.render :template => template.dup, :layout => layout end def controller_class ConsoleController end def controller controller = controller_class.new controller.request = ActionDispatch::TestRequest.new({'SCRIPT_NAME' => ENV['RAILS_RELATIVE_URL_ROOT']}) controller.request.host = host controller.env = controller.request.env controller end def add_view_helpers(view, routes) view.class_eval do include routes.url_helpers include Console::CommunityAware include Console::LayoutHelper include Console::HelpHelper include Console::Html5BoilerplateHelper include Console::ModelHelper include Console::SecuredHelper include Console::CommunityHelper include Console::ConsoleHelper def active_tab nil end def account_settings_redirect account_path end end end def subclass_view(view, routes) host = self.host view.class_eval do def protect_against_forgery? false end def default_url_options {:host => host} end end end def view view = ActionView::Base.new(ActionController::Base.view_paths, {}, controller) routes = Rails.application.routes routes.default_url_options = {:host => self.host} add_view_helpers(view, routes) subclass_view(view, routes) view end def host ENV['RAILS_HOST'] || 'localhost' end end namespace :assets do GenerateConsoleViewTask.new(:public_pages) do |t| t.layout = 'layouts/console' t.views = { 'console/not_found' => '404.html', 'console/error' => '500.html', } end end
{ "pile_set_name": "Github" }
null
null
(function() { var Gitlab, credentials, gitlab; process.stdout.write('\u001B[2J\u001B[0;0f'); Gitlab = require('..'); credentials = require('./credentials'); gitlab = new Gitlab({ url: credentials.url, token: credentials.token }); gitlab.projects.all(function(projects) { var _project, i, len, results; results = []; for (i = 0, len = projects.length; i < len; i++) { _project = projects[i]; results.push((function() { var project; project = _project; return gitlab.projects.hooks.list(project.id, function(hooks) { var hook, j, len1, url; url = "" + credentials.service_hook_base + project.path_with_namespace; if (hooks.length > 1) { return console.log(url + " too much hooks"); } else if (hooks.length === 1) { for (j = 0, len1 = hooks.length; j < len1; j++) { hook = hooks[j]; if (hook.url !== url) { gitlab.projects.hooks.remove(project.id, hook.id, function(ret) { return console.log(ret); }); } } return console.log(url + " is already OK"); } else { return gitlab.projects.hooks.add(project.id, url, function() { return console.log(url + " has been added"); }); } }); })()); } return results; }); }).call(this);
{ "pile_set_name": "Github" }
null
null
namespace GraphQL.Types.Relay { public class EdgeType<TNodeType> : ObjectGraphType<object> where TNodeType : IGraphType { public EdgeType() { Name = string.Format("{0}Edge", typeof(TNodeType).GraphQLName()); Description = string.Format( "An edge in a connection from an object to another object of type `{0}`.", typeof(TNodeType).GraphQLName()); Field<NonNullGraphType<StringGraphType>>() .Name("cursor") .Description("A cursor for use in pagination"); Field<TNodeType>() .Name("node") .Description("The item at the end of the edge"); } } }
{ "pile_set_name": "Github" }
null
null
In this game, you are robot (#). Your job is to find kitten. This task is complicated by the existence of various things which are not kitten. Robot must touch items to determine if they are kitten or not. The game ends when robotfindskitten.
{ "pile_set_name": "Github" }
null
null
package com.uwsoft.editor.proxy; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map.Entry; import com.badlogic.gdx.graphics.Color; import com.badlogic.gdx.graphics.Pixmap; import com.badlogic.gdx.graphics.Texture; import com.uwsoft.editor.renderer.data.*; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import com.badlogic.gdx.Files; import com.badlogic.gdx.Gdx; import com.badlogic.gdx.files.FileHandle; import com.badlogic.gdx.graphics.g2d.BitmapFont; import com.badlogic.gdx.graphics.g2d.ParticleEffect; import com.badlogic.gdx.graphics.g2d.TextureAtlas; import com.badlogic.gdx.graphics.g2d.TextureRegion; import com.badlogic.gdx.graphics.g2d.freetype.FreeTypeFontGenerator; import com.badlogic.gdx.graphics.glutils.ShaderProgram; import com.badlogic.gdx.utils.Json; import com.puremvc.patterns.proxy.BaseProxy; import com.uwsoft.editor.data.SpineAnimData; import com.uwsoft.editor.Overlap2DFacade; import com.uwsoft.editor.renderer.resources.FontSizePair; import com.uwsoft.editor.renderer.resources.IResourceRetriever; import com.uwsoft.editor.renderer.utils.MySkin; /** * Created by azakhary on 4/26/2015. 
*/ public class ResourceManager extends BaseProxy implements IResourceRetriever { public String packResolutionName = "orig"; private static final String TAG = ResourceManager.class.getCanonicalName(); public static final String NAME = TAG; private HashMap<String, ParticleEffect> particleEffects = new HashMap<String, ParticleEffect>(1); private TextureAtlas currentProjectAtlas; private HashMap<String, SpineAnimData> spineAnimAtlases = new HashMap<String, SpineAnimData>(); private HashMap<String, TextureAtlas> spriteAnimAtlases = new HashMap<String, TextureAtlas>(); private HashMap<String, FileHandle> spriterAnimFiles = new HashMap<String, FileHandle>(); private HashMap<FontSizePair, BitmapFont> bitmapFonts = new HashMap<>(); private HashMap<String, ShaderProgram> shaderPrograms = new HashMap<String, ShaderProgram>(1); private TextureRegion defaultRegion; private ResolutionManager resolutionManager; public ResourceManager() { super(NAME); } @Override public void onRegister() { super.onRegister(); facade = Overlap2DFacade.getInstance(); resolutionManager = facade.retrieveProxy(ResolutionManager.NAME); // TODO: substitute this with "NO IMAGE" icon Pixmap pixmap = new Pixmap(50, 50, Pixmap.Format.RGBA8888); pixmap.setColor(new Color(1, 1, 1, 0.4f)); pixmap.fill(); defaultRegion = new TextureRegion(new Texture(pixmap)); } @Override public TextureRegion getTextureRegion(String name) { TextureRegion reg = currentProjectAtlas.findRegion(name); if(reg == null) { reg = defaultRegion; } return reg; } public TextureAtlas getTextureAtlas() { return currentProjectAtlas; } @Override public ParticleEffect getParticleEffect(String name) { return new ParticleEffect(particleEffects.get(name)); } @Override public TextureAtlas getSkeletonAtlas(String animationName) { SpineAnimData animData = spineAnimAtlases.get(animationName); return animData.atlas; } /** * Sets working resolution, please set before doing any loading * @param resolution String resolution name, default is "orig" later 
use resolution names created in editor */ public void setWorkingResolution(String resolution) { ResolutionEntryVO resolutionObject = getProjectVO().getResolution("resolutionName"); if(resolutionObject != null) { packResolutionName = resolution; } } @Override public FileHandle getSkeletonJSON(String animationName) { SpineAnimData animData = spineAnimAtlases.get(animationName); return animData.jsonFile; } @Override public FileHandle getSCMLFile(String name) { return spriterAnimFiles.get(name); } @Override public TextureAtlas getSpriteAnimation(String animationName) { return spriteAnimAtlases.get(animationName); } @Override public BitmapFont getBitmapFont(String fontName, int fontSize) { FontSizePair pair = new FontSizePair(fontName, fontSize); return bitmapFonts.get(pair); } @Override public MySkin getSkin() { //return textureManager.projectSkin; // not sure if we are going to use skins for labels return null; } @Override public ProjectInfoVO getProjectVO() { ProjectManager projectManager = facade.retrieveProxy(ProjectManager.NAME); return projectManager.getCurrentProjectInfoVO(); } @Override public SceneVO getSceneVO(String name) { SceneDataManager sceneDataManager = facade.retrieveProxy(SceneDataManager.NAME); // TODO: this should be cached FileHandle file = Gdx.files.internal(sceneDataManager.getCurrProjectScenePathByName(name)); Json json = new Json(); json.setIgnoreUnknownFields(true); return json.fromJson(SceneVO.class, file.readString()); } public void loadCurrentProjectData(String projectPath, String curResolution) { packResolutionName = curResolution; loadCurrentProjectAssets(projectPath + "/assets/" + curResolution + "/pack/pack.atlas"); loadCurrentProjectSkin(projectPath + "/assets/orig/styles"); loadCurrentProjectParticles(projectPath + "/assets/orig/particles"); loadCurrentProjectSpineAnimations(projectPath + "/assets/", curResolution); loadCurrentProjectSpriteAnimations(projectPath + "/assets/", curResolution); 
loadCurrentProjectSpriterAnimations(projectPath + "/assets/", curResolution); loadCurrentProjectBitmapFonts(projectPath, curResolution); loadCurrentProjectShaders(projectPath + "/assets/shaders/"); } private void loadCurrentProjectParticles(String path) { particleEffects.clear(); FileHandle sourceDir = new FileHandle(path); for (FileHandle entry : sourceDir.list()) { File file = entry.file(); String filename = file.getName(); if (file.isDirectory() || filename.endsWith(".DS_Store")) continue; ParticleEffect particleEffect = new ParticleEffect(); particleEffect.load(Gdx.files.internal(file.getAbsolutePath()), currentProjectAtlas, ""); particleEffects.put(filename, particleEffect); } } private void loadCurrentProjectSpineAnimations(String path, String curResolution) { spineAnimAtlases.clear(); FileHandle sourceDir = new FileHandle(path + "orig/spine-animations"); for (FileHandle entry : sourceDir.list()) { if (entry.file().isDirectory()) { String animName = FilenameUtils.removeExtension(entry.file().getName()); TextureAtlas atlas = new TextureAtlas(Gdx.files.internal(path + curResolution + "/spine-animations/" + File.separator + animName + File.separator + animName + ".atlas")); FileHandle animJsonFile = Gdx.files.internal(entry.file().getAbsolutePath() + File.separator + animName + ".json"); Spine
{ "pile_set_name": "Github" }
null
null
/* * Copyright (C) 2010-2012 Project SkyFire <http://www.projectskyfire.org/> * Copyright (C) 2005-2012 MaNGOS <http://www.getmangos.com/> * Copyright (C) 2008-2012 Trinity <http://www.trinitycore.org/> * Copyright (C) 2005-2012 ScriptDev2 <http://http://www.scriptdev2.com/> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #ifndef DEF_ONYXIAS_LAIR_H #define DEF_ONYXIAS_LAIR_H enum eData64 { DATA_ONYXIA_GUID, DATA_FLOOR_ERUPTION_GUID }; enum eInstanceData { DATA_ONYXIA, MAX_ENCOUNTER, DATA_ONYXIA_PHASE, DATA_SHE_DEEP_BREATH_MORE, DATA_MANY_WHELPS_COUNT }; enum eCreatures { NPC_WHELP = 11262, NPC_LAIRGUARD = 36561, NPC_ONYXIA = 10184 }; enum eOnyxiaPhases { PHASE_START = 1, PHASE_BREATH = 2, PHASE_END = 3 }; enum eGameObjects { GO_WHELP_SPAWNER = 176510, GO_WHELP_EGG = 176511 }; enum eAchievementData { ACHIEV_CRITERIA_MANY_WHELPS_10_PLAYER = 12565, // Criteria for achievement 4403: Many Whelps! Handle It! (10 player) Hatch 50 eggs in 10s ACHIEV_CRITERIA_MANY_WHELPS_25_PLAYER = 12568, // Criteria for achievement 4406: Many Whelps! Handle It! 
(25 player) Hatch 50 eggs in 10s ACHIEV_CRITERIA_DEEP_BREATH_10_PLAYER = 12566, // Criteria for achievement 4404: She Deep Breaths More (10 player) Everybody evade Deep Breath ACHIEV_CRITERIA_DEEP_BREATH_25_PLAYER = 12569, // Criteria for achievement 4407: She Deep Breaths More (25 player) Everybody evade Deep Breath ACHIEV_TIMED_START_EVENT = 6601, // Timed event for achievement 4402, 4005: More Dots! (10, 25 player) 5 min kill }; #endif
{ "pile_set_name": "Github" }
null
null
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.hadoop.resourceestimator.translator.api; import java.text.ParseException; import org.apache.hadoop.resourceestimator.common.api.RecurrenceId; import org.apache.hadoop.resourceestimator.translator.impl.LogParserUtil; import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.server.resourcemanager.reservation.RLESparseResourceAllocation; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; /** * Test JobMetaData. */ public class TestJobMetaData { /** * TODO: parametrize this test. 
*/ private LogParserUtil logParserUtil = new LogParserUtil(); private JobMetaData jobMetaData; private RecurrenceId recurrenceId; @Before public final void setup() throws ParseException { recurrenceId = new RecurrenceId("Fraud Detection", "17/07/16 16:27:25"); jobMetaData = new JobMetaData( logParserUtil.stringToUnixTimestamp("17/07/16 16:27:25")); jobMetaData.setRecurrenceId(recurrenceId); jobMetaData.setContainerStart("C1", logParserUtil.stringToUnixTimestamp("17/07/16 16:27:30")); jobMetaData.setContainerEnd("C1", logParserUtil.stringToUnixTimestamp("17/07/16 16:37:30")); jobMetaData.setContainerStart("C2", logParserUtil.stringToUnixTimestamp("17/07/16 16:27:40")); jobMetaData.setContainerEnd("C2", logParserUtil.stringToUnixTimestamp("17/07/16 16:37:40")); jobMetaData.setJobFinishTime( logParserUtil.stringToUnixTimestamp("17/07/16 16:37:45")); final Resource containerAlloc = Resource.newInstance(1, 1); jobMetaData.getResourceSkyline().setContainerSpec(containerAlloc); jobMetaData.getResourceSkyline().setJobInputDataSize(1024.5); jobMetaData.createSkyline(); } @Test public final void testGetContainerSpec() { final Resource containerAlloc = jobMetaData.getResourceSkyline().getContainerSpec(); final Resource containerAlloc2 = Resource.newInstance(1, 1); Assert.assertEquals(containerAlloc.getMemorySize(), containerAlloc2.getMemorySize()); Assert.assertEquals(containerAlloc.getVirtualCores(), containerAlloc2.getVirtualCores()); } @Test public final void testGetJobSize() { Assert.assertEquals(jobMetaData.getResourceSkyline().getJobInputDataSize(), 1024.5, 0); } @Test public final void testGetRecurrenceeId() { final RecurrenceId recurrenceIdTest = new RecurrenceId("Fraud Detection", "17/07/16 16:27:25"); Assert.assertEquals(recurrenceIdTest, jobMetaData.getRecurrenceId()); } @Test public final void testStringToUnixTimestamp() throws ParseException { final long submissionTime = logParserUtil.stringToUnixTimestamp("17/07/16 16:27:25"); 
Assert.assertEquals(jobMetaData.getResourceSkyline().getJobSubmissionTime(), submissionTime); } @Test public final void testResourceSkyline() { final RLESparseResourceAllocation skylineList = jobMetaData.getResourceSkyline().getSkylineList(); final int containerCPU = jobMetaData.getResourceSkyline().getContainerSpec().getVirtualCores(); int k; for (k = 0; k < 5; k++) { Assert.assertEquals(0, skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU); } for (k = 5; k < 15; k++) { Assert.assertEquals(1, skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU); } for (k = 15; k < 605; k++) { Assert.assertEquals(2, skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU); } for (k = 605; k < 615; k++) { Assert.assertEquals(1, skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU); } Assert.assertEquals(0, skylineList.getCapacityAtTime(615).getVirtualCores() / containerCPU); } @Test public final void testContainerReleaseTimeMissing() throws ParseException { // create an invalid JobMetaData recurrenceId = new RecurrenceId("Fraud Detection", "17/07/16 16:27:25"); jobMetaData = new JobMetaData( logParserUtil.stringToUnixTimestamp("17/07/16 16:27:25")); jobMetaData.setRecurrenceId(recurrenceId); jobMetaData.setContainerStart("C1", logParserUtil.stringToUnixTimestamp("17/07/16 16:27:30")); jobMetaData.setContainerEnd("C1", logParserUtil.stringToUnixTimestamp("17/07/16 16:37:30")); jobMetaData.setContainerStart("C2", logParserUtil.stringToUnixTimestamp("17/07/16 16:27:40")); jobMetaData.setJobFinishTime( logParserUtil.stringToUnixTimestamp("17/07/16 16:37:45")); final Resource containerAlloc = Resource.newInstance(1, 1); jobMetaData.getResourceSkyline().setContainerSpec(containerAlloc); jobMetaData.getResourceSkyline().setJobInputDataSize(1024.5); jobMetaData.createSkyline(); // test the generated ResourceSkyline final RLESparseResourceAllocation skylineList = jobMetaData.getResourceSkyline().getSkylineList(); final int containerCPU = 
jobMetaData.getResourceSkyline().getContainerSpec().getVirtualCores(); int k; for (k = 0; k < 5; k++) { Assert.assertEquals(0, skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU); } for (k = 5; k < 605; k++) { Assert.assertEquals(1, skylineList.getCapacityAtTime(k).getVirtualCores() / containerCPU); } Assert.assertEquals(0, skylineList.getCapacityAtTime(605).getVirtualCores() / containerCPU); } @After public final void cleanUp() { jobMetaData = null; recurrenceId = null; logParserUtil = null; } }
{ "pile_set_name": "Github" }
null
null
include_directories( include ${intergen_SOURCE_DIR}/model/include ${intergen_SOURCE_DIR}/utils/include ) set (SOURCES src/cppgen/comment.cc src/cppgen/cpp_api_code_generator.cc src/cppgen/cpp_class.cc src/cppgen/cpp_file.cc src/cppgen/cpp_function.cc src/cppgen/cpp_interface_code_generator.cc src/cppgen/declaration_generator.cc src/cppgen/definition_generator.cc src/cppgen/enum_from_json_value_function.cc src/cppgen/enum_to_json_value_function.cc src/cppgen/function_id_method.cc src/cppgen/generator_preferences.cc src/cppgen/handler_interface.cc src/cppgen/is_valid_enum_function.cc src/cppgen/literal_generator.cc src/cppgen/message_factory_function.cc src/cppgen/message_handle_with_method.cc src/cppgen/message_interface.cc src/cppgen/module_manager.cc src/cppgen/namespace.cc src/cppgen/naming_convention.cc src/cppgen/struct_type_constructor.cc src/cppgen/struct_type_from_json_method.cc src/cppgen/struct_type_is_initialized_method.cc src/cppgen/struct_type_is_valid_method.cc src/cppgen/struct_type_report_erros_method.cc src/cppgen/type_name_code_generator.cc ) set (HEADERS include/cppgen/comment.h include/cppgen/cpp_api_code_generator.h include/cppgen/cpp_class.h include/cppgen/cpp_file.h include/cppgen/cpp_function.h include/cppgen/cpp_interface_code_generator.h include/cppgen/declaration_generator.h include/cppgen/definition_generator.h include/cppgen/enum_from_json_value_function.h include/cppgen/enum_to_json_value_function.h include/cppgen/function_id_method.h include/cppgen/generator_preferences.h include/cppgen/handler_interface.h include/cppgen/is_valid_enum_function.h include/cppgen/literal_generator.h include/cppgen/message_factory_function.h include/cppgen/message_handle_with_method.h include/cppgen/message_interface.h include/cppgen/module_manager.h include/cppgen/namespace.h include/cppgen/naming_convention.h include/cppgen/struct_type_constructor.h include/cppgen/struct_type_from_json_method.h include/cppgen/struct_type_is_initialized_method.h 
include/cppgen/struct_type_is_valid_method.h include/cppgen/struct_type_report_erros_method.h include/cppgen/type_name_code_generator.h ) add_library(intergen_cppgen ${HEADERS} ${SOURCES}) target_link_libraries(intergen_cppgen intergen_model intergen_utils)
{ "pile_set_name": "Github" }
null
null
/* * Copyright (c) 2016 Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /** * JDK-8170594: >>>=0 generates invalid bytecode for BaseNode LHS * * @test * @run */ var obj1 = {x: "100"}; (function (o, p) { if (p) { o.x >>>= 0; } })(obj1, true) Assert.assertTrue(obj1.x === 100) var obj2 = ["100"]; (function (o, p) { if (p) { o[0] >>>= 0; } })(obj2, true) Assert.assertTrue(obj2[0] === 100)
{ "pile_set_name": "Github" }
null
null
package drds //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. // VersionsItem is a nested struct in drds response type VersionsItem struct { DrdsVersion string `json:"DrdsVersion" xml:"DrdsVersion"` Latest bool `json:"Latest" xml:"Latest"` }
{ "pile_set_name": "Github" }
null
null
package solver import ( "context" "io" "time" "github.com/moby/buildkit/client" "github.com/moby/buildkit/util/progress" digest "github.com/opencontainers/go-digest" "github.com/sirupsen/logrus" ) func (j *Job) Status(ctx context.Context, ch chan *client.SolveStatus) error { vs := &vertexStream{cache: map[digest.Digest]*client.Vertex{}} pr := j.pr.Reader(ctx) defer func() { if enc := vs.encore(); len(enc) > 0 { ch <- &client.SolveStatus{Vertexes: enc} } close(ch) }() for { p, err := pr.Read(ctx) if err != nil { if err == io.EOF { return nil } return err } ss := &client.SolveStatus{} for _, p := range p { switch v := p.Sys.(type) { case client.Vertex: ss.Vertexes = append(ss.Vertexes, vs.append(v)...) case progress.Status: vtx, ok := p.Meta("vertex") if !ok { logrus.Warnf("progress %s status without vertex info", p.ID) continue } vs := &client.VertexStatus{ ID: p.ID, Vertex: vtx.(digest.Digest), Name: v.Action, Total: int64(v.Total), Current: int64(v.Current), Timestamp: p.Timestamp, Started: v.Started, Completed: v.Completed, } ss.Statuses = append(ss.Statuses, vs) case client.VertexLog: vtx, ok := p.Meta("vertex") if !ok { logrus.Warnf("progress %s log without vertex info", p.ID) continue } v.Vertex = vtx.(digest.Digest) v.Timestamp = p.Timestamp ss.Logs = append(ss.Logs, &v) } } select { case <-ctx.Done(): return ctx.Err() case ch <- ss: } } } type vertexStream struct { cache map[digest.Digest]*client.Vertex } func (vs *vertexStream) append(v client.Vertex) []*client.Vertex { var out []*client.Vertex vs.cache[v.Digest] = &v if v.Started != nil { for _, inp := range v.Inputs { if inpv, ok := vs.cache[inp]; ok { if !inpv.Cached && inpv.Completed == nil { inpv.Cached = true inpv.Started = v.Started inpv.Completed = v.Started out = append(out, vs.append(*inpv)...) 
delete(vs.cache, inp) } } } } vcopy := v return append(out, &vcopy) } func (vs *vertexStream) encore() []*client.Vertex { var out []*client.Vertex for _, v := range vs.cache { if v.Started != nil && v.Completed == nil { now := time.Now() v.Completed = &now v.Error = context.Canceled.Error() out = append(out, v) } } return out }
{ "pile_set_name": "Github" }
null
null
proc testAnonRanges(type lowT, type countT) { var zero = 0:countT; // Applying #0 to a 0.. uint range results in wraparound leading to // an error when trying to iterate over it when bounds checks are // on. for i in 0:lowT..#(0:countT) do write(i, ' '); writeln(); for i in 0:lowT..#(zero) do write(i, ' '); writeln(); for i in 0:lowT..#(1:countT) do write(i, ' '); writeln(); for i in 0:lowT..#(10:countT) by 2:lowT do write(i, ' '); writeln(); for i in (0:lowT.. by 2:lowT) #(10:countT) do write(i, ' '); writeln(); for i in 10:lowT..#10:countT do write(i, ' '); writeln(); } testAnonRanges(uint(64), int(64));
{ "pile_set_name": "Github" }
null
null
using System; using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore.Infrastructure; using Microsoft.EntityFrameworkCore.Metadata; using Microsoft.EntityFrameworkCore.Migrations; using Migrations.Context; namespace ComputedColumns.EF.Migrations { [DbContext(typeof(StoreContext))] partial class StoreContextModelSnapshot : ModelSnapshot { protected override void BuildModel(ModelBuilder modelBuilder) { modelBuilder .HasAnnotation("ProductVersion", "1.1.0-rtm-22752") .HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn); modelBuilder.Entity("ComputedColumns.Models.Order", b => { b.Property<int>("Id") .ValueGeneratedOnAdd() .HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn); b.Property<int>("CustomerId"); b.Property<DateTime>("OrderDate") .ValueGeneratedOnAdd() .HasColumnType("datetime") .HasDefaultValueSql("getdate()"); b.Property<decimal?>("OrderTotal") .ValueGeneratedOnAddOrUpdate() .HasColumnType("money") .HasComputedColumnSql("Store.GetOrderTotal([Id])"); b.Property<DateTime>("ShipDate") .ValueGeneratedOnAdd() .HasColumnType("datetime") .HasDefaultValueSql("getdate()"); b.Property<byte[]>("TimeStamp") .IsConcurrencyToken() .ValueGeneratedOnAddOrUpdate(); b.HasKey("Id"); b.ToTable("Orders","Store"); }); modelBuilder.Entity("ComputedColumns.Models.OrderDetail", b => { b.Property<int>("Id") .ValueGeneratedOnAdd() .HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn); b.Property<decimal?>("LineItemTotal") .ValueGeneratedOnAddOrUpdate() .HasColumnType("money") .HasComputedColumnSql("[Quantity]*[UnitCost]"); b.Property<int>("OrderId"); b.Property<int>("Quantity"); b.Property<byte[]>("TimeStamp") .IsConcurrencyToken() .ValueGeneratedOnAddOrUpdate(); b.Property<decimal>("UnitCost") .HasColumnType("money"); b.HasKey("Id"); b.HasIndex("OrderId"); b.ToTable("OrderDetails","Store"); }); 
modelBuilder.Entity("ComputedColumns.Models.OrderDetail", b => { b.HasOne("ComputedColumns.Models.Order", "Order") .WithMany("OrderDetails") .HasForeignKey("OrderId") .OnDelete(DeleteBehavior.Cascade); }); } } }
{ "pile_set_name": "Github" }
null
null
/* This source file must have a .cpp extension so that all C++ compilers recognize the extension without flags. Borland does not know .cxx for example. */ #ifndef __cplusplus # error "A C compiler has been selected for C++." #endif /* Version number components: V=Version, R=Revision, P=Patch Version date components: YYYY=Year, MM=Month, DD=Day */ #if defined(__COMO__) # define COMPILER_ID "Comeau" /* __COMO_VERSION__ = VRR */ # define COMPILER_VERSION_MAJOR DEC(__COMO_VERSION__ / 100) # define COMPILER_VERSION_MINOR DEC(__COMO_VERSION__ % 100) #elif defined(__INTEL_COMPILER) || defined(__ICC) # define COMPILER_ID "Intel" /* __INTEL_COMPILER = VRP */ # define COMPILER_VERSION_MAJOR DEC(__INTEL_COMPILER/100) # define COMPILER_VERSION_MINOR DEC(__INTEL_COMPILER/10 % 10) # define COMPILER_VERSION_PATCH DEC(__INTEL_COMPILER % 10) # if defined(__INTEL_COMPILER_BUILD_DATE) /* __INTEL_COMPILER_BUILD_DATE = YYYYMMDD */ # define COMPILER_VERSION_TWEAK DEC(__INTEL_COMPILER_BUILD_DATE) # endif #elif defined(__PATHCC__) # define COMPILER_ID "PathScale" # define COMPILER_VERSION_MAJOR DEC(__PATHCC__) # define COMPILER_VERSION_MINOR DEC(__PATHCC_MINOR__) # if defined(__PATHCC_PATCHLEVEL__) # define COMPILER_VERSION_PATCH DEC(__PATHCC_PATCHLEVEL__) # endif #elif defined(__clang__) # define COMPILER_ID "Clang" # define COMPILER_VERSION_MAJOR DEC(__clang_major__) # define COMPILER_VERSION_MINOR DEC(__clang_minor__) # define COMPILER_VERSION_PATCH DEC(__clang_patchlevel__) #elif defined(__BORLANDC__) && defined(__CODEGEARC_VERSION__) # define COMPILER_ID "Embarcadero" # define COMPILER_VERSION_MAJOR HEX(__CODEGEARC_VERSION__>>24 & 0x00FF) # define COMPILER_VERSION_MINOR HEX(__CODEGEARC_VERSION__>>16 & 0x00FF) # define COMPILER_VERSION_PATCH HEX(__CODEGEARC_VERSION__ & 0xFFFF) #elif defined(__BORLANDC__) # define COMPILER_ID "Borland" /* __BORLANDC__ = 0xVRR */ # define COMPILER_VERSION_MAJOR HEX(__BORLANDC__>>8) # define COMPILER_VERSION_MINOR HEX(__BORLANDC__ & 0xFF) #elif 
defined(__WATCOMC__) # define COMPILER_ID "Watcom" /* __WATCOMC__ = VVRR */ # define COMPILER_VERSION_MAJOR DEC(__WATCOMC__ / 100) # define COMPILER_VERSION_MINOR DEC(__WATCOMC__ % 100) #elif defined(__SUNPRO_CC) # define COMPILER_ID "SunPro" # if __SUNPRO_CC >= 0x5100 /* __SUNPRO_CC = 0xVRRP */ # define COMPILER_VERSION_MAJOR HEX(__SUNPRO_CC>>12) # define COMPILER_VERSION_MINOR HEX(__SUNPRO_CC>>4 & 0xFF) # define COMPILER_VERSION_PATCH HEX(__SUNPRO_CC & 0xF) # else /* __SUNPRO_CC = 0xVRP */ # define COMPILER_VERSION_MAJOR HEX(__SUNPRO_CC>>8) # define COMPILER_VERSION_MINOR HEX(__SUNPRO_CC>>4 & 0xF) # define COMPILER_VERSION_PATCH HEX(__SUNPRO_CC & 0xF) # endif #elif defined(__HP_aCC) # define COMPILER_ID "HP" /* __HP_aCC = VVRRPP */ # define COMPILER_VERSION_MAJOR DEC(__HP_aCC/10000) # define COMPILER_VERSION_MINOR DEC(__HP_aCC/100 % 100) # define COMPILER_VERSION_PATCH DEC(__HP_aCC % 100) #elif defined(__DECCXX) # define COMPILER_ID "Compaq" /* __DECCXX_VER = VVRRTPPPP */ # define COMPILER_VERSION_MAJOR DEC(__DECCXX_VER/10000000) # define COMPILER_VERSION_MINOR DEC(__DECCXX_VER/100000 % 100) # define COMPILER_VERSION_PATCH DEC(__DECCXX_VER % 10000) #elif defined(__IBMCPP__) # if defined(__COMPILER_VER__) # define COMPILER_ID "zOS" # else # if __IBMCPP__ >= 800 # define COMPILER_ID "XL" # else # define COMPILER_ID "VisualAge" # endif /* __IBMCPP__ = VRP */ # define COMPILER_VERSION_MAJOR DEC(__IBMCPP__/100) # define COMPILER_VERSION_MINOR DEC(__IBMCPP__/10 % 10) # define COMPILER_VERSION_PATCH DEC(__IBMCPP__ % 10) # endif #elif defined(__PGI) # define COMPILER_ID "PGI" # define COMPILER_VERSION_MAJOR DEC(__PGIC__) # define COMPILER_VERSION_MINOR DEC(__PGIC_MINOR__) # if defined(__PGIC_PATCHLEVEL__) # define COMPILER_VERSION_PATCH DEC(__PGIC_PATCHLEVEL__) # endif #elif defined(_CRAYC) # define COMPILER_ID "Cray" # define COMPILER_VERSION_MAJOR DEC(_RELEASE) # define COMPILER_VERSION_MINOR DEC(_RELEASE_MINOR) #elif defined(__TI_COMPILER_VERSION__) # define 
COMPILER_ID "TI" /* __TI_COMPILER_VERSION__ = VVVRRRPPP */ # define COMPILER_VERSION_MAJOR DEC(__TI_COMPILER_VERSION__/1000000) # define COMPILER_VERSION_MINOR DEC(__TI_COMPILER_VERSION__/1000 % 1000) # define COMPILER_VERSION_PATCH DEC(__TI_COMPILER_VERSION__ % 1000) #elif defined(__SCO_VERSION__) # define COMPILER_ID "SCO" #elif defined(__GNUC__) # define COMPILER_ID "GNU" # define COMPILER_VERSION_MAJOR DEC(__GNUC__) # define COMPILER_VERSION_MINOR DEC(__GNUC_MINOR__) # if defined(__GNUC_PATCHLEVEL__) # define COMPILER_VERSION_PATCH DEC(__GNUC_PATCHLEVEL__) # endif #elif defined(_MSC_VER) # define COMPILER_ID "MSVC" /* _MSC_VER = VVRR */ # define COMPILER_VERSION_MAJOR DEC(_MSC_VER / 100) # define COMPILER_VERSION_MINOR DEC(_MSC_VER % 100) # if defined(_MSC_FULL_VER) # if _MSC_VER >= 1400 /* _MSC_FULL_VER = VVRRPPPPP */ # define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 100000) # else /* _MSC_FULL_VER = VVRRPPPP */ # define COMPILER_VERSION_PATCH DEC(_MSC_FULL_VER % 10000) # endif # endif # if defined(_MSC_BUILD) # define COMPILER_VERSION_TWEAK DEC(_MSC_BUILD) # endif /* Analog VisualDSP++ >= 4.5.6 */ #elif defined(__VISUALDSPVERSION__) # define COMPILER_ID "ADSP"
{ "pile_set_name": "Github" }
null
null
/* * This file is part of Telegram Server * Copyright (C) 2015 Aykut Alparslan KOÇ * * Telegram Server is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Telegram Server is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.telegram.tl.auth; import org.telegram.mtproto.ProtocolBuffer; import org.telegram.tl.*; public class BindTempAuthKey extends TLObject { public static final int ID = -841733627; public long perm_auth_key_id; public long nonce; public int expires_at; public byte[] encrypted_message; public BindTempAuthKey() { } public BindTempAuthKey(long perm_auth_key_id, long nonce, int expires_at, byte[] encrypted_message){ this.perm_auth_key_id = perm_auth_key_id; this.nonce = nonce; this.expires_at = expires_at; this.encrypted_message = encrypted_message; } @Override public void deserialize(ProtocolBuffer buffer) { perm_auth_key_id = buffer.readLong(); nonce = buffer.readLong(); expires_at = buffer.readInt(); encrypted_message = buffer.readBytes(); } @Override public ProtocolBuffer serialize() { ProtocolBuffer buffer = new ProtocolBuffer(32); serializeTo(buffer); return buffer; } @Override public void serializeTo(ProtocolBuffer buff) { buff.writeInt(getConstructor()); buff.writeLong(perm_auth_key_id); buff.writeLong(nonce); buff.writeInt(expires_at); buff.writeBytes(encrypted_message); } public int getConstructor() { return ID; } }
{ "pile_set_name": "Github" }
null
null