repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
valadas/Dnn.Platform | Dnn.AdminExperience/ClientSide/Seo.Web/src/constants/actionTypes/seo.js | 1401 | const seoActionTypes = {
// Redux action-type constants for the SEO settings admin UI.
// NOTE(review): "SETTINS" (missing G) is a typo baked into the string
// values below; reducers/dispatchers match these exact strings, so the
// spelling must NOT be corrected without updating every consumer.
RETRIEVED_SEO_GENERAL_SETTINGS: "RETRIEVED_SEO_GENERAL_SETTINGS",
UPDATED_SEO_GENERAL_SETTINGS: "UPDATED_SEO_GENERAL_SETTINGS",
SEO_GENERAL_SETTINS_CLIENT_MODIFIED: "SEO_GENERAL_SETTINS_CLIENT_MODIFIED",
RETRIEVED_SEO_REGEX_SETTINGS: "RETRIEVED_SEO_REGEX_SETTINGS",
UPDATED_SEO_REGEX_SETTINGS: "UPDATED_SEO_REGEX_SETTINGS",
SEO_REGEX_SETTINS_CLIENT_MODIFIED: "SEO_REGEX_SETTINS_CLIENT_MODIFIED",
TESTED_SEO_PAGE_URL: "TESTED_SEO_PAGE_URL",
TESTED_SEO_URL_REWRITING: "TESTED_SEO_URL_REWRITING",
CLEARED_SEO_TEST_PAGE_URL_RESULTS: "CLEARED_SEO_TEST_PAGE_URL_RESULTS",
CLEARED_SEO_TEST_URL_REWRITING_RESULTS: "CLEARED_SEO_TEST_URL_REWRITING_RESULTS",
RETRIEVED_SEO_SITEMAP_SETTINGS: "RETRIEVED_SEO_SITEMAP_SETTINGS",
UPDATED_SEO_SITEMAP_SETTINGS: "UPDATED_SEO_SITEMAP_SETTINGS",
SEO_SITEMAP_SETTINS_CLIENT_MODIFIED: "SEO_SITEMAP_SETTINS_CLIENT_MODIFIED",
CLEARED_SEO_SITEMAP_CACHE: "CLEARED_SEO_SITEMAP_CACHE",
RETRIEVED_SEO_SITEMAP_PROVIDERS: "RETRIEVED_SEO_SITEMAP_PROVIDERS",
UPDATED_SEO_SITEMAP_PROVIDER: "UPDATED_SEO_SITEMAP_PROVIDER",
CREATED_SEO_SITEMAP_VERIFICATION: "CREATED_SEO_SITEMAP_VERIFICATION",
RETRIEVED_SEO_EXTENSION_URL_PROVIDERS: "RETRIEVED_SEO_EXTENSION_URL_PROVIDERS",
UPDATED_SEO_EXTENSION_URL_PROVIDER: "UPDATED_SEO_EXTENSION_URL_PROVIDER"
};
export default seoActionTypes;
| mit |
gmoigneu/platformsh-integrations | src/Sylius/Component/Channel/spec/Context/RequestBased/CompositeRequestResolverSpec.php | 2822 | <?php
/*
* This file is part of the Sylius package.
*
* (c) Paweł Jędrzejewski
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
declare(strict_types=1);
namespace spec\Sylius\Component\Channel\Context\RequestBased;
use PhpSpec\ObjectBehavior;
use Sylius\Component\Channel\Context\RequestBased\RequestResolverInterface;
use Sylius\Component\Channel\Model\ChannelInterface;
use Symfony\Component\HttpFoundation\Request;
/**
* @author Kamil Kokot <kamil@kokot.me>
*/
// phpspec specification for CompositeRequestResolver: a resolver that
// delegates channel lookup to an ordered list of nested resolvers and
// returns the first non-null channel found.
final class CompositeRequestResolverSpec extends ObjectBehavior
{
// The composite must itself be usable wherever a single resolver is expected.
function it_implements_request_resolver_interface(): void
{
$this->shouldImplement(RequestResolverInterface::class);
}
// With no nested resolvers registered, the composite resolves nothing.
function it_returns_null_if_there_are_no_nested_request_resolvers_added(Request $request): void
{
$this->findChannel($request)->shouldReturn(null);
}
// A nested resolver that finds nothing must not make the composite fail.
function it_returns_null_if_none_of_nested_request_resolvers_returned_channel(
Request $request,
RequestResolverInterface $requestResolver
): void {
$requestResolver->findChannel($request)->willReturn(null);
$this->addResolver($requestResolver);
$this->findChannel($request)->shouldReturn(null);
}
// Resolution short-circuits: once a resolver returns a channel, later
// resolvers must not even be invoked (asserted via shouldNotBeCalled).
function it_returns_first_result_returned_by_nested_request_resolvers(
Request $request,
RequestResolverInterface $firstRequestResolver,
RequestResolverInterface $secondRequestResolver,
RequestResolverInterface $thirdRequestResolver,
ChannelInterface $channel
): void {
$firstRequestResolver->findChannel($request)->willReturn(null);
$secondRequestResolver->findChannel($request)->willReturn($channel);
$thirdRequestResolver->findChannel($request)->shouldNotBeCalled();
$this->addResolver($firstRequestResolver);
$this->addResolver($secondRequestResolver);
$this->addResolver($thirdRequestResolver);
$this->findChannel($request)->shouldReturn($channel);
}
// addResolver() takes an optional priority; here priority 0 wins over -5,
// and the -5 resolver is skipped entirely once a channel is found.
function its_nested_request_resolvers_can_have_priority(
Request $request,
RequestResolverInterface $firstRequestResolver,
RequestResolverInterface $secondRequestResolver,
RequestResolverInterface $thirdRequestResolver,
ChannelInterface $channel
): void {
$firstRequestResolver->findChannel($request)->shouldNotBeCalled();
$secondRequestResolver->findChannel($request)->willReturn($channel);
$thirdRequestResolver->findChannel($request)->willReturn(null);
$this->addResolver($firstRequestResolver, -5);
$this->addResolver($secondRequestResolver, 0);
$this->addResolver($thirdRequestResolver, 5);
$this->findChannel($request)->shouldReturn($channel);
}
}
| mit |
nzavagli/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/Twisted-15.2.1/twisted/internet/test/test_qtreactor.py | 752 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
import sys
from twisted.trial import unittest
from twisted.python.reflect import namedModule
class QtreactorTests(unittest.TestCase):
    """
    Tests for L{twisted.internet.qtreactor}.
    """
    def test_importQtreactor(self):
        """
        Attempting to import L{twisted.internet.qtreactor} should raise an
        C{ImportError} indicating that C{qtreactor} is no longer a part of
        Twisted.
        """
        # Block any real third-party qtreactor package from satisfying the
        # import so the stub's error path is the one exercised.
        sys.modules["qtreactor"] = None
        from twisted.plugins.twisted_qtstub import errorMessage
        try:
            namedModule('twisted.internet.qtreactor')
        except ImportError as e:
            # `as` form works on Python 2.6+ and 3.x; the old
            # `except ImportError, e` spelling is a SyntaxError on 3.x.
            self.assertEqual(str(e), errorMessage)
        else:
            # Previously a missing ImportError made the test silently pass.
            self.fail("twisted.internet.qtreactor was unexpectedly importable")
| mit |
hyonholee/azure-sdk-for-net | sdk/managementpartner/Microsoft.Azure.Management.ManagementPartner/src/Generated/IACEProvisioningManagementPartnerAPIClient.cs | 2519 | // <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Azure.Management.ManagementPartner
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using Newtonsoft.Json;
/// <summary>
/// This API describe ACE Provisioning ManagementPartner
/// </summary>
// AUTO-GENERATED (AutoRest) — do not hand-edit members; regenerate instead.
public partial interface IACEProvisioningManagementPartnerAPIClient : System.IDisposable
{
/// <summary>
/// The base URI of the service.
/// </summary>
System.Uri BaseUri { get; set; }
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
JsonSerializerSettings SerializationSettings { get; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
JsonSerializerSettings DeserializationSettings { get; }
/// <summary>
/// Credentials needed for the client to connect to Azure.
/// </summary>
ServiceClientCredentials Credentials { get; }
/// <summary>
/// Supported version.
/// </summary>
string ApiVersion { get; }
/// <summary>
/// The preferred language for the response.
/// </summary>
string AcceptLanguage { get; set; }
/// <summary>
/// The retry timeout in seconds for Long Running Operations. Default
/// value is 30.
/// </summary>
int? LongRunningOperationRetryTimeout { get; set; }
/// <summary>
/// Whether a unique x-ms-client-request-id should be generated. When
/// set to true a unique x-ms-client-request-id value is generated and
/// included in each request. Default is true.
/// </summary>
bool? GenerateClientRequestId { get; set; }
// Operation-group accessors exposed by the generated client below.
/// <summary>
/// Gets the IPartnerOperations.
/// </summary>
IPartnerOperations Partner { get; }
/// <summary>
/// Gets the IOperationOperations.
/// </summary>
IOperationOperations Operation { get; }
/// <summary>
/// Gets the IPartnersOperations.
/// </summary>
IPartnersOperations Partners { get; }
}
}
| mit |
kaostao/bitcoin | src/rpcwallet.cpp | 83583 | // Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2014 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "base58.h"
#include "rpcserver.h"
#include "init.h"
#include "net.h"
#include "netbase.h"
#include "util.h"
#include "wallet.h"
#include "walletdb.h"
#include <stdint.h>
#include <boost/assign/list_of.hpp>
#include "json/json_spirit_utils.h"
#include "json/json_spirit_value.h"
using namespace std;
using namespace boost;
using namespace boost::assign;
using namespace json_spirit;
int64_t nWalletUnlockTime;
static CCriticalSection cs_nWalletUnlockTime;
// Help-text fragment appended to RPC help strings: mentions the
// walletpassphrase requirement only when the wallet is encrypted.
std::string HelpRequiringPassphrase()
{
    if (pwalletMain && pwalletMain->IsCrypted())
        return "\nRequires wallet passphrase to be set with walletpassphrase call.";
    return "";
}
// Abort the current RPC with RPC_WALLET_UNLOCK_NEEDED if the wallet is
// locked; callers invoke this before any operation needing private keys.
void EnsureWalletIsUnlocked()
{
if (pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Please enter the wallet passphrase with walletpassphrase first.");
}
// Serialize wallet-transaction metadata (confirmations, block linkage,
// txid, conflicting txids, timestamps, and stored key/value metadata)
// into the JSON object 'entry' for RPC results.
void WalletTxToJSON(const CWalletTx& wtx, Object& entry)
{
int confirms = wtx.GetDepthInMainChain();
entry.push_back(Pair("confirmations", confirms));
if (wtx.IsCoinBase())
entry.push_back(Pair("generated", true));
// Only confirmed transactions have a containing block to report.
if (confirms > 0)
{
entry.push_back(Pair("blockhash", wtx.hashBlock.GetHex()));
entry.push_back(Pair("blockindex", wtx.nIndex));
entry.push_back(Pair("blocktime", (boost::int64_t)(mapBlockIndex[wtx.hashBlock]->nTime)));
}
uint256 hash = wtx.GetHash();
entry.push_back(Pair("txid", hash.GetHex()));
Array conflicts;
BOOST_FOREACH(const uint256& conflict, wtx.GetConflicts())
conflicts.push_back(conflict.GetHex());
entry.push_back(Pair("walletconflicts", conflicts));
entry.push_back(Pair("time", (boost::int64_t)wtx.GetTxTime()));
entry.push_back(Pair("timereceived", (boost::int64_t)wtx.nTimeReceived));
// Copy user-supplied metadata (e.g. "comment", "to") into the result verbatim.
BOOST_FOREACH(const PAIRTYPE(string,string)& item, wtx.mapValue)
entry.push_back(Pair(item.first, item.second));
}
// Extract an account name from a JSON parameter, rejecting the reserved
// wildcard "*" (which several RPCs interpret as "all accounts").
string AccountFromValue(const Value& value)
{
    const string strAccount = value.get_str();
    if (strAccount.compare("*") == 0)
        throw JSONRPCError(RPC_WALLET_INVALID_ACCOUNT_NAME, "Invalid account name");
    return strAccount;
}
// RPC handler: generate a fresh receiving address from the keypool and
// optionally file it under an account in the address book.
Value getnewaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"getnewaddress ( \"account\" )\n"
"\nReturns a new Bitcoin address for receiving payments.\n"
"If 'account' is specified (recommended), it is added to the address book \n"
"so payments received with the address will be credited to 'account'.\n"
"\nArguments:\n"
"1. \"account\"        (string, optional) The account name for the address to be linked to. if not provided, the default account \"\" is used. It can also be set to the empty string \"\" to represent the default account. The account does not need to exist, it will be created if there is no account by the given name.\n"
"\nResult:\n"
"\"bitcoinaddress\"    (string) The new bitcoin address\n"
"\nExamples:\n"
+ HelpExampleCli("getnewaddress", "")
+ HelpExampleCli("getnewaddress", "\"\"")
+ HelpExampleCli("getnewaddress", "\"myaccount\"")
+ HelpExampleRpc("getnewaddress", "\"myaccount\"")
);
// Parse the account first so we don't generate a key if there's an error
string strAccount;
if (params.size() > 0)
strAccount = AccountFromValue(params[0]);
// Refill the keypool opportunistically; requires the wallet to be unlocked.
if (!pwalletMain->IsLocked())
pwalletMain->TopUpKeyPool();
// Generate a new key that is added to wallet
CPubKey newKey;
if (!pwalletMain->GetKeyFromPool(newKey))
throw JSONRPCError(RPC_WALLET_KEYPOOL_RAN_OUT, "Error: Keypool ran out, please call keypoolrefill first");
CKeyID keyID = newKey.GetID();
pwalletMain->SetAddressBook(keyID, strAccount, "receive");
return CBitcoinAddress(keyID).ToString();
}
// Return the current receiving address for 'strAccount', generating a
// fresh one if the stored key was already used in any wallet tx, is
// invalid, or bForceNew is set. Persists the new key to the wallet DB.
CBitcoinAddress GetAccountAddress(string strAccount, bool bForceNew=false)
{
CWalletDB walletdb(pwalletMain->strWalletFile);
CAccount account;
walletdb.ReadAccount(strAccount, account);
bool bKeyUsed = false;
// Check if the current key has been used
if (account.vchPubKey.IsValid())
{
CScript scriptPubKey;
scriptPubKey.SetDestination(account.vchPubKey.GetID());
// Scan every wallet tx output for a payment to this account key.
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin();
it != pwalletMain->mapWallet.end() && account.vchPubKey.IsValid();
++it)
{
const CWalletTx& wtx = (*it).second;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
if (txout.scriptPubKey == scriptPubKey)
bKeyUsed = true;
}
}
// Generate a new key
if (!account.vchPubKey.IsValid() || bForceNew || bKeyUsed)
{
if (!pwalletMain->GetKeyFromPool(account.vchPubKey))
throw JSONRPCError(RPC_WALLET_KEYPOOL_RAN_OUT, "Error: Keypool ran out, please call keypoolrefill first");
pwalletMain->SetAddressBook(account.vchPubKey.GetID(), strAccount, "receive");
walletdb.WriteAccount(strAccount, account);
}
return CBitcoinAddress(account.vchPubKey.GetID());
}
// RPC handler: return the current receiving address for an account,
// creating the account/address on demand via GetAccountAddress().
Value getaccountaddress(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 1)
        throw runtime_error(
            "getaccountaddress \"account\"\n"
            "\nReturns the current Bitcoin address for receiving payments to this account.\n"
            "\nArguments:\n"
            "1. \"account\"       (string, required) The account name for the address. It can also be set to the empty string \"\" to represent the default account. The account does not need to exist, it will be created and a new address created  if there is no account by the given name.\n"
            "\nResult:\n"
            "\"bitcoinaddress\"   (string) The account bitcoin address\n"
            "\nExamples:\n"
            + HelpExampleCli("getaccountaddress", "")
            + HelpExampleCli("getaccountaddress", "\"\"")
            + HelpExampleCli("getaccountaddress", "\"myaccount\"")
            + HelpExampleRpc("getaccountaddress", "\"myaccount\"")
        );
    // Parse the account first so we don't generate a key if there's an error
    string strAccount = AccountFromValue(params[0]);
    // json_spirit::Value converts implicitly from std::string, so the
    // intermediate 'Value ret' temporary of the original was redundant.
    return GetAccountAddress(strAccount).ToString();
}
// RPC handler: reserve and permanently keep a key from the keypool and
// return it as a change address (intended for raw-transaction workflows).
Value getrawchangeaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw runtime_error(
"getrawchangeaddress\n"
"\nReturns a new Bitcoin address, for receiving change.\n"
"This is for use with raw transactions, NOT normal use.\n"
"\nResult:\n"
"\"address\"    (string) The address\n"
"\nExamples:\n"
+ HelpExampleCli("getrawchangeaddress", "")
+ HelpExampleRpc("getrawchangeaddress", "")
);
if (!pwalletMain->IsLocked())
pwalletMain->TopUpKeyPool();
CReserveKey reservekey(pwalletMain);
CPubKey vchPubKey;
if (!reservekey.GetReservedKey(vchPubKey))
throw JSONRPCError(RPC_WALLET_ERROR, "Error: Unable to obtain key for change");
// KeepKey() removes the key from the pool for good (it is now handed out).
reservekey.KeepKey();
CKeyID keyID = vchPubKey.GetID();
return CBitcoinAddress(keyID).ToString();
}
// RPC handler: (re)assign the account label of an address in the book.
// NOTE(review): the help text marks "account" as required, but the code
// accepts a single-parameter call (strAccount stays "") — confirm intent.
Value setaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"setaccount \"bitcoinaddress\" \"account\"\n"
"\nSets the account associated with the given address.\n"
"\nArguments:\n"
"1. \"bitcoinaddress\"  (string, required) The bitcoin address to be associated with an account.\n"
"2. \"account\"         (string, required) The account to assign the address to.\n"
"\nExamples:\n"
+ HelpExampleCli("setaccount", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\" \"tabby\"")
+ HelpExampleRpc("setaccount", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\", \"tabby\"")
);
CBitcoinAddress address(params[0].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
string strAccount;
if (params.size() > 1)
strAccount = AccountFromValue(params[1]);
// Detect when changing the account of an address that is the 'unused current key' of another account:
if (pwalletMain->mapAddressBook.count(address.Get()))
{
string strOldAccount = pwalletMain->mapAddressBook[address.Get()].name;
// Rotate the old account's receiving address so it is not left pointing
// at a key that now belongs to a different account.
if (address == GetAccountAddress(strOldAccount))
GetAccountAddress(strOldAccount, true);
}
pwalletMain->SetAddressBook(address.Get(), strAccount, "receive");
return Value::null;
}
// RPC handler: look up the account label stored for an address; returns
// the empty string when the address is unknown or unlabeled.
Value getaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"getaccount \"bitcoinaddress\"\n"
"\nReturns the account associated with the given address.\n"
"\nArguments:\n"
"1. \"bitcoinaddress\"  (string, required) The bitcoin address for account lookup.\n"
"\nResult:\n"
"\"accountname\"        (string) the account address\n"
"\nExamples:\n"
+ HelpExampleCli("getaccount", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\"")
+ HelpExampleRpc("getaccount", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\"")
);
CBitcoinAddress address(params[0].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
string strAccount;
map<CTxDestination, CAddressBookData>::iterator mi = pwalletMain->mapAddressBook.find(address.Get());
if (mi != pwalletMain->mapAddressBook.end() && !(*mi).second.name.empty())
strAccount = (*mi).second.name;
return strAccount;
}
// RPC handler: list every address-book entry whose label equals the
// requested account name.
Value getaddressesbyaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"getaddressesbyaccount \"account\"\n"
"\nReturns the list of addresses for the given account.\n"
"\nArguments:\n"
"1. \"account\"  (string, required) The account name.\n"
"\nResult:\n"
"[                     (json array of string)\n"
"  \"bitcoinaddress\"  (string) a bitcoin address associated with the given account\n"
"  ,...\n"
"]\n"
"\nExamples:\n"
+ HelpExampleCli("getaddressesbyaccount", "\"tabby\"")
+ HelpExampleRpc("getaddressesbyaccount", "\"tabby\"")
);
string strAccount = AccountFromValue(params[0]);
// Find all addresses that have the given account
Array ret;
BOOST_FOREACH(const PAIRTYPE(CBitcoinAddress, CAddressBookData)& item, pwalletMain->mapAddressBook)
{
const CBitcoinAddress& address = item.first;
const string& strName = item.second.name;
if (strName == strAccount)
ret.push_back(address.ToString());
}
return ret;
}
// RPC handler: send 'amount' BTC to an address, with optional wallet-only
// "comment"/"to" annotations. Requires an unlocked wallet; returns the txid.
Value sendtoaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 4)
throw runtime_error(
"sendtoaddress \"bitcoinaddress\" amount ( \"comment\" \"comment-to\" )\n"
"\nSent an amount to a given address. The amount is a real and is rounded to the nearest 0.00000001\n"
+ HelpRequiringPassphrase() +
"\nArguments:\n"
"1. \"bitcoinaddress\"  (string, required) The bitcoin address to send to.\n"
"2. \"amount\"      (numeric, required) The amount in btc to send. eg 0.1\n"
"3. \"comment\"     (string, optional) A comment used to store what the transaction is for. \n"
"                             This is not part of the transaction, just kept in your wallet.\n"
"4. \"comment-to\"  (string, optional) A comment to store the name of the person or organization \n"
"                             to which you're sending the transaction. This is not part of the \n"
"                             transaction, just kept in your wallet.\n"
"\nResult:\n"
"\"transactionid\"  (string) The transaction id. (view at https://blockchain.info/tx/[transactionid])\n"
"\nExamples:\n"
+ HelpExampleCli("sendtoaddress", "\"1M72Sfpbz1BPpXFHz9m3CdqATR44Jvaydd\" 0.1")
+ HelpExampleCli("sendtoaddress", "\"1M72Sfpbz1BPpXFHz9m3CdqATR44Jvaydd\" 0.1 \"donation\" \"seans outpost\"")
+ HelpExampleRpc("sendtoaddress", "\"1M72Sfpbz1BPpXFHz9m3CdqATR44Jvaydd\", 0.1, \"donation\", \"seans outpost\"")
);
CBitcoinAddress address(params[0].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
// Amount
int64_t nAmount = AmountFromValue(params[1]);
// Wallet comments
CWalletTx wtx;
// null/empty optional params are simply omitted from the stored metadata.
if (params.size() > 2 && params[2].type() != null_type && !params[2].get_str().empty())
wtx.mapValue["comment"] = params[2].get_str();
if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
wtx.mapValue["to"]      = params[3].get_str();
EnsureWalletIsUnlocked();
string strError = pwalletMain->SendMoneyToDestination(address.Get(), nAmount, wtx);
if (strError != "")
throw JSONRPCError(RPC_WALLET_ERROR, strError);
return wtx.GetHash().GetHex();
}
// RPC handler: report clusters of addresses whose common ownership was
// revealed on-chain (co-spent inputs / change outputs), with balances.
Value listaddressgroupings(const Array& params, bool fHelp)
{
if (fHelp)
throw runtime_error(
"listaddressgroupings\n"
"\nLists groups of addresses which have had their common ownership\n"
"made public by common use as inputs or as the resulting change\n"
"in past transactions\n"
"\nResult:\n"
"[\n"
"  [\n"
"    [\n"
"      \"bitcoinaddress\",     (string) The bitcoin address\n"
"      amount,                 (numeric) The amount in btc\n"
"      \"account\"             (string, optional) The account\n"
"    ]\n"
"    ,...\n"
"  ]\n"
"  ,...\n"
"]\n"
"\nExamples:\n"
+ HelpExampleCli("listaddressgroupings", "")
+ HelpExampleRpc("listaddressgroupings", "")
);
Array jsonGroupings;
map<CTxDestination, int64_t> balances = pwalletMain->GetAddressBalances();
BOOST_FOREACH(set<CTxDestination> grouping, pwalletMain->GetAddressGroupings())
{
Array jsonGrouping;
BOOST_FOREACH(CTxDestination address, grouping)
{
// Each entry is [address, balance, (optional) account label].
Array addressInfo;
addressInfo.push_back(CBitcoinAddress(address).ToString());
addressInfo.push_back(ValueFromAmount(balances[address]));
{
// Address book lookups must hold the wallet lock.
LOCK(pwalletMain->cs_wallet);
if (pwalletMain->mapAddressBook.find(CBitcoinAddress(address).Get()) != pwalletMain->mapAddressBook.end())
addressInfo.push_back(pwalletMain->mapAddressBook.find(CBitcoinAddress(address).Get())->second.name);
}
jsonGrouping.push_back(addressInfo);
}
jsonGroupings.push_back(jsonGrouping);
}
return jsonGroupings;
}
// RPC handler: sign an arbitrary message with the private key behind a
// wallet address; returns a base64 compact signature verifiable with
// the companion verifymessage RPC.
Value signmessage(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 2)
throw runtime_error(
"signmessage \"bitcoinaddress\" \"message\"\n"
"\nSign a message with the private key of an address"
+ HelpRequiringPassphrase() + "\n"
"\nArguments:\n"
"1. \"bitcoinaddress\"  (string, required) The bitcoin address to use for the private key.\n"
"2. \"message\"         (string, required) The message to create a signature of.\n"
"\nResult:\n"
"\"signature\"          (string) The signature of the message encoded in base 64\n"
"\nExamples:\n"
"\nUnlock the wallet for 30 seconds\n"
+ HelpExampleCli("walletpassphrase", "\"mypassphrase\" 30") +
"\nCreate the signature\n"
+ HelpExampleCli("signmessage", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\" \"my message\"") +
"\nVerify the signature\n"
+ HelpExampleCli("verifymessage", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\" \"signature\" \"my message\"") +
"\nAs json rpc\n"
+ HelpExampleRpc("signmessage", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\", \"my message\"")
);
EnsureWalletIsUnlocked();
string strAddress = params[0].get_str();
string strMessage = params[1].get_str();
CBitcoinAddress addr(strAddress);
if (!addr.IsValid())
throw JSONRPCError(RPC_TYPE_ERROR, "Invalid address");
// Script-hash (P2SH) addresses have no single key and cannot sign.
CKeyID keyID;
if (!addr.GetKeyID(keyID))
throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to key");
CKey key;
if (!pwalletMain->GetKey(keyID, key))
throw JSONRPCError(RPC_WALLET_ERROR, "Private key not available");
// Hash the magic prefix plus the message, then produce a compact sig.
CHashWriter ss(SER_GETHASH, 0);
ss << strMessageMagic;
ss << strMessage;
vector<unsigned char> vchSig;
if (!key.SignCompact(ss.GetHash(), vchSig))
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Sign failed");
return EncodeBase64(&vchSig[0], vchSig.size());
}
// RPC handler: sum all outputs paying exactly this address across wallet
// transactions with >= minconf confirmations (coinbase/non-final excluded).
Value getreceivedbyaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"getreceivedbyaddress \"bitcoinaddress\" ( minconf )\n"
"\nReturns the total amount received by the given bitcoinaddress in transactions with at least minconf confirmations.\n"
"\nArguments:\n"
"1. \"bitcoinaddress\"  (string, required) The bitcoin address for transactions.\n"
"2. minconf             (numeric, optional, default=1) Only include transactions confirmed at least this many times.\n"
"\nResult:\n"
"amount   (numeric) The total amount in btc received at this address.\n"
"\nExamples:\n"
"\nThe amount from transactions with at least 1 confirmation\n"
+ HelpExampleCli("getreceivedbyaddress", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\"") +
"\nThe amount including unconfirmed transactions, zero confirmations\n"
+ HelpExampleCli("getreceivedbyaddress", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\" 0") +
"\nThe amount with at least 6 confirmation, very safe\n"
+ HelpExampleCli("getreceivedbyaddress", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\" 6") +
"\nAs a json rpc call\n"
+ HelpExampleRpc("getreceivedbyaddress", "\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\", 6")
);
// Bitcoin address
CBitcoinAddress address = CBitcoinAddress(params[0].get_str());
CScript scriptPubKey;
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
scriptPubKey.SetDestination(address.Get());
// Addresses not owned by this wallet trivially received 0.
if (!IsMine(*pwalletMain,scriptPubKey))
return (double)0.0;
// Minimum confirmations
int nMinDepth = 1;
if (params.size() > 1)
nMinDepth = params[1].get_int();
// Tally
int64_t nAmount = 0;
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (wtx.IsCoinBase() || !IsFinalTx(wtx))
continue;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
if (txout.scriptPubKey == scriptPubKey)
if (wtx.GetDepthInMainChain() >= nMinDepth)
nAmount += txout.nValue;
}
return ValueFromAmount(nAmount);
}
// RPC handler: sum all outputs paying any address labeled with 'account'
// across wallet transactions with >= minconf confirmations.
Value getreceivedbyaccount(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 1 || params.size() > 2)
        throw runtime_error(
            "getreceivedbyaccount \"account\" ( minconf )\n"
            "\nReturns the total amount received by addresses with <account> in transactions with at least [minconf] confirmations.\n"
            "\nArguments:\n"
            "1. \"account\"      (string, required) The selected account, may be the default account using \"\".\n"
            "2. minconf          (numeric, optional, default=1) Only include transactions confirmed at least this many times.\n"
            "\nResult:\n"
            "amount              (numeric) The total amount in btc received for this account.\n"
            "\nExamples:\n"
            "\nAmount received by the default account with at least 1 confirmation\n"
            + HelpExampleCli("getreceivedbyaccount", "\"\"") +
            "\nAmount received at the tabby account including unconfirmed amounts with zero confirmations\n"
            + HelpExampleCli("getreceivedbyaccount", "\"tabby\" 0") +
            "\nThe amount with at least 6 confirmation, very safe\n"
            + HelpExampleCli("getreceivedbyaccount", "\"tabby\" 6") +
            "\nAs a json rpc call\n"
            + HelpExampleRpc("getreceivedbyaccount", "\"tabby\", 6")
        );
    // Minimum confirmations
    int nMinDepth = 1;
    if (params.size() > 1)
        nMinDepth = params[1].get_int();
    // Get the set of pub keys assigned to account
    string strAccount = AccountFromValue(params[0]);
    set<CTxDestination> setAddress = pwalletMain->GetAccountAddresses(strAccount);
    // Tally
    int64_t nAmount = 0;
    for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
    {
        const CWalletTx& wtx = (*it).second;
        if (wtx.IsCoinBase() || !IsFinalTx(wtx))
            continue;
        BOOST_FOREACH(const CTxOut& txout, wtx.vout)
        {
            CTxDestination address;
            if (ExtractDestination(txout.scriptPubKey, address) && IsMine(*pwalletMain, address) && setAddress.count(address))
                if (wtx.GetDepthInMainChain() >= nMinDepth)
                    nAmount += txout.nValue;
        }
    }
    // Use ValueFromAmount for canonical satoshi->BTC JSON formatting,
    // consistent with getreceivedbyaddress (the raw double division
    // `(double)nAmount / (double)COIN` bypassed that formatting).
    return ValueFromAmount(nAmount);
}
// Compute an account's balance in satoshis: received outputs (subject to
// nMinDepth), minus sends and fees, plus internal "move" bookkeeping
// entries stored in the wallet DB.
int64_t GetAccountBalance(CWalletDB& walletdb, const string& strAccount, int nMinDepth)
{
int64_t nBalance = 0;
// Tally wallet transactions
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
// Skip non-final, immature-coinbase, and conflicted (depth < 0) txs.
if (!IsFinalTx(wtx) || wtx.GetBlocksToMaturity() > 0 || wtx.GetDepthInMainChain() < 0)
continue;
int64_t nReceived, nSent, nFee;
wtx.GetAccountAmounts(strAccount, nReceived, nSent, nFee);
// Received funds count only after nMinDepth; sends/fees always count.
if (nReceived != 0 && wtx.GetDepthInMainChain() >= nMinDepth)
nBalance += nReceived;
nBalance -= nSent + nFee;
}
// Tally internal accounting entries
nBalance += walletdb.GetAccountCreditDebit(strAccount);
return nBalance;
}
int64_t GetAccountBalance(const string& strAccount, int nMinDepth)
{
CWalletDB walletdb(pwalletMain->strWalletFile);
return GetAccountBalance(walletdb, strAccount, nMinDepth);
}
// RPC handler: no-args -> whole-wallet spendable balance; "*" -> balance
// recomputed from per-tx amounts (so `getbalance` and `getbalance "*" 0`
// agree); otherwise a per-account balance via GetAccountBalance().
Value getbalance(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw runtime_error(
"getbalance ( \"account\" minconf )\n"
"\nIf account is not specified, returns the server's total available balance.\n"
"If account is specified, returns the balance in the account.\n"
"Note that the account \"\" is not the same as leaving the parameter out.\n"
"The server total may be different to the balance in the default \"\" account.\n"
"\nArguments:\n"
"1. \"account\"      (string, optional) The selected account, or \"*\" for entire wallet. It may be the default account using \"\".\n"
"2. minconf          (numeric, optional, default=1) Only include transactions confirmed at least this many times.\n"
"\nResult:\n"
"amount              (numeric) The total amount in btc received for this account.\n"
"\nExamples:\n"
"\nThe total amount in the server across all accounts\n"
+ HelpExampleCli("getbalance", "") +
"\nThe total amount in the server across all accounts, with at least 5 confirmations\n"
+ HelpExampleCli("getbalance", "\"*\" 6") +
"\nThe total amount in the default account with at least 1 confirmation\n"
+ HelpExampleCli("getbalance", "\"\"") +
"\nThe total amount in the account named tabby with at least 6 confirmations\n"
+ HelpExampleCli("getbalance", "\"tabby\" 6") +
"\nAs a json rpc call\n"
+ HelpExampleRpc("getbalance", "\"tabby\", 6")
);
if (params.size() == 0)
return ValueFromAmount(pwalletMain->GetBalance());
int nMinDepth = 1;
if (params.size() > 1)
nMinDepth = params[1].get_int();
if (params[0].get_str() == "*") {
// Calculate total balance a different way from GetBalance()
// (GetBalance() sums up all unspent TxOuts)
// getbalance and getbalance '*' 0 should return the same number
int64_t nBalance = 0;
for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (!wtx.IsTrusted() || wtx.GetBlocksToMaturity() > 0)
continue;
int64_t allFee;
string strSentAccount;
list<pair<CTxDestination, int64_t> > listReceived;
list<pair<CTxDestination, int64_t> > listSent;
wtx.GetAmounts(listReceived, listSent, allFee, strSentAccount);
// Received amounts count only once sufficiently confirmed...
if (wtx.GetDepthInMainChain() >= nMinDepth)
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination,int64_t)& r, listReceived)
nBalance += r.second;
}
// ...while sends and fees are always subtracted.
BOOST_FOREACH(const PAIRTYPE(CTxDestination,int64_t)& r, listSent)
nBalance -= r.second;
nBalance -= allFee;
}
return ValueFromAmount(nBalance);
}
string strAccount = AccountFromValue(params[0]);
int64_t nBalance = GetAccountBalance(strAccount, nMinDepth);
return ValueFromAmount(nBalance);
}
// RPC handler: return the wallet's total unconfirmed balance.
// Fix: the parameter list was corrupted to `const Array ¶ms` — an
// HTML-entity mangling of `&params` (&para; + "ms") that does not
// compile; restored to pass-by-const-reference like every other handler.
Value getunconfirmedbalance(const Array &params, bool fHelp)
{
    if (fHelp || params.size() > 0)
        throw runtime_error(
            "getunconfirmedbalance\n"
            "Returns the server's total unconfirmed balance\n");
    return ValueFromAmount(pwalletMain->GetUnconfirmedBalance());
}
// RPC handler for "move": shift an amount between two local accounts by
// writing a paired debit/credit accounting entry inside one DB
// transaction. Pure bookkeeping — no on-chain transaction is created.
Value movecmd(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 3 || params.size() > 5)
throw runtime_error(
"move \"fromaccount\" \"toaccount\" amount ( minconf \"comment\" )\n"
"\nMove a specified amount from one account in your wallet to another.\n"
"\nArguments:\n"
"1. \"fromaccount\"   (string, required) The name of the account to move funds from. May be the default account using \"\".\n"
"2. \"toaccount\"     (string, required) The name of the account to move funds to. May be the default account using \"\".\n"
"3. minconf           (numeric, optional, default=1) Only use funds with at least this many confirmations.\n"
"4. \"comment\"       (string, optional) An optional comment, stored in the wallet only.\n"
"\nResult:\n"
"true|false           (boolean) true if successfull.\n"
"\nExamples:\n"
"\nMove 0.01 btc from the default account to the account named tabby\n"
+ HelpExampleCli("move", "\"\" \"tabby\" 0.01") +
"\nMove 0.01 btc timotei to akiko with a comment and funds have 6 confirmations\n"
+ HelpExampleCli("move", "\"timotei\" \"akiko\" 0.01 6 \"happy birthday!\"") +
"\nAs a json rpc call\n"
+ HelpExampleRpc("move", "\"timotei\", \"akiko\", 0.01, 6, \"happy birthday!\"")
);
string strFrom = AccountFromValue(params[0]);
string strTo = AccountFromValue(params[1]);
int64_t nAmount = AmountFromValue(params[2]);
if (params.size() > 3)
// unused parameter, used to be nMinDepth, keep type-checking it though
(void)params[3].get_int();
string strComment;
if (params.size() > 4)
strComment = params[4].get_str();
// Debit and credit must land atomically: wrap both writes in a DB txn.
CWalletDB walletdb(pwalletMain->strWalletFile);
if (!walletdb.TxnBegin())
throw JSONRPCError(RPC_DATABASE_ERROR, "database error");
int64_t nNow = GetAdjustedTime();
// Debit
CAccountingEntry debit;
debit.nOrderPos = pwalletMain->IncOrderPosNext(&walletdb);
debit.strAccount = strFrom;
debit.nCreditDebit = -nAmount;
debit.nTime = nNow;
debit.strOtherAccount = strTo;
debit.strComment = strComment;
walletdb.WriteAccountingEntry(debit);
// Credit
CAccountingEntry credit;
credit.nOrderPos = pwalletMain->IncOrderPosNext(&walletdb);
credit.strAccount = strTo;
credit.nCreditDebit = nAmount;
credit.nTime = nNow;
credit.strOtherAccount = strFrom;
credit.strComment = strComment;
walletdb.WriteAccountingEntry(credit);
if (!walletdb.TxnCommit())
throw JSONRPCError(RPC_DATABASE_ERROR, "database error");
return true;
}
// RPC: sendfrom - create a real blockchain transaction paying a bitcoin
// address, debited against a named wallet account.  The account affects only
// bookkeeping and the available-balance check; coin selection is wallet-wide.
Value sendfrom(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 3 || params.size() > 6)
        throw runtime_error(
            "sendfrom \"fromaccount\" \"tobitcoinaddress\" amount ( minconf \"comment\" \"comment-to\" )\n"
            "\nSent an amount from an account to a bitcoin address.\n"
            "The amount is a real and is rounded to the nearest 0.00000001."
            + HelpRequiringPassphrase() + "\n"
            "\nArguments:\n"
            "1. \"fromaccount\"       (string, required) The name of the account to send funds from. May be the default account using \"\".\n"
            "2. \"tobitcoinaddress\"  (string, required) The bitcoin address to send funds to.\n"
            "3. amount                (numeric, required) The amount in btc. (transaction fee is added on top).\n"
            "4. minconf               (numeric, optional, default=1) Only use funds with at least this many confirmations.\n"
            "5. \"comment\"           (string, optional) A comment used to store what the transaction is for. \n"
            "                                     This is not part of the transaction, just kept in your wallet.\n"
            "6. \"comment-to\"        (string, optional) An optional comment to store the name of the person or organization \n"
            "                                     to which you're sending the transaction. This is not part of the transaction, \n"
            "                                     it is just kept in your wallet.\n"
            "\nResult:\n"
            "\"transactionid\"        (string) The transaction id. (view at https://blockchain.info/tx/[transactionid])\n"
            "\nExamples:\n"
            "\nSend 0.01 btc from the default account to the address, must have at least 1 confirmation\n"
            + HelpExampleCli("sendfrom", "\"\" \"1M72Sfpbz1BPpXFHz9m3CdqATR44Jvaydd\" 0.01") +
            "\nSend 0.01 from the tabby account to the given address, funds must have at least 6 confirmations\n"
            + HelpExampleCli("sendfrom", "\"tabby\" \"1M72Sfpbz1BPpXFHz9m3CdqATR44Jvaydd\" 0.01 6 \"donation\" \"seans outpost\"") +
            "\nAs a json rpc call\n"
            + HelpExampleRpc("sendfrom", "\"tabby\", \"1M72Sfpbz1BPpXFHz9m3CdqATR44Jvaydd\", 0.01, 6, \"donation\", \"seans outpost\"")
        );
    // Required parameters: source account, validated destination, amount.
    string strAccount = AccountFromValue(params[0]);
    CBitcoinAddress address(params[1].get_str());
    if (!address.IsValid())
        throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid Bitcoin address");
    int64_t nAmount = AmountFromValue(params[2]);
    int nMinDepth = 1;
    if (params.size() > 3)
        nMinDepth = params[3].get_int();
    // Optional comments are stored wallet-side only, never on the blockchain.
    CWalletTx wtx;
    wtx.strFromAccount = strAccount;
    if (params.size() > 4 && params[4].type() != null_type && !params[4].get_str().empty())
        wtx.mapValue["comment"] = params[4].get_str();
    if (params.size() > 5 && params[5].type() != null_type && !params[5].get_str().empty())
        wtx.mapValue["to"] = params[5].get_str();
    EnsureWalletIsUnlocked();
    // Check funds
    // Refuse to overdraw the account's balance at the requested depth.
    int64_t nBalance = GetAccountBalance(strAccount, nMinDepth);
    if (nAmount > nBalance)
        throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Account has insufficient funds");
    // Send
    string strError = pwalletMain->SendMoneyToDestination(address.Get(), nAmount, wtx);
    if (strError != "")
        throw JSONRPCError(RPC_WALLET_ERROR, strError);
    return wtx.GetHash().GetHex();
}
// RPC: sendmany - pay multiple addresses in one blockchain transaction,
// debited against a named wallet account.  Amounts are given as a JSON
// object mapping address -> btc amount.
Value sendmany(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 2 || params.size() > 4)
        throw runtime_error(
            "sendmany \"fromaccount\" {\"address\":amount,...} ( minconf \"comment\" )\n"
            "\nSend multiple times. Amounts are double-precision floating point numbers."
            + HelpRequiringPassphrase() + "\n"
            "\nArguments:\n"
            "1. \"fromaccount\"         (string, required) The account to send the funds from, can be \"\" for the default account\n"
            "2. \"amounts\"             (string, required) A json object with addresses and amounts\n"
            "    {\n"
            "      \"address\":amount   (numeric) The bitcoin address is the key, the numeric amount in btc is the value\n"
            "      ,...\n"
            "    }\n"
            "3. minconf                 (numeric, optional, default=1) Only use the balance confirmed at least this many times.\n"
            "4. \"comment\"             (string, optional) A comment\n"
            "\nResult:\n"
            "\"transactionid\"          (string) The transaction id for the send. Only 1 transaction is created regardless of \n"
            "                                    the number of addresses. See https://blockchain.info/tx/[transactionid]\n"
            "\nExamples:\n"
            "\nSend two amounts to two different addresses:\n"
            + HelpExampleCli("sendmany", "\"tabby\" \"{\\\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\\\":0.01,\\\"1353tsE8YMTA4EuV7dgUXGjNFf9KpVvKHz\\\":0.02}\"") +
            "\nSend two amounts to two different addresses setting the confirmation and comment:\n"
            + HelpExampleCli("sendmany", "\"tabby\" \"{\\\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\\\":0.01,\\\"1353tsE8YMTA4EuV7dgUXGjNFf9KpVvKHz\\\":0.02}\" 6 \"testing\"") +
            "\nAs a json rpc call\n"
            + HelpExampleRpc("sendmany", "\"tabby\", \"{\\\"1D1ZrZNe3JUo7ZycKEYQQiQAWd9y54F4XZ\\\":0.01,\\\"1353tsE8YMTA4EuV7dgUXGjNFf9KpVvKHz\\\":0.02}\", 6, \"testing\"")
        );
    string strAccount = AccountFromValue(params[0]);
    Object sendTo = params[1].get_obj();
    int nMinDepth = 1;
    if (params.size() > 2)
        nMinDepth = params[2].get_int();
    CWalletTx wtx;
    wtx.strFromAccount = strAccount;
    if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
        wtx.mapValue["comment"] = params[3].get_str();
    // Validate every recipient, reject duplicate addresses, and build the
    // (scriptPubKey, amount) list plus the running total to send.
    set<CBitcoinAddress> setAddress;
    vector<pair<CScript, int64_t> > vecSend;
    int64_t totalAmount = 0;
    BOOST_FOREACH(const Pair& s, sendTo)
    {
        CBitcoinAddress address(s.name_);
        if (!address.IsValid())
            throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid Bitcoin address: ")+s.name_);
        if (setAddress.count(address))
            throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+s.name_);
        setAddress.insert(address);
        CScript scriptPubKey;
        scriptPubKey.SetDestination(address.Get());
        int64_t nAmount = AmountFromValue(s.value_);
        totalAmount += nAmount;
        vecSend.push_back(make_pair(scriptPubKey, nAmount));
    }
    EnsureWalletIsUnlocked();
    // Check funds
    // The account must cover the sum of all outputs (fee is checked later
    // by CreateTransaction).
    int64_t nBalance = GetAccountBalance(strAccount, nMinDepth);
    if (totalAmount > nBalance)
        throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Account has insufficient funds");
    // Send
    // Create and broadcast one transaction covering all recipients; the
    // reserve key holds the change address until commit succeeds.
    CReserveKey keyChange(pwalletMain);
    int64_t nFeeRequired = 0;
    string strFailReason;
    bool fCreated = pwalletMain->CreateTransaction(vecSend, wtx, keyChange, nFeeRequired, strFailReason);
    if (!fCreated)
        throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, strFailReason);
    if (!pwalletMain->CommitTransaction(wtx, keyChange))
        throw JSONRPCError(RPC_WALLET_ERROR, "Transaction commit failed");
    return wtx.GetHash().GetHex();
}
// Defined in rpcmisc.cpp
extern CScript _createmultisig(const Array& params);
// RPC: addmultisigaddress - add an m-of-n pay-to-script-hash address to the
// wallet, built from a required-signature count and a list of addresses or
// hex public keys, optionally assigning it to an account.
Value addmultisigaddress(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 2 || params.size() > 3)
    {
        string msg = "addmultisigaddress nrequired [\"key\",...] ( \"account\" )\n"
            "\nAdd a nrequired-to-sign multisignature address to the wallet.\n"
            "Each key is a Bitcoin address or hex-encoded public key.\n"
            "If 'account' is specified, assign address to that account.\n"
            "\nArguments:\n"
            "1. nrequired        (numeric, required) The number of required signatures out of the n keys or addresses.\n"
            "2. \"keysobject\"   (string, required) A json array of bitcoin addresses or hex-encoded public keys\n"
            "     [\n"
            "       \"address\"  (string) bitcoin address or hex-encoded public key\n"
            "       ...,\n"
            "     ]\n"
            "3. \"account\"      (string, optional) An account to assign the addresses to.\n"
            "\nResult:\n"
            "\"bitcoinaddress\"  (string) A bitcoin address associated with the keys.\n"
            "\nExamples:\n"
            "\nAdd a multisig address from 2 addresses\n"
            + HelpExampleCli("addmultisigaddress", "2 \"[\\\"16sSauSf5pF2UkUwvKGq4qjNRzBZYqgEL5\\\",\\\"171sgjn4YtPu27adkKGrdDwzRTxnRkBfKV\\\"]\"") +
            "\nAs json rpc call\n"
            + HelpExampleRpc("addmultisigaddress", "2, \"[\\\"16sSauSf5pF2UkUwvKGq4qjNRzBZYqgEL5\\\",\\\"171sgjn4YtPu27adkKGrdDwzRTxnRkBfKV\\\"]\"")
        ;
        throw runtime_error(msg);
    }
    string strAccount;
    if (params.size() > 2)
        strAccount = AccountFromValue(params[2]);
    // Construct using pay-to-script-hash:
    // _createmultisig (rpcmisc.cpp) validates nrequired/keys and builds the
    // redeem script; the wallet then stores it and labels its script hash.
    CScript inner = _createmultisig(params);
    CScriptID innerID = inner.GetID();
    // NOTE(review): AddCScript's return value is ignored here - a storage
    // failure would go unreported.  Confirm whether that is intentional.
    pwalletMain->AddCScript(inner);
    pwalletMain->SetAddressBook(innerID, strAccount, "send");
    return CBitcoinAddress(innerID).ToString();
}
// Per-address (or per-account) accumulator used by ListReceived():
// total value received, the depth of the least-confirmed contributing
// transaction, and the hashes of all contributing transactions.
struct tallyitem
{
    int64_t nAmount;        // running total of received value, in satoshis
    int nConf;              // minimum confirmation depth seen so far
    vector<uint256> txids;  // transactions that contributed to nAmount
    // Start with a zero total and a sentinel "no confirmations seen yet"
    // depth so that min() with any real depth picks the real one.
    tallyitem() : nAmount(0), nConf(std::numeric_limits<int>::max()) {}
};
// Shared implementation behind listreceivedbyaddress (fByAccounts=false) and
// listreceivedbyaccount (fByAccounts=true).  Tallies value received per
// address across the whole wallet, then reports either per-address objects
// or per-account aggregates.
Value ListReceived(const Array& params, bool fByAccounts)
{
    // Minimum confirmations
    int nMinDepth = 1;
    if (params.size() > 0)
        nMinDepth = params[0].get_int();
    // Whether to include empty accounts
    bool fIncludeEmpty = false;
    if (params.size() > 1)
        fIncludeEmpty = params[1].get_bool();
    // Tally
    // Pass 1: walk every wallet transaction output paying one of our
    // addresses and accumulate amount / min-depth / txids per address.
    map<CBitcoinAddress, tallyitem> mapTally;
    for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
    {
        const CWalletTx& wtx = (*it).second;
        // Skip coinbases and non-final transactions entirely.
        if (wtx.IsCoinBase() || !IsFinalTx(wtx))
            continue;
        int nDepth = wtx.GetDepthInMainChain();
        if (nDepth < nMinDepth)
            continue;
        BOOST_FOREACH(const CTxOut& txout, wtx.vout)
        {
            CTxDestination address;
            if (!ExtractDestination(txout.scriptPubKey, address) || !IsMine(*pwalletMain, address))
                continue;
            tallyitem& item = mapTally[address];
            item.nAmount += txout.nValue;
            item.nConf = min(item.nConf, nDepth);
            item.txids.push_back(wtx.GetHash());
        }
    }
    // Reply
    // Pass 2: iterate the address book (so empty addresses can be included)
    // and either emit one JSON object per address, or fold the tallies into
    // per-account totals for pass 3.
    Array ret;
    map<string, tallyitem> mapAccountTally;
    BOOST_FOREACH(const PAIRTYPE(CBitcoinAddress, CAddressBookData)& item, pwalletMain->mapAddressBook)
    {
        const CBitcoinAddress& address = item.first;
        const string& strAccount = item.second.name;
        map<CBitcoinAddress, tallyitem>::iterator it = mapTally.find(address);
        if (it == mapTally.end() && !fIncludeEmpty)
            continue;
        int64_t nAmount = 0;
        int nConf = std::numeric_limits<int>::max();
        if (it != mapTally.end())
        {
            nAmount = (*it).second.nAmount;
            nConf = (*it).second.nConf;
        }
        if (fByAccounts)
        {
            tallyitem& item = mapAccountTally[strAccount];
            item.nAmount += nAmount;
            item.nConf = min(item.nConf, nConf);
        }
        else
        {
            Object obj;
            obj.push_back(Pair("address",       address.ToString()));
            obj.push_back(Pair("account",       strAccount));
            obj.push_back(Pair("amount",        ValueFromAmount(nAmount)));
            // max() sentinel means "no qualifying tx"; report 0 confirmations.
            obj.push_back(Pair("confirmations", (nConf == std::numeric_limits<int>::max() ? 0 : nConf)));
            Array transactions;
            if (it != mapTally.end())
            {
                BOOST_FOREACH(const uint256& item, (*it).second.txids)
                {
                    transactions.push_back(item.GetHex());
                }
            }
            obj.push_back(Pair("txids", transactions));
            ret.push_back(obj);
        }
    }
    // Pass 3 (accounts mode only): emit one JSON object per account.
    if (fByAccounts)
    {
        for (map<string, tallyitem>::iterator it = mapAccountTally.begin(); it != mapAccountTally.end(); ++it)
        {
            int64_t nAmount = (*it).second.nAmount;
            int nConf = (*it).second.nConf;
            Object obj;
            obj.push_back(Pair("account",       (*it).first));
            obj.push_back(Pair("amount",        ValueFromAmount(nAmount)));
            obj.push_back(Pair("confirmations", (nConf == std::numeric_limits<int>::max() ? 0 : nConf)));
            ret.push_back(obj);
        }
    }
    return ret;
}
// RPC: listreceivedbyaddress - per-address received-amount report; thin
// wrapper over ListReceived() with fByAccounts=false.
// Fix: help text said "(numeric, optional, dafault=false)" for includeempty;
// corrected to "(boolean, optional, default=false)" to match the parameter's
// actual type and the wording used by listreceivedbyaccount.
Value listreceivedbyaddress(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 2)
        throw runtime_error(
            "listreceivedbyaddress ( minconf includeempty )\n"
            "\nList balances by receiving address.\n"
            "\nArguments:\n"
            "1. minconf       (numeric, optional, default=1) The minimum number of confirmations before payments are included.\n"
            "2. includeempty  (boolean, optional, default=false) Whether to include addresses that haven't received any payments.\n"
            "\nResult:\n"
            "[\n"
            "  {\n"
            "    \"address\" : \"receivingaddress\",  (string) The receiving address\n"
            "    \"account\" : \"accountname\",       (string) The account of the receiving address. The default account is \"\".\n"
            "    \"amount\" : x.xxx,                  (numeric) The total amount in btc received by the address\n"
            "    \"confirmations\" : n                (numeric) The number of confirmations of the most recent transaction included\n"
            "  }\n"
            "  ,...\n"
            "]\n"
            "\nExamples:\n"
            + HelpExampleCli("listreceivedbyaddress", "")
            + HelpExampleCli("listreceivedbyaddress", "6 true")
            + HelpExampleRpc("listreceivedbyaddress", "6, true")
        );
    return ListReceived(params, false);
}
// RPC: listreceivedbyaccount - per-account received-amount report; thin
// wrapper over ListReceived() with fByAccounts=true.
Value listreceivedbyaccount(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 2)
        throw runtime_error(
            "listreceivedbyaccount ( minconf includeempty )\n"
            "\nList balances by account.\n"
            "\nArguments:\n"
            "1. minconf      (numeric, optional, default=1) The minimum number of confirmations before payments are included.\n"
            "2. includeempty (boolean, optional, default=false) Whether to include accounts that haven't received any payments.\n"
            "\nResult:\n"
            "[\n"
            "  {\n"
            "    \"account\" : \"accountname\",  (string) The account name of the receiving account\n"
            "    \"amount\" : x.xxx,             (numeric) The total amount received by addresses with this account\n"
            "    \"confirmations\" : n           (numeric) The number of confirmations of the most recent transaction included\n"
            "  }\n"
            "  ,...\n"
            "]\n"
            "\nExamples:\n"
            + HelpExampleCli("listreceivedbyaccount", "")
            + HelpExampleCli("listreceivedbyaccount", "6 true")
            + HelpExampleRpc("listreceivedbyaccount", "6, true")
        );
    return ListReceived(params, true);
}
// Append an "address" field to entry, but only when the destination can be
// represented as a base58 Bitcoin address (Set() rejects destinations such
// as CNoDestination).
static void MaybePushAddress(Object & entry, const CTxDestination &dest)
{
    CBitcoinAddress addrTmp;
    if (!addrTmp.Set(dest))
        return;
    entry.push_back(Pair("address", addrTmp.ToString()));
}
// Append JSON entries for a single wallet transaction to ret: one "send"
// entry per output we paid, and one received entry per output paying us.
// strAccount filters results ("*" matches everything); fLong adds the full
// per-transaction detail fields via WalletTxToJSON.
void ListTransactions(const CWalletTx& wtx, const string& strAccount, int nMinDepth, bool fLong, Array& ret)
{
    int64_t nFee;
    string strSentAccount;
    list<pair<CTxDestination, int64_t> > listReceived;
    list<pair<CTxDestination, int64_t> > listSent;
    wtx.GetAmounts(listReceived, listSent, nFee, strSentAccount);
    bool fAllAccounts = (strAccount == string("*"));
    // Sent
    // Emit "send" entries when we paid anyone (or paid only a fee) and the
    // sending account matches the filter.  Amounts and fee are negated.
    if ((!listSent.empty() || nFee != 0) && (fAllAccounts || strAccount == strSentAccount))
    {
        BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64_t)& s, listSent)
        {
            Object entry;
            entry.push_back(Pair("account", strSentAccount));
            MaybePushAddress(entry, s.first);
            entry.push_back(Pair("category", "send"));
            entry.push_back(Pair("amount", ValueFromAmount(-s.second)));
            entry.push_back(Pair("fee", ValueFromAmount(-nFee)));
            if (fLong)
                WalletTxToJSON(wtx, entry);
            ret.push_back(entry);
        }
    }
    // Received
    // Emit received entries only once the transaction is at least nMinDepth
    // confirmations deep.
    if (listReceived.size() > 0 && wtx.GetDepthInMainChain() >= nMinDepth)
    {
        BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64_t)& r, listReceived)
        {
            // The account is looked up from the address book; unknown
            // addresses fall into the default "" account.
            string account;
            if (pwalletMain->mapAddressBook.count(r.first))
                account = pwalletMain->mapAddressBook[r.first].name;
            if (fAllAccounts || (account == strAccount))
            {
                Object entry;
                entry.push_back(Pair("account", account));
                MaybePushAddress(entry, r.first);
                // Coinbase outputs are categorized by maturity instead of
                // plain "receive": orphan / immature / generate.
                if (wtx.IsCoinBase())
                {
                    if (wtx.GetDepthInMainChain() < 1)
                        entry.push_back(Pair("category", "orphan"));
                    else if (wtx.GetBlocksToMaturity() > 0)
                        entry.push_back(Pair("category", "immature"));
                    else
                        entry.push_back(Pair("category", "generate"));
                }
                else
                {
                    entry.push_back(Pair("category", "receive"));
                }
                entry.push_back(Pair("amount", ValueFromAmount(r.second)));
                if (fLong)
                    WalletTxToJSON(wtx, entry);
                ret.push_back(entry);
            }
        }
    }
}
// Convert one internal accounting entry (an account-to-account "move") into
// a JSON object and append it to ret.  Entries are filtered by strAccount,
// where "*" matches every account.
void AcentryToJSON(const CAccountingEntry& acentry, const string& strAccount, Array& ret)
{
    // Skip entries belonging to a different account, unless listing all.
    if (strAccount != string("*") && acentry.strAccount != strAccount)
        return;
    Object entry;
    entry.push_back(Pair("account", acentry.strAccount));
    entry.push_back(Pair("category", "move"));
    entry.push_back(Pair("time", (boost::int64_t)acentry.nTime));
    entry.push_back(Pair("amount", ValueFromAmount(acentry.nCreditDebit)));
    entry.push_back(Pair("otheraccount", acentry.strOtherAccount));
    entry.push_back(Pair("comment", acentry.strComment));
    ret.push_back(entry);
}
// RPC: listtransactions - return up to 'count' of the most recent wallet
// transactions (and accounting "move" entries), skipping the first 'from',
// for one account or all ("*").  Results are returned oldest-to-newest.
Value listtransactions(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 3)
        throw runtime_error(
            "listtransactions ( \"account\" count from )\n"
            "\nReturns up to 'count' most recent transactions skipping the first 'from' transactions for account 'account'.\n"
            "\nArguments:\n"
            "1. \"account\"    (string, optional) The account name. If not included, it will list all transactions for all accounts.\n"
            "                                     If \"\" is set, it will list transactions for the default account.\n"
            "2. count          (numeric, optional, default=10) The number of transactions to return\n"
            "3. from           (numeric, optional, default=0) The number of transactions to skip\n"
            "\nResult:\n"
            "[\n"
            "  {\n"
            "    \"account\":\"accountname\",       (string) The account name associated with the transaction. \n"
            "                                                It will be \"\" for the default account.\n"
            "    \"address\":\"bitcoinaddress\",    (string) The bitcoin address of the transaction. Not present for \n"
            "                                                move transactions (category = move).\n"
            "    \"category\":\"send|receive|move\", (string) The transaction category. 'move' is a local (off blockchain)\n"
            "                                                transaction between accounts, and not associated with an address,\n"
            "                                                transaction id or block. 'send' and 'receive' transactions are \n"
            "                                                associated with an address, transaction id and block details\n"
            "    \"amount\": x.xxx,          (numeric) The amount in btc. This is negative for the 'send' category, and for the\n"
            "                                         'move' category for moves outbound. It is positive for the 'receive' category,\n"
            "                                         and for the 'move' category for inbound funds.\n"
            "    \"fee\": x.xxx,             (numeric) The amount of the fee in btc. This is negative and only available for the \n"
            "                                         'send' category of transactions.\n"
            "    \"confirmations\": n,       (numeric) The number of confirmations for the transaction. Available for 'send' and \n"
            "                                         'receive' category of transactions.\n"
            "    \"blockhash\": \"hashvalue\", (string) The block hash containing the transaction. Available for 'send' and 'receive'\n"
            "                                          category of transactions.\n"
            "    \"blockindex\": n,          (numeric) The block index containing the transaction. Available for 'send' and 'receive'\n"
            "                                          category of transactions.\n"
            "    \"txid\": \"transactionid\", (string) The transaction id (see https://blockchain.info/tx/[transactionid]. Available \n"
            "                                          for 'send' and 'receive' category of transactions.\n"
            "    \"time\": xxx,              (numeric) The transaction time in seconds since epoch (midnight Jan 1 1970 GMT).\n"
            "    \"timereceived\": xxx,      (numeric) The time received in seconds since epoch (midnight Jan 1 1970 GMT). Available \n"
            "                                          for 'send' and 'receive' category of transactions.\n"
            "    \"comment\": \"...\",       (string) If a comment is associated with the transaction.\n"
            "    \"otheraccount\": \"accountname\",  (string) For the 'move' category of transactions, the account the funds came \n"
            "                                          from (for receiving funds, positive amounts), or went to (for sending funds,\n"
            "                                          negative amounts).\n"
            "  }\n"
            "]\n"
            "\nExamples:\n"
            "\nList the most recent 10 transactions in the systems\n"
            + HelpExampleCli("listtransactions", "") +
            "\nList the most recent 10 transactions for the tabby account\n"
            + HelpExampleCli("listtransactions", "\"tabby\"") +
            "\nList transactions 100 to 120 from the tabby account\n"
            + HelpExampleCli("listtransactions", "\"tabby\" 20 100") +
            "\nAs a json rpc call\n"
            + HelpExampleRpc("listtransactions", "\"tabby\", 20, 100")
        );
    string strAccount = "*";
    if (params.size() > 0)
        strAccount = params[0].get_str();
    int nCount = 10;
    if (params.size() > 1)
        nCount = params[1].get_int();
    int nFrom = 0;
    if (params.size() > 2)
        nFrom = params[2].get_int();
    if (nCount < 0)
        throw JSONRPCError(RPC_INVALID_PARAMETER, "Negative count");
    if (nFrom < 0)
        throw JSONRPCError(RPC_INVALID_PARAMETER, "Negative from");
    Array ret;
    std::list<CAccountingEntry> acentries;
    CWallet::TxItems txOrdered = pwalletMain->OrderedTxItems(acentries, strAccount);
    // iterate backwards until we have nCount items to return:
    // Each ordered item is either a wallet transaction or an accounting
    // entry; collect newest-first until we have at least nFrom+nCount.
    for (CWallet::TxItems::reverse_iterator it = txOrdered.rbegin(); it != txOrdered.rend(); ++it)
    {
        CWalletTx *const pwtx = (*it).second.first;
        if (pwtx != 0)
            ListTransactions(*pwtx, strAccount, 0, true, ret);
        CAccountingEntry *const pacentry = (*it).second.second;
        if (pacentry != 0)
            AcentryToJSON(*pacentry, strAccount, ret);
        if ((int)ret.size() >= (nCount+nFrom)) break;
    }
    // ret is newest to oldest
    // Clamp the requested window to what was actually collected, then trim
    // everything outside [nFrom, nFrom+nCount) and reverse into final order.
    if (nFrom > (int)ret.size())
        nFrom = ret.size();
    if ((nFrom + nCount) > (int)ret.size())
        nCount = ret.size() - nFrom;
    Array::iterator first = ret.begin();
    std::advance(first, nFrom);
    Array::iterator last = ret.begin();
    std::advance(last, nFrom+nCount);
    if (last != ret.end()) ret.erase(last, ret.end());
    if (first != ret.begin()) ret.erase(ret.begin(), first);
    std::reverse(ret.begin(), ret.end()); // Return oldest to newest
    return ret;
}
// RPC: listaccounts - return a JSON object mapping every account name to its
// balance, computed from wallet transactions (debits always counted, credits
// only at >= minconf confirmations) plus internal accounting "move" entries.
// Fix: help text said "Only onclude transactions" - corrected to "include".
Value listaccounts(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 1)
        throw runtime_error(
            "listaccounts ( minconf )\n"
            "\nReturns Object that has account names as keys, account balances as values.\n"
            "\nArguments:\n"
            "1. minconf     (numeric, optional, default=1) Only include transactions with at least this many confirmations\n"
            "\nResult:\n"
            "{                      (json object where keys are account names, and values are numeric balances\n"
            "  \"account\": x.xxx,  (numeric) The property name is the account name, and the value is the total balance for the account.\n"
            "  ...\n"
            "}\n"
            "\nExamples:\n"
            "\nList account balances where there at least 1 confirmation\n"
            + HelpExampleCli("listaccounts", "") +
            "\nList account balances including zero confirmation transactions\n"
            + HelpExampleCli("listaccounts", "0") +
            "\nList account balances for 6 or more confirmations\n"
            + HelpExampleCli("listaccounts", "6") +
            "\nAs json rpc call\n"
            + HelpExampleRpc("listaccounts", "6")
        );
    int nMinDepth = 1;
    if (params.size() > 0)
        nMinDepth = params[0].get_int();
    // Seed every account that owns one of our addresses with a zero balance
    // so accounts with no qualifying transactions still appear in the output.
    map<string, int64_t> mapAccountBalances;
    BOOST_FOREACH(const PAIRTYPE(CTxDestination, CAddressBookData)& entry, pwalletMain->mapAddressBook) {
        if (IsMine(*pwalletMain, entry.first)) // This address belongs to me
            mapAccountBalances[entry.second.name] = 0;
    }
    for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
    {
        const CWalletTx& wtx = (*it).second;
        int64_t nFee;
        string strSentAccount;
        list<pair<CTxDestination, int64_t> > listReceived;
        list<pair<CTxDestination, int64_t> > listSent;
        int nDepth = wtx.GetDepthInMainChain();
        // Skip immature coinbases and conflicted (negative-depth) txs.
        if (wtx.GetBlocksToMaturity() > 0 || nDepth < 0)
            continue;
        wtx.GetAmounts(listReceived, listSent, nFee, strSentAccount);
        // Debits (and the fee) reduce the sending account unconditionally.
        mapAccountBalances[strSentAccount] -= nFee;
        BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64_t)& s, listSent)
            mapAccountBalances[strSentAccount] -= s.second;
        // Credits only count once sufficiently confirmed; unknown addresses
        // are credited to the default "" account.
        if (nDepth >= nMinDepth)
        {
            BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64_t)& r, listReceived)
                if (pwalletMain->mapAddressBook.count(r.first))
                    mapAccountBalances[pwalletMain->mapAddressBook[r.first].name] += r.second;
                else
                    mapAccountBalances[""] += r.second;
        }
    }
    // Apply internal account-to-account "move" entries on top.
    list<CAccountingEntry> acentries;
    CWalletDB(pwalletMain->strWalletFile).ListAccountCreditDebit("*", acentries);
    BOOST_FOREACH(const CAccountingEntry& entry, acentries)
        mapAccountBalances[entry.strAccount] += entry.nCreditDebit;
    Object ret;
    BOOST_FOREACH(const PAIRTYPE(string, int64_t)& accountBalance, mapAccountBalances) {
        ret.push_back(Pair(accountBalance.first, ValueFromAmount(accountBalance.second)));
    }
    return ret;
}
// RPC: listsinceblock - list all wallet transactions in blocks mined after
// the given block hash (or every wallet transaction when omitted), plus the
// hash of the block target-confirmations deep for use as the next call's
// starting point.
// Fix: the wallet iteration copied each CWalletTx by value every loop turn;
// it now binds a const reference (ListTransactions takes const CWalletTx&),
// and uses pre-increment on the map iterator.
Value listsinceblock(const Array& params, bool fHelp)
{
    if (fHelp)
        throw runtime_error(
            "listsinceblock ( \"blockhash\" target-confirmations )\n"
            "\nGet all transactions in blocks since block [blockhash], or all transactions if omitted\n"
            "\nArguments:\n"
            "1. \"blockhash\"   (string, optional) The block hash to list transactions since\n"
            "2. target-confirmations:    (numeric, optional) The confirmations required, must be 1 or more\n"
            "\nResult:\n"
            "{\n"
            "  \"transactions\": [\n"
            "    \"account\":\"accountname\",       (string) The account name associated with the transaction. Will be \"\" for the default account.\n"
            "    \"address\":\"bitcoinaddress\",    (string) The bitcoin address of the transaction. Not present for move transactions (category = move).\n"
            "    \"category\":\"send|receive\",     (string) The transaction category. 'send' has negative amounts, 'receive' has positive amounts.\n"
            "    \"amount\": x.xxx,          (numeric) The amount in btc. This is negative for the 'send' category, and for the 'move' category for moves \n"
            "                                          outbound. It is positive for the 'receive' category, and for the 'move' category for inbound funds.\n"
            "    \"fee\": x.xxx,             (numeric) The amount of the fee in btc. This is negative and only available for the 'send' category of transactions.\n"
            "    \"confirmations\": n,       (numeric) The number of confirmations for the transaction. Available for 'send' and 'receive' category of transactions.\n"
            "    \"blockhash\": \"hashvalue\",     (string) The block hash containing the transaction. Available for 'send' and 'receive' category of transactions.\n"
            "    \"blockindex\": n,          (numeric) The block index containing the transaction. Available for 'send' and 'receive' category of transactions.\n"
            "    \"blocktime\": xxx,         (numeric) The block time in seconds since epoch (1 Jan 1970 GMT).\n"
            "    \"txid\": \"transactionid\",  (string) The transaction id (see https://blockchain.info/tx/[transactionid]. Available for 'send' and 'receive' category of transactions.\n"
            "    \"time\": xxx,              (numeric) The transaction time in seconds since epoch (Jan 1 1970 GMT).\n"
            "    \"timereceived\": xxx,      (numeric) The time received in seconds since epoch (Jan 1 1970 GMT). Available for 'send' and 'receive' category of transactions.\n"
            "    \"comment\": \"...\",       (string) If a comment is associated with the transaction.\n"
            "    \"to\": \"...\",            (string) If a comment to is associated with the transaction.\n"
            "  ],\n"
            "  \"lastblock\": \"lastblockhash\"     (string) The hash of the last block\n"
            "}\n"
            "\nExamples:\n"
            + HelpExampleCli("listsinceblock", "")
            + HelpExampleCli("listsinceblock", "\"000000000000000bacf66f7497b7dc45ef753ee9a7d38571037cdb1a57f663ad\" 6")
            + HelpExampleRpc("listsinceblock", "\"000000000000000bacf66f7497b7dc45ef753ee9a7d38571037cdb1a57f663ad\", 6")
        );
    CBlockIndex *pindex = NULL;
    int target_confirms = 1;
    // An unknown block hash silently falls back to "list everything".
    if (params.size() > 0)
    {
        uint256 blockId = 0;
        blockId.SetHex(params[0].get_str());
        std::map<uint256, CBlockIndex*>::iterator it = mapBlockIndex.find(blockId);
        if (it != mapBlockIndex.end())
            pindex = it->second;
    }
    if (params.size() > 1)
    {
        target_confirms = params[1].get_int();
        if (target_confirms < 1)
            throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter");
    }
    // Depth of the reference block in the active chain; -1 means no
    // reference block, which makes every wallet transaction qualify below.
    int depth = pindex ? (1 + chainActive.Height() - pindex->nHeight) : -1;
    Array transactions;
    for (map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
    {
        const CWalletTx& tx = (*it).second;
        if (depth == -1 || tx.GetDepthInMainChain() < depth)
            ListTransactions(tx, "*", 0, true, transactions);
    }
    // "lastblock" is the hash of the block target_confirms deep from the
    // tip, so a caller can feed it back in to resume from a safe point.
    CBlockIndex *pblockLast = chainActive[chainActive.Height() + 1 - target_confirms];
    uint256 lastblock = pblockLast ? pblockLast->GetBlockHash() : 0;
    Object ret;
    ret.push_back(Pair("transactions", transactions));
    ret.push_back(Pair("lastblock", lastblock.GetHex()));
    return ret;
}
// RPC: gettransaction - return detailed JSON for one in-wallet transaction:
// net amount, fee (if we sent it), metadata, per-output details, and the raw
// serialized transaction as hex.
// Fix: the help text's examples header was garbled ("\nbExamples\n");
// corrected to the standard "\nExamples:\n".
Value gettransaction(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 1)
        throw runtime_error(
            "gettransaction \"txid\"\n"
            "\nGet detailed information about in-wallet transaction <txid>\n"
            "\nArguments:\n"
            "1. \"txid\"    (string, required) The transaction id\n"
            "\nResult:\n"
            "{\n"
            "  \"amount\" : x.xxx,        (numeric) The transaction amount in btc\n"
            "  \"confirmations\" : n,     (numeric) The number of confirmations\n"
            "  \"blockhash\" : \"hash\",  (string) The block hash\n"
            "  \"blockindex\" : xx,       (numeric) The block index\n"
            "  \"blocktime\" : ttt,       (numeric) The time in seconds since epoch (1 Jan 1970 GMT)\n"
            "  \"txid\" : \"transactionid\",   (string) The transaction id, see also https://blockchain.info/tx/[transactionid]\n"
            "  \"time\" : ttt,            (numeric) The transaction time in seconds since epoch (1 Jan 1970 GMT)\n"
            "  \"timereceived\" : ttt,    (numeric) The time received in seconds since epoch (1 Jan 1970 GMT)\n"
            "  \"details\" : [\n"
            "    {\n"
            "      \"account\" : \"accountname\",  (string) The account name involved in the transaction, can be \"\" for the default account.\n"
            "      \"address\" : \"bitcoinaddress\",   (string) The bitcoin address involved in the transaction\n"
            "      \"category\" : \"send|receive\",    (string) The category, either 'send' or 'receive'\n"
            "      \"amount\" : x.xxx                  (numeric) The amount in btc\n"
            "    }\n"
            "    ,...\n"
            "  ],\n"
            "  \"hex\" : \"data\"         (string) Raw data for transaction\n"
            "}\n"
            "\nExamples:\n"
            + HelpExampleCli("gettransaction", "\"1075db55d416d3ca199f55b6084e2115b9345e16c5cf302fc80e9d5fbf5d48d\"")
            + HelpExampleRpc("gettransaction", "\"1075db55d416d3ca199f55b6084e2115b9345e16c5cf302fc80e9d5fbf5d48d\"")
        );
    uint256 hash;
    hash.SetHex(params[0].get_str());
    Object entry;
    if (!pwalletMain->mapWallet.count(hash))
        throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid or non-wallet transaction id");
    const CWalletTx& wtx = pwalletMain->mapWallet[hash];
    // Net effect on the wallet = credit - debit; the fee (when we are the
    // sender) is value-out minus our debit and is reported separately.
    int64_t nCredit = wtx.GetCredit();
    int64_t nDebit = wtx.GetDebit();
    int64_t nNet = nCredit - nDebit;
    int64_t nFee = (wtx.IsFromMe() ? wtx.GetValueOut() - nDebit : 0);
    entry.push_back(Pair("amount", ValueFromAmount(nNet - nFee)));
    if (wtx.IsFromMe())
        entry.push_back(Pair("fee", ValueFromAmount(nFee)));
    WalletTxToJSON(wtx, entry);
    // Per-output send/receive breakdown (short form, no duplicate metadata).
    Array details;
    ListTransactions(wtx, "*", 0, false, details);
    entry.push_back(Pair("details", details));
    // Serialize the underlying CTransaction and attach it as hex.
    CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
    ssTx << static_cast<CTransaction>(wtx);
    string strHex = HexStr(ssTx.begin(), ssTx.end());
    entry.push_back(Pair("hex", strHex));
    return entry;
}
// RPC: backupwallet - safely copy wallet.dat to the given directory or file
// path; throws RPC_WALLET_ERROR if the copy fails.
Value backupwallet(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 1)
        throw runtime_error(
            "backupwallet \"destination\"\n"
            "\nSafely copies wallet.dat to destination, which can be a directory or a path with filename.\n"
            "\nArguments:\n"
            "1. \"destination\"   (string) The destination directory or file\n"
            "\nExamples:\n"
            + HelpExampleCli("backupwallet", "\"backup.dat\"")
            + HelpExampleRpc("backupwallet", "\"backup.dat\"")
        );
    string strDest = params[0].get_str();
    if (!BackupWallet(*pwalletMain, strDest))
        throw JSONRPCError(RPC_WALLET_ERROR, "Error: Wallet backup failed!");
    // Success is signalled by a null JSON result.
    return Value::null;
}
// RPC: keypoolrefill - top up the wallet's pool of pre-generated keys,
// optionally to an explicit target size.  Requires an unlocked wallet.
Value keypoolrefill(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 1)
        throw runtime_error(
            "keypoolrefill ( newsize )\n"
            "\nFills the keypool."
            + HelpRequiringPassphrase() + "\n"
            "\nArguments\n"
            "1. newsize     (numeric, optional, default=100) The new keypool size\n"
            "\nExamples:\n"
            + HelpExampleCli("keypoolrefill", "")
            + HelpExampleRpc("keypoolrefill", "")
        );
    // 0 is interpreted by TopUpKeyPool() as the default keypool size given by -keypool
    unsigned int kpSize = 0;
    if (params.size() > 0) {
        // Reject negative sizes before the unsigned conversion below.
        if (params[0].get_int() < 0)
            throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected valid size.");
        kpSize = (unsigned int)params[0].get_int();
    }
    EnsureWalletIsUnlocked();
    pwalletMain->TopUpKeyPool(kpSize);
    // Verify the pool actually reached the requested size.
    if (pwalletMain->GetKeyPoolSize() < kpSize)
        throw JSONRPCError(RPC_WALLET_ERROR, "Error refreshing keypool.");
    return Value::null;
}
// Deferred callback scheduled by walletpassphrase (via RPCRunLater): once
// the requested unlock window elapses, relock the wallet and reset the
// published unlock deadline.  Both updates happen under cs_nWalletUnlockTime.
static void LockWallet(CWallet* pWallet)
{
    LOCK(cs_nWalletUnlockTime);
    pWallet->Lock();
    nWalletUnlockTime = 0;
}
// RPC: walletpassphrase - decrypt the wallet's private keys in memory for a
// limited number of seconds, scheduling an automatic relock afterwards.
Value walletpassphrase(const Array& params, bool fHelp)
{
    if (pwalletMain->IsCrypted() && (fHelp || params.size() != 2))
        throw runtime_error(
            "walletpassphrase \"passphrase\" timeout\n"
            "\nStores the wallet decryption key in memory for 'timeout' seconds.\n"
            "This is needed prior to performing transactions related to private keys such as sending bitcoins\n"
            "\nArguments:\n"
            "1. \"passphrase\"     (string, required) The wallet passphrase\n"
            "2. timeout            (numeric, required) The time to keep the decryption key in seconds.\n"
            "\nNote:\n"
            "Issuing the walletpassphrase command while the wallet is already unlocked will set a new unlock\n"
            "time that overrides the old one.\n"
            "\nExamples:\n"
            "\nunlock the wallet for 60 seconds\n"
            + HelpExampleCli("walletpassphrase", "\"my pass phrase\" 60") +
            "\nLock the wallet again (before 60 seconds)\n"
            + HelpExampleCli("walletlock", "") +
            "\nAs json rpc call\n"
            + HelpExampleRpc("walletpassphrase", "\"my pass phrase\", 60")
        );
    if (fHelp)
        return true;
    if (!pwalletMain->IsCrypted())
        throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletpassphrase was called.");
    // Note that the walletpassphrase is stored in params[0] which is not mlock()ed
    SecureString strWalletPass;
    strWalletPass.reserve(100);
    // TODO: get rid of this .c_str() by implementing SecureString::operator=(std::string)
    // Alternately, find a way to make params[0] mlock()'d to begin with.
    strWalletPass = params[0].get_str().c_str();
    if (strWalletPass.length() > 0)
    {
        if (!pwalletMain->Unlock(strWalletPass))
            throw JSONRPCError(RPC_WALLET_PASSPHRASE_INCORRECT, "Error: The wallet passphrase entered was incorrect.");
    }
    else
        throw runtime_error(
            "walletpassphrase <passphrase> <timeout>\n"
            "Stores the wallet decryption key in memory for <timeout> seconds.");
    pwalletMain->TopUpKeyPool();
    // Publish the relock deadline and schedule LockWallet to run after the
    // timeout; a later walletpassphrase call replaces this timer.
    int64_t nSleepTime = params[1].get_int64();
    LOCK(cs_nWalletUnlockTime);
    nWalletUnlockTime = GetTime() + nSleepTime;
    RPCRunLater("lockwallet", boost::bind(LockWallet, pwalletMain), nSleepTime);
    return Value::null;
}
// walletpassphrasechange RPC: re-encrypts the wallet's master key, replacing
// the old passphrase with a new one. Both passphrases must be non-empty.
Value walletpassphrasechange(const Array& params, bool fHelp)
{
    if (pwalletMain->IsCrypted() && (fHelp || params.size() != 2))
        throw runtime_error(
            "walletpassphrasechange \"oldpassphrase\" \"newpassphrase\"\n"
            "\nChanges the wallet passphrase from 'oldpassphrase' to 'newpassphrase'.\n"
            "\nArguments:\n"
            "1. \"oldpassphrase\" (string) The current passphrase\n"
            "2. \"newpassphrase\" (string) The new passphrase\n"
            "\nExamples:\n"
            + HelpExampleCli("walletpassphrasechange", "\"old one\" \"new one\"")
            + HelpExampleRpc("walletpassphrasechange", "\"old one\", \"new one\"")
        );
    if (fHelp)
        return true;
    if (!pwalletMain->IsCrypted())
        throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletpassphrasechange was called.");
    // TODO: get rid of these .c_str() calls by implementing SecureString::operator=(std::string)
    // Alternately, find a way to make params[0] mlock()'d to begin with.
    SecureString strOldWalletPass;
    strOldWalletPass.reserve(100);
    strOldWalletPass = params[0].get_str().c_str();
    SecureString strNewWalletPass;
    strNewWalletPass.reserve(100);
    strNewWalletPass = params[1].get_str().c_str();
    // Reject empty passphrases up front; ChangeWalletPassphrase validates the
    // old one against the stored master key.
    if (strOldWalletPass.length() < 1 || strNewWalletPass.length() < 1)
        throw runtime_error(
            "walletpassphrasechange <oldpassphrase> <newpassphrase>\n"
            "Changes the wallet passphrase from <oldpassphrase> to <newpassphrase>.");
    if (!pwalletMain->ChangeWalletPassphrase(strOldWalletPass, strNewWalletPass))
        throw JSONRPCError(RPC_WALLET_PASSPHRASE_INCORRECT, "Error: The wallet passphrase entered was incorrect.");
    return Value::null;
}
// walletlock RPC: discards the cached decryption key immediately, re-locking
// the wallet and zeroing any scheduled unlock deadline.
Value walletlock(const Array& params, bool fHelp)
{
    if (pwalletMain->IsCrypted() && (fHelp || params.size() != 0))
        throw runtime_error(
            "walletlock\n"
            "\nRemoves the wallet encryption key from memory, locking the wallet.\n"
            "After calling this method, you will need to call walletpassphrase again\n"
            "before being able to call any methods which require the wallet to be unlocked.\n"
            "\nExamples:\n"
            "\nSet the passphrase for 2 minutes to perform a transaction\n"
            + HelpExampleCli("walletpassphrase", "\"my pass phrase\" 120") +
            "\nPerform a send (requires passphrase set)\n"
            + HelpExampleCli("sendtoaddress", "\"1M72Sfpbz1BPpXFHz9m3CdqATR44Jvaydd\" 1.0") +
            "\nClear the passphrase since we are done before 2 minutes is up\n"
            + HelpExampleCli("walletlock", "") +
            "\nAs json rpc call\n"
            + HelpExampleRpc("walletlock", "")
        );
    if (fHelp)
        return true;
    if (!pwalletMain->IsCrypted())
        throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletlock was called.");
    {
        // Hold cs_nWalletUnlockTime while locking and clearing the deadline so
        // the deferred LockWallet timer observes a consistent state.
        LOCK(cs_nWalletUnlockTime);
        pwalletMain->Lock();
        nWalletUnlockTime = 0;
    }
    return Value::null;
}
// encryptwallet RPC: first-time encryption of an unencrypted wallet.
// On success the node is shut down deliberately (see comment near the end)
// and must be restarted to run with the encrypted wallet.
Value encryptwallet(const Array& params, bool fHelp)
{
    if (!pwalletMain->IsCrypted() && (fHelp || params.size() != 1))
        throw runtime_error(
            "encryptwallet \"passphrase\"\n"
            "\nEncrypts the wallet with 'passphrase'. This is for first time encryption.\n"
            "After this, any calls that interact with private keys such as sending or signing \n"
            "will require the passphrase to be set prior the making these calls.\n"
            "Use the walletpassphrase call for this, and then walletlock call.\n"
            "If the wallet is already encrypted, use the walletpassphrasechange call.\n"
            "Note that this will shutdown the server.\n"
            "\nArguments:\n"
            "1. \"passphrase\" (string) The pass phrase to encrypt the wallet with. It must be at least 1 character, but should be long.\n"
            "\nExamples:\n"
            "\nEncrypt you wallet\n"
            + HelpExampleCli("encryptwallet", "\"my pass phrase\"") +
            "\nNow set the passphrase to use the wallet, such as for signing or sending bitcoin\n"
            + HelpExampleCli("walletpassphrase", "\"my pass phrase\"") +
            "\nNow we can so something like sign\n"
            + HelpExampleCli("signmessage", "\"bitcoinaddress\" \"test message\"") +
            "\nNow lock the wallet again by removing the passphrase\n"
            + HelpExampleCli("walletlock", "") +
            "\nAs a json rpc call\n"
            + HelpExampleRpc("encryptwallet", "\"my pass phrase\"")
        );
    if (fHelp)
        return true;
    if (pwalletMain->IsCrypted())
        throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an encrypted wallet, but encryptwallet was called.");
    // TODO: get rid of this .c_str() by implementing SecureString::operator=(std::string)
    // Alternately, find a way to make params[0] mlock()'d to begin with.
    SecureString strWalletPass;
    strWalletPass.reserve(100);
    strWalletPass = params[0].get_str().c_str();
    if (strWalletPass.length() < 1)
        throw runtime_error(
            "encryptwallet <passphrase>\n"
            "Encrypts the wallet with <passphrase>.");
    if (!pwalletMain->EncryptWallet(strWalletPass))
        throw JSONRPCError(RPC_WALLET_ENCRYPTION_FAILED, "Error: Failed to encrypt the wallet.");
    // BDB seems to have a bad habit of writing old data into
    // slack space in .dat files; that is bad if the old data is
    // unencrypted private keys. So:
    StartShutdown();
    return "wallet encrypted; Bitcoin server stopping, restart to run with encrypted wallet. The keypool has been flushed, you need to make a new backup.";
}
// lockunspent RPC: marks outputs as temporarily unspendable (or spendable
// again) for automatic coin selection. Locks are process-local, in-memory
// only, and cleared on shutdown. The first parameter is "unlock": false
// locks the listed outputs, true unlocks them; unlock=true with no output
// list clears every lock at once.
Value lockunspent(const Array& params, bool fHelp)
{
    if (fHelp || params.size() < 1 || params.size() > 2)
        throw runtime_error(
            "lockunspent unlock [{\"txid\":\"txid\",\"vout\":n},...]\n"
            "\nUpdates list of temporarily unspendable outputs.\n"
            // Fixed help text: the parameter is "unlock", so lock happens when
            // unlock=false (the previous wording inverted the semantics).
            "Temporarily lock (unlock=false) or unlock (unlock=true) specified transaction outputs.\n"
            "A locked transaction output will not be chosen by automatic coin selection, when spending bitcoins.\n"
            "Locks are stored in memory only. Nodes start with zero locked outputs, and the locked output list\n"
            "is always cleared (by virtue of process exit) when a node stops or fails.\n"
            "Also see the listunspent call\n"
            "\nArguments:\n"
            "1. unlock (boolean, required) Whether to unlock (true) or lock (false) the specified transactions\n"
            // The output list may be omitted (params.size() == 1 is accepted
            // below), so it is documented as optional rather than required.
            "2. \"transactions\" (string, optional) A json array of objects. Each object the txid (string) vout (numeric)\n"
            " [ (json array of json objects)\n"
            " {\n"
            " \"txid\":\"id\", (string) The transaction id\n"
            " \"vout\": n (numeric) The output number\n"
            " }\n"
            " ,...\n"
            " ]\n"
            "\nResult:\n"
            "true|false (boolean) Whether the command was successful or not\n"
            "\nExamples:\n"
            "\nList the unspent transactions\n"
            + HelpExampleCli("listunspent", "") +
            "\nLock an unspent transaction\n"
            + HelpExampleCli("lockunspent", "false \"[{\\\"txid\\\":\\\"a08e6907dbbd3d809776dbfc5d82e371b764ed838b5655e72f463568df1aadf0\\\",\\\"vout\\\":1}]\"") +
            "\nList the locked transactions\n"
            + HelpExampleCli("listlockunspent", "") +
            "\nUnlock the transaction again\n"
            + HelpExampleCli("lockunspent", "true \"[{\\\"txid\\\":\\\"a08e6907dbbd3d809776dbfc5d82e371b764ed838b5655e72f463568df1aadf0\\\",\\\"vout\\\":1}]\"") +
            "\nAs a json rpc call\n"
            + HelpExampleRpc("lockunspent", "false, \"[{\\\"txid\\\":\\\"a08e6907dbbd3d809776dbfc5d82e371b764ed838b5655e72f463568df1aadf0\\\",\\\"vout\\\":1}]\"")
        );
    if (params.size() == 1)
        RPCTypeCheck(params, list_of(bool_type));
    else
        RPCTypeCheck(params, list_of(bool_type)(array_type));
    bool fUnlock = params[0].get_bool();
    // Without an output list, only "unlock everything" is meaningful; a bare
    // lock request is accepted but is a no-op.
    if (params.size() == 1) {
        if (fUnlock)
            pwalletMain->UnlockAllCoins();
        return true;
    }
    Array outputs = params[1].get_array();
    BOOST_FOREACH(Value& output, outputs)
    {
        if (output.type() != obj_type)
            throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected object");
        const Object& o = output.get_obj();
        RPCTypeCheck(o, map_list_of("txid", str_type)("vout", int_type));
        string txid = find_value(o, "txid").get_str();
        if (!IsHex(txid))
            throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected hex txid");
        int nOutput = find_value(o, "vout").get_int();
        if (nOutput < 0)
            throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, vout must be positive");
        COutPoint outpt(uint256(txid), nOutput);
        if (fUnlock)
            pwalletMain->UnlockCoin(outpt);
        else
            pwalletMain->LockCoin(outpt);
    }
    return true;
}
// listlockunspent RPC: reports every outpoint currently locked via the
// lockunspent call, as an array of {txid, vout} objects.
Value listlockunspent(const Array& params, bool fHelp)
{
    if (fHelp || params.size() > 0)
        throw runtime_error(
            "listlockunspent\n"
            "\nReturns list of temporarily unspendable outputs.\n"
            "See the lockunspent call to lock and unlock transactions for spending.\n"
            "\nResult:\n"
            "[\n"
            " {\n"
            " \"txid\" : \"transactionid\", (string) The transaction id locked\n"
            " \"vout\" : n (numeric) The vout value\n"
            " }\n"
            " ,...\n"
            "]\n"
            "\nExamples:\n"
            "\nList the unspent transactions\n"
            + HelpExampleCli("listunspent", "") +
            "\nLock an unspent transaction\n"
            + HelpExampleCli("lockunspent", "false \"[{\\\"txid\\\":\\\"a08e6907dbbd3d809776dbfc5d82e371b764ed838b5655e72f463568df1aadf0\\\",\\\"vout\\\":1}]\"") +
            "\nList the locked transactions\n"
            + HelpExampleCli("listlockunspent", "") +
            "\nUnlock the transaction again\n"
            + HelpExampleCli("lockunspent", "true \"[{\\\"txid\\\":\\\"a08e6907dbbd3d809776dbfc5d82e371b764ed838b5655e72f463568df1aadf0\\\",\\\"vout\\\":1}]\"") +
            "\nAs a json rpc call\n"
            + HelpExampleRpc("listlockunspent", "")
        );
    // Snapshot the wallet's locked outpoints, then serialize each one.
    vector<COutPoint> vLockedOutputs;
    pwalletMain->ListLockedCoins(vLockedOutputs);
    Array result;
    BOOST_FOREACH(COutPoint &lockedOutput, vLockedOutputs) {
        Object entry;
        entry.push_back(Pair("txid", lockedOutput.hash.GetHex()));
        entry.push_back(Pair("vout", (int)lockedOutput.n));
        result.push_back(entry);
    }
    return result;
}
// settxfee RPC: sets the global per-kilobyte transaction fee.
// A 0.0 amount resets the fee to zero without calling AmountFromValue
// (which deliberately rejects 0.0).
Value settxfee(const Array& params, bool fHelp)
{
    // Simplified the original redundant "size() < 1 || size() > 1" check.
    if (fHelp || params.size() != 1)
        throw runtime_error(
            "settxfee amount\n"
            "\nSet the transaction fee per kB.\n"
            "\nArguments:\n"
            "1. amount (numeric, required) The transaction fee in BTC/kB rounded to the nearest 0.00000001\n"
            "\nResult\n"
            "true|false (boolean) Returns true if successful\n"
            "\nExamples:\n"
            + HelpExampleCli("settxfee", "0.00001")
            + HelpExampleRpc("settxfee", "0.00001")
        );
    // Amount
    int64_t nAmount = 0;
    if (params[0].get_real() != 0.0)
        nAmount = AmountFromValue(params[0]); // rejects 0.0 amounts
    nTransactionFee = nAmount;
    return true;
}
// getwalletinfo RPC: returns a JSON object with wallet state (version,
// balance, transaction count, keypool stats, and -- for encrypted wallets
// only -- the unlocked_until deadline).
Value getwalletinfo(const Array& params, bool fHelp)
{
    if (fHelp || params.size() != 0)
        throw runtime_error(
            "getwalletinfo\n"
            "Returns an object containing various wallet state info.\n"
            "\nResult:\n"
            "{\n"
            " \"walletversion\": xxxxx, (numeric) the wallet version\n"
            " \"balance\": xxxxxxx, (numeric) the total bitcoin balance of the wallet\n"
            " \"txcount\": xxxxxxx, (numeric) the total number of transactions in the wallet\n"
            " \"keypoololdest\": xxxxxx, (numeric) the timestamp (seconds since GMT epoch) of the oldest pre-generated key in the key pool\n"
            " \"keypoolsize\": xxxx, (numeric) how many new keys are pre-generated\n"
            " \"unlocked_until\": ttt, (numeric) the timestamp in seconds since epoch (midnight Jan 1 1970 GMT) that the wallet is unlocked for transfers, or 0 if the wallet is locked\n"
            "}\n"
            "\nExamples:\n"
            + HelpExampleCli("getwalletinfo", "")
            + HelpExampleRpc("getwalletinfo", "")
        );
    Object obj;
    obj.push_back(Pair("walletversion", pwalletMain->GetVersion()));
    obj.push_back(Pair("balance", ValueFromAmount(pwalletMain->GetBalance())));
    obj.push_back(Pair("txcount", (int)pwalletMain->mapWallet.size()));
    obj.push_back(Pair("keypoololdest", (boost::int64_t)pwalletMain->GetOldestKeyPoolTime()));
    obj.push_back(Pair("keypoolsize", (int)pwalletMain->GetKeyPoolSize()));
    // unlocked_until only makes sense for encrypted wallets, so it is omitted
    // entirely when the wallet has never been encrypted.
    if (pwalletMain->IsCrypted())
        obj.push_back(Pair("unlocked_until", (boost::int64_t)nWalletUnlockTime));
    return obj;
}
| mit |
iamnader/mongoid-2 | spec/functional/mongoid/versioning_spec.rb | 6113 | require "spec_helper"
# Functional specs for Mongoid::Versioning.
#
# Covers: the version counter on new/persisted documents, creation and
# trimming of the embedded versions collection (max-versions cap),
# versionless saves, dependent-relation safety when deleting versions,
# self-referencing children, and behaviour under the identity map.
describe Mongoid::Versioning do

  # Reset collections so every example starts from version 1.
  before do
    [ WikiPage, Comment ].each(&:delete_all)
  end

  describe "#version" do

    context "when the document is new" do

      it "returns 1" do
        WikiPage.new.version.should eq(1)
      end
    end

    context "when the document is persisted once" do

      let(:page) do
        WikiPage.create(:title => "1")
      end

      it "returns 1" do
        page.version.should eq(1)
      end
    end

    context "when the document is persisted more than once" do

      let(:page) do
        WikiPage.create(:title => "1")
      end

      before do
        3.times { |n| page.update_attribute(:title, "#{n}") }
      end

      it "returns the number of versions" do
        page.version.should eq(4)
      end
    end

    context "when maximum versions is defined" do

      let(:page) do
        WikiPage.create(:title => "1")
      end

      # The counter keeps growing even though the stored versions collection
      # is capped at the model's max-versions setting (see #versions specs).
      context "when saving over the max versions limit" do

        before do
          10.times { |n| page.update_attribute(:title, "#{n}") }
        end

        it "returns the number of versions" do
          page.version.should eq(11)
        end
      end
    end

    context "when performing versionless saves" do

      let(:page) do
        WikiPage.create(:title => "1")
      end

      before do
        10.times do |n|
          page.versionless { |doc| doc.update_attribute(:title, "#{n}") }
        end
      end

      it "does not increment the version number" do
        page.version.should eq(1)
      end
    end
  end

  describe "#versions" do

    let(:page) do
      WikiPage.create(:title => "1")
    end

    context "when saving the document " do

      context "when the document has changed" do

        before do
          page.update_attribute(:title, "2")
        end

        # The stored version snapshots the document *before* the change.
        let(:version) do
          page.versions.first
        end

        it "creates a new version" do
          version.title.should eq("1")
        end

        it "only creates 1 new version" do
          page.versions.count.should eq(1)
        end

        it "does not version the _id" do
          version._id.should be_nil
        end

        it "does not version the updated_at timestamp" do
          version.updated_at.should be_nil
        end
      end

      context "when the document has not changed" do

        before do
          page.save
        end

        let(:version) do
          page.versions.first
        end

        it "does not create a new version" do
          version.should be_nil
        end
      end

      context "when saving over the number of maximum versions" do

        context "when saving in succession" do

          before do
            10.times do |n|
              page.update_attribute(:title, "#{n}")
            end
          end

          let(:versions) do
            page.versions
          end

          it "only versions the maximum amount" do
            versions.count.should eq(5)
          end

          # Oldest versions are shifted off the front as new ones arrive.
          it "shifts the versions in order" do
            versions.last.title.should eq("8")
          end

          it "persists the version shifts" do
            page.reload.versions.last.title.should eq("8")
          end
        end

        context "when saving in batches" do

          before do
            2.times do
              5.times do |n|
                WikiPage.find(page.id).update_attributes(:title => "#{n}")
              end
            end
          end

          let(:from_db) do
            WikiPage.find(page.id)
          end

          let(:versions) do
            from_db.versions
          end

          it "only versions the maximum amount" do
            versions.count.should eq(5)
          end
        end
      end

      context "when persisting versionless" do

        before do
          page.versionless { |doc| doc.update_attribute(:title, "2") }
        end

        it "does not version the document" do
          page.versions.count.should eq(0)
        end
      end

      context "when deleting versions" do

        let(:comment) do
          Comment.new(:title => "Don't delete me!")
        end

        # A document outside the page's relations, proving deletes do not
        # cascade beyond the versions collection.
        let!(:orphaned) do
          Comment.create(:title => "Annie")
        end

        before do
          page.comments << comment
          page.update_attribute(:title, "5")
        end

        context "when the version had a dependent relation" do

          before do
            page.versions.delete_all
          end

          let(:from_db) do
            Comment.find(comment.id)
          end

          it "does not perform dependent cascading" do
            from_db.should eq(comment)
          end

          it "does not delete related orphans" do
            Comment.find(orphaned.id).should eq(orphaned)
          end

          it "deletes the version" do
            page.versions.should be_empty
          end

          it "persists the deletion" do
            page.reload.versions.should be_empty
          end

          it "retains the root relation" do
            page.reload.comments.should eq([ comment ])
          end
        end
      end
    end
  end

  context "when appending a self referencing document with versions" do

    let(:page) do
      WikiPage.create(:title => "1")
    end

    let(:child) do
      WikiPage.new
    end

    before do
      page.child_pages << child
    end

    it "allows the document to be added" do
      page.child_pages.should eq([ child ])
    end
  end

  context "when the identity map is enabled" do

    before do
      Mongoid.identity_map_enabled = true
    end

    after do
      Mongoid.identity_map_enabled = false
    end

    context "when updating a loaded attribute" do

      let!(:page) do
        WikiPage.create(:title => "first")
      end

      let!(:loaded) do
        WikiPage.find(page.id)
      end

      before do
        loaded.update_attribute(:title, "revised")
      end

      let(:reloaded) do
        WikiPage.find(page.id)
      end

      it "returns the revised im memory document" do
        reloaded.title.should eq("revised")
      end
    end
  end
end
| mit |
hyonholee/azure-sdk-for-net | sdk/storagesync/Microsoft.Azure.Management.StorageSync/tests/Tests/SyncGroupTests.cs | 10243 | using Microsoft.Azure.Management.ResourceManager;
using Microsoft.Azure.Management.Storage;
using Microsoft.Azure.Management.Storage.Models;
using Microsoft.Azure.Management.StorageSync.Models;
using Microsoft.Azure.Management.Tests.Common;
using Microsoft.Rest.ClientRuntime.Azure.TestFramework;
using System.Collections.Generic;
using System.Net;
using Xunit;
using System.Linq;
namespace Microsoft.Azure.Management.StorageSync.Tests
{
    /// <summary>
    /// End-to-end tests for the SyncGroup resource: create, get, list and
    /// delete, each run against a freshly provisioned resource group and
    /// storage sync service (recorded via MockContext). Every test tears
    /// down the resources it created.
    /// </summary>
    public class SyncGroupTests
    {
        /// <summary>Creates a sync group and verifies its properties.</summary>
        [Fact]
        public void SyncGroupCreateTest()
        {
            var handler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                IResourceManagementClient resourcesClient = StorageSyncManagementTestUtilities.GetResourceManagementClient(context, handler);
                IStorageSyncManagementClient storageSyncManagementClient = StorageSyncManagementTestUtilities.GetStorageSyncManagementClient(context, handler);
                // Create ResourceGroup
                string resourceGroupName = StorageSyncManagementTestUtilities.CreateResourceGroup(resourcesClient);
                // Create SyncGroup Name
                string storageSyncServiceName = TestUtilities.GenerateName("ssscreate-sgcreate");
                string syncGroupName = TestUtilities.GenerateName("sgcreate");
                var storageSyncServiceParameters = StorageSyncManagementTestUtilities.GetDefaultStorageSyncServiceParameters();
                var syncGroupParameters = StorageSyncManagementTestUtilities.GetDefaultSyncGroupParameters();
                StorageSyncService storageSyncServiceResource = storageSyncManagementClient.StorageSyncServices.Create(resourceGroupName, storageSyncServiceName, storageSyncServiceParameters);
                Assert.NotNull(storageSyncServiceResource);
                StorageSyncManagementTestUtilities.VerifyStorageSyncServiceProperties(storageSyncServiceResource, true);
                SyncGroup syncGroupResource = storageSyncManagementClient.SyncGroups.Create(resourceGroupName, storageSyncServiceResource.Name, syncGroupName, syncGroupParameters);
                StorageSyncManagementTestUtilities.VerifySyncGroupProperties(syncGroupResource, true);
                // Cleanup: sync group, service, then resource group.
                storageSyncManagementClient.SyncGroups.Delete(resourceGroupName, storageSyncServiceResource.Name, syncGroupName);
                storageSyncManagementClient.StorageSyncServices.Delete(resourceGroupName, storageSyncServiceResource.Name);
                StorageSyncManagementTestUtilities.RemoveResourceGroup(resourcesClient, resourceGroupName);
            }
        }

        /// <summary>Creates a sync group, then fetches it by name and verifies the round-trip.</summary>
        [Fact]
        public void SyncGroupGetTest()
        {
            var handler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                IResourceManagementClient resourcesClient = StorageSyncManagementTestUtilities.GetResourceManagementClient(context, handler);
                IStorageSyncManagementClient storageSyncManagementClient = StorageSyncManagementTestUtilities.GetStorageSyncManagementClient(context, handler);
                // Create ResourceGroup
                string resourceGroupName = StorageSyncManagementTestUtilities.CreateResourceGroup(resourcesClient);
                // Create SyncGroup Name
                string storageSyncServiceName = TestUtilities.GenerateName("ssscreate-sgget");
                string syncGroupName = TestUtilities.GenerateName("sgget");
                var storageSyncServiceParameters = StorageSyncManagementTestUtilities.GetDefaultStorageSyncServiceParameters();
                var syncGroupParameters = StorageSyncManagementTestUtilities.GetDefaultSyncGroupParameters();
                StorageSyncService storageSyncServiceResource = storageSyncManagementClient.StorageSyncServices.Create(resourceGroupName, storageSyncServiceName, storageSyncServiceParameters);
                Assert.NotNull(storageSyncServiceResource);
                StorageSyncManagementTestUtilities.VerifyStorageSyncServiceProperties(storageSyncServiceResource, true);
                SyncGroup syncGroupResource = storageSyncManagementClient.SyncGroups.Create(resourceGroupName, storageSyncServiceResource.Name, syncGroupName, syncGroupParameters);
                syncGroupResource = storageSyncManagementClient.SyncGroups.Get(resourceGroupName, storageSyncServiceResource.Name, syncGroupName);
                StorageSyncManagementTestUtilities.VerifySyncGroupProperties(syncGroupResource, false);
                storageSyncManagementClient.SyncGroups.Delete(resourceGroupName, storageSyncServiceResource.Name, syncGroupName);
                storageSyncManagementClient.StorageSyncServices.Delete(resourceGroupName, storageSyncServiceResource.Name);
                StorageSyncManagementTestUtilities.RemoveResourceGroup(resourcesClient, resourceGroupName);
            }
        }

        /// <summary>Creates one sync group and verifies the list call returns exactly it.</summary>
        [Fact]
        public void SyncGroupListTest()
        {
            var handler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                IResourceManagementClient resourcesClient = StorageSyncManagementTestUtilities.GetResourceManagementClient(context, handler);
                IStorageSyncManagementClient storageSyncManagementClient = StorageSyncManagementTestUtilities.GetStorageSyncManagementClient(context, handler);
                // Create ResourceGroup
                string resourceGroupName = StorageSyncManagementTestUtilities.CreateResourceGroup(resourcesClient);
                // Create SyncGroup Name
                string storageSyncServiceName = TestUtilities.GenerateName("ssscreate-sglist");
                string syncGroupName = TestUtilities.GenerateName("sglist");
                var storageSyncServiceParameters = StorageSyncManagementTestUtilities.GetDefaultStorageSyncServiceParameters();
                var syncGroupParameters = StorageSyncManagementTestUtilities.GetDefaultSyncGroupParameters();
                StorageSyncService storageSyncServiceResource = storageSyncManagementClient.StorageSyncServices.Create(resourceGroupName, storageSyncServiceName, storageSyncServiceParameters);
                Assert.NotNull(storageSyncServiceResource);
                StorageSyncManagementTestUtilities.VerifyStorageSyncServiceProperties(storageSyncServiceResource, true);
                SyncGroup syncGroupResource = storageSyncManagementClient.SyncGroups.Create(resourceGroupName, storageSyncServiceResource.Name, syncGroupName, syncGroupParameters);
                IEnumerable<SyncGroup> syncGroupResources = storageSyncManagementClient.SyncGroups.ListByStorageSyncService(resourceGroupName, storageSyncServiceResource.Name);
                Assert.NotNull(syncGroupResources);
                Assert.Single(syncGroupResources);
                StorageSyncManagementTestUtilities.VerifySyncGroupProperties(syncGroupResources.First(), false);
                storageSyncManagementClient.SyncGroups.Delete(resourceGroupName, storageSyncServiceResource.Name, syncGroupName);
                storageSyncManagementClient.StorageSyncServices.Delete(resourceGroupName, storageSyncServiceResource.Name);
                StorageSyncManagementTestUtilities.RemoveResourceGroup(resourcesClient, resourceGroupName);
            }
        }

        /// <summary>
        /// Verifies delete is idempotent: deleting a sync group that does not
        /// exist (before creation and repeatedly after deletion) succeeds.
        /// </summary>
        [Fact]
        public void SyncGroupDeleteTest()
        {
            var handler = new RecordedDelegatingHandler { StatusCodeToReturn = HttpStatusCode.OK };
            using (MockContext context = MockContext.Start(this.GetType()))
            {
                IResourceManagementClient resourcesClient = StorageSyncManagementTestUtilities.GetResourceManagementClient(context, handler);
                IStorageSyncManagementClient storageSyncManagementClient = StorageSyncManagementTestUtilities.GetStorageSyncManagementClient(context, handler);
                // Create ResourceGroup
                string resourceGroupName = StorageSyncManagementTestUtilities.CreateResourceGroup(resourcesClient);
                // Create SyncGroup Name
                string storageSyncServiceName = TestUtilities.GenerateName("sss-sgdelete");
                string syncGroupName = TestUtilities.GenerateName("sgdelete");
                var storageSyncServiceParameters = StorageSyncManagementTestUtilities.GetDefaultStorageSyncServiceParameters();
                var syncGroupParameters = StorageSyncManagementTestUtilities.GetDefaultSyncGroupParameters();
                StorageSyncService storageSyncServiceResource = storageSyncManagementClient.StorageSyncServices.Create(resourceGroupName, storageSyncServiceName, storageSyncServiceParameters);
                Assert.NotNull(storageSyncServiceResource);
                StorageSyncManagementTestUtilities.VerifyStorageSyncServiceProperties(storageSyncServiceResource, true);
                // Delete SyncGroup which does not exists
                storageSyncManagementClient.SyncGroups.Delete(resourceGroupName, storageSyncServiceResource.Name, syncGroupName);
                SyncGroup syncGroupResource = storageSyncManagementClient.SyncGroups.Create(resourceGroupName, storageSyncServiceResource.Name, syncGroupName, syncGroupParameters);
                // Delete SyncGroup
                storageSyncManagementClient.SyncGroups.Delete(resourceGroupName, storageSyncServiceResource.Name, syncGroupName);
                // Delete SyncGroup which was just deleted
                storageSyncManagementClient.SyncGroups.Delete(resourceGroupName, storageSyncServiceResource.Name, syncGroupName);
                storageSyncManagementClient.SyncGroups.Delete(resourceGroupName, storageSyncServiceResource.Name, syncGroupName);
                storageSyncManagementClient.StorageSyncServices.Delete(resourceGroupName, storageSyncServiceResource.Name);
                StorageSyncManagementTestUtilities.RemoveResourceGroup(resourcesClient, resourceGroupName);
            }
        }
    }
}
| mit |
thenexus6/specinfra | lib/specinfra/command/base/cron.rb | 439 | class Specinfra::Command::Base::Cron < Specinfra::Command::Base
class << self
def check_has_entry(user, entry)
entry_escaped = entry.gsub(/\*/, '\\*').gsub(/\[/, '\\[').gsub(/\]/, '\\]')
grep_command = "grep -v '^[[:space:]]*#' | grep -- ^#{escape(entry_escaped)}$"
if user.nil?
"crontab -l | #{grep_command}"
else
"crontab -u #{escape(user)} -l | #{grep_command}"
end
end
end
end
| mit |
derrabus/symfony | src/Symfony/Component/Form/Util/OrderedHashMap.php | 4516 | <?php
/*
* This file is part of the Symfony package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Component\Form\Util;
/**
* A hash map which keeps track of deletions and additions.
*
* Like in associative arrays, elements can be mapped to integer or string keys.
* Unlike associative arrays, the map keeps track of the order in which keys
* were added and removed. This order is reflected during iteration.
*
* The map supports concurrent modification during iteration. That means that
* you can insert and remove elements from within a foreach loop and the
* iterator will reflect those changes accordingly.
*
* While elements that are added during the loop are recognized by the iterator,
* changed elements are not. Otherwise the loop could be infinite if each loop
* changes the current element:
*
* $map = new OrderedHashMap();
* $map[1] = 1;
* $map[2] = 2;
* $map[3] = 3;
*
* foreach ($map as $index => $value) {
* echo "$index: $value\n"
* if (1 === $index) {
* $map[1] = 4;
* $map[] = 5;
* }
* }
*
* print_r(iterator_to_array($map));
*
* // => 1: 1
* // 2: 2
* // 3: 3
* // 4: 5
* // Array
* // (
* // [1] => 4
* // [2] => 2
* // [3] => 3
* // [4] => 5
* // )
*
* The map also supports multiple parallel iterators. That means that you can
* nest foreach loops without affecting each other's iteration:
*
* foreach ($map as $index => $value) {
* foreach ($map as $index2 => $value2) {
* // ...
* }
* }
*
* @author Bernhard Schussek <bschussek@gmail.com>
*
* @template TKey of array-key
* @template TValue
*
* @implements \ArrayAccess<TKey, TValue>
* @implements \IteratorAggregate<TKey, TValue>
*/
class OrderedHashMap implements \ArrayAccess, \IteratorAggregate, \Countable
{
    /**
     * The elements of the map, indexed by their keys.
     *
     * @var array<TKey, TValue>
     */
    private array $elements = [];

    /**
     * The keys of the map in the order in which they were inserted or changed.
     *
     * @var list<TKey>
     */
    private array $orderedKeys = [];

    /**
     * References to the cursors of all open iterators.
     *
     * @var array<int, int>
     */
    private array $managedCursors = [];

    /**
     * Creates a new map.
     *
     * @param array<TKey, TValue> $elements The elements to insert initially
     */
    public function __construct(array $elements = [])
    {
        $this->elements = $elements;
        $this->orderedKeys = array_keys($elements);
    }

    /**
     * Returns whether an element exists under the given key.
     */
    public function offsetExists(mixed $key): bool
    {
        return isset($this->elements[$key]);
    }

    /**
     * Returns the element stored under the given key.
     *
     * @throws \OutOfBoundsException when no element exists under the key
     */
    public function offsetGet(mixed $key): mixed
    {
        if (!isset($this->elements[$key])) {
            throw new \OutOfBoundsException(sprintf('The offset "%s" does not exist.', $key));
        }
        return $this->elements[$key];
    }

    /**
     * Stores an element. A null key appends, imitating PHP's $array[] = $value
     * key generation; new keys are recorded in insertion order.
     */
    public function offsetSet(mixed $key, mixed $value): void
    {
        if (null === $key || !isset($this->elements[$key])) {
            if (null === $key) {
                $key = [] === $this->orderedKeys
                    // If the array is empty, use 0 as key
                    ? 0
                    // Imitate PHP behavior of generating a key that equals
                    // the highest existing integer key + 1
                    : 1 + (int) max($this->orderedKeys);
            }
            // Keys are tracked as strings; PHP normalizes numeric strings back
            // to ints when used as array keys, so lookups still match.
            $this->orderedKeys[] = (string) $key;
        }
        $this->elements[$key] = $value;
    }

    /**
     * Removes the element under the given key and shifts the cursors of all
     * open iterators back past the removed position, so concurrent iteration
     * stays consistent (see the class-level docblock).
     */
    public function offsetUnset(mixed $key): void
    {
        if (false !== ($position = array_search((string) $key, $this->orderedKeys))) {
            array_splice($this->orderedKeys, $position, 1);
            unset($this->elements[$key]);
            foreach ($this->managedCursors as $i => $cursor) {
                if ($cursor >= $position) {
                    --$this->managedCursors[$i];
                }
            }
        }
    }

    /**
     * Returns an iterator whose cursor is registered in $managedCursors so
     * removals can adjust it while iteration is in progress.
     */
    public function getIterator(): \Traversable
    {
        return new OrderedHashMapIterator($this->elements, $this->orderedKeys, $this->managedCursors);
    }

    /**
     * Returns the number of elements in the map.
     */
    public function count(): int
    {
        return \count($this->elements);
    }
}
| mit |
Teino1978-Corp/Teino1978-Corp-cdnjs | auto-update.js | 10355 | #!/usr/bin/env node
var path = require("path"),
assert = require("assert"),
fs = require("fs-extra"),
glob = require("glob"),
_ = require('lodash'),
request = require("superagent"),
async = require("async"),
tarball = require('tarball-extract'),
colors = require('colors'),
isThere = require("is-there"),
libMatch = '*',
stable = require('semver-stable'),
semver = require('semver');
// Scratch directory for downloaded tarballs: prefer the RAM-backed tmpfs at
// /run/shm when the host provides it, otherwise fall back to ./temp.
// Declared with `var` so it is an explicit module-level global instead of the
// previous implicit (undeclared) global assignment.
var tempDirPath;
if(!fs.existsSync('/run/shm')) {
    tempDirPath = path.join(__dirname, 'temp');
} else {
    fs.mkdirsSync('/run/shm/cdnjs_NPM_temp');
    tempDirPath = '/run/shm/cdnjs_NPM_temp';
}

// Color theme used for console status output (msg.prompt, msg.error, ...).
colors.setTheme({
    prompt: 'cyan',
    info: 'grey',
    success: 'green',
    warn: 'yellow',
    error: 'red'
});

// Running total of library versions imported during this run.
var newVersionCount = 0;
var parse = function (json_file, ignore_missing, ignore_parse_fail) {
var content;
try {
content = fs.readFileSync(json_file, 'utf8');
} catch (err1) {
if (!ignore_missing) {
assert.ok(0, json_file + " doesn't exist!");
}
return null;
}
try {
return JSON.parse(content);
} catch (err2) {
if (!ignore_parse_fail) {
//assert.ok(0, json_file + " failed to parse");
}
return null;
}
}
// Backslash-escape every regex metacharacter in str so it can be embedded
// verbatim inside a RegExp source string.
var reEscape = function(str){
    return str.replace(/[-\/\\^$*+?.()|[\]{}]/g, function(ch){ return '\\' + ch; });
}
/**
 * Validate a package's npmFileMap so it cannot reach outside its own
 * extracted tree. Every basePath and file glob must already be in normalized
 * form: a path that changes under path.normalize() (e.g. one containing ".."
 * or "." segments) is rejected.
 * @param pkg - cdnjs package descriptor; only pkg.npmFileMap is inspected
 * @returns {*} true only when pkg has an npmFileMap and every entry is safe
 */
var isValidFileMap = function(pkg){
    var isValidPath = function(p){
        if(p !== null){ //don't allow parent dir access, or tricky paths
            p = p.replace(/\/+/g, '/'); //dont penalize for consecutive path separators
            return p === path.normalize(p);
        }
        return false
    };
    if(pkg && pkg.npmFileMap){
        // Every file spec must have a safe basePath (default "/") AND only
        // safe file globs; a single bad entry rejects the whole map.
        return _.every(pkg.npmFileMap, function(fileSpec){
            if(isValidPath(fileSpec.basePath || "/")){
                return _.every(fileSpec.files, isValidPath);
            }
            return false;
        });
    }
    return false
};
var error = function(msg, name){
var err = new Error(msg);
err.name = name;
console.log(msg.error);
return err;
}
error.PKG_NAME = 'BadPackageName'
error.FILE_PATH = 'BadFilePath'
/**
 * Build a predicate that checks filesystem paths stay inside libPath.
 * The returned function takes N path arguments and yields true only when
 * every argument, once normalized, still begins with the normalized libPath.
 * Used to stop npmFileMap entries from reading/writing outside the lib dir.
 * NOTE(review): the check is a string-prefix match, so a sibling directory
 * sharing the prefix (e.g. "/lib2" vs "/lib") would pass -- confirm callers
 * always pass a root ending in a separator.
 */
var isAllowedPathFn = function(libPath){ //is path within the lib dir? if not, they shouldnt be writing/reading there
    libPath = path.normalize(libPath || "/");
    return function(){
        var paths = 1 <= arguments.length ? [].slice.call(arguments, 0) : [];
        // Anchor the escaped libPath at the start of each candidate path.
        var re = new RegExp("^"+reEscape(libPath));
        return _.every(paths, function(p) {
            p = path.normalize(p);
            return p.match(re);
        });
    }
};
// True when an npm package name is unsafe to use as a path segment,
// i.e. it contains a ".." parent-directory sequence.
var invalidNpmName = function (name) {
    return name.indexOf("..") !== -1;
}
/**
* Attempt to update the npmFileMap from extracted package.json, then using npmFileMap move required files to libPath/../
* If the npmFileMap tries to modify files outside of libPath, dont let it!
* @param pkg
* @param libPath = root folder for extracted lib
* @returns {Array} = array of security related errors triggered during operation.
*/
var processNewVersion = function(pkg, version){
//sometimes the tar is extracted to a dir that isnt called 'package' - get that dir via glob
var extractLibPath = glob.sync(getPackageTempPath(pkg, version)+"/*/")[0];
if(!extractLibPath){
//even more rarely, the tar doesnt seem to get extracted at all.. which is probably a bug in that lib.
var msg = pkg.npmName + "@" + version + " - never got extracted! This problem usually goes away on next run. Couldnt find extract dir here: " + getPackageTempPath(pkg, version);
console.log(msg.error);
return;
}
var libPath = getPackagePath(pkg, version)
var isAllowedPath = isAllowedPathFn(extractLibPath);
var newPath = path.join(libPath, 'package.json')
if(false && fs.existsSync(newPath)){ //turn this off for now
var newPkg = parse(newPath);
if(isValidFileMap(newPkg)){
pkg.npmFileMap = newPkg.npmFileMap;
}
}
var npmFileMap = pkg.npmFileMap;
var errors = [];
var updated = false;
_.each(npmFileMap, function(fileSpec) {
var basePath = fileSpec.basePath || "";
_.each(fileSpec.files, function(file) {
var libContentsPath = path.normalize(path.join(extractLibPath, basePath));
if(!isAllowedPath(libContentsPath)){
errors.push(error(pkg.npmName+" contains a malicious file path: "+libContentsPath, error.FILE_PATH));
return
}
var files = glob.sync(path.join(libContentsPath, file));
var copyPath = path.join(libPath, basePath)
if(files.length == 0){
//usually old versions have this problem
var msg = (pkg.npmName + "@" + version + " - couldnt find file in npmFileMap.") + (" Doesnt exist: " + path.join(libContentsPath, file)).info;
fs.mkdirsSync(libPath);
console.log(msg);
}
_.each(files, function(extractFilePath) {
if(extractFilePath.match(/(dependencies|\.zip\s*$)/i)){
return;
}
var copyPart = path.relative(libContentsPath, extractFilePath);
var copyPath = path.join(libPath, copyPart)
fs.mkdirsSync(path.dirname(copyPath))
fs.copySync(extractFilePath, copyPath);
updated = true;
});
});
});
if(updated){
newVersionCount++;
var libPatha =path.normalize(path.join(__dirname, 'ajax', 'libs', pkg.name, 'package.json'));
console.log('------------'.red, libPatha.green);
if (stable.is(version) && semver.gt(version, pkg.version)) {
pkg.version = version;
fs.writeFileSync(libPatha, JSON.stringify(pkg, null, 2) + '\n', 'utf8');
}
}
return errors;
}
// Scratch location where this package@version's tarball is downloaded and
// extracted before the wanted files are copied into ajax/libs.
var getPackageTempPath = function (pkg, version) {
    var scratch = path.join(tempDirPath, pkg.name, version);
    return path.normalize(scratch);
}
var getPackagePath = function(pkg, version){
return path.normalize(path.join(__dirname, 'ajax', 'libs', pkg.name, version));
}
/**
* download and extract a tarball for a single npm version, get the files in npmFileMap and delete the rest
* @param pkg
* @param tarballUrl
* @param version
* @param cb
* @returns {*}
*/
var updateLibraryVersion = function(pkg, tarballUrl, version, cb) {
if(invalidNpmName(pkg.name)){
return cb(error(pkg.npmName+" has a malicious package name:"+ pkg.name, error.PKG_NAME));
}
var extractLibPath = getPackageTempPath(pkg, version);
var libPath = getPackagePath(pkg, version);
if(!fs.existsSync(libPath)) {
fs.mkdirsSync(extractLibPath);
var url = tarballUrl;
var downloadFile = path.join(extractLibPath, 'dist.tar.gz');
tarball.extractTarballDownload(url , downloadFile, extractLibPath, {}, function(err, result) {
if(fs.existsSync(downloadFile)){
processNewVersion(pkg, version);
var msg = "Do not have version " + version + " of " + pkg.npmName;
console.log(msg.warn);
} else {
if ('Server respond 404' == result.error) {
fs.mkdirsSync('./ajax/libs/' + pkg.name + '/' + version);
}
var msg = "error downloading " + version + " of " + pkg.npmName + " it didnt exist: " + result + err;
console.log(msg.error);
}
cb()
});
} else {
cb()
}
};
/**
 * grab all versions of a lib that has an 'npmFileMap' and 'npmName' in its
 * package.json
 * @param pkg - parsed package.json for one cdnjs library
 * @param cb - async-style callback; now guaranteed to be invoked exactly once
 *
 * Fixes:
 *  - the response callback's first parameter was named `error`, shadowing the
 *    module-level error() helper; the failure branch then invoked the Error
 *    object (or null) as a function and crashed with a TypeError.
 *  - `result.body` was read without checking `result`, crashing when the
 *    request itself failed (result undefined).
 *  - the failure branch never called `cb`, so the async.each in run() hung.
 */
var updateLibrary = function (pkg, cb) {
    if(!isValidFileMap(pkg)){
        var msg = pkg.npmName.error + " has a malicious npmFileMap";
        console.log(msg.warn);
        return cb(null);
    }
    var msg = 'Checking versions for ' + pkg.npmName;
    if (pkg.name != pkg.npmName) {
        msg += ' (' + pkg.name + ')';
    }
    console.log(msg.prompt);
    request.get('http://registry.npmjs.org/' + pkg.npmName).end(function(err, result) {
        if (result && result.body != undefined) {
            // One entry per published version: [version, metadata].
            async.each(_.pairs(result.body.versions), function(p, cb){
                var version = p[0];
                var data = p[1];
                updateLibraryVersion(pkg, data.dist.tarball, version, cb)
            }, function(eachErr){
                var msg = 'Library finished' + (eachErr ? ' ' + eachErr.error : '');
                console.log(msg);
                cb(null);
            });
        } else {
            // Registry unreachable or empty response: log and move on so the
            // outer async.each in run() can still complete.
            error('Got error!', pkg.name);
            cb(null);
        }
    });
}
// Entry point: scan ajax/libs for npm-enabled libraries (a package.json with
// both npmName and npmFileMap) and import any versions we don't have yet.
exports.run = function(){
    // NOTE(review): '/*' is appended as a literal path segment; this
    // presumably relies on glob support inside fs-extra/rimraf's remove -
    // confirm it actually empties the scratch dir.
    fs.removeSync(path.join(tempDirPath, '/*'))
    console.log('Looking for npm enabled libraries...');
    // load up those files
    // With "run <libname>" on the CLI, restrict the scan to that one library.
    if (args.length === 2 && isThere('./ajax/libs/' + args[1] + '/package.json')) {
        var packages = glob.sync("./ajax/libs/" + args[1]+ "/package.json");
    } else {
        var packages = glob.sync("./ajax/libs/*/package.json");
    }
    // Keep only libraries that opted into npm auto-update.
    packages = _(packages).map(function (pkg) {
        var parsedPkg = parse(pkg);
        return (parsedPkg.npmName && parsedPkg.npmFileMap) ? parsedPkg : null;
    }).compact().value();
    var msg = 'Found ' + packages.length + ' npm enabled libraries';
    console.log(msg.prompt);
    async.each(packages, updateLibrary, function(err) {
        var msg = 'Auto Update Completed - ' + newVersionCount + ' versions were updated';
        console.log(msg.prompt);
        // Clean the scratch space again on the way out.
        fs.removeSync(path.join(tempDirPath, '/*'))
    });
}
// Public API - these are also exercised directly by the test suite.
exports.updateLibrary = updateLibrary;
exports.updateLibraryVersion = updateLibraryVersion;
exports.processNewVersion = processNewVersion;
exports.error = error;
exports.isAllowedPathFn = isAllowedPathFn;
exports.isValidFileMap = isValidFileMap;
exports.invalidNpmName = invalidNpmName;
// CLI entry: `node <script> run [libname]` starts an update pass immediately;
// any other invocation just prints usage.
var args = process.argv.slice(2);
if(args.length > 0 && args[0] == 'run'){
    exports.run();
} else {
    console.log('to start, pass the "run" arg'.prompt)
}
| mit |
t0tec/dotfiles | vim/vim.symlink/bundle/Command-T/ruby/command-t/scanner/tag_scanner.rb | 760 | # Copyright 2011-2014 Greg Hurrell. All rights reserved.
# Licensed under the terms of the BSD 2-clause license.
module CommandT
  class Scanner
    # Candidate scanner backed by Vim's tag database: every entry returned by
    # Vim's taglist() becomes one selectable "path" in the Command-T window.
    class TagScanner < Scanner
      # True when tag entries are rendered as "name:filename" rather than
      # just "name" (set via the :include_filenames option).
      attr_reader :include_filenames

      # @param options [Hash] supports :include_filenames (default false)
      def initialize(options = {})
        @include_filenames = options[:include_filenames] || false
        @cached_tags = nil
      end

      # Unique, sorted tag names (optionally suffixed with their source file),
      # memoized until #flush is called.
      def paths
        @cached_tags ||= taglist.map do |tag|
          path = tag['name']
          # NOTE(review): << appends in place, mutating the string held in the
          # taglist result; assumed safe because taglist() builds fresh strings
          # on every evaluation - confirm.
          path << ":#{tag['filename']}" if @include_filenames
          path
        end.uniq.sort
      end

      # Drop the memoized tag list so the next #paths call re-queries Vim.
      def flush
        @cached_tags = nil
      end

      private

      # Raw tag dictionaries from Vim; the "." pattern matches every tag name.
      def taglist
        ::VIM::evaluate 'taglist(".")'
      end
    end # class TagScanner
  end # class Scanner
end # module CommandT
| mit |
Siv3D/OpenSiv3D | Siv3D/src/ThirdParty-prebuilt/opencv2/core/cuda/functional.hpp | 32256 | /*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#ifndef OPENCV_CUDA_FUNCTIONAL_HPP
#define OPENCV_CUDA_FUNCTIONAL_HPP
#include <functional>
#include "saturate_cast.hpp"
#include "vec_traits.hpp"
#include "type_traits.hpp"
/** @file
* @deprecated Use @ref cudev instead.
*/
//! @cond IGNORED
namespace cv { namespace cuda { namespace device
{
// Function Objects
template<typename Argument, typename Result> struct unary_function
{
typedef Argument argument_type;
typedef Result result_type;
};
template<typename Argument1, typename Argument2, typename Result> struct binary_function
{
typedef Argument1 first_argument_type;
typedef Argument2 second_argument_type;
typedef Result result_type;
};
// Arithmetic Operations
template <typename T> struct plus : binary_function<T, T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a + b;
}
__host__ __device__ __forceinline__ plus() {}
__host__ __device__ __forceinline__ plus(const plus&) {}
};
template <typename T> struct minus : binary_function<T, T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a - b;
}
__host__ __device__ __forceinline__ minus() {}
__host__ __device__ __forceinline__ minus(const minus&) {}
};
template <typename T> struct multiplies : binary_function<T, T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a * b;
}
__host__ __device__ __forceinline__ multiplies() {}
__host__ __device__ __forceinline__ multiplies(const multiplies&) {}
};
template <typename T> struct divides : binary_function<T, T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a / b;
}
__host__ __device__ __forceinline__ divides() {}
__host__ __device__ __forceinline__ divides(const divides&) {}
};
template <typename T> struct modulus : binary_function<T, T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a % b;
}
__host__ __device__ __forceinline__ modulus() {}
__host__ __device__ __forceinline__ modulus(const modulus&) {}
};
template <typename T> struct negate : unary_function<T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType a) const
{
return -a;
}
__host__ __device__ __forceinline__ negate() {}
__host__ __device__ __forceinline__ negate(const negate&) {}
};
// Comparison Operations
template <typename T> struct equal_to : binary_function<T, T, bool>
{
__device__ __forceinline__ bool operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a == b;
}
__host__ __device__ __forceinline__ equal_to() {}
__host__ __device__ __forceinline__ equal_to(const equal_to&) {}
};
template <typename T> struct not_equal_to : binary_function<T, T, bool>
{
__device__ __forceinline__ bool operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a != b;
}
__host__ __device__ __forceinline__ not_equal_to() {}
__host__ __device__ __forceinline__ not_equal_to(const not_equal_to&) {}
};
template <typename T> struct greater : binary_function<T, T, bool>
{
__device__ __forceinline__ bool operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a > b;
}
__host__ __device__ __forceinline__ greater() {}
__host__ __device__ __forceinline__ greater(const greater&) {}
};
template <typename T> struct less : binary_function<T, T, bool>
{
__device__ __forceinline__ bool operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a < b;
}
__host__ __device__ __forceinline__ less() {}
__host__ __device__ __forceinline__ less(const less&) {}
};
template <typename T> struct greater_equal : binary_function<T, T, bool>
{
__device__ __forceinline__ bool operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a >= b;
}
__host__ __device__ __forceinline__ greater_equal() {}
__host__ __device__ __forceinline__ greater_equal(const greater_equal&) {}
};
template <typename T> struct less_equal : binary_function<T, T, bool>
{
__device__ __forceinline__ bool operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a <= b;
}
__host__ __device__ __forceinline__ less_equal() {}
__host__ __device__ __forceinline__ less_equal(const less_equal&) {}
};
// Logical Operations
template <typename T> struct logical_and : binary_function<T, T, bool>
{
__device__ __forceinline__ bool operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a && b;
}
__host__ __device__ __forceinline__ logical_and() {}
__host__ __device__ __forceinline__ logical_and(const logical_and&) {}
};
template <typename T> struct logical_or : binary_function<T, T, bool>
{
__device__ __forceinline__ bool operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a || b;
}
__host__ __device__ __forceinline__ logical_or() {}
__host__ __device__ __forceinline__ logical_or(const logical_or&) {}
};
template <typename T> struct logical_not : unary_function<T, bool>
{
__device__ __forceinline__ bool operator ()(typename TypeTraits<T>::ParameterType a) const
{
return !a;
}
__host__ __device__ __forceinline__ logical_not() {}
__host__ __device__ __forceinline__ logical_not(const logical_not&) {}
};
// Bitwise Operations
template <typename T> struct bit_and : binary_function<T, T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a & b;
}
__host__ __device__ __forceinline__ bit_and() {}
__host__ __device__ __forceinline__ bit_and(const bit_and&) {}
};
template <typename T> struct bit_or : binary_function<T, T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a | b;
}
__host__ __device__ __forceinline__ bit_or() {}
__host__ __device__ __forceinline__ bit_or(const bit_or&) {}
};
template <typename T> struct bit_xor : binary_function<T, T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType a,
typename TypeTraits<T>::ParameterType b) const
{
return a ^ b;
}
__host__ __device__ __forceinline__ bit_xor() {}
__host__ __device__ __forceinline__ bit_xor(const bit_xor&) {}
};
template <typename T> struct bit_not : unary_function<T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType v) const
{
return ~v;
}
__host__ __device__ __forceinline__ bit_not() {}
__host__ __device__ __forceinline__ bit_not(const bit_not&) {}
};
// Generalized Identity Operations
template <typename T> struct identity : unary_function<T, T>
{
__device__ __forceinline__ typename TypeTraits<T>::ParameterType operator()(typename TypeTraits<T>::ParameterType x) const
{
return x;
}
__host__ __device__ __forceinline__ identity() {}
__host__ __device__ __forceinline__ identity(const identity&) {}
};
template <typename T1, typename T2> struct project1st : binary_function<T1, T2, T1>
{
__device__ __forceinline__ typename TypeTraits<T1>::ParameterType operator()(typename TypeTraits<T1>::ParameterType lhs, typename TypeTraits<T2>::ParameterType rhs) const
{
return lhs;
}
__host__ __device__ __forceinline__ project1st() {}
__host__ __device__ __forceinline__ project1st(const project1st&) {}
};
template <typename T1, typename T2> struct project2nd : binary_function<T1, T2, T2>
{
__device__ __forceinline__ typename TypeTraits<T2>::ParameterType operator()(typename TypeTraits<T1>::ParameterType lhs, typename TypeTraits<T2>::ParameterType rhs) const
{
return rhs;
}
__host__ __device__ __forceinline__ project2nd() {}
__host__ __device__ __forceinline__ project2nd(const project2nd&) {}
};
// Min/Max Operations
#define OPENCV_CUDA_IMPLEMENT_MINMAX(name, type, op) \
template <> struct name<type> : binary_function<type, type, type> \
{ \
__device__ __forceinline__ type operator()(type lhs, type rhs) const {return op(lhs, rhs);} \
__host__ __device__ __forceinline__ name() {}\
__host__ __device__ __forceinline__ name(const name&) {}\
};
template <typename T> struct maximum : binary_function<T, T, T>
{
__device__ __forceinline__ T operator()(typename TypeTraits<T>::ParameterType lhs, typename TypeTraits<T>::ParameterType rhs) const
{
return max(lhs, rhs);
}
__host__ __device__ __forceinline__ maximum() {}
__host__ __device__ __forceinline__ maximum(const maximum&) {}
};
OPENCV_CUDA_IMPLEMENT_MINMAX(maximum, uchar, ::max)
OPENCV_CUDA_IMPLEMENT_MINMAX(maximum, schar, ::max)
OPENCV_CUDA_IMPLEMENT_MINMAX(maximum, char, ::max)
OPENCV_CUDA_IMPLEMENT_MINMAX(maximum, ushort, ::max)
OPENCV_CUDA_IMPLEMENT_MINMAX(maximum, short, ::max)
OPENCV_CUDA_IMPLEMENT_MINMAX(maximum, int, ::max)
OPENCV_CUDA_IMPLEMENT_MINMAX(maximum, uint, ::max)
OPENCV_CUDA_IMPLEMENT_MINMAX(maximum, float, ::fmax)
OPENCV_CUDA_IMPLEMENT_MINMAX(maximum, double, ::fmax)
template <typename T> struct minimum : binary_function<T, T, T>
{
__device__ __forceinline__ T operator()(typename TypeTraits<T>::ParameterType lhs, typename TypeTraits<T>::ParameterType rhs) const
{
return min(lhs, rhs);
}
__host__ __device__ __forceinline__ minimum() {}
__host__ __device__ __forceinline__ minimum(const minimum&) {}
};
OPENCV_CUDA_IMPLEMENT_MINMAX(minimum, uchar, ::min)
OPENCV_CUDA_IMPLEMENT_MINMAX(minimum, schar, ::min)
OPENCV_CUDA_IMPLEMENT_MINMAX(minimum, char, ::min)
OPENCV_CUDA_IMPLEMENT_MINMAX(minimum, ushort, ::min)
OPENCV_CUDA_IMPLEMENT_MINMAX(minimum, short, ::min)
OPENCV_CUDA_IMPLEMENT_MINMAX(minimum, int, ::min)
OPENCV_CUDA_IMPLEMENT_MINMAX(minimum, uint, ::min)
OPENCV_CUDA_IMPLEMENT_MINMAX(minimum, float, ::fmin)
OPENCV_CUDA_IMPLEMENT_MINMAX(minimum, double, ::fmin)
#undef OPENCV_CUDA_IMPLEMENT_MINMAX
// Math functions
template <typename T> struct abs_func : unary_function<T, T>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType x) const
{
return abs(x);
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
template <> struct abs_func<unsigned char> : unary_function<unsigned char, unsigned char>
{
__device__ __forceinline__ unsigned char operator ()(unsigned char x) const
{
return x;
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
template <> struct abs_func<signed char> : unary_function<signed char, signed char>
{
__device__ __forceinline__ signed char operator ()(signed char x) const
{
return ::abs((int)x);
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
template <> struct abs_func<char> : unary_function<char, char>
{
__device__ __forceinline__ char operator ()(char x) const
{
return ::abs((int)x);
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
template <> struct abs_func<unsigned short> : unary_function<unsigned short, unsigned short>
{
__device__ __forceinline__ unsigned short operator ()(unsigned short x) const
{
return x;
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
template <> struct abs_func<short> : unary_function<short, short>
{
__device__ __forceinline__ short operator ()(short x) const
{
return ::abs((int)x);
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
template <> struct abs_func<unsigned int> : unary_function<unsigned int, unsigned int>
{
__device__ __forceinline__ unsigned int operator ()(unsigned int x) const
{
return x;
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
template <> struct abs_func<int> : unary_function<int, int>
{
__device__ __forceinline__ int operator ()(int x) const
{
return ::abs(x);
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
template <> struct abs_func<float> : unary_function<float, float>
{
__device__ __forceinline__ float operator ()(float x) const
{
return ::fabsf(x);
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
template <> struct abs_func<double> : unary_function<double, double>
{
__device__ __forceinline__ double operator ()(double x) const
{
return ::fabs(x);
}
__host__ __device__ __forceinline__ abs_func() {}
__host__ __device__ __forceinline__ abs_func(const abs_func&) {}
};
#define OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(name, func) \
template <typename T> struct name ## _func : unary_function<T, float> \
{ \
__device__ __forceinline__ float operator ()(typename TypeTraits<T>::ParameterType v) const \
{ \
return func ## f(v); \
} \
__host__ __device__ __forceinline__ name ## _func() {} \
__host__ __device__ __forceinline__ name ## _func(const name ## _func&) {} \
}; \
template <> struct name ## _func<double> : unary_function<double, double> \
{ \
__device__ __forceinline__ double operator ()(double v) const \
{ \
return func(v); \
} \
__host__ __device__ __forceinline__ name ## _func() {} \
__host__ __device__ __forceinline__ name ## _func(const name ## _func&) {} \
};
#define OPENCV_CUDA_IMPLEMENT_BIN_FUNCTOR(name, func) \
template <typename T> struct name ## _func : binary_function<T, T, float> \
{ \
__device__ __forceinline__ float operator ()(typename TypeTraits<T>::ParameterType v1, typename TypeTraits<T>::ParameterType v2) const \
{ \
return func ## f(v1, v2); \
} \
__host__ __device__ __forceinline__ name ## _func() {} \
__host__ __device__ __forceinline__ name ## _func(const name ## _func&) {} \
}; \
template <> struct name ## _func<double> : binary_function<double, double, double> \
{ \
__device__ __forceinline__ double operator ()(double v1, double v2) const \
{ \
return func(v1, v2); \
} \
__host__ __device__ __forceinline__ name ## _func() {} \
__host__ __device__ __forceinline__ name ## _func(const name ## _func&) {} \
};
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(sqrt, ::sqrt)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(exp, ::exp)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(exp2, ::exp2)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(exp10, ::exp10)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(log, ::log)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(log2, ::log2)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(log10, ::log10)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(sin, ::sin)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(cos, ::cos)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(tan, ::tan)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(asin, ::asin)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(acos, ::acos)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(atan, ::atan)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(sinh, ::sinh)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(cosh, ::cosh)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(tanh, ::tanh)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(asinh, ::asinh)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(acosh, ::acosh)
OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR(atanh, ::atanh)
OPENCV_CUDA_IMPLEMENT_BIN_FUNCTOR(hypot, ::hypot)
OPENCV_CUDA_IMPLEMENT_BIN_FUNCTOR(atan2, ::atan2)
OPENCV_CUDA_IMPLEMENT_BIN_FUNCTOR(pow, ::pow)
#undef OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR
#undef OPENCV_CUDA_IMPLEMENT_UN_FUNCTOR_NO_DOUBLE
#undef OPENCV_CUDA_IMPLEMENT_BIN_FUNCTOR
template<typename T> struct hypot_sqr_func : binary_function<T, T, float>
{
__device__ __forceinline__ T operator ()(typename TypeTraits<T>::ParameterType src1, typename TypeTraits<T>::ParameterType src2) const
{
return src1 * src1 + src2 * src2;
}
__host__ __device__ __forceinline__ hypot_sqr_func() {}
__host__ __device__ __forceinline__ hypot_sqr_func(const hypot_sqr_func&) {}
};
// Saturate Cast Functor
template <typename T, typename D> struct saturate_cast_func : unary_function<T, D>
{
__device__ __forceinline__ D operator ()(typename TypeTraits<T>::ParameterType v) const
{
return saturate_cast<D>(v);
}
__host__ __device__ __forceinline__ saturate_cast_func() {}
__host__ __device__ __forceinline__ saturate_cast_func(const saturate_cast_func&) {}
};
// Threshold Functors
template <typename T> struct thresh_binary_func : unary_function<T, T>
{
__host__ __device__ __forceinline__ thresh_binary_func(T thresh_, T maxVal_) : thresh(thresh_), maxVal(maxVal_) {}
__device__ __forceinline__ T operator()(typename TypeTraits<T>::ParameterType src) const
{
return (src > thresh) * maxVal;
}
__host__ __device__ __forceinline__ thresh_binary_func() {}
__host__ __device__ __forceinline__ thresh_binary_func(const thresh_binary_func& other)
: thresh(other.thresh), maxVal(other.maxVal) {}
T thresh;
T maxVal;
};
template <typename T> struct thresh_binary_inv_func : unary_function<T, T>
{
__host__ __device__ __forceinline__ thresh_binary_inv_func(T thresh_, T maxVal_) : thresh(thresh_), maxVal(maxVal_) {}
__device__ __forceinline__ T operator()(typename TypeTraits<T>::ParameterType src) const
{
return (src <= thresh) * maxVal;
}
__host__ __device__ __forceinline__ thresh_binary_inv_func() {}
__host__ __device__ __forceinline__ thresh_binary_inv_func(const thresh_binary_inv_func& other)
: thresh(other.thresh), maxVal(other.maxVal) {}
T thresh;
T maxVal;
};
template <typename T> struct thresh_trunc_func : unary_function<T, T>
{
explicit __host__ __device__ __forceinline__ thresh_trunc_func(T thresh_, T maxVal_ = 0) : thresh(thresh_) {CV_UNUSED(maxVal_);}
__device__ __forceinline__ T operator()(typename TypeTraits<T>::ParameterType src) const
{
return minimum<T>()(src, thresh);
}
__host__ __device__ __forceinline__ thresh_trunc_func() {}
__host__ __device__ __forceinline__ thresh_trunc_func(const thresh_trunc_func& other)
: thresh(other.thresh) {}
T thresh;
};
template <typename T> struct thresh_to_zero_func : unary_function<T, T>
{
explicit __host__ __device__ __forceinline__ thresh_to_zero_func(T thresh_, T maxVal_ = 0) : thresh(thresh_) {CV_UNUSED(maxVal_);}
__device__ __forceinline__ T operator()(typename TypeTraits<T>::ParameterType src) const
{
return (src > thresh) * src;
}
__host__ __device__ __forceinline__ thresh_to_zero_func() {}
__host__ __device__ __forceinline__ thresh_to_zero_func(const thresh_to_zero_func& other)
: thresh(other.thresh) {}
T thresh;
};
template <typename T> struct thresh_to_zero_inv_func : unary_function<T, T>
{
explicit __host__ __device__ __forceinline__ thresh_to_zero_inv_func(T thresh_, T maxVal_ = 0) : thresh(thresh_) {CV_UNUSED(maxVal_);}
__device__ __forceinline__ T operator()(typename TypeTraits<T>::ParameterType src) const
{
return (src <= thresh) * src;
}
__host__ __device__ __forceinline__ thresh_to_zero_inv_func() {}
__host__ __device__ __forceinline__ thresh_to_zero_inv_func(const thresh_to_zero_inv_func& other)
: thresh(other.thresh) {}
T thresh;
};
// Function Object Adaptors
template <typename Predicate> struct unary_negate : unary_function<typename Predicate::argument_type, bool>
{
explicit __host__ __device__ __forceinline__ unary_negate(const Predicate& p) : pred(p) {}
__device__ __forceinline__ bool operator()(typename TypeTraits<typename Predicate::argument_type>::ParameterType x) const
{
return !pred(x);
}
__host__ __device__ __forceinline__ unary_negate() {}
__host__ __device__ __forceinline__ unary_negate(const unary_negate& other) : pred(other.pred) {}
Predicate pred;
};
template <typename Predicate> __host__ __device__ __forceinline__ unary_negate<Predicate> not1(const Predicate& pred)
{
return unary_negate<Predicate>(pred);
}
template <typename Predicate> struct binary_negate : binary_function<typename Predicate::first_argument_type, typename Predicate::second_argument_type, bool>
{
explicit __host__ __device__ __forceinline__ binary_negate(const Predicate& p) : pred(p) {}
__device__ __forceinline__ bool operator()(typename TypeTraits<typename Predicate::first_argument_type>::ParameterType x,
typename TypeTraits<typename Predicate::second_argument_type>::ParameterType y) const
{
return !pred(x,y);
}
__host__ __device__ __forceinline__ binary_negate() {}
__host__ __device__ __forceinline__ binary_negate(const binary_negate& other) : pred(other.pred) {}
Predicate pred;
};
template <typename BinaryPredicate> __host__ __device__ __forceinline__ binary_negate<BinaryPredicate> not2(const BinaryPredicate& pred)
{
return binary_negate<BinaryPredicate>(pred);
}
template <typename Op> struct binder1st : unary_function<typename Op::second_argument_type, typename Op::result_type>
{
__host__ __device__ __forceinline__ binder1st(const Op& op_, const typename Op::first_argument_type& arg1_) : op(op_), arg1(arg1_) {}
__device__ __forceinline__ typename Op::result_type operator ()(typename TypeTraits<typename Op::second_argument_type>::ParameterType a) const
{
return op(arg1, a);
}
__host__ __device__ __forceinline__ binder1st() {}
__host__ __device__ __forceinline__ binder1st(const binder1st& other) : op(other.op), arg1(other.arg1) {}
Op op;
typename Op::first_argument_type arg1;
};
template <typename Op, typename T> __host__ __device__ __forceinline__ binder1st<Op> bind1st(const Op& op, const T& x)
{
return binder1st<Op>(op, typename Op::first_argument_type(x));
}
template <typename Op> struct binder2nd : unary_function<typename Op::first_argument_type, typename Op::result_type>
{
__host__ __device__ __forceinline__ binder2nd(const Op& op_, const typename Op::second_argument_type& arg2_) : op(op_), arg2(arg2_) {}
__forceinline__ __device__ typename Op::result_type operator ()(typename TypeTraits<typename Op::first_argument_type>::ParameterType a) const
{
return op(a, arg2);
}
__host__ __device__ __forceinline__ binder2nd() {}
__host__ __device__ __forceinline__ binder2nd(const binder2nd& other) : op(other.op), arg2(other.arg2) {}
Op op;
typename Op::second_argument_type arg2;
};
template <typename Op, typename T> __host__ __device__ __forceinline__ binder2nd<Op> bind2nd(const Op& op, const T& x)
{
return binder2nd<Op>(op, typename Op::second_argument_type(x));
}
// Functor Traits
template <typename F> struct IsUnaryFunction
{
typedef char Yes;
struct No {Yes a[2];};
template <typename T, typename D> static Yes check(unary_function<T, D>);
static No check(...);
static F makeF();
enum { value = (sizeof(check(makeF())) == sizeof(Yes)) };
};
template <typename F> struct IsBinaryFunction
{
typedef char Yes;
struct No {Yes a[2];};
template <typename T1, typename T2, typename D> static Yes check(binary_function<T1, T2, D>);
static No check(...);
static F makeF();
enum { value = (sizeof(check(makeF())) == sizeof(Yes)) };
};
namespace functional_detail
{
template <size_t src_elem_size, size_t dst_elem_size> struct UnOpShift { enum { shift = 1 }; };
template <size_t src_elem_size> struct UnOpShift<src_elem_size, 1> { enum { shift = 4 }; };
template <size_t src_elem_size> struct UnOpShift<src_elem_size, 2> { enum { shift = 2 }; };
template <typename T, typename D> struct DefaultUnaryShift
{
enum { shift = UnOpShift<sizeof(T), sizeof(D)>::shift };
};
template <size_t src_elem_size1, size_t src_elem_size2, size_t dst_elem_size> struct BinOpShift { enum { shift = 1 }; };
template <size_t src_elem_size1, size_t src_elem_size2> struct BinOpShift<src_elem_size1, src_elem_size2, 1> { enum { shift = 4 }; };
template <size_t src_elem_size1, size_t src_elem_size2> struct BinOpShift<src_elem_size1, src_elem_size2, 2> { enum { shift = 2 }; };
template <typename T1, typename T2, typename D> struct DefaultBinaryShift
{
enum { shift = BinOpShift<sizeof(T1), sizeof(T2), sizeof(D)>::shift };
};
template <typename Func, bool unary = IsUnaryFunction<Func>::value> struct ShiftDispatcher;
template <typename Func> struct ShiftDispatcher<Func, true>
{
enum { shift = DefaultUnaryShift<typename Func::argument_type, typename Func::result_type>::shift };
};
template <typename Func> struct ShiftDispatcher<Func, false>
{
enum { shift = DefaultBinaryShift<typename Func::first_argument_type, typename Func::second_argument_type, typename Func::result_type>::shift };
};
}
template <typename Func> struct DefaultTransformShift
{
enum { shift = functional_detail::ShiftDispatcher<Func>::shift };
};
template <typename Func> struct DefaultTransformFunctorTraits
{
enum { simple_block_dim_x = 16 };
enum { simple_block_dim_y = 16 };
enum { smart_block_dim_x = 16 };
enum { smart_block_dim_y = 16 };
enum { smart_shift = DefaultTransformShift<Func>::shift };
};
template <typename Func> struct TransformFunctorTraits : DefaultTransformFunctorTraits<Func> {};
#define OPENCV_CUDA_TRANSFORM_FUNCTOR_TRAITS(type) \
template <> struct TransformFunctorTraits< type > : DefaultTransformFunctorTraits< type >
}}} // namespace cv { namespace cuda { namespace cudev
//! @endcond
#endif // OPENCV_CUDA_FUNCTIONAL_HPP
| mit |
nicsnet/our-boxen | vendor/bundle/ruby/2.0.0/gems/puppet-3.6.1/spec/unit/provider/file/windows_spec.rb | 4716 | #! /usr/bin/env ruby
require 'spec_helper'
if Puppet.features.microsoft_windows?
require 'puppet/util/windows'
class WindowsSecurity
extend Puppet::Util::Windows::Security
end
end
describe Puppet::Type.type(:file).provider(:windows), :if => Puppet.features.microsoft_windows? do
include PuppetSpec::Files
let(:path) { tmpfile('windows_file_spec') }
let(:resource) { Puppet::Type.type(:file).new :path => path, :mode => 0777, :provider => described_class.name }
let(:provider) { resource.provider }
let(:sid) { 'S-1-1-50' }
let(:account) { 'quinn' }
describe "#mode" do
it "should return a string with the higher-order bits stripped away" do
FileUtils.touch(path)
WindowsSecurity.set_mode(0644, path)
provider.mode.should == '644'
end
it "should return absent if the file doesn't exist" do
provider.mode.should == :absent
end
end
describe "#mode=" do
it "should chmod the file to the specified value" do
FileUtils.touch(path)
WindowsSecurity.set_mode(0644, path)
provider.mode = '0755'
provider.mode.should == '755'
end
it "should pass along any errors encountered" do
expect do
provider.mode = '644'
end.to raise_error(Puppet::Error, /failed to set mode/)
end
end
describe "#id2name" do
it "should return the name of the user identified by the sid" do
Puppet::Util::Windows::Security.expects(:valid_sid?).with(sid).returns(true)
Puppet::Util::Windows::Security.expects(:sid_to_name).with(sid).returns(account)
provider.id2name(sid).should == account
end
it "should return the argument if it's already a name" do
Puppet::Util::Windows::Security.expects(:valid_sid?).with(account).returns(false)
Puppet::Util::Windows::Security.expects(:sid_to_name).never
provider.id2name(account).should == account
end
it "should return nil if the user doesn't exist" do
Puppet::Util::Windows::Security.expects(:valid_sid?).with(sid).returns(true)
Puppet::Util::Windows::Security.expects(:sid_to_name).with(sid).returns(nil)
provider.id2name(sid).should == nil
end
end
describe "#name2id" do
it "should delegate to name_to_sid" do
Puppet::Util::Windows::Security.expects(:name_to_sid).with(account).returns(sid)
provider.name2id(account).should == sid
end
end
describe "#owner" do
it "should return the sid of the owner if the file does exist" do
FileUtils.touch(resource[:path])
provider.stubs(:get_owner).with(resource[:path]).returns(sid)
provider.owner.should == sid
end
it "should return absent if the file doesn't exist" do
provider.owner.should == :absent
end
end
describe "#owner=" do
it "should set the owner to the specified value" do
provider.expects(:set_owner).with(sid, resource[:path])
provider.owner = sid
end
it "should propagate any errors encountered when setting the owner" do
provider.stubs(:set_owner).raises(ArgumentError)
expect {
provider.owner = sid
}.to raise_error(Puppet::Error, /Failed to set owner/)
end
end
describe "#group" do
it "should return the sid of the group if the file does exist" do
FileUtils.touch(resource[:path])
provider.stubs(:get_group).with(resource[:path]).returns(sid)
provider.group.should == sid
end
it "should return absent if the file doesn't exist" do
provider.group.should == :absent
end
end
describe "#group=" do
it "should set the group to the specified value" do
provider.expects(:set_group).with(sid, resource[:path])
provider.group = sid
end
it "should propagate any errors encountered when setting the group" do
provider.stubs(:set_group).raises(ArgumentError)
expect {
provider.group = sid
}.to raise_error(Puppet::Error, /Failed to set group/)
end
end
describe "when validating" do
{:owner => 'foo', :group => 'foo', :mode => 0777}.each do |k,v|
it "should fail if the filesystem doesn't support ACLs and we're managing #{k}" do
described_class.any_instance.stubs(:supports_acl?).returns false
expect {
Puppet::Type.type(:file).new :path => path, k => v
}.to raise_error(Puppet::Error, /Can only manage owner, group, and mode on filesystems that support Windows ACLs, such as NTFS/)
end
end
it "should not fail if the filesystem doesn't support ACLs and we're not managing permissions" do
described_class.any_instance.stubs(:supports_acl?).returns false
Puppet::Type.type(:file).new :path => path
end
end
end
| mit |
cdnjs/cdnjs | ajax/libs/simple-icons/6.8.0/pagekit.js | 618 | console.warn("warn -",`Imports like "const pagekit = require('simple-icons/icons/pagekit');" have been deprecated in v6.0.0 and will no longer work from v7.0.0, use "const { siPagekit } = require('simple-icons/icons');" instead`),module.exports={title:"Pagekit",slug:"pagekit",get svg(){return'<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Pagekit</title><path d="'+this.path+'"/></svg>'},path:"M2.401 0v24h9.6v-3.527H5.929V3.526h12.146v13.421h-6.073v3.525H21.6V0H2.401z",source:"https://pagekit.com/logo-guide",hex:"212121",guidelines:"https://pagekit.com/logo-guide",license:void 0};
| mit |
cdnjs/cdnjs | ajax/libs/simple-icons/6.4.0/bulma.min.js | 568 | console.warn("warn -",`Imports like "const bulma = require('simple-icons/icons/bulma');" have been deprecated in v6.0.0 and will no longer work from v7.0.0, use "const { siBulma } = require('simple-icons/icons');" instead`),module.exports={title:"Bulma",slug:"bulma",get svg(){return'<svg role="img" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><title>Bulma</title><path d="'+this.path+'"/></svg>'},path:"M11.25 0l-6 6 -1.5 10.5 7.5 7.5 9 -6 -6 -6 4.5 -4.5 -7.5 -7.5Z",source:"https://github.com/jgthms/bulma/",hex:"00D1B2",guidelines:void 0,license:void 0}; | mit |
cdnjs/cdnjs | ajax/libs/simple-icons/6.5.0/roblox.d.ts | 136 | /**@deprecated use "const { siRoblox } = require('simple-icons/icons');" instead*/declare const i:import("../alias").I;export default i; | mit |
tinnerm/openhab | bundles/model/org.openhab.model.persistence.ui/src/org/openhab/model/persistence/ui/labeling/PersistenceDescriptionLabelProvider.java | 951 | /**
* Copyright (c) 2010-2014, openHAB.org and others.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
/*
* generated by Xtext
*/
package org.openhab.model.persistence.ui.labeling;
import org.eclipse.xtext.ui.label.DefaultDescriptionLabelProvider;
/**
* Provides labels for a IEObjectDescriptions and IResourceDescriptions.
*
* see http://www.eclipse.org/Xtext/documentation/latest/xtext.html#labelProvider
*/
public class PersistenceDescriptionLabelProvider extends DefaultDescriptionLabelProvider {
/*
//Labels and icons can be computed like this:
String text(IEObjectDescription ele) {
return "my "+ele.getName();
}
String image(IEObjectDescription ele) {
return ele.getEClass().getName() + ".gif";
}
*/
}
| epl-1.0 |
md-5/jdk10 | test/jdk/java/util/PluggableLocale/BreakIteratorProviderTest.java | 4672 | /*
* Copyright (c) 2007, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 4052440 8062588 8165804 8210406
* @summary BreakIteratorProvider tests
* @library providersrc/foobarutils
* providersrc/fooprovider
* @modules java.base/sun.util.locale.provider
* java.base/sun.util.resources
* @build com.foobar.Utils
* com.foo.*
* @run main/othervm -Djava.locale.providers=JRE,SPI BreakIteratorProviderTest
*/
import java.text.BreakIterator;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.ResourceBundle;
import java.util.Set;
import com.foo.BreakIteratorProviderImpl;
import sun.util.locale.provider.LocaleProviderAdapter;
import sun.util.locale.provider.ResourceBundleBasedAdapter;
public class BreakIteratorProviderTest extends ProviderTest {
BreakIteratorProviderImpl bip = new BreakIteratorProviderImpl();
List<Locale> availloc = Arrays.asList(BreakIterator.getAvailableLocales());
List<Locale> providerloc = Arrays.asList(bip.getAvailableLocales());
List<Locale> jreloc = Arrays.asList(LocaleProviderAdapter.forJRE().getAvailableLocales());
List<Locale> jreimplloc = Arrays.asList(LocaleProviderAdapter.forJRE().getBreakIteratorProvider().getAvailableLocales());
public static void main(String[] s) {
new BreakIteratorProviderTest();
}
BreakIteratorProviderTest() {
availableLocalesTest();
objectValidityTest();
}
void availableLocalesTest() {
Set<Locale> localesFromAPI = new HashSet<>(availloc);
Set<Locale> localesExpected = new HashSet<>(jreloc);
localesExpected.addAll(providerloc);
if (localesFromAPI.equals(localesExpected)) {
System.out.println("availableLocalesTest passed.");
} else {
throw new RuntimeException("availableLocalesTest failed");
}
}
void objectValidityTest() {
for (Locale target: availloc) {
// pure JRE implementation
ResourceBundle rb = ((ResourceBundleBasedAdapter)LocaleProviderAdapter.forJRE()).getLocaleData().getBreakIteratorInfo(target);
String[] classNames = rb.getStringArray("BreakIteratorClasses");
boolean jreSupportsLocale = jreimplloc.contains(target);
// result object
String[] result = new String[4];
result[0] = BreakIterator.getCharacterInstance(target).getClass().getName();
result[1] = BreakIterator.getWordInstance(target).getClass().getName();
result[2] = BreakIterator.getLineInstance(target).getClass().getName();
result[3] = BreakIterator.getSentenceInstance(target).getClass().getName();
// provider's object (if any)
String[] providersResult = new String[4];
if (providerloc.contains(target)) {
providersResult[0] = bip.getCharacterInstance(target).getClass().getName();
providersResult[1] = bip.getWordInstance(target).getClass().getName();
providersResult[2] = bip.getLineInstance(target).getClass().getName();
providersResult[3] = bip.getSentenceInstance(target).getClass().getName();
}
// JRE
String[] jresResult = new String[4];
if (jreSupportsLocale) {
for (int i = 0; i < 4; i++) {
jresResult[i] = "sun.text." + classNames[i];
}
}
for (int i = 0; i < 4; i++) {
checkValidity(target, jresResult[i], providersResult[i], result[i], jreSupportsLocale);
}
}
}
} | gpl-2.0 |
b2z/joomla-cms | layouts/joomla/form/field/textarea.php | 3361 | <?php
/**
* @package Joomla.Site
* @subpackage Layout
*
* @copyright Copyright (C) 2005 - 2017 Open Source Matters, Inc. All rights reserved.
* @license GNU General Public License version 2 or later; see LICENSE.txt
*/
defined('JPATH_BASE') or die;
extract($displayData);
/**
* Layout variables
* -----------------
* @var string $autocomplete Autocomplete attribute for the field.
* @var boolean $autofocus Is autofocus enabled?
* @var string $class Classes for the input.
* @var string $description Description of the field.
* @var boolean $disabled Is this field disabled?
* @var string $group Group the field belongs to. <fields> section in form XML.
* @var boolean $hidden Is this field hidden in the form?
* @var string $hint Placeholder for the field.
* @var string $id DOM id of the field.
* @var string $label Label of the field.
* @var string $labelclass Classes to apply to the label.
* @var boolean $multiple Does this field support multiple values?
* @var string $name Name of the input field.
* @var string $onchange Onchange attribute for the field.
* @var string $onclick Onclick attribute for the field.
* @var string $pattern Pattern (Reg Ex) of value of the form field.
* @var boolean $readonly Is this field read only?
* @var boolean $repeat Allows extensions to duplicate elements.
* @var boolean $required Is this field required?
* @var integer $size Size attribute of the input.
* @var boolean $spellcheck Spellcheck state for the form field.
* @var string $validate Validation rules to apply.
* @var string $value Value attribute of the field.
* @var array $checkedOptions Options that will be set as checked.
* @var boolean $hasValue Has this field a value assigned?
* @var array $options Options available for this field.
* @var array $inputType Options available for this field.
* @var string $accept File types that are accepted.
*/
// Including fallback code for HTML5 non supported browsers.
JHtml::_('jquery.framework');
JHtml::_('script', 'system/html5fallback.js', array('version' => 'auto', 'relative' => true, 'conditional' => 'lt IE 9'));
// Initialize some field attributes.
$autocomplete = !$autocomplete ? 'autocomplete="off"' : 'autocomplete="' . $autocomplete . '"';
$autocomplete = $autocomplete == 'autocomplete="on"' ? '' : $autocomplete;
$attributes = array(
$columns ?: '',
$rows ?: '',
!empty($class) ? 'class="' . $class . '"' : '',
strlen($hint) ? 'placeholder="' . htmlspecialchars($hint, ENT_COMPAT, 'UTF-8') . '"' : '',
$disabled ? 'disabled' : '',
$readonly ? 'readonly' : '',
$onchange ? 'onchange="' . $onchange . '"' : '',
$onclick ? 'onclick="' . $onclick . '"' : '',
$required ? 'required aria-required="true"' : '',
$autocomplete,
$autofocus ? 'autofocus' : '',
$spellcheck ? '' : 'spellcheck="false"',
$maxlength ? $maxlength: ''
);
?>
<textarea name="<?php
echo $name; ?>" id="<?php
echo $id; ?>" <?php
echo implode(' ', $attributes); ?> ><?php echo htmlspecialchars($value, ENT_COMPAT, 'UTF-8'); ?></textarea>
| gpl-2.0 |
egberts/unicorn | bindings/python/unicorn/m68k_const.py | 489 | # For Unicorn Engine. AUTO-GENERATED FILE, DO NOT EDIT [m68k_const.py]
# M68K registers
UC_M68K_REG_INVALID = 0
UC_M68K_REG_A0 = 1
UC_M68K_REG_A1 = 2
UC_M68K_REG_A2 = 3
UC_M68K_REG_A3 = 4
UC_M68K_REG_A4 = 5
UC_M68K_REG_A5 = 6
UC_M68K_REG_A6 = 7
UC_M68K_REG_A7 = 8
UC_M68K_REG_D0 = 9
UC_M68K_REG_D1 = 10
UC_M68K_REG_D2 = 11
UC_M68K_REG_D3 = 12
UC_M68K_REG_D4 = 13
UC_M68K_REG_D5 = 14
UC_M68K_REG_D6 = 15
UC_M68K_REG_D7 = 16
UC_M68K_REG_SR = 17
UC_M68K_REG_PC = 18
UC_M68K_REG_ENDING = 19
| gpl-2.0 |
mbnshankar/KBWS | gsoap/mod_gsoap/gsoap_win/isapi/gsoap/isapistream.cpp | 2329 | /** Implementation of the isapistream class.
* @file isapistream.cpp
* @author Christian Aberger
* Copyright (C) 2001 WebWare (http://www.webware.at)
*/
#include "isapistream.h"
#include <cassert>
using namespace std;
isapistreambuf::isapistreambuf(EXTENSION_CONTROL_BLOCK *pECB)
: _pECB(pECB)
{
static const int nMinBufSize = 1024;
assert(NULL != pECB);
setp(_obuf, _obuf + sizeof _obuf);
_cbTotalBytes = pECB->cbTotalBytes;
_ibuflen = pECB->cbAvailable > nMinBufSize ? pECB->cbAvailable : nMinBufSize;
_ibuf = new char[_ibuflen + 1]; // we allocate it one byte more, adding a trailing '\0', then it is easier with strings
memset(_ibuf, 0, _ibuflen + 1);
memcpy(_ibuf, pECB->lpbData, pECB->cbAvailable);
_cbRead = pECB->cbAvailable;
setg(_ibuf, _ibuf, _ibuf + _cbRead);
}
isapistreambuf::~isapistreambuf() {
delete _ibuf;
}
int isapistreambuf::sync() {
BOOL bWrite = TRUE;
if (NULL != _pECB) {
DWORD dwBytesWritten = pptr() - pbase();
if (dwBytesWritten) {
bWrite = _pECB->WriteClient(_pECB->ConnID, (PVOID)_obuf, &dwBytesWritten, 0);
setp(_obuf, _obuf + sizeof _obuf);
}
}
return bWrite ? 0 : -1;
}
int isapistreambuf::overflow(int ch) {
if (char_traits<char>::eof() == ch) {
return streambuf::overflow(ch);
}
if (pptr() < epptr()) {
return sputc(ch);
}
int ret = sync();
if (0 == ret && char_traits<char>::eof() != ch) {
sputc(ch);
}
return ret;
}
int isapistreambuf::underflow() {
int retval = char_traits<char>::eof();
if (gptr() < egptr()) {
//retval = snextc();
retval = sgetc();
} else {
if (_cbRead < _cbTotalBytes) {
memset(_ibuf, 0, _ibuflen + 1);
DWORD dwLen = _ibuflen;
BOOL bRead = (*_pECB->ReadClient)(_pECB->ConnID, _ibuf, &dwLen);
if (bRead && dwLen > 0) {
_cbRead += dwLen;
setg(_ibuf, _ibuf, _ibuf + dwLen);
//retval = snextc();
retval = sgetc();
}
}
}
return retval;
}
isapistream::~isapistream() {
}
EXTENSION_CONTROL_BLOCK *isapistreambuf::ECB() {
return _pECB;
}
EXTENSION_CONTROL_BLOCK *isapistream::ECB() {
return _buf.ECB();
}
| gpl-2.0 |
switzer/revive-adserver | lib/OA/Dal/Maintenance/tests/unit/Priority_getCampaignsInfoByAgencyId.dal.test.php | 9359 | <?php
/*
+---------------------------------------------------------------------------+
| Revive Adserver |
| http://www.revive-adserver.com |
| |
| Copyright: See the COPYRIGHT.txt file. |
| License: GPLv2 or later, see the LICENSE.txt file. |
+---------------------------------------------------------------------------+
*/
require_once MAX_PATH . '/lib/OA/Dal/DataGenerator.php';
require_once MAX_PATH . '/lib/OA/Dal/Maintenance/Priority.php';
require_once MAX_PATH . '/lib/max/Dal/DataObjects/Campaigns.php';
/**
* A class for testing the getCampaignsInfoByAgencyId() method of
* OA_Dal_Maintenance_Priority class.
*
* @package OpenXDal
* @subpackage TestSuite
*/
class Test_OA_Dal_Maintenance_Priority_getCampaignsInfoByAgencyId extends UnitTestCase
{
public $aExpectedData = array();
private $firsCampaignId;
private $secondCampaignId;
private $firsAdId;
private $secondAdId;
/**
* The constructor method.
*/
function __construct()
{
parent::__construct();
}
/**
* A method to test the getCampaignsInfoByAgencyId method.
*/
function testGetCampaignsInfoByAgencyId()
{
list($agencyId1, $agencyId2) = $this->_generateAgencyCampaigns(DataObjects_Campaigns::PRIORITY_ECPM);
$da = new OA_Dal_Maintenance_Priority();
// Test 1 getCampaignsInfoByAgencyId method.
$ret = $da->getCampaignsInfoByAgencyId($agencyId1);
$this->checkTestResults($ret, $this->firsCampaignId, $this->firsAdId);
// Test 2 getCampaignsInfoByAgencyId method.
$ret = $da->getCampaignsInfoByAgencyId($agencyId2);
$this->checkTestResults($ret, $this->secondCampaignId, $this->secondAdId);
DataGenerator::cleanUp();
}
/**
* A method to test the getCampaignsInfoByAgencyId method.
*/
function testGetCampaignsInfoByAgencyIdAndPriority()
{
list($agencyId1, $agencyId2) = $this->_generateAgencyCampaigns($priority = 4, 1);
$da = new OA_Dal_Maintenance_Priority();
// Test 1 getCampaignsInfoByAgencyId method.
$ret = $da->getCampaignsInfoByAgencyIdAndPriority($agencyId1, $priority);
$this->checkTestResults($ret, $this->firsCampaignId, $this->firsAdId);
// Test 2 getCampaignsInfoByAgencyId method.
$ret = $da->getCampaignsInfoByAgencyIdAndPriority($agencyId2, $priority);
$this->checkTestResults($ret, $this->secondCampaignId, $this->secondAdId);
DataGenerator::cleanUp();
}
function checkTestResults($ret, $campaignId, $adId)
{
$this->assertTrue(is_array($ret));
$aExpectedCampaign = $this->aExpectedData['campaigns'][$campaignId];
$aExpectedAd = $this->aExpectedData['banners'][$adId];
$idxAds = OA_Maintenance_Priority_AdServer_Task_ECPMCommon::IDX_ADS;
$idxZones = OA_Maintenance_Priority_AdServer_Task_ECPMCommon::IDX_ZONES;
$idxWeight = OA_Maintenance_Priority_AdServer_Task_ECPMCommon::IDX_WEIGHT;
$idxRevenue = OA_Maintenance_Priority_AdServer_Task_ECPMCommon::IDX_REVENUE;
$idxRevenueType = OA_Maintenance_Priority_AdServer_Task_ECPMCommon::IDX_REVENUE_TYPE;
$idxImpr = OA_Maintenance_Priority_AdServer_Task_ECPMCommon::IDX_MIN_IMPRESSIONS;
$aCampaign = $ret[$campaignId];
$this->assertEqual($aExpectedCampaign['revenue'], $aCampaign[$idxRevenue]);
$this->assertEqual($aExpectedCampaign['revenue_type'], $aCampaign[$idxRevenueType]);
$this->assertTrue(isset($aCampaign[$idxAds][$adId]));
$aAd = $aCampaign[$idxAds][$adId];
$this->assertEqual($aExpectedAd['weight'], $aAd[$idxWeight]);
$zoneId = array_shift($aAd[$idxZones]);
$this->assertEqual($zoneId, $aExpectedAd['zone']);
}
/**
* A method to generate data for testing.
*
* @access private
*/
function _generateAgencyCampaigns($priority, $ecpmEnabled = 0)
{
// Add agencies
$agencyId1 = DataGenerator::generateOne('agency', true);
$agencyId2 = DataGenerator::generateOne('agency', true);
// Add clients
$doClients = OA_Dal::factoryDO('clients');
$doClients->agencyid = $agencyId1;
$clientId1 = DataGenerator::generateOne($doClients);
$doClients = OA_Dal::factoryDO('clients');
$doClients->agencyid = $agencyId2;
$clientId2 = DataGenerator::generateOne($doClients);
// Add campaigns
$doCampaigns = OA_Dal::factoryDO('campaigns');
$doCampaigns->campaignname = 'Test eCPM Campaign 1';
$doCampaigns->revenue = 0.1;
$doCampaigns->min_impressions = 100;
$doCampaigns->priority = $priority;
$doCampaigns->status = OA_ENTITY_STATUS_RUNNING;
$doCampaigns->ecpm_enabled = $ecpmEnabled;
$doCampaigns->clientid = $clientId1;
$idCampaign11 = DataGenerator::generateOne($doCampaigns);
$this->firsCampaignId = $idCampaign11;
$this->aExpectedData['campaigns'][$idCampaign11]['revenue'] = $doCampaigns->revenue;
$this->aExpectedData['campaigns'][$idCampaign11]['revenue_type'] = $doCampaigns->revenue_type;
$this->aExpectedData['campaigns'][$idCampaign11]['min_impressions'] =
$doCampaigns->min_impressions;
$doCampaigns = OA_Dal::factoryDO('campaigns');
$doCampaigns->campaignname = 'Test non eCPM Campaign 2';
$doCampaigns->revenue = 0.2;
$doCampaigns->min_impressions = 200;
$doCampaigns->priority = 1;
$doCampaigns->status = OA_ENTITY_STATUS_RUNNING;
$doCampaigns->clientid = $clientId1;
$idCampaign12 = DataGenerator::generateOne($doCampaigns);
$doCampaigns = OA_Dal::factoryDO('campaigns');
$doCampaigns->campaignname = 'Test eCPM Campaign 2';
$doCampaigns->revenue = 0.5;
$doCampaigns->min_impressions = 300;
$doCampaigns->priority = $priority;
$doCampaigns->status = OA_ENTITY_STATUS_RUNNING;
$doCampaigns->ecpm_enabled = $ecpmEnabled;
$doCampaigns->clientid = $clientId2;
$idCampaign2 = DataGenerator::generateOne($doCampaigns);
$this->secondCampaignId = $idCampaign2;
$this->aExpectedData['campaigns'][$idCampaign2]['revenue'] = $doCampaigns->revenue;
$this->aExpectedData['campaigns'][$idCampaign2]['revenue_type'] = $doCampaigns->revenue_type;
$this->aExpectedData['campaigns'][$idCampaign2]['min_impressions'] =
$doCampaigns->min_impressions;
// Add banners to campaign 11
$doBanners = OA_Dal::factoryDO('banners');
$doBanners->weight = 1;
$doBanners->campaignid = $idCampaign11;
$doBanners->status = OA_ENTITY_STATUS_RUNNING;
$idBanner11_1 = DataGenerator::generateOne($doBanners);
$this->firsAdId = $idBanner11_1;
$this->aExpectedData['banners'][$idBanner11_1]['weight'] = $doBanners->weight;
$doBanners = OA_Dal::factoryDO('banners');
$doBanners->weight = 2;
$doBanners->campaignid = $idCampaign11;
$doBanners->status = OA_ENTITY_STATUS_INACTIVE;
$idBanner11_2 = DataGenerator::generateOne($doBanners);
// Add banner to campaign 12
$doBanners = OA_Dal::factoryDO('banners');
$doBanners->weight = 3;
$doBanners->campaignid = $idCampaign12;
$doBanners->status = OA_ENTITY_STATUS_RUNNING;
$idBanner12 = DataGenerator::generateOne($doBanners);
// Add banner to campaign 2
$doBanners = OA_Dal::factoryDO('banners');
$doBanners->weight = 4;
$doBanners->campaignid = $idCampaign2;
$doBanners->status = OA_ENTITY_STATUS_RUNNING;
$idBanner2 = DataGenerator::generateOne($doBanners);
$this->secondAdId = $idBanner2;
$this->aExpectedData['banners'][$idBanner2]['weight'] = $doBanners->weight;
// Add zones
$idZone1 = DataGenerator::generateOne('zones', true);
$idZone2 = DataGenerator::generateOne('zones', true);
// Connect zones with banners (ad_zone_assoc)
$doAd_zone_assoc = OA_Dal::factoryDO('ad_zone_assoc');
$doAd_zone_assoc->ad_id = $idBanner11_1;
$doAd_zone_assoc->zone_id = $idZone1;
DataGenerator::generateOne($doAd_zone_assoc);
$this->aExpectedData['banners'][$idBanner11_1]['zone'] = $idZone1;
$doAd_zone_assoc = OA_Dal::factoryDO('ad_zone_assoc');
$doAd_zone_assoc->ad_id = $idBanner11_2;
$doAd_zone_assoc->zone_id = $idZone1;
DataGenerator::generateOne($doAd_zone_assoc);
$doAd_zone_assoc = OA_Dal::factoryDO('ad_zone_assoc');
$doAd_zone_assoc->ad_id = $idBanner12;
$doAd_zone_assoc->zone_id = $idZone1;
DataGenerator::generateOne($doAd_zone_assoc);
$doAd_zone_assoc = OA_Dal::factoryDO('ad_zone_assoc');
$doAd_zone_assoc->ad_id = $idBanner2;
$doAd_zone_assoc->zone_id = $idZone2;
DataGenerator::generateOne($doAd_zone_assoc);
$this->aExpectedData['banners'][$idBanner2]['zone'] = $idZone2;
return array($agencyId1, $agencyId2);
}
}
?> | gpl-2.0 |
dmlloyd/openjdk-modules | langtools/src/jdk.compiler/share/classes/com/sun/tools/javac/comp/Annotate.java | 52426 | /*
* Copyright (c) 2003, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.javac.comp;
import com.sun.tools.javac.code.*;
import com.sun.tools.javac.code.Attribute.Compound;
import com.sun.tools.javac.code.Attribute.TypeCompound;
import com.sun.tools.javac.code.Scope.WriteableScope;
import com.sun.tools.javac.code.Symbol.*;
import com.sun.tools.javac.code.TypeMetadata.Entry.Kind;
import com.sun.tools.javac.resources.CompilerProperties.Errors;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.JCTree.*;
import com.sun.tools.javac.tree.TreeInfo;
import com.sun.tools.javac.tree.TreeMaker;
import com.sun.tools.javac.tree.TreeScanner;
import com.sun.tools.javac.util.*;
import com.sun.tools.javac.util.JCDiagnostic.DiagnosticPosition;
import com.sun.tools.javac.util.List;
import javax.tools.JavaFileObject;
import java.util.*;
import static com.sun.tools.javac.code.Flags.SYNTHETIC;
import static com.sun.tools.javac.code.Kinds.Kind.MDL;
import static com.sun.tools.javac.code.Kinds.Kind.MTH;
import static com.sun.tools.javac.code.Kinds.Kind.PCK;
import static com.sun.tools.javac.code.Kinds.Kind.VAR;
import static com.sun.tools.javac.code.Scope.LookupKind.NON_RECURSIVE;
import static com.sun.tools.javac.code.TypeTag.ARRAY;
import static com.sun.tools.javac.code.TypeTag.CLASS;
import static com.sun.tools.javac.tree.JCTree.Tag.ANNOTATION;
import static com.sun.tools.javac.tree.JCTree.Tag.ASSIGN;
import static com.sun.tools.javac.tree.JCTree.Tag.IDENT;
import static com.sun.tools.javac.tree.JCTree.Tag.NEWARRAY;
import com.sun.tools.javac.util.JCDiagnostic.DiagnosticFlag;
/** Enter annotations onto symbols and types (and trees).
*
* This is also a pseudo stage in the compiler taking care of scheduling when annotations are
* entered.
*
* <p><b>This is NOT part of any supported API.
* If you write code that depends on this, you do so at your own risk.
* This code and its internal interfaces are subject to change or
* deletion without notice.</b>
*/
public class Annotate {
protected static final Context.Key<Annotate> annotateKey = new Context.Key<>();
public static Annotate instance(Context context) {
Annotate instance = context.get(annotateKey);
if (instance == null)
instance = new Annotate(context);
return instance;
}
private final Attr attr;
private final Check chk;
private final ConstFold cfolder;
private final DeferredLintHandler deferredLintHandler;
private final Enter enter;
private final Lint lint;
private final Log log;
private final Names names;
private final Resolve resolve;
private final TreeMaker make;
private final Symtab syms;
private final TypeEnvs typeEnvs;
private final Types types;
private final Attribute theUnfinishedDefaultValue;
private final boolean allowRepeatedAnnos;
private final String sourceName;
protected Annotate(Context context) {
context.put(annotateKey, this);
attr = Attr.instance(context);
chk = Check.instance(context);
cfolder = ConstFold.instance(context);
deferredLintHandler = DeferredLintHandler.instance(context);
enter = Enter.instance(context);
log = Log.instance(context);
lint = Lint.instance(context);
make = TreeMaker.instance(context);
names = Names.instance(context);
resolve = Resolve.instance(context);
syms = Symtab.instance(context);
typeEnvs = TypeEnvs.instance(context);
types = Types.instance(context);
theUnfinishedDefaultValue = new Attribute.Error(syms.errType);
Source source = Source.instance(context);
allowRepeatedAnnos = source.allowRepeatedAnnotations();
sourceName = source.name;
blockCount = 1;
}
    /** Semaphore to delay annotation processing */
    private int blockCount = 0;
    /** Called when annotations processing needs to be postponed. */
    public void blockAnnotations() {
        blockCount++;
    }
    /** Called when annotation processing can be resumed.
     *  When the last block is released, all queued work is flushed. */
    public void unblockAnnotations() {
        blockCount--;
        if (blockCount == 0)
            flush();
    }
    /** Variant which allows for a delayed flush of annotations.
     * Needed by ClassReader */
    public void unblockAnnotationsNoFlush() {
        blockCount--;
    }
    /** are we blocking annotation processing? */
    public boolean annotationsBlocked() {return blockCount > 0; }
    /** Called once Enter has completed; releases the initial block taken
     *  in the constructor (and thereby triggers the first flush). */
    public void enterDone() {
        unblockAnnotations();
    }
public List<TypeCompound> fromAnnotations(List<JCAnnotation> annotations) {
if (annotations.isEmpty()) {
return List.nil();
}
ListBuffer<TypeCompound> buf = new ListBuffer<>();
for (JCAnnotation anno : annotations) {
Assert.checkNonNull(anno.attribute);
buf.append((TypeCompound) anno.attribute);
}
return buf.toList();
}
    /** Annotate (used for everything else) */
    public void normal(Runnable r) {
        q.append(r);
    }
    /** Validate, triggers after 'normal' */
    public void validate(Runnable a) {
        validateQ.append(a);
    }
    /** Flush all annotation queues */
    public void flush() {
        // No-op while blocked or already flushing. Queue draining order is the
        // processing contract: normal -> type annotations -> afterTypes -> validate.
        // Tasks may enqueue further tasks; each while-loop drains to empty.
        if (annotationsBlocked()) return;
        if (isFlushing()) return;
        startFlushing();
        try {
            while (q.nonEmpty()) {
                q.next().run();
            }
            while (typesQ.nonEmpty()) {
                typesQ.next().run();
            }
            while (afterTypesQ.nonEmpty()) {
                afterTypesQ.next().run();
            }
            while (validateQ.nonEmpty()) {
                validateQ.next().run();
            }
        } finally {
            doneFlushing();
        }
    }
    // Work queues for deferred annotation processing.
    private ListBuffer<Runnable> q = new ListBuffer<>();
    private ListBuffer<Runnable> validateQ = new ListBuffer<>();
    // Re-entrancy guard so a task calling flush() doesn't recurse.
    private int flushCount = 0;
    private boolean isFlushing() { return flushCount > 0; }
    private void startFlushing() { flushCount++; }
    private void doneFlushing() { flushCount--; }
    ListBuffer<Runnable> typesQ = new ListBuffer<>();
    ListBuffer<Runnable> afterTypesQ = new ListBuffer<>();
    /** Queue a task on the type-annotation queue (runs after 'normal'). */
    public void typeAnnotation(Runnable a) {
        typesQ.append(a);
    }
    /** Queue a task to run after all type annotations have been processed. */
    public void afterTypes(Runnable a) {
        afterTypesQ.append(a);
    }
    /**
     * Queue annotations for later attribution and entering. This is probably the method you are looking for.
     *
     * @param annotations the list of JCAnnotations to attribute and enter
     * @param localEnv the enclosing env
     * @param s the Symbol on which to enter the annotations
     * @param deferPos report errors here
     */
    public void annotateLater(List<JCAnnotation> annotations, Env<AttrContext> localEnv,
            Symbol s, DiagnosticPosition deferPos)
    {
        if (annotations.isEmpty()) {
            return;
        }
        s.resetAnnotations(); // mark Annotations as incomplete for now
        normal(() -> {
            // Packages are unusual, in that they are the only type of declaration that can legally appear
            // more than once in a compilation, and in all cases refer to the same underlying symbol.
            // This means they are the only kind of declaration that syntactically may have multiple sets
            // of annotations, each on a different package declaration, even though that is ultimately
            // forbidden by JLS 8 section 7.4.
            // The corollary here is that all of the annotations on a package symbol may have already
            // been handled, meaning that the set of annotations pending completion is now empty.
            Assert.check(s.kind == PCK || s.annotationsPendingCompletion());
            // Diagnostics must be reported against the file/position the
            // annotations came from; restore both in the finally block.
            JavaFileObject prev = log.useSource(localEnv.toplevel.sourcefile);
            DiagnosticPosition prevLintPos =
                    deferPos != null
                    ? deferredLintHandler.setPos(deferPos)
                    : deferredLintHandler.immediate();
            Lint prevLint = deferPos != null ? null : chk.setLint(lint);
            try {
                if (s.hasAnnotations() && annotations.nonEmpty())
                    log.error(annotations.head.pos, "already.annotated", Kinds.kindName(s), s);
                Assert.checkNonNull(s, "Symbol argument to actualEnterAnnotations is null");
                // false is passed as fifth parameter since annotateLater is
                // never called for a type parameter
                annotateNow(s, annotations, localEnv, false, false);
            } finally {
                if (prevLint != null)
                    chk.setLint(prevLint);
                deferredLintHandler.setPos(prevLintPos);
                log.useSource(prev);
            }
        });
        validate(() -> { //validate annotations
            JavaFileObject prev = log.useSource(localEnv.toplevel.sourcefile);
            try {
                chk.validateAnnotations(annotations, s);
            } finally {
                log.useSource(prev);
            }
        });
    }
    /** Queue processing of an attribute default value.
     *  Attribution runs on the 'normal' queue; well-formedness checks on 'validate'. */
    public void annotateDefaultValueLater(JCExpression defaultValue, Env<AttrContext> localEnv,
            MethodSymbol m, DiagnosticPosition deferPos)
    {
        normal(() -> {
            JavaFileObject prev = log.useSource(localEnv.toplevel.sourcefile);
            DiagnosticPosition prevLintPos = deferredLintHandler.setPos(deferPos);
            try {
                enterDefaultValue(defaultValue, localEnv, m);
            } finally {
                deferredLintHandler.setPos(prevLintPos);
                log.useSource(prev);
            }
        });
        validate(() -> { //validate annotations
            JavaFileObject prev = log.useSource(localEnv.toplevel.sourcefile);
            try {
                // if default value is an annotation, check it is a well-formed
                // annotation value (e.g. no duplicate values, no missing values, etc.)
                chk.validateAnnotationTree(defaultValue);
            } finally {
                log.useSource(prev);
            }
        });
    }
/** Enter a default value for an annotation element. */
private void enterDefaultValue(JCExpression defaultValue,
Env<AttrContext> localEnv, MethodSymbol m) {
m.defaultValue = attributeAnnotationValue(m.type.getReturnType(), defaultValue, localEnv);
}
    /**
     * Gather up annotations into a map from type symbols to lists of Compound attributes,
     * then continue on with repeating annotations processing.
     */
    private <T extends Attribute.Compound> void annotateNow(Symbol toAnnotate,
            List<JCAnnotation> withAnnotations, Env<AttrContext> env, boolean typeAnnotations,
            boolean isTypeParam)
    {
        // LinkedHashMap preserves source order of distinct annotation types.
        Map<TypeSymbol, ListBuffer<T>> annotated = new LinkedHashMap<>();
        Map<T, DiagnosticPosition> pos = new HashMap<>();
        for (List<JCAnnotation> al = withAnnotations; !al.isEmpty(); al = al.tail) {
            JCAnnotation a = al.head;
            T c;
            // Attribute each annotation as either a declaration annotation or
            // a type annotation, depending on the caller.
            if (typeAnnotations) {
                @SuppressWarnings("unchecked")
                T tmp = (T)attributeTypeAnnotation(a, syms.annotationType, env);
                c = tmp;
            } else {
                @SuppressWarnings("unchecked")
                T tmp = (T)attributeAnnotation(a, syms.annotationType, env);
                c = tmp;
            }
            Assert.checkNonNull(c, "Failed to create annotation");
            // Group compounds by annotation type; a second occurrence is only
            // legal when the source level allows repeating annotations.
            if (annotated.containsKey(a.type.tsym)) {
                if (!allowRepeatedAnnos) {
                    log.error(DiagnosticFlag.SOURCE_LEVEL, a.pos(), "repeatable.annotations.not.supported.in.source", sourceName);
                }
                ListBuffer<T> l = annotated.get(a.type.tsym);
                l = l.append(c);
                annotated.put(a.type.tsym, l);
                pos.put(c, a.pos());
            } else {
                annotated.put(a.type.tsym, ListBuffer.of(c));
                pos.put(c, a.pos());
            }
            // Note: @Deprecated has no effect on local variables and parameters
            if (!c.type.isErroneous()
                    && (toAnnotate.kind == MDL || toAnnotate.owner.kind != MTH)
                    && types.isSameType(c.type, syms.deprecatedType)) {
                toAnnotate.flags_field |= (Flags.DEPRECATED | Flags.DEPRECATED_ANNOTATION);
                // Also record @Deprecated(forRemoval=true) in the symbol flags.
                Attribute fr = c.member(names.forRemoval);
                if (fr instanceof Attribute.Constant) {
                    Attribute.Constant v = (Attribute.Constant) fr;
                    if (v.type == syms.booleanType && ((Integer) v.value) != 0) {
                        toAnnotate.flags_field |= Flags.DEPRECATED_REMOVAL;
                    }
                }
            }
        }
        // Singleton groups pass through; repeated groups are replaced by a
        // synthesized container annotation (or dropped on error).
        List<T> buf = List.nil();
        for (ListBuffer<T> lb : annotated.values()) {
            if (lb.size() == 1) {
                buf = buf.prepend(lb.first());
            } else {
                AnnotationContext<T> ctx = new AnnotationContext<>(env, annotated, pos, typeAnnotations);
                T res = makeContainerAnnotation(lb.toList(), ctx, toAnnotate, isTypeParam);
                if (res != null)
                    buf = buf.prepend(res);
            }
        }
        // buf was built by prepending, so reverse to restore source order.
        if (typeAnnotations) {
            @SuppressWarnings("unchecked")
            List<TypeCompound> attrs = (List<TypeCompound>)buf.reverse();
            toAnnotate.appendUniqueTypeAttributes(attrs);
        } else {
            @SuppressWarnings("unchecked")
            List<Attribute.Compound> attrs = (List<Attribute.Compound>)buf.reverse();
            toAnnotate.resetAnnotations();
            toAnnotate.setDeclarationAttributes(attrs);
        }
    }
/**
* Attribute and store a semantic representation of the annotation tree {@code tree} into the
* tree.attribute field.
*
* @param tree the tree representing an annotation
* @param expectedAnnotationType the expected (super)type of the annotation
* @param env the current env in where the annotation instance is found
*/
public Attribute.Compound attributeAnnotation(JCAnnotation tree, Type expectedAnnotationType,
Env<AttrContext> env)
{
// The attribute might have been entered if it is Target or Repetable
// Because TreeCopier does not copy type, redo this if type is null
if (tree.attribute != null && tree.type != null)
return tree.attribute;
List<Pair<MethodSymbol, Attribute>> elems = attributeAnnotationValues(tree, expectedAnnotationType, env);
Attribute.Compound ac = new Attribute.Compound(tree.type, elems);
return tree.attribute = ac;
}
/** Attribute and store a semantic representation of the type annotation tree {@code tree} into
* the tree.attribute field.
*
* @param a the tree representing an annotation
* @param expectedAnnotationType the expected (super)type of the annotation
* @param env the the current env in where the annotation instance is found
*/
public Attribute.TypeCompound attributeTypeAnnotation(JCAnnotation a, Type expectedAnnotationType,
Env<AttrContext> env)
{
// The attribute might have been entered if it is Target or Repetable
// Because TreeCopier does not copy type, redo this if type is null
if (a.attribute == null || a.type == null || !(a.attribute instanceof Attribute.TypeCompound)) {
// Create a new TypeCompound
List<Pair<MethodSymbol,Attribute>> elems =
attributeAnnotationValues(a, expectedAnnotationType, env);
Attribute.TypeCompound tc =
new Attribute.TypeCompound(a.type, elems, TypeAnnotationPosition.unknown);
a.attribute = tc;
return tc;
} else {
// Use an existing TypeCompound
return (Attribute.TypeCompound)a.attribute;
}
}
    /**
     * Attribute annotation elements creating a list of pairs of the Symbol representing that
     * element and the value of that element as an Attribute. */
    private List<Pair<MethodSymbol, Attribute>> attributeAnnotationValues(JCAnnotation a,
            Type expected, Env<AttrContext> env)
    {
        // The annotation might have had its type attributed (but not
        // checked) by attr.attribAnnotationTypes during MemberEnter,
        // in which case we do not need to do it again.
        Type at = (a.annotationType.type != null ?
                a.annotationType.type : attr.attribType(a.annotationType, env));
        a.type = chk.checkType(a.annotationType.pos(), at, expected);
        boolean isError = a.type.isErroneous();
        if (!a.type.tsym.isAnnotationType() && !isError) {
            log.error(a.annotationType.pos(),
                    "not.annotation.type", a.type.toString());
            isError = true;
        }
        // List of name=value pairs (or implicit "value=" if size 1)
        List<JCExpression> args = a.args;
        boolean elidedValue = false;
        // special case: elided "value=" assumed; rewrite the argument in
        // place into an explicit "value = expr" assignment
        if (args.length() == 1 && !args.head.hasTag(ASSIGN)) {
            args.head = make.at(args.head.pos).
                    Assign(make.Ident(names.value), args.head);
            elidedValue = true;
        }
        // Attribute each pair; pairs whose element symbol is erroneous are dropped.
        ListBuffer<Pair<MethodSymbol,Attribute>> buf = new ListBuffer<>();
        for (List<JCExpression> tl = args; tl.nonEmpty(); tl = tl.tail) {
            Pair<MethodSymbol, Attribute> p = attributeAnnotationNameValuePair(tl.head, a.type, isError, env, elidedValue);
            if (p != null && !p.fst.type.isErroneous())
                buf.append(p);
        }
        return buf.toList();
    }
    // where
    /** Attribute a single "name = value" annotation argument, resolving the
     *  element name to a MethodSymbol on the annotation type and attributing
     *  the value against that element's return type. Returns null on error. */
    private Pair<MethodSymbol, Attribute> attributeAnnotationNameValuePair(JCExpression nameValuePair,
            Type thisAnnotationType, boolean badAnnotation, Env<AttrContext> env, boolean elidedValue)
    {
        if (!nameValuePair.hasTag(ASSIGN)) {
            log.error(nameValuePair.pos(), "annotation.value.must.be.name.value");
            // Still attribute the value (against errType) for error recovery.
            attributeAnnotationValue(nameValuePair.type = syms.errType, nameValuePair, env);
            return null;
        }
        JCAssign assign = (JCAssign)nameValuePair;
        if (!assign.lhs.hasTag(IDENT)) {
            log.error(nameValuePair.pos(), "annotation.value.must.be.name.value");
            attributeAnnotationValue(nameValuePair.type = syms.errType, nameValuePair, env);
            return null;
        }
        // Resolve element to MethodSym
        JCIdent left = (JCIdent)assign.lhs;
        Symbol method = resolve.resolveQualifiedMethod(elidedValue ? assign.rhs.pos() : left.pos(),
                env, thisAnnotationType,
                left.name, List.nil(), null);
        left.sym = method;
        left.type = method.type;
        // Suppress the "no member" error if the annotation type itself is bad.
        if (method.owner != thisAnnotationType.tsym && !badAnnotation)
            log.error(left.pos(), "no.annotation.member", left.name, thisAnnotationType);
        Type resultType = method.type.getReturnType();
        // Compute value part
        Attribute value = attributeAnnotationValue(resultType, assign.rhs, env);
        nameValuePair.type = resultType;
        return method.type.isErroneous() ? null : new Pair<>((MethodSymbol)method, value);
    }
    /** Attribute an annotation element value.
     *  Dispatches on the expected element type (array, nested annotation,
     *  primitive/String, Class, enum); the branch order also implements
     *  error recovery for mismatched tree shapes. */
    private Attribute attributeAnnotationValue(Type expectedElementType, JCExpression tree,
            Env<AttrContext> env)
    {
        //first, try completing the symbol for the annotation value - if a completion
        //error is thrown, we should recover gracefully, and display an
        //ordinary resolution diagnostic.
        try {
            expectedElementType.tsym.complete();
        } catch(CompletionFailure e) {
            log.error(tree.pos(), "cant.resolve", Kinds.kindName(e.sym), e.sym);
            expectedElementType = syms.errType;
        }
        if (expectedElementType.hasTag(ARRAY)) {
            return getAnnotationArrayValue(expectedElementType, tree, env);
        }
        //error recovery: an array initializer where no array is expected
        if (tree.hasTag(NEWARRAY)) {
            if (!expectedElementType.isErroneous())
                log.error(tree.pos(), "annotation.value.not.allowable.type");
            JCNewArray na = (JCNewArray)tree;
            if (na.elemtype != null) {
                log.error(na.elemtype.pos(), "new.not.allowed.in.annotation");
            }
            // Attribute the elements anyway so nested errors are reported.
            for (List<JCExpression> l = na.elems; l.nonEmpty(); l=l.tail) {
                attributeAnnotationValue(syms.errType,
                        l.head,
                        env);
            }
            return new Attribute.Error(syms.errType);
        }
        if (expectedElementType.tsym.isAnnotationType()) {
            if (tree.hasTag(ANNOTATION)) {
                return attributeAnnotation((JCAnnotation)tree, expectedElementType, env);
            } else {
                log.error(tree.pos(), "annotation.value.must.be.annotation");
                expectedElementType = syms.errType;
            }
        }
        //error recovery: an annotation where none is expected
        if (tree.hasTag(ANNOTATION)) {
            if (!expectedElementType.isErroneous())
                log.error(tree.pos(), "annotation.not.valid.for.type", expectedElementType);
            attributeAnnotation((JCAnnotation)tree, syms.errType, env);
            return new Attribute.Error(((JCAnnotation)tree).annotationType.type);
        }
        if (expectedElementType.isPrimitive() ||
                (types.isSameType(expectedElementType, syms.stringType) && !expectedElementType.hasTag(TypeTag.ERROR))) {
            return getAnnotationPrimitiveValue(expectedElementType, tree, env);
        }
        if (expectedElementType.tsym == syms.classType.tsym) {
            return getAnnotationClassValue(expectedElementType, tree, env);
        }
        if (expectedElementType.hasTag(CLASS) &&
                (expectedElementType.tsym.flags() & Flags.ENUM) != 0) {
            return getAnnotationEnumValue(expectedElementType, tree, env);
        }
        //error recovery: expected type admits no annotation value at all
        if (!expectedElementType.isErroneous())
            log.error(tree.pos(), "annotation.value.not.allowable.type");
        return new Attribute.Error(attr.attribExpr(tree, env, expectedElementType));
    }
private Attribute getAnnotationEnumValue(Type expectedElementType, JCExpression tree, Env<AttrContext> env) {
Type result = attr.attribExpr(tree, env, expectedElementType);
Symbol sym = TreeInfo.symbol(tree);
if (sym == null ||
TreeInfo.nonstaticSelect(tree) ||
sym.kind != VAR ||
(sym.flags() & Flags.ENUM) == 0) {
log.error(tree.pos(), "enum.annotation.must.be.enum.constant");
return new Attribute.Error(result.getOriginalType());
}
VarSymbol enumerator = (VarSymbol) sym;
return new Attribute.Enum(expectedElementType, enumerator);
}
    /** Attribute a Class-typed element value; the value must be a class
     *  literal. Unresolved class literals get a special attribute so the
     *  reference can be reported lazily. */
    private Attribute getAnnotationClassValue(Type expectedElementType, JCExpression tree, Env<AttrContext> env) {
        Type result = attr.attribExpr(tree, env, expectedElementType);
        if (result.isErroneous()) {
            // Does it look like an unresolved class literal?
            if (TreeInfo.name(tree) == names._class &&
                    ((JCFieldAccess) tree).selected.type.isErroneous()) {
                Name n = (((JCFieldAccess) tree).selected).type.tsym.flatName();
                return new Attribute.UnresolvedClass(expectedElementType,
                        types.createErrorType(n,
                                syms.unknownSymbol, syms.classType));
            } else {
                return new Attribute.Error(result.getOriginalType());
            }
        }
        // Class literals look like field accesses of a field named class
        // at the tree level
        if (TreeInfo.name(tree) != names._class) {
            log.error(tree.pos(), "annotation.value.must.be.class.literal");
            return new Attribute.Error(syms.errType);
        }
        return new Attribute.Class(types,
                (((JCFieldAccess) tree).selected).type);
    }
private Attribute getAnnotationPrimitiveValue(Type expectedElementType, JCExpression tree, Env<AttrContext> env) {
Type result = attr.attribExpr(tree, env, expectedElementType);
if (result.isErroneous())
return new Attribute.Error(result.getOriginalType());
if (result.constValue() == null) {
log.error(tree.pos(), "attribute.value.must.be.constant");
return new Attribute.Error(expectedElementType);
}
result = cfolder.coerce(result, expectedElementType);
return new Attribute.Constant(expectedElementType, result.constValue());
}
    /** Attribute an array-typed element value; a single non-array value is
     *  wrapped in an implicit one-element array first (JLS 9.7.1). */
    private Attribute getAnnotationArrayValue(Type expectedElementType, JCExpression tree, Env<AttrContext> env) {
        // Special case, implicit array
        if (!tree.hasTag(NEWARRAY)) {
            tree = make.at(tree.pos).
                    NewArray(null, List.nil(), List.of(tree));
        }
        JCNewArray na = (JCNewArray)tree;
        // "new T[] {...}" syntax (an explicit element type) is not allowed.
        if (na.elemtype != null) {
            log.error(na.elemtype.pos(), "new.not.allowed.in.annotation");
        }
        // Attribute each element against the array's component type.
        ListBuffer<Attribute> buf = new ListBuffer<>();
        for (List<JCExpression> l = na.elems; l.nonEmpty(); l=l.tail) {
            buf.append(attributeAnnotationValue(types.elemtype(expectedElementType),
                    l.head,
                    env));
        }
        na.type = expectedElementType;
        return new Attribute.
                Array(expectedElementType, buf.toArray(new Attribute[buf.length()]));
    }
    /* *********************************
     * Support for repeating annotations
     ***********************************/
    /**
     * This context contains all the information needed to synthesize new
     * annotations trees for repeating annotations.
     */
    private class AnnotationContext<T extends Attribute.Compound> {
        // Environment the repeated annotations were attributed in.
        public final Env<AttrContext> env;
        // All compounds seen, grouped by annotation type symbol.
        public final Map<Symbol.TypeSymbol, ListBuffer<T>> annotated;
        // Source position of each compound, for diagnostics.
        public final Map<T, JCDiagnostic.DiagnosticPosition> pos;
        // Whether we are processing type annotations (TypeCompound) or
        // declaration annotations (Compound).
        public final boolean isTypeCompound;
        public AnnotationContext(Env<AttrContext> env,
                Map<Symbol.TypeSymbol, ListBuffer<T>> annotated,
                Map<T, JCDiagnostic.DiagnosticPosition> pos,
                boolean isTypeCompound) {
            Assert.checkNonNull(env);
            Assert.checkNonNull(annotated);
            Assert.checkNonNull(pos);
            this.env = env;
            this.annotated = annotated;
            this.pos = pos;
            this.isTypeCompound = isTypeCompound;
        }
    }
    /* Process repeated annotations. This method returns the
     * synthesized container annotation or null IFF all repeating
     * annotation are invalid. This method reports errors/warnings.
     */
    private <T extends Attribute.Compound> T processRepeatedAnnotations(List<T> annotations,
            AnnotationContext<T> ctx, Symbol on, boolean isTypeParam)
    {
        T firstOccurrence = annotations.head;
        List<Attribute> repeated = List.nil();
        Type origAnnoType = null;
        Type arrayOfOrigAnnoType = null;
        Type targetContainerType = null;
        MethodSymbol containerValueSymbol = null;
        Assert.check(!annotations.isEmpty() && !annotations.tail.isEmpty()); // i.e. size() > 1
        int count = 0;
        // Validate each occurrence and collect the valid ones into 'repeated'.
        for (List<T> al = annotations; !al.isEmpty(); al = al.tail) {
            count++;
            // There must be more than a single anno in the annotation list
            Assert.check(count > 1 || !al.tail.isEmpty());
            T currentAnno = al.head;
            origAnnoType = currentAnno.type;
            if (arrayOfOrigAnnoType == null) {
                arrayOfOrigAnnoType = types.makeArrayType(origAnnoType);
            }
            // Only report errors if this isn't the first occurrence I.E. count > 1
            boolean reportError = count > 1;
            Type currentContainerType = getContainingType(currentAnno, ctx.pos.get(currentAnno), reportError);
            if (currentContainerType == null) {
                continue;
            }
            // Assert that the target Container is == for all repeated
            // annos of the same annotation type, the types should
            // come from the same Symbol, i.e. be '=='
            Assert.check(targetContainerType == null || currentContainerType == targetContainerType);
            targetContainerType = currentContainerType;
            containerValueSymbol = validateContainer(targetContainerType, origAnnoType, ctx.pos.get(currentAnno));
            if (containerValueSymbol == null) { // Check of CA type failed
                // errors are already reported
                continue;
            }
            repeated = repeated.prepend(currentAnno);
        }
        if (!repeated.isEmpty() && targetContainerType == null) {
            log.error(ctx.pos.get(annotations.head), "duplicate.annotation.invalid.repeated", origAnnoType);
            return null;
        }
        if (!repeated.isEmpty()) {
            repeated = repeated.reverse();
            DiagnosticPosition pos = ctx.pos.get(firstOccurrence);
            TreeMaker m = make.at(pos);
            // The container's single "value" element holds the repeated annos.
            Pair<MethodSymbol, Attribute> p =
                    new Pair<MethodSymbol, Attribute>(containerValueSymbol,
                            new Attribute.Array(arrayOfOrigAnnoType, repeated));
            if (ctx.isTypeCompound) {
                /* TODO: the following code would be cleaner:
                Attribute.TypeCompound at = new Attribute.TypeCompound(targetContainerType, List.of(p),
                        ((Attribute.TypeCompound)annotations.head).position);
                JCTypeAnnotation annoTree = m.TypeAnnotation(at);
                at = attributeTypeAnnotation(annoTree, targetContainerType, ctx.env);
                */
                // However, we directly construct the TypeCompound to keep the
                // direct relation to the contained TypeCompounds.
                Attribute.TypeCompound at = new Attribute.TypeCompound(targetContainerType, List.of(p),
                        ((Attribute.TypeCompound)annotations.head).position);
                JCAnnotation annoTree = m.TypeAnnotation(at);
                if (!chk.validateAnnotationDeferErrors(annoTree))
                    log.error(annoTree.pos(), Errors.DuplicateAnnotationInvalidRepeated(origAnnoType));
                if (!chk.isTypeAnnotation(annoTree, isTypeParam)) {
                    log.error(pos, isTypeParam ? Errors.InvalidRepeatableAnnotationNotApplicable(targetContainerType, on)
                            : Errors.InvalidRepeatableAnnotationNotApplicableInContext(targetContainerType));
                }
                at.setSynthesized(true);
                @SuppressWarnings("unchecked")
                T x = (T) at;
                return x;
            } else {
                // Declaration annotation: synthesize, check applicability,
                // then attribute the synthetic tree like a source annotation.
                Attribute.Compound c = new Attribute.Compound(targetContainerType, List.of(p));
                JCAnnotation annoTree = m.Annotation(c);
                if (!chk.annotationApplicable(annoTree, on)) {
                    log.error(annoTree.pos(),
                            Errors.InvalidRepeatableAnnotationNotApplicable(targetContainerType, on));
                }
                if (!chk.validateAnnotationDeferErrors(annoTree))
                    log.error(annoTree.pos(), "duplicate.annotation.invalid.repeated", origAnnoType);
                c = attributeAnnotation(annoTree, targetContainerType, ctx.env);
                c.setSynthesized(true);
                @SuppressWarnings("unchecked")
                T x = (T) c;
                return x;
            }
        } else {
            return null; // errors should have been reported elsewhere
        }
    }
    /**
     * Fetches the actual Type that should be the containing annotation.
     * Returns null (and optionally reports an error) when the annotation
     * is not @Repeatable, or when the container would be the annotation itself.
     */
    private Type getContainingType(Attribute.Compound currentAnno,
            DiagnosticPosition pos,
            boolean reportError)
    {
        Type origAnnoType = currentAnno.type;
        TypeSymbol origAnnoDecl = origAnnoType.tsym;
        // Fetch the Repeatable annotation from the current
        // annotation's declaration, or null if it has none
        Attribute.Compound ca = origAnnoDecl.getAnnotationTypeMetadata().getRepeatable();
        if (ca == null) { // has no Repeatable annotation
            if (reportError)
                log.error(pos, "duplicate.annotation.missing.container", origAnnoType, syms.repeatableType);
            return null;
        }
        return filterSame(extractContainingType(ca, pos, origAnnoDecl),
                origAnnoType);
    }
// returns null if t is same as 's', returns 't' otherwise
private Type filterSame(Type t, Type s) {
if (t == null || s == null) {
return t;
}
return types.isSameType(t, s) ? null : t;
}
    /** Extract the actual Type to be used for a containing annotation. */
    private Type extractContainingType(Attribute.Compound ca,
            DiagnosticPosition pos,
            TypeSymbol annoDecl)
    {
        // The next three checks check that the Repeatable annotation
        // on the declaration of the annotation type that is repeating is
        // valid.
        // Repeatable must have at least one element
        if (ca.values.isEmpty()) {
            log.error(pos, "invalid.repeatable.annotation", annoDecl);
            return null;
        }
        Pair<MethodSymbol,Attribute> p = ca.values.head;
        Name name = p.fst.name;
        if (name != names.value) { // should contain only one element, named "value"
            log.error(pos, "invalid.repeatable.annotation", annoDecl);
            return null;
        }
        if (!(p.snd instanceof Attribute.Class)) { // check that the value of "value" is an Attribute.Class
            log.error(pos, "invalid.repeatable.annotation", annoDecl);
            return null;
        }
        // The container type is the class named by @Repeatable's value element.
        return ((Attribute.Class)p.snd).getValue();
    }
    /* Validate that the suggested targetContainerType Type is a valid
     * container type for repeated instances of originalAnnoType
     * annotations. Return null and report errors if this is not the
     * case, return the MethodSymbol of the value element in
     * targetContainerType if it is suitable (this is needed to
     * synthesize the container). */
    private MethodSymbol validateContainer(Type targetContainerType,
            Type originalAnnoType,
            DiagnosticPosition pos) {
        MethodSymbol containerValueSymbol = null;
        boolean fatalError = false;
        // Validate that there is a (and only 1) value method
        Scope scope = targetContainerType.tsym.members();
        int nr_value_elems = 0;
        boolean error = false;
        for(Symbol elm : scope.getSymbolsByName(names.value)) {
            nr_value_elems++;
            if (nr_value_elems == 1 &&
                    elm.kind == MTH) {
                containerValueSymbol = (MethodSymbol)elm;
            } else {
                error = true;
            }
        }
        if (error) {
            log.error(pos,
                    "invalid.repeatable.annotation.multiple.values",
                    targetContainerType,
                    nr_value_elems);
            return null;
        } else if (nr_value_elems == 0) {
            log.error(pos,
                    "invalid.repeatable.annotation.no.value",
                    targetContainerType);
            return null;
        }
        // validate that the 'value' element is a method
        // probably "impossible" to fail this
        if (containerValueSymbol.kind != MTH) {
            log.error(pos,
                    "invalid.repeatable.annotation.invalid.value",
                    targetContainerType);
            fatalError = true;
        }
        // validate that the 'value' element has the correct return type
        // i.e. array of original anno
        Type valueRetType = containerValueSymbol.type.getReturnType();
        Type expectedType = types.makeArrayType(originalAnnoType);
        if (!(types.isArray(valueRetType) &&
                types.isSameType(expectedType, valueRetType))) {
            log.error(pos,
                    "invalid.repeatable.annotation.value.return",
                    targetContainerType,
                    valueRetType,
                    expectedType);
            fatalError = true;
        }
        return fatalError ? null : containerValueSymbol;
    }
    /** Synthesize the container annotation for a group of repeated
     *  annotations, and report an error if the container annotation is
     *  also explicitly present in the source. Returns null when all
     *  repeated instances were invalid. */
    private <T extends Attribute.Compound> T makeContainerAnnotation(List<T> toBeReplaced,
            AnnotationContext<T> ctx, Symbol sym, boolean isTypeParam)
    {
        // Process repeated annotations
        T validRepeated =
                processRepeatedAnnotations(toBeReplaced, ctx, sym, isTypeParam);
        if (validRepeated != null) {
            // Check that the container isn't manually
            // present along with repeated instances of
            // its contained annotation.
            ListBuffer<T> manualContainer = ctx.annotated.get(validRepeated.type.tsym);
            if (manualContainer != null) {
                log.error(ctx.pos.get(manualContainer.first()),
                        "invalid.repeatable.annotation.repeated.and.container.present",
                        manualContainer.first().type.tsym);
            }
        }
        // A null return will delete the Placeholder
        return validRepeated;
    }
/********************
* Type annotations *
********************/
/**
* Attribute the list of annotations and enter them onto s.
*/
public void enterTypeAnnotations(List<JCAnnotation> annotations, Env<AttrContext> env,
Symbol s, DiagnosticPosition deferPos, boolean isTypeParam)
{
Assert.checkNonNull(s, "Symbol argument to actualEnterTypeAnnotations is nul/");
JavaFileObject prev = log.useSource(env.toplevel.sourcefile);
DiagnosticPosition prevLintPos = null;
if (deferPos != null) {
prevLintPos = deferredLintHandler.setPos(deferPos);
}
try {
annotateNow(s, annotations, env, true, isTypeParam);
} finally {
if (prevLintPos != null)
deferredLintHandler.setPos(prevLintPos);
log.useSource(prev);
}
}
    /**
     * Enqueue tree for scanning of type annotations, attaching to the Symbol sym.
     */
    public void queueScanTreeAndTypeAnnotate(JCTree tree, Env<AttrContext> env, Symbol sym,
            DiagnosticPosition deferPos)
    {
        Assert.checkNonNull(sym);
        // Runs on the 'normal' queue; the scanner walks the whole signature.
        normal(() -> tree.accept(new TypeAnnotate(env, sym, deferPos)));
    }
    /**
     * Apply the annotations to the particular type.
     * Runs on the type-annotation queue, after 'normal' processing.
     */
    public void annotateTypeSecondStage(JCTree tree, List<JCAnnotation> annotations, Type storeAt) {
        typeAnnotation(() -> {
            List<Attribute.TypeCompound> compounds = fromAnnotations(annotations);
            Assert.check(annotations.size() == compounds.size());
            // Attach the compounds to the type via its annotation metadata.
            storeAt.getMetadataOfKind(Kind.ANNOTATIONS).combine(new TypeMetadata.Annotations(compounds));
        });
    }
    /**
     * Apply the annotations to the particular type.
     * For type parameters nothing is stored on a type here; only the
     * size consistency of the attributed compounds is checked.
     */
    public void annotateTypeParameterSecondStage(JCTree tree, List<JCAnnotation> annotations) {
        typeAnnotation(() -> {
            List<Attribute.TypeCompound> compounds = fromAnnotations(annotations);
            Assert.check(annotations.size() == compounds.size());
        });
    }
    /**
     * We need to use a TreeScanner, because it is not enough to visit the top-level
     * annotations. We also need to visit type arguments, etc.
     */
    private class TypeAnnotate extends TreeScanner {
        private final Env<AttrContext> env;
        private final Symbol sym;
        // Position for deferred diagnostics; updated while inside a var decl.
        private DiagnosticPosition deferPos;
        public TypeAnnotate(Env<AttrContext> env, Symbol sym, DiagnosticPosition deferPos) {
            this.env = env;
            this.sym = sym;
            this.deferPos = deferPos;
        }
        @Override
        public void visitAnnotatedType(JCAnnotatedType tree) {
            enterTypeAnnotations(tree.annotations, env, sym, deferPos, false);
            scan(tree.underlyingType);
        }
        @Override
        public void visitTypeParameter(JCTypeParameter tree) {
            // isTypeParam == true: these annotations sit on a type parameter.
            enterTypeAnnotations(tree.annotations, env, sym, deferPos, true);
            scan(tree.bounds);
        }
        @Override
        public void visitNewArray(JCNewArray tree) {
            enterTypeAnnotations(tree.annotations, env, sym, deferPos, false);
            // Each array dimension can carry its own annotations.
            for (List<JCAnnotation> dimAnnos : tree.dimAnnotations)
                enterTypeAnnotations(dimAnnos, env, sym, deferPos, false);
            scan(tree.elemtype);
            scan(tree.elems);
        }
        @Override
        public void visitMethodDef(JCMethodDecl tree) {
            scan(tree.mods);
            scan(tree.restype);
            scan(tree.typarams);
            scan(tree.recvparam);
            scan(tree.params);
            scan(tree.thrown);
            scan(tree.defaultValue);
            // Do not annotate the body, just the signature.
        }
        @Override
        public void visitVarDef(JCVariableDecl tree) {
            DiagnosticPosition prevPos = deferPos;
            deferPos = tree.pos();
            try {
                if (sym != null && sym.kind == VAR) {
                    // Don't visit a parameter once when the sym is the method
                    // and once when the sym is the parameter.
                    scan(tree.mods);
                    scan(tree.vartype);
                }
                scan(tree.init);
            } finally {
                deferPos = prevPos;
            }
        }
        @Override
        public void visitClassDef(JCClassDecl tree) {
            // We can only hit a classdef if it is declared within
            // a method. Ignore it - the class will be visited
            // separately later.
        }
        @Override
        public void visitNewClass(JCNewClass tree) {
            scan(tree.encl);
            scan(tree.typeargs);
            scan(tree.clazz);
            scan(tree.args);
            // the anonymous class instantiation if any will be visited separately.
        }
    }
    /*********************
     * Completer support *
     *********************/
    // Completer that attributes an annotation type's prototype metadata
    // (@Target and @Repeatable) from its source declaration.
    private AnnotationTypeCompleter theSourceCompleter = new AnnotationTypeCompleter() {
        @Override
        public void complete(ClassSymbol sym) throws CompletionFailure {
            Env<AttrContext> context = typeEnvs.get(sym);
            Annotate.this.attributeAnnotationType(context);
        }
    };
    /* Last stage completer to enter just enough annotations to have a prototype annotation type.
     * This currently means entering @Target and @Repeatable.
     */
    public AnnotationTypeCompleter annotationTypeSourceCompleter() {
        return theSourceCompleter;
    }
    /** Scan an annotation type declaration for @Target and @Repeatable and
     *  record them in the symbol's annotation type metadata. */
    private void attributeAnnotationType(Env<AttrContext> env) {
        Assert.check(((JCClassDecl)env.tree).sym.isAnnotationType(),
                "Trying to annotation type complete a non-annotation type");
        JavaFileObject prev = log.useSource(env.toplevel.sourcefile);
        try {
            JCClassDecl tree = (JCClassDecl)env.tree;
            AnnotationTypeVisitor v = new AnnotationTypeVisitor(attr, chk, syms, typeEnvs);
            v.scanAnnotationType(tree);
            tree.sym.getAnnotationTypeMetadata().setRepeatable(v.repeatable);
            tree.sym.getAnnotationTypeMetadata().setTarget(v.target);
        } finally {
            log.useSource(prev);
        }
    }
    /** Sentinel attribute used while a method's default value is still being
     *  attributed. */
    public Attribute unfinishedDefaultValue() {
        return theUnfinishedDefaultValue;
    }
    /** Completer invoked to finish an annotation type's prototype metadata. */
    public static interface AnnotationTypeCompleter {
        void complete(ClassSymbol sym) throws CompletionFailure;
    }
    /** Visitor to determine a prototype annotation type for a class declaring an annotation type.
     *
     *  <p><b>This is NOT part of any supported API.
     *  If you write code that depends on this, you do so at your own risk.
     *  This code and its internal interfaces are subject to change or
     *  deletion without notice.</b>
     */
    public class AnnotationTypeVisitor extends TreeScanner {
        private Env<AttrContext> env;
        private final Attr attr;
        private final Check check;
        private final Symtab tab;
        private final TypeEnvs typeEnvs;
        // Results of the scan: the declaration's @Target and @Repeatable, if any.
        private Compound target;
        private Compound repeatable;
        public AnnotationTypeVisitor(Attr attr, Check check, Symtab tab, TypeEnvs typeEnvs) {
            this.attr = attr;
            this.check = check;
            this.tab = tab;
            this.typeEnvs = typeEnvs;
        }
        public Compound getRepeatable() {
            return repeatable;
        }
        public Compound getTarget() {
            return target;
        }
        public void scanAnnotationType(JCClassDecl decl) {
            visitClassDef(decl);
        }
        @Override
        public void visitClassDef(JCClassDecl tree) {
            Env<AttrContext> prevEnv = env;
            env = typeEnvs.get(tree.sym);
            try {
                scan(tree.mods); // look for repeatable and target
                // don't descend into body
            } finally {
                env = prevEnv;
            }
        }
        @Override
        public void visitAnnotation(JCAnnotation tree) {
            // Attribute the annotation's type lazily, then record it if it is
            // @Target or @Repeatable.
            Type t = tree.annotationType.type;
            if (t == null) {
                t = attr.attribType(tree.annotationType, env);
                tree.annotationType.type = t = check.checkType(tree.annotationType.pos(), t, tab.annotationType);
            }
            if (t == tab.annotationTargetType) {
                target = Annotate.this.attributeAnnotation(tree, tab.annotationTargetType, env);
            } else if (t == tab.repeatableType) {
                repeatable = Annotate.this.attributeAnnotation(tree, tab.repeatableType, env);
            }
        }
    }
/** Represents the semantics of an Annotation Type.
 *
 * <p>Holds the @Target and @Repeatable annotations of an annotation type
 * plus accessors for its elements.  All queries lazily trigger completion
 * of the underlying symbol via {@link #init()}.
 *
 * <p><b>This is NOT part of any supported API.
 * If you write code that depends on this, you do so at your own risk.
 * This code and its internal interfaces are subject to change or
 * deletion without notice.</b>
 */
public static class AnnotationTypeMetadata {
    final ClassSymbol metaDataFor;
    // Set at most once each, via setTarget()/setRepeatable().
    private Compound target;
    private Compound repeatable;
    // Cleared after its one-shot use in init().
    private AnnotationTypeCompleter annotationTypeCompleter;

    public AnnotationTypeMetadata(ClassSymbol metaDataFor, AnnotationTypeCompleter annotationTypeCompleter) {
        this.metaDataFor = metaDataFor;
        this.annotationTypeCompleter = annotationTypeCompleter;
    }

    /** Ensure the symbol is member-entered and run the one-shot
     *  annotation-type completer exactly once. */
    private void init() {
        // Make sure metaDataFor is member entered
        while (!metaDataFor.isCompleted())
            metaDataFor.complete();

        if (annotationTypeCompleter != null) {
            // Null out the completer first so a reentrant call does not
            // run it twice.
            AnnotationTypeCompleter c = annotationTypeCompleter;
            annotationTypeCompleter = null;
            c.complete(metaDataFor);
        }
    }

    public void complete() {
        init();
    }

    public Compound getRepeatable() {
        init();
        return repeatable;
    }

    public void setRepeatable(Compound repeatable) {
        Assert.checkNull(this.repeatable);  // set-once invariant
        this.repeatable = repeatable;
    }

    public Compound getTarget() {
        init();
        return target;
    }

    public void setTarget(Compound target) {
        Assert.checkNull(this.target);  // set-once invariant
        this.target = target;
    }

    /** All annotation element methods, excluding &lt;clinit&gt; and
     *  synthetic members, in declaration order. */
    public Set<MethodSymbol> getAnnotationElements() {
        init();
        Set<MethodSymbol> members = new LinkedHashSet<>();
        WriteableScope s = metaDataFor.members();
        Iterable<Symbol> ss = s.getSymbols(NON_RECURSIVE);
        for (Symbol sym : ss)
            if (sym.kind == MTH &&
                sym.name != sym.name.table.names.clinit &&
                (sym.flags() & SYNTHETIC) == 0)
                members.add((MethodSymbol)sym);
        return members;
    }

    /** The subset of annotation elements that declare a default value. */
    public Set<MethodSymbol> getAnnotationElementsWithDefault() {
        init();
        Set<MethodSymbol> members = getAnnotationElements();
        Set<MethodSymbol> res = new LinkedHashSet<>();
        for (MethodSymbol m : members)
            if (m.defaultValue != null)
                res.add(m);
        return res;
    }

    @Override
    public String toString() {
        return "Annotation type for: " + metaDataFor;
    }

    public boolean isMetadataForAnnotationType() { return true; }

    public static AnnotationTypeMetadata notAnAnnotationType() {
        return NOT_AN_ANNOTATION_TYPE;
    }

    // Null-object instance used for symbols that are not annotation types:
    // every query returns an empty/neutral result and completion is a no-op.
    private static final AnnotationTypeMetadata NOT_AN_ANNOTATION_TYPE =
            new AnnotationTypeMetadata(null, null) {
                @Override
                public void complete() {
                } // do nothing

                @Override
                public String toString() {
                    return "Not an annotation type";
                }

                @Override
                public Set<MethodSymbol> getAnnotationElements() {
                    return new LinkedHashSet<>(0);
                }

                @Override
                public Set<MethodSymbol> getAnnotationElementsWithDefault() {
                    return new LinkedHashSet<>(0);
                }

                @Override
                public boolean isMetadataForAnnotationType() {
                    return false;
                }

                @Override
                public Compound getTarget() {
                    return null;
                }

                @Override
                public Compound getRepeatable() {
                    return null;
                }
            };
}
/** Reset per-round state for a new annotation-processing round.
 *  NOTE(review): blockCount's exact semantics are defined at its field
 *  declaration (not visible in this excerpt); resetting it to 1 appears to
 *  restore the initial "annotation handling blocked" state — confirm there. */
public void newRound() {
    blockCount = 1;
}
}
| gpl-2.0 |
conwayje/ase-python | ase/test/fio/info.py | 1043 | from ase import Atoms
from ase.io import PickleTrajectory
class Foo(object):
    """Example custom payload stored in ``Atoms.info``.

    Instances compare by ``value`` so equality survives a copy or a
    trajectory round-trip.
    """

    def __init__(self, value):
        self.value = value

    def __cmp__(self, other):
        # Python 2 three-way comparison: negative/zero/positive.
        return int(self.value - other.value)

    # Python 3 ignores __cmp__; without an explicit __eq__, equality falls
    # back to identity and the round-trip assertions in __main__ would fail
    # under Python 3.  These are backward-compatible additions.
    def __eq__(self, other):
        return self.value == other.value

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        return self.value < other.value

    def __hash__(self):
        # Defining __eq__ suppresses the inherited hash; restore hashability.
        return hash(self.value)
if __name__ == '__main__':
    # Import this module under its own name so that ``info.Foo`` resolves to
    # an importable (and therefore pickleable) class when written to disk.
    import info  # import ourselves to make info.Foo reachable

    # Create a molecule with an info attribute.
    # NOTE: the dict expression below is evaluated *before* the assignment
    # rebinds the name ``info`` from the module to the dict, so the
    # ``info.Foo(7)`` on the right-hand side still refers to the module.
    info = dict(creation_date='2011-06-27',
                chemical_name='Hydrogen',
                # custom classes also works provided that it is
                # imported and pickleable...
                foo=info.Foo(7),
                )
    molecule = Atoms('H2', positions=[(0., 0., 0.), (0., 0., 1.1)], info=info)
    assert molecule.info == info

    # Copy molecule: the info dict must survive Atoms.copy().
    atoms = molecule.copy()
    assert atoms.info == info

    # Save molecule to trajectory (writes ./info.traj in the cwd).
    traj = PickleTrajectory('info.traj', 'w', atoms=molecule)
    traj.write()
    del traj

    # Load molecule back from the trajectory; info must round-trip intact.
    t = PickleTrajectory('info.traj')
    atoms = t[-1]
    assert atoms.info == info
epfl-cosmo/lammps | src/fix_adapt.cpp | 21657 | /* ----------------------------------------------------------------------
LAMMPS - Large-scale Atomic/Molecular Massively Parallel Simulator
http://lammps.sandia.gov, Sandia National Laboratories
Steve Plimpton, sjplimp@sandia.gov
Copyright (2003) Sandia Corporation. Under the terms of Contract
DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
certain rights in this software. This software is distributed under
the GNU General Public License.
See the README file in the top-level LAMMPS directory.
------------------------------------------------------------------------- */
#include <math.h>
#include <string.h>
#include <stdlib.h>
#include "fix_adapt.h"
#include "atom.h"
#include "bond.h"
#include "update.h"
#include "group.h"
#include "modify.h"
#include "force.h"
#include "pair.h"
#include "pair_hybrid.h"
#include "kspace.h"
#include "fix_store.h"
#include "input.h"
#include "variable.h"
#include "respa.h"
#include "math_const.h"
#include "memory.h"
#include "error.h"
using namespace LAMMPS_NS;
using namespace FixConst;
using namespace MathConst;
enum{PAIR,KSPACE,ATOM,BOND};
enum{DIAMETER,CHARGE};
/* ---------------------------------------------------------------------- */
// Parse the "fix adapt" command:
//   fix ID group adapt N keyword args ... [reset yes/no] [scale yes/no]
// Two passes over the keywords: the first only counts adaptations so the
// adapt[] array can be sized, the second fills it in.  Per-adaptation
// original values are snapshotted later, in init().
FixAdapt::FixAdapt(LAMMPS *lmp, int narg, char **arg) : Fix(lmp, narg, arg),
nadapt(0), id_fix_diam(NULL), id_fix_chg(NULL), adapt(NULL)
{
  if (narg < 5) error->all(FLERR,"Illegal fix adapt command");
  nevery = force->inumeric(FLERR,arg[3]);
  if (nevery < 0) error->all(FLERR,"Illegal fix adapt command");

  dynamic_group_allow = 1;
  create_attribute = 1;

  // count # of adaptations
  nadapt = 0;

  int iarg = 4;
  while (iarg < narg) {
    if (strcmp(arg[iarg],"pair") == 0) {
      if (iarg+6 > narg) error->all(FLERR,"Illegal fix adapt command");
      nadapt++;
      iarg += 6;
    } else if (strcmp(arg[iarg],"kspace") == 0) {
      if (iarg+2 > narg) error->all(FLERR,"Illegal fix adapt command");
      nadapt++;
      iarg += 2;
    } else if (strcmp(arg[iarg],"atom") == 0) {
      if (iarg+3 > narg) error->all(FLERR,"Illegal fix adapt command");
      nadapt++;
      iarg += 3;
    } else if (strcmp(arg[iarg],"bond") == 0 ){
      if (iarg+5 > narg) error->all(FLERR,"Illegal fix adapt command");
      nadapt++;
      iarg += 5;
    } else break;
  }

  if (nadapt == 0) error->all(FLERR,"Illegal fix adapt command");
  adapt = new Adapt[nadapt];

  // parse keywords (second pass: fill in adapt[])

  nadapt = 0;
  diamflag = 0;
  chgflag = 0;

  iarg = 4;
  while (iarg < narg) {
    if (strcmp(arg[iarg],"pair") == 0) {
      // pair pstyle pparam ilo:ihi jlo:jhi v_name
      if (iarg+6 > narg) error->all(FLERR,"Illegal fix adapt command");
      adapt[nadapt].which = PAIR;
      int n = strlen(arg[iarg+1]) + 1;
      adapt[nadapt].pstyle = new char[n];
      strcpy(adapt[nadapt].pstyle,arg[iarg+1]);
      n = strlen(arg[iarg+2]) + 1;
      adapt[nadapt].pparam = new char[n];
      adapt[nadapt].pair = NULL;
      strcpy(adapt[nadapt].pparam,arg[iarg+2]);
      force->bounds(FLERR,arg[iarg+3],atom->ntypes,
                    adapt[nadapt].ilo,adapt[nadapt].ihi);
      force->bounds(FLERR,arg[iarg+4],atom->ntypes,
                    adapt[nadapt].jlo,adapt[nadapt].jhi);
      // parameter value must be an equal-style variable: "v_name"
      if (strstr(arg[iarg+5],"v_") == arg[iarg+5]) {
        n = strlen(&arg[iarg+5][2]) + 1;
        adapt[nadapt].var = new char[n];
        strcpy(adapt[nadapt].var,&arg[iarg+5][2]);
      } else error->all(FLERR,"Illegal fix adapt command");
      nadapt++;
      iarg += 6;
    } else if (strcmp(arg[iarg],"bond") == 0 ){
      // bond bstyle bparam ilo:ihi v_name
      if (iarg+5 > narg) error->all(FLERR, "Illegal fix adapt command");
      adapt[nadapt].which = BOND;
      int n = strlen(arg[iarg+1]) + 1;
      adapt[nadapt].bstyle = new char[n];
      strcpy(adapt[nadapt].bstyle,arg[iarg+1]);
      n = strlen(arg[iarg+2]) + 1;
      adapt[nadapt].bparam = new char[n];
      adapt[nadapt].bond = NULL;
      strcpy(adapt[nadapt].bparam,arg[iarg+2]);
      force->bounds(FLERR,arg[iarg+3],atom->ntypes,
                    adapt[nadapt].ilo,adapt[nadapt].ihi);
      if (strstr(arg[iarg+4],"v_") == arg[iarg+4]) {
        n = strlen(&arg[iarg+4][2]) + 1;
        adapt[nadapt].var = new char[n];
        strcpy(adapt[nadapt].var,&arg[iarg+4][2]);
      } else error->all(FLERR,"Illegal fix adapt command");
      nadapt++;
      iarg += 5;
    } else if (strcmp(arg[iarg],"kspace") == 0) {
      // kspace v_name
      if (iarg+2 > narg) error->all(FLERR,"Illegal fix adapt command");
      adapt[nadapt].which = KSPACE;
      if (strstr(arg[iarg+1],"v_") == arg[iarg+1]) {
        int n = strlen(&arg[iarg+1][2]) + 1;
        adapt[nadapt].var = new char[n];
        strcpy(adapt[nadapt].var,&arg[iarg+1][2]);
      } else error->all(FLERR,"Illegal fix adapt command");
      nadapt++;
      iarg += 2;
    } else if (strcmp(arg[iarg],"atom") == 0) {
      // atom diameter|charge v_name
      if (iarg+3 > narg) error->all(FLERR,"Illegal fix adapt command");
      adapt[nadapt].which = ATOM;
      if (strcmp(arg[iarg+1],"diameter") == 0) {
        adapt[nadapt].aparam = DIAMETER;
        diamflag = 1;
      } else if (strcmp(arg[iarg+1],"charge") == 0) {
        adapt[nadapt].aparam = CHARGE;
        chgflag = 1;
      } else error->all(FLERR,"Illegal fix adapt command");
      if (strstr(arg[iarg+2],"v_") == arg[iarg+2]) {
        int n = strlen(&arg[iarg+2][2]) + 1;
        adapt[nadapt].var = new char[n];
        strcpy(adapt[nadapt].var,&arg[iarg+2][2]);
      } else error->all(FLERR,"Illegal fix adapt command");
      nadapt++;
      iarg += 3;
    } else break;
  }

  // optional keywords

  resetflag = 0;   // reset: restore original values after the run
  scaleflag = 0;   // scale: variable is a prefactor, not an absolute value

  while (iarg < narg) {
    if (strcmp(arg[iarg],"reset") == 0) {
      if (iarg+2 > narg) error->all(FLERR,"Illegal fix adapt command");
      if (strcmp(arg[iarg+1],"no") == 0) resetflag = 0;
      else if (strcmp(arg[iarg+1],"yes") == 0) resetflag = 1;
      else error->all(FLERR,"Illegal fix adapt command");
      iarg += 2;
    } else if (strcmp(arg[iarg],"scale") == 0) {
      if (iarg+2 > narg) error->all(FLERR,"Illegal fix adapt command");
      if (strcmp(arg[iarg+1],"no") == 0) scaleflag = 0;
      else if (strcmp(arg[iarg+1],"yes") == 0) scaleflag = 1;
      else error->all(FLERR,"Illegal fix adapt command");
      iarg += 2;
    } else error->all(FLERR,"Illegal fix adapt command");
  }

  // allocate pair style arrays (per-type-pair originals snapshot)

  int n = atom->ntypes;
  for (int m = 0; m < nadapt; m++)
    if (adapt[m].which == PAIR)
      memory->create(adapt[m].array_orig,n+1,n+1,"adapt:array_orig");

  // allocate bond style arrays (per-bond-type originals snapshot):

  n = atom->nbondtypes;
  for (int m = 0; m < nadapt; ++m)
    if (adapt[m].which == BOND)
      memory->create(adapt[m].vector_orig,n+1,"adapt:vector_orig");
}
/* ---------------------------------------------------------------------- */
// Free per-adaptation strings and saved-original arrays, then remove the
// internal STORE fixes created by post_constructor(), if any.
FixAdapt::~FixAdapt()
{
  for (int m = 0; m < nadapt; m++) {
    delete [] adapt[m].var;
    if (adapt[m].which == PAIR) {
      delete [] adapt[m].pstyle;
      delete [] adapt[m].pparam;
      memory->destroy(adapt[m].array_orig);
    } else if (adapt[m].which == BOND) {
      delete [] adapt[m].bstyle;
      delete [] adapt[m].bparam;
      memory->destroy(adapt[m].vector_orig);
    }
  }
  delete [] adapt;

  // check nfix in case all fixes have already been deleted
  if (id_fix_diam && modify->nfix) modify->delete_fix(id_fix_diam);
  if (id_fix_chg && modify->nfix) modify->delete_fix(id_fix_chg);
  delete [] id_fix_diam;
  delete [] id_fix_chg;
}
/* ---------------------------------------------------------------------- */
// Report which integrator phases this fix participates in.
int FixAdapt::setmask()
{
  return PRE_FORCE | POST_RUN | PRE_FORCE_RESPA;
}
/* ----------------------------------------------------------------------
   if need to restore per-atom quantities, create new fix STORE styles
------------------------------------------------------------------------- */

void FixAdapt::post_constructor()
{
  // STORE fixes are only needed when per-atom values (diameter/charge)
  // must be restored after the run (reset yes).
  if (!resetflag) return;
  if (!diamflag && !chgflag) return;

  // new id = fix-ID + FIX_STORE_ATTRIBUTE
  // new fix group = group for this fix

  id_fix_diam = NULL;
  id_fix_chg = NULL;

  char **newarg = new char*[6];
  newarg[1] = group->names[igroup];
  newarg[2] = (char *) "STORE";
  newarg[3] = (char *) "peratom";
  newarg[4] = (char *) "1";
  newarg[5] = (char *) "1";

  if (diamflag) {
    int n = strlen(id) + strlen("_FIX_STORE_DIAM") + 1;
    id_fix_diam = new char[n];
    strcpy(id_fix_diam,id);
    strcat(id_fix_diam,"_FIX_STORE_DIAM");
    newarg[0] = id_fix_diam;
    modify->add_fix(6,newarg);
    fix_diam = (FixStore *) modify->fix[modify->nfix-1];

    // On restart the STORE fix already holds the saved values; otherwise
    // capture the current per-atom radii (0.0 for atoms outside the group).
    if (fix_diam->restart_reset) fix_diam->restart_reset = 0;
    else {
      double *vec = fix_diam->vstore;
      double *radius = atom->radius;
      int *mask = atom->mask;
      int nlocal = atom->nlocal;

      for (int i = 0; i < nlocal; i++) {
        if (mask[i] & groupbit) vec[i] = radius[i];
        else vec[i] = 0.0;
      }
    }
  }

  if (chgflag) {
    int n = strlen(id) + strlen("_FIX_STORE_CHG") + 1;
    id_fix_chg = new char[n];
    strcpy(id_fix_chg,id);
    strcat(id_fix_chg,"_FIX_STORE_CHG");
    newarg[0] = id_fix_chg;
    modify->add_fix(6,newarg);
    fix_chg = (FixStore *) modify->fix[modify->nfix-1];

    // Same restart logic as above, but for per-atom charges.
    if (fix_chg->restart_reset) fix_chg->restart_reset = 0;
    else {
      double *vec = fix_chg->vstore;
      double *q = atom->q;
      int *mask = atom->mask;
      int nlocal = atom->nlocal;

      for (int i = 0; i < nlocal; i++) {
        if (mask[i] & groupbit) vec[i] = q[i];
        else vec[i] = 0.0;
      }
    }
  }

  delete [] newarg;
}
/* ---------------------------------------------------------------------- */
// Resolve variables and pair/bond style pointers for every adaptation,
// validate them, and snapshot the original parameter values so they can be
// restored by restore_settings().
// BUGFIX: the bond hybrid guard previously compared against
// "hybrid_overlay" (underscore); style names use a slash, as the pair-style
// branch above does with "hybrid/overlay", so the check could never fire.
void FixAdapt::init()
{
  int i,j;

  // allow a dynamic group only if ATOM attribute not used

  if (group->dynamic[igroup])
    for (int i = 0; i < nadapt; i++)
      if (adapt[i].which == ATOM)
        error->all(FLERR,"Cannot use dynamic group with fix adapt atom");

  // setup and error checks

  anypair = 0;
  anybond = 0;

  for (int m = 0; m < nadapt; m++) {
    Adapt *ad = &adapt[m];

    // every adaptation is driven by an equal-style variable
    ad->ivar = input->variable->find(ad->var);
    if (ad->ivar < 0)
      error->all(FLERR,"Variable name for fix adapt does not exist");
    if (!input->variable->equalstyle(ad->ivar))
      error->all(FLERR,"Variable for fix adapt is invalid style");

    if (ad->which == PAIR) {
      anypair = 1;
      ad->pair = NULL;

      // if ad->pstyle has trailing sub-style annotation ":N",
      // strip it for pstyle arg to pair_match() and set nsub = N
      // this should work for appended suffixes as well

      int n = strlen(ad->pstyle) + 1;
      char *pstyle = new char[n];
      strcpy(pstyle,ad->pstyle);

      char *cptr;
      int nsub = 0;
      if ((cptr = strchr(pstyle,':'))) {
        *cptr = '\0';
        nsub = force->inumeric(FLERR,cptr+1);
      }

      // prefer the accelerator-suffixed variant of the style if enabled
      if (lmp->suffix_enable) {
        int len = 2 + strlen(pstyle) + strlen(lmp->suffix);
        char *psuffix = new char[len];
        strcpy(psuffix,pstyle);
        strcat(psuffix,"/");
        strcat(psuffix,lmp->suffix);
        ad->pair = force->pair_match(psuffix,1,nsub);
        delete[] psuffix;
      }
      if (ad->pair == NULL) ad->pair = force->pair_match(pstyle,1,nsub);
      if (ad->pair == NULL)
        error->all(FLERR,"Fix adapt pair style does not exist");

      void *ptr = ad->pair->extract(ad->pparam,ad->pdim);
      if (ptr == NULL)
        error->all(FLERR,"Fix adapt pair style param not supported");

      // for pair styles only parameters that are 2-d arrays in atom types or
      // scalars are supported

      if (ad->pdim != 2 && ad->pdim != 0)
        error->all(FLERR,"Fix adapt pair style param is not compatible");

      if (ad->pdim == 2) ad->array = (double **) ptr;
      if (ad->pdim == 0) ad->scalar = (double *) ptr;

      // if pair hybrid, test that ilo,ihi,jlo,jhi are valid for sub-style

      if (strcmp(force->pair_style,"hybrid") == 0 ||
          strcmp(force->pair_style,"hybrid/overlay") == 0) {
        PairHybrid *pair = (PairHybrid *) force->pair;
        for (i = ad->ilo; i <= ad->ihi; i++)
          for (j = MAX(ad->jlo,i); j <= ad->jhi; j++)
            if (!pair->check_ijtype(i,j,pstyle))
              error->all(FLERR,"Fix adapt type pair range is not valid for "
                         "pair hybrid sub-style");
      }

      delete [] pstyle;

    } else if (ad->which == BOND){
      ad->bond = NULL;
      anybond = 1;

      int n = strlen(ad->bstyle) + 1;
      char *bstyle = new char[n];
      strcpy(bstyle,ad->bstyle);

      // prefer the accelerator-suffixed variant of the style if enabled
      if (lmp->suffix_enable) {
        int len = 2 + strlen(bstyle) + strlen(lmp->suffix);
        char *bsuffix = new char[len];
        strcpy(bsuffix,bstyle);
        strcat(bsuffix,"/");
        strcat(bsuffix,lmp->suffix);
        ad->bond = force->bond_match(bsuffix);
        delete [] bsuffix;
      }
      if (ad->bond == NULL) ad->bond = force->bond_match(bstyle);
      if (ad->bond == NULL )
        error->all(FLERR,"Fix adapt bond style does not exist");

      void *ptr = ad->bond->extract(ad->bparam,ad->bdim);

      if (ptr == NULL)
        error->all(FLERR,"Fix adapt bond style param not supported");

      // for bond styles, use a vector

      if (ad->bdim == 1) ad->vector = (double *) ptr;

      // hybrid bond styles are not supported at all
      // (was "hybrid_overlay", which no style name ever matches)
      if (strcmp(force->bond_style,"hybrid") == 0 ||
          strcmp(force->bond_style,"hybrid/overlay") == 0)
        error->all(FLERR,"Fix adapt does not support bond_style hybrid");

      delete [] bstyle;

    } else if (ad->which == KSPACE) {
      if (force->kspace == NULL)
        error->all(FLERR,"Fix adapt kspace style does not exist");
      kspace_scale = (double *) force->kspace->extract("scale");

    } else if (ad->which == ATOM) {
      if (ad->aparam == DIAMETER) {
        if (!atom->radius_flag)
          error->all(FLERR,"Fix adapt requires atom attribute diameter");
      }
      if (ad->aparam == CHARGE) {
        if (!atom->q_flag)
          error->all(FLERR,"Fix adapt requires atom attribute charge");
      }
    }
  }

  // make copy of original pair/bond array values

  for (int m = 0; m < nadapt; m++) {
    Adapt *ad = &adapt[m];
    if (ad->which == PAIR && ad->pdim == 2) {
      for (i = ad->ilo; i <= ad->ihi; i++)
        for (j = MAX(ad->jlo,i); j <= ad->jhi; j++)
          ad->array_orig[i][j] = ad->array[i][j];
    } else if (ad->which == PAIR && ad->pdim == 0){
      ad->scalar_orig = *ad->scalar;
    } else if (ad->which == BOND && ad->bdim == 1){
      for (i = ad->ilo; i <= ad->ihi; ++i )
        ad->vector_orig[i] = ad->vector[i];
    }
  }

  // fixes that store initial per-atom values

  if (id_fix_diam) {
    int ifix = modify->find_fix(id_fix_diam);
    if (ifix < 0) error->all(FLERR,"Could not find fix adapt storage fix ID");
    fix_diam = (FixStore *) modify->fix[ifix];
  }
  if (id_fix_chg) {
    int ifix = modify->find_fix(id_fix_chg);
    if (ifix < 0) error->all(FLERR,"Could not find fix adapt storage fix ID");
    fix_chg = (FixStore *) modify->fix[ifix];
  }

  if (strstr(update->integrate_style,"respa"))
    nlevels_respa = ((Respa *) update->integrate)->nlevels;
}
/* ---------------------------------------------------------------------- */
// Apply the adaptations once before the run starts.
void FixAdapt::setup_pre_force(int vflag)
{
  change_settings();
}
/* ---------------------------------------------------------------------- */
// rRESPA variant: only act at the outermost level.
void FixAdapt::setup_pre_force_respa(int vflag, int ilevel)
{
  if (ilevel < nlevels_respa-1) return;
  setup_pre_force(vflag);
}
/* ---------------------------------------------------------------------- */
// Re-apply the adaptations every 'nevery' timesteps.
void FixAdapt::pre_force(int vflag)
{
  // nevery == 0 disables per-step updates; short-circuit also guards the
  // modulo against division by zero.
  if (nevery == 0 || update->ntimestep % nevery) return;
  change_settings();
}
/* ---------------------------------------------------------------------- */
// rRESPA variant: only act at the outermost level.
void FixAdapt::pre_force_respa(int vflag, int ilevel, int)
{
  if (ilevel < nlevels_respa-1) return;
  pre_force(vflag);
}
/* ---------------------------------------------------------------------- */
// After the run: restore the original values if "reset yes" was requested.
void FixAdapt::post_run()
{
  if (resetflag) restore_settings();
}
/* ----------------------------------------------------------------------
   change pair,kspace,atom parameters based on variable evaluation
------------------------------------------------------------------------- */

void FixAdapt::change_settings()
{
  int i,j;

  // variable evaluation may invoke computes so wrap with clear/add

  modify->clearstep_compute();

  for (int m = 0; m < nadapt; m++) {
    Adapt *ad = &adapt[m];
    double value = input->variable->compute_equal(ad->ivar);

    // set global scalar or type pair array values
    // with "scale yes" the variable multiplies the original value,
    // otherwise it replaces it

    if (ad->which == PAIR) {
      if (ad->pdim == 0) {
        if (scaleflag) *ad->scalar = value * ad->scalar_orig;
        else *ad->scalar = value;
      } else if (ad->pdim == 2) {
        if (scaleflag)
          for (i = ad->ilo; i <= ad->ihi; i++)
            for (j = MAX(ad->jlo,i); j <= ad->jhi; j++)
              ad->array[i][j] = value*ad->array_orig[i][j];
        else
          for (i = ad->ilo; i <= ad->ihi; i++)
            for (j = MAX(ad->jlo,i); j <= ad->jhi; j++)
              ad->array[i][j] = value;
      }

    // set bond type array values:

    } else if (ad->which == BOND) {
      if (ad->bdim == 1){
        if (scaleflag)
          for (i = ad->ilo; i <= ad->ihi; ++i )
            ad->vector[i] = value*ad->vector_orig[i];
        else
          for (i = ad->ilo; i <= ad->ihi; ++i )
            ad->vector[i] = value;
      }

    // set kspace scale factor

    } else if (ad->which == KSPACE) {
      *kspace_scale = value;

    // set per atom values, also make changes for ghost atoms

    } else if (ad->which == ATOM) {

      // reset radius from diameter
      // also scale rmass to new value

      if (ad->aparam == DIAMETER) {
        int mflag = 0;
        if (atom->rmass_flag) mflag = 1;
        double density;

        double *radius = atom->radius;
        double *rmass = atom->rmass;
        int *mask = atom->mask;
        int nlocal = atom->nlocal;
        int nall = nlocal + atom->nghost;

        if (mflag == 0) {
          for (i = 0; i < nall; i++)
            if (mask[i] & groupbit)
              radius[i] = 0.5*value;
        } else {
          // keep per-atom density constant while the radius changes
          for (i = 0; i < nall; i++)
            if (mask[i] & groupbit) {
              density = rmass[i] / (4.0*MY_PI/3.0 *
                                    radius[i]*radius[i]*radius[i]);
              radius[i] = 0.5*value;
              rmass[i] = 4.0*MY_PI/3.0 *
                radius[i]*radius[i]*radius[i] * density;
            }
        }
      } else if (ad->aparam == CHARGE) {
        double *q = atom->q;
        int *mask = atom->mask;
        int nlocal = atom->nlocal;
        int nall = nlocal + atom->nghost;

        for (i = 0; i < nall; i++)
          if (mask[i] & groupbit) q[i] = value;
      }
    }
  }

  modify->addstep_compute(update->ntimestep + nevery);

  // re-initialize pair styles if any PAIR settings were changed
  // ditto for bond styles if any BOND settings were changed
  // this resets other coeffs that may depend on changed values,
  // and also offset and tail corrections

  if (anypair) {
    for (int m = 0; m < nadapt; m++) {
      Adapt *ad = &adapt[m];
      if (ad->which == PAIR) {
        ad->pair->reinit();
      }
    }
  }
  if (anybond) {
    for (int m = 0; m < nadapt; ++m ) {
      Adapt *ad = &adapt[m];
      if (ad->which == BOND) {
        ad->bond->reinit();
      }
    }
  }

  // reset KSpace charges if charges have changed

  if (chgflag && force->kspace) force->kspace->qsum_qsq();
}
/* ----------------------------------------------------------------------
   restore pair,kspace,atom parameters to original values
------------------------------------------------------------------------- */

void FixAdapt::restore_settings()
{
  for (int m = 0; m < nadapt; m++) {
    Adapt *ad = &adapt[m];
    if (ad->which == PAIR) {
      if (ad->pdim == 0) *ad->scalar = ad->scalar_orig;
      else if (ad->pdim == 2) {
        for (int i = ad->ilo; i <= ad->ihi; i++)
          for (int j = MAX(ad->jlo,i); j <= ad->jhi; j++)
            ad->array[i][j] = ad->array_orig[i][j];
      }

    } else if (ad->which == BOND) {
      // BUGFIX: bond adaptations record their dimensionality in bdim
      // (set via bond->extract in init()); pdim is never assigned for
      // BOND entries, so the previous "ad->pdim == 1" test read an
      // uninitialized member and could silently skip the restore.
      if (ad->bdim == 1) {
        for (int i = ad->ilo; i <= ad->ihi; i++)
          ad->vector[i] = ad->vector_orig[i];
      }

    } else if (ad->which == KSPACE) {
      *kspace_scale = 1.0;

    } else if (ad->which == ATOM) {
      if (diamflag) {
        // restore radius and rescale rmass, keeping density constant
        double density;

        double *vec = fix_diam->vstore;
        double *radius = atom->radius;
        double *rmass = atom->rmass;
        int *mask = atom->mask;
        int nlocal = atom->nlocal;

        for (int i = 0; i < nlocal; i++)
          if (mask[i] & groupbit) {
            density = rmass[i] / (4.0*MY_PI/3.0 *
                                  radius[i]*radius[i]*radius[i]);
            radius[i] = vec[i];
            rmass[i] = 4.0*MY_PI/3.0 * radius[i]*radius[i]*radius[i] * density;
          }
      }
      if (chgflag) {
        double *vec = fix_chg->vstore;
        double *q = atom->q;
        int *mask = atom->mask;
        int nlocal = atom->nlocal;

        for (int i = 0; i < nlocal; i++)
          if (mask[i] & groupbit) q[i] = vec[i];
      }
    }
  }

  // propagate the restored coefficients exactly as change_settings() does
  if (anypair) force->pair->reinit();
  if (anybond) force->bond->reinit();
  if (chgflag && force->kspace) force->kspace->qsum_qsq();
}
/* ----------------------------------------------------------------------
   initialize one atom's storage values, called when atom is created
------------------------------------------------------------------------- */

void FixAdapt::set_arrays(int i)
{
  // seed the STORE fixes so a newly created atom can be restored later
  if (fix_diam) fix_diam->vstore[i] = atom->radius[i];
  if (fix_chg) fix_chg->vstore[i] = atom->q[i];
}
| gpl-2.0 |
bmcalhoun/Vectre-Consulting | wp-content/plugins/simple-membership/classes/class.swpm-ajax.php | 1727 | <?php
/**
 * AJAX handlers used by the Simple Membership registration/edit forms to
 * validate field values (email and username) without a page reload.
 *
 * Each handler echoes the array fragment expected by the client-side
 * validation engine — [ fieldId, isValid, message ] — and then exits.
 *
 * @author nur
 */
class SwpmAjax {

    /**
     * Validates an email address from $_GET and checks that no *other*
     * member already uses it (a row with the caller's own member_id is
     * allowed, so members can keep their address when editing a profile).
     */
    public static function validate_email_ajax() {
        global $wpdb;
        $field_value = filter_input(INPUT_GET, 'fieldValue');
        $field_id = filter_input(INPUT_GET, 'fieldId');
        $member_id = filter_input(INPUT_GET, 'member_id');
        if (!is_email($field_value)){
            echo '[ "' . $field_id . '",false, "'.SwpmUtils::_('Invalid Email Address').'" ]' ;
            exit;
        }
        $table = $wpdb->prefix . "swpm_members_tbl";
        // Parameterized query; rows with an empty user_name are ignored.
        $query = $wpdb->prepare("SELECT member_id FROM $table WHERE email = %s AND user_name != ''", $field_value);
        $db_id = $wpdb->get_var($query) ;
        $exists = ($db_id > 0) && $db_id != $member_id;
        // NOTE(review): 'Aready taken' is a typo for 'Already taken', but the
        // string doubles as a translation key — fix it together with the
        // language files, not here.
        echo '[ "' . $field_id . (($exists) ? '",false, "χ '.SwpmUtils::_('Aready taken').'"]' : '",true, "√ Available"]');
        exit;
    }

    /**
     * Validates a username from $_GET: first the allowed-character rule,
     * then uniqueness against the members table.
     */
    public static function validate_user_name_ajax() {
        global $wpdb;
        $field_value = filter_input(INPUT_GET, 'fieldValue');
        $field_id = filter_input(INPUT_GET, 'fieldId');
        if (!SwpmMemberUtils::is_valid_user_name($field_value)){
            echo '[ "' . $field_id . '",false,"χ '. SwpmUtils::_('Name contains invalid character'). '"]';
            exit;
        }
        $table = $wpdb->prefix . "swpm_members_tbl";
        $query = $wpdb->prepare("SELECT COUNT(*) FROM $table WHERE user_name = %s", $field_value);
        $exists = $wpdb->get_var($query) > 0;
        // NOTE(review): same 'Aready taken' translation-key typo as above.
        echo '[ "' . $field_id . (($exists) ? '",false,"χ '. SwpmUtils::_('Aready taken'). '"]' :
                '",true,"√ '.SwpmUtils::_('Available'). '"]');
        exit;
    }
}
| gpl-2.0 |
sangwook236/general-development-and-testing | sw_dev/cpp/rnd/test/signal_processing/nyu_depth_toolbox_v2_lib/cbf.cpp | 16978 | #include "cbf.h"
#include <fstream> // TODO: remove this.
#include <iostream>
#include <stdlib.h>
#include <math.h>
#include <cstdint>  // uint8_t
#include <cstdio>   // printf
#include <ctime>    // clock, clock_t, CLOCKS_PER_SEC
#include <vector>   // std::vector buffers (replace non-standard VLAs)
// Uncomment this define for intermediate filtering results.
// #define DEBUG
#define PI 3.14159
#define UCHAR_MAX 255
#define FILTER_RAD 5
// Print the elapsed milliseconds since 'start', tagged with 'message'.
// Compiled out entirely unless DEBUG is defined.
void toc(const char* message, clock_t start) {
#ifdef DEBUG
  const double elapsed_ms = 1000.0 * (clock() - start) / CLOCKS_PER_SEC;
  printf("[%s] %10.0f\n", message, elapsed_ms);
#endif
}
// Fill offsets_h with the column-major linear offsets of every pixel in a
// (2*filter_rad+1)^2 square window centered on a pixel, for an image stored
// with img_height rows per column (offset = dy + img_height * dx).
//
// Args:
//   filter_rad - radius of the square filter window.
//   offsets_h  - output array of (2*filter_rad+1)^2 ints, written in
//                (yy outer, xx inner) order.
//   img_height - height of the image the offsets index into.
//
// (Removed two unused locals, filter_len/filter_size, and the stale comment
// that documented a nonexistent filter_size parameter.)
void create_offset_array(int filter_rad, int* offsets_h, int img_height) {
  int kk = 0;
  for (int yy = -filter_rad; yy <= filter_rad; ++yy) {
    for (int xx = -filter_rad; xx <= filter_rad; ++xx, ++kk) {
      offsets_h[kk] = yy + img_height * xx;
    }
  }
}
// Compute per-scale image dimensions and linear offsets for an image pyramid.
//
// Args:
//   heights, widths - outputs (num_scales entries): level size, halved
//                     (integer floor) at each successive scale.
//   pyr_offsets     - output: linear offset of each level's first pixel in a
//                     packed pyramid buffer.
//   orig_height, orig_width - dimensions of the finest (scale 0) level.
//   num_scales      - number of pyramid levels (must be < 31).
void calc_pyr_sizes(int* heights, int* widths, int* pyr_offsets, int orig_height, int orig_width, int num_scales) {
  int offset = 0;
  for (int scale = 0; scale < num_scales; ++scale) {
    pyr_offsets[scale] = offset;

    // Divide by 2^scale with an exact integer shift; the original float
    // pow() round-trip was slower and risked rounding for large images.
    heights[scale] = orig_height >> scale;
    widths[scale] = orig_width >> scale;

    offset += heights[scale] * widths[scale];
  }

#ifdef DEBUG
  for (int ii = 0; ii < num_scales; ++ii) {
    printf("Scale %d: [%d x %d], offset=%d\n", ii, heights[ii], widths[ii], pyr_offsets[ii]);
  }
#endif
}
// Total number of pixels summed across all pyramid levels.
int get_pyr_size(int* heights, int* widths, int num_scales) {
  int total_pixels = 0;
  for (int scale = 0; scale < num_scales; ++scale)
    total_pixels += heights[scale] * widths[scale];
  return total_pixels;
}
// We're upsampling from the result matrix (which is small) to the depth
// matrix, which is larger: copy one pixel of depth from a coarser pyramid
// level (src) into a finer one (dst), nearest-neighbor style.  Processes a
// single destination pixel and does nothing when that pixel is not masked
// as missing.
//
// For example, dst could be 480x640 and src may be 240x320.
//
// Args:
//   depth_dst/mask_dst/valid_dst - destination images, height_dst x width_dst,
//       column-major.
//   depth_src/result_src/mask_src/valid_src - source images,
//       height_src x width_src, column-major; result_src holds filtered
//       values for pixels that were missing in depth_src.
//   dst_img_ind - column-major linear index of the destination pixel.
//
// (Removed the unused local num_threads.)
void upsample_cpu(float* depth_dst,
                  bool* mask_dst,
                  bool* valid_dst,
                  float* depth_src,
                  float* result_src,
                  bool* mask_src,
                  bool* valid_src,
                  int height_src,
                  int width_src,
                  int height_dst,
                  int width_dst,
                  int dst_img_ind) {
  // Dont bother if the upsampled one isnt missing.
  if (!mask_dst[dst_img_ind]) {
    return;
  }

  // Column-major linear index -> (y, x) in the destination image.
  int x_dst = floorf((float) dst_img_ind / height_dst);
  int y_dst = fmodf(dst_img_ind, height_dst);

  // Nearest-neighbor source coordinates.
  int y_src = static_cast<int>((float) y_dst * height_src / height_dst);
  int x_src = static_cast<int>((float) x_dst * width_src / width_dst);

  // Finally, convert to absolute coords.
  int src_img_ind = y_src + height_src * x_src;

  // Prefer the observed depth when the source pixel wasn't missing;
  // otherwise take the filtered result.
  if (!mask_src[src_img_ind]) {
    depth_dst[dst_img_ind] = depth_src[src_img_ind];
  } else {
    depth_dst[dst_img_ind] = result_src[src_img_ind];
  }

  valid_dst[dst_img_ind] = valid_src[src_img_ind];
}
// Cross-bilateral filter for one missing depth pixel.
//
// Args:
//   depth - the depth image, a HxW vector
//   intensity - the intensity image, a HxW vector.
//   is_missing - a binary mask specifying whether each pixel is missing
//                (and needs to be filled in) or not.
//   valid_in - a mask specifying which of the input values are allowed
//              to be used for filtering.
//   valid_out - a mask specifying which of the output values are allowed
//               to be used for future filtering.
//   result - the result of the filtering operation, a HxW matrix.
//   abs_inds - the absolute indices (into depth, intensity, etc) which
//              need filtering.
//   offsets - vector of offsets from the current abs_ind to be used for
//             filtering.
//   gaussian_space - the values (weights) of the spatial gaussian filter
//                    corresponding to the offset matrix.
//   sigma_s - unused here (the spatial kernel is precomputed); kept for
//             interface compatibility.
//   sigma_r - range-kernel standard deviation.
//   idx - index into abs_inds of the pixel to filter.
//
// Changes vs. the original: the range-kernel buffer is a std::vector instead
// of a variable-length array (VLA — a compiler extension, not standard C++),
// isnan is qualified as std::isnan, and three unused locals
// (weight_intensity_sum, gaussian_range_sum, count) were removed.
void cbf_cpu(const float* depth, const float* intensity, bool* is_missing,
             bool* valid_in, bool* valid_out, float* result,
             const int* abs_inds,
             const int* offsets,
             const float* gaussian_space,
             int height,
             int width,
             int filter_rad,
             float sigma_s,
             float sigma_r,
             int numThreads,
             int idx) {
  int abs_ind = abs_inds[idx];

  int src_Y = abs_ind % height;
  int src_X = abs_ind / height;

  int filter_len = filter_rad * 2 + 1;
  int filter_size = filter_len * filter_len;

  float weight_sum = 0;
  float value_sum = 0;

  // Range-kernel weights, one per filter tap.  Zero-initialized; entries
  // for rejected taps stay unused because the second loop applies the same
  // rejection tests.
  std::vector<float> gaussian_range(filter_size, 0.0f);

  // NOTE(review): the boundary rejection below uses the compile-time
  // FILTER_RAD macro rather than the filter_rad argument; current callers
  // pass filter_rad == FILTER_RAD — confirm before using other radii.
  for (int ii = 0; ii < filter_size; ++ii) {
    // Unfortunately we need to double check that the radii are correct
    // unless we add better processing of borders.
    int abs_offset = abs_ind + offsets[ii];
    int dst_Y = abs_offset % height;
    int dst_X = abs_offset / height;
    if (abs_offset < 0 || abs_offset >= (height * width)
        || abs(src_Y-dst_Y) > FILTER_RAD || abs(src_X-dst_X) > FILTER_RAD) {
      continue;

    // The offsets are into ANY part of the image. So they MAY be accessing
    // a pixel that was originally missing. However, if that pixel has been
    // filled in, then we can still use it.
    } else if (is_missing[abs_offset] && !valid_in[abs_offset]) {
      continue;
    }

    float vv = intensity[abs_offset] - intensity[abs_ind];
    gaussian_range[ii] = exp(-(vv * vv) / (2*sigma_r * sigma_r));
  }

  for (int ii = 0; ii < filter_size; ++ii) {
    // Get the Absolute offset into the image (1..N where N=H*W)
    int abs_offset = abs_ind + offsets[ii];
    int dst_Y = abs_offset % height;
    int dst_X = abs_offset / height;
    if (abs_offset < 0 || abs_offset >= (height * width)
        || abs(src_Y-dst_Y) > FILTER_RAD || abs(src_X-dst_X) > FILTER_RAD) {
      continue;
    } else if (is_missing[abs_offset] && !valid_in[abs_offset]) {
      continue;
    }

    weight_sum += gaussian_space[ii] * gaussian_range[ii];
    value_sum += depth[abs_offset] * gaussian_space[ii] * gaussian_range[ii];
  }

  // No usable neighbor: leave the pixel unfilled and not valid.
  if (weight_sum == 0) {
    return;
  }

  if (std::isnan(weight_sum)) {
    printf("*******************\n");
    printf(" Weight sum is NaN\n");
    printf("*******************\n");
  }

  value_sum /= weight_sum;

  result[abs_ind] = value_sum;
  valid_out[abs_ind] = 1;
}
// Fill gaussian_h with a normalized (sums to 1) spatial Gaussian kernel of
// radius filter_rad and standard deviation sigma_s, written in the same
// (row outer, column inner) order produced by create_offset_array.
void create_spatial_gaussian(int filter_rad, float sigma_s, float* gaussian_h) {
  const int side = filter_rad * 2 + 1;
  const int num_taps = side * side;
  const float denom = 2 * sigma_s * sigma_s;

  float total = 0;
  int tap = 0;
  for (int dy = -filter_rad; dy <= filter_rad; ++dy) {
    for (int dx = -filter_rad; dx <= filter_rad; ++dx, ++tap) {
      gaussian_h[tap] = exp(-(dx * dx + dy * dy) / denom);
      total += gaussian_h[tap];
    }
  }

  // Normalize so the kernel weights sum to one.
  for (int tt = 0; tt < num_taps; ++tt) {
    gaussian_h[tt] /= total;
  }
}
// Collect the linear indices of all set (missing) pixels in the mask.
//
// Args:
//   height, width - image size at the current scale.
//   mask - per-pixel missing flags (height*width entries); must already be
//          offset to the current pyramid level.
//   abs_inds_to_filter_h - pre-allocated output buffer; receives the indices
//          of the set entries in increasing order.
//
// Returns the number of missing pixels written.
int get_missing_pixel_coords(int height, int width, bool* mask, int* abs_inds_to_filter_h) {
  const int total = height * width;
  int count = 0;
  for (int idx = 0; idx < total; ++idx) {
    if (!mask[idx]) continue;
    abs_inds_to_filter_h[count++] = idx;
  }
  return count;
}
// Write a binary (P5) PGM image to disk, transposing from the column-major
// layout used internally to the row-major layout PGM expects.
// true -> 255, false -> 0.
//
// Changes vs. the original: std::vector replaces the variable-length array
// 'uint8_t im[NN]' (a compiler extension, not standard C++, and a stack
// overflow risk for large images), and the redundant floor() on an integer
// division is dropped.
static void savePGM(bool* imf, const char *name, int height, int width) {
  int NN = height * width;
  std::vector<uint8_t> im(NN);
  for (int nn = 0; nn < NN; ++nn) {
    // Column-major index -> (x, y) -> row-major index.
    int y = nn % height;
    int x = nn / height;
    int mm = y * width + x;
    im[mm] = uint8_t(255*imf[nn]);
  }

  std::ofstream file(name, std::ios::out | std::ios::binary);
  file << "P5\n" << width << " " << height << "\n" << UCHAR_MAX << "\n";
  file.write((char *)im.data(), NN * sizeof(uint8_t));
}
// Writes a float image (values expected in [0, 1]) to disk as a binary
// (P5) PGM file by scaling each pixel to [0, 255] and truncating.
//
// The input is stored column-major (index = x * height + y) while PGM is
// row-major, so pixels are transposed into row-major order before writing.
//
// Args:
//   imf    - column-major float image, height*width elements.
//   name   - destination file path.
//   height - image height in pixels.
//   width  - image width in pixels.
static void savePGM(float* imf, const char *name, int height, int width) {
  int NN = height * width;
  // Heap-allocate the staging buffer: the previous variable-length stack
  // array is a non-standard extension and can overflow the stack for
  // large images.
  uint8_t* im = (uint8_t*) malloc(NN * sizeof(uint8_t));
  for (int nn = 0; nn < NN; ++nn) {
    // Column-major index -> (x, y). Integer division already truncates,
    // so no floor() call is needed.
    int y = nn % height;
    int x = nn / height;
    // (x, y) -> row-major index.
    int mm = y * width + x;
    im[mm] = uint8_t(255*imf[nn]);
  }
  std::ofstream file(name, std::ios::out | std::ios::binary);
  file << "P5\n" << width << " " << height << "\n" << UCHAR_MAX << "\n";
  file.write((char *)im, NN * sizeof(uint8_t));
  free(im);
}
// Runs the cross-bilateral fill for a single pyramid scale. Only the
// pixels flagged in mask_h are filtered; filled values are written into
// result_h and newly valid pixels are marked in valid_h.
//
// Args:
//   depth_h, intensity_h - depth and guide images at this scale.
//   mask_h   - true where depth is missing.
//   valid_h  - validity map; updated in place as pixels are filled.
//   result_h - output buffer for filtered depth values.
//   abs_inds_to_filter_h - scratch buffer for the missing-pixel indices.
//   height, width        - image size at this scale.
//   sigma_s, sigma_r     - spatial and range sigmas for this scale.
void filter_at_scale(float* depth_h,
                     float* intensity_h,
                     bool* mask_h,
                     bool* valid_h,
                     float* result_h,
                     int* abs_inds_to_filter_h,
                     int height,
                     int width,
                     float sigma_s,
                     float sigma_r) {
  const int filter_rad = FILTER_RAD;
  const int filter_len = 2 * filter_rad + 1;
  const int F = filter_len * filter_len;
  // Precompute the neighbor offset table and the spatial gaussian weights.
  int* offsets_h = (int*) malloc(F * sizeof(int));
  create_offset_array(filter_rad, offsets_h, height);
  float* gaussian_h = (float*) malloc(F * sizeof(float));
  create_spatial_gaussian(filter_rad, sigma_s, gaussian_h);
  // ************************************************
  // Rather than executing the filter for EVERY pixel in the image, gather
  // just the pixels that are actually missing depth and filter those.
  // ************************************************
  const int num_missing_pixels =
      get_missing_pixel_coords(height, width, mask_h, abs_inds_to_filter_h);
  printf("Num Missing Pixels: %d\n", num_missing_pixels);
  clock_t start_filter = clock();
  // Snapshot the validity map so the filter reads a stable copy while
  // valid_h is updated in place.
  const int num_pixels = height * width;
  bool* valid_in = (bool*) malloc(num_pixels * sizeof(bool));
  for (int pp = 0; pp < num_pixels; ++pp) {
    valid_in[pp] = valid_h[pp];
  }
  for (int mm = 0; mm < num_missing_pixels; ++mm) {
    cbf_cpu(depth_h,
            intensity_h,
            mask_h,
            valid_in,
            valid_h,
            result_h,
            abs_inds_to_filter_h,
            offsets_h,
            gaussian_h,
            height,
            width,
            filter_rad,
            sigma_s,
            sigma_r,
            num_missing_pixels,
            mm);
  }
  toc("FILTER OP", start_filter);
  free(valid_in);
  free(offsets_h);
  free(gaussian_h);
}
// Multi-scale cross-bilateral depth fill. Normalizes the 8-bit inputs to
// [0, 1], builds a nearest-neighbor image pyramid of depth / intensity /
// mask, filters each scale from coarsest to finest (upsampling each coarse
// result to seed the next finer scale), and finally writes filled depth
// for masked pixels back out as 8-bit values.
//
// Args:
//   height, width - full-resolution image size.
//   depth         - 8-bit depth image, height*width, column-major.
//   intensity     - 8-bit guide/intensity image, same layout.
//   mask_h        - true where depth is missing and must be filled.
//   result        - 8-bit output; unmasked pixels copy the input depth.
//   num_scales    - number of pyramid levels.
//   sigma_s, sigma_r - per-scale spatial / range sigmas (num_scales each).
void cbf::cbf(int height, int width, uint8_t* depth, uint8_t* intensity,
              bool* mask_h, uint8_t* result, unsigned num_scales,
              double* sigma_s, double* sigma_r) {
  clock_t start_func = clock();
  // Per-scale dimensions and per-scale offsets into the flattened
  // pyramid buffers below.
  int pyr_heights[num_scales];
  int pyr_widths[num_scales];
  int pyr_offsets[num_scales];
  calc_pyr_sizes(&pyr_heights[0], &pyr_widths[0], &pyr_offsets[0], height, width, num_scales);
  // Allocate the memory needed for the absolute missing pixel indices. We'll
  // allocate the number of bytes required for the largest image, since the
  // smaller ones obviously fit inside of it.
  int N = height * width;
  int* abs_inds_to_filter_h = (int*) malloc(N * sizeof(int));
  int pyr_size = get_pyr_size(&pyr_heights[0], &pyr_widths[0], num_scales);
  // ************************
  //  CREATING THE PYRAMID
  // ************************
  clock_t start_pyr = clock();  // NOTE(review): never reported via toc() below.
  // NEG TIME.
  float* depth_ms_h = (float*) malloc(pyr_size * sizeof(float));
  float* intensity_ms_h = (float*) malloc(pyr_size * sizeof(float));
  bool* mask_ms_h = (bool*) malloc(pyr_size * sizeof(bool));
  float* result_ms_h = (float*) malloc(pyr_size * sizeof(float));
  bool* valid_ms_h = (bool*) malloc(pyr_size * sizeof(bool));
  // Scale 0 is the full-resolution input, normalized to [0, 1]. A pixel
  // starts out valid exactly when it is not masked as missing.
  for (int nn = 0; nn < N; ++nn) {
    depth_ms_h[nn] = depth[nn] / 255.0;
    intensity_ms_h[nn] = intensity[nn] / 255.0;
    mask_ms_h[nn] = mask_h[nn];
    valid_ms_h[nn] = !mask_h[nn];
    result_ms_h[nn] = 0;
  }
  // Fill the coarser scales by nearest-neighbor sampling from scale 0,
  // walking each scale's buffer with one running pointer per plane.
  float* depth_ms_h_p = depth_ms_h + pyr_offsets[1];
  float* intensity_ms_h_p = intensity_ms_h + pyr_offsets[1];
  bool* mask_ms_h_p = mask_ms_h + pyr_offsets[1];
  bool* valid_ms_h_p = valid_ms_h + pyr_offsets[1];
  float* result_ms_h_p = result_ms_h + pyr_offsets[1];
  for (int scale = 1; scale < num_scales; ++scale) {
    for (int xx = 0; xx < pyr_widths[scale]; ++xx) {
      for (int yy = 0; yy < pyr_heights[scale]; ++yy, ++depth_ms_h_p, ++intensity_ms_h_p, ++mask_ms_h_p, ++result_ms_h_p, ++valid_ms_h_p) {
        // Map the coarse (xx, yy) back to the full-resolution pixel.
        int abs_yy = static_cast<int>(((float)yy / pyr_heights[scale]) * height);
        int abs_xx = static_cast<int>(((float)xx / pyr_widths[scale]) * width);
        int img_offset = abs_yy + height * abs_xx;
        *depth_ms_h_p = depth_ms_h[img_offset];
        *intensity_ms_h_p = intensity_ms_h[img_offset];
        *mask_ms_h_p = mask_h[img_offset];
        *valid_ms_h_p = !mask_h[img_offset];
        *result_ms_h_p = 0;
      }
    }
  }
  // *********************************
  //  RUN THE ACTUAL FILTERING CODE
  // *********************************
  // Coarse-to-fine: filter each scale, then upsample its result into the
  // next finer scale so the finer filter has more valid pixels to use.
  for (int scale = num_scales - 1; scale >= 0; --scale) {
    printf("Filtering at scale %d, [%dx%d]\n", scale, pyr_heights[scale], pyr_widths[scale]);
#ifdef DEBUG
    char filename1[50];
    sprintf(filename1, "missing_pixels_before_filtering_scale%d.pgm", scale);
    // Now that we've performed the filtering, save the intermediate image.
    savePGM(mask_ms_h + pyr_offsets[scale], filename1, pyr_heights[scale], pyr_widths[scale]);
    char filename2[50];
    sprintf(filename2, "valid_pixels_before_filtering_scale%d.pgm", scale);
    // Now that we've performed the filtering, save the intermediate image.
    savePGM(valid_ms_h + pyr_offsets[scale], filename2, pyr_heights[scale], pyr_widths[scale]);
    sprintf(filename2, "valid_intensity_before_filtering_scale%d.pgm", scale);
    // Now that we've performed the filtering, save the intermediate image.
    savePGM(intensity_ms_h + pyr_offsets[scale], filename2, pyr_heights[scale], pyr_widths[scale]);
    sprintf(filename2, "depth_before_filtering_scale%d.pgm", scale);
    // Now that we've performed the filtering, save the intermediate image.
    savePGM(depth_ms_h + pyr_offsets[scale], filename2, pyr_heights[scale], pyr_widths[scale]);
#endif
    filter_at_scale(depth_ms_h + pyr_offsets[scale],
                    intensity_ms_h + pyr_offsets[scale],
                    mask_ms_h + pyr_offsets[scale],
                    valid_ms_h + pyr_offsets[scale],
                    result_ms_h + pyr_offsets[scale],
                    abs_inds_to_filter_h,
                    pyr_heights[scale],
                    pyr_widths[scale],
                    sigma_s[scale],
                    sigma_r[scale]);
#ifdef DEBUG
    sprintf(filename2, "valid_pixels_after_filtering_scale%d.pgm", scale);
    // Now that we've performed the filtering, save the intermediate image.
    savePGM(valid_ms_h + pyr_offsets[scale], filename2, pyr_heights[scale], pyr_widths[scale]);
#endif
#ifdef DEBUG
    char filename[50];
    sprintf(filename, "depth_after_filtering_scale%d.pgm", scale);
    // Now that we've performed the filtering, save the intermediate image.
    savePGM(result_ms_h + pyr_offsets[scale], filename, pyr_heights[scale], pyr_widths[scale]);
#endif
    // The finest scale has no finer neighbor to seed.
    if (scale == 0) {
      continue;
    }
    // Now, we need to upsample the resulting depth and store it in the next
    // highest location.
    int num_missing_pixels = pyr_heights[scale-1] * pyr_widths[scale-1];
    printf("Upsampling %d\n", num_missing_pixels);
    for (int i = 0; i < num_missing_pixels; ++i) {
      upsample_cpu(depth_ms_h + pyr_offsets[scale-1],
                   mask_ms_h + pyr_offsets[scale-1],
                   valid_ms_h + pyr_offsets[scale-1],
                   depth_ms_h + pyr_offsets[scale],
                   result_ms_h + pyr_offsets[scale],
                   mask_ms_h + pyr_offsets[scale],
                   valid_ms_h + pyr_offsets[scale],
                   pyr_heights[scale],
                   pyr_widths[scale],
                   pyr_heights[scale-1],
                   pyr_widths[scale-1],
                   i);
    }
#ifdef DEBUG
    sprintf(filename, "up_depth_after_filtering_scale%d.pgm", scale);
    // Now that we've performed the filtering, save the intermediate image.
    savePGM(depth_ms_h + pyr_offsets[scale-1], filename, pyr_heights[scale-1], pyr_widths[scale-1]);
    sprintf(filename, "up_valid_after_filtering_scale%d.pgm", scale);
    // Now that we've performed the filtering, save the intermediate image.
    savePGM(valid_ms_h + pyr_offsets[scale-1], filename, pyr_heights[scale-1], pyr_widths[scale-1]);
#endif
  }
  // Copy the final result from the device.
  // Masked pixels take the filtered value (rescaled to 8 bits); everything
  // else passes the original depth through untouched.
  for (int nn = 0; nn < N; ++nn) {
    if (mask_ms_h[nn]) {
      result[nn] = static_cast<uint8_t>(result_ms_h[nn] * 255);
    } else {
      result[nn] = depth[nn];
    }
  }
  free(depth_ms_h);
  free(intensity_ms_h);
  free(mask_ms_h);
  free(result_ms_h);
  free(valid_ms_h);
  free(abs_inds_to_filter_h);
  toc("Entire Function", start_func);
}
| gpl-2.0 |
trasher/glpi | front/device.php | 1444 | <?php
/**
* ---------------------------------------------------------------------
* GLPI - Gestionnaire Libre de Parc Informatique
* Copyright (C) 2015-2021 Teclib' and contributors.
*
* http://glpi-project.org
*
* based on GLPI - Gestionnaire Libre de Parc Informatique
* Copyright (C) 2003-2014 by the INDEPNET Development Team.
*
* ---------------------------------------------------------------------
*
* LICENSE
*
* This file is part of GLPI.
*
* GLPI is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* GLPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with GLPI. If not, see <http://www.gnu.org/licenses/>.
* ---------------------------------------------------------------------
*/
// Bootstrap GLPI (configuration, session, DB, rights checks).
include ('../inc/includes.php');
// The device type to display comes straight from the query string and is
// only accepted when it names an existing class.
// NOTE(review): class_exists() alone permits any loaded class name here —
// presumably dropdown.common.php validates the type further; verify.
if (!isset($_GET['itemtype']) || !class_exists($_GET['itemtype'])) {
   throw new \RuntimeException(
      'Missing or incorrect device type called!'
   );
}
// Instantiate the requested device dropdown and hand off to the shared
// dropdown controller.
$dropdown = new $_GET['itemtype'];
include (GLPI_ROOT . "/front/dropdown.common.php");
| gpl-2.0 |
nds32/gcc | libstdc++-v3/testsuite/23_containers/multiset/allocator/move.cc | 1854 | // Copyright (C) 2013-2014 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License along
// with this library; see the file COPYING3. If not see
// <http://www.gnu.org/licenses/>.
// { dg-options "-std=gnu++11" }
#include <set>
#include <testsuite_hooks.h>
#include <testsuite_allocator.h>
// Minimal element type used to exercise multiset's allocator handling.
struct T { int i; };
// Strict weak ordering over the single data member.
bool operator<(T l, T r) { return l.i < r.i; }
using Cmp = std::less<T>;
using __gnu_test::uneq_allocator;
void test01()
{
bool test __attribute__((unused)) = true;
typedef uneq_allocator<T> alloc_type;
typedef std::multiset<T, Cmp, alloc_type> test_type;
test_type v1(alloc_type(1));
v1 = { test_type::value_type{} };
auto it = v1.begin();
test_type v2(std::move(v1));
VERIFY(1 == v1.get_allocator().get_personality());
VERIFY(1 == v2.get_allocator().get_personality());
VERIFY( it == v2.begin() );
}
void test02()
{
bool test __attribute__((unused)) = true;
typedef uneq_allocator<T> alloc_type;
typedef std::multiset<T, Cmp, alloc_type> test_type;
test_type v1(alloc_type(1));
v1 = { test_type::value_type{} };
test_type v2(std::move(v1), alloc_type(2));
VERIFY(1 == v1.get_allocator().get_personality());
VERIFY(2 == v2.get_allocator().get_personality());
}
// Run both move-construction scenarios.
int main()
{
  test01();
  test02();
  return 0;
}
| gpl-2.0 |
ddam2015/elite-lighting-wp | wp-content/mu-plugins/force-strong-passwords/force-zxcvbn.min.js | 227 | jQuery(document).ready(function(t){var s=t("#pass-strength-result");s.length&&s.parents("form").on("submit",function(){t(this).append('<input type="hidden" name="slt-fsp-pass-strength-result" value="'+s.attr("class")+'">')})}); | gpl-2.0 |
achellies/src | graphics/skia/src/ext/platform_canvas.cc | 3248 | // Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ext/platform_canvas.h"
#include "ext/bitmap_platform_device.h"
#include "include/core/SkTypes.h"
namespace skia {

// The default constructor defers all setup to initialize() /
// initializeWithDevice().
PlatformCanvas::PlatformCanvas() {}

// Platform canvases manage their devices themselves; installing a bitmap
// device from the outside is a programming error.
SkDevice* PlatformCanvas::setBitmapDevice(const SkBitmap&) {
  SkASSERT(false);  // Should not be called.
  return NULL;
}

// static
size_t PlatformCanvas::StrideForWidth(unsigned width) {
  return 4 * width;  // 32 bits per pixel.
}

bool PlatformCanvas::initializeWithDevice(SkDevice* device) {
  if (!device)
    return false;
  setDevice(device);
  device->unref();  // Was created with refcount 1, and setDevice also refs.
  return true;
}

SkCanvas* CreateBitmapCanvas(int width, int height, bool is_opaque) {
  return new PlatformCanvas(width, height, is_opaque);
}

SkCanvas* TryCreateBitmapCanvas(int width, int height, bool is_opaque) {
  PlatformCanvas* result = new PlatformCanvas();
  if (result->initialize(width, height, is_opaque))
    return result;
  delete result;
  return NULL;
}

SkDevice* GetTopDevice(const SkCanvas& canvas) {
  SkCanvas::LayerIter layer_iter(const_cast<SkCanvas*>(&canvas), false);
  return layer_iter.device();
}

bool SupportsPlatformPaint(const SkCanvas* canvas) {
  // TODO(alokp): Rename IsNativeFontRenderingAllowed after removing these
  // calls from WebKit.
  PlatformDevice* device = GetPlatformDevice(GetTopDevice(*canvas));
  return device && device->IsNativeFontRenderingAllowed();
}

PlatformSurface BeginPlatformPaint(SkCanvas* canvas) {
  PlatformDevice* device = GetPlatformDevice(GetTopDevice(*canvas));
  if (!device)
    return 0;
  return device->BeginPlatformPaint();
}

void EndPlatformPaint(SkCanvas* canvas) {
  PlatformDevice* device = GetPlatformDevice(GetTopDevice(*canvas));
  if (device)
    device->EndPlatformPaint();
}

void DrawToNativeContext(SkCanvas* canvas, PlatformSurface context, int x,
                         int y, const PlatformRect* src_rect) {
  PlatformDevice* device = GetPlatformDevice(GetTopDevice(*canvas));
  if (device)
    device->DrawToNativeContext(context, x, y, src_rect);
}

void DrawToNativeLayeredContext(SkCanvas* canvas, PlatformSurface context, int x,
                                int y, const PlatformRect* src_rect) {
  PlatformDevice* device = GetPlatformDevice(GetTopDevice(*canvas));
  if (device)
    device->DrawToNativeLayeredContext(context, x, y, src_rect);
}

// Xfermode proc that forces the destination alpha channel to fully opaque.
static SkPMColor MakeOpaqueXfermodeProc(SkPMColor src, SkPMColor dst) {
  return dst | (0xFF << SK_A32_SHIFT);
}

void MakeOpaque(SkCanvas* canvas, int x, int y, int width, int height) {
  if (width <= 0 || height <= 0)
    return;

  SkRect rect;
  rect.setXYWH(SkIntToScalar(x), SkIntToScalar(y),
               SkIntToScalar(width), SkIntToScalar(height));
  SkPaint paint;
  // Color 0 so we don't draw anything on a device that ignores xfermodes.
  paint.setColor(0);
  // Install the custom mode that stamps alpha to 0xFF.
  paint.setXfermode(new SkProcXfermode(MakeOpaqueXfermodeProc))->unref();
  canvas->drawRect(rect, paint);
}

}  // namespace skia
| gpl-2.0 |
artbeatads/messermeister_ab_rackservers | administrator/components/com_rsform/views/submissions/tmpl/exportprocess.php | 1206 | <?php
/**
* @version 1.4.0
* @package RSform!Pro 1.4.0
* @copyright (C) 2007-2013 www.rsjoomla.com
* @license GPL, http://www.gnu.org/copyleft/gpl.html
*/
// Safety: this template must only render inside a Joomla request.
defined('_JEXEC') or die('Restricted access');
// Renders the export progress UI: a progress bar, a visible 3-second
// countdown, and the JavaScript that kicks off the chunked AJAX export
// (exportProcess) once the countdown has elapsed.
?>
<div class="progressWrapper"><div class="progressBar" id="progressBar">0%</div></div>
<p><?php echo JText::sprintf('RSFP_EXPORT_START_MSG', '<strong id="exportmsg">3</strong>'); ?></p>
<input type="hidden" value="<?php echo $this->file; ?>" id="ExportFile" />
<input type="hidden" value="<?php echo $this->exportType; ?>" id="exportType" />
<div style="display: none" id="backButtonContainer">
	<button type="button" class="rs_button" onclick="document.location.href='<?php echo JRoute::_('index.php?option=com_rsform&view=forms&layout=edit&formId='.$this->formId); ?>'"><?php echo JText::_('RSFP_BACK_TO_FORM'); ?></button>
</div>
<script type="text/javascript">
// Tick the visible countdown once per second and stop it at zero.
t = setInterval(function() {
	var count = parseInt(document.getElementById('exportmsg').innerHTML, 10);
	if (count <= 0)
		// Fix: this id comes from setInterval, so clear it with clearInterval.
		return clearInterval(t);
	document.getElementById('exportmsg').innerHTML = count - 1;
}, 1000);
// Start the chunked export once the countdown has elapsed.
setTimeout(function() {
	exportProcess(0,<?php echo $this->limit; ?>,<?php echo $this->total;?>);
}, 3000);
</script>
thomaszbz/TYPO3.CMS | typo3/sysext/core/Classes/Resource/TextExtraction/PlainTextExtractor.php | 1652 | <?php
namespace TYPO3\CMS\Core\Resource\TextExtraction;
/*
* This file is part of the TYPO3 CMS project.
*
* It is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License, either version 2
* of the License, or any later version.
*
* For the full copyright and license information, please read the
* LICENSE.txt file that was distributed with this source code.
*
* The TYPO3 project - inspiring people to share!
*/
use TYPO3\CMS\Core\Resource\FileInterface;
use TYPO3\CMS\Core\Utility\PathUtility;
/**
 * Text extractor for plain text files: simply reads the file contents.
 */
class PlainTextExtractor implements TextExtractorInterface {

	/**
	 * Tells whether this extractor can handle the given file.
	 *
	 * @param FileInterface $file
	 * @return bool TRUE for text/plain files, FALSE otherwise
	 */
	public function canExtractText(FileInterface $file) {
		return $file->getMimeType() === 'text/plain';
	}

	/**
	 * Reads and returns the plain text content of the file.
	 *
	 * @param FileInterface $file
	 * @return string
	 */
	public function extractText(FileInterface $file) {
		$temporaryFile = $file->getForLocalProcessing(FALSE);
		// extract text
		$content = file_get_contents($temporaryFile);
		// When the storage is remote, getForLocalProcessing() produced a
		// temporary copy in typo3temp which must be removed again. The file
		// name is so unique that it is nearly impossible for a storage file
		// to carry the same name, so comparing names tells the copy apart
		// from the original.
		if (PathUtility::basename($temporaryFile) !== $file->getName()) {
			unlink($temporaryFile);
		}
		return $content;
	}

}
| gpl-2.0 |
franksun/wp | plugins/editors/jce/jce.php | 9547 | <?php
/**
* @package JCE
* @copyright Copyright (c) 2009-2012 Ryan Demmer. All rights reserved.
* @license GNU/GPL 2 or later - http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
* JCE is free software. This version may have been modified pursuant
* to the GNU General Public License, and as distributed it includes or
* is derivative of works licensed under the GNU General Public License or
* other free or open source software licenses.
*/
// Do not allow direct access
defined('_JEXEC') or die('RESTRICTED');

jimport('joomla.plugin.plugin');

/**
 * JCE WYSIWYG Editor Plugin.
 *
 * Bridges Joomla's editor events (onInit, onDisplay, onSave, ...) to the
 * JCE component installed under com_jce.
 *
 * @since 1.5
 */
class plgEditorJCE extends JPlugin {

    /**
     * Constructor
     *
     * @access public
     * @param object $subject The object to observe
     * @param array  $config  An array that holds the plugin configuration
     * @since 1.5
     */
    public function __construct(& $subject, $config) {
        parent::__construct($subject, $config);
    }

    /**
     * Method to handle the onInit event.
     * - Initializes the JCE WYSIWYG Editor
     *
     * @access public
     * @return string JavaScript Initialization string
     * @since 1.5
     */
    public function onInit() {
        $app = JFactory::getApplication();
        $language = JFactory::getLanguage();
        $document = JFactory::getDocument();

        // set IE mode
        //$document->setMetaData('X-UA-Compatible', 'IE=Edge', true);

        // Check for existence of Admin Component
        if (!is_dir(JPATH_SITE . '/components/com_jce') || !is_dir(JPATH_ADMINISTRATOR . '/components/com_jce')) {
            JError::raiseWarning('SOME_ERROR_CODE', 'WF_COMPONENT_MISSING');
        }

        $language->load('plg_editors_jce', JPATH_ADMINISTRATOR);
        $language->load('com_jce', JPATH_ADMINISTRATOR);

        // load constants and loader
        require_once(JPATH_ADMINISTRATOR . '/components/com_jce/includes/base.php');

        wfimport('admin.models.editor');
        $model = new WFModelEditor();

        return $model->buildEditor();
    }

    /**
     * Get the editor content (delegates to onSave).
     *
     * @param string $editor The name of the editor
     * @return string JavaScript expression returning the editor content
     */
    public function onGetContent($editor) {
        //return "WFEditor.getContent('" . $editor . "');";
        return $this->onSave($editor);
    }

    /**
     * Set the editor content.
     *
     * @param string $editor The name of the editor
     * @param string $html   The content to set
     * @return string JavaScript statement
     */
    public function onSetContent($editor, $html) {
        return "WFEditor.setContent('" . $editor . "','" . $html . "');";
    }

    /**
     * Copy editor content to the form field.
     *
     * @param string $editor The name of the editor
     * @return string JavaScript expression
     */
    public function onSave($editor) {
        return "WFEditor.getContent('" . $editor . "');";
    }

    /**
     * Display the editor area.
     *
     * @param string $name    Control name of the editor area
     * @param string $content Initial content of the field
     * @param string $width   Width of the editor area (px or %)
     * @param string $height  Height of the editor area (px or %)
     * @param int    $col     Number of columns for the textarea
     * @param int    $row     Number of rows for the textarea
     * @param mixed  $buttons Boolean or array of editor-xtd button names
     * @param string $id      Optional element id, defaults to $name
     * @param string $asset   Asset identifier for the button plugins
     * @param object $author  Author passed to the button plugins
     * @return string HTML for the editor area and its buttons
     */
    public function onDisplay($name, $content, $width, $height, $col, $row, $buttons = true, $id = null, $asset = null, $author = null) {
        // Fall back to the control name when no explicit id was given.
        // (This guard used to appear twice in this method; once is enough.)
        if (empty($id)) {
            $id = $name;
        }

        // Only add "px" to width and height if they are not given as a percentage
        if (is_numeric($width)) {
            $width .= 'px';
        }

        if (is_numeric($height)) {
            $height .= 'px';
        }

        $editor = '<textarea id="' . $id . '" name="' . $name . '" cols="' . $col . '" rows="' . $row . '" style="width:' . $width . ';height:' . $height . ';" class="wfEditor mce_editable source" wrap="off">' . $content . '</textarea>';
        $editor .= $this->_displayButtons($id, $buttons, $asset, $author);

        return $editor;
    }

    /**
     * No custom insert method is provided by this plugin.
     *
     * @param string $name The control name
     */
    public function onGetInsertMethod($name) {

    }

    /**
     * Render the editor-xtd buttons below the editor using the markup
     * appropriate for the running Joomla version.
     *
     * @param string $name    Control name of the editor area
     * @param mixed  $buttons Boolean or array of button names to render
     * @param string $asset   Asset identifier passed to the button plugins
     * @param object $author  Author passed to the button plugins
     * @return string HTML for the button toolbar
     */
    private function _displayButtons($name, $buttons, $asset, $author) {
        $return = '';

        $args = array(
            'name' => $name,
            'event' => 'onGetInsertMethod'
        );

        // Let other plugins contribute raw markup/scripts first.
        $results = (array) $this->update($args);

        foreach ($results as $result) {
            if (is_string($result) && trim($result)) {
                $return .= $result;
            }
        }

        if (is_array($buttons) || (is_bool($buttons) && $buttons)) {
            $buttons = $this->_subject->getButtons($name, $buttons, $asset, $author);

            $version = new JVersion;
            // only available in Joomla 3.4.1+
            if ($version->isCompatible('3.4.1')) {
                // fix for some buttons that do not include the class
                foreach ($buttons as $button) {
                    if (is_object($button)) {
                        if (isset($button->class)) {
                            if (preg_match('#\bbtn\b#', $button->class) === false) {
                                $button->class .= " btn";
                            }
                        } else {
                            $button->class = "btn";
                        }
                    }
                }

                $return .= JLayoutHelper::render('joomla.editors.buttons', $buttons);
                // Joomla 3.0 to 3.4
            } else if ($version->isCompatible('3.0')) {
                /*
                 * This will allow plugins to attach buttons or change the behavior on the fly using AJAX
                 */
                $return .= "\n<div id=\"editor-xtd-buttons\" class=\"btn-toolbar pull-left\">\n";
                $return .= "\n<div class=\"btn-toolbar\">\n";

                // Bug fix: this loop previously iterated $results (the raw
                // strings gathered above), which are not button objects and
                // have no get() method. Iterate the buttons instead, as the
                // legacy branch below does.
                foreach ($buttons as $button) {
                    /*
                     * Results should be an object
                     */
                    if ($button->get('name')) {
                        $modal = ($button->get('modal')) ? ' class="modal-button btn"' : null;
                        $href = ($button->get('link')) ? ' class="btn" href="' . JURI::base() . $button->get('link') . '"' : null;
                        $onclick = ($button->get('onclick')) ? ' onclick="' . $button->get('onclick') . '"' : 'onclick="IeCursorFix(); return false;"';
                        $title = ($button->get('title')) ? $button->get('title') : $button->get('text');

                        $return .= '<a' . $modal . ' title="' . $title . '"' . $href . $onclick . ' rel="' . $button->get('options')
                                . '"><i class="icon-' . $button->get('name') . '"></i> ' . $button->get('text') . "</a>\n";
                    }
                }

                $return .= "</div>\n";
                $return .= "</div>\n";
            } else {
                // Load modal popup behavior
                JHTML::_('behavior.modal', 'a.modal-button');

                /*
                 * This will allow plugins to attach buttons or change the behavior on the fly using AJAX
                 */
                $return .= "\n<div id=\"editor-xtd-buttons\"";

                if ($version->isCompatible('3.0')) {
                    $return .= " class=\"btn-toolbar pull-left\">\n";
                    $return .= "\n<div class=\"btn-toolbar\">\n";
                } else {
                    $return .= ">\n";
                }

                foreach ($buttons as $button) {
                    /*
                     * Results should be an object
                     */
                    if ($button->get('name')) {
                        $modal = ($button->get('modal')) ? ' class="btn modal-button"' : '';
                        $href = ($button->get('link')) ? ' class="btn" href="' . JURI::base() . $button->get('link') . '"' : '';
                        $onclick = ($button->get('onclick')) ? ' onclick="' . $button->get('onclick') . '"' : ' onclick="IeCursorFix(); return false;"';
                        $title = ($button->get('title')) ? $button->get('title') : $button->get('text');

                        if (!$version->isCompatible('3.0')) {
                            $return .= '<div class="button2-left"><div class="' . $button->get('name') . '">';
                        }

                        $return .= '<a' . $modal . ' title="' . $title . '"' . $href . $onclick . ' rel="' . $button->get('options') . '">';

                        // add icon-font class
                        if ($version->isCompatible('3.0')) {
                            $return .= '<i class="icon-' . $button->get('name') . '"></i> ';
                        }

                        $return .= $button->get('text') . '</a>';

                        if (!$version->isCompatible('3.0')) {
                            $return .= '</div></div>';
                        }
                    }
                }

                if ($version->isCompatible('3.0')) {
                    $return .= "</div>\n";
                }

                $return .= "</div>\n";
            }
        }

        return $return;
    }
}
?> | gpl-2.0 |
PongPi/epaper | wp-content/plugins/s2member/includes/classes/sp-access.inc.php | 9779 | <?php
/**
* Specific Post/Page Access routines.
*
* Copyright: © 2009-2011
* {@link http://www.websharks-inc.com/ WebSharks, Inc.}
* (coded in the USA)
*
* Released under the terms of the GNU General Public License.
* You should have received a copy of the GNU General Public License,
* along with this software. In the main directory, see: /licensing/
* If not, see: {@link http://www.gnu.org/licenses/}.
*
* @package s2Member\SP_Access
* @since 3.5
*/
if(!defined('WPINC')) // MUST have WordPress.
	exit ("Do not access this file directly.");

if (!class_exists ("c_ws_plugin__s2member_sp_access"))
	{
		/**
		* Specific Post/Page Access routines.
		*
		* @package s2Member\SP_Access
		* @since 3.5
		*/
		class c_ws_plugin__s2member_sp_access
		{
			/**
			* Generates Specific Post/Page Access links.
			*
			* @package s2Member\SP_Access
			* @since 3.5
			*
			* @param string|int $sp_ids Comma-delimited list of Specific Post/Page IDs *(numerical)*.
			* @param int|string $hours Optional. A numeric expiration time for this link, in hours. Defaults to `72`.
			* @param bool $shrink Optional. Defaults to true. If false, the raw link will NOT be processed by the tinyURL API.
			* @return str|bool A Specific Post/Page Access Link, or false on failure.
			*/
			public static function sp_access_link_gen ($sp_ids = FALSE, $hours = 72, $shrink = TRUE)
				{
					// s2member convention: expose all locals by reference so listeners
					// attached to this action hook can modify them in place.
					foreach(array_keys(get_defined_vars())as$__v)$__refs[$__v]=&$$__v;
					do_action("ws_plugin__s2member_before_sp_access_link_gen", get_defined_vars ());
					unset($__refs, $__v);

					// Sanitize the ID list down to digits and `;`/`,` delimiters; the
					// leading ID determines which Post/Page the permalink points at.
					if ((is_string ($sp_ids) || is_numeric ($sp_ids)) && ($sp_ids = preg_replace ("/[^0-9;,]/", "", $sp_ids)) && ($leading_id = preg_replace ("/^([0-9]+).*$/", "$1", $sp_ids)) && is_numeric ($hours))
						{
							// Encrypted payload format: type :.:|:.: ids :.:|:.: start-time :.:|:.: hours.
							$sp_access = c_ws_plugin__s2member_utils_encryption::encrypt ("sp_time_hours:.:|:.:" . $sp_ids . ":.:|:.:" . strtotime ("now") . ":.:|:.:" . $hours);
							$sp_access_link = add_query_arg ("s2member_sp_access", urlencode ($sp_access), get_permalink ($leading_id)); // Generate long URL/link.

							// Optionally shorten through the tinyURL API; the host is
							// appended as a fragment for display purposes.
							if ($shrink && ($shorter_url = c_ws_plugin__s2member_utils_urls::shorten ($sp_access_link)))
								$sp_access_link = $shorter_url . "#" . $_SERVER["HTTP_HOST"];
						}
					return apply_filters("ws_plugin__s2member_sp_access_link_gen", ((!empty($sp_access_link)) ? $sp_access_link : false), get_defined_vars ());
				}

			/**
			* Generates Specific Post/Page Access links via AJAX.
			*
			* @package s2Member\SP_Access
			* @since 3.5
			*
			* @attaches-to ``add_action("wp_ajax_ws_plugin__s2member_sp_access_link_via_ajax");``
			*
			* @return null Exits script execution after returning data for AJAX caller.
			*/
			public static function sp_access_link_via_ajax ()
				{
					do_action("ws_plugin__s2member_before_sp_access_link_via_ajax", get_defined_vars ());

					status_header (200); // Send a 200 OK status header.
					header ("Content-Type: text/plain; charset=UTF-8"); // Content-Type with UTF-8.
					while (@ob_end_clean ()); // Clean any existing output buffers.

					// Require the create_users capability AND a valid nonce before
					// generating a link from the POSTed ids/hours.
					if (current_user_can ("create_users")) // Check privileges as well. Ability to create Users?
						if (!empty($_POST["ws_plugin__s2member_sp_access_link_via_ajax"]) && is_string ($nonce = $_POST["ws_plugin__s2member_sp_access_link_via_ajax"]) && wp_verify_nonce ($nonce, "ws-plugin--s2member-sp-access-link-via-ajax"))
							if (($_p = c_ws_plugin__s2member_utils_strings::trim_deep (stripslashes_deep ($_POST))) && isset ($_p["s2member_sp_access_link_ids"], $_p["s2member_sp_access_link_hours"]))
								$sp_access_link = c_ws_plugin__s2member_sp_access::sp_access_link_gen ((string)$_p["s2member_sp_access_link_ids"], (string)$_p["s2member_sp_access_link_hours"]);

					exit (apply_filters("ws_plugin__s2member_sp_access_link_via_ajax", ((!empty($sp_access_link)) ? $sp_access_link : ""), get_defined_vars ()));
				}

			/**
			* Handles Specific Post/Page Access authentication.
			*
			* @package s2Member\SP_Access
			* @since 3.5
			*
			* @param int|string $sp_id Numeric Post/Page ID in WordPress.
			* @param bool $read_only Optional. Defaults to false. If ``$read_only = true``,
			* no session cookies are set, no IP Restrictions are checked, and script execution is not exited on Link failure.
			* In other words, with ``$read_only = true``, this function will simply return true or false.
			* @return null|bool|string Returns `true` (or the SP access string), if access is indeed allowed in one way or another.
			* If access is denied with ``$read_only = true`` simply return false. If access is denied with ``$read_only = false``, return false; but if a Specific Post/Page Access Link is currently being used, we exit with a warning about Access Link expiration here.
			*/
			public static function sp_access ($sp_id = FALSE, $read_only = FALSE)
				{
					do_action("ws_plugin__s2member_before_sp_access", get_defined_vars ());

					// Users excluded by filter or capability bypass all checks.
					$excluded = apply_filters("ws_plugin__s2member_sp_access_excluded", false, get_defined_vars ());

					if ($excluded || current_user_can (apply_filters("ws_plugin__s2member_sp_access_excluded_cap", "edit_posts", get_defined_vars ())))
						return apply_filters("ws_plugin__s2member_sp_access", true, get_defined_vars (), "auth-via-exclusion");

					// Otherwise, look for an access value in the ?s2member_sp_access
					// query var first, falling back to values stored in the session cookie.
					else if ($sp_id && is_numeric ($sp_id) && ((!empty($_GET["s2member_sp_access"]) && ($_g["s2member_sp_access"] = trim (stripslashes ((string)$_GET["s2member_sp_access"]))) && is_array($sp_access_values = array($_g["s2member_sp_access"]))) || is_array($sp_access_values = c_ws_plugin__s2member_sp_access::sp_access_session ())) && !empty($sp_access_values))
						{
							foreach ($sp_access_values as $sp_access_value) // Supports multiple access values in a session. We go through each of them.
								{
									// Decrypted payload: type :.:|:.: ids :.:|:.: start-time :.:|:.: hours.
									if (is_array($sp_access = preg_split ("/\:\.\:\|\:\.\:/", c_ws_plugin__s2member_utils_encryption::decrypt ($sp_access_value))))
										{
											if (count ($sp_access) === 4 && $sp_access[0] === "sp_time_hours" && in_array($sp_id, preg_split ("/[\r\n\t\s;,]+/", $sp_access[1])))
												{
													// The link must have started in the past and not yet be past its hour window.
													if (is_numeric ($sp_access[2]) && is_numeric ($sp_access[3]) && $sp_access[2] <= strtotime ("now") && ($sp_access[2] + ($sp_access[3] * 3600)) >= strtotime ("now"))
														{
															if (!$read_only && !empty($_g["s2member_sp_access"])) // Add to session?
																c_ws_plugin__s2member_sp_access::sp_access_session ($_g["s2member_sp_access"]);

															if ($read_only || c_ws_plugin__s2member_ip_restrictions::ip_restrictions_ok ($_SERVER["REMOTE_ADDR"], $sp_access_value))
																return apply_filters("ws_plugin__s2member_sp_access", $sp_access_value, get_defined_vars (), "auth-via-link-session");
														}
												}
										}
								}
							// Otherwise, authentication was NOT possible via link or session.
							if (!$read_only && /* A Specific Post/Page Access Link? */ !empty($_g["s2member_sp_access"]))
								{
									status_header (503);
									header ("Content-Type: text/html; charset=UTF-8");
									while (@ob_end_clean ()); // Clean any existing output buffers.
									exit (_x ('<strong>Your Link Expired:</strong><br />Please contact Support if you need assistance.', "s2member-front", "s2member"));
								}
							else // Else return false here.
								return apply_filters("ws_plugin__s2member_sp_access", false, get_defined_vars (), "no-auth-via-link-session");
						}
					else // Else return false here.
						return apply_filters("ws_plugin__s2member_sp_access", false, get_defined_vars (), "no-auth-no-link-session");
				}

			/**
			* Handles Specific Post/Page sessions, by writing access values into a cookie.
			*
			* Can be used to add a new value to the session, and/or to return the current set of values in the session.
			*
			* @package s2Member\SP_Access
			* @since 3.5
			*
			* @param string $add_sp_access_value Encrypted Specific Post/Page Access value.
			* @return array Array of Specific Post/Page Access values.
			*/
			public static function sp_access_session ($add_sp_access_value = FALSE)
				{
					// s2member convention: expose all locals by reference for hook listeners.
					foreach(array_keys(get_defined_vars())as$__v)$__refs[$__v]=&$$__v;
					do_action("ws_plugin__s2member_before_sp_access_session", get_defined_vars ());
					unset($__refs, $__v);

					// Current session values live in one delimited cookie.
					$sp_access_values = (!empty($_COOKIE["s2member_sp_access"])) ? preg_split ("/\:\.\:\|\:\.\:/", (string)$_COOKIE["s2member_sp_access"]) : array();

					if ($add_sp_access_value && is_string ($add_sp_access_value) && !in_array /* Not in session? */ ($add_sp_access_value, $sp_access_values))
						{
							$sp_access_values[] = $add_sp_access_value; // Add an access value, and update the delimited session cookie.
							$sp_access_values = array_unique ($sp_access_values); // Keep this array unique; disallow double-stacking.

							$cookie = implode (":.:|:.:", $sp_access_values); // Implode the access values into a delimited string.
							// Browsers cap cookies around 4KB; when the combined string gets
							// too long, keep only the newest access value.
							$cookie = (strlen ($cookie) >= 4096) ? $add_sp_access_value : $cookie; // Max cookie size is 4kbs.

							setcookie ("s2member_sp_access", $cookie, time () + 31556926, COOKIEPATH, COOKIE_DOMAIN);
							setcookie ("s2member_sp_access", $cookie, time () + 31556926, SITECOOKIEPATH, COOKIE_DOMAIN);
							$_COOKIE["s2member_sp_access"] = $cookie; // Real-time cookie updates.

							foreach(array_keys(get_defined_vars())as$__v)$__refs[$__v]=&$$__v;
							do_action("ws_plugin__s2member_during_sp_access_session", get_defined_vars ());
							unset($__refs, $__v);
						}
					return apply_filters("ws_plugin__s2member_sp_access_session", $sp_access_values, get_defined_vars ());
				}
		}
	}
| gpl-2.0 |
EyeSeeTea/dhis2 | dhis-2/dhis-web/dhis-web-mobile/src/main/webapp/dhis-web-mobile-resources/js/dhis2.storage.js | 6852 | /*
* Copyright (c) 2004-2013, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
// Namespace setup: reuse existing dhis2 / dhis2.storage objects if already loaded.
var dhis2 = dhis2 || {};
dhis2['storage'] = dhis2['storage'] || {};
/**
 * Manages offline metadata (org units, forms, option sets) and pending data
 * value sets via localStorage caches.
 *
 * @constructor
 * @param {Object} args currently unused.
 */
dhis2.storage.FormManager = function ( args ) {
    // undefined means "not yet loaded from localStorage" (lazy caches).
    this._organisationUnits = undefined;
    this._forms = undefined;
    this._id = _.uniqueId('form-manager'); // unique instance id (underscore.js)
};
/**
 * Fetches the current user's assigned data sets (including option sets) from
 * the DHIS2 Web API and caches the three metadata sections in localStorage.
 *
 * Missing sections in the response are stored as empty objects so later reads
 * always find valid JSON.
 *
 * @return {jqXHR} the jQuery ajax object; callers may attach more handlers.
 */
dhis2.storage.FormManager.prototype.getMetaData = function () {
    return $.ajax({
        url : '../api/currentUser/assignedDataSets?optionSets=true',
        dataType : 'json',
        cache : false
    }).success(function ( data ) {
        // clear out old localStorage, some phones doesn't like it when you overwrite old keys
        localStorage.removeItem('mobileOrganisationUnits');
        localStorage.removeItem('mobileForms');
        localStorage.removeItem('mobileOptionSets');
        // `|| {}` replaces the previous per-section if/else duplication; a
        // falsy section stores "{}" exactly as before.
        localStorage.setItem('mobileOrganisationUnits', JSON.stringify(data.organisationUnits || {}));
        localStorage.setItem('mobileForms', JSON.stringify(data.forms || {}));
        localStorage.setItem('mobileOptionSets', JSON.stringify(data.optionSets || {}));
    });
};
/**
 * @return {boolean} true when either metadata cache has not been loaded yet,
 *     i.e. getMetaData() still needs to run.
 */
dhis2.storage.FormManager.prototype.needMetaData = function () {
    return this.organisationUnits() === undefined || this.forms() === undefined;
};
/**
 * Lazily loads the org-unit map from localStorage (parsed once, then memoized).
 * @return {Object|undefined} map of org-unit id -> org unit, or undefined if absent.
 */
dhis2.storage.FormManager.prototype.organisationUnits = function () {
    if ( this._organisationUnits === undefined ) {
        var organisationUnits = localStorage.getItem('mobileOrganisationUnits');
        // Guard against both a missing key and the literal string "null".
        if( organisationUnits != null && organisationUnits != "null" ) {
            this._organisationUnits = JSON.parse(organisationUnits);
        }
    }
    return this._organisationUnits;
};
/** @return {Object} the org unit with the given id. */
dhis2.storage.FormManager.prototype.organisationUnit = function (id) {
    return this.organisationUnits()[id];
};
/** @return {Array} data sets assigned to the org unit with the given id. */
dhis2.storage.FormManager.prototype.dataSets = function (id) {
    return this.organisationUnit(id).dataSets;
};
/**
 * Lazily loads the option-set map from localStorage (memoized).
 * @return {Object|undefined} map of option-set id -> option set.
 */
dhis2.storage.FormManager.prototype.optionSets = function() {
    if( this._optionSets === undefined ) {
        var optionSets = localStorage.getItem('mobileOptionSets');
        if( optionSets != null && optionSets != "null" ) {
            this._optionSets = JSON.parse(optionSets);
        }
    }
    return this._optionSets;
};
/** @return {Object} the option set with the given id. */
dhis2.storage.FormManager.prototype.optionSet = function( id ) {
    return this.optionSets()[id];
};
/**
 * Lazily loads the form map from localStorage (memoized).
 * @return {Object|undefined} map of form id -> form.
 */
dhis2.storage.FormManager.prototype.forms = function () {
    if( this._forms === undefined ) {
        var form = localStorage.getItem('mobileForms');
        if( form != null && form != "null") {
            this._forms = JSON.parse( form );
        }
    }
    return this._forms;
};
/** @return {Object} the form with the given id. */
dhis2.storage.FormManager.prototype.form = function ( id ) {
    return this.forms()[id]
};
/**
 * Reads the locally queued (offline) data value sets from localStorage.
 *
 * @return {Object} map keyed by dhis2.storage.getUniqueKey(); an empty object
 *     when the key is missing, the literal strings "null"/"[]" are stored, or
 *     nothing has been queued yet.
 */
dhis2.storage.FormManager.prototype.dataValueSets = function() {
    var dataValueSets = localStorage.getItem('mobileDataValueSets');
    // localStorage only stores strings, so also reject stringified null/empty-array leftovers.
    if( dataValueSets != null && dataValueSets != "null" && dataValueSets != "[]" )
    {
        dataValueSets = JSON.parse( dataValueSets );
    } else {
        dataValueSets = {};
    }
    return dataValueSets;
};
/**
 * POSTs one data value set to the DHIS2 Web API.
 *
 * @param {Object} dataValueSet the payload, serialized as JSON.
 * @return {jqXHR} the ajax object so callers can attach success/error handlers.
 */
dhis2.storage.makeUploadDataValueSetRequest = function( dataValueSet ) {
    return $.ajax({
        url : '../api/dataValueSets',
        type : 'POST',
        cache : false,
        contentType : 'application/json',
        data : JSON.stringify( dataValueSet )
    });
};
/**
 * Builds the localStorage key that identifies a data value set:
 * "<orgUnit>-<dataSet>-<period>".
 *
 * @param {Object} dataValueSet object carrying orgUnit, dataSet and period.
 * @return {string} the composed key.
 */
dhis2.storage.getUniqueKey = function( dataValueSet ) {
    var keyParts = [ dataValueSet.orgUnit, dataValueSet.dataSet, dataValueSet.period ];
    return keyParts.join('-');
};
/**
 * Looks up a queued data value set matching the given org unit / data set /
 * period combination.
 *
 * @return {Object|undefined} the queued set, or undefined when none is queued.
 */
dhis2.storage.FormManager.prototype.getDataValueSetValues = function( dataValueSet ) {
    var dataValueSets = this.dataValueSets();
    return dataValueSets[ dhis2.storage.getUniqueKey( dataValueSet )];
};
/**
 * Tries to upload a data value set immediately; on failure (e.g. offline) the
 * set is queued in localStorage for a later uploadDataValueSets() run.
 *
 * @return {jqXHR} the upload request.
 */
dhis2.storage.FormManager.prototype.saveDataValueSet = function( dataValueSet ) {
    var dataValueSets = this.dataValueSets();
    return dhis2.storage.makeUploadDataValueSetRequest( dataValueSet ).error(function() {
        // add to local dataValueSets
        dataValueSets[dhis2.storage.getUniqueKey(dataValueSet)] = dataValueSet;
        // delete old values
        localStorage.removeItem('mobileDataValueSets');
        localStorage.setItem('mobileDataValueSets', JSON.stringify( dataValueSets ));
    });
};
/**
 * Attempts to upload every queued data value set; successfully uploaded sets
 * are dropped from the queue, failed ones remain for the next attempt.
 *
 * @return {Promise} resolves/rejects once all upload requests have settled;
 *     the queue is rewritten to localStorage either way (via always()).
 */
dhis2.storage.FormManager.prototype.uploadDataValueSets = function() {
    var dataValueSets = this.dataValueSets();
    var deferreds = [];
    _.each(dataValueSets, function( value, key ) {
        // On success remove the entry from the in-memory map; the map is
        // persisted once after all requests finish.
        deferreds.push(dhis2.storage.makeUploadDataValueSetRequest( value ).success(function() {
            delete dataValueSets[key];
        })
        );
    });
    return $.when.apply( null, deferreds ).always(function() {
        // delete old values
        localStorage.removeItem('mobileDataValueSets');
        localStorage.setItem('mobileDataValueSets', JSON.stringify( dataValueSets ));
    });
};
// Bootstrap the single global FormManager instance used by the mobile pages.
(function () {
    window.fm = new dhis2.storage.FormManager();
})();
| gpl-3.0 |
Anbyew/Customized-Centrifuge | ds.cpp | 3211 | /*
* Copyright 2011, Ben Langmead <langmea@cs.jhu.edu>
*
* This file is part of Bowtie 2.
*
* Bowtie 2 is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Bowtie 2 is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Bowtie 2. If not, see <http://www.gnu.org/licenses/>.
*/
#include "ds.h"
// Global memory tally shared by the whole process.
MemoryTally gMemTally;
/**
 * Tally a memory allocation of size amt bytes.
 *
 * Updates both the per-category and the overall running totals, and records
 * new high-water marks for each.  The whole update is serialized through
 * mutex_m via the ThreadSafe RAII guard.
 *
 * @param cat allocation category index
 * @param amt number of bytes allocated
 */
void MemoryTally::add(int cat, uint64_t amt) {
	ThreadSafe ts(&mutex_m);
	tots_[cat] += amt;
	tot_ += amt;
	// Track per-category peak usage.
	if(tots_[cat] > peaks_[cat]) {
		peaks_[cat] = tots_[cat];
	}
	// Track overall peak usage.
	if(tot_ > peak_) {
		peak_ = tot_;
	}
}
/**
 * Tally a memory free of size amt bytes.
 *
 * Decrements the per-category and overall totals under mutex_m.  Peaks are
 * intentionally left untouched (they are high-water marks).  The asserts
 * catch frees that exceed what was previously tallied.
 *
 * @param cat allocation category index
 * @param amt number of bytes freed
 */
void MemoryTally::del(int cat, uint64_t amt) {
	ThreadSafe ts(&mutex_m);
	assert_geq(tots_[cat], amt);
	assert_geq(tot_, amt);
	tots_[cat] -= amt;
	tot_ -= amt;
}
#ifdef MAIN_DS
#include <limits>
#include "random_source.h"
using namespace std;
// Self-test driver (built only with -DMAIN_DS).  Failures throw int(1);
// successful sections print PASSED to stderr.
int main(void) {
	// EHeap test 1: fixed values must pop in ascending order, shrinking size by one each time.
	cerr << "Test EHeap 1...";
	{
		EHeap<float> h;
		h.insert(0.5f); // 1
		h.insert(0.6f); // 2
		h.insert(0.25f); // 3
		h.insert(0.75f); // 4
		h.insert(0.1f); // 5
		h.insert(0.9f); // 6
		h.insert(0.4f); // 7
		assert_eq(7, h.size());
		if(h.pop() != 0.1f) {
			throw 1;
		}
		assert_eq(6, h.size());
		if(h.pop() != 0.25f) {
			throw 1;
		}
		assert_eq(5, h.size());
		if(h.pop() != 0.4f) {
			throw 1;
		}
		assert_eq(4, h.size());
		if(h.pop() != 0.5f) {
			throw 1;
		}
		assert_eq(3, h.size());
		if(h.pop() != 0.6f) {
			throw 1;
		}
		assert_eq(2, h.size());
		if(h.pop() != 0.75f) {
			throw 1;
		}
		assert_eq(1, h.size());
		if(h.pop() != 0.9f) {
			throw 1;
		}
		assert_eq(0, h.size());
		assert(h.empty());
	}
	cerr << "PASSED" << endl;
	// EHeap test 2: 2000 pseudo-random inserts (fixed seed 12), then verify
	// the pop sequence is nondecreasing.
	cerr << "Test EHeap 2...";
	{
		EHeap<size_t> h;
		RandomSource rnd(12);
		size_t lim = 2000;
		while(h.size() < lim) {
			h.insert(rnd.nextU32());
		}
		size_t last = std::numeric_limits<size_t>::max();
		bool first = true;
		while(!h.empty()) {
			size_t p = h.pop();
			assert(first || p >= last); // each pop >= the previous one
			last = p;
			first = false;
		}
	}
	cerr << "PASSED" << endl;
	// EBitList test: set/test/clear semantics, max() tracking (max() is
	// size_t max when empty), then 2000 random set/test round-trips.
	cerr << "Test EBitList 1...";
	{
		EBitList<128> l;
		assert_eq(0, l.size());
		assert_eq(std::numeric_limits<size_t>::max(), l.max());
		assert(!l.test(0));
		assert(!l.test(1));
		assert(!l.test(10));
		// Setting the same bit repeatedly must be idempotent.
		for(int i = 0; i < 3; i++) {
			l.set(10);
			assert(!l.test(0));
			assert(!l.test(1));
			assert(!l.test(9));
			assert(l.test(10));
			assert(!l.test(11));
		}
		assert_eq(10, l.max());
		l.clear();
		assert(!l.test(10));
		assert_eq(std::numeric_limits<size_t>::max(), l.max());
		RandomSource rnd(12);
		size_t lim = 2000;
		for(size_t i = 0; i < lim; i++) {
			uint32_t ri = rnd.nextU32() % 10000;
			l.set(ri);
			assert(l.test(ri));
		}
	}
	cerr << "PASSED" << endl;
}
#endif /*def MAIN_SSTRING*/
| gpl-3.0 |
zhangswings/iBeebo | app/src/main/java/org/zarroboogs/weibo/hot/bean/hotweibo/PicInfo.java | 988 | package org.zarroboogs.weibo.hot.bean.hotweibo;
import org.json.*;
public class PicInfo {
private PicBig picBig;
private PicMiddle picMiddle;
private PicSmall picSmall;
public PicInfo () {
}
public PicInfo (JSONObject json) {
this.picBig = new PicBig(json.optJSONObject("pic_big"));
this.picMiddle = new PicMiddle(json.optJSONObject("pic_middle"));
this.picSmall = new PicSmall(json.optJSONObject("pic_small"));
}
public PicBig getPicBig() {
return this.picBig;
}
public void setPicBig(PicBig picBig) {
this.picBig = picBig;
}
public PicMiddle getPicMiddle() {
return this.picMiddle;
}
public void setPicMiddle(PicMiddle picMiddle) {
this.picMiddle = picMiddle;
}
public PicSmall getPicSmall() {
return this.picSmall;
}
public void setPicSmall(PicSmall picSmall) {
this.picSmall = picSmall;
}
}
| gpl-3.0 |
SaaldjorMike/librenms | includes/polling/entity-physical/state.inc.php | 1482 | <?php
// Reconcile stored entity state with the freshly polled values in
// $entPhysical_state: rows no longer reported by the device are deleted;
// rows that are still reported are consumed (unset) from the polled array.
foreach (dbFetch('SELECT * FROM `entPhysical_state` WHERE `device_id` = ?', array($device['device_id'])) as $entity) {
    if (!isset($entPhysical_state[$entity['entPhysicalIndex']][$entity['subindex']][$entity['group']][$entity['key']])) {
        // The device no longer reports this index/subindex/group/key: drop the row.
        dbDelete(
            'entPhysical_state',
            '`device_id` = ? AND `entPhysicalIndex` = ? AND `subindex` = ? AND `group` = ? AND `key` = ?',
            array(
                $device['device_id'],
                $entity['entPhysicalIndex'],
                $entity['subindex'],
                $entity['group'],
                $entity['key'],
            )
        );
    } else {
        if ($entPhysical_state[$entity['entPhysicalIndex']][$entity['subindex']][$entity['group']][$entity['key']] != $entity['value']) {
            // NOTE(review): looks like leftover debug output — a changed value is
            // reported but never written back here; confirm intent.
            echo 'no match!';
        }
        // Consume the polled entry so only genuinely new entries remain below.
        unset($entPhysical_state[$entity['entPhysicalIndex']][$entity['subindex']][$entity['group']][$entity['key']]);
    }
}//end foreach
// End reconcile of existing entity state rows
// Insert entity state rows the poller discovered that are not yet in the database
// (everything left in $entPhysical_state after the reconciliation above).
foreach ((array)$entPhysical_state as $epi => $entity) {
    foreach ($entity as $subindex => $si) {
        foreach ($si as $group => $ti) {
            foreach ($ti as $key => $value) {
                dbInsert(array('device_id' => $device['device_id'], 'entPhysicalIndex' => $epi, 'subindex' => $subindex, 'group' => $group, 'key' => $key, 'value' => $value), 'entPhysical_state');
            }
        }
    }
} // End insert of new entity state rows
| gpl-3.0 |
rart/studio2 | src/main/java/org/craftercms/studio/api/v1/service/dependency/DependencyRules.java | 8050 | /*
* Crafter Studio Web-content authoring solution
* Copyright (C) 2007-2016 Crafter Software Corporation.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.craftercms.studio.api.v1.service.dependency;
import org.craftercms.studio.api.v1.service.configuration.ServicesConfig;
import org.craftercms.studio.api.v1.service.content.ContentService;
import org.craftercms.studio.api.v1.service.objectstate.ObjectStateService;
import org.craftercms.studio.api.v1.to.DmDependencyTO;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
 * Computes the transitive dependency closure of a content item for the
 * submit / reject workflows.  The previous implementation repeated the same
 * collect-and-recurse body for every dependency category; the loops now
 * delegate to two private helpers.
 */
public class DependencyRules {

    protected String site;

    public DependencyRules(String site) {
        this.site = site;
    }

    /**
     * Collects, recursively, every dependency of the submitted item that
     * should travel with it through the submit workflow.
     *
     * Documents, components, assets, rendering templates and level
     * descriptors are included when their state is updated-or-new; child
     * pages only when their state is new.  Included items inherit the
     * parent's "now" flag and scheduled date and are marked submitted.
     *
     * @param submittedItem the item being submitted
     * @return the set of dependencies to submit alongside the item
     */
    public Set<DmDependencyTO> applySubmitRule(DmDependencyTO submittedItem) {
        Set<DmDependencyTO> dependencies = new HashSet<DmDependencyTO>();
        if (submittedItem.getDocuments() != null) {
            for (DmDependencyTO document : submittedItem.getDocuments()) {
                collectForSubmit(submittedItem, document, dependencies, false);
            }
        }
        if (submittedItem.getComponents() != null) {
            for (DmDependencyTO component : submittedItem.getComponents()) {
                collectForSubmit(submittedItem, component, dependencies, false);
            }
        }
        if (submittedItem.getAssets() != null) {
            for (DmDependencyTO asset : submittedItem.getAssets()) {
                collectForSubmit(submittedItem, asset, dependencies, false);
            }
        }
        if (submittedItem.getRenderingTemplates() != null) {
            for (DmDependencyTO template : submittedItem.getRenderingTemplates()) {
                collectForSubmit(submittedItem, template, dependencies, false);
            }
        }
        if (submittedItem.getLevelDescriptors() != null) {
            for (DmDependencyTO ld : submittedItem.getLevelDescriptors()) {
                collectForSubmit(submittedItem, ld, dependencies, false);
            }
        }
        if (submittedItem.getPages() != null) {
            for (DmDependencyTO page : submittedItem.getPages()) {
                collectForSubmit(submittedItem, page, dependencies, true);
            }
        }
        return dependencies;
    }

    /**
     * Adds one dependency to the result set when its object state qualifies,
     * then always recurses into its own dependencies.
     *
     * @param parent       the item whose scheduling info is inherited
     * @param dependency   the candidate dependency
     * @param dependencies accumulator for qualifying items
     * @param newOnly      true to qualify only NEW items (pages); false to
     *                     also qualify UPDATED items (all other categories)
     */
    private void collectForSubmit(DmDependencyTO parent, DmDependencyTO dependency,
                                  Set<DmDependencyTO> dependencies, boolean newOnly) {
        boolean qualifies = newOnly
                ? objectStateService.isNew(site, dependency.getUri())
                : objectStateService.isUpdatedOrNew(site, dependency.getUri());
        if (qualifies) {
            // Properties are assigned before the add so the stored item
            // already carries its final scheduling state.
            dependency.setNow(parent.isNow());
            dependency.setScheduledDate(parent.getScheduledDate());
            dependency.setSubmitted(true);
            dependencies.add(dependency);
        }
        dependencies.addAll(applySubmitRule(dependency));
    }

    /**
     * Dependencies to cascade when an item is deleted.
     *
     * @param deletedItem the item being deleted
     * @return currently always empty
     */
    public Set<DmDependencyTO> applyDeleteDependencyRule(DmDependencyTO deletedItem) {
        return Collections.emptySet();//$Review$ pick up the cascades from configuration
    }

    /**
     * Collects, recursively, the dependencies affected when a submission is
     * rejected.  Unlike the submit rule, every direct dependency is included
     * regardless of state, and child pages are NOT traversed (matching the
     * original behavior).  Items submitted for deletion fall through to
     * {@link #applyDeleteDependencyRule(DmDependencyTO)}.
     *
     * @param submittedItem the item whose submission is rejected
     * @return the set of dependencies affected by the rejection
     */
    public Set<DmDependencyTO> applyRejectRule(DmDependencyTO submittedItem) {
        if (submittedItem.isSubmittedForDeletion()) {
            return applyDeleteDependencyRule(submittedItem);
        }
        Set<DmDependencyTO> dependencies = new HashSet<DmDependencyTO>();
        if (submittedItem.getDocuments() != null) {
            for (DmDependencyTO document : submittedItem.getDocuments()) {
                collectForReject(document, dependencies);
            }
        }
        if (submittedItem.getComponents() != null) {
            for (DmDependencyTO component : submittedItem.getComponents()) {
                collectForReject(component, dependencies);
            }
        }
        if (submittedItem.getAssets() != null) {
            for (DmDependencyTO asset : submittedItem.getAssets()) {
                collectForReject(asset, dependencies);
            }
        }
        if (submittedItem.getRenderingTemplates() != null) {
            for (DmDependencyTO template : submittedItem.getRenderingTemplates()) {
                collectForReject(template, dependencies);
            }
        }
        if (submittedItem.getLevelDescriptors() != null) {
            for (DmDependencyTO ld : submittedItem.getLevelDescriptors()) {
                collectForReject(ld, dependencies);
            }
        }
        return dependencies;
    }

    /** Adds the dependency unconditionally, then pulls in its submit-rule closure. */
    private void collectForReject(DmDependencyTO dependency, Set<DmDependencyTO> dependencies) {
        dependencies.add(dependency);
        dependencies.addAll(applySubmitRule(dependency));
    }

    protected ContentService contentService;
    protected ObjectStateService objectStateService;

    public ContentService getContentService() { return contentService; }
    public void setContentService(ContentService contentService) { this.contentService = contentService; }

    public ObjectStateService getObjectStateService() { return objectStateService; }
    public void setObjectStateService(ObjectStateService objectStateService) { this.objectStateService = objectStateService; }
}
| gpl-3.0 |
aagallag/nexmon | utilities/boringssl/src/crypto/chacha/chacha_test.cc | 15684 | /* Copyright (c) 2016, Google Inc.
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
* SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
* OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
* CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. */
#include <stdio.h>
#include <stdint.h>
#include <string.h>
#include <memory>
#include <openssl/crypto.h>
#include <openssl/chacha.h>
// 256-bit ChaCha20 key for the known-answer test vectors below.
static const uint8_t kKey[32] = {
    0x98, 0xbe, 0xf1, 0x46, 0x9b, 0xe7, 0x26, 0x98, 0x37, 0xa4, 0x5b,
    0xfb, 0xc9, 0x2a, 0x5a, 0x6a, 0xc7, 0x62, 0x50, 0x7c, 0xf9, 0x64,
    0x43, 0xbf, 0x33, 0xb9, 0x6b, 0x1b, 0xd4, 0xc6, 0xf8, 0xf6,
};

// 96-bit nonce (RFC 7539 layout) used with kKey.
static const uint8_t kNonce[12] = {
    0x44, 0xe7, 0x92, 0xd6, 0x33, 0x35, 0xab, 0xb1, 0x58, 0x2e, 0x92, 0x53,
};

// Initial block counter for the keystream.
static uint32_t kCounter = 42;
static const uint8_t kInput[] = {
0x58, 0x28, 0xd5, 0x30, 0x36, 0x2c, 0x60, 0x55, 0x29, 0xf8, 0xe1, 0x8c,
0xae, 0x15, 0x15, 0x26, 0xf2, 0x3a, 0x73, 0xa0, 0xf3, 0x12, 0xa3, 0x88,
0x5f, 0x2b, 0x74, 0x23, 0x3d, 0xc9, 0x05, 0x23, 0xc6, 0x54, 0x49, 0x1e,
0x44, 0x88, 0x14, 0xd9, 0xda, 0x37, 0x15, 0xdc, 0xb7, 0xe4, 0x23, 0xb3,
0x9d, 0x7e, 0x16, 0x68, 0x35, 0xfc, 0x02, 0x6d, 0xcc, 0x8a, 0xe5, 0xdd,
0x5f, 0xe4, 0xd2, 0x56, 0x6f, 0x12, 0x9c, 0x9c, 0x7d, 0x6a, 0x38, 0x48,
0xbd, 0xdf, 0xd9, 0xac, 0x1b, 0xa2, 0x4d, 0xc5, 0x43, 0x04, 0x3c, 0xd7,
0x99, 0xe1, 0xa7, 0x13, 0x9c, 0x51, 0xc2, 0x6d, 0xf9, 0xcf, 0x07, 0x3b,
0xe4, 0xbf, 0x93, 0xa3, 0xa9, 0xb4, 0xc5, 0xf0, 0x1a, 0xe4, 0x8d, 0x5f,
0xc6, 0xc4, 0x7c, 0x69, 0x7a, 0xde, 0x1a, 0xc1, 0xc9, 0xcf, 0xc2, 0x4e,
0x7a, 0x25, 0x2c, 0x32, 0xe9, 0x17, 0xba, 0x68, 0xf1, 0x37, 0x5d, 0x62,
0x84, 0x46, 0xf5, 0x80, 0x7f, 0x1a, 0x71, 0xf7, 0xbe, 0x72, 0x4b, 0xb8,
0x1c, 0xfe, 0x3e, 0xbd, 0xae, 0x0d, 0x73, 0x0d, 0x87, 0x4a, 0x31, 0xc3,
0x3d, 0x46, 0x6f, 0xb3, 0xd7, 0x6b, 0xe3, 0xb8, 0x70, 0x17, 0x8e, 0x7a,
0x6a, 0x0e, 0xbf, 0xa8, 0xbc, 0x2b, 0xdb, 0xfa, 0x4f, 0xb6, 0x26, 0x20,
0xee, 0x63, 0xf0, 0x6d, 0x26, 0xac, 0x6a, 0x18, 0x37, 0x6e, 0x59, 0x81,
0xd1, 0x60, 0xe6, 0x40, 0xd5, 0x6d, 0x68, 0xba, 0x8b, 0x65, 0x4a, 0xf9,
0xf1, 0xae, 0x56, 0x24, 0x8f, 0xe3, 0x8e, 0xe7, 0x7e, 0x6f, 0xcf, 0x92,
0xdf, 0xa9, 0x75, 0x3a, 0xd6, 0x2e, 0x1c, 0xaf, 0xf2, 0xd6, 0x8b, 0x39,
0xad, 0xd2, 0x5d, 0xfb, 0xd7, 0xdf, 0x05, 0x57, 0x0d, 0xf7, 0xf6, 0x8f,
0x2d, 0x14, 0xb0, 0x4e, 0x1a, 0x3c, 0x77, 0x04, 0xcd, 0x3c, 0x5c, 0x58,
0x52, 0x10, 0x6f, 0xcf, 0x5c, 0x03, 0xc8, 0x5f, 0x85, 0x2b, 0x05, 0x82,
0x60, 0xda, 0xcc, 0xcd, 0xd6, 0x88, 0xbf, 0xc0, 0x10, 0xb3, 0x6f, 0x54,
0x54, 0x42, 0xbc, 0x4b, 0x77, 0x21, 0x4d, 0xee, 0x87, 0x45, 0x06, 0x4c,
0x60, 0x38, 0xd2, 0x7e, 0x1d, 0x30, 0x6c, 0x55, 0xf0, 0x38, 0x80, 0x1c,
0xde, 0x3d, 0xea, 0x68, 0x3e, 0xf6, 0x3e, 0x59, 0xcf, 0x0d, 0x08, 0xae,
0x8c, 0x02, 0x0b, 0xc1, 0x72, 0x6a, 0xb4, 0x6d, 0xf3, 0xf7, 0xb3, 0xef,
0x3a, 0xb1, 0x06, 0xf2, 0xf4, 0xd6, 0x69, 0x7b, 0x3e, 0xa2, 0x16, 0x31,
0x31, 0x79, 0xb6, 0x33, 0xa9, 0xca, 0x8a, 0xa8, 0xbe, 0xf3, 0xe9, 0x38,
0x28, 0xd1, 0xe1, 0x3b, 0x4e, 0x2e, 0x47, 0x35, 0xa4, 0x61, 0x14, 0x1e,
0x42, 0x2c, 0x49, 0x55, 0xea, 0xe3, 0xb3, 0xce, 0x39, 0xd3, 0xb3, 0xef,
0x4a, 0x4d, 0x78, 0x49, 0xbd, 0xf6, 0x7c, 0x0a, 0x2c, 0xd3, 0x26, 0xcb,
0xd9, 0x6a, 0xad, 0x63, 0x93, 0xa7, 0x29, 0x92, 0xdc, 0x1f, 0xaf, 0x61,
0x82, 0x80, 0x74, 0xb2, 0x9c, 0x4a, 0x86, 0x73, 0x50, 0xd8, 0xd1, 0xff,
0xee, 0x1a, 0xe2, 0xdd, 0xa2, 0x61, 0xbd, 0x10, 0xc3, 0x5f, 0x67, 0x9f,
0x29, 0xe4, 0xd3, 0x70, 0xe5, 0x67, 0x3a, 0xd2, 0x20, 0x00, 0xcc, 0x25,
0x15, 0x96, 0x54, 0x45, 0x85, 0xed, 0x82, 0x88, 0x3b, 0x9f, 0x3b, 0xc3,
0x04, 0xd4, 0x23, 0xb1, 0x0d, 0xdc, 0xc8, 0x26, 0x9d, 0x28, 0xb3, 0x25,
0x4d, 0x52, 0xe5, 0x33, 0xf3, 0xed, 0x2c, 0xb8, 0x1a, 0xcf, 0xc3, 0x52,
0xb4, 0x2f, 0xc7, 0x79, 0x96, 0x14, 0x7d, 0x72, 0x27, 0x72, 0x85, 0xea,
0x6d, 0x41, 0xa0, 0x22, 0x13, 0x6d, 0x06, 0x83, 0xa4, 0xdd, 0x0f, 0x69,
0xd2, 0x01, 0xcd, 0xc6, 0xb8, 0x64, 0x5c, 0x2c, 0x79, 0xd1, 0xc7, 0xd3,
0x31, 0xdb, 0x2c, 0xff, 0xda, 0xd0, 0x69, 0x31, 0xad, 0x83, 0x5f, 0xed,
0x6a, 0x97, 0xe4, 0x00, 0x43, 0xb0, 0x2e, 0x97, 0xae, 0x00, 0x5f, 0x5c,
0xb9, 0xe8, 0x39, 0x80, 0x10, 0xca, 0x0c, 0xfa, 0xf0, 0xb5, 0xcd, 0xaa,
0x27, 0x11, 0x60, 0xd9, 0x21, 0x86, 0x93, 0x91, 0x9f, 0x2d, 0x1a, 0x8e,
0xde, 0x0b, 0xb5, 0xcb, 0x05, 0x24, 0x30, 0x45, 0x4d, 0x11, 0x75, 0xfd,
0xe5, 0xa0, 0xa9, 0x4e, 0x3a, 0x8c, 0x3b, 0x52, 0x5a, 0x37, 0x18, 0x05,
0x4a, 0x7a, 0x09, 0x6a, 0xe6, 0xd5, 0xa9, 0xa6, 0x71, 0x47, 0x4c, 0x50,
0xe1, 0x3e, 0x8a, 0x21, 0x2b, 0x4f, 0x0e, 0xe3, 0xcb, 0x72, 0xc5, 0x28,
0x3e, 0x5a, 0x33, 0xec, 0x48, 0x92, 0x2e, 0xa1, 0x24, 0x57, 0x09, 0x0f,
0x01, 0x85, 0x3b, 0x34, 0x39, 0x7e, 0xc7, 0x90, 0x62, 0xe2, 0xdc, 0x5d,
0x0a, 0x2c, 0x51, 0x26, 0x95, 0x3a, 0x95, 0x92, 0xa5, 0x39, 0x8f, 0x0c,
0x83, 0x0b, 0x9d, 0x38, 0xab, 0x98, 0x2a, 0xc4, 0x01, 0xc4, 0x0d, 0x77,
0x13, 0xcb, 0xca, 0xf1, 0x28, 0x31, 0x52, 0x75, 0x27, 0x2c, 0xf0, 0x04,
0x86, 0xc8, 0xf3, 0x3d, 0xf2, 0x9d, 0x8f, 0x55, 0x52, 0x40, 0x3f, 0xaa,
0x22, 0x7f, 0xe7, 0x69, 0x3b, 0xee, 0x44, 0x09, 0xde, 0xff, 0xb0, 0x69,
0x3a, 0xae, 0x74, 0xe9, 0x9d, 0x33, 0xae, 0x8b, 0x6d, 0x60, 0x04, 0xff,
0x53, 0x3f, 0x88, 0xe9, 0x63, 0x9b, 0xb1, 0x6d, 0x2c, 0x22, 0x15, 0x5a,
0x15, 0xd9, 0xe5, 0xcb, 0x03, 0x78, 0x3c, 0xca, 0x59, 0x8c, 0xc8, 0xc2,
0x86, 0xff, 0xd2, 0x79, 0xd6, 0xc6, 0xec, 0x5b, 0xbb, 0xa0, 0xae, 0x01,
0x20, 0x09, 0x2e, 0x38, 0x5d, 0xda, 0x5d, 0xe0, 0x59, 0x4e, 0xe5, 0x8b,
0x84, 0x8f, 0xb6, 0xe0, 0x56, 0x9f, 0x21, 0xa1, 0xcf, 0xb2, 0x0f, 0x2c,
0x93, 0xf8, 0xcf, 0x37, 0xc1, 0x9f, 0x32, 0x98, 0x21, 0x65, 0x52, 0x66,
0x6e, 0xd3, 0x71, 0x98, 0x55, 0xb9, 0x46, 0x9f, 0x1a, 0x35, 0xc4, 0x47,
0x69, 0x62, 0x70, 0x4b, 0x77, 0x9e, 0xe4, 0x21, 0xe6, 0x32, 0x5a, 0x26,
0x05, 0xba, 0x57, 0x53, 0xd7, 0x9b, 0x55, 0x3c, 0xbb, 0x53, 0x79, 0x60,
0x9c, 0xc8, 0x4d, 0xf7, 0xf5, 0x1d, 0x54, 0x02, 0x91, 0x68, 0x0e, 0xaa,
0xca, 0x5a, 0x78, 0x0c, 0x28, 0x9a, 0xc3, 0xac, 0x49, 0xc0, 0xf4, 0x85,
0xee, 0x59, 0x76, 0x7e, 0x28, 0x4e, 0xf1, 0x5c, 0x63, 0xf7, 0xce, 0x0e,
0x2c, 0x21, 0xa0, 0x58, 0xe9, 0x01, 0xfd, 0xeb, 0xd1, 0xaf, 0xe6, 0xef,
0x93, 0xb3, 0x95, 0x51, 0x60, 0xa2, 0x74, 0x40, 0x15, 0xe5, 0xf4, 0x0a,
0xca, 0x6d, 0x9a, 0x37, 0x42, 0x4d, 0x5a, 0x58, 0x49, 0x0f, 0xe9, 0x02,
0xfc, 0x77, 0xd8, 0x59, 0xde, 0xdd, 0xad, 0x4b, 0x99, 0x2e, 0x64, 0x73,
0xad, 0x42, 0x2f, 0xf3, 0x2c, 0x0d, 0x49, 0xe4, 0x2e, 0x6c, 0xa4, 0x73,
0x75, 0x18, 0x14, 0x85, 0xbb, 0x64, 0xb4, 0xa1, 0xb0, 0x6e, 0x01, 0xc0,
0xcf, 0x17, 0x9c, 0xc5, 0x28, 0xc3, 0x2d, 0x6c, 0x17, 0x2a, 0x3d, 0x06,
0x5c, 0xf3, 0xb4, 0x49, 0x75, 0xad, 0x17, 0x69, 0xd4, 0xca, 0x65, 0xae,
0x44, 0x71, 0xa5, 0xf6, 0x0d, 0x0f, 0x8e, 0x37, 0xc7, 0x43, 0xce, 0x6b,
0x08, 0xe9, 0xd1, 0x34, 0x48, 0x8f, 0xc9, 0xfc, 0xf3, 0x5d, 0x2d, 0xec,
0x62, 0xd3, 0xf0, 0xb3, 0xfe, 0x2e, 0x40, 0x55, 0x76, 0x54, 0xc7, 0xb4,
0x61, 0x16, 0xcc, 0x7c, 0x1c, 0x19, 0x24, 0xe6, 0x4d, 0xd4, 0xc3, 0x77,
0x67, 0x1f, 0x3c, 0x74, 0x79, 0xa1, 0xf8, 0x85, 0x88, 0x1d, 0x6f, 0xa4,
0x7e, 0x2c, 0x21, 0x9f, 0x49, 0xf5, 0xaa, 0x4e, 0xf3, 0x4a, 0xfa, 0x9d,
0xbe, 0xf6, 0xce, 0xda, 0xb5, 0xab, 0x39, 0xbd, 0x16, 0x41, 0xa9, 0x4a,
0xac, 0x09, 0x01, 0xca,
};
static const uint8_t kOutput[] = {
0x54, 0x30, 0x6a, 0x13, 0xda, 0x59, 0x6b, 0x6d, 0x59, 0x49, 0xc8, 0xc5,
0xab, 0x26, 0xd4, 0x8a, 0xad, 0xc0, 0x3d, 0xaf, 0x14, 0xb9, 0x15, 0xb8,
0xca, 0xdf, 0x17, 0xa7, 0x03, 0xd3, 0xc5, 0x06, 0x01, 0xef, 0x21, 0xdd,
0xa3, 0x0b, 0x9e, 0x48, 0xb8, 0x5e, 0x0b, 0x87, 0x9f, 0x95, 0x23, 0x68,
0x85, 0x69, 0xd2, 0x5d, 0xaf, 0x57, 0xe9, 0x27, 0x11, 0x3d, 0x49, 0xfa,
0xf1, 0x08, 0xcc, 0x15, 0xec, 0x1d, 0x19, 0x16, 0x12, 0x9b, 0xc8, 0x66,
0x1f, 0xfa, 0x2c, 0x93, 0xf4, 0x99, 0x11, 0x27, 0x31, 0x0e, 0xd8, 0x46,
0x47, 0x40, 0x11, 0x70, 0x01, 0xca, 0xe8, 0x5b, 0xc5, 0x91, 0xc8, 0x3a,
0xdc, 0xaa, 0xf3, 0x4b, 0x80, 0xe5, 0xbc, 0x03, 0xd0, 0x89, 0x72, 0xbc,
0xce, 0x2a, 0x76, 0x0c, 0xf5, 0xda, 0x4c, 0x10, 0x06, 0x35, 0x41, 0xb1,
0xe6, 0xb4, 0xaa, 0x7a, 0xef, 0xf0, 0x62, 0x4a, 0xc5, 0x9f, 0x2c, 0xaf,
0xb8, 0x2f, 0xd9, 0xd1, 0x01, 0x7a, 0x36, 0x2f, 0x3e, 0x83, 0xa5, 0xeb,
0x81, 0x70, 0xa0, 0x57, 0x17, 0x46, 0xea, 0x9e, 0xcb, 0x0e, 0x74, 0xd3,
0x44, 0x57, 0x1d, 0x40, 0x06, 0xf8, 0xb7, 0xcb, 0x5f, 0xf4, 0x79, 0xbd,
0x11, 0x19, 0xd6, 0xee, 0xf8, 0xb0, 0xaa, 0xdd, 0x00, 0x62, 0xad, 0x3b,
0x88, 0x9a, 0x88, 0x5b, 0x1b, 0x07, 0xc9, 0xae, 0x9e, 0xa6, 0x94, 0xe5,
0x55, 0xdb, 0x45, 0x23, 0xb9, 0x2c, 0xcd, 0x29, 0xd3, 0x54, 0xc3, 0x88,
0x1e, 0x5f, 0x52, 0xf2, 0x09, 0x00, 0x26, 0x26, 0x1a, 0xed, 0xf5, 0xc2,
0xa9, 0x7d, 0xf9, 0x21, 0x5a, 0xaf, 0x6d, 0xab, 0x8e, 0x16, 0x84, 0x96,
0xb5, 0x4f, 0xcf, 0x1e, 0xa3, 0xaf, 0x08, 0x9f, 0x79, 0x86, 0xc3, 0xbe,
0x0c, 0x70, 0xcb, 0x8f, 0xf3, 0xc5, 0xf8, 0xe8, 0x4b, 0x21, 0x7d, 0x18,
0xa9, 0xed, 0x8b, 0xfb, 0x6b, 0x5a, 0x6f, 0x26, 0x0b, 0x56, 0x04, 0x7c,
0xfe, 0x0e, 0x1e, 0xc1, 0x3f, 0x82, 0xc5, 0x73, 0xbd, 0x53, 0x0c, 0xf0,
0xe2, 0xc9, 0xf3, 0x3d, 0x1b, 0x6d, 0xba, 0x70, 0xc1, 0x6d, 0xb6, 0x00,
0x28, 0xe1, 0xc4, 0x78, 0x62, 0x04, 0xda, 0x23, 0x86, 0xc3, 0xda, 0x74,
0x3d, 0x7c, 0xd6, 0x76, 0x29, 0xb2, 0x27, 0x2e, 0xb2, 0x35, 0x42, 0x60,
0x82, 0xcf, 0x30, 0x2c, 0x59, 0xe4, 0xe3, 0xd0, 0x74, 0x1f, 0x58, 0xe8,
0xda, 0x47, 0x45, 0x73, 0x1c, 0x05, 0x93, 0xae, 0x75, 0xbe, 0x1f, 0x81,
0xd8, 0xb7, 0xb3, 0xff, 0xfc, 0x8b, 0x52, 0x9e, 0xed, 0x8b, 0x37, 0x9f,
0xe0, 0xb8, 0xa2, 0x66, 0xe1, 0x6a, 0xc5, 0x1f, 0x1d, 0xf0, 0xde, 0x3f,
0x3d, 0xb0, 0x28, 0xf3, 0xaa, 0x4e, 0x4d, 0x31, 0xb0, 0x26, 0x79, 0x2b,
0x08, 0x0f, 0xe9, 0x2f, 0x79, 0xb3, 0xc8, 0xdd, 0xa7, 0x89, 0xa8, 0xa8,
0x1d, 0x59, 0x0e, 0x4f, 0x1e, 0x93, 0x1f, 0x70, 0x7f, 0x4e, 0x7e, 0xfe,
0xb8, 0xca, 0x63, 0xe0, 0xa6, 0x05, 0xcc, 0xd7, 0xde, 0x2a, 0x49, 0x31,
0x78, 0x5c, 0x5f, 0x44, 0xb2, 0x9b, 0x91, 0x99, 0x14, 0x29, 0x63, 0x09,
0x12, 0xdd, 0x02, 0xd9, 0x7b, 0xe9, 0xf5, 0x12, 0x07, 0xd0, 0xe7, 0xe6,
0xe8, 0xdd, 0xda, 0xa4, 0x73, 0xc4, 0x8e, 0xbd, 0x7b, 0xb7, 0xbb, 0xcb,
0x83, 0x2f, 0x43, 0xf6, 0x1c, 0x50, 0xae, 0x9b, 0x2e, 0x52, 0x80, 0x18,
0x85, 0xa8, 0x23, 0x52, 0x7a, 0x6a, 0xf7, 0x42, 0x36, 0xca, 0x91, 0x5a,
0x3d, 0x2a, 0xa0, 0x35, 0x7d, 0x70, 0xfc, 0x4c, 0x18, 0x7c, 0x57, 0x72,
0xcf, 0x9b, 0x29, 0xd6, 0xd0, 0xb4, 0xd7, 0xe6, 0x89, 0x70, 0x69, 0x22,
0x5e, 0x45, 0x09, 0x4d, 0x49, 0x87, 0x84, 0x5f, 0x8a, 0x5f, 0xe4, 0x15,
0xd3, 0xe3, 0x72, 0xaf, 0xb2, 0x30, 0x9c, 0xc1, 0xff, 0x8e, 0x6d, 0x2a,
0x76, 0x9e, 0x08, 0x03, 0x7e, 0xe0, 0xc3, 0xc2, 0x97, 0x06, 0x6b, 0x33,
0x2b, 0x08, 0xe3, 0xd5, 0x0b, 0xd8, 0x32, 0x67, 0x61, 0x10, 0xed, 0x6b,
0xed, 0x50, 0xef, 0xd7, 0x1c, 0x1b, 0xe0, 0x6d, 0xa1, 0x64, 0x19, 0x34,
0x2f, 0xe4, 0xe8, 0x54, 0xbf, 0x84, 0x0e, 0xdf, 0x0e, 0x8b, 0xd8, 0xdd,
0x77, 0x96, 0xb8, 0x54, 0xab, 0xf2, 0x95, 0x59, 0x0d, 0x0d, 0x0a, 0x15,
0x6e, 0x01, 0xf2, 0x24, 0xab, 0xa0, 0xd8, 0xdf, 0x38, 0xea, 0x97, 0x58,
0x76, 0x88, 0xbe, 0xaf, 0x45, 0xe3, 0x56, 0x4f, 0x68, 0xe8, 0x4b, 0xe7,
0x2b, 0x22, 0x18, 0x96, 0x82, 0x89, 0x25, 0x34, 0xd1, 0xdd, 0x08, 0xea,
0x7e, 0x21, 0xef, 0x57, 0x55, 0x43, 0xf7, 0xfa, 0xca, 0x1c, 0xde, 0x99,
0x2e, 0x8b, 0xd8, 0xc3, 0xcf, 0x89, 0x4d, 0xfc, 0x3b, 0x7d, 0x4a, 0xc9,
0x99, 0xc4, 0x31, 0xb6, 0x7a, 0xae, 0xf8, 0x49, 0xb2, 0x46, 0xc1, 0x60,
0x05, 0x75, 0xf3, 0x3d, 0xf2, 0xc9, 0x84, 0xa4, 0xb9, 0x8a, 0x87, 0x2a,
0x87, 0x5c, 0x0a, 0xbc, 0x51, 0x7d, 0x9a, 0xf5, 0xc9, 0x24, 0x2d, 0x5e,
0xe6, 0xc6, 0xe3, 0xcd, 0x7e, 0xe4, 0xaf, 0x8a, 0x6c, 0x00, 0x04, 0xc8,
0xd7, 0xa5, 0xad, 0xfa, 0xb2, 0x08, 0x4a, 0x26, 0x9b, 0x7c, 0xd0, 0xc6,
0x13, 0xb1, 0xb9, 0x65, 0x3f, 0x70, 0x30, 0xf9, 0x98, 0x9d, 0x87, 0x99,
0x57, 0x71, 0x3e, 0xb1, 0xc3, 0x24, 0xf0, 0xa6, 0xa2, 0x60, 0x9d, 0x66,
0xd2, 0x5f, 0xae, 0xe3, 0x94, 0x87, 0xea, 0xd1, 0xea, 0x0d, 0x2a, 0x77,
0xef, 0x31, 0xcc, 0xeb, 0xf9, 0x0c, 0xdc, 0x9c, 0x12, 0x80, 0xbb, 0xb0,
0x8e, 0xab, 0x9a, 0x04, 0xcd, 0x4b, 0x95, 0x4f, 0x7a, 0x0b, 0x53, 0x7c,
0x16, 0xcc, 0x0e, 0xb1, 0x73, 0x10, 0xdd, 0xaa, 0x76, 0x94, 0x90, 0xd9,
0x8b, 0x66, 0x41, 0x31, 0xed, 0x8c, 0x7d, 0x74, 0xc4, 0x33, 0xfa, 0xc3,
0x43, 0x8d, 0x10, 0xbc, 0x84, 0x4d, 0x0e, 0x95, 0x32, 0xdf, 0x17, 0x43,
0x6d, 0xd2, 0x5e, 0x12, 0xb9, 0xed, 0x33, 0xd9, 0x97, 0x6f, 0x4a, 0xcd,
0xc3, 0xcd, 0x81, 0x34, 0xbe, 0x7e, 0xa2, 0xd0, 0xa7, 0x91, 0x5d, 0x90,
0xf6, 0x5e, 0x4a, 0x25, 0x0f, 0xcc, 0x24, 0xeb, 0xe1, 0xe4, 0x62, 0x6c,
0x8f, 0x45, 0x36, 0x97, 0x5d, 0xda, 0x20, 0x2b, 0x86, 0x00, 0x8c, 0x94,
0xa9, 0x6a, 0x69, 0xb2, 0xe9, 0xbb, 0x82, 0x8e, 0x41, 0x95, 0xb4, 0xb7,
0xf1, 0x55, 0x52, 0x30, 0x39, 0x48, 0xb3, 0x25, 0x82, 0xa9, 0x10, 0x27,
0x89, 0xb5, 0xe5, 0x1f, 0xab, 0x72, 0x3c, 0x70, 0x08, 0xce, 0xe6, 0x61,
0xbf, 0x19, 0xc8, 0x90, 0x2b, 0x29, 0x30, 0x3e, 0xb8, 0x4c, 0x33, 0xf0,
0xf0, 0x15, 0x2e, 0xb7, 0x25, 0xca, 0x99, 0x4b, 0x6f, 0x4b, 0x41, 0x50,
0xee, 0x56, 0x99, 0xcf, 0x2b, 0xa4, 0xc4, 0x7c, 0x5c, 0xa6, 0xd4, 0x67,
0x04, 0x5c, 0x5d, 0x5f, 0x26, 0x9e, 0x0f, 0xe2, 0x58, 0x68, 0x4c, 0x30,
0xcd, 0xef, 0x46, 0xdb, 0x37, 0x6f, 0xbb, 0xc4, 0x80, 0xca, 0x8a, 0x54,
0x5d, 0x71, 0x9d, 0x0c, 0xe8, 0xb8, 0x2c, 0x10, 0x90, 0x44, 0xa4, 0x88,
0x3f, 0xbc, 0x15, 0x3c, 0xd2, 0xca, 0x0e, 0xc3, 0xe4, 0x6e, 0xef, 0xb0,
0xcb, 0xfd, 0x61, 0x7c, 0x27, 0xf2, 0x25, 0xea, 0x71, 0x6d, 0xf7, 0x49,
0x9c, 0x81, 0x27, 0xf0, 0x61, 0x33, 0xcf, 0x55, 0x68, 0xd3, 0x73, 0xa4,
0xed, 0x35, 0x65, 0x2a, 0xf2, 0x3e, 0xcf, 0x90, 0x98, 0x54, 0x6d, 0x95,
0x6a, 0x0c, 0x9c, 0x24, 0x0e, 0xb4, 0xb7, 0x9b, 0x8d, 0x6e, 0x1c, 0xbc,
0xeb, 0x17, 0x10, 0x86, 0xda, 0x91, 0x6d, 0x89, 0x4c, 0xeb, 0xf5, 0x50,
0x8f, 0x40, 0xcf, 0x4a,
};
static_assert(sizeof(kInput) == sizeof(kOutput),
"Input and output lengths don't match.");
// Encrypts the first |len| bytes of the test vector and checks the result,
// first into a separate output buffer and then in place at several source
// offsets. Returns true only if every configuration matches |kOutput|.
static bool TestChaCha20(size_t len) {
  std::unique_ptr<uint8_t[]> buf(new uint8_t[len]);
  CRYPTO_chacha_20(buf.get(), kInput, len, kKey, kNonce, kCounter);
  if (memcmp(buf.get(), kOutput, len) != 0) {
    fprintf(stderr, "Mismatch at length %u.\n", static_cast<unsigned>(len));
    return false;
  }

  // Test in-place at various offsets.
  static const size_t kOffsets[] = {
      0,  1,  2,  8,  15, 16,  17,  31,  32,  33,  63,
      64, 65, 95, 96, 97, 127, 128, 129, 255, 256, 257,
  };
  for (size_t i = 0; i < sizeof(kOffsets) / sizeof(kOffsets[0]); i++) {
    const size_t offset = kOffsets[i];
    buf.reset(new uint8_t[len + offset]);
    memcpy(buf.get() + offset, kInput, len);
    CRYPTO_chacha_20(buf.get(), buf.get() + offset, len, kKey, kNonce,
                     kCounter);
    if (memcmp(buf.get(), kOutput, len) != 0) {
      fprintf(stderr, "Mismatch at length %u with in-place offset %u.\n",
              static_cast<unsigned>(len), static_cast<unsigned>(offset));
      return false;
    }
  }

  return true;
}
// Entry point: runs the ChaCha20 test vector at every prefix length and
// prints PASS on success.
int main(int argc, char **argv) {
  CRYPTO_library_init();

  bool ok = true;
  for (size_t len = 0; ok && len <= sizeof(kInput); len++) {
    ok = TestChaCha20(len);
  }
  if (!ok) {
    return 1;
  }

  printf("PASS\n");
  return 0;
}
| gpl-3.0 |
malin1993ml/h-store | src/hsqldb19b3/org/hsqldb/ExpressionOrderBy.java | 5085 | /* Copyright (c) 2001-2009, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL HSQL DEVELOPMENT GROUP, HSQLDB.ORG,
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hsqldb;
import org.hsqldb.HSQLInterface.HSQLParseException;
/**
* Implementation of ORDER BY operations
*
* @author Fred Toussi (fredt@users dot sourceforge.net)
* @version 1.9.0
* @since 1.9.0
*/
public class ExpressionOrderBy extends Expression {

    // True when the column was declared with DESC; ascending otherwise.
    private boolean isDescending;

    // True when NULLS LAST was requested for this sort column.
    private boolean isNullsLast;

    // Wraps the sort-key expression e as the single child of an ORDER_BY node.
    ExpressionOrderBy(Expression e) {
        super(OpTypes.ORDER_BY);
        nodes = new Expression[UNARY];
        nodes[LEFT] = e;
    }

    /**
     * Set an ORDER BY column expression DESC
     */
    void setDescending() {
        isDescending = true;
    }

    /**
     * Is an ORDER BY column expression DESC
     */
    boolean isDescending() {
        return isDescending;
    }

    /**
     * Set an ORDER BY column NULL ordering
     */
    void setNullsLast() {
        isNullsLast = true;
    }

    /**
     * Is an ORDER BY column NULL ordering
     */
    boolean isNullsLast() {
        return isNullsLast;
    }

    // Evaluation simply delegates to the wrapped sort-key expression.
    public Object getValue(Session session) {
        return nodes[LEFT].getValue(session);
    }

    // Resolves the child's type; a dynamic parameter marker is rejected as an
    // ORDER BY expression (error X_42567). The node adopts the child's type.
    public void resolveTypes(Session session, Expression parent) {
        nodes[LEFT].resolveTypes(session, parent);

        if (nodes[LEFT].isParam) {
            throw Error.error(ErrorCode.X_42567);
        }

        dataType = nodes[LEFT].dataType;
    }

    // Renders "ORDER BY <alias-or-expression> [DESC]".
    public String getSQL() {
        StringBuffer sb = new StringBuffer();

        sb.append(Tokens.T_ORDER).append(' ').append(Tokens.T_BY).append(' ');

        // Prefer the select-list alias when one was assigned to the child.
        if (nodes[LEFT].alias != null) {
            sb.append(nodes[LEFT].alias.name);
        } else {
            sb.append(nodes[LEFT].getSQL());
        }

        if (isDescending) {
            sb.append(' ').append(Tokens.T_DESC);
        }

        return sb.toString();
    }

    // Debug/describe output for this node, indented by 'blanks' spaces.
    protected String describe(Session session, int blanks) {
        StringBuffer sb = new StringBuffer();

        sb.append('\n');

        for (int i = 0; i < blanks; i++) {
            sb.append(' ');
        }

        sb.append(Tokens.T_ORDER).append(' ').append(Tokens.T_BY);
        sb.append(' ');

        if (isDescending) {
            sb.append(Tokens.T_DESC).append(' ');
        }

        return sb.toString();
    }

    /*************** VOLTDB *********************/

    /**
     * VoltDB added method to get a non-catalog-dependent
     * representation of this HSQLDB object.
     * @param session The current Session object may be needed to resolve
     * some names.
     * @param indent A string of whitespace to be prepended to every line
     * in the resulting XML.
     * @return XML, correctly indented, representing this object.
     * @throws HSQLParseException
     */
    String voltGetXML(Session session, String indent) throws HSQLParseException
    {
        StringBuffer sb = new StringBuffer();
        String extra = "desc='" + (isDescending ? "true" : "false") + "'";
        sb.append(indent).append("<operation id=\"").append(this.getUniqueId()).append("\"");
        sb.append(" type=\"").append("orderby").append("\"");
        // Only emit an alias attribute when one is present and non-empty.
        if ((this.alias != null) && (getAlias().length() > 0)) {
            sb.append(" alias='" + getAlias() + "'");
        }
        sb.append(" ").append(extra);
        sb.append(">\n");
        // Child expressions are serialized one indent level deeper.
        for (Expression expr : nodes) {
            sb.append(expr.voltGetXML(session, indent + HSQLInterface.XML_INDENT)).append('\n');
        }
        sb.append(indent).append("</operation>");
        return sb.toString();
    }
}
| gpl-3.0 |
ollie314/kuma | tests/ui/_cli.js | 2888 | // This file allows for overriding of certain config vars via the command line
// The local default config is assumed
define(['tests/lib/config'], function(libConfig) {
    return {
        // Applies command-line overrides (browsers, suites, credentials and
        // permission flags) onto the given Intern config and returns it.
        mixinArgs: function(args, config) {
            var skipTags = [];

            // Replace the target config array with entries parsed from a
            // comma-separated command-line value, if one was supplied.
            function applyListArg(property, rawValue, transform) {
                if (rawValue) rawValue = rawValue.trim();
                if (!rawValue) return;
                config[property].length = 0;
                rawValue.trim().split(',').forEach(transform);
            }

            // -b: comma-separated browser names, e.g. "firefox,chrome"
            applyListArg('environments', args.b, function(item) {
                config.environments.push({ browserName: item.trim() });
            });

            // -t: comma-separated suite names, resolved relative to tests/
            applyListArg('functionalSuites', args.t, function(item) {
                config.functionalSuites.push('tests/' + item.trim());
            });

            // Without credentials, skip tests that need an account.
            if (args.u == undefined && args.p == undefined) {
                skipTags.push('requires-login');
                console.log('No username (-u) and password (-p) provided. Tests requiring login will be skipped.');
            }

            // Without a target wiki document, skip most wiki tests.
            if (args.wd == undefined) {
                skipTags.push('requires-doc');
                console.log('No wiki document (-wd) provided. Most wiki tests will be skipped.');
            }

            // Destructive/admin tests are never allowed against production.
            if (libConfig.productionDomain == libConfig.domain) {
                skipTags.push('requires-destructive', 'requires-admin');
                console.log('Destructive tests not allowed on production');
                console.log('Admin tests not allowed on production');
            }

            // Destructive tests require explicit opt-in.
            if (args.destructive != 'true') {
                skipTags.push('requires-destructive');
                console.log('No destructive permission provided. Some wiki tests will be skipped.');
            }

            // De-duplicate the tags, then build a negative-lookahead grep that
            // excludes any test whose name carries one of them.
            skipTags = skipTags.filter(function(tag, index, self) {
                return self.indexOf(tag) === index;
            });
            args.grep = skipTags.length ? ('^(?!.*?\\[(' + skipTags.join('|') + ')\\])') : '';
            if (args.grep) {
                console.log('Command line arguments have forced a grep to skip tests: ' + args.grep);
            }

            return config;
        }
    };
});
| mpl-2.0 |
HotChalk/canvas-lms | app/models/quizzes/quiz_question/answer_parsers/calculated.rb | 3044 | #
# Copyright (C) 2013 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
module Quizzes::QuizQuestion::AnswerParsers
  # Parses raw answer data for "calculated" (formula) quiz questions:
  # normalizes the question's formulas and variable definitions, then turns
  # each raw answer hash into an AnswerGroup::Answer.
  class Calculated < AnswerParser
    # Normalizes formulas/variables on +question+ and maps every raw answer
    # (a computed result plus the variable values that produced it) into the
    # question's answer group. Returns the mutated question.
    def parse(question)
      question[:formulas] = format_formulas(question[:formulas])
      question[:variables] = parse_variables(question[:variables])

      @answers.map_with_group! do |answer_group, answer|
        # Calculated answers are always full-credit; :answer holds the
        # expected numeric result for this variable combination.
        answer_params = {:weight => 100, :variables => []}
        answer_params[:answer] = answer[:answer_text].to_f

        variables = hash_to_array(answer[:variables])
        variables.each do |variable|
          variable = Quizzes::QuizQuestion::RawFields.new(variable)
          name = variable.fetch_with_enforced_length(:name)
          answer_params[:variables] << {
            :name => name,
            # Round the stored value to the decimal scale recorded for this
            # variable by parse_variables.
            :value => format_value(variable.fetch_any(:value).to_f, @scale_lookup_dictionary[name])
          }
        end

        answer = Quizzes::QuizQuestion::AnswerGroup::Answer.new(answer_params)
        answer_group.taken_ids << answer.set_id(answer_group.taken_ids)
        answer
      end

      question.answers = @answers
      question
    end

    private

    # Trims and length-enforces each raw formula string.
    def format_formulas(formulas)
      formulas = hash_to_array(formulas)
      formulas.map do |formula|
        formula = Quizzes::QuizQuestion::RawFields.new({formula: trim_length(formula)})
        {formula: formula.fetch_with_enforced_length(:formula)}
      end
    end

    # Normalizes variable definitions (name, min, max, scale) and records each
    # variable's decimal scale for later value formatting in #parse.
    def parse_variables(variables)
      @scale_lookup_dictionary ||= {}
      hash_to_array(variables).map do |variable|
        variable = Quizzes::QuizQuestion::RawFields.new(variable.merge({name: trim_length(variable[:name])}))

        # Remember the scale so answer values can be rounded consistently.
        var_name = variable.fetch_with_enforced_length(:name)
        scale = variable.fetch_any(:scale).to_i
        @scale_lookup_dictionary[var_name] = scale

        # Return the normalized variable definition.
        {
          name: var_name,
          min: variable.fetch_any(:min).to_f,
          max: variable.fetch_any(:max).to_f,
          scale: scale
        }
      end
    end

    # Formats a float to +scale+ decimal places; passes the raw float through
    # when no scale is known (nil).
    def format_value(float_value, scale)
      scale ? format("%.#{scale}f", float_value) : float_value
    end

    # Guards against pathologically long user input.
    def trim_length(field)
      field[0..1024]
    end

    # Accepts either a Hash (form-style params) or an Array of values; nil
    # becomes an empty array.
    def hash_to_array(obj)
      if obj.respond_to?(:values)
        obj.values
      else
        obj || []
      end
    end

    # NOTE(review): appears unused within this class — confirm there are no
    # external callers before removing.
    def trim_padding(n)
      n.to_s[9..-1].to_i
    end
  end
end
| agpl-3.0 |
pydio/pydio-core | core/src/plugins/editor.openlayer/res/i18n/et.php | 358 | <?php
// Estonian translation by Ardi Jürgens <ardi (at) zone.ee>
// + updates/fixes by Kain Väljaots <kain (at) zone.ee>
// Last update: 27.05.2013
$mess=array(
"1" => "OpenLayers kaart",
"2" => "Filter",
"3" => "Positsioon",
"4" => "Kihid",
"5" => "Antialias",
"6" => "Formaat",
"7" => "Stiilid",
"8" => "Filter",
"9" => "Otsi",
"10" => "Puhasta",
);
| agpl-3.0 |
redconfetti/canvas-lms | db/migrate/20110118001335_add_ip_filter_to_quizzes.rb | 202 | class AddIpFilterToQuizzes < ActiveRecord::Migration
tag :predeploy
def self.up
add_column :quizzes, :ip_filter, :string
end
def self.down
remove_column :quizzes, :ip_filter
end
end
| agpl-3.0 |
harterj/moose | modules/navier_stokes/src/bcs/NSEnergyInviscidSpecifiedDensityAndVelocityBC.C | 2473 | //* This file is part of the MOOSE framework
//* https://www.mooseframework.org
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html
// Navier-Stokes includes
// This was experimental code and did not really work out, do not use!
#include "NSEnergyInviscidSpecifiedDensityAndVelocityBC.h"
#include "NS.h"
// Register this boundary condition with the factory under its class name.
registerMooseObject("NavierStokesApp", NSEnergyInviscidSpecifiedDensityAndVelocityBC);

InputParameters
NSEnergyInviscidSpecifiedDensityAndVelocityBC::validParams()
{
  InputParameters params = NSEnergyInviscidBC::validParams();

  // Coupled variables
  params.addRequiredCoupledVar(NS::pressure, "pressure");
  params.addDeprecatedCoupledVar("p", NS::pressure, "1/1/2022");

  // Required parameters: the density and velocity are fixed user inputs,
  // while the pressure is taken from the coupled solution variable.
  params.addRequiredParam<Real>("specified_density", "The specified density for this boundary");
  params.addRequiredParam<Real>("specified_u",
                                "The x-component of the specified velocity for this boundary");
  params.addRequiredParam<Real>("specified_v",
                                "The y-component of the specified velocity for this boundary");
  params.addParam<Real>(
      "specified_w",
      0.0,
      "The z-component of the specified velocity for this boundary"); // only required in 3D

  return params;
}

// Caches the coupled pressure values plus the user-specified density and
// velocity components read from the input file.
NSEnergyInviscidSpecifiedDensityAndVelocityBC::NSEnergyInviscidSpecifiedDensityAndVelocityBC(
    const InputParameters & parameters)
  : NSEnergyInviscidBC(parameters),
    _pressure(coupledValue(NS::pressure)),
    _specified_density(getParam<Real>("specified_density")),
    _specified_u(getParam<Real>("specified_u")),
    _specified_v(getParam<Real>("specified_v")),
    _specified_w(getParam<Real>("specified_w"))
{
}

// Residual: delegates to the base-class helper using the fixed density and
// velocity with the current pressure at this quadrature point.
Real
NSEnergyInviscidSpecifiedDensityAndVelocityBC::computeQpResidual()
{
  return qpResidualHelper(_specified_density,
                          RealVectorValue(_specified_u, _specified_v, _specified_w),
                          _pressure[_qp]);
}

// Jacobians were never implemented for this experimental BC (see the note at
// the top of the file); both routines intentionally return zero.
Real
NSEnergyInviscidSpecifiedDensityAndVelocityBC::computeQpJacobian()
{
  // TODO
  // return computeJacobianHelper(/*on-diagonal variable is energy=*/4);
  return 0.;
}

Real
NSEnergyInviscidSpecifiedDensityAndVelocityBC::computeQpOffDiagJacobian(unsigned /*jvar*/)
{
  // TODO
  // return computeJacobianHelper(mapVarNumber(jvar));
  return 0.;
}
| lgpl-2.1 |
harterj/moose | modules/phase_field/test/src/materials/ADTestDerivativeFunction.C | 2926 | //* This file is part of the MOOSE framework
//* https://www.mooseframework.org
//*
//* All rights reserved, see COPYRIGHT for full restrictions
//* https://github.com/idaholab/moose/blob/master/COPYRIGHT
//*
//* Licensed under LGPL 2.1, please see LICENSE for details
//* https://www.gnu.org/licenses/lgpl-2.1.html
#include "ADTestDerivativeFunction.h"
// Register this test material with the factory.
registerMooseObject("PhaseFieldTestApp", ADTestDerivativeFunction);

InputParameters
ADTestDerivativeFunction::validParams()
{
  InputParameters params = ADMaterial::validParams();
  params.addClassDescription(
      "Material that implements the a function of one variable and its first derivative.");
  // Which of the three hard-coded test functions to evaluate.
  MooseEnum functionEnum("F1 F2 F3");
  params.addRequiredParam<MooseEnum>("function",
                                     functionEnum,
                                     "F1 = 2 op[0]^2 (1 - op[0])^2 - 0.2 op[0]; "
                                     "F2 = 0.1 op[0]^2 + op[1]^2; "
                                     "F3 = op[0] * op[1]");
  params.addParam<MaterialPropertyName>("f_name", "F", "function property name");
  params.addRequiredCoupledVar("op", "Order parameter variables");
  return params;
}

// Declares the function property F and one derivative property dF/d(op_i) per
// coupled order parameter, then validates the variable count for the choice.
ADTestDerivativeFunction::ADTestDerivativeFunction(const InputParameters & parameters)
  : ADMaterial(parameters),
    _function(getParam<MooseEnum>("function").template getEnum<FunctionEnum>()),
    _op(adCoupledValues("op")),
    _f_name(getParam<MaterialPropertyName>("f_name")),
    _prop_F(declareADProperty<Real>(_f_name)),
    _prop_dFdop(coupledComponents("op"))
{
  for (std::size_t i = 0; i < _op.size(); ++i)
    _prop_dFdop[i] = &declareADProperty<Real>(
        derivativePropertyNameFirst(_f_name, this->getVar("op", i)->name()));

  // F1 is univariate; F2 and F3 each need exactly two order parameters.
  if (_function == FunctionEnum::F1 && _op.size() != 1)
    paramError("op", "Specify exactly one variable to an F1 type function.");
  if (_function == FunctionEnum::F2 && _op.size() != 2)
    paramError("op", "Specify exactly two variables to an F2 type function.");
  if (_function == FunctionEnum::F3 && _op.size() != 2)
    paramError("op", "Specify exactly two variables to an F3 type function.");
}

// Evaluates the selected function and its analytic first derivatives at the
// current quadrature point.
void
ADTestDerivativeFunction::computeQpProperties()
{
  const ADReal & a = (*_op[0])[_qp];

  switch (_function)
  {
    case FunctionEnum::F1:
      // F1(a) = 2 a^2 (1-a)^2 - 0.2 a
      _prop_F[_qp] = 2.0 * a * a * (1.0 - a) * (1.0 - a) - 0.2 * a;
      (*_prop_dFdop[0])[_qp] = 4.0 * a * a * (a - 1.0) + 4.0 * a * (1.0 - a) * (1.0 - a) - 0.2;
      break;

    case FunctionEnum::F2:
    {
      // F2(a,b) = 0.1 a^2 + b^2
      const ADReal & b = (*_op[1])[_qp];
      _prop_F[_qp] = 0.1 * a * a + b * b;
      (*_prop_dFdop[0])[_qp] = 0.2 * a;
      (*_prop_dFdop[1])[_qp] = 2.0 * b;
      break;
    }

    case FunctionEnum::F3:
    {
      // F3(a,b) = a * b
      const ADReal & b = (*_op[1])[_qp];
      _prop_F[_qp] = a * b;
      (*_prop_dFdop[0])[_qp] = b;
      (*_prop_dFdop[1])[_qp] = a;
      break;
    }

    default:
      mooseError("Invalid function enum value");
  }
}
| lgpl-2.1 |
cubem2013/reservation2013 | _package/rb/modules/pension/theme/_mobile/_pc/default/free_bbs.php | 40 | <div class="title">자유게시판</div> | lgpl-3.0 |
turbokongen/home-assistant | homeassistant/components/season/sensor.py | 4165 | """Support for tracking which astronomical or meteorological season it is."""
from datetime import datetime
import logging
import ephem
import voluptuous as vol
from homeassistant import util
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_NAME, CONF_TYPE
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util.dt import utcnow
_LOGGER = logging.getLogger(__name__)

DEFAULT_NAME = "Season"

# Hemisphere identifiers, derived from the configured latitude.
EQUATOR = "equator"
NORTHERN = "northern"
SOUTHERN = "southern"

# Season state values reported by the sensor.
STATE_AUTUMN = "autumn"
STATE_SPRING = "spring"
STATE_SUMMER = "summer"
STATE_WINTER = "winter"

# Supported season-tracking modes.
TYPE_ASTRONOMICAL = "astronomical"
TYPE_METEOROLOGICAL = "meteorological"
VALID_TYPES = [TYPE_ASTRONOMICAL, TYPE_METEOROLOGICAL]

# Maps each northern-hemisphere season to its southern counterpart.
HEMISPHERE_SEASON_SWAP = {
    STATE_WINTER: STATE_SUMMER,
    STATE_SPRING: STATE_AUTUMN,
    STATE_AUTUMN: STATE_SPRING,
    STATE_SUMMER: STATE_WINTER,
}

# Frontend icon for each season state.
SEASON_ICONS = {
    STATE_SPRING: "mdi:flower",
    STATE_SUMMER: "mdi:sunglasses",
    STATE_AUTUMN: "mdi:leaf",
    STATE_WINTER: "mdi:snowflake",
}

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_TYPE, default=TYPE_ASTRONOMICAL): vol.In(VALID_TYPES),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Display the current season."""
    # Hemisphere detection requires a configured location.
    if None in (hass.config.latitude, hass.config.longitude):
        _LOGGER.error("Latitude or longitude not set in Home Assistant config")
        return False

    latitude = util.convert(hass.config.latitude, float)
    _type = config.get(CONF_TYPE)
    name = config.get(CONF_NAME)

    # Positive latitude -> northern hemisphere, negative -> southern,
    # exactly zero -> equator (no seasons).
    if latitude < 0:
        hemisphere = SOUTHERN
    elif latitude > 0:
        hemisphere = NORTHERN
    else:
        hemisphere = EQUATOR

    # NOTE(review): logs only the raw tracking type with no message context.
    _LOGGER.debug(_type)
    add_entities([Season(hass, hemisphere, _type, name)], True)

    return True
def get_season(date, hemisphere, season_tracking_type):
    """Calculate the current season.

    :param date: naive datetime to evaluate.
    :param hemisphere: one of NORTHERN, SOUTHERN or EQUATOR.
    :param season_tracking_type: TYPE_ASTRONOMICAL (equinox/solstice based)
        or TYPE_METEOROLOGICAL (fixed month boundaries).
    :return: one of the STATE_* season constants, or None on the equator.
    """
    # Use the module constant rather than a bare string literal for
    # consistency with the rest of the module.
    if hemisphere == EQUATOR:
        # No meaningful seasons on the equator.
        return None

    if season_tracking_type == TYPE_ASTRONOMICAL:
        # Season boundaries follow the year's actual equinoxes/solstices.
        spring_start = ephem.next_equinox(str(date.year)).datetime()
        summer_start = ephem.next_solstice(str(date.year)).datetime()
        autumn_start = ephem.next_equinox(spring_start).datetime()
        winter_start = ephem.next_solstice(summer_start).datetime()
    else:
        # Meteorological seasons start on fixed dates: Mar/Jun/Sep/Dec 1st.
        spring_start = datetime(2017, 3, 1).replace(year=date.year)
        summer_start = spring_start.replace(month=6)
        autumn_start = spring_start.replace(month=9)
        winter_start = spring_start.replace(month=12)

    if spring_start <= date < summer_start:
        season = STATE_SPRING
    elif summer_start <= date < autumn_start:
        season = STATE_SUMMER
    elif autumn_start <= date < winter_start:
        season = STATE_AUTUMN
    else:
        # Remaining cases: on/after the winter start, or before spring.
        # (Equivalent to the previous 'winter_start <= date or
        # spring_start > date' branch, but guarantees 'season' is bound.)
        season = STATE_WINTER

    # Northern-hemisphere seasons are the reference; swap for the south.
    if hemisphere == NORTHERN:
        return season
    return HEMISPHERE_SEASON_SWAP.get(season)
class Season(Entity):
    """Representation of the current season."""

    def __init__(self, hass, hemisphere, season_tracking_type, name):
        """Initialize the season."""
        self.hass = hass
        self._name = name
        # NORTHERN / SOUTHERN / EQUATOR, derived from the configured latitude.
        self.hemisphere = hemisphere
        # Naive datetime of the last update (set in update()).
        self.datetime = None
        # TYPE_ASTRONOMICAL or TYPE_METEOROLOGICAL.
        self.type = season_tracking_type
        # Last computed STATE_* value; None until updated or on the equator.
        self.season = None

    @property
    def name(self):
        """Return the name."""
        return self._name

    @property
    def state(self):
        """Return the current season."""
        return self.season

    @property
    def device_class(self):
        """Return the device class."""
        return "season__season"

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        # Falls back to a generic cloud icon when no season is known.
        return SEASON_ICONS.get(self.season, "mdi:cloud")

    def update(self):
        """Update season."""
        # Drop tzinfo so comparisons against the naive datetimes built in
        # get_season() are valid.
        self.datetime = utcnow().replace(tzinfo=None)
        self.season = get_season(self.datetime, self.hemisphere, self.type)
| apache-2.0 |
pabloescribanoloza/xamarin-forms-book-preview-2 | Chapter22/ColorAnimations/ColorAnimations/ColorAnimations.WinPhone81/App.xaml.cs | 5174 | using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.ApplicationModel;
using Windows.ApplicationModel.Activation;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Media.Animation;
using Windows.UI.Xaml.Navigation;
// The Blank Application template is documented at http://go.microsoft.com/fwlink/?LinkId=391641
namespace ColorAnimations.WinPhone81
{
/// <summary>
/// Provides application-specific behavior to supplement the default Application class.
/// </summary>
public sealed partial class App : Application
{
    // Saved page transitions: removed before the first navigation and put
    // back by RootFrame_FirstNavigated so startup skips the turnstile effect.
    private TransitionCollection transitions;

    /// <summary>
    /// Initializes the singleton application object. This is the first line of authored code
    /// executed, and as such is the logical equivalent of main() or WinMain().
    /// </summary>
    public App()
    {
        this.InitializeComponent();
        this.Suspending += this.OnSuspending;
    }

    /// <summary>
    /// Invoked when the application is launched normally by the end user. Other entry points
    /// will be used when the application is launched to open a specific file, to display
    /// search results, and so forth.
    /// </summary>
    /// <param name="e">Details about the launch request and process.</param>
    protected override void OnLaunched(LaunchActivatedEventArgs e)
    {
#if DEBUG
        // Show the frame-rate counter while a debugger is attached.
        if (System.Diagnostics.Debugger.IsAttached)
        {
            this.DebugSettings.EnableFrameRateCounter = true;
        }
#endif

        Frame rootFrame = Window.Current.Content as Frame;

        // Do not repeat app initialization when the Window already has content,
        // just ensure that the window is active
        if (rootFrame == null)
        {
            // Create a Frame to act as the navigation context and navigate to the first page
            rootFrame = new Frame();

            // TODO: change this value to a cache size that is appropriate for your application
            rootFrame.CacheSize = 1;

            // Initialize Xamarin.Forms before the first page is constructed.
            Xamarin.Forms.Forms.Init(e);

            if (e.PreviousExecutionState == ApplicationExecutionState.Terminated)
            {
                // TODO: Load state from previously suspended application
            }

            // Place the frame in the current Window
            Window.Current.Content = rootFrame;
        }

        if (rootFrame.Content == null)
        {
            // Removes the turnstile navigation for startup.
            if (rootFrame.ContentTransitions != null)
            {
                this.transitions = new TransitionCollection();
                foreach (var c in rootFrame.ContentTransitions)
                {
                    this.transitions.Add(c);
                }
            }

            rootFrame.ContentTransitions = null;
            rootFrame.Navigated += this.RootFrame_FirstNavigated;

            // When the navigation stack isn't restored navigate to the first page,
            // configuring the new page by passing required information as a navigation
            // parameter
            if (!rootFrame.Navigate(typeof(MainPage), e.Arguments))
            {
                throw new Exception("Failed to create initial page");
            }
        }

        // Ensure the current window is active
        Window.Current.Activate();
    }

    /// <summary>
    /// Restores the content transitions after the app has launched.
    /// </summary>
    /// <param name="sender">The object where the handler is attached.</param>
    /// <param name="e">Details about the navigation event.</param>
    private void RootFrame_FirstNavigated(object sender, NavigationEventArgs e)
    {
        var rootFrame = sender as Frame;
        // Restore the saved transitions, or a default navigation transition
        // when none were captured; then detach this one-shot handler.
        rootFrame.ContentTransitions = this.transitions ?? new TransitionCollection() { new NavigationThemeTransition() };
        rootFrame.Navigated -= this.RootFrame_FirstNavigated;
    }

    /// <summary>
    /// Invoked when application execution is being suspended. Application state is saved
    /// without knowing whether the application will be terminated or resumed with the contents
    /// of memory still intact.
    /// </summary>
    /// <param name="sender">The source of the suspend request.</param>
    /// <param name="e">Details about the suspend request.</param>
    private void OnSuspending(object sender, SuspendingEventArgs e)
    {
        var deferral = e.SuspendingOperation.GetDeferral();

        // TODO: Save application state and stop any background activity
        deferral.Complete();
    }
}
haocafes/DataflowJavaSDK | sdk/src/test/java/com/google/cloud/dataflow/sdk/coders/ListCoderTest.java | 2330 | /*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.coders;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
/** Unit tests for {@link ListCoder}. */
@RunWith(JUnit4.class)
public class ListCoderTest {

  /** Representative inputs: empty, singleton, multi-element, and a non-ArrayList list. */
  private static final List<List<Integer>> TEST_VALUES = Arrays.<List<Integer>>asList(
      Collections.<Integer>emptyList(),
      Collections.singletonList(43),
      Arrays.asList(1, 2, 3, 4),
      new LinkedList<Integer>(Arrays.asList(7, 6, 5)));

  @Test
  public void testDecodeEncodeContentsInSameOrder() throws Exception {
    Coder<List<Integer>> listCoder = ListCoder.of(VarIntCoder.of());
    for (List<Integer> testValue : TEST_VALUES) {
      CoderProperties.<Integer, List<Integer>>coderDecodeEncodeContentsInSameOrder(
          listCoder, testValue);
    }
  }

  @Test
  public void testGetInstanceComponentsNonempty() throws Exception {
    // A non-empty list exposes exactly its first element as a component.
    List<Object> parts = ListCoder.getInstanceComponents(Arrays.asList(21, 5, 3, 5));
    assertEquals(1, parts.size());
    assertEquals(21, parts.get(0));
  }

  @Test
  public void testGetInstanceComponentsEmpty() throws Exception {
    // An empty list yields no components at all.
    assertNull(ListCoder.getInstanceComponents(Arrays.<Integer>asList()));
  }

  @Test
  public void testEmptyList() throws Exception {
    CoderProperties.<List<Integer>>coderDecodeEncodeEqual(
        ListCoder.of(VarIntCoder.of()), Collections.<Integer>emptyList());
  }
}
| apache-2.0 |
mbiarnes/kie-wb-common | kie-wb-common-services/kie-wb-common-compiler/kie-wb-common-compiler-distribution/src/test/java/org/kie/workbench/common/services/backend/compiler/BaseCompilerTest.java | 2381 | /*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.services.backend.compiler;
import java.io.Serializable;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.kie.workbench.common.services.backend.compiler.impl.WorkspaceCompilationInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.uberfire.java.nio.file.Files;
import org.uberfire.java.nio.file.Path;
import org.uberfire.java.nio.file.Paths;
public class BaseCompilerTest implements Serializable {

    // Temporary repository root shared by all tests in a subclass run.
    protected static Path tmpRoot;
    // Local Maven repository path resolved from the test environment.
    protected String mavenRepoPath;
    protected Logger logger = LoggerFactory.getLogger(BaseCompilerTest.class);
    // Absolute path of the alternate Maven settings file used by the compiler.
    protected String alternateSettingsAbsPath;
    // Compilation info pointing at the project copied under tmpRoot/dummy.
    protected WorkspaceCompilationInfo info;

    @BeforeClass
    public static void setup() {
        // Disable the git daemon/SSH endpoints so tests don't open sockets.
        System.setProperty("org.uberfire.nio.git.daemon.enabled", "false");
        System.setProperty("org.uberfire.nio.git.ssh.enabled", "false");
    }

    // Copies the test project named by prjName into a fresh temp directory
    // and prepares the workspace compilation info used by subclasses.
    public BaseCompilerTest(String prjName) {
        try {
            mavenRepoPath = TestUtilMaven.getMavenRepo();
            tmpRoot = Files.createTempDirectory("repo");
            alternateSettingsAbsPath = TestUtilMaven.getSettingsFile();
            Path tmp = Files.createDirectories(Paths.get(tmpRoot.toString(), "dummy"));
            TestUtil.copyTree(Paths.get(prjName), tmp);
            info = new WorkspaceCompilationInfo(Paths.get(tmp.toUri()));
        } catch (Exception e) {
            // NOTE(review): setup failures are only logged; dependent tests
            // will later fail with a null 'info' — consider rethrowing.
            logger.error(e.getMessage());
        }
    }

    @AfterClass
    public static void tearDown() {
        System.clearProperty("org.uberfire.nio.git.daemon.enabled");
        System.clearProperty("org.uberfire.nio.git.ssh.enabled");
        if (tmpRoot != null) {
            // Best-effort recursive delete of the temp repository.
            TestUtil.rm(tmpRoot.toFile());
        }
    }
}
| apache-2.0 |
yapengsong/ovirt-engine | backend/manager/modules/common/src/main/java/org/ovirt/engine/core/common/vdscommands/GetDiskImageAlignmentVDSCommandParameters.java | 1565 | package org.ovirt.engine.core.common.vdscommands;
import java.util.HashMap;
import java.util.Map;
import org.ovirt.engine.core.compat.Guid;
/**
 * Parameters for the VDS command that retrieves the alignment of a VM disk
 * image, identified by its storage pool, domain, image group and image
 * (volume) ids.
 */
public class GetDiskImageAlignmentVDSCommandParameters extends GetDiskAlignmentVDSCommandParameters {
    private Guid poolId, domainId, imageGroupId, imageId;

    public GetDiskImageAlignmentVDSCommandParameters(Guid vdsId, Guid vmId) {
        super(vdsId, vmId);
    }

    public GetDiskImageAlignmentVDSCommandParameters() {
    }

    public void setPoolId(Guid poolId) {
        this.poolId = poolId;
    }

    public Guid getPoolId() {
        return poolId;
    }

    public void setDomainId(Guid domainId) {
        this.domainId = domainId;
    }

    public Guid getDomainId() {
        return domainId;
    }

    public void setImageGroupId(Guid imageGroupId) {
        this.imageGroupId = imageGroupId;
    }

    public Guid getImageGroupId() {
        return imageGroupId;
    }

    public void setImageId(Guid imageId) {
        this.imageId = imageId;
    }

    public Guid getImageId() {
        return imageId;
    }

    /**
     * Builds the drive-specification map describing the disk device and its
     * storage coordinates.
     *
     * @return a map with the device type and pool/domain/image/volume ids.
     */
    @Override
    public Map<String, String> getDriveSpecs() {
        Map<String, String> drive = new HashMap<String, String>();
        drive.put("device", "disk");
        drive.put("domainID", getDomainId().toString());
        drive.put("poolID", getPoolId().toString());
        drive.put("imageID", getImageGroupId().toString());
        // Fix: the original code put "volumeID" twice (first Guid.Empty, then
        // the real image id, silently overwriting the first). The dead write
        // is removed; the resulting map content is unchanged.
        drive.put("volumeID", getImageId().toString());
        return drive;
    }
}
| apache-2.0 |
elvisisking/modeshape | modeshape-jcr/src/main/java/org/modeshape/jcr/cache/ChildReferences.java | 18304 | /*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.jcr.cache;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.modeshape.common.annotation.Immutable;
import org.modeshape.common.annotation.NotThreadSafe;
import org.modeshape.common.annotation.ThreadSafe;
import org.modeshape.common.collection.EmptyIterator;
import org.modeshape.jcr.cache.document.WorkspaceCache;
import org.modeshape.jcr.value.Name;
import org.modeshape.jcr.value.NamespaceRegistry;
import org.modeshape.jcr.value.Path.Segment;
/**
 * An interface used to access the {@link ChildReference} instances owned by a parent node.
 */
@Immutable
public interface ChildReferences extends Iterable<ChildReference> {

    /**
     * A constant that might be returned by {@link #size()} if the number of child references is unknown.
     */
    static final long UNKNOWN_SIZE = -1L;

    /**
     * Get the total number of child references for the node, including all subsequent blocks of ChildReferences.
     *
     * @return the total number of children, or {@link #UNKNOWN_SIZE}
     */
    long size();

    /**
     * Determine if there are no references in this container. This is equivalent to calling {@code size() == 0} but may be
     * faster.
     *
     * @return true if there are no references in this container, or false if there are.
     */
    boolean isEmpty();

    /**
     * Return the number of nodes that have the supplied name. If there are no siblings with the same supplied name, this method
     * will return 1; otherwise it will return the number of same-name-siblings.
     *
     * @param name the name
     * @return the number of siblings with the supplied name; never negative
     */
    int getChildCount( Name name );

    /**
     * Look for the child reference that has the given name and a SNS index of '1'.
     *
     * @param name the name for the node
     * @return the child reference, or null if there is no such child
     */
    ChildReference getChild( Name name );

    /**
     * Look for the child reference that has the given name and SNS index.
     *
     * @param name the name for the node
     * @param snsIndex the same-name-sibling index; must be positive
     * @return the child reference, or null if there is no such child
     */
    ChildReference getChild( Name name,
                             int snsIndex );

    /**
     * Look for the child reference that has the given name and SNS index.
     *
     * @param name the name for the node
     * @param snsIndex the same-name-sibling index; must be positive
     * @param context the context in which the child should be evaluated; may be null if there is no context
     * @return the child reference, or null if there is no such child
     */
    ChildReference getChild( Name name,
                             int snsIndex,
                             Context context );

    /**
     * Look for the child reference that has the given name and SNS index.
     *
     * @param segment the path segment, which defines the name and SNS index
     * @return the child reference, or null if there is no such child
     */
    ChildReference getChild( Segment segment );

    /**
     * Determine if this contains a reference to the specified child.
     *
     * @param key the node key of the child
     * @return true if there is a child reference, or false if there is none
     */
    boolean hasChild( NodeKey key );

    /**
     * Look for the child reference that has the node key.
     *
     * @param key the node key of the child
     * @return the child reference, or null if there is no such child
     */
    ChildReference getChild( NodeKey key );

    /**
     * Look for the child reference that has the node key.
     *
     * @param key the node key of the child
     * @param context the context in which the child should be evaluated; may be null if there is no context
     * @return the child reference, or null if there is no such child
     */
    ChildReference getChild( NodeKey key,
                             Context context );

    /**
     * Return whether it is possible/feasible to {@link #getChild(NodeKey, Context) find} a ChildReference for a child node given
     * only its NodeKey. Implementations that have very large numbers of children may provide an alternative way to
     * {@link WorkspaceCache#getChildReference(NodeKey,NodeKey) lookup} a child reference directly. In such cases, this method may
     * return false.
     *
     * @return true if {@link #getChild(NodeKey)} and {@link #getChild(NodeKey, Context)} should be used to find the
     *         ChildReference, or false if doing so is not recommended.
     */
    boolean supportsGetChildReferenceByKey();

    /**
     * Get an iterator over all of the children that have same name matching the supplied value. This essentially returns an
     * iterator over all of the same-name-siblings.
     *
     * @param name the name of the same-name-sibling nodes; may not be null
     * @return the iterator; never null
     */
    Iterator<ChildReference> iterator( Name name );

    /**
     * Get an iterator over all of the children that have same name matching the supplied value. This essentially returns an
     * iterator over all of the same-name-siblings.
     *
     * @param name the name of the same-name-sibling nodes; may not be null
     * @param context the context in which the child should be evaluated; may be null if there is no context
     * @return the iterator; never null
     */
    Iterator<ChildReference> iterator( Name name,
                                       Context context );

    /**
     * Get an iterator over all of the children.
     *
     * @return the iterator; never null
     */
    @Override
    Iterator<ChildReference> iterator();

    /**
     * Get an iterator over all of the children that have {@link Segment#getName() names} (excluding same-name-sibling indexes)
     * matching at least one of the supplied patterns.
     *
     * @param namePatterns the list of string literals or regex patterns describing the names
     * @param registry the namespace registry, used to convert names to a form compatible with the name patterns
     * @return the iterator; never null
     */
    Iterator<ChildReference> iterator( Collection<?> namePatterns,
                                       NamespaceRegistry registry );

    /**
     * Get an iterator over all child references in this collection, using the supplied context.
     *
     * @param context the context in which the child should be evaluated; may be null if there is no context
     * @return the iterator over all references; never null
     */
    Iterator<ChildReference> iterator( Context context );

    /**
     * Get an iterator over all of the children that have {@link Segment#getName() names} (excluding same-name-sibling indexes)
     * matching at least one of the supplied patterns, using the supplied context. The resulting iterator is lazy where possible,
     * but it may be an expensive call if there are large numbers of children.
     *
     * @param context the context in which the child should be evaluated; may be null if there is no context
     * @param namePatterns the list of string literals or regex patterns describing the names
     * @param registry the namespace registry, used to convert names to a form compatible with the name patterns
     * @return the iterator; never null
     */
    Iterator<ChildReference> iterator( Context context,
                                       Collection<?> namePatterns,
                                       NamespaceRegistry registry );

    /**
     * Determine if the child references instance should support SNS or not.
     *
     * @return {@code true} if the child reference instance supports SNS
     */
    boolean allowsSNS();

    /**
     * Get the keys for all of the children. The resulting iterator is lazy where possible, but it may be an expensive call if
     * there are large numbers of children.
     *
     * @return the iterator over the keys; never null
     */
    Iterator<NodeKey> getAllKeys();

    /**
     * The context in which the names are evaluated.
     */
    interface Context {

        /**
         * Consume the next child with the supplied name and key.
         *
         * @param name the name of the node; may not be null
         * @param key the key for the node; may not be null
         * @return the same-name-sibling index for this node; always positive
         */
        int consume( Name name,
                     NodeKey key );

        /**
         * Get the set of changes for this context.
         *
         * @return the changes; can be null if there are no changes
         */
        Changes changes();
    }

    /**
     * The representation of a set of changes for the child references.
     */
    interface Changes {

        /**
         * Get the references to the children with the supplied name that were inserted.
         *
         * @param name the name; may not be null
         * @return the iterator over the insertions; or {@code null} if there are no insertions with this name
         */
        Iterator<ChildInsertions> insertions( Name name );

        /**
         * Get the child reference for the inserted node with the supplied key.
         *
         * @param key the node key for the inserted node; may not be null
         * @return the child reference, or null if no node was inserted with the supplied key
         */
        ChildReference inserted( NodeKey key );

        /**
         * Get the set of child references that were inserted before the node with the supplied key.
         *
         * @param key the node key for the node before which the inserted nodes are to be returned; may not be null
         * @return the nodes that were inserted before the node with the supplied key or {@code null} if there are no insertions
         *         before the given key.
         */
        ChildInsertions insertionsBefore( ChildReference key );

        /**
         * Determine whether the supplied child reference was removed.
         *
         * @param ref the reference; may not be null
         * @return true if the child reference was removed, or false otherwise
         */
        boolean isRemoved( ChildReference ref );

        /**
         * Determine whether the supplied child reference was renamed.
         *
         * @param ref the reference; may not be null
         * @return true if the child reference was renamed, or false otherwise
         */
        boolean isRenamed( ChildReference ref );

        /**
         * Determine whether any of the child references were renamed to the supplied name.
         *
         * @param newName the new name; may not be null
         * @return true if at least one child reference was renamed to the supplied name, or false otherwise
         */
        boolean isRenamed( Name newName );

        /**
         * Return the new name for the child node with the supplied key.
         *
         * @param key the child node's key; may not be null
         * @return the new name, or null if the node is not a child or was not renamed
         */
        Name renamed( NodeKey key );

        /**
         * Determine if this set of changes is empty.
         *
         * @return true if there are no effective changes, or false if there is at least one effective change
         */
        boolean isEmpty();

        /**
         * Get the number of child references that were removed.
         *
         * @return the number of removed child references; never negative
         */
        int removalCount();

        /**
         * Get the number of child references that were inserted.
         *
         * @return the number of inserted child references; never negative
         */
        int insertionCount();

        /**
         * Get the number of child references that were inserted with the given name.
         *
         * @param name the {@link Name} of a child, never {@code null}
         * @return the number of inserted child references which have the given name; never negative
         */
        int insertionCount( Name name );

        /**
         * Get the number of child references that were renamed.
         *
         * @return the number of renamed child references; never negative
         */
        int renameCount();
    }

    /**
     * A representation of the child references that were inserted before some other node.
     */
    interface ChildInsertions {

        /**
         * The nodes that were inserted.
         *
         * @return the iterator over the child references that were inserted; never null
         */
        Iterable<ChildReference> inserted();

        /**
         * The reference to the child before which the nodes are to be inserted.
         *
         * @return the child reference before which the nodes are to be inserted; never null
         */
        ChildReference insertedBefore();
    }

    /**
     * A {@link ChildReferences.Context} implementation that has no changes and that is useful when there are never any siblings
     * with the same names, since it always returns '1' for the SNS index.
     */
    public static final class NoContext implements Context {

        public static final Context INSTANCE = new NoContext();

        // Singleton; use INSTANCE.
        private NoContext() {
        }

        @Override
        public int consume( Name name,
                            NodeKey key ) {
            return 1;
        }

        @Override
        public Changes changes() {
            return null;
        }
    }

    /**
     * A {@link ChildReferences.Changes} implementation that contains no changes at all.
     */
    @Immutable
    public static final class NoChanges implements Changes {

        protected static final Iterator<ChildInsertions> NO_INSERTIONS_ITERATOR = new EmptyIterator<ChildInsertions>();

        @Override
        public boolean isEmpty() {
            return true;
        }

        @Override
        public int insertionCount() {
            return 0;
        }

        @Override
        public int insertionCount( Name name ) {
            return 0;
        }

        @Override
        public int removalCount() {
            return 0;
        }

        @Override
        public int renameCount() {
            return 0;
        }

        @Override
        public Name renamed( NodeKey key ) {
            return null;
        }

        @Override
        public Iterator<ChildInsertions> insertions( Name name ) {
            return NO_INSERTIONS_ITERATOR;
        }

        @Override
        public ChildReference inserted( NodeKey key ) {
            return null;
        }

        @Override
        public ChildInsertions insertionsBefore( ChildReference key ) {
            return null;
        }

        @Override
        public boolean isRemoved( ChildReference ref ) {
            return false;
        }

        @Override
        public boolean isRenamed( ChildReference ref ) {
            return false;
        }

        @Override
        public boolean isRenamed( Name newName ) {
            return false;
        }
    }

    /**
     * A {@link ChildReferences.Context} implementation that has no changes and that always returns '1' for the SNS index.
     */
    @ThreadSafe
    public static final class NoSnsIndexesContext implements Context {

        @Override
        public int consume( Name name,
                            NodeKey key ) {
            return 1;
        }

        @Override
        public Changes changes() {
            return null;
        }
    }

    /**
     * A {@link ChildReferences.Context} implementation that has no changes and can be used to find the SNS indexes for nodes
     * named a single name.
     */
    @ThreadSafe
    public static class SingleNameContext implements Context {

        // AtomicInteger makes the increment atomic, honoring the @ThreadSafe annotation.
        // (The previous implementation used '++index' on a plain int, which is not atomic.)
        private final AtomicInteger index = new AtomicInteger(0);

        @Override
        public int consume( Name name,
                            NodeKey key ) {
            return index.incrementAndGet();
        }

        @Override
        public Changes changes() {
            return null;
        }
    }

    /**
     * A {@link ChildReferences.Context} implementation that has no changes but maintains the SNS indexes for nodes with any name.
     */
    @NotThreadSafe
    public static class BasicContext implements Context {

        private final Map<Name, AtomicInteger> indexes = new HashMap<Name, AtomicInteger>();

        @Override
        public int consume( Name name,
                            NodeKey key ) {
            AtomicInteger index = indexes.get(name);
            if (index == null) {
                // First sighting of this name: start its SNS counter at 1.
                index = new AtomicInteger(1);
                indexes.put(name, index);
                return 1;
            }
            return index.incrementAndGet();
        }

        @Override
        public Changes changes() {
            return null;
        }
    }

    /**
     * A {@link ChildReferences.Context} implementation that has changes and can be used to find the SNS indexes for nodes named a
     * single name.
     */
    @ThreadSafe
    public static class WithChanges implements Context {

        private final Context delegate;
        private final Changes changes;

        public WithChanges( Context delegate,
                            Changes changes ) {
            this.delegate = delegate;
            this.changes = changes;
        }

        @Override
        public int consume( Name name,
                            NodeKey key ) {
            // SNS bookkeeping is delegated; this wrapper only attaches the changes.
            return this.delegate.consume(name, key);
        }

        @Override
        public Changes changes() {
            return changes;
        }
    }
}
| apache-2.0 |
romankagan/DDBWorkbench | plugins/svn4idea/testSource/org/jetbrains/idea/svn16/VcsWaitForUpdateForTest.java | 1912 | /*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn16;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vcs.VcsConfiguration;
import com.intellij.openapi.vcs.changes.EnsureUpToDateFromNonAWTThread;
import com.intellij.openapi.vfs.VirtualFile;
import org.junit.Test;
public class VcsWaitForUpdateForTest extends Svn16TestCase {
@Test
public void testRefreshes() throws Exception {
enableSilentOperation(VcsConfiguration.StandardConfirmation.ADD);
final VirtualFile file = createFileInCommand("a.txt", "old content");
final Object lock = new Object();
final Ref<Boolean> done = new Ref<Boolean>();
final Thread thread = new Thread(new Runnable() {
@Override
public void run() {
new EnsureUpToDateFromNonAWTThread(myProject).execute();
done.set(Boolean.TRUE);
synchronized (lock) {
lock.notifyAll();
}
}
});
thread.start();
synchronized (lock) {
final long start = System.currentTimeMillis();
final int timeout = 3000;
while ((System.currentTimeMillis() - start < timeout) && (! Boolean.TRUE.equals(done.get()))) {
try {
lock.wait(timeout);
}
catch (InterruptedException e) {
//
}
}
}
assert Boolean.TRUE.equals(done.get());
}
}
| apache-2.0 |
utkarshx/ViewerJS | viewer.js | 23758 | /**
* Copyright (C) 2012-2015 KO GmbH <copyright@kogmbh.com>
*
* @licstart
* This file is part of ViewerJS.
*
* ViewerJS is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License (GNU AGPL)
* as published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* ViewerJS is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with ViewerJS. If not, see <http://www.gnu.org/licenses/>.
* @licend
*
* @source: http://viewerjs.org/
* @source: http://github.com/kogmbh/ViewerJS
*/
/*
* This file is a derivative from a part of Mozilla's PDF.js project. The
* original license header follows.
*/
/* Copyright 2012 Mozilla Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*global document, window*/
/**
 * Generic document-viewer shell. Wires a format-specific plugin (which renders the
 * actual pages) to the shared UI: toolbar, zoom controls, page navigation, the
 * about dialog, fullscreen and presentation mode.
 * @param {!Object} viewerPlugin plugin exposing initialize/showPage/zoom/fit hooks
 * @param {!Object} parameters   documentUrl, title, zoom, startpage, ...
 */
function Viewer(viewerPlugin, parameters) {
    "use strict";
    var self = this,
        kScrollbarPadding = 40,
        kMinScale = 0.25,
        kMaxScale = 4.0,
        kDefaultScaleDelta = 1.1,
        kDefaultScale = 'auto',
        presentationMode = false,
        isFullScreen = false,
        initialized = false,
        url,
        viewerElement = document.getElementById('viewer'),
        canvasContainer = document.getElementById('canvasContainer'),
        overlayNavigator = document.getElementById('overlayNavigator'),
        titlebar = document.getElementById('titlebar'),
        toolbar = document.getElementById('toolbarContainer'),
        pageSwitcher = document.getElementById('toolbarLeft'),
        zoomWidget = document.getElementById('toolbarMiddleContainer'),
        scaleSelector = document.getElementById('scaleSelect'),
        dialogOverlay = document.getElementById('dialogOverlay'),
        toolbarRight = document.getElementById('toolbarRight'),
        aboutDialog,
        pages = [],
        currentPage,
        scaleChangeTimer,
        touchTimer,
        toolbarTouchTimer,
        /**@const*/
        UI_FADE_DURATION = 5000;

    // NOTE(review): 'blanked' is never declared in this file's visible scope; it appears
    // to rely on the browser exposing the element with id="blanked" as a global — confirm.
    function isBlankedOut() {
        return (blanked.style.display === 'block');
    }

    // Builds the about dialog and the "ViewerJS" toolbar button that opens it.
    function initializeAboutInformation() {
        var aboutDialogCentererTable, aboutDialogCentererCell, aboutButton, pluginName, pluginVersion, pluginURL,
            version;

        version = (String(typeof ViewerJS_version) !== "undefined" ? ViewerJS_version : "From Source");

        if (viewerPlugin) {
            pluginName = viewerPlugin.getPluginName();
            pluginVersion = viewerPlugin.getPluginVersion();
            pluginURL = viewerPlugin.getPluginURL();
        }

        // Create dialog
        aboutDialogCentererTable = document.createElement('div');
        aboutDialogCentererTable.id = "aboutDialogCentererTable";
        aboutDialogCentererCell = document.createElement('div');
        aboutDialogCentererCell.id = "aboutDialogCentererCell";
        aboutDialog = document.createElement('div');
        aboutDialog.id = "aboutDialog";
        aboutDialog.innerHTML =
            "<h1>ViewerJS</h1>" +
            "<p>Open Source document viewer for webpages, built with HTML and JavaScript.</p>" +
            "<p>Learn more and get your own copy on the <a href=\"http://viewerjs.org/\" target=\"_blank\">ViewerJS website</a>.</p>" +
            (viewerPlugin ? ("<p>Using the <a href = \""+ pluginURL + "\" target=\"_blank\">" + pluginName + "</a> " +
                "(<span id = \"pluginVersion\">" + pluginVersion + "</span>) " +
                "plugin to show you this document.</p>")
                : "") +
            "<p>Version " + version + "</p>" +
            "<p>Supported by <a href=\"https://nlnet.nl\" target=\"_blank\"><br><img src=\"images\/nlnet.png\" width=\"160\" height=\"60\" alt=\"NLnet Foundation\"></a></p>" +
            "<p>Made by <a href=\"http://kogmbh.com\" target=\"_blank\"><br><img src=\"images\/kogmbh.png\" width=\"172\" height=\"40\" alt=\"KO GmbH\"></a></p>" +
            "<button id = \"aboutDialogCloseButton\" class = \"toolbarButton textButton\">Close</button>";
        dialogOverlay.appendChild(aboutDialogCentererTable);
        aboutDialogCentererTable.appendChild(aboutDialogCentererCell);
        aboutDialogCentererCell.appendChild(aboutDialog);

        // Create button to open dialog that says "ViewerJS"
        aboutButton = document.createElement('button');
        aboutButton.id = "about";
        aboutButton.className = "toolbarButton textButton about";
        aboutButton.title = "About";
        aboutButton.innerHTML = "ViewerJS"
        toolbarRight.appendChild(aboutButton);

        // Attach events to the above
        aboutButton.addEventListener('click', function () {
            showAboutDialog();
        });
        document.getElementById('aboutDialogCloseButton').addEventListener('click', function () {
            hideAboutDialog();
        });
    }

    function showAboutDialog() {
        dialogOverlay.style.display = "block";
    }

    function hideAboutDialog() {
        dialogOverlay.style.display = "none";
    }

    // Marks the <option> matching 'value' as selected in the zoom <select>.
    // Returns false when 'value' is not one of the predefined levels.
    function selectScaleOption(value) {
        // Retrieve the options from the zoom level <select> element
        var options = scaleSelector.options,
            option,
            predefinedValueFound = false,
            i;

        for (i = 0; i < options.length; i += 1) {
            option = options[i];
            if (option.value !== value) {
                option.selected = false;
                continue;
            }
            option.selected = true;
            predefinedValueFound = true;
        }
        return predefinedValueFound;
    }

    function getPages() {
        return viewerPlugin.getPages();
    }

    // Applies a numeric zoom level and broadcasts a 'scalechange' UI event.
    function setScale(val, resetAutoSettings) {
        if (val === self.getZoomLevel()) {
            return;
        }

        self.setZoomLevel(val);

        var event = document.createEvent('UIEvents');
        event.initUIEvent('scalechange', false, false, window, 0);
        event.scale = val;
        event.resetAutoSettings = resetAutoSettings;
        window.dispatchEvent(event);
    }

    // Scroll handler: lets the plugin react and keeps the page-number box in sync.
    function onScroll() {
        var pageNumber;

        if (viewerPlugin.onScroll) {
            viewerPlugin.onScroll();
        }
        if (viewerPlugin.getPageInView) {
            pageNumber = viewerPlugin.getPageInView();
            if (pageNumber) {
                currentPage = pageNumber;
                document.getElementById('pageNumber').value = pageNumber;
            }
        }
    }

    // Debounced onScroll, used after zoom changes settle.
    function delayedRefresh(milliseconds) {
        window.clearTimeout(scaleChangeTimer);
        scaleChangeTimer = window.setTimeout(function () {
            onScroll();
        }, milliseconds);
    }

    // Interprets a zoom value: a number, 'custom', or one of the named fit modes.
    function parseScale(value, resetAutoSettings) {
        var scale,
            maxWidth,
            maxHeight;

        if (value === 'custom') {
            scale = parseFloat(document.getElementById('customScaleOption').textContent) / 100;
        } else {
            scale = parseFloat(value);
        }

        if (scale) {
            setScale(scale, true);
            delayedRefresh(300);
            return;
        }

        maxWidth = canvasContainer.clientWidth - kScrollbarPadding;
        maxHeight = canvasContainer.clientHeight - kScrollbarPadding;

        switch (value) {
        case 'page-actual':
            setScale(1, resetAutoSettings);
            break;
        case 'page-width':
            viewerPlugin.fitToWidth(maxWidth);
            break;
        case 'page-height':
            viewerPlugin.fitToHeight(maxHeight);
            break;
        case 'page-fit':
            viewerPlugin.fitToPage(maxWidth, maxHeight);
            break;
        case 'auto':
            if (viewerPlugin.isSlideshow()) {
                viewerPlugin.fitToPage(maxWidth + kScrollbarPadding, maxHeight + kScrollbarPadding);
            } else {
                viewerPlugin.fitSmart(maxWidth);
            }
            break;
        }

        selectScaleOption(value);
        delayedRefresh(300);
    }

    // Sanitizes the 'zoom' URL parameter; falls back to kDefaultScale.
    function readZoomParameter(zoom) {
        var validZoomStrings = ["auto", "page-actual", "page-width"],
            number;

        if (validZoomStrings.indexOf(zoom) !== -1) {
            return zoom;
        }
        number = parseFloat(zoom);
        if (number && kMinScale <= number && number <= kMaxScale) {
            return zoom;
        }
        return kDefaultScale;
    }

    // Sanitizes the 'startpage' URL parameter; falls back to page 1.
    function readStartPageParameter(startPage) {
        var result = parseInt(startPage, 10);
        return isNaN(result) ? 1 : result;
    }

    this.initialize = function () {
        var initialScale,
            element;

        initialScale = readZoomParameter(parameters.zoom);

        url = parameters.documentUrl;
        document.title = parameters.title;
        var documentName = document.getElementById('documentName');
        documentName.innerHTML = "";
        documentName.appendChild(documentName.ownerDocument.createTextNode(parameters.title));

        viewerPlugin.onLoad = function () {
            document.getElementById('pluginVersion').innerHTML = viewerPlugin.getPluginVersion();

            if (viewerPlugin.isSlideshow()) {
                // Slideshow pages should be centered
                canvasContainer.classList.add("slideshow");
                // Show page nav controls only for presentations
                pageSwitcher.style.visibility = 'visible';
            } else {
                // For text documents, show the zoom widget.
                zoomWidget.style.visibility = 'visible';
                // Only show the page switcher widget if the plugin supports page numbers
                if (viewerPlugin.getPageInView) {
                    pageSwitcher.style.visibility = 'visible';
                }
            }

            initialized = true;
            pages = getPages();
            document.getElementById('numPages').innerHTML = 'of ' + pages.length;

            self.showPage(readStartPageParameter(parameters.startpage));

            // Set default scale
            parseScale(initialScale);

            canvasContainer.onscroll = onScroll;
            delayedRefresh();
        };

        viewerPlugin.initialize(canvasContainer, url);
    };

    /**
     * Shows the 'n'th page. If n is larger than the page count,
     * shows the last page. If n is less than 1, shows the first page.
     * @return {undefined}
     */
    this.showPage = function (n) {
        if (n <= 0) {
            n = 1;
        } else if (n > pages.length) {
            n = pages.length;
        }

        viewerPlugin.showPage(n);

        currentPage = n;
        document.getElementById('pageNumber').value = currentPage;
    };

    /**
     * Shows the next page. If there is no subsequent page, does nothing.
     * @return {undefined}
     */
    this.showNextPage = function () {
        self.showPage(currentPage + 1);
    };

    /**
     * Shows the previous page. If there is no previous page, does nothing.
     * @return {undefined}
     */
    this.showPreviousPage = function () {
        self.showPage(currentPage - 1);
    };

    /**
     * Attempts to 'download' the file.
     * @return {undefined}
     */
    this.download = function () {
        var documentUrl = url.split('#')[0];
        documentUrl += '#viewer.action=download';
        window.open(documentUrl, '_parent');
    };

    /**
     * Toggles the fullscreen state of the viewer.
     * Tries each vendor-prefixed API until one is available.
     * @return {undefined}
     */
    this.toggleFullScreen = function () {
        var elem = viewerElement;
        if (!isFullScreen) {
            if (elem.requestFullscreen) {
                elem.requestFullscreen();
            } else if (elem.mozRequestFullScreen) {
                elem.mozRequestFullScreen();
            } else if (elem.webkitRequestFullscreen) {
                elem.webkitRequestFullscreen();
            } else if (elem.webkitRequestFullScreen) {
                elem.webkitRequestFullScreen(Element.ALLOW_KEYBOARD_INPUT);
            } else if (elem.msRequestFullscreen) {
                elem.msRequestFullscreen();
            }
        } else {
            if (document.exitFullscreen) {
                document.exitFullscreen();
            } else if (document.cancelFullScreen) {
                document.cancelFullScreen();
            } else if (document.mozCancelFullScreen) {
                document.mozCancelFullScreen();
            } else if (document.webkitExitFullscreen) {
                document.webkitExitFullscreen();
            } else if (document.webkitCancelFullScreen) {
                document.webkitCancelFullScreen();
            } else if (document.msExitFullscreen) {
                document.msExitFullscreen();
            }
        }
    };

    /**
     * Toggles the presentation mode of the viewer.
     * Presentation mode involves fullscreen + hidden UI controls
     */
    this.togglePresentationMode = function () {
        var overlayCloseButton = document.getElementById('overlayCloseButton');

        if (!presentationMode) {
            titlebar.style.display = toolbar.style.display = 'none';
            overlayCloseButton.style.display = 'block';
            canvasContainer.classList.add('presentationMode');
            canvasContainer.onmousedown = function (event) {
                event.preventDefault();
            };
            canvasContainer.oncontextmenu = function (event) {
                event.preventDefault();
            };
            // Left click advances, any other button goes back.
            canvasContainer.onmouseup = function (event) {
                event.preventDefault();
                if (event.which === 1) {
                    self.showNextPage();
                } else {
                    self.showPreviousPage();
                }
            };
            parseScale('page-fit');
        } else {
            if (isBlankedOut()) {
                leaveBlankOut();
            }
            titlebar.style.display = toolbar.style.display = 'block';
            overlayCloseButton.style.display = 'none';
            canvasContainer.classList.remove('presentationMode');
            canvasContainer.onmouseup = function () {};
            canvasContainer.oncontextmenu = function () {};
            canvasContainer.onmousedown = function () {};
            parseScale('auto');
        }

        presentationMode = !presentationMode;
    };

    /**
     * Gets the zoom level of the document
     * @return {!number}
     */
    this.getZoomLevel = function () {
        return viewerPlugin.getZoomLevel();
    };

    /**
     * Set the zoom level of the document
     * @param {!number} value
     * @return {undefined}
     */
    this.setZoomLevel = function (value) {
        viewerPlugin.setZoomLevel(value);
    };

    /**
     * Zoom out by 10 %
     * @return {undefined}
     */
    this.zoomOut = function () {
        // 10 % decrement
        var newScale = (self.getZoomLevel() / kDefaultScaleDelta).toFixed(2);
        newScale = Math.max(kMinScale, newScale);
        parseScale(newScale, true);
    };

    /**
     * Zoom in by 10%
     * @return {undefined}
     */
    this.zoomIn = function () {
        // 10 % increment
        var newScale = (self.getZoomLevel() * kDefaultScaleDelta).toFixed(2);
        newScale = Math.min(kMaxScale, newScale);
        parseScale(newScale, true);
    };

    // Leaves presentation mode when fullscreen was exited externally (e.g. via Esc).
    function cancelPresentationMode() {
        if (presentationMode && !isFullScreen) {
            self.togglePresentationMode();
        }
    }

    function handleFullScreenChange() {
        isFullScreen = !isFullScreen;
        cancelPresentationMode();
    }

    // Reveals the overlay page navigator briefly; it fades after UI_FADE_DURATION.
    function showOverlayNavigator() {
        if (presentationMode || viewerPlugin.isSlideshow()) {
            overlayNavigator.className = 'viewer-touched';
            window.clearTimeout(touchTimer);
            touchTimer = window.setTimeout(function () {
                overlayNavigator.className = '';
            }, UI_FADE_DURATION);
        }
    }

    /**
     * Shows the title bar and toolbar, auto-hiding them again after UI_FADE_DURATION.
     */
    function showToolbars() {
        titlebar.classList.add('viewer-touched');
        toolbar.classList.add('viewer-touched');
        window.clearTimeout(toolbarTouchTimer);
        toolbarTouchTimer = window.setTimeout(function () {
            hideToolbars();
        }, UI_FADE_DURATION);
    }

    function hideToolbars() {
        titlebar.classList.remove('viewer-touched');
        toolbar.classList.remove('viewer-touched');
    }

    function toggleToolbars() {
        if (titlebar.classList.contains('viewer-touched')) {
            hideToolbars();
        } else {
            showToolbars();
        }
    }

    // Covers the viewport with a solid color (presentation 'blank screen' feature).
    function blankOut(value) {
        blanked.style.display = 'block';
        blanked.style.backgroundColor = value;
        hideToolbars();
    }

    function leaveBlankOut() {
        blanked.style.display = 'none';
        toggleToolbars();
    }

    // Wires a toolbar button to a handler and drops focus so keyboard nav keeps working.
    function setButtonClickHandler(buttonId, handler) {
        var button = document.getElementById(buttonId);
        button.addEventListener('click', function () {
            handler();
            button.blur();
        });
    }

    // One-time setup: about dialog, plugin init, and all UI/keyboard event wiring.
    function init() {
        initializeAboutInformation();

        if (viewerPlugin) {
            self.initialize();

            // Hide fullscreen/presentation buttons when no exit-fullscreen API exists.
            if (!(document.exitFullscreen || document.cancelFullScreen || document.mozCancelFullScreen || document.webkitExitFullscreen || document.webkitCancelFullScreen || document.msExitFullscreen)) {
                document.getElementById('fullscreen').style.visibility = 'hidden';
                document.getElementById('presentation').style.visibility = 'hidden';
            }

            setButtonClickHandler('overlayCloseButton', self.toggleFullScreen);
            setButtonClickHandler('fullscreen', self.toggleFullScreen);
            setButtonClickHandler('presentation', function () {
                if (!isFullScreen) {
                    self.toggleFullScreen();
                }
                self.togglePresentationMode();
            });

            document.addEventListener('fullscreenchange', handleFullScreenChange);
            document.addEventListener('webkitfullscreenchange', handleFullScreenChange);
            document.addEventListener('mozfullscreenchange', handleFullScreenChange);
            document.addEventListener('MSFullscreenChange', handleFullScreenChange);

            setButtonClickHandler('download', self.download);

            setButtonClickHandler('zoomOut', self.zoomOut);
            setButtonClickHandler('zoomIn', self.zoomIn);

            setButtonClickHandler('previous', self.showPreviousPage);
            setButtonClickHandler('next', self.showNextPage);

            setButtonClickHandler('previousPage', self.showPreviousPage);
            setButtonClickHandler('nextPage', self.showNextPage);

            document.getElementById('pageNumber').addEventListener('change', function () {
                self.showPage(this.value);
            });

            document.getElementById('scaleSelect').addEventListener('change', function () {
                parseScale(this.value);
            });

            canvasContainer.addEventListener('click', showOverlayNavigator);
            overlayNavigator.addEventListener('click', showOverlayNavigator);
            canvasContainer.addEventListener('click', toggleToolbars);
            titlebar.addEventListener('click', showToolbars);
            toolbar.addEventListener('click', showToolbars);

            // Keep the zoom <select> in sync with programmatic scale changes.
            window.addEventListener('scalechange', function (evt) {
                var customScaleOption = document.getElementById('customScaleOption'),
                    predefinedValueFound = selectScaleOption(String(evt.scale));

                customScaleOption.selected = false;

                if (!predefinedValueFound) {
                    customScaleOption.textContent = Math.round(evt.scale * 10000) / 100 + '%';
                    customScaleOption.selected = true;
                }
            }, true);

            window.addEventListener('resize', function (evt) {
                if (initialized &&
                    (document.getElementById('pageWidthOption').selected ||
                        document.getElementById('pageAutoOption').selected)) {
                    parseScale(document.getElementById('scaleSelect').value);
                }
                showOverlayNavigator();
            });

            window.addEventListener('keydown', function (evt) {
                var key = evt.keyCode,
                    shiftKey = evt.shiftKey;

                // blanked-out mode?
                if (isBlankedOut()) {
                    switch (key) {
                    case 16: // Shift
                    case 17: // Ctrl
                    case 18: // Alt
                    case 91: // LeftMeta
                    case 93: // RightMeta
                    case 224: // MetaInMozilla
                    case 225: // AltGr
                        // ignore modifier keys alone
                        break;
                    default:
                        leaveBlankOut();
                        break;
                    }
                } else {
                    switch (key) {
                    case 8: // backspace
                    case 33: // pageUp
                    case 37: // left arrow
                    case 38: // up arrow
                    case 80: // key 'p'
                        self.showPreviousPage();
                        break;
                    case 13: // enter
                    case 34: // pageDown
                    case 39: // right arrow
                    case 40: // down arrow
                    case 78: // key 'n'
                        self.showNextPage();
                        break;
                    case 32: // space
                        shiftKey ? self.showPreviousPage() : self.showNextPage();
                        break;
                    case 66: // key 'b' blanks screen (to black) or returns to the document
                    case 190: // and so does the key '.' (dot)
                        if (presentationMode) {
                            blankOut('#000');
                        }
                        break;
                    case 87: // key 'w' blanks page (to white) or returns to the document
                    case 188: // and so does the key ',' (comma)
                        if (presentationMode) {
                            blankOut('#FFF');
                        }
                        break;
                    case 36: // key 'Home' goes to first page
                        self.showPage(1);
                        break;
                    case 35: // key 'End' goes to last page
                        self.showPage(pages.length);
                        break;
                    }
                }
            });
        }
    }

    init();
}
| apache-2.0 |
signed/intellij-community | platform/analysis-impl/src/com/intellij/codeInspection/ProblemDescriptorBase.java | 8490 | /*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection;
import com.intellij.lang.annotation.ProblemGroup;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.colors.TextAttributesKey;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import com.intellij.pom.Navigatable;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Base implementation of {@link ProblemDescriptor} which anchors an inspection
 * problem to a range of physical PSI elements. The anchor elements are stored
 * as {@link SmartPsiElementPointer}s so they can be re-resolved after the PSI
 * tree changes; range/line computations are therefore performed lazily.
 */
public class ProblemDescriptorBase extends CommonProblemDescriptorImpl implements ProblemDescriptor {
  private static final Logger LOG = Logger.getInstance("#com.intellij.codeInspection.ex.ProblemDescriptorImpl");

  // Start anchor of the problem range; never null.
  @NotNull private final SmartPsiElementPointer myStartSmartPointer;
  // End anchor; null when the problem covers a single element (start == end),
  // which avoids creating a second pointer for the common case.
  @Nullable private final SmartPsiElementPointer myEndSmartPointer;

  private final ProblemHighlightType myHighlightType;
  private Navigatable myNavigatable;
  private final boolean myAfterEndOfLine;
  // Optional highlight sub-range relative to the start element's text range.
  private final TextRange myTextRangeInElement;
  private final boolean myShowTooltip;
  private TextAttributesKey myEnforcedTextAttributes;
  // Cached document line number; -1 means "not computed yet" (see getLineNumber()).
  private int myLineNumber = -1;
  private ProblemGroup myProblemGroup;

  /**
   * Creates a descriptor for the range [startElement, endElement].
   *
   * @param startElement        first element of the problem range; must be physical
   * @param endElement          last element of the range; may be the same object as {@code startElement}
   * @param descriptionTemplate the problem message template
   * @param fixes               quick fixes to suggest, may be null
   * @param highlightType       how the problem is rendered in the editor
   * @param isAfterEndOfLine    whether the highlight is drawn after the end of the line
   * @param rangeInElement      optional sub-range (relative to {@code startElement}) to highlight, or null
   * @param showTooltip         whether a tooltip should be shown for the highlight
   * @param onTheFly            whether created during on-the-fly highlighting (not stored by this class)
   */
  public ProblemDescriptorBase(@NotNull PsiElement startElement,
                               @NotNull PsiElement endElement,
                               @NotNull String descriptionTemplate,
                               LocalQuickFix[] fixes,
                               @NotNull ProblemHighlightType highlightType,
                               boolean isAfterEndOfLine,
                               @Nullable TextRange rangeInElement,
                               final boolean showTooltip,
                               boolean onTheFly) {
    super(fixes, descriptionTemplate);
    myShowTooltip = showTooltip;
    // Validate the anchors eagerly: they must be valid and physical, otherwise
    // the smart pointers created below would be useless.
    PsiFile startContainingFile = startElement.getContainingFile();
    LOG.assertTrue(startContainingFile != null && startContainingFile.isValid() || startElement.isValid(), startElement);
    PsiFile endContainingFile = startElement == endElement ? startContainingFile : endElement.getContainingFile();
    LOG.assertTrue(startElement == endElement || endContainingFile != null && endContainingFile.isValid() || endElement.isValid(), endElement);
    assertPhysical(startElement);
    if (startElement != endElement) assertPhysical(endElement);
    final TextRange startElementRange = startElement.getTextRange();
    LOG.assertTrue(startElementRange != null, startElement);
    final TextRange endElementRange = endElement.getTextRange();
    LOG.assertTrue(endElementRange != null, endElement);
    // An empty range is only tolerated for whole-file problems (empty files).
    if (startElementRange.getStartOffset() >= endElementRange.getEndOffset()) {
      if (!(startElement instanceof PsiFile && endElement instanceof PsiFile)) {
        LOG.error("Empty PSI elements should not be passed to createDescriptor. Start: " + startElement + ", end: " + endElement + ", startContainingFile: " + startContainingFile);
      }
    }
    myHighlightType = highlightType;
    final Project project = startContainingFile == null ? startElement.getProject() : startContainingFile.getProject();
    final SmartPointerManager manager = SmartPointerManager.getInstance(project);
    myStartSmartPointer = manager.createSmartPsiElementPointer(startElement, startContainingFile);
    // Share the start pointer implicitly for single-element problems.
    myEndSmartPointer = startElement == endElement ? null : manager.createSmartPsiElementPointer(endElement, endContainingFile);
    myAfterEndOfLine = isAfterEndOfLine;
    myTextRangeInElement = rangeInElement;
  }

  /** Logs an error if {@code element} is not physical; anchoring requires elements from the real source tree. */
  protected void assertPhysical(final PsiElement element) {
    if (!element.isPhysical()) {
      LOG.error("Non-physical PsiElement. Physical element is required to be able to anchor the problem in the source tree: " +
                element + "; file: " + element.getContainingFile());
    }
  }

  /**
   * Returns the element the problem is reported on: the start element itself
   * for single-element problems, otherwise the common parent of the
   * (re-resolved) start and end elements. May return null if an anchor can no
   * longer be resolved.
   */
  @Override
  public PsiElement getPsiElement() {
    PsiElement startElement = getStartElement();
    if (myEndSmartPointer == null) {
      return startElement;
    }
    PsiElement endElement = getEndElement();
    if (startElement == endElement) {
      return startElement;
    }
    if (startElement == null || endElement == null) return null;
    return PsiTreeUtil.findCommonParent(startElement, endElement);
  }

  /** @return the highlight sub-range relative to the start element, or null if the whole range is highlighted. */
  @Override
  @Nullable
  public TextRange getTextRangeInElement() {
    return myTextRangeInElement;
  }

  /** Re-resolves and returns the start anchor; null if it became invalid. */
  @Override
  public PsiElement getStartElement() {
    return myStartSmartPointer.getElement();
  }

  /** Re-resolves and returns the end anchor; falls back to the start element for single-element problems. */
  @Override
  public PsiElement getEndElement() {
    return myEndSmartPointer == null ? getStartElement() : myEndSmartPointer.getElement();
  }

  /**
   * Lazily computes and caches the document line of the problem in the
   * top-level (host) file, translating injected-fragment offsets to host
   * offsets first. Returns -1 when the anchors are invalid or no
   * document/range is available.
   */
  @Override
  public int getLineNumber() {
    if (myLineNumber == -1) {
      PsiElement psiElement = getPsiElement();
      if (psiElement == null) return -1;
      if (!psiElement.isValid()) return -1;
      LOG.assertTrue(psiElement.isPhysical());
      InjectedLanguageManager manager = InjectedLanguageManager.getInstance(psiElement.getProject());
      PsiFile containingFile = manager.getTopLevelFile(psiElement);
      Document document = PsiDocumentManager.getInstance(psiElement.getProject()).getDocument(containingFile);
      if (document == null) return -1;
      TextRange textRange = getTextRange();
      if (textRange == null) return -1;
      // Map offsets from an injected fragment (if any) into the host document.
      textRange = manager.injectedToHost(psiElement, textRange);
      final int startOffset = textRange.getStartOffset();
      final int textLength = document.getTextLength();
      LOG.assertTrue(startOffset <= textLength, getDescriptionTemplate() + " at " + startOffset + ", " + textLength);
      myLineNumber =  document.getLineNumber(startOffset);
    }
    return myLineNumber;
  }

  @NotNull
  @Override
  public ProblemHighlightType getHighlightType() {
    return myHighlightType;
  }

  @Override
  public boolean isAfterEndOfLine() {
    return myAfterEndOfLine;
  }

  /** Overrides the text attributes used to render the highlight. */
  @Override
  public void setTextAttributes(TextAttributesKey key) {
    myEnforcedTextAttributes = key;
  }

  public TextAttributesKey getEnforcedTextAttributes() {
    return myEnforcedTextAttributes;
  }

  /** Same as {@link #getTextRange()} but converted to host-file coordinates, for navigation from injected fragments. */
  public TextRange getTextRangeForNavigation() {
    TextRange textRange = getTextRange();
    if (textRange == null) return null;
    PsiElement element = getPsiElement();
    return InjectedLanguageManager.getInstance(element.getProject()).injectedToHost(element, textRange);
  }

  /**
   * Computes the problem's text range within its file. For single-element
   * problems this honors {@code myTextRangeInElement} (a relative sub-range)
   * and the after-end-of-line flag (an empty range at the element's end);
   * otherwise the range spans from the start element's start offset to the
   * end element's end offset. Returns null if an anchor can no longer be
   * resolved.
   */
  public TextRange getTextRange() {
    PsiElement startElement = getStartElement();
    PsiElement endElement = myEndSmartPointer == null ? startElement : getEndElement();
    if (startElement == null || endElement == null) {
      return null;
    }
    TextRange textRange = startElement.getTextRange();
    if (startElement == endElement) {
      if (isAfterEndOfLine()) return new TextRange(textRange.getEndOffset(), textRange.getEndOffset());
      if (myTextRangeInElement != null) {
        return new TextRange(textRange.getStartOffset() + myTextRangeInElement.getStartOffset(),
                             textRange.getStartOffset() + myTextRangeInElement.getEndOffset());
      }
      return textRange;
    }
    return new TextRange(textRange.getStartOffset(), endElement.getTextRange().getEndOffset());
  }

  public Navigatable getNavigatable() {
    return myNavigatable;
  }

  public void setNavigatable(final Navigatable navigatable) {
    myNavigatable = navigatable;
  }

  @Override
  @Nullable
  public ProblemGroup getProblemGroup() {
    return myProblemGroup;
  }

  @Override
  public void setProblemGroup(@Nullable ProblemGroup problemGroup) {
    myProblemGroup = problemGroup;
  }

  @Override
  public boolean showTooltip() {
    return myShowTooltip;
  }

  @Override
  public String toString() {
    PsiElement element = getPsiElement();
    return ProblemDescriptorUtil.renderDescriptionMessage(this, element);
  }
}
| apache-2.0 |
lisyoen/webida-server | src/server/emul/incubator-ripple/lib/client/platform/webworks.handset/2.0.0/server/category.js | 1718 | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
var db = ripple('db'),
_KEY = "blackberry-pim-category",
_self;
// Fetch the persisted category list; default to an empty array when
// nothing has been stored under the key yet.
function _get() {
    var stored = db.retrieveObject(_KEY);
    return stored ? stored : [];
}
// Persist a category name; duplicates are silently ignored.
function _save(category) {
    var existing = _get(),
        alreadyPresent = existing.indexOf(category) >= 0;
    if (!alreadyPresent) {
        existing.push(category);
        db.saveObject(_KEY, existing);
    }
}
// Delete a category name if present; a miss is a no-op (no save).
function _remove(category) {
    var items = _get();
    var position = items.indexOf(category);
    if (position !== -1) {
        items.splice(position, 1);
        db.saveObject(_KEY, items);
    }
}
// Public webworks server API. Each handler mirrors the device-side
// blackberry.pim category interface and signals success via {code: 1}.
_self = {
    // Add a new category (args.categoryName).
    addCategory: function (spec) {
        _save(spec.categoryName);
        return {code: 1};
    },
    // Remove an existing category (args.categoryName).
    deleteCategory: function (spec) {
        _remove(spec.categoryName);
        return {code: 1};
    },
    // Return the full category list in the response's data field.
    getCategories: function () {
        var categories = _get();
        return {code: 1, data: categories};
    }
};

module.exports = _self;
| apache-2.0 |
fanyon/flink | flink-runtime/src/test/java/org/apache/flink/runtime/rpc/AsyncCallsTest.java | 5920 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.rpc;
import akka.actor.ActorSystem;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.core.testutils.OneShotLatch;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.concurrent.Future;
import org.apache.flink.runtime.rpc.akka.AkkaRpcService;
import org.apache.flink.util.TestLogger;
import org.junit.AfterClass;
import org.junit.Test;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;
import static org.junit.Assert.*;
/**
 * Tests that {@code runAsync}, {@code callAsync} and {@code scheduleRunAsync}
 * execute their payloads in the endpoint's main thread, i.e. never
 * concurrently with RPC invocations on the same endpoint.
 *
 * <p>The detection trick: every payload and every RPC method briefly acquires
 * a shared {@link ReentrantLock} via {@code tryLock()}. If all work is
 * serialized in one thread, {@code tryLock()} always succeeds; a failed
 * {@code tryLock()} means two payloads overlapped, and a flag is raised.
 */
public class AsyncCallsTest extends TestLogger {

	// ------------------------------------------------------------------------
	//  shared test members
	// ------------------------------------------------------------------------

	private static final ActorSystem actorSystem = AkkaUtils.createDefaultActorSystem();

	private static final AkkaRpcService akkaRpcService =
			new AkkaRpcService(actorSystem, Time.milliseconds(10000L));

	@AfterClass
	public static void shutdown() {
		akkaRpcService.stopService();
		actorSystem.shutdown();
	}

	// ------------------------------------------------------------------------
	//  tests
	// ------------------------------------------------------------------------

	/**
	 * Mixes RPC calls, many {@code runAsync} payloads and one {@code callAsync}
	 * and verifies that none of them ever ran concurrently.
	 */
	@Test
	public void testScheduleWithNoDelay() throws Exception {

		// to collect all the thread references
		final ReentrantLock lock = new ReentrantLock();
		final AtomicBoolean concurrentAccess = new AtomicBoolean(false);

		TestEndpoint testEndpoint = new TestEndpoint(akkaRpcService, lock);
		testEndpoint.start();
		TestGateway gateway = testEndpoint.getSelf();

		// a bunch of gateway calls
		gateway.someCall();
		gateway.anotherCall();
		gateway.someCall();

		// run something asynchronously
		for (int i = 0; i < 10000; i++) {
			testEndpoint.runAsync(new Runnable() {
				@Override
				public void run() {
					// flag concurrent execution if another payload holds the lock
					boolean holdsLock = lock.tryLock();
					if (holdsLock) {
						lock.unlock();
					} else {
						concurrentAccess.set(true);
					}
				}
			});
		}

		Future<String> result = testEndpoint.callAsync(new Callable<String>() {
			@Override
			public String call() throws Exception {
				boolean holdsLock = lock.tryLock();
				if (holdsLock) {
					lock.unlock();
				} else {
					concurrentAccess.set(true);
				}
				return "test";
			}
		}, Time.seconds(30L));

		// blocks until the callAsync payload ran; also validates its result
		String str = result.get(30, TimeUnit.SECONDS);
		assertEquals("test", str);

		// validate that no concurrent access happened
		assertFalse("Rpc Endpoint had concurrent access", testEndpoint.hasConcurrentAccess());
		assertFalse("Rpc Endpoint had concurrent access", concurrentAccess.get());

		akkaRpcService.stopServer(testEndpoint.getSelf());
	}

	/**
	 * Verifies that {@code scheduleRunAsync} runs its payload without
	 * concurrency AND not before the requested delay has elapsed.
	 */
	@Test
	public void testScheduleWithDelay() throws Exception {

		// to collect all the thread references
		final ReentrantLock lock = new ReentrantLock();
		final AtomicBoolean concurrentAccess = new AtomicBoolean(false);
		final OneShotLatch latch = new OneShotLatch();

		final long delay = 100;

		TestEndpoint testEndpoint = new TestEndpoint(akkaRpcService, lock);
		testEndpoint.start();
		// NOTE(review): the endpoint is not stopped at the end of this test,
		// unlike testScheduleWithNoDelay — confirm whether this is intentional.

		// run something asynchronously
		testEndpoint.runAsync(new Runnable() {
			@Override
			public void run() {
				boolean holdsLock = lock.tryLock();
				if (holdsLock) {
					lock.unlock();
				} else {
					concurrentAccess.set(true);
				}
			}
		});

		final long start = System.nanoTime();

		testEndpoint.scheduleRunAsync(new Runnable() {
			@Override
			public void run() {
				boolean holdsLock = lock.tryLock();
				if (holdsLock) {
					lock.unlock();
				} else {
					concurrentAccess.set(true);
				}
				// release the waiting main test thread
				latch.trigger();
			}
		}, delay, TimeUnit.MILLISECONDS);

		latch.await();
		final long stop = System.nanoTime();

		// validate that no concurrent access happened
		assertFalse("Rpc Endpoint had concurrent access", testEndpoint.hasConcurrentAccess());
		assertFalse("Rpc Endpoint had concurrent access", concurrentAccess.get());

		// nanoseconds -> milliseconds before comparing against the delay
		assertTrue("call was not properly delayed", ((stop - start) / 1_000_000) >= delay);
	}

	// ------------------------------------------------------------------------
	//  test RPC endpoint
	// ------------------------------------------------------------------------

	/** Gateway with two no-result RPC methods used to interleave calls. */
	public interface TestGateway extends RpcGateway {

		void someCall();

		void anotherCall();
	}

	/**
	 * Endpoint whose RPC methods participate in the same tryLock-based
	 * concurrency detection as the async payloads in the tests above.
	 */
	@SuppressWarnings("unused")
	public static class TestEndpoint extends RpcEndpoint<TestGateway> {

		private final ReentrantLock lock;

		// written from the endpoint's main thread, read from the test thread
		private volatile boolean concurrentAccess;

		public TestEndpoint(RpcService rpcService, ReentrantLock lock) {
			super(rpcService);
			this.lock = lock;
		}

		@RpcMethod
		public void someCall() {
			boolean holdsLock = lock.tryLock();
			if (holdsLock) {
				lock.unlock();
			} else {
				concurrentAccess = true;
			}
		}

		@RpcMethod
		public void anotherCall() {
			boolean holdsLock = lock.tryLock();
			if (holdsLock) {
				lock.unlock();
			} else {
				concurrentAccess = true;
			}
		}

		public boolean hasConcurrentAccess() {
			return concurrentAccess;
		}
	}
}
| apache-2.0 |
vergilchiu/hive | ql/src/java/org/apache/hadoop/hive/ql/hooks/Hook.java | 964 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.hooks;
/**
 * Common marker interface implemented by all Hive query-lifecycle hooks,
 * including the pre-execute and post-execute hooks. Concrete hook contracts
 * are defined by the sub-interfaces/implementations that build on this type.
 */
public interface Hook {
}
| apache-2.0 |
gotroy/elasticsearch | src/main/java/org/elasticsearch/index/query/MatchQueryParser.java | 9509 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.queries.ExtendedCommonTermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.QueryParsers;
import org.elasticsearch.index.search.MatchQuery;
import java.io.IOException;
/**
*
*/
/**
 * Parser for the {@code match} family of queries ({@code match},
 * {@code match_phrase}, {@code match_phrase_prefix} and their aliases).
 * Builds a {@link MatchQuery} from the request content and applies common
 * options such as operator, fuzziness, slop and minimum_should_match.
 */
public class MatchQueryParser implements QueryParser {

    public static final String NAME = "match";

    @Inject
    public MatchQueryParser() {
    }

    @Override
    public String[] names() {
        return new String[]{
                NAME, "match_phrase", "matchPhrase", "match_phrase_prefix", "matchPhrasePrefix", "matchFuzzy", "match_fuzzy", "fuzzy_match"
        };
    }

    /**
     * Parses a match query. Supports both the full object form
     * ({@code {"match": {"field": {"query": ..., options...}}}}) and the
     * simplified form ({@code {"match": {"field": "text"}}}).
     *
     * @throws QueryParsingException on malformed input or unsupported options
     */
    @Override
    public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
        XContentParser parser = parseContext.parser();

        // The element name the query was registered under selects the default type.
        MatchQuery.Type type = MatchQuery.Type.BOOLEAN;
        if ("match_phrase".equals(parser.currentName()) || "matchPhrase".equals(parser.currentName()) ||
                "text_phrase".equals(parser.currentName()) || "textPhrase".equals(parser.currentName())) {
            type = MatchQuery.Type.PHRASE;
        } else if ("match_phrase_prefix".equals(parser.currentName()) || "matchPhrasePrefix".equals(parser.currentName()) ||
                "text_phrase_prefix".equals(parser.currentName()) || "textPhrasePrefix".equals(parser.currentName())) {
            type = MatchQuery.Type.PHRASE_PREFIX;
        }

        XContentParser.Token token = parser.nextToken();
        if (token != XContentParser.Token.FIELD_NAME) {
            throw new QueryParsingException(parseContext.index(), "[match] query malformed, no field");
        }
        String fieldName = parser.currentName();

        Object value = null;
        float boost = 1.0f;
        MatchQuery matchQuery = new MatchQuery(parseContext);
        String minimumShouldMatch = null;
        String queryName = null;

        token = parser.nextToken();
        if (token == XContentParser.Token.START_OBJECT) {
            String currentFieldName = null;
            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (token == XContentParser.Token.FIELD_NAME) {
                    currentFieldName = parser.currentName();
                } else if (token.isValue()) {
                    if ("query".equals(currentFieldName)) {
                        value = parser.objectText();
                    } else if ("type".equals(currentFieldName)) {
                        String tStr = parser.text();
                        if ("boolean".equals(tStr)) {
                            type = MatchQuery.Type.BOOLEAN;
                        } else if ("phrase".equals(tStr)) {
                            type = MatchQuery.Type.PHRASE;
                        // bugfix: the camel-case value was compared against currentFieldName
                        // (always "type" here) instead of tStr, so "phrasePrefix" was rejected
                        } else if ("phrase_prefix".equals(tStr) || "phrasePrefix".equals(tStr)) {
                            type = MatchQuery.Type.PHRASE_PREFIX;
                        } else {
                            throw new QueryParsingException(parseContext.index(), "[match] query does not support type " + tStr);
                        }
                    } else if ("analyzer".equals(currentFieldName)) {
                        String analyzer = parser.text();
                        if (parseContext.analysisService().analyzer(analyzer) == null) {
                            throw new QueryParsingException(parseContext.index(), "[match] analyzer [" + parser.text() + "] not found");
                        }
                        matchQuery.setAnalyzer(analyzer);
                    } else if ("boost".equals(currentFieldName)) {
                        boost = parser.floatValue();
                    } else if ("slop".equals(currentFieldName) || "phrase_slop".equals(currentFieldName) || "phraseSlop".equals(currentFieldName)) {
                        matchQuery.setPhraseSlop(parser.intValue());
                    } else if (Fuzziness.FIELD.match(currentFieldName, parseContext.parseFlags())) {
                        matchQuery.setFuzziness(Fuzziness.parse(parser));
                    } else if ("prefix_length".equals(currentFieldName) || "prefixLength".equals(currentFieldName)) {
                        matchQuery.setFuzzyPrefixLength(parser.intValue());
                    } else if ("max_expansions".equals(currentFieldName) || "maxExpansions".equals(currentFieldName)) {
                        matchQuery.setMaxExpansions(parser.intValue());
                    } else if ("operator".equals(currentFieldName)) {
                        String op = parser.text();
                        if ("or".equalsIgnoreCase(op)) {
                            matchQuery.setOccur(BooleanClause.Occur.SHOULD);
                        } else if ("and".equalsIgnoreCase(op)) {
                            matchQuery.setOccur(BooleanClause.Occur.MUST);
                        } else {
                            throw new QueryParsingException(parseContext.index(), "text query requires operator to be either 'and' or 'or', not [" + op + "]");
                        }
                    } else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) {
                        minimumShouldMatch = parser.textOrNull();
                    } else if ("rewrite".equals(currentFieldName)) {
                        matchQuery.setRewriteMethod(QueryParsers.parseRewriteMethod(parser.textOrNull(), null));
                    } else if ("fuzzy_rewrite".equals(currentFieldName) || "fuzzyRewrite".equals(currentFieldName)) {
                        matchQuery.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(parser.textOrNull(), null));
                    // bugfix: was compared against fieldName (the field being queried)
                    // instead of currentFieldName, so this option could never be parsed
                    } else if ("fuzzy_transpositions".equals(currentFieldName)) {
                        matchQuery.setTranspositions(parser.booleanValue());
                    } else if ("lenient".equals(currentFieldName)) {
                        matchQuery.setLenient(parser.booleanValue());
                    } else if ("cutoff_frequency".equals(currentFieldName)) {
                        matchQuery.setCommonTermsCutoff(parser.floatValue());
                    } else if ("zero_terms_query".equals(currentFieldName)) {
                        String zeroTermsDocs = parser.text();
                        if ("none".equalsIgnoreCase(zeroTermsDocs)) {
                            matchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.NONE);
                        } else if ("all".equalsIgnoreCase(zeroTermsDocs)) {
                            matchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL);
                        } else {
                            throw new QueryParsingException(parseContext.index(), "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
                        }
                    } else if ("_name".equals(currentFieldName)) {
                        queryName = parser.text();
                    } else {
                        throw new QueryParsingException(parseContext.index(), "[match] query does not support [" + currentFieldName + "]");
                    }
                }
            }
            parser.nextToken();
        } else {
            // simplified form: the field's value is the query text itself
            value = parser.objectText();
            // move to the next token
            token = parser.nextToken();
            if (token != XContentParser.Token.END_OBJECT) {
                throw new QueryParsingException(parseContext.index(), "[match] query parsed in simplified form, with direct field name, but included more options than just the field name, possibly use its 'options' form, with 'query' element?");
            }
        }

        if (value == null) {
            throw new QueryParsingException(parseContext.index(), "No text specified for text query");
        }

        Query query = matchQuery.parse(type, fieldName, value);
        if (query == null) {
            return null;
        }

        // minimum_should_match only applies to the query shapes that support it
        if (query instanceof BooleanQuery) {
            Queries.applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch);
        } else if (query instanceof ExtendedCommonTermsQuery) {
            ((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch);
        }
        query.setBoost(boost);
        if (queryName != null) {
            parseContext.addNamedQuery(queryName, query);
        }
        return query;
    }
}
| apache-2.0 |
lintzc/gpdb | src/test/tinc/tincrepo/mpp/gpdb/tests/storage/pg_twophase/test_pg_twophase_11_20.py | 2681 | """
Copyright (C) 2004-2015 Pivotal Software, Inc. All rights reserved.
This program and the accompanying materials are made available under
the terms of the under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import tinctest
from mpp.gpdb.tests.storage.pg_twophase.pg_twophase import PgtwoPhaseTestCase
class PgtwoPhase11To20(PgtwoPhaseTestCase):
    '''
    Testing state of prepared transactions upon crash-recovery
    @gucs gp_create_table_random_default_distribution=off
    '''

    def __init__(self, methodName):
        super(PgtwoPhase11To20, self).__init__(methodName)

    def test_execute_split_sqls_11_20(self):
        '''
        @data_provider data_types_provider
        '''
        # Each data-provider entry is
        # [skip_state, cluster_state, ddl_type, fault_type, crash_type].
        scenario = self.test_data[1]
        skip_state = scenario[0]
        cluster_state = scenario[1]
        ddl_type = scenario[2]
        fault_type = scenario[3]
        crash_type = scenario[4]
        self.execute_split_sqls(skip_state, cluster_state, ddl_type, fault_type, crash_type)
@tinctest.dataProvider('data_types_provider')
def test_data_provider():
    # scenario id -> [skip_state, cluster_state, ddl_type, fault_type, crash_type]
    scenarios = [
        ('11_noskip_change_tracking_create_commit_gpstop_i', ['noskip', 'change_tracking', 'create', 'commit', 'gpstop_i']),
        ('12_noskip_change_tracking_create_commit_gpstop_a', ['noskip', 'change_tracking', 'create', 'commit', 'gpstop_a']),
        ('13_skip_resync_create_commit_gpstop_i', ['skip', 'resync', 'create', 'commit', 'gpstop_i']),
        ('14_skip_resync_create_commit_gpstop_a', ['skip', 'resync', 'create', 'commit', 'gpstop_a']),
        ('15_noskip_resync_create_commit_gpstop_i', ['noskip', 'resync', 'create', 'commit', 'gpstop_i']),
        ('16_noskip_resync_create_commit_gpstop_a', ['noskip', 'resync', 'create', 'commit', 'gpstop_a']),
        ('17_skip_sync_drop_commit_gpstop_i', ['skip', 'sync', 'drop', 'commit', 'gpstop_i']),
        ('18_skip_sync_drop_commit_gpstop_a', ['skip', 'sync', 'drop', 'commit', 'gpstop_a']),
        ('19_skip_sync_drop_commit_failover_to_primary', ['skip', 'sync', 'drop', 'commit', 'failover_to_primary']),
        ('20_skip_sync_drop_commit_failover_to_mirror', ['skip', 'sync', 'drop', 'commit', 'failover_to_mirror']),
    ]
    return dict(scenarios)
| apache-2.0 |
vroyer/elassandra | server/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java | 12870 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingHelper;
import org.elasticsearch.cluster.routing.UnassignedInfo;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.test.ESTestCase;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
import static org.hamcrest.Matchers.equalTo;
public class DiskUsageTests extends ESTestCase {
public void testDiskUsageCalc() {
DiskUsage du = new DiskUsage("node1", "n1", "random", 100, 40);
assertThat(du.getFreeDiskAsPercentage(), equalTo(40.0));
assertThat(du.getUsedDiskAsPercentage(), equalTo(100.0 - 40.0));
assertThat(du.getFreeBytes(), equalTo(40L));
assertThat(du.getUsedBytes(), equalTo(60L));
assertThat(du.getTotalBytes(), equalTo(100L));
// Test that DiskUsage handles invalid numbers, as reported by some
// filesystems (ZFS & NTFS)
DiskUsage du2 = new DiskUsage("node1", "n1","random", 100, 101);
assertThat(du2.getFreeDiskAsPercentage(), equalTo(101.0));
assertThat(du2.getFreeBytes(), equalTo(101L));
assertThat(du2.getUsedBytes(), equalTo(-1L));
assertThat(du2.getTotalBytes(), equalTo(100L));
DiskUsage du3 = new DiskUsage("node1", "n1", "random",-1, -1);
assertThat(du3.getFreeDiskAsPercentage(), equalTo(100.0));
assertThat(du3.getFreeBytes(), equalTo(-1L));
assertThat(du3.getUsedBytes(), equalTo(0L));
assertThat(du3.getTotalBytes(), equalTo(-1L));
DiskUsage du4 = new DiskUsage("node1", "n1","random", 0, 0);
assertThat(du4.getFreeDiskAsPercentage(), equalTo(100.0));
assertThat(du4.getFreeBytes(), equalTo(0L));
assertThat(du4.getUsedBytes(), equalTo(0L));
assertThat(du4.getTotalBytes(), equalTo(0L));
}
public void testRandomDiskUsage() {
int iters = scaledRandomIntBetween(1000, 10000);
for (int i = 1; i < iters; i++) {
long total = between(Integer.MIN_VALUE, Integer.MAX_VALUE);
long free = between(Integer.MIN_VALUE, Integer.MAX_VALUE);
DiskUsage du = new DiskUsage("random", "random", "random", total, free);
if (total == 0) {
assertThat(du.getFreeBytes(), equalTo(free));
assertThat(du.getTotalBytes(), equalTo(0L));
assertThat(du.getUsedBytes(), equalTo(-free));
assertThat(du.getFreeDiskAsPercentage(), equalTo(100.0));
assertThat(du.getUsedDiskAsPercentage(), equalTo(0.0));
} else {
assertThat(du.getFreeBytes(), equalTo(free));
assertThat(du.getTotalBytes(), equalTo(total));
assertThat(du.getUsedBytes(), equalTo(total - free));
assertThat(du.getFreeDiskAsPercentage(), equalTo(100.0 * ((double) free / total)));
assertThat(du.getUsedDiskAsPercentage(), equalTo(100.0 - (100.0 * ((double) free / total))));
}
}
}
/**
 * Verifies that {@code InternalClusterInfoService.buildShardLevelInfo} extracts
 * per-shard store sizes (keyed by the shard identifier derived from the routing
 * entry) and maps each shard routing to the data-path root it lives on.
 */
public void testFillShardLevelInfo() {
    final Index index = new Index("test", "0xdeadbeef");
    // Two started replica shards on different nodes with store sizes 100 and 1000.
    ShardRouting test_0 = ShardRouting.newUnassigned(new ShardId(index, 0), false, PeerRecoverySource.INSTANCE,
        new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
    test_0 = ShardRoutingHelper.initialize(test_0, "node1");
    test_0 = ShardRoutingHelper.moveToStarted(test_0);
    Path test0Path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve("0");
    CommonStats commonStats0 = new CommonStats();
    commonStats0.store = new StoreStats(100);
    ShardRouting test_1 = ShardRouting.newUnassigned(new ShardId(index, 1), false, PeerRecoverySource.INSTANCE,
        new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"));
    test_1 = ShardRoutingHelper.initialize(test_1, "node2");
    test_1 = ShardRoutingHelper.moveToStarted(test_1);
    Path test1Path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve("1");
    CommonStats commonStats1 = new CommonStats();
    commonStats1.store = new StoreStats(1000);
    ShardStats[] stats = new ShardStats[] {
        new ShardStats(test_0, new ShardPath(false, test0Path, test0Path, test_0.shardId()), commonStats0 , null, null, null),
        new ShardStats(test_1, new ShardPath(false, test1Path, test1Path, test_1.shardId()), commonStats1 , null, null, null)
    };
    ImmutableOpenMap.Builder<String, Long> shardSizes = ImmutableOpenMap.builder();
    ImmutableOpenMap.Builder<ShardRouting, String> routingToPath = ImmutableOpenMap.builder();
    ClusterState state = ClusterState.builder(new ClusterName("blarg")).version(0).build();
    InternalClusterInfoService.buildShardLevelInfo(logger, stats, shardSizes, routingToPath, state);
    assertEquals(2, shardSizes.size());
    assertTrue(shardSizes.containsKey(ClusterInfo.shardIdentifierFromRouting(test_0)));
    assertTrue(shardSizes.containsKey(ClusterInfo.shardIdentifierFromRouting(test_1)));
    assertEquals(100L, shardSizes.get(ClusterInfo.shardIdentifierFromRouting(test_0)).longValue());
    assertEquals(1000L, shardSizes.get(ClusterInfo.shardIdentifierFromRouting(test_1)).longValue());
    assertEquals(2, routingToPath.size());
    assertTrue(routingToPath.containsKey(test_0));
    assertTrue(routingToPath.containsKey(test_1));
    // The reported path is the data-path root, i.e. three levels above the
    // <root>/indices/<uuid>/<shard> directory constructed above.
    assertEquals(test0Path.getParent().getParent().getParent().toAbsolutePath().toString(), routingToPath.get(test_0));
    assertEquals(test1Path.getParent().getParent().getParent().toAbsolutePath().toString(), routingToPath.get(test_1));
}
/**
 * Verifies that {@code InternalClusterInfoService.fillDiskUsagePerNode} picks,
 * for every node, the filesystem path with the least and the most available
 * space out of that node's {@link FsInfo} paths.
 */
public void testFillDiskUsage() {
    // Locals renamed from the misspelled "...AvaiableUsages" for consistency
    // with testFillDiskUsageSomeInvalidValues below.
    ImmutableOpenMap.Builder<String, DiskUsage> newLeastAvailableUsages = ImmutableOpenMap.builder();
    ImmutableOpenMap.Builder<String, DiskUsage> newMostAvailableUsages = ImmutableOpenMap.builder();
    // node_1: /least has the smallest available space (70), /most the largest (280).
    FsInfo.Path[] node1FSInfo = new FsInfo.Path[] {
        new FsInfo.Path("/middle", "/dev/sda", 100, 90, 80),
        new FsInfo.Path("/least", "/dev/sdb", 200, 190, 70),
        new FsInfo.Path("/most", "/dev/sdc", 300, 290, 280),
    };
    // node_2: a single path is both the least- and most-available one.
    FsInfo.Path[] node2FSInfo = new FsInfo.Path[] {
        new FsInfo.Path("/least_most", "/dev/sda", 100, 90, 80),
    };
    FsInfo.Path[] node3FSInfo = new FsInfo.Path[] {
        new FsInfo.Path("/least", "/dev/sda", 100, 90, 70),
        new FsInfo.Path("/most", "/dev/sda", 100, 90, 80),
    };
    List<NodeStats> nodeStats = Arrays.asList(
        new NodeStats(new DiscoveryNode("node_1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0,
            null, null, null, null, null, new FsInfo(0, null, node1FSInfo), null, null, null, null, null, null, null),
        new NodeStats(new DiscoveryNode("node_2", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0,
            null, null, null, null, null, new FsInfo(0, null, node2FSInfo), null, null, null, null, null, null, null),
        new NodeStats(new DiscoveryNode("node_3", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0,
            null, null, null, null, null, new FsInfo(0, null, node3FSInfo), null, null, null, null, null, null, null)
    );
    InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvailableUsages, newMostAvailableUsages);
    DiskUsage leastNode_1 = newLeastAvailableUsages.get("node_1");
    DiskUsage mostNode_1 = newMostAvailableUsages.get("node_1");
    assertDiskUsage(mostNode_1, node1FSInfo[2]);
    assertDiskUsage(leastNode_1, node1FSInfo[1]);
    DiskUsage leastNode_2 = newLeastAvailableUsages.get("node_2");
    DiskUsage mostNode_2 = newMostAvailableUsages.get("node_2");
    assertDiskUsage(leastNode_2, node2FSInfo[0]);
    assertDiskUsage(mostNode_2, node2FSInfo[0]);
    DiskUsage leastNode_3 = newLeastAvailableUsages.get("node_3");
    DiskUsage mostNode_3 = newMostAvailableUsages.get("node_3");
    assertDiskUsage(leastNode_3, node3FSInfo[0]);
    assertDiskUsage(mostNode_3, node3FSInfo[1]);
}
/**
 * Like {@link #testFillDiskUsage()} but with some paths reporting -1 (invalid)
 * totals: those paths, and nodes that have only invalid paths, must be skipped
 * rather than reported as least/most available usage.
 */
public void testFillDiskUsageSomeInvalidValues() {
    ImmutableOpenMap.Builder<String, DiskUsage> newLeastAvailableUsages = ImmutableOpenMap.builder();
    ImmutableOpenMap.Builder<String, DiskUsage> newMostAvailableUsages = ImmutableOpenMap.builder();
    // node_1: the least-available candidate has invalid (-1) stats, so only a
    // "most available" entry should be produced.
    FsInfo.Path[] node1FSInfo = new FsInfo.Path[] {
        new FsInfo.Path("/middle", "/dev/sda", 100, 90, 80),
        new FsInfo.Path("/least", "/dev/sdb", -1, -1, -1),
        new FsInfo.Path("/most", "/dev/sdc", 300, 290, 280),
    };
    // node_2: its only path is invalid, so the node should be skipped entirely.
    FsInfo.Path[] node2FSInfo = new FsInfo.Path[] {
        new FsInfo.Path("/least_most", "/dev/sda", -1, -1, -1),
    };
    // node_3: partially invalid stats (free == -1) but a valid total/available.
    FsInfo.Path[] node3FSInfo = new FsInfo.Path[] {
        new FsInfo.Path("/most", "/dev/sda", 100, 90, 70),
        new FsInfo.Path("/least", "/dev/sda", 10, -1, 0),
    };
    List<NodeStats> nodeStats = Arrays.asList(
        new NodeStats(new DiscoveryNode("node_1", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0,
            null, null, null, null, null, new FsInfo(0, null, node1FSInfo), null, null, null, null, null, null, null),
        new NodeStats(new DiscoveryNode("node_2", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0,
            null, null, null, null, null, new FsInfo(0, null, node2FSInfo), null, null, null, null, null, null, null),
        new NodeStats(new DiscoveryNode("node_3", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT), 0,
            null, null, null, null, null, new FsInfo(0, null, node3FSInfo), null, null, null, null, null, null, null)
    );
    InternalClusterInfoService.fillDiskUsagePerNode(logger, nodeStats, newLeastAvailableUsages, newMostAvailableUsages);
    DiskUsage leastNode_1 = newLeastAvailableUsages.get("node_1");
    DiskUsage mostNode_1 = newMostAvailableUsages.get("node_1");
    assertNull("node1 should have been skipped", leastNode_1);
    assertDiskUsage(mostNode_1, node1FSInfo[2]);
    DiskUsage leastNode_2 = newLeastAvailableUsages.get("node_2");
    DiskUsage mostNode_2 = newMostAvailableUsages.get("node_2");
    assertNull("node2 should have been skipped", leastNode_2);
    assertNull("node2 should have been skipped", mostNode_2);
    DiskUsage leastNode_3 = newLeastAvailableUsages.get("node_3");
    DiskUsage mostNode_3 = newMostAvailableUsages.get("node_3");
    assertDiskUsage(leastNode_3, node3FSInfo[1]);
    assertDiskUsage(mostNode_3, node3FSInfo[0]);
}
/**
 * Asserts that a {@code DiskUsage} entry mirrors the path, total, and
 * available bytes of the {@code FsInfo.Path} it was derived from.
 */
private void assertDiskUsage(DiskUsage actualUsage, FsInfo.Path expectedPath) {
    assertNotNull(actualUsage);
    assertNotNull(expectedPath);
    // Use the usage's string form as the failure message for all comparisons.
    final String context = actualUsage.toString();
    assertEquals(context, actualUsage.getPath(), expectedPath.getPath());
    assertEquals(context, actualUsage.getTotalBytes(), expectedPath.getTotal().getBytes());
    assertEquals(context, actualUsage.getFreeBytes(), expectedPath.getAvailable().getBytes());
}
}
| apache-2.0 |
google-research/tiny-differentiable-simulator | third_party/zeromq/src/epoll.cpp | 6298 | /*
Copyright (c) 2007-2016 Contributors as noted in the AUTHORS file
This file is part of libzmq, the ZeroMQ core engine in C++.
libzmq is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
As a special exception, the Contributors give you permission to link
this library with independent modules to produce an executable,
regardless of the license terms of these independent modules, and to
copy and distribute the resulting executable under terms of your choice,
provided that you also meet, for each linked independent module, the
terms and conditions of the license of that module. An independent
module is a module which is not derived from or based on this library.
If you modify this library, you must extend this exception to your
version of the library.
libzmq is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "precompiled.hpp"
#if defined ZMQ_IOTHREAD_POLLER_USE_EPOLL
#include "epoll.hpp"
#if !defined ZMQ_HAVE_WINDOWS
#include <unistd.h>
#endif
#include <stdlib.h>
#include <string.h>
#include <signal.h>
#include <algorithm>
#include <new>
#include "macros.hpp"
#include "err.hpp"
#include "config.hpp"
#include "i_poll_events.hpp"
#ifdef ZMQ_HAVE_WINDOWS
const zmq::epoll_t::epoll_fd_t zmq::epoll_t::epoll_retired_fd =
INVALID_HANDLE_VALUE;
#endif
//  Creates the poller and its kernel epoll instance. The worker thread is
//  managed separately via worker_poller_base_t.
zmq::epoll_t::epoll_t (const zmq::thread_ctx_t &ctx_) :
    worker_poller_base_t (ctx_)
{
#ifdef ZMQ_IOTHREAD_POLLER_USE_EPOLL_CLOEXEC
    //  Setting this option result in sane behaviour when exec() functions
    //  are used. Old sockets are closed and don't block TCP ports, avoid
    //  leaks, etc.
    _epoll_fd = epoll_create1 (EPOLL_CLOEXEC);
#else
    //  The size argument (1) is only a hint and is ignored by modern kernels.
    _epoll_fd = epoll_create (1);
#endif
    errno_assert (_epoll_fd != epoll_retired_fd);
}
//  Shuts the poller down: joins the worker thread, closes the epoll fd, and
//  frees any poll entries that were retired but not yet reclaimed by loop().
zmq::epoll_t::~epoll_t ()
{
    //  Wait till the worker thread exits.
    stop_worker ();
#ifdef ZMQ_HAVE_WINDOWS
    epoll_close (_epoll_fd);
#else
    close (_epoll_fd);
#endif
    for (retired_t::iterator it = _retired.begin (), end = _retired.end ();
         it != end; ++it) {
        LIBZMQ_DELETE (*it);
    }
}
//  Registers fd_ with the epoll set (initially with no events armed) and
//  returns an opaque handle for the subsequent set_*/reset_*/rm_fd calls.
//  events_ receives the in/out callbacks. Worker-thread only (check_thread).
zmq::epoll_t::handle_t zmq::epoll_t::add_fd (fd_t fd_, i_poll_events *events_)
{
    check_thread ();
    poll_entry_t *pe = new (std::nothrow) poll_entry_t;
    alloc_assert (pe);
    //  The memset is not actually needed. It's here to prevent debugging
    //  tools to complain about using uninitialised memory.
    memset (pe, 0, sizeof (poll_entry_t));
    pe->fd = fd_;
    pe->ev.events = 0;
    //  Stash the entry pointer in the epoll event so loop() can recover it.
    pe->ev.data.ptr = pe;
    pe->events = events_;
    int rc = epoll_ctl (_epoll_fd, EPOLL_CTL_ADD, fd_, &pe->ev);
    errno_assert (rc != -1);
    //  Increase the load metric of the thread.
    adjust_load (1);
    return pe;
}
//  Unregisters the fd from epoll. The entry itself is not freed here: it is
//  marked with retired_fd and queued so that loop() can skip it for the rest
//  of the current event batch and delete it afterwards.
void zmq::epoll_t::rm_fd (handle_t handle_)
{
    check_thread ();
    poll_entry_t *pe = static_cast<poll_entry_t *> (handle_);
    int rc = epoll_ctl (_epoll_fd, EPOLL_CTL_DEL, pe->fd, &pe->ev);
    errno_assert (rc != -1);
    pe->fd = retired_fd;
    _retired.push_back (pe);
    //  Decrease the load metric of the thread.
    adjust_load (-1);
}
//  Arms EPOLLIN on the handle so in_event () fires when the fd becomes readable.
void zmq::epoll_t::set_pollin (handle_t handle_)
{
    check_thread ();
    poll_entry_t *pe = static_cast<poll_entry_t *> (handle_);
    pe->ev.events |= EPOLLIN;
    int rc = epoll_ctl (_epoll_fd, EPOLL_CTL_MOD, pe->fd, &pe->ev);
    errno_assert (rc != -1);
}
//  Disarms EPOLLIN on the handle.
void zmq::epoll_t::reset_pollin (handle_t handle_)
{
    check_thread ();
    poll_entry_t *pe = static_cast<poll_entry_t *> (handle_);
    pe->ev.events &= ~(static_cast<short> (EPOLLIN));
    int rc = epoll_ctl (_epoll_fd, EPOLL_CTL_MOD, pe->fd, &pe->ev);
    errno_assert (rc != -1);
}
//  Arms EPOLLOUT on the handle so out_event () fires when the fd becomes writable.
void zmq::epoll_t::set_pollout (handle_t handle_)
{
    check_thread ();
    poll_entry_t *pe = static_cast<poll_entry_t *> (handle_);
    pe->ev.events |= EPOLLOUT;
    int rc = epoll_ctl (_epoll_fd, EPOLL_CTL_MOD, pe->fd, &pe->ev);
    errno_assert (rc != -1);
}
//  Disarms EPOLLOUT on the handle.
void zmq::epoll_t::reset_pollout (handle_t handle_)
{
    check_thread ();
    poll_entry_t *pe = static_cast<poll_entry_t *> (handle_);
    pe->ev.events &= ~(static_cast<short> (EPOLLOUT));
    int rc = epoll_ctl (_epoll_fd, EPOLL_CTL_MOD, pe->fd, &pe->ev);
    errno_assert (rc != -1);
}
//  Intentionally a no-op (beyond the thread check): the worker loop exits on
//  its own once the load drops to zero and no timers remain — see loop ().
void zmq::epoll_t::stop ()
{
    check_thread ();
}
//  -1 indicates this poller imposes no fixed fd limit of its own.
int zmq::epoll_t::max_fds ()
{
    return -1;
}
//  Worker-thread event loop: runs due timers, blocks in epoll_wait for the
//  next timer deadline (or forever when none), and dispatches events to the
//  registered i_poll_events handlers. Exits when there is no load and no
//  pending timer.
void zmq::epoll_t::loop ()
{
    epoll_event ev_buf[max_io_events];
    while (true) {
        //  Execute any due timers.
        int timeout = static_cast<int> (execute_timers ());
        if (get_load () == 0) {
            if (timeout == 0)
                break;
            //  TODO sleep for timeout
            continue;
        }
        //  Wait for events. A zero timeout means "no timers pending", so
        //  block indefinitely (-1).
        int n = epoll_wait (_epoll_fd, &ev_buf[0], max_io_events,
                            timeout ? timeout : -1);
        if (n == -1) {
            errno_assert (errno == EINTR);
            continue;
        }
        for (int i = 0; i < n; i++) {
            poll_entry_t *pe =
              (static_cast<poll_entry_t *> (ev_buf[i].data.ptr));
            //  The retired_fd re-checks guard against handles removed by an
            //  earlier callback within this same batch (see rm_fd).
            if (pe->fd == retired_fd)
                continue;
            //  Errors and hangups are surfaced via in_event so the handler
            //  discovers the condition on its next read attempt.
            if (ev_buf[i].events & (EPOLLERR | EPOLLHUP))
                pe->events->in_event ();
            if (pe->fd == retired_fd)
                continue;
            if (ev_buf[i].events & EPOLLOUT)
                pe->events->out_event ();
            if (pe->fd == retired_fd)
                continue;
            if (ev_buf[i].events & EPOLLIN)
                pe->events->in_event ();
        }
        //  Destroy retired event sources.
        for (retired_t::iterator it = _retired.begin (), end = _retired.end ();
             it != end; ++it) {
            LIBZMQ_DELETE (*it);
        }
        _retired.clear ();
    }
}
#endif
| apache-2.0 |
yuriyminin/yuriy.io | js/main.js | 6767 | /*
Multiverse by HTML5 UP
html5up.net | @n33co
Free for personal and commercial use under the CCA 3.0 license (html5up.net/license)
*/
// Site-wide UI bootstrap for the Multiverse template: registers skel
// breakpoints, then on DOM-ready wires up panels, header links, footer
// copyright repositioning, thumbnail backgrounds, and the Poptrox lightbox.
(function($) {

	skel.breakpoints({
		xlarge: '(max-width: 1680px)',
		large: '(max-width: 1280px)',
		medium: '(max-width: 980px)',
		small: '(max-width: 736px)',
		xsmall: '(max-width: 480px)'
	});

	$(function() {

		var $window = $(window),
			$body = $('body'),
			$wrapper = $('#wrapper');

		// Hack: Enable IE workarounds.
		if (skel.vars.IEVersion < 12)
			$body.addClass('ie');

		// Touch?
		if (skel.vars.mobile)
			$body.addClass('touch');

		// Transitions supported?
		if (skel.canUse('transition')) {
			// Add (and later, on load, remove) "loading" class.
			$body.addClass('loading');
			$window.on('load', function() {
				window.setTimeout(function() {
					$body.removeClass('loading');
				}, 100);
			});

			// Prevent transitions/animations on resize.
			var resizeTimeout;
			$window.on('resize', function() {
				window.clearTimeout(resizeTimeout);
				$body.addClass('resizing');
				resizeTimeout = window.setTimeout(function() {
					$body.removeClass('resizing');
				}, 100);
			});
		}

		// Scroll back to top.
		$window.scrollTop(0);

		// Fix: Placeholder polyfill.
		$('form').placeholder();

		// Panels.
		// Each .panel toggles via custom '---show'/'---hide'/'---toggle'
		// events; any element whose href targets the panel id acts as a toggle.
		var $panels = $('.panel');

		$panels.each(function() {
			var $this = $(this),
				$toggles = $('[href="#' + $this.attr('id') + '"]'),
				$closer = $('<div class="closer" />').appendTo($this);

			// Closer.
			$closer
				.on('click', function(event) {
					$this.trigger('---hide');
				});

			// Events.
			$this
				.on('click', function(event) {
					event.stopPropagation();
				})
				.on('---toggle', function() {
					if ($this.hasClass('active'))
						$this.triggerHandler('---hide');
					else
						$this.triggerHandler('---show');
				})
				.on('---show', function() {
					// Hide other content.
					if ($body.hasClass('content-active'))
						$panels.trigger('---hide');

					// Activate content, toggles.
					$this.addClass('active');
					$toggles.addClass('active');

					// Activate body.
					$body.addClass('content-active');
				})
				.on('---hide', function() {
					// Deactivate content, toggles.
					$this.removeClass('active');
					$toggles.removeClass('active');

					// Deactivate body.
					$body.removeClass('content-active');
				});

			// Toggles.
			$toggles
				.removeAttr('href')
				.css('cursor', 'pointer')
				.on('click', function(event) {
					event.preventDefault();
					event.stopPropagation();
					$this.trigger('---toggle');
				});
		});

		// Global events.
		// Clicking anywhere outside a panel (or pressing Escape) closes panels.
		$body
			.on('click', function(event) {
				if ($body.hasClass('content-active')) {
					event.preventDefault();
					event.stopPropagation();
					$panels.trigger('---hide');
				}
			});

		$window
			.on('keyup', function(event) {
				if (event.keyCode == 27
				&& $body.hasClass('content-active')) {
					event.preventDefault();
					event.stopPropagation();
					$panels.trigger('---hide');
				}
			});

		// Header.
		var $header = $('#header');

		// Links.
		$header.find('a').each(function() {
			var $this = $(this),
				href = $this.attr('href');

			// Internal link? Skip.
			if (!href
			|| href.charAt(0) == '#')
				return;

			// Redirect on click.
			$this
				.removeAttr('href')
				.css('cursor', 'pointer')
				.on('click', function(event) {
					event.preventDefault();
					event.stopPropagation();
					window.location.href = href;
				});
		});

		// Footer.
		var $footer = $('#footer');

		// Copyright.
		// This basically just moves the copyright line to the end of the *last* sibling of its current parent
		// when the "medium" breakpoint activates, and moves it back when it deactivates.
		$footer.find('.copyright').each(function() {
			var $this = $(this),
				$parent = $this.parent(),
				$lastParent = $parent.parent().children().last();

			skel
				.on('+medium', function() {
					$this.appendTo($lastParent);
				})
				.on('-medium', function() {
					$this.appendTo($parent);
				});
		});

		// Main.
		var $main = $('#main');

		// Thumbs.
		$main.children('.thumb').each(function() {
			var $this = $(this),
				$image = $this.find('.image'), $image_img = $image.children('img'),
				x;

			// No image? Bail.
			if ($image.length == 0)
				return;

			// Image.
			// This sets the background of the "image" <span> to the image pointed to by its child
			// <img> (which is then hidden). Gives us way more flexibility.

			// Set background.
			$image.css('background-image', 'url(' + $image_img.attr('src') + ')');

			// Set background position.
			// (assignment inside the condition is intentional: use the value if present)
			if (x = $image_img.data('position'))
				$image.css('background-position', x);

			// Hide original img.
			$image_img.hide();

			// Hack: IE<11 doesn't support pointer-events, which means clicks to our image never
			// land as they're blocked by the thumbnail's caption overlay gradient. This just forces
			// the click through to the image.
			if (skel.vars.IEVersion < 11)
				$this
					.css('cursor', 'pointer')
					.on('click', function() {
						$image.trigger('click');
					});
		});

		// Poptrox.
		// Lightbox over the gallery thumbnails; the caption is built from the
		// markup following each thumbnail link.
		$main.poptrox({
			baseZIndex: 20000,
			caption: function($a) {
				var s = '';
				$a.nextAll().each(function() {
					s += this.outerHTML;
				});
				return s;
			},
			fadeSpeed: 300,
			onPopupClose: function() { $body.removeClass('modal-active'); },
			onPopupOpen: function() { $body.addClass('modal-active'); },
			overlayOpacity: 0,
			popupCloserText: '',
			popupHeight: 150,
			popupLoaderText: '',
			popupSpeed: 300,
			popupWidth: 150,
			selector: '.thumb > a.image',
			usePopupCaption: true,
			usePopupCloser: true,
			usePopupDefaultStyling: false,
			usePopupForceClose: true,
			usePopupLoader: true,
			usePopupNav: true,
			windowMargin: 50
		});

		// Hack: Set margins to 0 when 'xsmall' activates.
		skel
			.on('-xsmall', function() {
				$main[0]._poptrox.windowMargin = 50;
			})
			.on('+xsmall', function() {
				$main[0]._poptrox.windowMargin = 0;
			});

	});

})(jQuery);
wizant/chris | wp-content/plugins/wp-survey-and-quiz-tool/pages/admin/quiz/create.php | 1098 | <?php global $blog_id; ?>
<div class="wrap">
<?php if ( isset($successMessage) ) {?>
<div class='updated'><?php echo $successMessage; ?></div>
<?php } ?>
<div id="icon-tools" class="icon32"></div>
<h2>
WP Survey And Quiz Tool - Create Quiz
</h2>
<?php require WPSQT_DIR.'pages/admin/misc/navbar.php'; ?>
<?php if ( isset($errorArray) && !empty($errorArray) ) { ?>
<div class="error">
<ol class="error">
<?php foreach($errorArray as $error ){ ?>
<li><?php echo $error; ?></li>
<?php } ?>
</ol>
</div>
<?php } ?>
<form method="POST" action="<?php echo esc_url($_SERVER['REQUEST_URI']); ?>" id="quiz_form">
<input type="hidden" name="wpsqt_nonce" value="<?php echo WPSQT_NONCE_CURRENT; ?>" />
<?php $objForm->display(); ?>
<p class="submit">
<input class="button-primary" type="submit" name="Save" value="Save Quiz" id="submitbutton" />
</p>
</form>
<a name="template_tokens"></a>
<h3>Replacement Token</h3>
<?php echo $objTokens->getDescriptions(); ?>
</div>
<?php require_once WPSQT_DIR.'/pages/admin/shared/image.php'; ?> | apache-2.0 |
huihoo/olat | OLAT-LMS/src/main/java/org/olat/presentation/framework/core/components/form/flexible/impl/elements/richText/_static/js/tinymce/themes/simple/langs/no.js | 308 | tinyMCE.addI18n('no.simple',{
bold_desc:"Fet (Ctrl+B)",
italic_desc:"Kursiv (Ctrl+I)",
underline_desc:"Understreke (Ctrl+U)",
striketrough_desc:"Gjennomstreke",
bullist_desc:"Punktliste",
numlist_desc:"Nummerliste",
undo_desc:"Angre",
redo_desc:"Gj\u00F8r om",
cleanup_desc:"Rense grisete kode"
}); | apache-2.0 |
mylog00/flink | flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/ZooKeeperCheckpointRecoveryFactory.java | 2281 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.checkpoint;
import org.apache.curator.framework.CuratorFramework;
import org.apache.flink.api.common.JobID;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;
import org.apache.flink.runtime.util.ZooKeeperUtils;
import java.util.concurrent.Executor;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
* {@link CheckpointCoordinator} components in {@link HighAvailabilityMode#ZOOKEEPER}.
*/
public class ZooKeeperCheckpointRecoveryFactory implements CheckpointRecoveryFactory {

    // Curator (ZooKeeper) client all created components are built on.
    private final CuratorFramework client;

    // Flink configuration passed through to ZooKeeperUtils factory methods.
    private final Configuration config;

    // Executor handed to the created completed-checkpoint store.
    private final Executor executor;

    /**
     * Creates the factory.
     *
     * @param client non-null Curator client
     * @param config non-null Flink configuration
     * @param executor non-null executor forwarded to created checkpoint stores
     */
    public ZooKeeperCheckpointRecoveryFactory(
            CuratorFramework client,
            Configuration config,
            Executor executor) {

        this.client = checkNotNull(client, "Curator client");
        this.config = checkNotNull(config, "Configuration");
        this.executor = checkNotNull(executor, "Executor");
    }

    /** Creates a ZooKeeper-backed completed-checkpoint store for the given job. */
    @Override
    public CompletedCheckpointStore createCheckpointStore(JobID jobId, int maxNumberOfCheckpointsToRetain, ClassLoader userClassLoader)
            throws Exception {
        // NOTE(review): userClassLoader is not forwarded -- presumably not needed
        // by the ZooKeeper-backed store; verify against the interface contract.
        return ZooKeeperUtils.createCompletedCheckpoints(client, config, jobId,
                maxNumberOfCheckpointsToRetain, executor);
    }

    /** Creates a ZooKeeper-backed checkpoint ID counter for the given job. */
    @Override
    public CheckpointIDCounter createCheckpointIDCounter(JobID jobID) throws Exception {
        return ZooKeeperUtils.createCheckpointIDCounter(client, config, jobID);
    }
}
efundamentals/openwayback | wayback-core/src/test/java/org/archive/wayback/replay/TransparentReplayRendererTest.java | 11406 | /**
*
*/
package org.archive.wayback.replay;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.zip.GZIPInputStream;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import junit.framework.TestCase;
import org.archive.io.warc.TestWARCReader;
import org.archive.io.warc.TestWARCRecordInfo;
import org.archive.io.warc.WARCRecord;
import org.archive.io.warc.WARCRecordInfo;
import org.archive.wayback.ResultURIConverter;
import org.archive.wayback.core.CaptureSearchResult;
import org.archive.wayback.core.CaptureSearchResults;
import org.archive.wayback.core.Resource;
import org.archive.wayback.core.WaybackRequest;
import org.archive.wayback.resourcestore.resourcefile.WarcResource;
import org.easymock.Capture;
import org.easymock.CaptureType;
import org.easymock.EasyMock;
/**
* unit test for {@link TransparentReplayRenderer}
*
* @contributor kenji
*
*/
public class TransparentReplayRendererTest extends TestCase {
TransparentReplayRenderer cut;
// never used in TransparentReplayRenerer.
HttpServletRequest request = null;
HttpServletResponse response;
WaybackRequest wbRequest;
CaptureSearchResult result = new CaptureSearchResult();
ResultURIConverter uriConverter;
// unused in TransparentReplayRenderer.
CaptureSearchResults results = null;
/* (non-Javadoc)
* @see junit.framework.TestCase#setUp()
*/
/**
 * Builds the object under test with a {@link RedirectRewritingHttpHeaderProcessor}
 * and mock collaborators. request/wbRequest stay null because
 * TransparentReplayRenderer does not use them (see field comments above).
 */
protected void setUp() throws Exception {
    super.setUp();
    //HttpHeaderProcessor httpHeaderProcessor = new IdentityHttpHeaderProcessor();
    HttpHeaderProcessor httpHeaderProcessor = new RedirectRewritingHttpHeaderProcessor();
    cut = new TransparentReplayRenderer(httpHeaderProcessor);
    // unused in TransparentReplayRenderer
    wbRequest = null; //new WaybackRequest();
    // use test fixture version as we want to focus on TransparentReplayRenderer behavior.
    uriConverter = EasyMock.createMock(ResultURIConverter.class);
    // result is only used in HttpHeaderOperation.processHeaders()
    results = new CaptureSearchResults();
    response = EasyMock.createMock(HttpServletResponse.class);
}
/**
 * In-memory {@link ServletOutputStream} that captures everything the renderer
 * writes to the response, for later inspection as raw bytes or UTF-8 text.
 */
public static class TestServletOutputStream extends ServletOutputStream {
    // Backing buffer; grows as the renderer writes.
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    @Override
    public void write(int b) throws IOException {
        out.write(b);
    }
    /** @return everything written so far as a byte array */
    public byte[] getBytes() {
        return out.toByteArray();
    }
    /** @return everything written so far, decoded as UTF-8 */
    public String getString() {
        try {
            return out.toString("UTF-8");
        } catch (UnsupportedEncodingException ex) {
            // UTF-8 support is mandated by the JVM spec; this cannot happen.
            throw new RuntimeException("unexpected UnsupportedEncodingException", ex);
        }
    }
}
/**
 * Baseline case: a plain 200 image capture is streamed through verbatim, with
 * the archived status code and Content-Type copied to the servlet response.
 */
public void testRenderResource_BasicCapture() throws Exception {
    final String ct = "image/gif";
    // Build a WARC response record carrying the canned GIF payload.
    WARCRecordInfo recinfo = TestWARCRecordInfo.createHttpResponse(ct, TestWARCRecordInfo.PAYLOAD_GIF);
    TestWARCReader ar = new TestWARCReader(recinfo);
    WARCRecord rec = ar.get(0);
    Resource payloadResource = new WarcResource(rec, ar);
    payloadResource.parseHeaders();
    Resource headersResource = payloadResource;
    TestServletOutputStream servletOutput = new TestServletOutputStream();
    response.setStatus(200);
    EasyMock.expect(response.getOutputStream()).andReturn(servletOutput);
    response.setHeader("Content-Type", ct);
    // ??? RedirectRewritingHttpHeaderProcessor drops Content-Length header. is this really
    // it is supposed to do?
    //response.setHeader("Content-Length", Integer.toString(payloadBytes.length));
    response.setHeader(EasyMock.<String>notNull(), EasyMock.<String>notNull());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(response);
    cut.renderResource(request, response, wbRequest, result,
        headersResource, payloadResource, uriConverter, results);
    EasyMock.verify(response);
    // The rendered body must be byte-identical to the archived payload.
    byte[] content = servletOutput.getBytes();
    assertTrue("servlet output", Arrays.equals(TestWARCRecordInfo.PAYLOAD_GIF, content));
}
/**
* test replay of capture with {@code Content-Encoding: gzip}.
* TransparentReplayRenderer copies original, compressed payload to the output.
*
* TODO: should render uncompressed content if client cannot handle
* {@code Content-Encoding: gzip}.
*
* @throws Exception
*/
public void testRenderResource_CompressedCapture() throws Exception {
    final String ct = "image/gif";
    // WARC record whose HTTP entity is gzip-compressed (Content-Encoding: gzip).
    WARCRecordInfo recinfo = new TestWARCRecordInfo(
        TestWARCRecordInfo.buildCompressedHttpResponseBlock(ct,
            TestWARCRecordInfo.PAYLOAD_GIF));
    TestWARCReader ar = new TestWARCReader(recinfo);
    WARCRecord rec = ar.get(0);
    Resource payloadResource = new WarcResource(rec, ar);
    payloadResource.parseHeaders();
    Resource headersResource = payloadResource;
    TestServletOutputStream servletOutput = new TestServletOutputStream();
    response.setStatus(200);
    EasyMock.expect(response.getOutputStream()).andReturn(servletOutput);
    // Both the Content-Type and the original Content-Encoding must be replayed.
    response.setHeader("Content-Type", ct);
    response.setHeader("Content-Encoding", "gzip");
    // ??? RedirectRewritingHttpHeaderProcessor drops Content-Length header. is this really
    // what it is supposed to do?
    //response.setHeader("Content-Length", Integer.toString(payloadBytes.length));
    response.setHeader(EasyMock.<String>notNull(), EasyMock.<String>notNull());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(response);
    cut.renderResource(request, response, wbRequest, result,
        headersResource, payloadResource, uriConverter, results);
    EasyMock.verify(response);
    // content is the original gzip-compressed bytes for PAYLOAD_GIF;
    // decompress it to compare against the known payload.
    InputStream zis = new GZIPInputStream(new ByteArrayInputStream(servletOutput.getBytes()));
    byte[] content = new byte[TestWARCRecordInfo.PAYLOAD_GIF.length];
    zis.read(content);
    assertTrue("servlet output", Arrays.equals(TestWARCRecordInfo.PAYLOAD_GIF, content));
}
/**
 * A 302 capture must be replayed with its Location header rewritten into an
 * archival-replay URL (via the ResultURIConverter) and an empty body.
 */
public void testRenderResource_Redirect() throws Exception {
    String location = "http://www.example.com/index.html";
    WARCRecordInfo recinfo = new TestWARCRecordInfo(TestWARCRecordInfo.buildHttpRedirectResponseBlock(location));
    TestWARCReader ar = new TestWARCReader(recinfo);
    WARCRecord rec = ar.get(0);
    Resource payloadResource = new WarcResource(rec, ar);
    payloadResource.parseHeaders();
    final String originalUrl = "http://www.example.com/";
    final String captureTimestamp = "20130101123456";
    result.setOriginalUrl(originalUrl);
    result.setCaptureTimestamp(captureTimestamp);
    // makeReplayURI() is called through RedirectRewritingHttpHeaderProcessor.
    // TODO: perhaps HttpheaderProcessor is the right class to make fixture?
    EasyMock.expect(uriConverter.makeReplayURI(captureTimestamp, location))
        .andReturn("/web/" + captureTimestamp + "/" + location);
    TestServletOutputStream servletOutput = new TestServletOutputStream();
    response.setStatus(302);
    EasyMock.expect(response.getOutputStream()).andReturn(servletOutput);
    response.setHeader("Content-Type", "text/html");
    // Location must carry the rewritten (replay) URL, not the live-web one.
    response.setHeader(EasyMock.eq("Location"), EasyMock.matches("/web/" + captureTimestamp + "/" + location));
    // RedirectRewritingHttpHeaderProcessor drops Content-Length.
    // response.setHeader("Content-Length", "0");
    response.setHeader(EasyMock.<String>notNull(), EasyMock.<String>notNull());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(response, uriConverter);
    cut.renderResource(request, response, wbRequest, result,
        payloadResource, payloadResource, uriConverter, results);
    EasyMock.verify(response, uriConverter);
    byte[] content = servletOutput.getBytes();
    assertEquals("payload length", 0, content.length);
}
/**
* test replay of capture with {@code Transfer-Encoding: chunked}.
*
* <p>TransparentReplayRenderer writes out chunk-decoded payload, because
* {@link WarcResource} always decodes chunked-entity. Point of this test
* is that response never have {@code Transfer-Encoding: chunked} header,
* even when initialized with {@link IdentityHttpHeaderProcessor}.
* so, this is not really a unit test for TransparentReplayRenderer, but
* a multi-component test placed here for convenience.</p>
* <p>This test does not use member object {@code cut}, in order to test
* with {@link IdentityHttpHeaderProcessor}.</p>
*
* @throws Exception
*/
public void testRenderResource_Chunked() throws Exception {
    final String ct = "text/xml";
    final String payload = "<?xml version=\"1.0\"?>\n" +
        "<payload name=\"archive\">\n" +
        "  <inside/>\n" +
        "</payload>\n";
    // Build the HTTP response block with chunked transfer-encoding (last arg).
    final byte[] recordBytes = TestWARCRecordInfo.buildHttpResponseBlock(
        "200 OK", ct, payload.getBytes("UTF-8"), true);
    //System.out.println(new String(recordBytes, "UTF-8"));
    WARCRecordInfo recinfo = new TestWARCRecordInfo(recordBytes);
    TestWARCReader ar = new TestWARCReader(recinfo);
    WARCRecord rec = ar.get(0);
    Resource payloadResource = new WarcResource(rec, ar);
    payloadResource.parseHeaders();
    Resource headersResource = payloadResource;
    TestServletOutputStream servletOutput = new TestServletOutputStream();
    // expectations
    response.setStatus(200);
    EasyMock.expect(response.getOutputStream()).andReturn(servletOutput);
    // capture setHeader() call for "Transfer-Encoding"
    Capture<String> transferEncodingCapture = new Capture<String>(CaptureType.FIRST);
    response.setHeader(EasyMock.eq("Transfer-Encoding"), EasyMock.capture(transferEncodingCapture));
    EasyMock.expectLastCall().anyTimes();
    response.setHeader(EasyMock.<String>anyObject(), EasyMock.<String>anyObject());
    EasyMock.expectLastCall().anyTimes();
    EasyMock.replay(response);
    // creating separate test object to use IdentityHttpHeaderProcessor
    TransparentReplayRenderer cut2 = new TransparentReplayRenderer(new IdentityHttpHeaderProcessor());
    cut2.renderResource(request, response, wbRequest, result,
        headersResource, payloadResource, uriConverter, results);
    EasyMock.verify(response);
    assertFalse("Transfer-Encoding header must not be set", transferEncodingCapture.hasCaptured());
    // The body is replayed chunk-decoded, identical to the original payload.
    String output = new String(servletOutput.getBytes(), "UTF-8");
    assertEquals(payload, output);
}
}
| apache-2.0 |
kaviththiranga/developer-studio | esb/org.wso2.developerstudio.eclipse.gmf.esb.edit/src/org/wso2/developerstudio/eclipse/gmf/esb/provider/AutoscaleInMediatorItemProvider.java | 3086 | /**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.gmf.esb.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.wso2.developerstudio.eclipse.gmf.esb.AutoscaleInMediator;
/**
* This is the item provider adapter for a {@link org.wso2.developerstudio.eclipse.gmf.esb.AutoscaleInMediator} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
// NOTE(review): this class is EMF-generated (see the @generated tags).
// Hand edits outside user-doc regions are overwritten on regeneration,
// so the code below is left byte-identical; only comments are added.
public class AutoscaleInMediatorItemProvider
    extends MediatorItemProvider {
    /**
     * This constructs an instance from a factory and a notifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public AutoscaleInMediatorItemProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * This returns the property descriptors for the adapted class.
     * Descriptors are built lazily: the inherited ones are collected on the
     * first call and cached in {@code itemPropertyDescriptors}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
        if (itemPropertyDescriptors == null) {
            super.getPropertyDescriptors(object);
        }
        return itemPropertyDescriptors;
    }

    /**
     * This returns AutoscaleInMediator.gif.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getImage(Object object) {
        return overlayImage(object, getResourceLocator().getImage("full/obj16/AutoscaleInMediator"));
    }

    /**
     * This returns the label text for the adapted class: the localized type
     * name, followed by the mediator's description when one is set.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getText(Object object) {
        String label = ((AutoscaleInMediator)object).getDescription();
        return label == null || label.length() == 0 ?
            getString("_UI_AutoscaleInMediator_type") :
            getString("_UI_AutoscaleInMediator_type") + " " + label;
    }

    /**
     * This handles model notifications by calling {@link #updateChildren} to update any cached
     * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void notifyChanged(Notification notification) {
        updateChildren(notification);
        super.notifyChanged(notification);
    }

    /**
     * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
     * that can be created under this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
        super.collectNewChildDescriptors(newChildDescriptors, object);
    }

}
| apache-2.0 |
ruchiherself/AssessSupertrees | include/boost/spirit/home/support/ascii.hpp | 2394 | /*=============================================================================
Copyright (c) 2001-2007 Joel de Guzman
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
=============================================================================*/
#if !defined(SPIRIT_ASCII_JAN_31_2006_0529PM)
#define SPIRIT_ASCII_JAN_31_2006_0529PM
#include <boost/spirit/home/support/char_class.hpp>
#include <boost/proto/core.hpp>
namespace boost { namespace spirit { namespace ascii
{
    // All character classification in this namespace is performed against
    // the plain ASCII character set supplied by spirit::char_class.
    typedef spirit::char_class::ascii char_set;
    namespace tag = spirit::char_class::tag;

    // Metafunction: builds the Proto terminal type that binds a
    // classification tag (tag::alnum, tag::digit, ...) to the ASCII char_set.
    template <typename Class>
    struct make_tag
      : proto::terminal<spirit::char_class::key<char_set, Class> > {};

    // One terminal type per classification category.
    typedef make_tag<tag::alnum>::type alnum_type;
    typedef make_tag<tag::alpha>::type alpha_type;
    typedef make_tag<tag::blank>::type blank_type;
    typedef make_tag<tag::cntrl>::type cntrl_type;
    typedef make_tag<tag::digit>::type digit_type;
    typedef make_tag<tag::graph>::type graph_type;
    typedef make_tag<tag::print>::type print_type;
    typedef make_tag<tag::punct>::type punct_type;
    typedef make_tag<tag::space>::type space_type;
    typedef make_tag<tag::xdigit>::type xdigit_type;

    // Ready-made terminal objects for use directly in parser expressions
    // (e.g. ascii::digit, ascii::alpha). The {{}} performs aggregate
    // value-initialization of the Proto terminal.
    alnum_type const alnum = {{}};
    alpha_type const alpha = {{}};
    blank_type const blank = {{}};
    cntrl_type const cntrl = {{}};
    digit_type const digit = {{}};
    graph_type const graph = {{}};
    print_type const print = {{}};
    punct_type const punct = {{}};
    space_type const space = {{}};
    xdigit_type const xdigit = {{}};

    // Directive terminal for case-insensitive matching over this char set.
    typedef proto::terminal<
        spirit::char_class::no_case_tag<char_set> >::type
    no_case_type;

    no_case_type const no_case = no_case_type();

    // Terminals for lower/upper case classification/conversion.
    typedef proto::terminal<
        spirit::char_class::lower_case_tag<char_set> >::type
    lower_type;

    typedef proto::terminal<
        spirit::char_class::upper_case_tag<char_set> >::type
    upper_type;

    lower_type const lower = lower_type();
    upper_type const upper = upper_type();

#if defined(__GNUC__)
    // Referencing each constant once suppresses GCC's unused-variable
    // warnings for translation units that include but do not use them.
    inline void silence_unused_warnings__ascii()
    {
        (void) alnum; (void) alpha; (void) blank; (void) cntrl; (void) digit;
        (void) graph; (void) print; (void) punct; (void) space; (void) xdigit;
    }
#endif
}}}
#endif
| apache-2.0 |
szegedim/hadoop | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java | 43768 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.security.PrivilegedExceptionAction;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.JobPriority;
import org.apache.hadoop.mapreduce.JobStatus.State;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.MRDelegationTokenIdentifier;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.LocalConfigurationProvider;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.client.api.impl.YarnClientImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.resource.ResourceUtils;
import org.apache.log4j.Appender;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Layout;
import org.apache.log4j.Level;
import org.apache.log4j.SimpleLayout;
import org.apache.log4j.WriterAppender;
import org.apache.log4j.spi.LoggingEvent;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.ImmutableList;
/**
* Test YarnRunner and make sure the client side plugin works
* fine
*/
public class TestYARNRunner {
  private static final Logger LOG =
      LoggerFactory.getLogger(TestYARNRunner.class);
  private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);

  // Everything of the default task profile params up to (and excluding) the
  // last '%' -- i.e. the prefix before the <LOG_DIR>/profile.out
  // substitution -- so tests can match AM launch commands by prefix.
  private static final String PROFILE_PARAMS =
      MRJobConfig.DEFAULT_TASK_PROFILE_PARAMS.substring(0,
          MRJobConfig.DEFAULT_TASK_PROFILE_PARAMS.lastIndexOf("%"));
private static class CustomResourceTypesConfigurationProvider
extends LocalConfigurationProvider {
@Override
public InputStream getConfigurationInputStream(Configuration bootstrapConf,
String name) throws YarnException, IOException {
if (YarnConfiguration.RESOURCE_TYPES_CONFIGURATION_FILE.equals(name)) {
return new ByteArrayInputStream(
("<configuration>\n" +
" <property>\n" +
" <name>yarn.resource-types</name>\n" +
" <value>a-custom-resource</value>\n" +
" </property>\n" +
" <property>\n" +
" <name>yarn.resource-types.a-custom-resource.units</name>\n" +
" <value>G</value>\n" +
" </property>\n" +
"</configuration>\n").getBytes());
} else {
return super.getConfigurationInputStream(bootstrapConf, name);
}
}
}
private static class TestAppender extends AppenderSkeleton {
private final List<LoggingEvent> logEvents = new CopyOnWriteArrayList<>();
@Override
public boolean requiresLayout() {
return false;
}
@Override
public void close() {
}
@Override
protected void append(LoggingEvent arg0) {
logEvents.add(arg0);
}
private List<LoggingEvent> getLogEvents() {
return logEvents;
}
}
  // Object under test; wrapped in a Mockito spy in setUp().
  private YARNRunner yarnRunner;
  // Mocked RM-facing delegate injected into the runner.
  private ResourceMgrDelegate resourceMgrDelegate;
  private YarnConfiguration conf;
  // Spied client cache so getClient() can be stubbed per test.
  private ClientCache clientCache;
  private ApplicationId appId;
  // MapReduce-side job id converted from appId.
  private JobID jobId;
  // Per-test scratch directory, wiped in setUp()/cleanup().
  private File testWorkDir =
      new File("target", TestYARNRunner.class.getName());
  private ApplicationSubmissionContext submissionContext;
  private ClientServiceDelegate clientDelegate;
  // Diagnostics string the mocked RM reports for failed submissions.
  private static final String failString = "Rejected job";
@BeforeClass
public static void setupBeforeClass() {
ResourceUtils.resetResourceTypes(new Configuration());
}
  /**
   * Builds the object under test with a mocked RM delegate and a spied
   * client cache, and stubs context creation so submitJob() uses a mock
   * ApplicationSubmissionContext rather than building a real one.
   */
  @Before
  public void setUp() throws Exception {
    resourceMgrDelegate = mock(ResourceMgrDelegate.class);
    conf = new YarnConfiguration();
    // Kerberos-style RM principal; token-related tests rely on it being set.
    conf.set(YarnConfiguration.RM_PRINCIPAL, "mapred/host@REALM");
    clientCache = new ClientCache(conf, resourceMgrDelegate);
    clientCache = spy(clientCache);
    yarnRunner = new YARNRunner(conf, resourceMgrDelegate, clientCache);
    yarnRunner = spy(yarnRunner);
    submissionContext = mock(ApplicationSubmissionContext.class);
    // Intercept createApplicationSubmissionContext() on the spy and hand
    // back the mock context.
    doAnswer(
        new Answer<ApplicationSubmissionContext>() {
          @Override
          public ApplicationSubmissionContext answer(InvocationOnMock invocation)
              throws Throwable {
            return submissionContext;
          }
        }
        ).when(yarnRunner).createApplicationSubmissionContext(any(Configuration.class),
            any(String.class), any(Credentials.class));

    appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
    jobId = TypeConverter.fromYarn(appId);
    // Start every test from an empty scratch directory.
    if (testWorkDir.exists()) {
      FileContext.getLocalFSFileContext().delete(new Path(testWorkDir.toString()), true);
    }
    testWorkDir.mkdirs();
  }
@After
public void cleanup() {
FileUtil.fullyDelete(testWorkDir);
ResourceUtils.resetResourceTypes(new Configuration());
}
  /**
   * killJob() routing: a PREP job is killed via the RM (application kill),
   * a RUNNING job via the AM client, and a job with no status whose
   * application has already FINISHED triggers no further AM kill.
   */
  @Test(timeout=20000)
  public void testJobKill() throws Exception {
    clientDelegate = mock(ClientServiceDelegate.class);
    // Case 1: job in PREP -> kill goes to the RM as an application kill.
    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f,
            State.PREP, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
    when(clientDelegate.killJob(any(JobID.class))).thenReturn(true);
    doAnswer(
        new Answer<ClientServiceDelegate>() {
          @Override
          public ClientServiceDelegate answer(InvocationOnMock invocation)
              throws Throwable {
            return clientDelegate;
          }
        }
        ).when(clientCache).getClient(any(JobID.class));
    yarnRunner.killJob(jobId);
    verify(resourceMgrDelegate).killApplication(appId);

    // Case 2: job RUNNING -> kill is delegated to the AM client.
    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f,
            State.RUNNING, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
    yarnRunner.killJob(jobId);
    verify(clientDelegate).killJob(jobId);

    // Case 3: no job status and RM reports FINISHED. Mockito verification
    // counts are cumulative, so the verify below (default times(1)) also
    // asserts that this killJob() issued no additional AM kill.
    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(null);
    when(resourceMgrDelegate.getApplicationReport(any(ApplicationId.class)))
        .thenReturn(
            ApplicationReport.newInstance(appId, null, "tmp", "tmp", "tmp",
                "tmp", 0, null, YarnApplicationState.FINISHED, "tmp", "tmp",
                0L, 0L, 0L,
                FinalApplicationStatus.SUCCEEDED, null, null, 0f,
                "tmp", null));
    yarnRunner.killJob(jobId);
    verify(clientDelegate).killJob(jobId);
  }
  /**
   * A job that never leaves RUNNING forces killJob() to wait for the
   * configured hard-kill timeout before giving up on the graceful path.
   */
  @Test(timeout=60000)
  public void testJobKillTimeout() throws Exception {
    // 10s above the default so the wall-clock assertion below is meaningful.
    long timeToWaitBeforeHardKill =
        10000 + MRJobConfig.DEFAULT_MR_AM_HARD_KILL_TIMEOUT_MS;
    conf.setLong(MRJobConfig.MR_AM_HARD_KILL_TIMEOUT_MS,
        timeToWaitBeforeHardKill);
    clientDelegate = mock(ClientServiceDelegate.class);
    doAnswer(
        new Answer<ClientServiceDelegate>() {
          @Override
          public ClientServiceDelegate answer(InvocationOnMock invocation)
              throws Throwable {
            return clientDelegate;
          }
        }
        ).when(clientCache).getClient(any(JobID.class));
    // Status is pinned to RUNNING for the whole test, so the graceful kill
    // never observes completion.
    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f,
            State.RUNNING, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
    long startTimeMillis = System.currentTimeMillis();
    yarnRunner.killJob(jobId);
    assertTrue("killJob should have waited at least " + timeToWaitBeforeHardKill
        + " ms.", System.currentTimeMillis() - startTimeMillis
        >= timeToWaitBeforeHardKill);
  }
@Test(timeout=20000)
public void testJobSubmissionFailure() throws Exception {
when(resourceMgrDelegate.submitApplication(any(ApplicationSubmissionContext.class))).
thenReturn(appId);
ApplicationReport report = mock(ApplicationReport.class);
when(report.getApplicationId()).thenReturn(appId);
when(report.getDiagnostics()).thenReturn(failString);
when(report.getYarnApplicationState()).thenReturn(YarnApplicationState.FAILED);
when(resourceMgrDelegate.getApplicationReport(appId)).thenReturn(report);
Credentials credentials = new Credentials();
File jobxml = new File(testWorkDir, "job.xml");
OutputStream out = new FileOutputStream(jobxml);
conf.writeXml(out);
out.close();
try {
yarnRunner.submitJob(jobId, testWorkDir.getAbsolutePath().toString(), credentials);
} catch(IOException io) {
LOG.info("Logging exception:", io);
assertTrue(io.getLocalizedMessage().contains(failString));
}
}
  /**
   * Exercises a real ResourceMgrDelegate (not a mock) backed by a mocked
   * ApplicationClientProtocol, and verifies that each delegate operation
   * maps to the expected RM protocol call.
   */
  @Test(timeout=20000)
  public void testResourceMgrDelegate() throws Exception {
    /* we not want a mock of resource mgr delegate */
    final ApplicationClientProtocol clientRMProtocol = mock(ApplicationClientProtocol.class);
    ResourceMgrDelegate delegate = new ResourceMgrDelegate(conf) {
      @Override
      protected void serviceStart() throws Exception {
        // Swap the real RM client for the mocked protocol before use.
        assertTrue(this.client instanceof YarnClientImpl);
        ((YarnClientImpl) this.client).setRMClient(clientRMProtocol);
      }
    };
    /* make sure kill calls force-kill the application */
    when(clientRMProtocol.forceKillApplication(any(KillApplicationRequest.class)))
    .thenReturn(KillApplicationResponse.newInstance(true));
    delegate.killApplication(appId);
    verify(clientRMProtocol).forceKillApplication(any(KillApplicationRequest.class));

    /* make sure getAllJobs calls getApplications */
    when(clientRMProtocol.getApplications(any(GetApplicationsRequest.class))).
    thenReturn(recordFactory.newRecordInstance(GetApplicationsResponse.class));
    delegate.getAllJobs();
    verify(clientRMProtocol).getApplications(any(GetApplicationsRequest.class));

    /* make sure getApplicationReport is called */
    when(clientRMProtocol.getApplicationReport(any(GetApplicationReportRequest.class)))
        .thenReturn(recordFactory.newRecordInstance(GetApplicationReportResponse.class));
    delegate.getApplicationReport(appId);
    verify(clientRMProtocol).getApplicationReport(any(GetApplicationReportRequest.class));

    /* make sure getClusterMetrics is called */
    GetClusterMetricsResponse clusterMetricsResponse = recordFactory.newRecordInstance
        (GetClusterMetricsResponse.class);
    clusterMetricsResponse.setClusterMetrics(recordFactory.newRecordInstance(
        YarnClusterMetrics.class));
    when(clientRMProtocol.getClusterMetrics(any(GetClusterMetricsRequest.class)))
        .thenReturn(clusterMetricsResponse);
    delegate.getClusterMetrics();
    verify(clientRMProtocol).getClusterMetrics(any(GetClusterMetricsRequest.class));

    /* active trackers map to getClusterNodes */
    when(clientRMProtocol.getClusterNodes(any(GetClusterNodesRequest.class))).
    thenReturn(recordFactory.newRecordInstance(GetClusterNodesResponse.class));
    delegate.getActiveTrackers();
    verify(clientRMProtocol).getClusterNodes(any(GetClusterNodesRequest.class));

    /* new job id comes from getNewApplication */
    GetNewApplicationResponse newAppResponse = recordFactory.newRecordInstance(
        GetNewApplicationResponse.class);
    newAppResponse.setApplicationId(appId);
    when(clientRMProtocol.getNewApplication(any(GetNewApplicationRequest.class))).
    thenReturn(newAppResponse);
    delegate.getNewJobID();
    verify(clientRMProtocol).getNewApplication(any(GetNewApplicationRequest.class));

    /* queue listing maps to getQueueInfo */
    GetQueueInfoResponse queueInfoResponse = recordFactory.newRecordInstance(
        GetQueueInfoResponse.class);
    queueInfoResponse.setQueueInfo(recordFactory.newRecordInstance(QueueInfo.class));
    when(clientRMProtocol.getQueueInfo(any(GetQueueInfoRequest.class))).
    thenReturn(queueInfoResponse);
    delegate.getQueues();
    verify(clientRMProtocol).getQueueInfo(any(GetQueueInfoRequest.class));

    /* queue ACLs map to getQueueUserAcls */
    GetQueueUserAclsInfoResponse aclResponse = recordFactory.newRecordInstance(
        GetQueueUserAclsInfoResponse.class);
    when(clientRMProtocol.getQueueUserAcls(any(GetQueueUserAclsInfoRequest.class)))
        .thenReturn(aclResponse);
    delegate.getQueueAclsForCurrentUser();
    verify(clientRMProtocol).getQueueUserAcls(any(GetQueueUserAclsInfoRequest.class));
  }
  /**
   * addHistoryToken() must fetch a JobHistoryServer delegation token only
   * when security is enabled AND an RM delegation token is already present,
   * and must not fetch it twice.
   */
  @Test(timeout=20000)
  public void testGetHSDelegationToken() throws Exception {
    try {
      Configuration conf = new Configuration();

      // Setup mock service
      InetSocketAddress mockRmAddress = new InetSocketAddress("localhost", 4444);
      Text rmTokenSevice = SecurityUtil.buildTokenService(mockRmAddress);

      InetSocketAddress mockHsAddress = new InetSocketAddress("localhost", 9200);
      Text hsTokenSevice = SecurityUtil.buildTokenService(mockHsAddress);

      // Setup mock rm token
      RMDelegationTokenIdentifier tokenIdentifier = new RMDelegationTokenIdentifier(
          new Text("owner"), new Text("renewer"), new Text("real"));
      Token<RMDelegationTokenIdentifier> token = new Token<RMDelegationTokenIdentifier>(
          new byte[0], new byte[0], tokenIdentifier.getKind(), rmTokenSevice);
      token.setKind(RMDelegationTokenIdentifier.KIND_NAME);

      // Setup mock history token
      org.apache.hadoop.yarn.api.records.Token historyToken =
          org.apache.hadoop.yarn.api.records.Token.newInstance(new byte[0],
              MRDelegationTokenIdentifier.KIND_NAME.toString(), new byte[0],
              hsTokenSevice.toString());
      GetDelegationTokenResponse getDtResponse =
          Records.newRecord(GetDelegationTokenResponse.class);
      getDtResponse.setDelegationToken(historyToken);

      // mock services
      MRClientProtocol mockHsProxy = mock(MRClientProtocol.class);
      doReturn(mockHsAddress).when(mockHsProxy).getConnectAddress();
      doReturn(getDtResponse).when(mockHsProxy).getDelegationToken(
          any(GetDelegationTokenRequest.class));

      ResourceMgrDelegate rmDelegate = mock(ResourceMgrDelegate.class);
      doReturn(rmTokenSevice).when(rmDelegate).getRMDelegationTokenService();

      ClientCache clientCache = mock(ClientCache.class);
      doReturn(mockHsProxy).when(clientCache).getInitializedHSProxy();

      Credentials creds = new Credentials();

      YARNRunner yarnRunner = new YARNRunner(conf, rmDelegate, clientCache);

      // No HS token if no RM token
      yarnRunner.addHistoryToken(creds);
      verify(mockHsProxy, times(0)).getDelegationToken(
          any(GetDelegationTokenRequest.class));

      // No HS token if RM token present, but security disabled.
      creds.addToken(new Text("rmdt"), token);
      yarnRunner.addHistoryToken(creds);
      verify(mockHsProxy, times(0)).getDelegationToken(
          any(GetDelegationTokenRequest.class));

      // Enable kerberos security for the remaining scenarios.
      conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
          "kerberos");
      UserGroupInformation.setConfiguration(conf);
      creds = new Credentials();

      // No HS token if no RM token, security enabled
      yarnRunner.addHistoryToken(creds);
      verify(mockHsProxy, times(0)).getDelegationToken(
          any(GetDelegationTokenRequest.class));

      // HS token if RM token present, security enabled
      creds.addToken(new Text("rmdt"), token);
      yarnRunner.addHistoryToken(creds);
      verify(mockHsProxy, times(1)).getDelegationToken(
          any(GetDelegationTokenRequest.class));

      // No additional call to get HS token if RM and HS token present
      yarnRunner.addHistoryToken(creds);
      verify(mockHsProxy, times(1)).getDelegationToken(
          any(GetDelegationTokenRequest.class));
    } finally {
      // Back to defaults so later tests see an insecure configuration.
      UserGroupInformation.setConfiguration(new Configuration());
    }
  }
  /**
   * When fetching a JobHistoryServer delegation token, the requested
   * renewer must be the cluster's RM (master) principal, regardless of the
   * user running the request.
   */
  @Test(timeout=20000)
  public void testHistoryServerToken() throws Exception {
    //Set the master principal in the config
    conf.set(YarnConfiguration.RM_PRINCIPAL,"foo@LOCAL");

    final String masterPrincipal = Master.getMasterPrincipal(conf);

    final MRClientProtocol hsProxy = mock(MRClientProtocol.class);
    when(hsProxy.getDelegationToken(any(GetDelegationTokenRequest.class))).thenAnswer(
        new Answer<GetDelegationTokenResponse>() {
          public GetDelegationTokenResponse answer(InvocationOnMock invocation) {
            GetDelegationTokenRequest request =
                (GetDelegationTokenRequest)invocation.getArguments()[0];
            // check that the renewer matches the cluster's RM principal
            assertEquals(masterPrincipal, request.getRenewer() );
            org.apache.hadoop.yarn.api.records.Token token =
                recordFactory.newRecordInstance(org.apache.hadoop.yarn.api.records.Token.class);
            // none of these fields matter for the sake of the test
            token.setKind("");
            token.setService("");
            token.setIdentifier(ByteBuffer.allocate(0));
            token.setPassword(ByteBuffer.allocate(0));
            GetDelegationTokenResponse tokenResponse =
                recordFactory.newRecordInstance(GetDelegationTokenResponse.class);
            tokenResponse.setDelegationToken(token);
            return tokenResponse;
          }
        });

    // Run the fetch as an arbitrary remote user; the renewer assertion
    // above must still hold.
    UserGroupInformation.createRemoteUser("someone").doAs(
        new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            yarnRunner = new YARNRunner(conf, null, null);
            yarnRunner.getDelegationTokenFromHS(hsProxy);
            verify(hsProxy).
            getDelegationToken(any(GetDelegationTokenRequest.class));
            return null;
          }
        });
  }
  /**
   * The AM launch command must contain the admin java opts before the user
   * java opts (so user opts can override admin ones), must set
   * java.io.tmpdir, and must not enable the profiler by default.
   */
  @Test(timeout=20000)
  public void testAMAdminCommandOpts() throws Exception {
    JobConf jobConf = new JobConf();

    jobConf.set(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS, "-Djava.net.preferIPv4Stack=true");
    jobConf.set(MRJobConfig.MR_AM_COMMAND_OPTS, "-Xmx1024m");

    YARNRunner yarnRunner = new YARNRunner(jobConf);
    ApplicationSubmissionContext submissionContext =
        buildSubmitContext(yarnRunner, jobConf);

    ContainerLaunchContext containerSpec = submissionContext.getAMContainerSpec();
    List<String> commands = containerSpec.getCommands();

    // *Index records which command string contained the opt,
    // *Pos the character offset of the opt within that string.
    int index = 0;
    int adminIndex = 0;
    int adminPos = -1;
    int userIndex = 0;
    int userPos = -1;
    int tmpDirPos = -1;

    for(String command : commands) {
      if(command != null) {
        assertFalse("Profiler should be disabled by default",
            command.contains(PROFILE_PARAMS));
        adminPos = command.indexOf("-Djava.net.preferIPv4Stack=true");
        if(adminPos >= 0)
          adminIndex = index;

        userPos = command.indexOf("-Xmx1024m");
        if(userPos >= 0)
          userIndex = index;

        tmpDirPos = command.indexOf("-Djava.io.tmpdir=");
      }

      index++;
    }

    // Check java.io.tmpdir opts are set in the commands
    assertTrue("java.io.tmpdir is not set for AM", tmpDirPos > 0);

    // Check both admin java opts and user java opts are in the commands
    assertTrue("AM admin command opts not in the commands.", adminPos > 0);
    assertTrue("AM user command opts not in the commands.", userPos > 0);

    // Check the admin java opts is before user java opts in the commands:
    // compare offsets when both are in the same command string, otherwise
    // compare command positions.
    if(adminIndex == userIndex) {
      assertTrue("AM admin command opts is after user command opts.", adminPos < userPos);
    } else {
      assertTrue("AM admin command opts is after user command opts.", adminIndex < userIndex);
    }
  }
@Test(timeout=20000)
public void testWarnCommandOpts() throws Exception {
org.apache.log4j.Logger logger =
org.apache.log4j.Logger.getLogger(YARNRunner.class);
ByteArrayOutputStream bout = new ByteArrayOutputStream();
Layout layout = new SimpleLayout();
Appender appender = new WriterAppender(layout, bout);
logger.addAppender(appender);
JobConf jobConf = new JobConf();
jobConf.set(MRJobConfig.MR_AM_ADMIN_COMMAND_OPTS, "-Djava.net.preferIPv4Stack=true -Djava.library.path=foo");
jobConf.set(MRJobConfig.MR_AM_COMMAND_OPTS, "-Xmx1024m -Djava.library.path=bar");
YARNRunner yarnRunner = new YARNRunner(jobConf);
@SuppressWarnings("unused")
ApplicationSubmissionContext submissionContext =
buildSubmitContext(yarnRunner, jobConf);
String logMsg = bout.toString();
assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " +
"yarn.app.mapreduce.am.admin-command-opts can cause programs to no " +
"longer function if hadoop native libraries are used. These values " +
"should be set as part of the LD_LIBRARY_PATH in the app master JVM " +
"env using yarn.app.mapreduce.am.admin.user.env config settings."));
assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " +
"yarn.app.mapreduce.am.command-opts can cause programs to no longer " +
"function if hadoop native libraries are used. These values should " +
"be set as part of the LD_LIBRARY_PATH in the app master JVM env " +
"using yarn.app.mapreduce.am.env config settings."));
}
@Test(timeout=20000)
public void testAMProfiler() throws Exception {
JobConf jobConf = new JobConf();
jobConf.setBoolean(MRJobConfig.MR_AM_PROFILE, true);
YARNRunner yarnRunner = new YARNRunner(jobConf);
ApplicationSubmissionContext submissionContext =
buildSubmitContext(yarnRunner, jobConf);
ContainerLaunchContext containerSpec = submissionContext.getAMContainerSpec();
List<String> commands = containerSpec.getCommands();
for(String command : commands) {
if (command != null) {
if (command.contains(PROFILE_PARAMS)) {
return;
}
}
}
throw new IllegalStateException("Profiler opts not found!");
}
@Test
public void testNodeLabelExp() throws Exception {
JobConf jobConf = new JobConf();
jobConf.set(MRJobConfig.JOB_NODE_LABEL_EXP, "GPU");
jobConf.set(MRJobConfig.AM_NODE_LABEL_EXP, "highMem");
YARNRunner yarnRunner = new YARNRunner(jobConf);
ApplicationSubmissionContext appSubCtx =
buildSubmitContext(yarnRunner, jobConf);
assertEquals(appSubCtx.getNodeLabelExpression(), "GPU");
assertEquals(appSubCtx.getAMContainerResourceRequests().get(0)
.getNodeLabelExpression(), "highMem");
}
@Test
public void testResourceRequestLocalityAny() throws Exception {
ResourceRequest amAnyResourceRequest =
createResourceRequest(ResourceRequest.ANY, true);
verifyResourceRequestLocality(null, null, amAnyResourceRequest);
verifyResourceRequestLocality(null, "label1", amAnyResourceRequest);
}
@Test
public void testResourceRequestLocalityRack() throws Exception {
ResourceRequest amAnyResourceRequest =
createResourceRequest(ResourceRequest.ANY, false);
ResourceRequest amRackResourceRequest =
createResourceRequest("/rack1", true);
verifyResourceRequestLocality("/rack1", null, amAnyResourceRequest,
amRackResourceRequest);
verifyResourceRequestLocality("/rack1", "label1", amAnyResourceRequest,
amRackResourceRequest);
}
@Test
public void testResourceRequestLocalityNode() throws Exception {
ResourceRequest amAnyResourceRequest =
createResourceRequest(ResourceRequest.ANY, false);
ResourceRequest amRackResourceRequest =
createResourceRequest("/rack1", false);
ResourceRequest amNodeResourceRequest =
createResourceRequest("node1", true);
verifyResourceRequestLocality("/rack1/node1", null, amAnyResourceRequest,
amRackResourceRequest, amNodeResourceRequest);
verifyResourceRequestLocality("/rack1/node1", "label1",
amAnyResourceRequest, amRackResourceRequest, amNodeResourceRequest);
}
@Test
public void testResourceRequestLocalityNodeDefaultRack() throws Exception {
ResourceRequest amAnyResourceRequest =
createResourceRequest(ResourceRequest.ANY, false);
ResourceRequest amRackResourceRequest =
createResourceRequest("/default-rack", false);
ResourceRequest amNodeResourceRequest =
createResourceRequest("node1", true);
verifyResourceRequestLocality("node1", null,
amAnyResourceRequest, amRackResourceRequest, amNodeResourceRequest);
verifyResourceRequestLocality("node1", "label1",
amAnyResourceRequest, amRackResourceRequest, amNodeResourceRequest);
}
@Test
public void testResourceRequestLocalityMultipleNodes() throws Exception {
  ResourceRequest anyRequest =
      createResourceRequest(ResourceRequest.ANY, false);
  ResourceRequest rackRequest = createResourceRequest("/rack1", false);
  ResourceRequest firstNodeRequest = createResourceRequest("node1", true);
  ResourceRequest secondNodeRequest = createResourceRequest("node2", true);
  // Two nodes on the same rack: one shared rack request plus one request
  // per node.
  verifyResourceRequestLocality("/rack1/node1,/rack1/node2", null,
      anyRequest, rackRequest, firstNodeRequest, secondNodeRequest);
  verifyResourceRequestLocality("/rack1/node1,/rack1/node2", "label1",
      anyRequest, rackRequest, firstNodeRequest, secondNodeRequest);
}
@Test
public void testResourceRequestLocalityMultipleNodesDifferentRack()
    throws Exception {
  ResourceRequest anyRequest =
      createResourceRequest(ResourceRequest.ANY, false);
  ResourceRequest firstRackRequest = createResourceRequest("/rack1", false);
  ResourceRequest firstNodeRequest = createResourceRequest("node1", true);
  ResourceRequest secondRackRequest = createResourceRequest("/rack2", false);
  ResourceRequest secondNodeRequest = createResourceRequest("node2", true);
  // Nodes on distinct racks produce a separate rack request for each rack.
  verifyResourceRequestLocality("/rack1/node1,/rack2/node2", null,
      anyRequest, firstRackRequest, firstNodeRequest,
      secondRackRequest, secondNodeRequest);
  verifyResourceRequestLocality("/rack1/node1,/rack2/node2", "label1",
      anyRequest, firstRackRequest, firstNodeRequest,
      secondRackRequest, secondNodeRequest);
}
@Test
public void testResourceRequestLocalityMultipleNodesDefaultRack()
    throws Exception {
  ResourceRequest anyRequest =
      createResourceRequest(ResourceRequest.ANY, false);
  ResourceRequest rackRequest = createResourceRequest("/rack1", false);
  ResourceRequest firstNodeRequest = createResourceRequest("node1", true);
  ResourceRequest defaultRackRequest =
      createResourceRequest("/default-rack", false);
  ResourceRequest secondNodeRequest = createResourceRequest("node2", true);
  // Mixing a rack-qualified node with a bare one: the bare node falls back
  // to "/default-rack", so both racks get their own request.
  verifyResourceRequestLocality("/rack1/node1,node2", null,
      anyRequest, rackRequest, firstNodeRequest,
      defaultRackRequest, secondNodeRequest);
  verifyResourceRequestLocality("/rack1/node1,node2", "label1",
      anyRequest, rackRequest, firstNodeRequest,
      defaultRackRequest, secondNodeRequest);
}
@Test
public void testResourceRequestLocalityInvalid() throws Exception {
  // Both malformed strict-locality values must be rejected.
  expectInvalidResourceName("rack/node1");        // rack missing leading '/'
  expectInvalidResourceName("/rack/node1/blah");  // too many path components
}

/**
 * Asserts that the given strict-locality value is rejected during
 * submission-context creation with an "Invalid resource name" error.
 *
 * @param strictResource malformed value for the AM strict-locality config
 */
private void expectInvalidResourceName(String strictResource)
    throws Exception {
  try {
    verifyResourceRequestLocality(strictResource, null,
        new ResourceRequest[]{});
    fail("Should have failed due to invalid resource but did not");
  } catch (IOException ioe) {
    assertTrue(ioe.getMessage().contains("Invalid resource name"));
  }
}
/**
 * Builds a submission context with the given strict-locality value and
 * optional AM node-label expression, then asserts that the AM container
 * resource requests exactly match {@code expectedReqs} (order included).
 *
 * @param strictResource value for MRJobConfig.AM_STRICT_LOCALITY, or null
 *     to leave it unset
 * @param label AM node-label expression, or null; when set it is also
 *     stamped onto every expected request before comparison
 * @param expectedReqs the exact AM container resource requests expected
 */
private void verifyResourceRequestLocality(String strictResource,
    String label, ResourceRequest... expectedReqs) throws Exception {
  JobConf jobConf = new JobConf();
  if (strictResource != null) {
    jobConf.set(MRJobConfig.AM_STRICT_LOCALITY, strictResource);
  }
  if (label != null) {
    jobConf.set(MRJobConfig.AM_NODE_LABEL_EXP, label);
    // The label is expected to be propagated to every AM request.
    for (ResourceRequest expectedReq : expectedReqs) {
      expectedReq.setNodeLabelExpression(label);
    }
  }
  YARNRunner yarnRunner = new YARNRunner(jobConf);
  ApplicationSubmissionContext appSubCtx =
      buildSubmitContext(yarnRunner, jobConf);
  assertEquals(Arrays.asList(expectedReqs),
      appSubCtx.getAMContainerResourceRequests());
}
/**
 * Creates a single-container AM resource request for the given resource
 * name (node, rack or ANY) with the default AM memory and vcores.
 *
 * @param name resource name the request targets
 * @param relaxLocality whether relaxed locality is enabled on the request
 * @return the populated request at AM container priority
 */
private ResourceRequest createResourceRequest(String name,
    boolean relaxLocality) {
  // Default AM container capability.
  Resource amCapability = recordFactory.newRecordInstance(Resource.class);
  amCapability.setMemorySize(MRJobConfig.DEFAULT_MR_AM_VMEM_MB);
  amCapability.setVirtualCores(MRJobConfig.DEFAULT_MR_AM_CPU_VCORES);

  ResourceRequest request =
      recordFactory.newRecordInstance(ResourceRequest.class);
  request.setPriority(YARNRunner.AM_CONTAINER_PRIORITY);
  request.setCapability(amCapability);
  request.setResourceName(name);
  request.setRelaxLocality(relaxLocality);
  request.setNumContainers(1);
  return request;
}
@Test
public void testAMStandardEnvWithDefaultLibPath() throws Exception {
  // No custom lib path: the AM env falls back to the default admin env.
  testAMStandardEnv(false, false);
}
@Test
public void testAMStandardEnvWithCustomLibPath() throws Exception {
  // Custom admin+user lib paths supplied as k=v env lists.
  testAMStandardEnv(true, false);
}
@Test
public void testAMStandardEnvWithCustomLibPathWithSeparateEnvProps()
    throws Exception {
  // Custom lib paths supplied as individual per-variable properties.
  testAMStandardEnv(true, true);
}
/**
 * Verifies the standard AM environment in the submission context:
 * LD_LIBRARY_PATH must start with PWD (optionally followed by the
 * configured admin and user lib paths) and SHELL must be set from the
 * admin user-shell config.
 *
 * @param customLibPath whether admin/user lib paths are configured
 * @param useSeparateEnvProps whether env is configured via individual
 *     per-variable properties instead of k=v lists
 */
private void testAMStandardEnv(boolean customLibPath,
    boolean useSeparateEnvProps) throws Exception {
  // the Windows behavior is different and this test currently doesn't really
  // apply
  // MAPREDUCE-6588 should revisit this test
  assumeNotWindows();
  final String ADMIN_LIB_PATH = "foo";
  final String USER_LIB_PATH = "bar";
  final String USER_SHELL = "shell";
  JobConf jobConf = new JobConf();
  String pathKey = Environment.LD_LIBRARY_PATH.name();

  if (customLibPath) {
    if (useSeparateEnvProps) {
      // Specify these as individual variables instead of k=v lists
      jobConf.set(MRJobConfig.MR_AM_ADMIN_USER_ENV + "." + pathKey,
          ADMIN_LIB_PATH);
      jobConf.set(MRJobConfig.MR_AM_ENV + "." + pathKey, USER_LIB_PATH);
    } else {
      jobConf.set(MRJobConfig.MR_AM_ADMIN_USER_ENV, pathKey + "=" +
          ADMIN_LIB_PATH);
      jobConf.set(MRJobConfig.MR_AM_ENV, pathKey + "=" + USER_LIB_PATH);
    }
  }
  jobConf.set(MRJobConfig.MAPRED_ADMIN_USER_SHELL, USER_SHELL);

  YARNRunner yarnRunner = new YARNRunner(jobConf);
  ApplicationSubmissionContext appSubCtx =
      buildSubmitContext(yarnRunner, jobConf);

  // make sure PWD is first in the lib path
  ContainerLaunchContext clc = appSubCtx.getAMContainerSpec();
  Map<String, String> env = clc.getEnvironment();
  String libPath = env.get(pathKey);
  assertNotNull(pathKey + " not set", libPath);
  // Separator depends on whether cross-platform submission is enabled.
  String cps = jobConf.getBoolean(
      MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM,
      MRConfig.DEFAULT_MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM)
      ? ApplicationConstants.CLASS_PATH_SEPARATOR : File.pathSeparator;
  String expectedLibPath =
      MRApps.crossPlatformifyMREnv(conf, Environment.PWD);
  if (customLibPath) {
    // append admin libpath and user libpath
    expectedLibPath += cps + ADMIN_LIB_PATH + cps + USER_LIB_PATH;
  } else {
    // Default admin env is "LD_LIBRARY_PATH=<value>"; strip the key part.
    expectedLibPath += cps +
        MRJobConfig.DEFAULT_MR_AM_ADMIN_USER_ENV.substring(
            pathKey.length() + 1);
  }
  assertEquals("Bad AM " + pathKey + " setting", expectedLibPath, libPath);

  // make sure SHELL is set
  String shell = env.get(Environment.SHELL.name());
  assertNotNull("SHELL not set", shell);
  assertEquals("Bad SHELL setting", USER_SHELL, shell);
}
/**
 * Verifies that job priority is carried into the submission context, both
 * as a symbolic name ("LOW" maps to 2) and as an explicit integer.
 */
@Test
public void testJobPriority() throws Exception {
  JobConf jobConf = new JobConf();
  jobConf.set(MRJobConfig.PRIORITY, "LOW");
  YARNRunner yarnRunner = new YARNRunner(jobConf);
  ApplicationSubmissionContext appSubCtx = buildSubmitContext(yarnRunner,
      jobConf);
  // 2 corresponds to LOW.
  // Note: JUnit's assertEquals takes (expected, actual) in that order;
  // the original had the arguments swapped, which garbles failure messages.
  assertEquals(Priority.newInstance(2), appSubCtx.getPriority());

  // Set an integer explicitly
  jobConf.set(MRJobConfig.PRIORITY, "12");
  yarnRunner = new YARNRunner(jobConf);
  appSubCtx = buildSubmitContext(yarnRunner,
      jobConf);

  // Verify whether 12 is set to submission context
  assertEquals(Priority.newInstance(12), appSubCtx.getPriority());
}
/**
 * Writes the staging files the submission path expects in the test work
 * dir (job.xml with the current conf, plus empty split and split-metainfo
 * files) and builds the submission context through the given runner.
 *
 * The original left the job.xml stream open if {@code conf.writeXml}
 * threw; try-with-resources guarantees every stream is closed.
 *
 * @param yarnRunner runner used to create the submission context
 * @param jobConf job configuration passed to the runner
 * @return the created application submission context
 */
private ApplicationSubmissionContext buildSubmitContext(
    YARNRunner yarnRunner, JobConf jobConf) throws IOException {
  File jobxml = new File(testWorkDir, MRJobConfig.JOB_CONF_FILE);
  try (OutputStream out = new FileOutputStream(jobxml)) {
    conf.writeXml(out);
  }

  // Empty split files are sufficient for submission-context creation.
  File jobsplit = new File(testWorkDir, MRJobConfig.JOB_SPLIT);
  try (OutputStream out = new FileOutputStream(jobsplit)) {
    // intentionally empty
  }
  File jobsplitmetainfo = new File(testWorkDir,
      MRJobConfig.JOB_SPLIT_METAINFO);
  try (OutputStream out = new FileOutputStream(jobsplitmetainfo)) {
    // intentionally empty
  }

  return yarnRunner.createApplicationSubmissionContext(jobConf,
      testWorkDir.toString(), new Credentials());
}
// Test configs that match regex expression should be set in
// containerLaunchContext
@Test
public void testSendJobConf() throws IOException {
  JobConf jobConf = new JobConf();
  // HA-style HDFS client settings that the regex below is meant to match.
  jobConf.set("dfs.nameservices", "mycluster1,mycluster2");
  jobConf.set("dfs.namenode.rpc-address.mycluster2.nn1", "123.0.0.1");
  jobConf.set("dfs.namenode.rpc-address.mycluster2.nn2", "123.0.0.2");
  jobConf.set("dfs.ha.namenodes.mycluster2", "nn1,nn2");
  jobConf.set("dfs.client.failover.proxy.provider.mycluster2", "provider");
  // A setting that must NOT match the regex and must not be forwarded.
  jobConf.set("hadoop.tmp.dir", "testconfdir");
  jobConf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
      "kerberos");
  // Whitelist regex of config keys forwarded in the token conf.
  jobConf.set("mapreduce.job.send-token-conf",
      "dfs.nameservices|^dfs.namenode.rpc-address.*$|^dfs.ha.namenodes.*$"
      + "|^dfs.client.failover.proxy.provider.*$"
      + "|dfs.namenode.kerberos.principal");
  UserGroupInformation.setConfiguration(jobConf);

  YARNRunner yarnRunner = new YARNRunner(jobConf);
  ApplicationSubmissionContext submissionContext =
      buildSubmitContext(yarnRunner, jobConf);
  Configuration confSent = BuilderUtils.parseTokensConf(submissionContext);

  // configs that match regex should be included
  Assert.assertEquals("123.0.0.1",
      confSent.get("dfs.namenode.rpc-address.mycluster2.nn1"));
  Assert.assertEquals("123.0.0.2",
      confSent.get("dfs.namenode.rpc-address.mycluster2.nn2"));

  // configs that aren't matching regex should not be included
  Assert.assertTrue(confSent.get("hadoop.tmp.dir") == null || !confSent
      .get("hadoop.tmp.dir").equals("testconfdir"));
  // Restore static UGI state for subsequent tests.
  UserGroupInformation.reset();
}
/**
 * Verifies that a custom resource type configured for the AM is carried
 * into the AM container resource request with its default unit, alongside
 * the configured vcores.
 */
@Test
public void testCustomAMRMResourceType() throws Exception {
  initResourceTypes();
  final String customResourceName = "a-custom-resource";

  JobConf jobConf = new JobConf();
  jobConf.setInt(MRJobConfig.MR_AM_RESOURCE_PREFIX + customResourceName, 5);
  jobConf.setInt(MRJobConfig.MR_AM_CPU_VCORES, 3);

  yarnRunner = new YARNRunner(jobConf);
  submissionContext = buildSubmitContext(yarnRunner, jobConf);

  List<ResourceRequest> amRequests =
      submissionContext.getAMContainerResourceRequests();
  Assert.assertEquals(1, amRequests.size());
  ResourceRequest amRequest = amRequests.get(0);
  ResourceInformation customResource = amRequest.getCapability()
      .getResourceInformation(customResourceName);
  Assert.assertEquals("Expecting the default unit (G)",
      "G", customResource.getUnits());
  Assert.assertEquals(5L, customResource.getValue());
  Assert.assertEquals(3, amRequest.getCapability().getVirtualCores());
}
/**
 * Verifies that both the canonical and the alternative memory resource
 * names are honoured for the AM and converted to MB (3 Gi == 3072 MB).
 */
@Test
public void testAMRMemoryRequest() throws Exception {
  for (String memoryName : ImmutableList.of(
      MRJobConfig.RESOURCE_TYPE_NAME_MEMORY,
      MRJobConfig.RESOURCE_TYPE_ALTERNATIVE_NAME_MEMORY)) {
    JobConf jobConf = new JobConf();
    jobConf.set(MRJobConfig.MR_AM_RESOURCE_PREFIX + memoryName, "3 Gi");

    yarnRunner = new YARNRunner(jobConf);
    submissionContext = buildSubmitContext(yarnRunner, jobConf);

    List<ResourceRequest> amRequests =
        submissionContext.getAMContainerResourceRequests();
    Assert.assertEquals(1, amRequests.size());
    Assert.assertEquals(3072,
        amRequests.get(0).getCapability().getMemorySize());
  }
}
/**
 * Verifies that a memory value given via the generic resource prefix
 * overrides the legacy yarn.app.mapreduce.am.resource.mb setting, and that
 * a WARN log entry documenting the override is emitted.
 */
@Test
public void testAMRMemoryRequestOverriding() throws Exception {
  for (String memoryName : ImmutableList.of(
      MRJobConfig.RESOURCE_TYPE_NAME_MEMORY,
      MRJobConfig.RESOURCE_TYPE_ALTERNATIVE_NAME_MEMORY)) {
    // Capture YARNRunner's log output to assert on the override warning.
    TestAppender testAppender = new TestAppender();
    org.apache.log4j.Logger logger =
        org.apache.log4j.Logger.getLogger(YARNRunner.class);
    logger.addAppender(testAppender);
    try {
      JobConf jobConf = new JobConf();
      // Both the new-style resource ("3 Gi") and the legacy MB setting.
      jobConf.set(MRJobConfig.MR_AM_RESOURCE_PREFIX + memoryName, "3 Gi");
      jobConf.setInt(MRJobConfig.MR_AM_VMEM_MB, 2048);

      yarnRunner = new YARNRunner(jobConf);

      submissionContext = buildSubmitContext(yarnRunner, jobConf);

      List<ResourceRequest> resourceRequests =
          submissionContext.getAMContainerResourceRequests();
      Assert.assertEquals(1, resourceRequests.size());
      ResourceRequest resourceRequest = resourceRequests.get(0);

      // The new-style value (3 Gi == 3072 MB) must win.
      long memorySize = resourceRequest.getCapability().getMemorySize();
      Assert.assertEquals(3072, memorySize);
      assertTrue(testAppender.getLogEvents().stream().anyMatch(
          e -> e.getLevel() == Level.WARN && ("Configuration " +
              "yarn.app.mapreduce.am.resource." + memoryName + "=3Gi is " +
              "overriding the yarn.app.mapreduce.am.resource.mb=2048 " +
              "configuration").equals(e.getMessage())));
    } finally {
      // Always detach the appender so other tests aren't affected.
      logger.removeAppender(testAppender);
    }
  }
}
/**
 * Registers custom resource types via a configuration provider so that
 * ResourceUtils picks them up for subsequent submissions.
 */
private void initResourceTypes() {
  Configuration resourceTypeConf = new Configuration();
  resourceTypeConf.set(YarnConfiguration.RM_CONFIGURATION_PROVIDER_CLASS,
      CustomResourceTypesConfigurationProvider.class.getName());
  ResourceUtils.resetResourceTypes(resourceTypeConf);
}
}
| apache-2.0 |
google/llvm-propeller | libcxx/test/std/numerics/rand/rand.dis/rand.dist.norm/rand.dist.norm.normal/io.pass.cpp | 1178 | //===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// <random>
// template<class RealType = double>
// class normal_distribution
// template <class CharT, class Traits, class RealType>
// basic_ostream<CharT, Traits>&
// operator<<(basic_ostream<CharT, Traits>& os,
// const normal_distribution<RealType>& x);
// template <class CharT, class Traits, class RealType>
// basic_istream<CharT, Traits>&
// operator>>(basic_istream<CharT, Traits>& is,
// normal_distribution<RealType>& x);
#include <random>
#include <sstream>
#include <cassert>
#include "test_macros.h"
int main(int, char**)
{
    {
        typedef std::normal_distribution<> D;
        // Stream a distribution out and read it back in; the round-tripped
        // object must compare equal to the original (same parameters and
        // cached state).
        D original(7, 5);
        std::ostringstream out;
        out << original;
        std::istringstream in(out.str());
        D restored;
        in >> restored;
        assert(original == restored);
    }

    return 0;
}
| apache-2.0 |
kunickiaj/homebrew-core | Formula/naga.rb | 731 | class Naga < Formula
desc "Terminal implementation of the Snake game"
homepage "https://github.com/anayjoshi/naga/"
url "https://github.com/anayjoshi/naga/archive/naga-v1.0.tar.gz"
sha256 "7f56b03b34e2756b9688e120831ef4f5932cd89b477ad8b70b5bcc7c32f2f3b3"
bottle do
cellar :any_skip_relocation
sha256 "8baa28b92a0d6970a857c859b11e4a1df878db5270f259bd3ccfe3b5f57f3303" => :sierra
sha256 "6ff3dd51d1cdeed9364c36c25d1c2794f973e2927077eaeb251fa0dbfc48a531" => :el_capitan
sha256 "fe303605603697993def097e9557a0dcec83d323a0b43d51fb1811108937da6c" => :yosemite
end
def install
  # Upstream's Makefile installs the single binary at INSTALL_PATH, so
  # point it directly at the final location under bin.
  bin.mkpath
  system "make", "install", "INSTALL_PATH=#{bin}/naga"
end
test do
  # File.exist? returned a boolean that was silently discarded, so this
  # test block could never fail; assert on the predicate instead.
  assert_predicate bin/"naga", :exist?
end
end
| bsd-2-clause |
ylluminarious/homebrew-core | Formula/pakchois.rb | 885 | class Pakchois < Formula
desc "PKCS #11 wrapper library"
homepage "http://www.manyfish.co.uk/pakchois/"
url "http://www.manyfish.co.uk/pakchois/pakchois-0.4.tar.gz"
sha256 "d73dc5f235fe98e4d1e8c904f40df1cf8af93204769b97dbb7ef7a4b5b958b9a"
bottle do
sha256 "ee7978dad7998e747e468f1b9afaa692304efb2ca857d4c0903945f030841fb7" => :high_sierra
sha256 "84a90b245c59676817d4c9100d64d7747068e0d3557fc5c3218d8a83a98f78fe" => :sierra
sha256 "b02057a2cc01daa05754c537820b58d7c77b632fc5fdb2a6f6dcec77341fe65b" => :el_capitan
sha256 "30a06a914f2025d7d23dff48fa8523be455bf925a3282a8c35f56779fd8bd27a" => :yosemite
sha256 "03d6ab9d51bdebf61b3c415908e222467fd31cefc4811200eee9e407c604f7f5" => :mavericks
end
# Standard autotools build installed into the Cellar prefix.
def install
  system "./configure", "--disable-dependency-tracking",
                        "--prefix=#{prefix}"
  system "make", "install"
end
end
| bsd-2-clause |
nsams/koala-framework | tests/Kwf/Component/Generator/Recursive/Flag.php | 226 | <?php
class Kwf_Component_Generator_Recursive_Flag extends Kwc_Abstract
{
    /**
     * Component settings: identical to the parent's, with the 'foo' flag
     * switched on.
     */
    public static function getSettings()
    {
        $settings = parent::getSettings();
        $settings['flags']['foo'] = true;
        return $settings;
    }
}
| bsd-2-clause |
smessmer/homebrew-core | Formula/yazpp.rb | 912 | class Yazpp < Formula
desc "C++ API for the Yaz toolkit"
homepage "https://www.indexdata.com/yazpp"
url "http://ftp.indexdata.dk/pub/yazpp/yazpp-1.6.5.tar.gz"
sha256 "802537484d4247706f31c121df78b29fc2f26126995963102e19ef378f3c39d2"
bottle do
cellar :any
sha256 "ad3ae23deb4f16249fbfc8794a30116911a211c76adbc024948cf9b8842a55b4" => :mojave
sha256 "870f730cc4ee76700749f4091d111cb0e9a529d43c1ba7cb40b36807e49d9b76" => :high_sierra
sha256 "794e2e265413005b3c26a0fa38e1ab8957bd1ec13cf4abb63730070181d9beb4" => :sierra
sha256 "292447a86953bb10361130542d2db9e0c0fc410e9be3b13b8c80891fbfaeec20" => :el_capitan
sha256 "6f769c30797af9cb98bf02491706f96b7085eed2d5d05c377e51ca5e0bf8541a" => :yosemite
end
depends_on "yaz"
# Standard autotools build; the yaz dependency is located via its prefix.
def install
  system "./configure", "--disable-dependency-tracking",
                        "--prefix=#{prefix}"
  system "make", "install"
end
end
| bsd-2-clause |
dsarman/perun | perun-core/src/main/java/cz/metacentrum/perun/core/impl/modules/attributes/urn_perun_resource_attribute_def_def_unixGroupName_namespace.java | 9696 | package cz.metacentrum.perun.core.impl.modules.attributes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import cz.metacentrum.perun.core.api.Attribute;
import cz.metacentrum.perun.core.api.AttributeDefinition;
import cz.metacentrum.perun.core.api.AttributesManager;
import cz.metacentrum.perun.core.api.Facility;
import cz.metacentrum.perun.core.api.Group;
import cz.metacentrum.perun.core.api.Resource;
import cz.metacentrum.perun.core.api.Vo;
import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.ConsistencyErrorException;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeValueException;
import cz.metacentrum.perun.core.api.exceptions.WrongReferenceAttributeValueException;
import cz.metacentrum.perun.core.impl.PerunSessionImpl;
import cz.metacentrum.perun.core.implApi.modules.attributes.ResourceAttributesModuleAbstract;
import cz.metacentrum.perun.core.implApi.modules.attributes.ResourceAttributesModuleImplApi;
import java.util.HashSet;
import java.util.Set;
/**
 * Attribute module for the resource attribute
 * unixGroupName-namespace:&lt;namespace&gt;.
 *
 * Validates that the resource's unix group name is non-null, well-formed,
 * not reserved, and does not conflict (via differing GIDs) with the same
 * name used by other groups or resources in the same namespace. On change,
 * propagates a matching unixGID attribute when the facility shares the
 * same group-name and GID namespaces.
 *
 * @author Slavek Licehammer &lt;glory@ics.muni.cz&gt;
 */
public class urn_perun_resource_attribute_def_def_unixGroupName_namespace extends ResourceAttributesModuleAbstract implements ResourceAttributesModuleImplApi {

	// URNs of the related facility, resource and group attribute definitions.
	private static final String A_F_unixGroupName_namespace = AttributesManager.NS_FACILITY_ATTR_DEF + ":unixGroupName-namespace";
	private static final String A_F_unixGID_namespace = AttributesManager.NS_FACILITY_ATTR_DEF + ":unixGID-namespace";
	private static final String A_R_unixGID_namespace = AttributesManager.NS_RESOURCE_ATTR_DEF + ":unixGID-namespace";
	private static final String A_G_unixGroupName_namespace = AttributesManager.NS_GROUP_ATTR_DEF + ":unixGroupName-namespace";

	/**
	 * Checks syntax, reserved names, and cross-entity consistency of the
	 * group name; throws if another group/resource uses the same name with
	 * a different GID in any shared GID namespace, or if the caller lacks
	 * write rights on all conflicting entities.
	 */
	@Override
	public void checkAttributeValue(PerunSessionImpl sess, Resource resource, Attribute attribute) throws InternalErrorException, WrongAttributeValueException, WrongReferenceAttributeValueException, WrongAttributeAssignmentException{
		//prepare namespace and groupName value variables
		String groupName = null;
		if(attribute.getValue() != null) groupName = (String) attribute.getValue();
		String groupNameNamespace = attribute.getFriendlyNameParameter();

		if(groupName == null) {
			// if this is resource, its not ok
			throw new WrongAttributeValueException(attribute, "Attribute groupName-namespace for resourece can't be null.");
		}

		//Check attribute regex
		sess.getPerunBl().getModulesUtilsBl().checkAttributeRegex(attribute, "^[-_.a-zA-Z0-9]+$");

		//Check reserved unix group names
		sess.getPerunBl().getModulesUtilsBl().checkReservedUnixGroupNames(attribute);

		try {
			//prepare attributes group and resource unixGroupName
			Attribute groupUnixGroupName = new Attribute(sess.getPerunBl().getAttributesManagerBl().getAttributeDefinition(sess, A_G_unixGroupName_namespace + ":" + groupNameNamespace));
			Attribute resourceUnixGroupName = attribute;
			groupUnixGroupName.setValue(attribute.getValue());

			//prepare lists of groups and resources with the same groupName value in the same namespace
			List<Group> groupsWithSameGroupNameInTheSameNamespace = new ArrayList<Group>();
			List<Resource> resourcesWithSameGroupNameInTheSameNamespace = new ArrayList<Resource>();

			//Fill lists of groups and resources
			groupsWithSameGroupNameInTheSameNamespace.addAll(sess.getPerunBl().getGroupsManagerBl().getGroupsByAttribute(sess, groupUnixGroupName));
			resourcesWithSameGroupNameInTheSameNamespace.addAll(sess.getPerunBl().getResourcesManagerBl().getResourcesByAttribute(sess, resourceUnixGroupName));

			//If there is no group or resource with same GroupNameInTheSameNamespace, its ok
			if(groupsWithSameGroupNameInTheSameNamespace.isEmpty() && resourcesWithSameGroupNameInTheSameNamespace.isEmpty()) return;

			//First need to know that i have right to write any of duplicit groupName-namespace attribute
			boolean haveRights = sess.getPerunBl().getModulesUtilsBl().haveRightToWriteAttributeInAnyGroupOrResource(sess, groupsWithSameGroupNameInTheSameNamespace, resourcesWithSameGroupNameInTheSameNamespace, groupUnixGroupName, resourceUnixGroupName);
			if(!haveRights) throw new WrongReferenceAttributeValueException(attribute, "This groupName is already used for other group or resource and user has no rights to use it.");

			//Now if rights are ok, prepare lists of UnixGIDs attributes of this group (also equivalent resource GID)
			List<Attribute> resourceUnixGIDs = sess.getPerunBl().getAttributesManagerBl().getAllAttributesStartWithNameWithoutNullValue(sess, resource, A_R_unixGID_namespace + ":");
			List<Attribute> groupVersionUnixGIDs = sess.getPerunBl().getModulesUtilsBl().getListOfGroupGIDsFromListOfResourceGIDs(sess, resourceUnixGIDs);

			//In list of duplicit groups looking for GID in same namespace but with different value, thats not correct
			if(!groupsWithSameGroupNameInTheSameNamespace.isEmpty()) {
				for(Group g: groupsWithSameGroupNameInTheSameNamespace) {
					for(Attribute a: groupVersionUnixGIDs) {
						int compare;
						compare = sess.getPerunBl().getModulesUtilsBl().haveTheSameAttributeWithTheSameNamespace(sess, g, a);

						if(compare > 0) {
							throw new WrongReferenceAttributeValueException(attribute, a, "One of the group GIDs is from the same namespace like other group GID but with different values.");
						}
					}
				}
			}

			//In list of duplicit resources looking for GID in same namespace but with different value, thats not correct
			if(!resourcesWithSameGroupNameInTheSameNamespace.isEmpty()) {
				for(Resource r: resourcesWithSameGroupNameInTheSameNamespace) {
					for(Attribute a: resourceUnixGIDs) {
						int compare;
						compare = sess.getPerunBl().getModulesUtilsBl().haveTheSameAttributeWithTheSameNamespace(sess, r, a);

						if(compare > 0) {
							throw new WrongReferenceAttributeValueException(attribute, a, "One of the group GIDs is from the same namespace like other resource GIDs but with different values.");
						}
					}
				}
			}
		} catch(AttributeNotExistsException ex) {
			throw new ConsistencyErrorException(ex);
		}
	}

	/**
	 * After a group name is set (not removed), propagates a unixGID
	 * attribute on the resource when the facility uses the same group-name
	 * namespace and has a GID namespace configured: the GID is filled if
	 * missing, otherwise its current value is re-checked.
	 */
	@Override
	public void changedAttributeHook(PerunSessionImpl session, Resource resource, Attribute attribute) throws InternalErrorException, WrongReferenceAttributeValueException {
		//Need to know if this is remove or set, if value is null, its remove, otherway it is set
		String groupNameNamespace = attribute.getFriendlyNameParameter();

		try {
			if(attribute.getValue() == null) {
				//This is ok, for now no changes for removing some GroupName of this Resource
			} else {
				//First need to find facility for the group
				Facility facilityOfResource = session.getPerunBl().getResourcesManagerBl().getFacility(session, resource);
				String gidNamespace = null;

				//If facility has the same namespace of GroupName like attribute unixGroupName-namespace, then prepare gidNamespace
				Attribute facilityGroupNameNamespace = session.getPerunBl().getAttributesManagerBl().getAttribute(session, facilityOfResource, A_F_unixGroupName_namespace);
				if(facilityGroupNameNamespace.getValue() != null) {
					if(groupNameNamespace.equals(facilityGroupNameNamespace.getValue())) {
						Attribute facilityGIDNamespace = session.getPerunBl().getAttributesManagerBl().getAttribute(session, facilityOfResource, A_F_unixGID_namespace);
						if(facilityGIDNamespace.getValue() != null) {
							gidNamespace = (String) facilityGIDNamespace.getValue();
						}
					}
				}

				//If there is any gidNamespace which is need to be set, do it there
				if(gidNamespace != null) {
					Attribute resourceUnixGIDNamespace = session.getPerunBl().getAttributesManagerBl().getAttribute(session, resource, A_R_unixGID_namespace + ":" + gidNamespace);
					if(resourceUnixGIDNamespace.getValue() == null) {
						// GID missing: fill a free one and set it; a still-null fill means none is available.
						resourceUnixGIDNamespace = session.getPerunBl().getAttributesManagerBl().fillAttribute(session, resource, resourceUnixGIDNamespace);
						if(resourceUnixGIDNamespace.getValue() == null) throw new WrongReferenceAttributeValueException(attribute, resourceUnixGIDNamespace);
						try {
							session.getPerunBl().getAttributesManagerBl().setAttribute(session, resource, resourceUnixGIDNamespace);
						} catch (WrongAttributeValueException ex) {
							throw new WrongReferenceAttributeValueException(attribute, resourceUnixGIDNamespace, ex);
						}
					} else {
						// GID already present: re-validate it against the new group name.
						try {
							session.getPerunBl().getAttributesManagerBl().checkAttributeValue(session, resource, resourceUnixGIDNamespace);
						} catch (WrongAttributeValueException ex) {
							throw new WrongReferenceAttributeValueException(attribute, resourceUnixGIDNamespace, ex);
						}
					}
				}
			}
		} catch (WrongAttributeAssignmentException ex) {
			//TODO: need to add WrongAttributeAssignmentException to header of modules methods
			throw new InternalErrorException(ex);
		} catch (AttributeNotExistsException ex) {
			throw new ConsistencyErrorException(ex);
		}
	}

	/**
	 * This module depends on the facility's unixGroupName-namespace
	 * attribute (used to decide GID propagation in the change hook).
	 */
	@Override
	public List<String> getDependencies() {
		List<String> dependencies = new ArrayList<String>();
		dependencies.add(A_F_unixGroupName_namespace);
		return dependencies;
	}

	/*public AttributeDefinition getAttributeDefinition() {
		AttributeDefinition attr = new AttributeDefinition();
		attr.setNamespace(AttributesManager.NS_RESOURCE_ATTR_DEF);
		attr.setFriendlyName("unixGroupName-namespace");
		attr.setType(String.class.getName());
		attr.setDescription("Unix group name namespace.");
		return attr;
	}*/
}
smessmer/homebrew-core | Formula/gsl.rb | 980 | class Gsl < Formula
desc "Numerical library for C and C++"
homepage "https://www.gnu.org/software/gsl/"
url "https://ftp.gnu.org/gnu/gsl/gsl-2.5.tar.gz"
mirror "https://ftpmirror.gnu.org/gsl/gsl-2.5.tar.gz"
sha256 "0460ad7c2542caaddc6729762952d345374784100223995eb14d614861f2258d"
bottle do
cellar :any
sha256 "2b76f0bb640a36340efb3bc44a9df6e8b1694cc251637f95eca02c541add53ff" => :mojave
sha256 "a11e16ee61294794105faf42908ae1547617c822b19edca88a627917feb87f28" => :high_sierra
sha256 "79ad420d6c495d16a7a3ed57c5a5000dcd4f77cb98af27b3eb6c21e1a748a451" => :sierra
sha256 "af4c116bf27bc4880d85d1a50c62ba435e2a9bfae0b6a7f2a09f974791a91408" => :el_capitan
end
# Autotools build; GSL requires an explicit `make` before `make install`.
def install
  system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}"
  system "make" # A GNU tool which doesn't support just make install! Shameful!
  system "make", "install"
end
test do
  # Draw 30 samples from a Cauchy distribution; a non-zero exit status
  # from the installed binary fails the test.
  system bin/"gsl-randist", "0", "20", "cauchy", "30"
end
end
| bsd-2-clause |
Frottello/Studioaandelinde | sapphire/filesystem/Folder.php | 17497 | <?php
/**
* Represents a folder in the assets/ directory.
* The folder path is stored in the "Filename" property.
*
* Updating the "Name" or "Filename" properties on
* a folder object also updates all associated children
* (both {@link File} and {@link Folder} records).
*
* Deleting a folder will also remove the folder from the filesystem,
* including any subfolders and contained files. Use {@link deleteDatabaseOnly()}
* to avoid touching the filesystem.
*
* See {@link File} documentation for more details about the
* relationship between the database and filesystem in the sapphire file APIs.
*
* @package sapphire
* @subpackage filesystem
*/
class Folder extends File {
static $default_sort = "\"Sort\"";
/**
* Find the given folder or create it both as {@link Folder} database records
* and on the filesystem. If necessary, creates parent folders as well.
*
* @param $folderPath string Absolute or relative path to the file.
* If path is relative, its interpreted relative to the "assets/" directory.
* @return Folder
*/
static function findOrMake($folderPath) {
// Create assets directory, if it is missing
if(!file_exists(ASSETS_PATH)) Filesystem::makeFolder(ASSETS_PATH);
$folderPath = trim(Director::makeRelative($folderPath));
// replace leading and trailing slashes
$folderPath = preg_replace('/^\/?(.*)\/?$/', '$1', $folderPath);
$parts = explode("/",$folderPath);
$parentID = 0;
$item = null;
foreach($parts as $part) {
if(!$part) continue; // happens for paths with a trailing slash
$item = DataObject::get_one(
"Folder",
sprintf(
"\"Name\" = '%s' AND \"ParentID\" = %d",
Convert::raw2sql($part),
(int)$parentID
)
);
if(!$item) {
$item = new Folder();
$item->ParentID = $parentID;
$item->Name = $part;
$item->Title = $part;
$item->write();
}
if(!file_exists($item->getFullPath())) {
Filesystem::makeFolder($item->getFullPath());
}
$parentID = $item->ID;
}
return $item;
}
/**
 * Syncronise the file database with the actual content of the assets folder.
 *
 * Merges duplicate child records, creates records for files/folders found
 * on disk, recurses into subfolders, and deletes records whose filesystem
 * counterpart no longer exists (including this folder's own record if its
 * directory is gone).
 *
 * @return array Counts of changes made: array('added' => int, 'deleted' => int)
 */
function syncChildren() {
	$parentID = (int)$this->ID; // parentID = 0 on the singleton, used as the 'root node';
	$added = 0;
	$deleted = 0;

	// First, merge any children that are duplicates
	$duplicateChildrenNames = DB::query("SELECT \"Name\" FROM \"File\" WHERE \"ParentID\" = $parentID GROUP BY \"Name\" HAVING count(*) > 1")->column();
	if($duplicateChildrenNames) foreach($duplicateChildrenNames as $childName) {
		$childName = Convert::raw2sql($childName);
		// Note, we do this in the database rather than object-model; otherwise we get all sorts of problems about deleting files
		$children = DB::query("SELECT \"ID\" FROM \"File\" WHERE \"Name\" = '$childName' AND \"ParentID\" = $parentID")->column();
		if($children) {
			// Keep the first record; reparent each duplicate's children onto it, then delete the duplicate.
			$keptChild = array_shift($children);
			foreach($children as $removedChild) {
				DB::query("UPDATE \"File\" SET \"ParentID\" = $keptChild WHERE \"ParentID\" = $removedChild");
				DB::query("DELETE FROM \"File\" WHERE \"ID\" = $removedChild");
			}
		} else {
			user_error("Inconsistent database issue: SELECT ID FROM \"File\" WHERE Name = '$childName' AND ParentID = $parentID should have returned data", E_USER_WARNING);
		}
	}

	// Get index of database content
	// We don't use DataObject so that things like subsites doesn't muck with this.
	$dbChildren = DB::query("SELECT * FROM \"File\" WHERE \"ParentID\" = $parentID");
	$hasDbChild = array();
	if($dbChildren) {
		foreach($dbChildren as $dbChild) {
			$className = $dbChild['ClassName'];
			if(!$className) $className = "File";
			$hasDbChild[$dbChild['Name']] = new $className($dbChild);
		}
	}
	// Records still in this array after the disk scan have no filesystem
	// counterpart and will be deleted.
	$unwantedDbChildren = $hasDbChild;

	// Iterate through the actual children, correcting the database as necessary
	$baseDir = $this->FullPath;

	if(!$this->Filename) die($this->ID . " - " . $this->FullPath);

	if(file_exists($baseDir)) {
		$actualChildren = scandir($baseDir);
		foreach($actualChildren as $actualChild) {
			// Skip dotfiles, underscore-prefixed entries, Thumbs.db and web.config.
			if($actualChild[0] == '.' || $actualChild[0] == '_' || substr($actualChild,0,6) == 'Thumbs' || $actualChild == 'web.config') {
				continue;
			}

			// A record with a bad class type doesn't deserve to exist. It must be purged!
			if(isset($hasDbChild[$actualChild])) {
				$child = $hasDbChild[$actualChild];
				if(( !( $child instanceof Folder ) && is_dir($baseDir . $actualChild) )
						|| (( $child instanceof Folder ) && !is_dir($baseDir . $actualChild)) ) {
					DB::query("DELETE FROM \"File\" WHERE \"ID\" = $child->ID");
					unset($hasDbChild[$actualChild]);
				}
			}

			if(isset($hasDbChild[$actualChild])) {
				$child = $hasDbChild[$actualChild];
				unset($unwantedDbChildren[$actualChild]);
			} else {
				// On disk but not in the database: create the record.
				$added++;
				$childID = $this->constructChild($actualChild);
				$child = DataObject::get_by_id("File", $childID);
			}

			// Recurse into subdirectories and accumulate their change counts.
			if( $child && is_dir($baseDir . $actualChild)) {
				$childResult = $child->syncChildren();
				$added += $childResult['added'];
				$deleted += $childResult['deleted'];
			}

			// Clean up the child record from memory after use. Important!
			$child->destroy();
			$child = null;
		}

		// Iterate through the unwanted children, removing them all
		if(isset($unwantedDbChildren)) foreach($unwantedDbChildren as $unwantedDbChild) {
			DB::query("DELETE FROM \"File\" WHERE \"ID\" = $unwantedDbChild->ID");
			$deleted++;
		}
	} else {
		// This folder's own directory is gone: remove its record too.
		DB::query("DELETE FROM \"File\" WHERE \"ID\" = $this->ID");
	}

	return array('added' => $added, 'deleted' => $deleted);
}
/**
 * Construct a child of this Folder with the given name.
 * It does this without actually using the object model, as this starts messing
 * with all the data. Rather, it does a direct database insert.
 *
 * @param string $name Name of the child file or directory (no path component).
 * @return int The database ID of the newly inserted File record.
 */
function constructChild($name) {
    // Determine the class name - File, Folder or Image
    $baseDir = $this->FullPath;
    if(is_dir($baseDir . $name)) {
        $className = "Folder";
    } else {
        // Could use getimagesize to get the type of the image
        // Classification is by extension only; anything unrecognised is a plain File.
        $ext = strtolower(substr($name,strrpos($name,'.')+1));
        switch($ext) {
            case "gif": case "jpg": case "jpeg": case "png": $className = "Image"; break;
            default: $className = "File";
        }
    }
    // Record the currently logged-in member as the owner, if any.
    if(Member::currentUser()) $ownerID = Member::currentUser()->ID;
    else $ownerID = 0;
    $filename = Convert::raw2sql($this->Filename . $name);
    // Folder filenames are stored with a trailing slash.
    if($className == 'Folder' ) $filename .= '/';
    $name = Convert::raw2sql($name);
    // Direct INSERT, deliberately bypassing the ORM (see docblock above).
    DB::query("INSERT INTO \"File\"
        (\"ClassName\", \"ParentID\", \"OwnerID\", \"Name\", \"Filename\", \"Created\", \"LastEdited\", \"Title\")
        VALUES ('$className', $this->ID, $ownerID, '$name', '$filename', " . DB::getConn()->now() . ',' . DB::getConn()->now() . ", '$name')");
    return DB::getGeneratedID("File");
}
/**
 * Take a file uploaded via a POST form, and save it inside this folder.
 *
 * Sanitises the uploaded name, de-duplicates it against existing files by
 * appending/incrementing a numeric suffix, then moves the file into place
 * and creates the matching database record.
 *
 * @param array $tmpFile An entry from $_FILES ('name', 'size', 'tmp_name', ...).
 * @return int|false|null ID of the new File record, false if the move failed,
 *                        or null when no size information was supplied.
 */
function addUploadToFolder($tmpFile) {
    if(!is_array($tmpFile)) {
        user_error("Folder::addUploadToFolder() Not passed an array. Most likely, the form hasn't got the right enctype", E_USER_ERROR);
    }
    if(!isset($tmpFile['size'])) {
        return;
    }

    $base = BASE_PATH;

    // Generate default filename: collapse spaces and strip unsafe characters.
    // Note: ereg_replace() was deprecated in PHP 5.3 and removed in PHP 7,
    // so the POSIX regexes have been converted to preg_replace() equivalents.
    $file = str_replace(' ', '-',$tmpFile['name']);
    $file = preg_replace('/[^A-Za-z0-9+.-]+/','',$file);
    $file = preg_replace('/-+/', '-',$file);
    // Strip leading underscores/dots; guard against the name becoming empty.
    while(strlen($file) && ($file[0] == '_' || $file[0] == '.')) {
        $file = substr($file, 1);
    }

    $file = $this->RelativePath . $file;
    Filesystem::makeFolder(dirname("$base/$file"));

    // Split the name into base + extension, keeping double-barrelled
    // compressed extensions (e.g. ".tar.gz") together.
    $doubleBarrelledExts = array('.gz', '.bz', '.bz2');

    $ext = "";
    if(preg_match('/^(.*)(\.[^.]+)$/', $file, $matches)) {
        $file = $matches[1];
        $ext = $matches[2];
        // Special case for double-barrelled
        if(in_array($ext, $doubleBarrelledExts) && preg_match('/^(.*)(\.[^.]+)$/', $file, $matches)) {
            $file = $matches[1];
            $ext = $matches[2] . $ext;
        }
    }
    $origFile = $file;

    // If the name is taken, append or increment a numeric suffix until unique.
    $i = 1;
    while(file_exists("$base/$file$ext")) {
        $i++;
        $oldFile = $file;

        if(strpos($file, '.') !== false) {
            $file = preg_replace('/[0-9]*(\.[^.]+$)/', $i . '\\1', $file);
        } elseif(strpos($file, '_') !== false) {
            $file = preg_replace('/_([^_]+$)/', '_' . $i, $file);
        } else {
            $file .= "_$i";
        }

        if($oldFile == $file && $i > 2) user_error("Couldn't fix $file$ext with $i", E_USER_ERROR);
    }

    if (move_uploaded_file($tmpFile['tmp_name'], "$base/$file$ext")) {
        // Update with the new image
        return $this->constructChild(basename($file . $ext));
    } else {
        if(!file_exists($tmpFile['tmp_name'])) user_error("Folder::addUploadToFolder: '$tmpFile[tmp_name]' doesn't exist", E_USER_ERROR);
        else user_error("Folder::addUploadToFolder: Couldn't copy '$tmpFile[tmp_name]' to '$base/$file$ext'", E_USER_ERROR);
        return false;
    }
}
/**
 * Folders have no validation rules of their own; always passes.
 *
 * @return ValidationResult Always a successful result.
 */
function validate() {
    return new ValidationResult(true);
}
//-------------------------------------------------------------------------------------------------
// Data Model Definition
// Folder paths always carry a trailing slash, unlike plain File paths.
function getRelativePath() {
    return parent::getRelativePath() . "/";
}
/**
 * Delete all children (database records, and filesystem contents where the
 * folder still exists on disk) before this folder record itself is deleted.
 */
function onBeforeDelete() {
    if($this->ID && ($children = $this->AllChildren())) {
        foreach($children as $child) {
            if(!$this->Filename || !$this->Name || !file_exists($this->getFullPath())) {
                // The folder is already gone from disk: blank the child's
                // name/filename so its delete() doesn't touch the filesystem.
                $child->setField('Name',null);
                $child->Filename = null;
            }
            $child->delete();
        }
    }

    // Do this after so a folder's contents are removed before we delete the folder.
    if($this->Filename && $this->Name && file_exists($this->getFullPath())) {
        $files = glob( $this->getFullPath() . '/*' );

        // Only remove the directory when it is empty, or only contains
        // the generated _resampled directory.
        if( !$files || ( count( $files ) == 1 && preg_match( '/\/_resampled$/', $files[0] ) ) )
            Filesystem::removeFolder( $this->getFullPath() );
    }

    parent::onBeforeDelete();
}
/**
 * Delete the database record (recursively for folders) without touching
 * the filesystem.
 */
function deleteDatabaseOnly() {
    $childRecords = $this->myChildren();
    if($childRecords) {
        foreach($childRecords as $childRecord) {
            $childRecord->deleteDatabaseOnly();
        }
    }
    parent::deleteDatabaseOnly();
}
/**
 * Returns the direct children of this record, queried against the base
 * data class so that all File subclasses are included.
 */
public function myChildren() {
    // Ugly, but functional.
    // Walk the class ancestry looking for the entry immediately after
    // "DataObject" — that is the base data class to query against.
    $ancestors = ClassInfo::ancestry($this->class);
    foreach($ancestors as $i => $a) {
        if(isset($baseClass) && $baseClass === -1) {
            // Previous iteration saw DataObject; this entry is the base class.
            $baseClass = $a;
            break;
        }
        // Sentinel: the next ancestry entry is the base class.
        if($a == "DataObject") $baseClass = -1;
    }

    $g = DataObject::get($baseClass, "\"ParentID\" = " . $this->ID);
    return $g;
}
/**
 * Returns true if this folder has children
 * (any record of any File subclass whose ParentID points here).
 *
 * @return bool
 */
public function hasChildren() {
    // Quote the ParentID identifier for consistency with the rest of this
    // class, and for ANSI-quoting databases (e.g. PostgreSQL).
    return (bool)DB::query("SELECT COUNT(*) FROM \"File\" WHERE \"ParentID\" = "
        . (int)$this->ID)->value();
}
/**
 * Returns true if this folder has children
 * that are themselves folders (any Folder subclass).
 *
 * @return bool
 */
public function hasChildFolders() {
    // Escape the Folder subclass names for use in the SQL IN clause.
    $SQL_folderClasses = Convert::raw2sql(ClassInfo::subclassesFor('Folder'));

    return (bool)DB::query("SELECT COUNT(*) FROM \"File\" WHERE \"ParentID\" = " . (int)$this->ID
        . " AND \"ClassName\" IN ('" . implode("','", $SQL_folderClasses) . "')")->value();
}
/**
 * Overloaded to call recursively on all contained {@link File} records.
 */
public function updateFilesystem() {
    parent::updateFilesystem();

    // Note: Folders will have been renamed on the filesystem already at this point,
    // File->updateFilesystem() needs to take this into account.
    if($this->ID && ($children = $this->AllChildren())) {
        foreach($children as $child) {
            $child->updateFilesystem();
            // Persist any filename changes made by updateFilesystem().
            $child->write();
        }
    }
}
/**
 * This isn't a descendant of SiteTree, but needs this in case
 * the group is "reorganised". Intentionally a no-op.
 */
function cmsCleanup_parentChanged(){
}
/**
 * Return the FieldSet used to edit this folder in the CMS.
 * You can modify this fieldset by subclassing folder, or by creating a {@link DataObjectDecorator}
 * and implementing updateCMSFields(FieldSet $fields) on that decorator.
 *
 * @return FieldSet
 */
function getCMSFields() {
    // Table listing the files contained directly in this folder.
    $fileList = new AssetTableField(
        $this,
        "Files",
        "File",
        array("Title" => _t('Folder.TITLE', "Title"), "Filename" => _t('Folder.FILENAME', "Filename")),
        ""
    );
    $fileList->setFolder($this);
    $fileList->setPopupCaption(_t('Folder.VIEWEDITASSET', "View/Edit Asset"));

    // The root folder has no editable title.
    $titleField = ($this->ID && $this->ID != "root") ? new TextField("Title", _t('Folder.TITLE')) : new HiddenField("Title");

    // Only expose the delete action to users with edit permission.
    if( $this->canEdit() ) {
        $deleteButton = new InlineFormAction('deletemarked',_t('Folder.DELSELECTED','Delete selected files'), 'delete');
        $deleteButton->includeDefaultJS(false);
    } else {
        $deleteButton = new HiddenField('deletemarked');
    }

    $fields = new FieldSet(
        new HiddenField("Name"),
        new TabSet("Root",
            new Tab("Files", _t('Folder.FILESTAB', "Files"),
                $titleField,
                $fileList,
                $deleteButton,
                new HiddenField("FileIDs"),
                new HiddenField("DestFolderID")
            ),
            new Tab("Details", _t('Folder.DETAILSTAB', "Details"),
                new ReadonlyField("URL", _t('Folder.URL', 'URL')),
                new ReadonlyField("ClassName", _t('Folder.TYPE','Type')),
                new ReadonlyField("Created", _t('Folder.CREATED','First Uploaded')),
                new ReadonlyField("LastEdited", _t('Folder.LASTEDITED','Last Updated'))
            ),
            new Tab("Upload", _t('Folder.UPLOADTAB', "Upload"),
                new LiteralField("UploadIframe",
                    $this->getUploadIframe()
                )
            )
            /* // commenting out unused files tab till bugs are fixed
            new Tab("UnusedFiles", _t('Folder.UNUSEDFILESTAB', "Unused files"),
                new Folder_UnusedAssetsField($this)
            ) */
        ),
        new HiddenField("ID")
    );

    if(!$this->canEdit()) {
        $fields->removeFieldFromTab("Root", "Upload");
    }

    // Allow DataObjectDecorators to modify the fieldset.
    $this->extend('updateCMSFields', $fields);

    return $fields;
}
/**
 * Looks for files used in the system and creates a WHERE clause which
 * excludes all of their IDs.
 *
 * Files are considered "used" when they appear in SiteTree_ImageTracking,
 * or are pointed to by an Image/File has_one relation on any SiteTree class.
 *
 * @return string WHERE clause which will work as a filter.
 */
public function getUnusedFilesListFilter() {
    $usedFiles = array();

    // Files referenced through page content image/file tracking.
    // (Previously these IDs were collected into an unused string and
    // never applied to the filter — fixed to include them.)
    $result = DB::query("SELECT DISTINCT \"FileID\" FROM \"SiteTree_ImageTracking\"");
    if($result->numRecords() > 0) {
        while($nextResult = $result->next()) {
            $usedFiles[] = $nextResult['FileID'];
        }
    }

    // Files referenced through has_one relations on SiteTree subclasses.
    $classes = ClassInfo::subclassesFor('SiteTree');
    foreach($classes as $className) {
        $query = singleton($className)->extendedSQL();
        $ids = $query->execute()->column();
        if(!count($ids)) continue;

        foreach(singleton($className)->has_one() as $relName => $joinClass) {
            if($joinClass == 'Image' || $joinClass == 'File') {
                $fieldName = $relName .'ID';
                $query = singleton($className)->extendedSQL("$fieldName > 0");
                $query->distinct = true;
                $query->select = array($fieldName);
                $usedFiles = array_merge($usedFiles, $query->execute()->column());
            } elseif($joinClass == 'Folder') {
                // @todo
            }
        }
    }

    $usedFiles = array_unique($usedFiles);

    if($usedFiles) {
        return "\"File\".\"ID\" NOT IN (" . implode(', ', $usedFiles) . ") AND (\"ClassName\" = 'File' OR \"ClassName\" = 'Image')";
    }
    return "(\"ClassName\" = 'File' OR \"ClassName\" = 'Image')";
}
/**
 * Display the upload form. Returns an iframe tag that will show admin/assets/uploadiframe.
 *
 * @return string HTML markup for the upload iframe.
 */
function getUploadIframe() {
    // Heredoc content is emitted verbatim; the iframe targets this folder's ID.
    return <<<HTML
<iframe name="AssetAdmin_upload" src="admin/assets/uploadiframe/{$this->ID}" id="AssetAdmin_upload" border="0" style="border-style none !important; width: 97%; min-height: 300px; height: 100%; height: expression(document.body.clientHeight) !important;">
</iframe>
HTML;
}
/**
 * Get the children of this folder that are also folders.
 *
 * @return mixed The Folder children, as returned by DataObject::get().
 */
function ChildFolders() {
    return DataObject::get("Folder", "\"ParentID\" = " . (int)$this->ID);
}
}
/**
 * Composite form field that lists files not referenced anywhere in the site,
 * with an action to delete unused thumbnails.
 *
 * @package sapphire
 * @subpackage filesystem
 */
class Folder_UnusedAssetsField extends CompositeField {
    // The folder whose unused assets are listed.
    protected $folder;

    public function __construct($folder) {
        $this->folder = $folder;
        parent::__construct(new FieldSet());
    }

    /**
     * Lazily build the child fields on first access: a heading, the table
     * of unused assets, and a "delete unused thumbnails" action.
     */
    public function getChildren() {
        if($this->children->Count() == 0) {
            $inlineFormAction = new InlineFormAction("delete_unused_thumbnails", _t('Folder.DELETEUNUSEDTHUMBNAILS', 'Delete unused thumbnails'));
            $inlineFormAction->includeDefaultJS(false) ;

            $this->children = new FieldSet(
                new LiteralField( "UnusedAssets", "<h2>"._t('Folder.UNUSEDFILESTITLE', 'Unused files')."</h2>" ),
                $this->getAssetList(),
                new FieldGroup(
                    new LiteralField( "UnusedThumbnails", "<h2>"._t('Folder.UNUSEDTHUMBNAILSTITLE', 'Unused thumbnails')."</h2>"),
                    $inlineFormAction
                )
            );
            $this->children->setForm($this->form);
        }
        return $this->children;
    }

    // Render each child field in sequence; this composite has no wrapper markup.
    public function FieldHolder() {
        $output = "";
        foreach($this->getChildren() as $child) {
            $output .= $child->FieldHolder();
        }
        return $output;
    }

    /**
     * Creates table for displaying unused files.
     *
     * @return AssetTableField
     */
    protected function getAssetList() {
        $where = $this->folder->getUnusedFilesListFilter();
        $assetList = new AssetTableField(
            $this->folder,
            "AssetList",
            "File",
            array("Title" => _t('Folder.TITLE', "Title"), "LinkedURL" => _t('Folder.FILENAME', "Filename")),
            "",
            $where
        );
        $assetList->setPopupCaption(_t('Folder.VIEWASSET', "View Asset"));
        // Read-only listing plus delete; rows are not markable.
        $assetList->setPermissions(array("show","delete"));
        $assetList->Markable = false;
        return $assetList;
    }
}
?>
| bsd-3-clause |
AICP/external_chromium_org | content/public/test/content_test_suite_base.cc | 3065 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/public/test/content_test_suite_base.h"
#include "base/basictypes.h"
#include "base/compiler_specific.h"
#include "base/memory/scoped_ptr.h"
#include "base/test/test_suite.h"
#include "base/threading/sequenced_worker_pool.h"
#include "content/browser/browser_thread_impl.h"
#include "content/common/url_schemes.h"
#include "content/gpu/in_process_gpu_thread.h"
#include "content/public/common/content_client.h"
#include "content/renderer/in_process_renderer_thread.h"
#include "content/utility/in_process_utility_thread.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/base/ui_base_paths.h"
#if !defined(OS_IOS)
#include "content/browser/gpu/gpu_process_host.h"
#include "content/browser/renderer_host/render_process_host_impl.h"
#include "content/browser/utility_process_host_impl.h"
#endif
#if defined(OS_ANDROID)
#include "base/android/jni_android.h"
#include "content/browser/android/browser_jni_registrar.h"
#include "content/common/android/common_jni_registrar.h"
#include "media/base/android/media_jni_registrar.h"
#include "net/android/net_jni_registrar.h"
#include "ui/base/android/ui_base_jni_registrar.h"
#include "ui/gfx/android/gfx_jni_registrar.h"
#include "ui/gl/android/gl_jni_registrar.h"
#include "ui/shell_dialogs/android/shell_dialogs_jni_registrar.h"
#endif
namespace content {
// Test-event listener that flushes the browser-thread worker pool after every
// test, so tasks posted by one test cannot leak into the next.
class ContentTestSuiteBaseListener : public testing::EmptyTestEventListener {
 public:
  ContentTestSuiteBaseListener() {
  }
  virtual void OnTestEnd(const testing::TestInfo& test_info) OVERRIDE {
    // Drain any tasks still queued on the blocking pool.
    BrowserThreadImpl::FlushThreadPoolHelper();
  }
 private:
  DISALLOW_COPY_AND_ASSIGN(ContentTestSuiteBaseListener);
};
// Forwards the command line straight to the base TestSuite.
ContentTestSuiteBase::ContentTestSuiteBase(int argc, char** argv)
    : base::TestSuite(argc, argv) {
}
// Initializes the base suite, registers Android JNI bindings (Android only),
// and installs the listener that flushes the thread pool between tests.
void ContentTestSuiteBase::Initialize() {
  base::TestSuite::Initialize();

#if defined(OS_ANDROID)
  // Register JNI bindings for android.
  JNIEnv* env = base::android::AttachCurrentThread();
  content::android::RegisterCommonJni(env);
  content::android::RegisterBrowserJni(env);
  gfx::android::RegisterJni(env);
  media::RegisterJni(env);
  net::android::RegisterJni(env);
  ui::android::RegisterJni(env);
  ui::shell_dialogs::RegisterJni(env);
#endif

  // Ownership of the listener passes to gtest.
  testing::UnitTest::GetInstance()->listeners().Append(
      new ContentTestSuiteBaseListener);
}
// Registers the content schemes using |content_client|, then clears the
// global client again so individual tests can install their own.
void ContentTestSuiteBase::RegisterContentSchemes(
    ContentClient* content_client) {
  SetContentClient(content_client);
  content::RegisterContentSchemes(false);
  SetContentClient(NULL);
}
// Makes the utility, renderer and GPU "processes" run as in-process threads,
// which is what single-process tests expect. No-op on iOS.
void ContentTestSuiteBase::RegisterInProcessThreads() {
#if !defined(OS_IOS)
  UtilityProcessHostImpl::RegisterUtilityMainThreadFactory(
      CreateInProcessUtilityThread);
  RenderProcessHostImpl::RegisterRendererMainThreadFactory(
      CreateInProcessRendererThread);
  GpuProcessHost::RegisterGpuMainThreadFactory(CreateInProcessGpuThread);
#endif
}
} // namespace content
| bsd-3-clause |
Just-D/chromium-1 | third_party/mojo/src/mojo/edk/embedder/platform_channel_pair_posix_unittest.cc | 9408 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "mojo/edk/embedder/platform_channel_pair.h"
#include <errno.h>
#include <poll.h>
#include <signal.h>
#include <stdio.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <sys/uio.h>
#include <unistd.h>
#include <deque>
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/files/scoped_file.h"
#include "base/files/scoped_temp_dir.h"
#include "base/logging.h"
#include "mojo/edk/embedder/platform_channel_utils_posix.h"
#include "mojo/edk/embedder/platform_handle.h"
#include "mojo/edk/embedder/platform_handle_vector.h"
#include "mojo/edk/embedder/scoped_platform_handle.h"
#include "mojo/edk/test/test_utils.h"
#include "mojo/public/cpp/system/macros.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace mojo {
namespace embedder {
namespace {
// Blocks until |h| becomes readable (POLLIN), polling with an infinite
// timeout. CHECKs that exactly one descriptor fired.
void WaitReadable(PlatformHandle h) {
  struct pollfd pfd;
  pfd.fd = h.fd;
  pfd.events = POLLIN;
  pfd.revents = 0;
  const int num_ready = poll(&pfd, 1, -1);
  CHECK_EQ(num_ready, 1);
}
// Fixture ensuring SIGPIPE has its default disposition for each test: the
// tests below rely on writes to closed channels failing (EPIPE) rather than
// the signal being ignored.
class PlatformChannelPairPosixTest : public testing::Test {
 public:
  PlatformChannelPairPosixTest() {}
  ~PlatformChannelPairPosixTest() override {}

  void SetUp() override {
    // Make sure |SIGPIPE| isn't being ignored.
    struct sigaction action = {};
    action.sa_handler = SIG_DFL;
    ASSERT_EQ(0, sigaction(SIGPIPE, &action, &old_action_));
  }

  void TearDown() override {
    // Restore the |SIGPIPE| handler.
    ASSERT_EQ(0, sigaction(SIGPIPE, &old_action_, nullptr));
  }

 private:
  // Previous handler, saved in SetUp() and restored in TearDown().
  struct sigaction old_action_;

  MOJO_DISALLOW_COPY_AND_ASSIGN(PlatformChannelPairPosixTest);
};
// Verifies that writing to a channel whose peer has closed fails with EPIPE
// (through the PlatformChannel* helpers) instead of raising SIGPIPE.
TEST_F(PlatformChannelPairPosixTest, NoSigPipe) {
  PlatformChannelPair channel_pair;
  ScopedPlatformHandle server_handle = channel_pair.PassServerHandle().Pass();
  ScopedPlatformHandle client_handle = channel_pair.PassClientHandle().Pass();

  // Write to the client.
  static const char kHello[] = "hello";
  EXPECT_EQ(static_cast<ssize_t>(sizeof(kHello)),
            write(client_handle.get().fd, kHello, sizeof(kHello)));

  // Close the client.
  client_handle.reset();

  // Read from the server; this should be okay.
  char buffer[100] = {};
  EXPECT_EQ(static_cast<ssize_t>(sizeof(kHello)),
            read(server_handle.get().fd, buffer, sizeof(buffer)));
  EXPECT_STREQ(kHello, buffer);

  // Try reading again.
  ssize_t result = read(server_handle.get().fd, buffer, sizeof(buffer));
  // We should probably get zero (for "end of file"), but -1 would also be okay.
  EXPECT_TRUE(result == 0 || result == -1);
  if (result == -1)
    PLOG(WARNING) << "read (expected 0 for EOF)";

  // Test our replacement for |write()|/|send()|.
  result = PlatformChannelWrite(server_handle.get(), kHello, sizeof(kHello));
  EXPECT_EQ(-1, result);
  if (errno != EPIPE)
    PLOG(WARNING) << "write (expected EPIPE)";

  // Test our replacement for |writev()|/|sendv()|.
  struct iovec iov[2] = {{const_cast<char*>(kHello), sizeof(kHello)},
                         {const_cast<char*>(kHello), sizeof(kHello)}};
  result = PlatformChannelWritev(server_handle.get(), iov, 2);
  EXPECT_EQ(-1, result);
  if (errno != EPIPE)
    PLOG(WARNING) << "write (expected EPIPE)";
}
// Round-trips payloads of doubling sizes (1 to 512 bytes) through the channel
// pair and checks they arrive intact with no stray handles attached.
TEST_F(PlatformChannelPairPosixTest, SendReceiveData) {
  PlatformChannelPair channel_pair;
  ScopedPlatformHandle server_handle = channel_pair.PassServerHandle().Pass();
  ScopedPlatformHandle client_handle = channel_pair.PassClientHandle().Pass();

  for (size_t i = 0; i < 10; i++) {
    // Message of size 2^i bytes, filled with a per-iteration character.
    std::string send_string(1 << i, 'A' + i);

    EXPECT_EQ(static_cast<ssize_t>(send_string.size()),
              PlatformChannelWrite(server_handle.get(), send_string.data(),
                                   send_string.size()));

    WaitReadable(client_handle.get());

    char buf[10000] = {};
    std::deque<PlatformHandle> received_handles;
    ssize_t result = PlatformChannelRecvmsg(client_handle.get(), buf,
                                            sizeof(buf), &received_handles);
    EXPECT_EQ(static_cast<ssize_t>(send_string.size()), result);
    EXPECT_EQ(send_string, std::string(buf, static_cast<size_t>(result)));
    EXPECT_TRUE(received_handles.empty());
  }
}
// Sends increasing numbers of file descriptors (each backed by a temp file of
// known contents) alongside a small payload, and verifies both the payload
// and every received descriptor's file contents.
TEST_F(PlatformChannelPairPosixTest, SendReceiveFDs) {
  base::ScopedTempDir temp_dir;
  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());

  static const char kHello[] = "hello";

  PlatformChannelPair channel_pair;
  ScopedPlatformHandle server_handle = channel_pair.PassServerHandle().Pass();
  ScopedPlatformHandle client_handle = channel_pair.PassClientHandle().Pass();

// Reduce the number of FDs opened on OS X to avoid test flake.
#if defined(OS_MACOSX)
  const size_t kNumHandlesToSend = kPlatformChannelMaxNumHandles / 2;
#else
  const size_t kNumHandlesToSend = kPlatformChannelMaxNumHandles;
#endif

  for (size_t i = 1; i < kNumHandlesToSend; i++) {
    // Make |i| files, with the j-th file consisting of j copies of the digit
    // |c|.
    const char c = '0' + (i % 10);
    ScopedPlatformHandleVectorPtr platform_handles(new PlatformHandleVector);
    for (size_t j = 1; j <= i; j++) {
      base::FilePath unused;
      base::ScopedFILE fp(
          base::CreateAndOpenTemporaryFileInDir(temp_dir.path(), &unused));
      ASSERT_TRUE(fp);
      ASSERT_EQ(j, fwrite(std::string(j, c).data(), 1, j, fp.get()));
      platform_handles->push_back(
          test::PlatformHandleFromFILE(fp.Pass()).release());
      ASSERT_TRUE(platform_handles->back().is_valid());
    }

    // Send the FDs (+ "hello").
    struct iovec iov = {const_cast<char*>(kHello), sizeof(kHello)};
    // We assume that the |sendmsg()| actually sends all the data.
    EXPECT_EQ(static_cast<ssize_t>(sizeof(kHello)),
              PlatformChannelSendmsgWithHandles(server_handle.get(), &iov, 1,
                                                &platform_handles->at(0),
                                                platform_handles->size()));

    WaitReadable(client_handle.get());

    char buf[10000] = {};
    std::deque<PlatformHandle> received_handles;
    // We assume that the |recvmsg()| actually reads all the data.
    EXPECT_EQ(static_cast<ssize_t>(sizeof(kHello)),
              PlatformChannelRecvmsg(client_handle.get(), buf, sizeof(buf),
                                     &received_handles));
    EXPECT_STREQ(kHello, buf);
    EXPECT_EQ(i, received_handles.size());

    // Check each received FD's contents; ownership passes to |fp|.
    for (size_t j = 0; !received_handles.empty(); j++) {
      base::ScopedFILE fp(test::FILEFromPlatformHandle(
          ScopedPlatformHandle(received_handles.front()), "rb"));
      received_handles.pop_front();
      ASSERT_TRUE(fp);
      rewind(fp.get());
      char read_buf[kNumHandlesToSend];
      size_t bytes_read = fread(read_buf, 1, sizeof(read_buf), fp.get());
      EXPECT_EQ(j + 1, bytes_read);
      EXPECT_EQ(std::string(j + 1, c), std::string(read_buf, bytes_read));
    }
  }
}
// Verifies that PlatformChannelRecvmsg() appends received handles to a
// non-empty deque rather than replacing its existing contents.
TEST_F(PlatformChannelPairPosixTest, AppendReceivedFDs) {
  base::ScopedTempDir temp_dir;
  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());

  static const char kHello[] = "hello";

  PlatformChannelPair channel_pair;
  ScopedPlatformHandle server_handle = channel_pair.PassServerHandle().Pass();
  ScopedPlatformHandle client_handle = channel_pair.PassClientHandle().Pass();

  const std::string file_contents("hello world");

  {
    base::FilePath unused;
    base::ScopedFILE fp(
        base::CreateAndOpenTemporaryFileInDir(temp_dir.path(), &unused));
    ASSERT_TRUE(fp);
    ASSERT_EQ(file_contents.size(),
              fwrite(file_contents.data(), 1, file_contents.size(), fp.get()));
    ScopedPlatformHandleVectorPtr platform_handles(new PlatformHandleVector);
    platform_handles->push_back(
        test::PlatformHandleFromFILE(fp.Pass()).release());
    ASSERT_TRUE(platform_handles->back().is_valid());

    // Send the FD (+ "hello").
    struct iovec iov = {const_cast<char*>(kHello), sizeof(kHello)};
    // We assume that the |sendmsg()| actually sends all the data.
    EXPECT_EQ(static_cast<ssize_t>(sizeof(kHello)),
              PlatformChannelSendmsgWithHandles(server_handle.get(), &iov, 1,
                                                &platform_handles->at(0),
                                                platform_handles->size()));
  }

  WaitReadable(client_handle.get());

  // Start with an invalid handle in the deque.
  std::deque<PlatformHandle> received_handles;
  received_handles.push_back(PlatformHandle());

  char buf[100] = {};
  // We assume that the |recvmsg()| actually reads all the data.
  EXPECT_EQ(static_cast<ssize_t>(sizeof(kHello)),
            PlatformChannelRecvmsg(client_handle.get(), buf, sizeof(buf),
                                   &received_handles));
  EXPECT_STREQ(kHello, buf);
  // The pre-existing (invalid) entry must still be first.
  ASSERT_EQ(2u, received_handles.size());
  EXPECT_FALSE(received_handles[0].is_valid());
  EXPECT_TRUE(received_handles[1].is_valid());

  {
    base::ScopedFILE fp(test::FILEFromPlatformHandle(
        ScopedPlatformHandle(received_handles[1]), "rb"));
    received_handles[1] = PlatformHandle();
    ASSERT_TRUE(fp);
    rewind(fp.get());
    char read_buf[100];
    size_t bytes_read = fread(read_buf, 1, sizeof(read_buf), fp.get());
    EXPECT_EQ(file_contents.size(), bytes_read);
    EXPECT_EQ(file_contents, std::string(read_buf, bytes_read));
  }
}
} // namespace
} // namespace embedder
} // namespace mojo
| bsd-3-clause |
axinging/sky_engine | mojo/go/tests/message_test.go | 1553 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package tests
import (
"testing"
"mojo/public/go/bindings"
)
// checkMessageEncoding encodes a message built from header and payload,
// parses it back, and verifies that both the header and the payload
// round-trip unchanged. Fails the test fatally on any mismatch.
func checkMessageEncoding(t *testing.T, header, payload bindings.MessageHeader) {
	var encodedMessage, decodedMessage *bindings.Message
	var err error
	var decodedHeader, decodedPayload bindings.MessageHeader
	if encodedMessage, err = bindings.EncodeMessage(header, &payload); err != nil {
		t.Fatalf("Failed encoding message: %v", err)
	}
	if decodedMessage, err = bindings.ParseMessage(encodedMessage.Bytes, nil); err != nil {
		t.Fatalf("Failed decoding message header: %v", err)
	}
	if decodedHeader = decodedMessage.Header; decodedHeader != header {
		t.Fatalf("Unexpected header decoded: got %v, want %v", decodedHeader, header)
	}
	if err = decodedMessage.DecodePayload(&decodedPayload); err != nil {
		t.Fatalf("Failed decoding message payload: %v", err)
	}
	if decodedPayload != payload {
		t.Fatalf("Unexpected header with request id decoded: got %v, want %v", decodedPayload, payload)
	}
}
// TestMessageHeader tests that headers are identical after being
// encoded/decoded, in several header/payload combinations (with and
// without a request id, and with a zero request id).
func TestMessageHeader(t *testing.T) {
	plainHeader := bindings.MessageHeader{2, 0, 0}
	headerWithRequestId := bindings.MessageHeader{1, 2, 3}
	headerWithZeroRequestId := bindings.MessageHeader{1, 2, 0}

	checkMessageEncoding(t, plainHeader, headerWithRequestId)
	checkMessageEncoding(t, headerWithRequestId, plainHeader)
	checkMessageEncoding(t, headerWithZeroRequestId, plainHeader)
}
| bsd-3-clause |
grimfang/panda3d | direct/src/fsm/FSM.py | 20469 | """The new Finite State Machine module. This replaces the module
previously called FSM (now called :mod:`.ClassicFSM`).
"""
__all__ = ['FSMException', 'FSM']
from direct.showbase.DirectObject import DirectObject
from direct.directnotify import DirectNotifyGlobal
from direct.showbase import PythonUtil
from direct.stdpy.threading import RLock
class FSMException(Exception):
    """Base class for all exceptions raised by the FSM module."""
    pass
class AlreadyInTransition(FSMException):
    """Raised when an operation that requires a settled state is attempted
    while the FSM is between states (mid-transition)."""
    pass
class RequestDenied(FSMException):
    """Raised by demand() when a transition request is rejected by the
    current state's filter."""
    pass
class FSM(DirectObject):
"""
A Finite State Machine. This is intended to be the base class
of any number of specific machines, which consist of a collection
of states and transitions, and rules to switch between states
according to arbitrary input data.
The states of an FSM are defined implicitly. Each state is
identified by a string, which by convention begins with a capital
letter. (Also by convention, strings passed to request that are
not state names should begin with a lowercase letter.)
To define specialized behavior when entering or exiting a
particular state, define a method named enterState() and/or
exitState(), where "State" is the name of the state, e.g.:
def enterRed(self):
... do stuff ...
def exitRed(self):
... cleanup stuff ...
def enterYellow(self):
... do stuff ...
def exitYellow(self):
... cleanup stuff ...
def enterGreen(self):
... do stuff ...
def exitGreen(self):
... cleanup stuff ...
Both functions can access the previous state name as
self.oldState, and the new state name we are transitioning to as
self.newState. (Of course, in enterRed(), self.newState will
always be "Red", and in exitRed(), self.oldState will always
be "Red".)
Both functions are optional. If either function is omitted, the
state is still defined, but nothing is done when transitioning
into (or out of) the state.
Additionally, you may define a filterState() function for each
state. The purpose of this function is to decide what state to
transition to next, if any, on receipt of a particular input. The
input is always a string and a tuple of optional parameters (which
is often empty), and the return value should either be None to do
nothing, or the name of the state to transition into. For
example:
def filterRed(self, request, args):
if request in ['Green']:
return (request,) + args
return None
def filterYellow(self, request, args):
if request in ['Red']:
return (request,) + args
return None
def filterGreen(self, request, args):
if request in ['Yellow']:
return (request,) + args
return None
As above, the filterState() functions are optional. If any is
omitted, the defaultFilter() method is called instead. A standard
implementation of defaultFilter() is provided, which may be
overridden in a derived class to change the behavior on an
unexpected transition.
If self.defaultTransitions is left unassigned, then the standard
implementation of defaultFilter() will return None for any
lowercase transition name and allow any uppercase transition name
(this assumes that an uppercase name is a request to go directly
to a particular state by name).
self.state may be queried at any time other than during the
handling of the enter() and exit() functions. During these
functions, self.state contains the value None (you are not really
in any state during the transition). However, during a transition
you *can* query the outgoing and incoming states, respectively,
via self.oldState and self.newState. At other times, self.state
contains the name of the current state.
Initially, the FSM is in state 'Off'. It does not call enterOff()
at construction time; it is simply in Off already by convention.
If you need to call code in enterOff() to initialize your FSM
properly, call it explicitly in the constructor. Similarly, when
cleanup() is called or the FSM is destructed, the FSM transitions
back to 'Off' by convention. (It does call enterOff() at this
point, but does not call exitOff().)
To implement nested hierarchical FSM's, simply create a nested FSM
and store it on the class within the appropriate enterState()
function, and clean it up within the corresponding exitState()
function.
There is a way to define specialized transition behavior between
two particular states. This is done by defining a from<X>To<Y>()
function, where X is the old state and Y is the new state. If this
is defined, it will be run in place of the exit<X> and enter<Y>
functions, so if you want that behavior, you'll have to call them
specifically. Otherwise, you can completely replace that transition's
behavior.
See the code in SampleFSM.py for further examples.
"""
notify = DirectNotifyGlobal.directNotify.newCategory("FSM")
SerialNum = 0
# This member lists the default transitions that are accepted
# without question by the defaultFilter. It's a map of state
# names to a list of legal target state names from that state.
# Define it only if you want to use the classic FSM model of
# defining all (or most) of your transitions up front. If
# this is set to None (the default), all named-state
# transitions (that is, those requests whose name begins with
# a capital letter) are allowed. If it is set to an empty
# map, no transitions are implicitly allowed--all transitions
# must be approved by some filter function.
defaultTransitions = None
def __init__(self, name):
    # Reentrant lock guarding all state queries and transitions, so that
    # filter/enter/exit code may call back into the FSM.
    self.fsmLock = RLock()
    self._name = name
    self.stateArray = []
    # Per-instance serial number, used to build unique event names.
    self._serialNum = FSM.SerialNum
    FSM.SerialNum += 1
    # When True, a messenger event is broadcast on each state change.
    self._broadcastStateChanges = False

    # Initially, we are in the Off state by convention.
    self.state = 'Off'

    # This member records transition requests made by demand() or
    # forceTransition() while the FSM is in transition between
    # states.
    self.__requestQueue = []

    if __debug__:
        # Register in the global debug registry; a weakref is used so the
        # registry does not keep this FSM alive.
        from direct.fsm.ClassicFSM import _debugFsms
        import weakref
        _debugFsms[name]=weakref.ref(self)
def cleanup(self):
    """Force the FSM into the 'Off' state, running any exit behavior for
    the current state along the way. Called by convention when the FSM is
    no longer needed."""
    with self.fsmLock:
        assert self.state
        if self.state != 'Off':
            self.__setState('Off')
def setBroadcastStateChanges(self, doBroadcast):
    # When True, the event named by getStateChangeEvent() is sent through
    # the messenger on every state change.
    self._broadcastStateChanges = doBroadcast
def getStateChangeEvent(self):
    """Returns the messenger event name broadcast on state changes."""
    # if setBroadcastStateChanges(True), this event will be sent through
    # the messenger on every state change. The new and old states are
    # accessible as self.oldState and self.newState, and the transition
    # functions will already have been called.
    return 'FSM-%s-%s-stateChange' % (self._serialNum, self._name)
def getCurrentFilter(self):
    """Returns the filter function for the current state, falling back to
    defaultFilter() when the state defines no filterState() method.

    Raises AlreadyInTransition if called while the FSM is between states.
    """
    if not self.state:
        error = "FSM cannot determine current filter while in transition (%s -> %s)." % (self.oldState, self.newState)
        raise AlreadyInTransition(error)

    stateFilter = getattr(self, "filter" + self.state, None)
    if stateFilter:
        return stateFilter

    # If there's no matching filterState() function, fall back to
    # defaultFilter() instead.
    return self.defaultFilter
def getCurrentOrNextState(self):
    """Returns the current state name; during a transition, returns the
    name of the state being entered instead."""
    with self.fsmLock:
        if self.state:
            return self.state
        return self.newState
def getCurrentStateOrTransition(self):
    """Returns the current state name, or a string of the form
    'OldState -> NewState' describing the transition in progress."""
    with self.fsmLock:
        if self.state:
            return self.state
        return '%s -> %s' % (self.oldState, self.newState)
def isInTransition(self):
    """Returns True while the FSM is between states (inside an
    enter/exit/from-to function), False when settled in a state."""
    with self.fsmLock:
        # self.state is None only while a transition is being processed.
        # Compare with 'is None' (identity), not '== None' (PEP 8).
        return self.state is None
    def forceTransition(self, request, *args):
        """Changes unconditionally to the indicated state. This
        bypasses the filterState() function, and just calls
        exitState() followed by enterState().

        If the FSM is currently in transition, the request is queued
        and replayed once the transition completes."""
        self.fsmLock.acquire()
        try:
            assert isinstance(request, str)
            self.notify.debug("%s.forceTransition(%s, %s" % (
                self._name, request, str(args)[1:]))
            if not self.state:
                # Queue up the request.
                self.__requestQueue.append(PythonUtil.Functor(
                    self.forceTransition, request, *args))
                return
            # Not in transition: change state immediately, no filter.
            self.__setState(request, *args)
        finally:
            self.fsmLock.release()
    def demand(self, request, *args):
        """Requests a state transition, by code that does not expect
        the request to be denied. If the request is denied, raises a
        RequestDenied exception.

        Unlike request(), this method allows a new request to be made
        while the FSM is currently in transition. In this case, the
        request is queued up and will be executed when the current
        transition finishes. Multiple requests will queue up in
        sequence.
        """
        self.fsmLock.acquire()
        try:
            assert isinstance(request, str)
            self.notify.debug("%s.demand(%s, %s" % (
                self._name, request, str(args)[1:]))
            if not self.state:
                # In transition: queue up the request for later replay.
                self.__requestQueue.append(PythonUtil.Functor(
                    self.demand, request, *args))
                return
            # request() returns a falsy value when the filter rejects
            # the transition; demand() escalates that to an exception.
            if not self.request(request, *args):
                raise RequestDenied("%s (from state: %s)" % (request, self.state))
        finally:
            self.fsmLock.release()
    def request(self, request, *args):
        """Requests a state transition (or other behavior). The
        request may be denied by the FSM's filter function. If it is
        denied, the filter function may either raise an exception
        (RequestDenied), or it may simply return None, without
        changing the FSM's state.

        The request parameter should be a string. The request, along
        with any additional arguments, is passed to the current
        filterState() function. If filterState() returns a string,
        the FSM transitions to that state.

        The return value is the same as the return value of
        filterState() (that is, None if the request does not provoke a
        state transition, otherwise it is a tuple containing the name
        of the state followed by any optional args.)

        If the FSM is currently in transition (i.e. in the middle of
        executing an enterState or exitState function), an
        AlreadyInTransition exception is raised (but see demand(),
        which will queue these requests up and apply when the
        transition is complete)."""
        self.fsmLock.acquire()
        try:
            assert isinstance(request, str)
            self.notify.debug("%s.request(%s, %s" % (
                self._name, request, str(args)[1:]))
            # getCurrentFilter() raises AlreadyInTransition if we are
            # between states.
            filter = self.getCurrentFilter()
            result = filter(request, args)
            if result:
                if isinstance(result, str):
                    # If the return value is a string, it's just the name
                    # of the state.  Wrap it in a tuple for consistency.
                    result = (result,) + args
                # Otherwise, assume it's a (name, *args) tuple
                self.__setState(*result)
            return result
        finally:
            self.fsmLock.release()
def defaultEnter(self, *args):
""" This is the default function that is called if there is no
enterState() method for a particular state name. """
pass
def defaultExit(self):
""" This is the default function that is called if there is no
exitState() method for a particular state name. """
pass
def defaultFilter(self, request, args):
"""This is the function that is called if there is no
filterState() method for a particular state name.
This default filter function behaves in one of two modes:
(1) if self.defaultTransitions is None, allow any request
whose name begins with a capital letter, which is assumed to
be a direct request to a particular state. This is similar to
the old ClassicFSM onUndefTransition=ALLOW, with no explicit
state transitions listed.
(2) if self.defaultTransitions is not None, allow only those
requests explicitly identified in this map. This is similar
to the old ClassicFSM onUndefTransition=DISALLOW, with an
explicit list of allowed state transitions.
Specialized FSM's may wish to redefine this default filter
(for instance, to always return the request itself, thus
allowing any transition.)."""
if request == 'Off':
# We can always go to the "Off" state.
return (request,) + args
if self.defaultTransitions is None:
# If self.defaultTransitions is None, it means to accept
# all requests whose name begins with a capital letter.
# These are direct requests to a particular state.
if request[0].isupper():
return (request,) + args
else:
# If self.defaultTransitions is not None, it is a map of
# allowed transitions from each state. That is, each key
# of the map is the current state name; for that key, the
# value is a list of allowed transitions from the
# indicated state.
if request in self.defaultTransitions.get(self.state, []):
# This transition is listed in the defaultTransitions map;
# accept it.
return (request,) + args
# If self.defaultTransitions is not None, it is an error
# to request a direct state transition (capital letter
# request) not listed in defaultTransitions and not
# handled by an earlier filter.
if request[0].isupper():
raise RequestDenied("%s (from state: %s)" % (request, self.state))
# In either case, we quietly ignore unhandled command
# (lowercase) requests.
assert self.notify.debug("%s ignoring request %s from state %s." % (self._name, request, self.state))
return None
def filterOff(self, request, args):
"""From the off state, we can always go directly to any other
state."""
if request[0].isupper():
return (request,) + args
return self.defaultFilter(request, args)
    def setStateArray(self, stateArray):
        """array of unique states to iterate through

        Stores the ordered list of states consumed by requestNext()
        and requestPrev()."""
        self.fsmLock.acquire()
        try:
            self.stateArray = stateArray
        finally:
            self.fsmLock.release()
def requestNext(self, *args):
"""Request the 'next' state in the predefined state array."""
self.fsmLock.acquire()
try:
if self.stateArray:
if not self.state in self.stateArray:
self.request(self.stateArray[0])
else:
cur_index = self.stateArray.index(self.state)
new_index = (cur_index + 1) % len(self.stateArray)
self.request(self.stateArray[new_index], args)
else:
assert self.notifier.debug(
"stateArray empty. Can't switch to next.")
finally:
self.fsmLock.release()
def requestPrev(self, *args):
"""Request the 'previous' state in the predefined state array."""
self.fsmLock.acquire()
try:
if self.stateArray:
if not self.state in self.stateArray:
self.request(self.stateArray[0])
else:
cur_index = self.stateArray.index(self.state)
new_index = (cur_index - 1) % len(self.stateArray)
self.request(self.stateArray[new_index], args)
else:
assert self.notifier.debug(
"stateArray empty. Can't switch to next.")
finally:
self.fsmLock.release()
    def __setState(self, newState, *args):
        # Internal function to change unconditionally to the indicated
        # state.  Sequence: record old/new state, clear self.state to
        # mark "in transition", run fromXToY() (or exitX()+enterY()),
        # optionally broadcast the change, then settle into the new
        # state and replay any queued request.
        assert self.state
        assert self.notify.debug("%s to state %s." % (self._name, newState))
        self.oldState = self.state
        self.newState = newState
        self.state = None
        try:
            # A fromOldToNew() method, if present, replaces the
            # exit/enter pair entirely.
            if not self.__callFromToFunc(self.oldState, self.newState, *args):
                self.__callExitFunc(self.oldState)
                self.__callEnterFunc(self.newState, *args)
                pass
            pass
        except:
            # If we got an exception during the enter or exit methods,
            # go directly to state "InternalError" and raise up the
            # exception.  This might leave things a little unclean
            # since we've partially transitioned, but what can you do?
            self.state = 'InternalError'
            del self.oldState
            del self.newState
            raise
        if self._broadcastStateChanges:
            # 'messenger' is presumably the global event messenger
            # made available by the framework -- TODO confirm.
            messenger.send(self.getStateChangeEvent())
        self.state = newState
        del self.oldState
        del self.newState
        if self.__requestQueue:
            # Replay the oldest queued demand()/forceTransition().
            request = self.__requestQueue.pop(0)
            assert self.notify.debug("%s continued queued request." % (self._name))
            request()
    def __callEnterFunc(self, name, *args):
        # Calls the appropriate enter function when transitioning into
        # a new state, if it exists; falls back to defaultEnter().
        # Only valid mid-transition (state is None, newState == name).
        assert self.state == None and self.newState == name
        func = getattr(self, "enter" + name, None)
        if not func:
            # If there's no matching enterFoo() function, call
            # defaultEnter() instead.
            func = self.defaultEnter
        func(*args)
    def __callFromToFunc(self, oldState, newState, *args):
        # Calls the appropriate fromTo function when transitioning into
        # a new state, if it exists.  Returns True when a
        # from<Old>To<New>() method was found and invoked (in which
        # case the exit/enter pair is skipped by the caller), else
        # False.
        assert self.state == None and self.oldState == oldState and self.newState == newState
        func = getattr(self, "from%sTo%s" % (oldState,newState), None)
        if func:
            func(*args)
            return True
        return False
    def __callExitFunc(self, name):
        # Calls the appropriate exit function when leaving a
        # state, if it exists; falls back to defaultExit().
        # Only valid mid-transition (state is None, oldState == name).
        assert self.state == None and self.oldState == name
        func = getattr(self, "exit" + name, None)
        if not func:
            # If there's no matching exitFoo() function, call
            # defaultExit() instead.
            func = self.defaultExit
        func()
    def __repr__(self):
        # repr() and str() give the same human-readable description.
        return self.__str__()
    def __str__(self):
        """
        Print out something useful about the fsm: its class, name, and
        either the settled state or the in-progress transition.
        """
        self.fsmLock.acquire()
        try:
            className = self.__class__.__name__
            # Note: the local name 'str' shadows the builtin within
            # this block; it holds the description being built.
            if self.state:
                str = ('%s FSM:%s in state "%s"' % (className, self._name, self.state))
            else:
                str = ('%s FSM:%s in transition from \'%s\' to \'%s\'' % (className, self._name, self.oldState, self.newState))
            return str
        finally:
            self.fsmLock.release()
| bsd-3-clause |
fbellini/AliPhysics | PWGLF/RESONANCES/macros/mini/ConfigKStarPlusMinusPbPbRun1.C | 44960 | /*=================================================================================
Dukhishyam Mallick - last modified 01 April 2019 (mallick.dukhishyam@cern.ch)
*** Configuration script for K*+-->K0Short-Pi analysis ***
=======================================================================================*/
// A configuration script for RSN package needs to define the followings:
//
// (1) decay tree of each resonance to be studied, which is needed to select
// true pairs and to assign the right mass to all candidate daughters
// (2) cuts at all levels: single daughters, tracks, events
// (3) output objects: histograms or trees
//
// Configures the RSN mini-analysis task for the K*(892)+/- -> K0s pi analysis:
// daughter track/PID cuts, K0s V0 selection (with optional systematic
// variations), and all histogram outputs (same-event, mixed-event, rotated,
// and MC-truth).  Returns kTRUE on success.
//
// Fixes vs. the CINT-era version (behavior unchanged):
//  - cosThStarID was declared inside if/else scopes but used afterwards;
//    it is now declared once in function scope.
//  - the MC block redeclared "AliRsnMiniOutput *out" in the same scope;
//    the second declaration now reuses the existing pointer.
Bool_t ConfigKStarPlusMinusPbPbRun1
(
    AliRsnMiniAnalysisTask *task,
    Bool_t isMC,
    Bool_t isPP,
    Bool_t isGT,
    Bool_t isRotate,
    Float_t piPIDCut,
    Float_t nsigmaTOF,
    Int_t customQualityCutsID=AliRsnCutSetDaughterParticle::kDisableCustom,
    AliRsnCutSetDaughterParticle::ERsnDaughterCutSet cutPiCandidate = AliRsnCutSetDaughterParticle::kTPCpidTOFveto3s,
    Float_t pi_k0s_PIDCut,
    Int_t aodFilterBit,
    Bool_t enableMonitor=kTRUE ,
    TString monitorOpt="",
    Float_t massTol,
    Float_t massTolVeto,
    Int_t tol_switch,
    Double_t tol_sigma,
    Float_t pLife,
    Float_t radiuslow,
    Bool_t Switch,
    Float_t k0sDCA,
    Float_t k0sCosPoinAn,
    Float_t k0sDaughDCA,
    Int_t NTPCcluster,
    const char *suffix,
    AliRsnCutSet *PairCutsSame,
    AliRsnCutSet *PairCutsMix,
    Float_t DCAxy,
    Bool_t enableSys,
    Float_t crossedRows,
    Float_t rowsbycluster,
    Float_t v0rapidity,
    Int_t Sys
    /*
    Int_t Multbin=100,
    Int_t lMultbin=0,
    Int_t hMultbin=100,
    Int_t Ptbin=100,
    Int_t lPtbin=0,
    Int_t hPtbin=10,
    Int_t Costhetabin=10,
    Int_t lCosthetabin=0,
    Int_t hCosthetabin=10
    */
    //UInt_t triggerMask=AliVEvent::kINT7
)
//kTPCpidphipp2015
{
    // manage suffix
    if (strlen(suffix) > 0) suffix = Form("_%s", suffix);
    /////////////////////////////////////////////////////
    // selections for the pion from the decay of KStarPlusMinus*
    /////////////////////////////////////////////////////
    //
    AliRsnCutSetDaughterParticle* cutSetQ;
    AliRsnCutSetDaughterParticle* cutSetPi;
    AliRsnCutTrackQuality* trkQualityCut= new AliRsnCutTrackQuality("myQualityCut");
    cout<<"Value of custom quality--------------------"<<customQualityCutsID<<endl;
    if(SetCustomQualityCut(trkQualityCut,customQualityCutsID,aodFilterBit)){
        //Set custom quality cuts for systematic checks
        cutSetQ=new AliRsnCutSetDaughterParticle(Form("cutQ_bit%i",aodFilterBit),trkQualityCut,AliRsnCutSetDaughterParticle::kQualityStd2011,AliPID::kPion,-1.);
        cutSetPi=new AliRsnCutSetDaughterParticle(Form("cutPi%i_%2.1fsigma",cutPiCandidate, piPIDCut,nsigmaTOF),trkQualityCut,cutPiCandidate,AliPID::kPion,piPIDCut,nsigmaTOF);
    }else{
        //use default quality cuts std 2010 with crossed rows TPC
        Bool_t useCrossedRows = 1;
        cutSetQ=new AliRsnCutSetDaughterParticle(Form("cutQ_bit%i",aodFilterBit),AliRsnCutSetDaughterParticle::kQualityStd2011,AliPID::kPion,-1.,aodFilterBit,kTRUE);
        cutSetPi=new AliRsnCutSetDaughterParticle(Form("cutPi%i_%2.1fsigma",cutPiCandidate,piPIDCut,nsigmaTOF),cutPiCandidate,AliPID::kPion,piPIDCut,nsigmaTOF,aodFilterBit, kTRUE);
    }
    Int_t iCutQ=task->AddTrackCuts(cutSetQ);
    Int_t iCutPi=task->AddTrackCuts(cutSetPi);
    /////////////////////////////////////////////////////////////
    // selections for K0s and for the daughters of K0s
    /////////////////////////////////////////////////////////////
    //
    // selections for pion daugthers of K0s
    AliESDtrackCuts *esdTrackCuts = new AliESDtrackCuts("qualityDaughterK0s");
    esdTrackCuts->SetEtaRange(-0.8,0.8);
    esdTrackCuts->SetRequireTPCRefit();
    esdTrackCuts->SetAcceptKinkDaughters(0); //
    esdTrackCuts->SetMinNCrossedRowsTPC(crossedRows);
    esdTrackCuts->SetMinRatioCrossedRowsOverFindableClustersTPC(rowsbycluster);
    esdTrackCuts->SetMaxChi2PerClusterTPC(100);
    esdTrackCuts->SetMinDCAToVertexXY(DCAxy); //Use one of the two - pt dependent or fixed value cut.
    //
    /////////////////////////////////////////////////
    // selections for K0s
    AliRsnCutV0 *cutK0s = new AliRsnCutV0("cutK0s", kK0Short, AliPID::kPion, AliPID::kPion);
    cutK0s->SetPIDCutPion(pi_k0s_PIDCut);        // PID for the pion daughter of K0s
    cutK0s->SetESDtrackCuts(esdTrackCuts);
    cutK0s->SetMaxDaughtersDCA(k0sDaughDCA);
    cutK0s->SetMaxDCAVertex(k0sDCA);
    cutK0s->SetMinCosPointingAngle(k0sCosPoinAn);
    cutK0s->SetTolerance(massTol);
    cutK0s->SetToleranceVeto(massTolVeto);   //Rejection range for Competing V0 Rejection
    cutK0s->SetSwitch(Switch);
    cutK0s->SetfLife(pLife);
    cutK0s->SetfLowRadius(radiuslow);
    cutK0s->SetfHighRadius(100);
    cutK0s->SetMaxRapidity(v0rapidity);
    cutK0s->SetpT_Tolerance(tol_switch);
    cutK0s->SetMassTolSigma(tol_sigma);
    //cutK0s->SetArmentousCut(2.0);
    //cout<<"Get Input Value Of Armentous cut-------->:"<<cutK0s->GetArment
    // Single-cut systematic variations: exactly one selection is varied
    // per Sys index, all others keep their default values.
    if(enableSys)
    {
        if(Sys==1){cutK0s->SetPIDCutPion(pi_k0s_PIDCut-0.5);}
        else if(Sys==2){cutK0s->SetPIDCutPion(pi_k0s_PIDCut-1.0);}
        else if(Sys==3){cutK0s->SetMaxDaughtersDCA(k0sDaughDCA-0.25);}
        else if(Sys==4){cutK0s->SetMaxDaughtersDCA(k0sDaughDCA+0.25);}
        else if(Sys==5){cutK0s->SetMinCosPointingAngle(k0sCosPoinAn-0.02);}
        else if(Sys==6){cutK0s->SetMinCosPointingAngle(k0sCosPoinAn+0.02);}
        else if(Sys==7){cutK0s->SetTolerance(massTol+1);}
        else if(Sys==8){cutK0s->SetTolerance(massTol+2);}
        else if(Sys==9){cutK0s->SetTolerance(massTol-1);}
        else if(Sys==10){cutK0s->SetfLife(pLife-8);}
        else if(Sys==11){cutK0s->SetfLowRadius(radiuslow-0.2);}
        else if(Sys==12){cutK0s->SetfLowRadius(radiuslow+0.2);}
        else if(Sys==13){cutK0s->SetMaxRapidity(v0rapidity-0.1);}
        else if(Sys==14){cutK0s->SetMaxRapidity(v0rapidity+0.1);}
        else if(Sys==15){cutK0s->SetToleranceVeto(massTolVeto-0.0011);}
        else if(Sys==16){cutK0s->SetToleranceVeto(massTolVeto+0.0011);}
        else if(Sys==17){esdTrackCuts->SetMinDCAToVertexXY(DCAxy-0.01);}
        else if(Sys==18){esdTrackCuts->SetMinDCAToVertexXY(DCAxy+0.01);}
        else if(Sys==19){esdTrackCuts->SetMinNCrossedRowsTPC(crossedRows+10);}
        else if(Sys==20){esdTrackCuts->SetMinNCrossedRowsTPC(crossedRows+30);}
        else if(Sys==21){esdTrackCuts->SetMinRatioCrossedRowsOverFindableClustersTPC(rowsbycluster+0.1);}
    }
    AliRsnCutSet *cutSetK0s = new AliRsnCutSet("setK0s", AliRsnTarget::kDaughter);
    cutSetK0s->AddCut(cutK0s);
    cutSetK0s->SetCutScheme(cutK0s->GetName());
    Int_t iCutK0s = task->AddTrackCuts(cutSetK0s);
    //
    if(enableMonitor){
        Printf("======== Cut monitoring enabled");
        gROOT->LoadMacro("$ALICE_PHYSICS/PWGLF/RESONANCES/macros/mini/AddMonitorOutput.C");
        //AddMonitorOutput(isMC, cutPi->GetMonitorOutput(), monitorOpt.Data());
        //AddMonitorOutput(isMC, cutQ->GetMonitorOutput(), monitorOpt.Data());
        AddMonitorOutput(isMC, cutSetQ->GetMonitorOutput(), monitorOpt.Data());
        AddMonitorOutput(isMC, cutSetPi->GetMonitorOutput(), monitorOpt.Data());
        AddMonitorOutput(isMC, cutSetK0s->GetMonitorOutput(), monitorOpt.Data());
    }
    //
    // -- Values ------------------------------------------------------------------------------------
    //
    /* invariant mass */    Int_t imID    = task->CreateValue(AliRsnMiniValue::kInvMass, kFALSE);
    /* transv. momentum */  Int_t ptID    = task->CreateValue(AliRsnMiniValue::kPt, kFALSE);
    /* centrality */        Int_t centID  = task->CreateValue(AliRsnMiniValue::kMult, kFALSE);
    /* pseudorapidity */    Int_t etaID   = task->CreateValue(AliRsnMiniValue::kEta, kFALSE);
    /* rapidity */          Int_t yID     = task->CreateValue(AliRsnMiniValue::kY, kFALSE);
    /* 1st daughter pt */   Int_t fdpt    = task->CreateValue(AliRsnMiniValue::kFirstDaughterPt,kFALSE);
    /* 2nd daughter pt */   Int_t sdpt    = task->CreateValue(AliRsnMiniValue::kSecondDaughterPt,kFALSE);
    /* 1st daughter p */    Int_t fdp     = task->CreateValue(AliRsnMiniValue::kFirstDaughterP,kFALSE);
    /* 2nd daughter p */    Int_t sdp     = task->CreateValue(AliRsnMiniValue::kSecondDaughterP,kFALSE);
    /* cos(theta) J */      Int_t ctjID   = task->CreateValue(AliRsnMiniValue::kCosThetaJackson,kFALSE);
    /* cos(theta) J (MC)*/  Int_t ctjmID  = task->CreateValue(AliRsnMiniValue::kCosThetaJackson,kTRUE);
    /* cos(theta) T */      Int_t cttID   = task->CreateValue(AliRsnMiniValue::kCosThetaTransversity,kFALSE);
    /* cos(theta) T (MC)*/  Int_t cttmID  = task->CreateValue(AliRsnMiniValue::kCosThetaTransversity,kTRUE);
    // CosThetaStar: declared once here so it stays in scope for all the
    // AddAxis() calls below (it was previously declared inside the
    // if/else blocks, which is invalid outside CINT).
    /* CosThetaStar */      Int_t cosThStarID;
    if(isMC==1)
    {
        cosThStarID = task->CreateValue(AliRsnMiniValue::kCosThetaStarAbs, kTRUE);
    }
    else
    {
        cosThStarID = task->CreateValue(AliRsnMiniValue::kCosThetaStarAbs, kFALSE);
    }
    //
    // -- Create all needed outputs -----------------------------------------------------------------
    //
    // use an array for more compact writing, which are different on mixing and charges
    // [0] = unlike
    // [1] = mixing
    // [2] = like ++
    // [3] = like --
    Bool_t  use     [6] = {1         ,1         ,1         ,1         ,1       ,1       };
    Bool_t  useIM   [6] = {1         ,1         ,1         ,1         ,1       ,1       };
    TString name    [7] = {"KStarPlusMinus","AKStarPlusMinus","KStarPlusMinusmix","AKStarPlusMinusmix","KStarPlusMinust","AKStarPlusMinust","KSHORT"};
    TString comp    [7] = {"PAIR"    ,"PAIR"    ,"MIX"      ,"MIX"      ,"TRUE"  ,"TRUE","SINGLE"  };
    TString output  [7] = {"SPARSE"  ,"SPARSE"  ,"SPARSE"   ,"SPARSE"   ,"SPARSE" ,"SPARSE","SPARSE" };
    Char_t  charge1 [6] = {'0'       ,'0'       ,'0'        ,'0'        ,'0'     ,'0'     };
    Char_t  charge2 [6] = {'+'       ,'-'       ,'+'        ,'-'        ,'+'     ,'-'     };
    Int_t   cutID1  [7] = { iCutK0s  ,iCutK0s   ,iCutK0s    ,iCutK0s    ,iCutK0s ,iCutK0s, iCutK0s };
    Int_t   cutID2  [6] = { iCutPi   ,iCutPi    ,iCutPi     ,iCutPi     ,iCutPi  ,iCutPi  };
    Int_t   ipdg    [6] = {323       ,-323      ,323        ,-323       ,323     ,-323    };
    Double_t mass   [6] = { 0.89166  ,0.89166   ,0.89166    ,0.89166    ,0.89166 ,0.89166 };
    AliRsnCutSet* paircuts[6] = {PairCutsSame, PairCutsSame, PairCutsMix, PairCutsMix, PairCutsSame, PairCutsSame };
    for (Int_t i = 0; i < 6; i++) {
        if (!use[i]) continue;
        //if (collSyst) output[i] = "SPARSE";
        // create output
        AliRsnMiniOutput *out = task->CreateOutput(Form("ChargeKstar_%s%s", name[i].Data(), suffix), output[i].Data(), comp[i].Data());
        // selection settings
        out->SetCutID(0, cutID1[i]);
        out->SetCutID(1, cutID2[i]);
        out->SetDaughter(0, AliRsnDaughter::kKaon0);
        out->SetDaughter(1, AliRsnDaughter::kPion);
        out->SetCharge(0, charge1[i]);
        out->SetCharge(1, charge2[i]);
        out->SetMotherPDG(ipdg[i]);
        out->SetMotherMass(mass[i]);
        // pair cuts
        out->SetPairCuts(paircuts[i]);
        // axis X: invmass
        if (useIM[i])
            out->AddAxis(imID, 90, 0.6, 1.5);
        // out->AddAxis(imID, 700, 1.2, 4.0);
        // axis Y: transverse momentum
        out->AddAxis(ptID,300, 0, 30);
        // out->AddAxis(k0sDCA, 10, 0.0, 1.0);
        // axis W: Centrality
        if(isPP)  out->AddAxis(centID, 400, 0.5, 400.5);
        else      out->AddAxis(centID, 100, 0,100);
        if(isGT)  out->AddAxis(sdpt,100,0.,10.);
        // axis W: CosThetaStar
        if (!isPP)
            out->AddAxis(cosThStarID,10, 0, 1);
        else
            out->AddAxis(cosThStarID, 10, 0, 1);
        /* else if (i==6)
        {
            out->SetCutID(0, cutID1[i]);
            out->AddAxis(imID, 90, 0.2, 1.1);
            out->AddAxis(ptID,300, 0, 30);
            out->AddAxis(centID, 100,0,100);
            out->AddAxis(cosThStarID,10,0, 1);
        }
        */
    }
    // AddMonitorOutput_K0sP(cutSetK0s->GetMonitorOutput());
    /*******************commentout*******************************/
    AddMonitorOutput_K0sPt(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sNegDaughPt(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sPosDaughPt(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sMass(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sDCA(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sRadius(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sDaughterDCA(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sCosPointAngle(cutSetK0s->GetMonitorOutput());
    //added by me/////////
    //AddMonitorOutput_ArmentousCut(cutSetK0s->GetMonitorOutput());
    ////////////////////////
    // AddMonitorOutput_K0sProtonPID(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sPionPID(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sfpLife(cutSetK0s->GetMonitorOutput());
    AddMonitorOutput_K0sMass_Pt(cutSetK0s->GetMonitorOutput());
    /***************************commetout*****************************/
    //Monitor Output for Tracks
    // AddMonitorOutput_MinDCAToVertexXYPtDep(cutSetK0s->GetMonitorOutput());
    //AddMonitorOutput_MinDCAToVertexXY(cutSetK0s->GetMonitorOutput()); //Uncomment if fixed value Cut used
    //cutK0s->Print();
    // Rotated-background outputs (combinatorial background estimate).
    if(isRotate){
        for (Int_t i = 0; i < 2; i++)
        {   if (!use[i]) continue;
            //if (collSyst) output[i] = "SPARSE";
            AliRsnMiniOutput *out = task->CreateOutput(Form("ChargeKstar_Rotated_%s%s", name[i].Data(), suffix), output[i].Data(), "ROTATE2");
            out->SetCutID(0, cutID1[i]);
            out->SetCutID(1, cutID2[i]);
            out->SetDaughter(0, AliRsnDaughter::kKaon0);
            out->SetDaughter(1, AliRsnDaughter::kPion);
            out->SetCharge(0, charge1[i]);
            out->SetCharge(1, charge2[i]);
            out->SetMotherPDG(ipdg[i]);
            out->SetMotherMass(mass[i]);
            // pair cuts
            out->SetPairCuts(PairCutsSame);
            if (useIM[i]) out->AddAxis(imID, 90, 0.6, 1.5);
            out->AddAxis(ptID,300, 0, 30);
            // out->AddAxis(k0sDCA, 10, 0.0, 1.0);
            // axis W: Centrality
            if(isPP)  out->AddAxis(centID, 400, 0.5, 400.5);
            else      out->AddAxis(centID, 100, 0,100);
            if(isGT)  out->AddAxis(sdpt,100,0.,10.);
            // axis W: CosThetaStar
            if (!isPP)
                out->AddAxis(cosThStarID,10, 0, 1);
            else
                out->AddAxis(cosThStarID,10, 0,1);
        }
    }
    // MC-truth outputs: generated mothers and phase-space maps.
    if (isMC) {
        TString mode = "SPARSE";
        //TString mode = "HIST";
        //if (collSyst) mode = "SPARSE";
        // create output for the K*+ mother
        AliRsnMiniOutput *out = task->CreateOutput(Form("KStarPlusMinus_MotherMC%s", suffix), mode.Data(), "MOTHER");
        // selection settings
        out->SetDaughter(0, AliRsnDaughter::kKaon0);
        out->SetDaughter(1, AliRsnDaughter::kPion);
        out->SetMotherPDG(323);
        out->SetMotherMass(0.89166);
        // pair cuts
        out->SetPairCuts(PairCutsSame);
        // binnings
        out->AddAxis(imID, 90, 0.6, 1.5);
        out->AddAxis(ptID,300,0, 30);
        // out->AddAxis(k0sDCA, 10, 0.0, 1.0);
        // axis W: Centrality
        if(isPP)  out->AddAxis(centID, 400, 0.5, 400.5);
        else      out->AddAxis(centID, 100, 0,100);
        if(isGT)  out->AddAxis(sdpt,100,0.,10.);
        // axis W: CosThetaStar
        if (!isPP)
            out->AddAxis(cosThStarID,10, 0, 1);
        else
            out->AddAxis(cosThStarID, 10, 0, 1);
        // create output for the K*- mother (reuse the pointer: the second
        // "AliRsnMiniOutput *out" declaration was a redefinition).
        out = task->CreateOutput(Form("AKStarPlusMinus_MotherMC%s", suffix), mode.Data(), "MOTHER");
        // selection settings
        out->SetDaughter(0, AliRsnDaughter::kKaon0);
        out->SetDaughter(1, AliRsnDaughter::kPion);
        out->SetMotherPDG(-323);
        out->SetMotherMass(0.89166);
        // pair cuts
        out->SetPairCuts(PairCutsSame);
        // binnings
        out->AddAxis(imID, 90, 0.6, 1.5);
        out->AddAxis(ptID,300, 0, 30);
        // out->AddAxis(k0sDCA, 10, 0.0, 1.0);
        // axis W: Centrality
        if(isPP)  out->AddAxis(centID, 400, 0.5, 400.5);
        else      out->AddAxis(centID, 100, 0,100);
        if(isGT)  out->AddAxis(sdpt,100,0.,10.);
        // axis W: CosThetaStar
        if (!isPP)
            out->AddAxis(cosThStarID,10, 0, 1);
        else
            out->AddAxis(cosThStarID, 10, 0, 1);
        // daughter phase-space maps (coarse and fine binning)
        AliRsnMiniOutput* outps=task->CreateOutput(Form("K*_phaseSpace%s", suffix),"HIST","TRUE");
        outps->SetDaughter(0,AliRsnDaughter::kKaon0);
        outps->SetDaughter(1,AliRsnDaughter::kPion);
        outps->SetCutID(0,iCutK0s);
        outps->SetCutID(1,iCutPi);
        outps->SetMotherPDG(323);
        outps->SetMotherMass(0.89166);
        outps->SetPairCuts(PairCutsSame);
        outps->AddAxis(fdpt,100,0.,10.);
        outps->AddAxis(sdpt,100,0.,10.);
        outps->AddAxis(ptID,200,0.,20.);
        AliRsnMiniOutput* outpsf=task->CreateOutput(Form("K*_phaseSpaceFine%s", suffix),"HIST","TRUE");
        outpsf->SetDaughter(0,AliRsnDaughter::kKaon0);
        outpsf->SetDaughter(1,AliRsnDaughter::kPion);
        outpsf->SetCutID(0,iCutK0s);
        outpsf->SetCutID(1,iCutPi);
        outpsf->SetMotherPDG(323);
        outpsf->SetMotherMass(0.89166);
        outpsf->SetPairCuts(PairCutsSame);
        outpsf->AddAxis(fdpt,30,0.,3.);
        outpsf->AddAxis(sdpt,30,0.,3.);
        outpsf->AddAxis(ptID,300,0.,3.);
    }
    return kTRUE;
}
void AddMonitorOutput_PionPt(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *ppt=0)
{
   // Monitor histogram for the pion candidate transverse momentum.
   AliRsnValueDaughter *ptAxis = new AliRsnValueDaughter("pion_pt", AliRsnValueDaughter::kPt);
   ptAxis->SetBins(0.,10.0,0.001);
   AliRsnListOutput *ptMonitor = new AliRsnListOutput("Pion_Pt", AliRsnListOutput::kHistoDefault);
   ptMonitor->AddValue(ptAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(ptMonitor);
   if (ppt) ppt->AddOutput(ptMonitor);
}
void AddMonitorOutput_PionEta(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *peta=0)
{
   // Monitor histogram for the pion candidate pseudorapidity.
   AliRsnValueDaughter *etaAxis = new AliRsnValueDaughter("pion_eta", AliRsnValueDaughter::kEta);
   etaAxis->SetBins(-2.,2.,0.001);
   AliRsnListOutput *etaMonitor = new AliRsnListOutput("Pion_Eta", AliRsnListOutput::kHistoDefault);
   etaMonitor->AddValue(etaAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(etaMonitor);
   if (peta) peta->AddOutput(etaMonitor);
}
void AddMonitorOutput_PionDCAxy(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *pdcaxy=0)
{
   // Monitor histogram for the pion candidate transverse DCA.
   AliRsnValueDaughter *dcaxyAxis = new AliRsnValueDaughter("pion_dcaxy", AliRsnValueDaughter::kDCAXY);
   dcaxyAxis->SetBins(-0.5,0.5,0.001);
   AliRsnListOutput *dcaxyMonitor = new AliRsnListOutput("Pion_DCAxy", AliRsnListOutput::kHistoDefault);
   dcaxyMonitor->AddValue(dcaxyAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(dcaxyMonitor);
   if (pdcaxy) pdcaxy->AddOutput(dcaxyMonitor);
}
void AddMonitorOutput_PionDCAz(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *pdcaz=0)
{
   // Monitor histogram for the pion candidate longitudinal DCA.
   AliRsnValueDaughter *dcazAxis = new AliRsnValueDaughter("pion_dcaz", AliRsnValueDaughter::kDCAZ);
   dcazAxis->SetBins(-2.5,2.5,0.005);
   AliRsnListOutput *dcazMonitor = new AliRsnListOutput("Pion_DCAz", AliRsnListOutput::kHistoDefault);
   dcazMonitor->AddValue(dcazAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(dcazMonitor);
   if (pdcaz) pdcaz->AddOutput(dcazMonitor);
}
void AddMonitorOutput_PionPIDCut(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *piPID=0)
{
   // Monitor histogram for the pion TPC n-sigma PID response.
   AliRsnValueDaughter *pidAxis = new AliRsnValueDaughter("pionPID", AliRsnValueDaughter::kTPCnsigmaPi);
   pidAxis->SetBins(0.0,5,0.01);
   AliRsnListOutput *pidMonitor = new AliRsnListOutput("Pion_PID_Cut", AliRsnListOutput::kHistoDefault);
   pidMonitor->AddValue(pidAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(pidMonitor);
   if (piPID) piPID->AddOutput(pidMonitor);
}
void AddMonitorOutput_PionNTPC(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *piNTPC=0)
{
   // Monitor histogram for the number of TPC clusters of pion candidates.
   AliRsnValueDaughter *axisPionNTPC = new AliRsnValueDaughter("pionNTPC", AliRsnValueDaughter::kNTPCclusters);
   axisPionNTPC->SetBins(0.0,200,1);
   // output: 2D histogram
   AliRsnListOutput *outMonitorPionNTPC = new AliRsnListOutput("Pion_NTPC", AliRsnListOutput::kHistoDefault);
   outMonitorPionNTPC->AddValue(axisPionNTPC);
   // add outputs to loop
   if (mon) mon->Add(outMonitorPionNTPC);
   // Fix: was "pNTPC->AddOutput(...)" -- "pNTPC" is undeclared; the
   // parameter is named "piNTPC".
   if (piNTPC) piNTPC->AddOutput(outMonitorPionNTPC);
}
void AddMonitorOutput_PionTPCchi2(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *piTPCchi2=0)
{
   // Monitor histogram for the TPC chi2/cluster of pion candidates.
   AliRsnValueDaughter *axisPionTPCchi2 = new AliRsnValueDaughter("pionTPCchi2", AliRsnValueDaughter::kTPCchi2);
   axisPionTPCchi2->SetBins(0.0,6,.1);
   // output: 2D histogram
   AliRsnListOutput *outMonitorPionTPCchi2 = new AliRsnListOutput("Pion_TPCchi2", AliRsnListOutput::kHistoDefault);
   outMonitorPionTPCchi2->AddValue(axisPionTPCchi2);
   // add outputs to loop
   if (mon) mon->Add(outMonitorPionTPCchi2);
   // Fix: was "pTPCchi2->AddOutput(...)" -- "pTPCchi2" is undeclared;
   // the parameter is named "piTPCchi2".
   if (piTPCchi2) piTPCchi2->AddOutput(outMonitorPionTPCchi2);
}
void AddMonitorOutput_K0sP(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *lp=0)
{
   // Monitor histogram for the K0s total momentum.
   AliRsnValueDaughter *pAxis = new AliRsnValueDaughter("k0s_momentum", AliRsnValueDaughter::kP);
   pAxis->SetBins(0.,15.,0.001);
   AliRsnListOutput *pMonitor = new AliRsnListOutput("K0s_Momentum", AliRsnListOutput::kHistoDefault);
   pMonitor->AddValue(pAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(pMonitor);
   if (lp) lp->AddOutput(pMonitor);
}
void AddMonitorOutput_K0sPt(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *lpt=0)
{
   // Monitor histogram for the K0s transverse momentum.
   AliRsnValueDaughter *ptAxis = new AliRsnValueDaughter("k0s_transversemomentum", AliRsnValueDaughter::kV0Pt);
   ptAxis->SetBins(0.,15.,0.001);
   AliRsnListOutput *ptMonitor = new AliRsnListOutput("K0s_TransverseMomentum", AliRsnListOutput::kHistoDefault);
   ptMonitor->AddValue(ptAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(ptMonitor);
   if (lpt) lpt->AddOutput(ptMonitor);
}
void AddMonitorOutput_K0sNegDaughPt(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *lnpt=0)
{
   // Monitor histogram for the pt of the negative K0s daughter.
   AliRsnValueDaughter *negPtAxis = new AliRsnValueDaughter("k0s_negdaugh_transversemomentum", AliRsnValueDaughter::kV0NPt);
   negPtAxis->SetBins(0.,15.,0.001);
   AliRsnListOutput *negPtMonitor = new AliRsnListOutput("K0s_NegDaugh_TransverseMomentum", AliRsnListOutput::kHistoDefault);
   negPtMonitor->AddValue(negPtAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(negPtMonitor);
   if (lnpt) lnpt->AddOutput(negPtMonitor);
}
void AddMonitorOutput_K0sPosDaughPt(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *lppt=0)
{
   // Monitor histogram for the pt of the positive K0s daughter.
   // (The original comment said "Mass", which was a copy-paste slip.)
   AliRsnValueDaughter *posPtAxis = new AliRsnValueDaughter("k0s_posdaugh_transversemomentum", AliRsnValueDaughter::kV0PPt);
   posPtAxis->SetBins(0.,15.,0.001);
   AliRsnListOutput *posPtMonitor = new AliRsnListOutput("K0s_PosDaugh_TransverseMomentum", AliRsnListOutput::kHistoDefault);
   posPtMonitor->AddValue(posPtAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(posPtMonitor);
   if (lppt) lppt->AddOutput(posPtMonitor);
}
void AddMonitorOutput_K0sMass(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *lm=0)
{
   // Monitor histogram for the K0s invariant mass.
   AliRsnValueDaughter *massAxis = new AliRsnValueDaughter("k0s_mass", AliRsnValueDaughter::kV0Mass);
   massAxis->SetBins(0.4,0.6,0.001);
   AliRsnListOutput *massMonitor = new AliRsnListOutput("K0s_Mass", AliRsnListOutput::kHistoDefault);
   massMonitor->AddValue(massAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(massMonitor);
   if (lm) lm->AddOutput(massMonitor);
}
void AddMonitorOutput_K0sDCA(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *ldca=0)
{
   // Monitor histogram for the K0s DCA to the primary vertex.
   AliRsnValueDaughter *dcaAxis = new AliRsnValueDaughter("k0s_dca", AliRsnValueDaughter::kV0DCA);
   dcaAxis->SetBins(0.0,0.4,0.001);
   AliRsnListOutput *dcaMonitor = new AliRsnListOutput("K0s_DCA", AliRsnListOutput::kHistoDefault);
   dcaMonitor->AddValue(dcaAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(dcaMonitor);
   if (ldca) ldca->AddOutput(dcaMonitor);
}
void AddMonitorOutput_K0sRadius(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *ldca=0)
{
   // Monitor histogram for the K0s decay radius.
   AliRsnValueDaughter *radiusAxis = new AliRsnValueDaughter("k0s_radius", AliRsnValueDaughter::kV0Radius);
   radiusAxis->SetBins(0.0,200,0.2);
   AliRsnListOutput *radiusMonitor = new AliRsnListOutput("K0s_Radius", AliRsnListOutput::kHistoDefault);
   radiusMonitor->AddValue(radiusAxis);
   // Attach the output to the monitor array and/or the daughter loop.
   if (mon) mon->Add(radiusMonitor);
   if (ldca) ldca->AddOutput(radiusMonitor);
}
void AddMonitorOutput_K0sDaughterDCA(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *ldaugdca=0)
{
   // Monitor histogram: DCA between the two K0s daughter tracks.
   AliRsnValueDaughter *axisK0sDDCA = new AliRsnValueDaughter("k0s_daughterDCA", AliRsnValueDaughter::kDaughterDCA);
   axisK0sDDCA->SetBins(0.0,2,0.001);
   // output: 2D histogram
   AliRsnListOutput *outMonitorK0sDDCA = new AliRsnListOutput("K0s_DaughterDCA", AliRsnListOutput::kHistoDefault);
   outMonitorK0sDDCA->AddValue(axisK0sDDCA);
   // add outputs to loop (monitor array and/or daughter loop, when provided)
   if (mon) mon->Add(outMonitorK0sDDCA);
   if (ldaugdca) ldaugdca->AddOutput(outMonitorK0sDDCA);
}
void AddMonitorOutput_K0sCosPointAngle(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *lcpa=0)
{
   // Monitor histogram: cosine of the pointing angle of the K0s candidate.
   AliRsnValueDaughter *axisK0sCPA = new AliRsnValueDaughter("k0s_cospointang", AliRsnValueDaughter::kCosPointAng);
   //axisK0sCPA->SetBins(0.97,1.,0.0001);
   // Binning was widened from [0.97,1] to [0.9,1]; the commented line above is
   // the previous choice, kept for reference.
   axisK0sCPA->SetBins(0.9,1.,0.0001);
   // output: 2D histogram
   AliRsnListOutput *outMonitorK0sCPA = new AliRsnListOutput("K0s_CosineOfPointingAngle", AliRsnListOutput::kHistoDefault);
   outMonitorK0sCPA->AddValue(axisK0sCPA);
   // add outputs to loop (monitor array and/or daughter loop, when provided)
   if (mon) mon->Add(outMonitorK0sCPA);
   if (lcpa) lcpa->AddOutput(outMonitorK0sCPA);
}
//added by me /////////////////////////////////////////////////
/*
void AddMonitorOutput_ArmentousCut(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *lac=0)
{
// K0s Arm Cut
AliRsnValueDaughter *axisK0sAC = new AliRsnValueDaughter("K0s_ArmCut", AliRsnValueDaughter::Armentous);
axisK0sAC->SetBins(-10.0,10.0,0.1);
// output: 2D histogram
AliRsnListOutput *outMonitorK0sAC = new AliRsnListOutput("K0s_ArmentousCut", AliRsnListOutput::kHistoDefault);
outMonitorK0sAC->AddValue(axisK0sAC);
// add outputs to loop
if (mon) mon->Add(outMonitorK0sAC);
if (lac) lac->AddOutput(outMonitorK0sAC);
}
*/
///////////////////////////////////////////////////////////////////
void AddMonitorOutput_K0sPionPID(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *lpiPID=0)
{
   // Monitor histogram for the pion-daughter PID value
   // (kLambdaPionPIDCut axis as defined by AliRsnValueDaughter).
   AliRsnListOutput *pidOutput = new AliRsnListOutput("K0s_PionPID", AliRsnListOutput::kHistoDefault);
   AliRsnValueDaughter *pidAxis = new AliRsnValueDaughter("k0s_pionPID", AliRsnValueDaughter::kLambdaPionPIDCut);
   pidAxis->SetBins(0.0,5,0.01);
   pidOutput->AddValue(pidAxis);
   // Register with the monitor array and/or the daughter loop, when provided.
   if (mon) mon->Add(pidOutput);
   if (lpiPID) lpiPID->AddOutput(pidOutput);
}
void AddMonitorOutput_K0sAntiPionPID(TObjArray *mon=0,TString opt="",AliRsnLoopDaughter *lapiPID=0)
{
   // Monitor histogram for the anti-pion-daughter PID value
   // (kAntiLambdaAntiPionPIDCut axis as defined by AliRsnValueDaughter).
   AliRsnValueDaughter *axisK0sAntiPionPID = new AliRsnValueDaughter("k0s_antipionPID", AliRsnValueDaughter::kAntiLambdaAntiPionPIDCut);
   axisK0sAntiPionPID->SetBins(0.0,5,0.01);
   // output: 2D histogram
   AliRsnListOutput *outMonitorK0sAntiPionPID = new AliRsnListOutput("K0s_AntiPionPID", AliRsnListOutput::kHistoDefault);
   outMonitorK0sAntiPionPID->AddValue(axisK0sAntiPionPID);
   // add outputs to loop (monitor array and/or daughter loop, when provided)
   if (mon) mon->Add(outMonitorK0sAntiPionPID);
   // BUGFIX: the original referenced 'lpiPID', which is not declared in this
   // function (copy/paste from AddMonitorOutput_K0sPionPID); the parameter
   // here is 'lapiPID'.
   if (lapiPID) lapiPID->AddOutput(outMonitorK0sAntiPionPID);
}
void AddMonitorOutput_MinDCAToVertexXYPtDep(TObjArray *mon=0, TString opt="", AliRsnLoopDaughter *trackDCAXY=0)
{
   // DCAXY of Tracks
   // NOTE(review): this body is identical to AddMonitorOutput_MinDCAToVertexXY
   // below, and both book a histogram named "DCAXY_Tracks"; despite the name,
   // no pt-dependent value is used here — confirm intent.
   AliRsnValueDaughter *axisDCATracks = new AliRsnValueDaughter("dcaXY_tracks", AliRsnValueDaughter::kV0DCAXY);
   axisDCATracks->SetBins(0.0,2,0.001);
   // output: 2D histogram
   AliRsnListOutput *outMonitorDCATracks = new AliRsnListOutput("DCAXY_Tracks", AliRsnListOutput::kHistoDefault);
   outMonitorDCATracks->AddValue(axisDCATracks);
   // add outputs to loop (monitor array and/or daughter loop, when provided)
   if (mon) mon->Add(outMonitorDCATracks);
   if (trackDCAXY) trackDCAXY->AddOutput(outMonitorDCATracks);
}
// DCA V0 Secondary Tracks to Primary Vertex
void AddMonitorOutput_MinDCAToVertexXY(TObjArray *mon=0, TString opt="", AliRsnLoopDaughter *trackDCAXY=0)
{
   // DCAXY of Tracks (transverse-plane DCA of the V0 daughter tracks)
   AliRsnValueDaughter *axisDCATracks = new AliRsnValueDaughter("dcaXY_tracks", AliRsnValueDaughter::kV0DCAXY);
   axisDCATracks->SetBins(0.0,2,0.001);
   // output: 2D histogram
   AliRsnListOutput *outMonitorDCATracks = new AliRsnListOutput("DCAXY_Tracks", AliRsnListOutput::kHistoDefault);
   outMonitorDCATracks->AddValue(axisDCATracks);
   // add outputs to loop (monitor array and/or daughter loop, when provided)
   if (mon) mon->Add(outMonitorDCATracks);
   if (trackDCAXY) trackDCAXY->AddOutput(outMonitorDCATracks);
}
// Lifetime of V0 particle.
void AddMonitorOutput_K0sfpLife(TObjArray *mon=0, TString opt="", AliRsnLoopDaughter *llifetime=0)
{
   // Monitor histogram: lifetime of the K0s (V0) candidate.
   AliRsnValueDaughter *k0slifetime = new AliRsnValueDaughter("lifetime", AliRsnValueDaughter::kV0Lifetime);
   k0slifetime->SetBins(0.0,200,0.1);
   // output: 2D histogram (output list is named just "k0s")
   AliRsnListOutput *outMonitork0sLifetime = new AliRsnListOutput("k0s", AliRsnListOutput::kHistoDefault);
   outMonitork0sLifetime->AddValue(k0slifetime);
   // add outputs to loop (monitor array and/or daughter loop, when provided)
   if (mon) mon->Add(outMonitork0sLifetime);
   if (llifetime) llifetime->AddOutput(outMonitork0sLifetime);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
void AddMonitorOutput_K0sMass_Pt(TObjArray *mon=0, TString opt="", AliRsnLoopDaughter *lMass=0, AliRsnLoopDaughter *lPt=0)
{
   // 2D monitor histogram: K0s invariant mass vs. transverse momentum.
   AliRsnValueDaughter *axisMass = new AliRsnValueDaughter("K0s_Mass", AliRsnValueDaughter::kV0Mass);
   axisMass->SetBins(0.4,0.6,0.001);
   AliRsnValueDaughter *axisK0sPt = new AliRsnValueDaughter("K0s_Pt", AliRsnValueDaughter::kV0Pt);
   axisK0sPt->SetBins(0.,30.,0.001);
   // output: 2D histogram with both axes
   AliRsnListOutput *outMonitorTrMom = new AliRsnListOutput("K0s_Mass_Pt", AliRsnListOutput::kHistoDefault);
   outMonitorTrMom->AddValue(axisMass);
   outMonitorTrMom->AddValue(axisK0sPt);
   // add outputs to loop
   if (mon) mon->Add(outMonitorTrMom);
   // BUGFIX: the original referenced 'lpt', which is not declared (C++ is
   // case-sensitive); the parameter is 'lPt'.
   if (lPt) lPt->AddOutput(outMonitorTrMom);
   // NOTE(review): 'lMass' is accepted but currently unused (the original left
   // its usage commented out); kept for interface compatibility.
}
Bool_t SetCustomQualityCut(AliRsnCutTrackQuality * trkQualityCut, Int_t customQualityCutsID = 0, Int_t customFilterBit = 0)
{
  //Sets configuration for track quality object different from std quality cuts.
  //Returns kTRUE if track quality cut object is successfully defined,
  //returns kFALSE if an invalid set of cuts (customQualityCutsID) is chosen or if the
  //object to be configured does not exist.
  if ((!trkQualityCut)){
    Printf("::::: SetCustomQualityCut:: use default quality cuts specified in task configuration.");
    return kFALSE;
  }
  // IDs 1-99 (except 2): start from the standard 2011 cuts, then apply one
  // single-cut variation selected by the ID.
  if(customQualityCutsID>=1 && customQualityCutsID<100 && customQualityCutsID!=2){
    trkQualityCut->SetDefaults2011(kTRUE,kTRUE);
    Printf(Form("::::: SetCustomQualityCut:: using standard 2011 track quality cuts"));
    if(!customFilterBit){//ESD
      if(customQualityCutsID==3){trkQualityCut->GetESDtrackCuts()->SetMaxDCAToVertexXYPtDep("0.0150+0.0500/pt^1.1");}
      else if(customQualityCutsID==4){trkQualityCut->GetESDtrackCuts()->SetMaxDCAToVertexXYPtDep("0.006+0.0200/pt^1.1");}
      else if(customQualityCutsID==5){trkQualityCut->GetESDtrackCuts()->SetMaxDCAToVertexZ(5.);}
      else if(customQualityCutsID==6){trkQualityCut->GetESDtrackCuts()->SetMaxDCAToVertexZ(0.2);}
      else if(customQualityCutsID==7){trkQualityCut->GetESDtrackCuts()->SetMaxChi2PerClusterTPC(5.);}
      else if(customQualityCutsID==8){trkQualityCut->GetESDtrackCuts()->SetMaxChi2PerClusterTPC(2.3);}
      else if(customQualityCutsID==9){trkQualityCut->GetESDtrackCuts()->SetMinNCrossedRowsTPC(60);}
      else if(customQualityCutsID==10){trkQualityCut->GetESDtrackCuts()->SetMinNCrossedRowsTPC(100);}
      else if(customQualityCutsID==11){trkQualityCut->GetESDtrackCuts()->SetMinRatioCrossedRowsOverFindableClustersTPC(0.7);}
      else if(customQualityCutsID==12){trkQualityCut->GetESDtrackCuts()->SetMinRatioCrossedRowsOverFindableClustersTPC(0.9);}
      else if(customQualityCutsID==13){trkQualityCut->GetESDtrackCuts()->SetMaxChi2PerClusterITS(49.);}
      else if(customQualityCutsID==14){trkQualityCut->GetESDtrackCuts()->SetMaxChi2PerClusterITS(4.);}
      else if(customQualityCutsID==15){trkQualityCut->GetESDtrackCuts()->SetMaxChi2TPCConstrainedGlobal(49.);}
      else if(customQualityCutsID==16){trkQualityCut->GetESDtrackCuts()->SetMaxChi2TPCConstrainedGlobal(25.);}
      else if(customQualityCutsID==17){trkQualityCut->GetESDtrackCuts()->SetClusterRequirementITS(AliESDtrackCuts::kSPD,AliESDtrackCuts::kOff);}
      else if(customQualityCutsID==56){trkQualityCut->GetESDtrackCuts()->SetMaxDCAToVertexZ(1.);}
      else if(customQualityCutsID==58){trkQualityCut->GetESDtrackCuts()->SetMaxChi2PerClusterTPC(3.);}
      else if(customQualityCutsID==60){trkQualityCut->GetESDtrackCuts()->SetMinNCrossedRowsTPC(80);}
      else if(customQualityCutsID==64){trkQualityCut->GetESDtrackCuts()->SetMaxChi2PerClusterITS(25.);}
    }else{//AOD
      trkQualityCut->SetCheckOnlyFilterBit(kFALSE);
      if(customQualityCutsID==4){trkQualityCut->SetDCARPtFormula("0.006+0.0200/pt^1.1");}
      else if(customQualityCutsID==6){trkQualityCut->SetDCAZmax(0.2);}
      else if(customQualityCutsID==8){trkQualityCut->SetTrackMaxChi2(2.3);}
      else if(customQualityCutsID==10){trkQualityCut->SetMinNCrossedRowsTPC(100,kTRUE);}
      else if(customQualityCutsID==12){trkQualityCut->SetMinNCrossedRowsOverFindableClsTPC(0.9,kTRUE);}
      else if(customQualityCutsID==56){trkQualityCut->SetDCAZmax(1.);}
      else if(customQualityCutsID==58){trkQualityCut->SetTrackMaxChi2(3.5);}
      else if(customQualityCutsID==60){trkQualityCut->SetMinNCrossedRowsTPC(80,kTRUE);}
    }
    trkQualityCut->Print();
    return kTRUE;
  // ID 2 and IDs 100-199: TPC-only defaults, then one single-cut variation.
  }else if(customQualityCutsID==2 || (customQualityCutsID>=100 && customQualityCutsID<200)){
    trkQualityCut->SetDefaultsTPCOnly(kTRUE);
    Printf(Form("::::: SetCustomQualityCut:: using TPC-only track quality cuts"));
    if(customQualityCutsID==103){trkQualityCut->GetESDtrackCuts()->SetMaxDCAToVertexXY(3.);}
    else if(customQualityCutsID==104){trkQualityCut->GetESDtrackCuts()->SetMaxDCAToVertexXY(1.);}
    else if(customQualityCutsID==105){trkQualityCut->GetESDtrackCuts()->SetMaxDCAToVertexZ(4.);}
    else if(customQualityCutsID==106){trkQualityCut->GetESDtrackCuts()->SetMaxDCAToVertexZ(1.);}
    else if(customQualityCutsID==107){trkQualityCut->GetESDtrackCuts()->SetMaxChi2PerClusterTPC(7.);}
    else if(customQualityCutsID==108){trkQualityCut->GetESDtrackCuts()->SetMaxChi2PerClusterTPC(2.5);}
    else if(customQualityCutsID==109){trkQualityCut->GetESDtrackCuts()->SetMinNClustersTPC(30);}
    else if(customQualityCutsID==110){trkQualityCut->GetESDtrackCuts()->SetMinNClustersTPC(85);}
    trkQualityCut->Print();
    return kTRUE;
  }else{
    Printf("::::: SetCustomQualityCut:: use default quality cuts specified in task configuration.");
    return kFALSE;
  }
  // NOTE(review): every branch of the if/else chain above returns, so all of
  // the code below is unreachable dead legacy code (the pA 2013 "by hand"
  // configuration). It is kept verbatim for reference — consider deleting it.
  //for pA 2013
  //trkQualityCut->SetDefaults2011();//with filter bit=10
  //reset filter bit to very loose cuts
  trkQualityCut->SetAODTestFilterBit(customFilterBit);
  //apply all other cuts "by hand"
  trkQualityCut->SetCheckOnlyFilterBit(kFALSE);
  trkQualityCut->SetMinNCrossedRowsTPC(70, kTRUE);
  trkQualityCut->SetMinNCrossedRowsOverFindableClsTPC(0.8, kTRUE);
  trkQualityCut->SetMaxChi2TPCConstrainedGlobal(36);//used for ESD only - for AOD does not correspond to any cut
  trkQualityCut->SetTPCmaxChi2(4.0); //already in filter bit 0
  trkQualityCut->SetRejectKinkDaughters(kTRUE); //already in filter bit 0
  trkQualityCut->SetSPDminNClusters(AliESDtrackCuts::kAny);
  trkQualityCut->SetITSmaxChi2(36);
  trkQualityCut->AddStatusFlag(AliESDtrack::kTPCin   , kTRUE);//already in defaults 2011
  trkQualityCut->AddStatusFlag(AliESDtrack::kTPCrefit, kTRUE);//already in defaults 2011
  trkQualityCut->AddStatusFlag(AliESDtrack::kITSrefit, kTRUE);//already in defaults 2011
  if (customQualityCutsID==AliRsnCutSetDaughterParticle::kFilterBitCustom) {
    trkQualityCut->SetCheckOnlyFilterBit(kTRUE);
  }
  if (customQualityCutsID==AliRsnCutSetDaughterParticle::kStdLooserDCAXY){
    trkQualityCut->SetDCARmax(2.4);
  } else {
    trkQualityCut->SetDCARPtFormula("0.0105+0.0350/pt^1.1");
  }
  if (customQualityCutsID==AliRsnCutSetDaughterParticle::kStdLooserDCAZ){
    trkQualityCut->SetDCAZmax(3.2);
  } else {
    trkQualityCut->SetDCAZmax(2.0);
  }
  if (customQualityCutsID==AliRsnCutSetDaughterParticle::kStdCrossedRows60){
    trkQualityCut->SetMinNCrossedRowsTPC(60, kTRUE);
  }
  if (customQualityCutsID==AliRsnCutSetDaughterParticle::kStdCrossedRows80){
    trkQualityCut->SetMinNCrossedRowsTPC(80, kTRUE);
  }
  if (customQualityCutsID==AliRsnCutSetDaughterParticle::kStdRowsToCls075){
    trkQualityCut->SetMinNCrossedRowsOverFindableClsTPC(0.75, kTRUE);
  }
  if (customQualityCutsID==AliRsnCutSetDaughterParticle::kStdRowsToCls085){
    trkQualityCut->SetMinNCrossedRowsOverFindableClsTPC(0.85, kTRUE);
  }
  if (customQualityCutsID==AliRsnCutSetDaughterParticle::kStdCls70){
    trkQualityCut->SetAODTestFilterBit(10);
    trkQualityCut->SetTPCminNClusters(70);
  }
  if (customQualityCutsID==AliRsnCutSetDaughterParticle::kStdChi2TPCCls35){
    trkQualityCut->SetTPCmaxChi2(3.5);
  }
  trkQualityCut->SetPtRange(0.15, 30.0);
  trkQualityCut->SetEtaRange(-0.8, 0.8);
  Printf(Form("::::: SetCustomQualityCut:: using custom track quality cuts #%i",customQualityCutsID));
  trkQualityCut->Print();
  return kTRUE;
}
| bsd-3-clause |
chromium/chromium | base/timer/wall_clock_timer.cc | 1964 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/timer/wall_clock_timer.h"
#include <utility>
#include "base/power_monitor/power_monitor.h"
#include "base/time/clock.h"
#include "base/time/default_clock.h"
#include "base/time/default_tick_clock.h"
#include "base/time/tick_clock.h"
namespace base {

WallClockTimer::WallClockTimer() = default;

// A null |clock| falls back to DefaultClock; |tick_clock| is forwarded to the
// underlying timer (where null likewise selects the default tick clock).
WallClockTimer::WallClockTimer(const Clock* clock, const TickClock* tick_clock)
    : timer_(tick_clock), clock_(clock ? clock : DefaultClock::GetInstance()) {}

WallClockTimer::~WallClockTimer() {
  RemoveObserver();
}

// Arms the timer to run |user_task| at wall-clock time |desired_run_time|.
// The deadline is converted into a tick-based delay here and recomputed on
// power resume (see OnResume), which keeps the wall-clock deadline accurate
// across system suspend.
void WallClockTimer::Start(const Location& posted_from,
                           Time desired_run_time,
                           OnceClosure user_task) {
  user_task_ = std::move(user_task);
  posted_from_ = posted_from;
  desired_run_time_ = desired_run_time;
  AddObserver();
  timer_.Start(posted_from_, desired_run_time_ - Now(), this,
               &WallClockTimer::RunUserTask);
}

// Cancels the pending task (if any) and drops the stored callback.
void WallClockTimer::Stop() {
  timer_.Stop();
  user_task_.Reset();
  RemoveObserver();
}

bool WallClockTimer::IsRunning() const {
  return timer_.IsRunning();
}

void WallClockTimer::OnResume() {
  // This will actually restart timer with smaller delay
  // (recomputed from the fixed wall-clock deadline after the suspend).
  timer_.Start(posted_from_, desired_run_time_ - Now(), this,
               &WallClockTimer::RunUserTask);
}

// Registers this object as a power-suspend observer exactly once;
// balanced by RemoveObserver().
void WallClockTimer::AddObserver() {
  if (!observer_added_) {
    PowerMonitor::AddPowerSuspendObserver(this);
    observer_added_ = true;
  }
}

void WallClockTimer::RemoveObserver() {
  if (observer_added_) {
    PowerMonitor::RemovePowerSuspendObserver(this);
    observer_added_ = false;
  }
}

// Runs the user callback. |user_task_| is cleared (via std::exchange) before
// Run() is invoked, so the callback may safely restart this timer.
void WallClockTimer::RunUserTask() {
  DCHECK(user_task_);
  RemoveObserver();
  std::exchange(user_task_, {}).Run();
}

Time WallClockTimer::Now() const {
  return clock_->Now();
}

}  // namespace base
| bsd-3-clause |
xp3i4/seqan | demos/alignment_local.cpp | 4016 | ///A tutorial about local alignments.
#include <iostream>
#include <seqan/score.h>
#include <seqan/align.h>
using namespace seqan;
int main()
{
    ///Example 1: This program applies the Smith-Waterman algorithm to compute the best local alignment between two given sequences.
    Align<String<char> > ali;
    resize(rows(ali), 2);
    assignSource(row(ali, 0), "aphilologicaltheorem");
    assignSource(row(ali, 1), "bizarreamphibology");

    int score = localAlignment(ali, Score<int>(3, -3, -2, -2));
    std::cout << "Score = " << score << std::endl;
    std::cout << ali;
    // BUGFIX: the Seq2 interval must be read from row 1, not row 0
    // (the original printed row 0's clipped positions for both sequences).
    unsigned cBeginPos0 = clippedBeginPosition(row(ali, 0));
    unsigned cEndPos0 = clippedEndPosition(row(ali, 0)) - 1;
    unsigned cBeginPos1 = clippedBeginPosition(row(ali, 1));
    unsigned cEndPos1 = clippedEndPosition(row(ali, 1)) - 1;
    std::cout << "Aligns Seq1[" << cBeginPos0 << ":" << cEndPos0 << "]";
    std::cout << " and Seq2[" << cBeginPos1 << ":" << cEndPos1 << "]" << std::endl << std::endl;

    ///Example 2: This program applies the Waterman-Eggert algorithm to compute all non-overlapping local alignments with score better or equal 2.
    Align<String<Dna> > ali2;
    resize(rows(ali2), 2);
    assignSource(row(ali2, 0), "ataagcgtctcg");
    assignSource(row(ali2, 1), "tcatagagttgc");

    Score<int> scoring(2, -1, -2, 0);
    LocalAlignmentEnumerator<Score<int>, Unbanded> enumerator(scoring, 2);
    while (nextLocalAlignment(ali2, enumerator))
    {
        std::cout << "Score = " << getScore(enumerator) << std::endl;
        std::cout << ali2;
        unsigned cBeginPos0 = clippedBeginPosition(row(ali2, 0));
        unsigned cEndPos0 = clippedEndPosition(row(ali2, 0)) - 1;
        unsigned cBeginPos1 = clippedBeginPosition(row(ali2, 1));
        // BUGFIX: was clippedBeginPosition(...) - 1, which reported the end of
        // the Seq2 interval one position before its own begin.
        unsigned cEndPos1 = clippedEndPosition(row(ali2, 1)) - 1;
        std::cout << "Aligns Seq1[" << cBeginPos0 << ":" << cEndPos0 << "]";
        std::cout << " and Seq2[" << cBeginPos1 << ":" << cEndPos1 << "]";
        std::cout << std::endl << std::endl;
    }

    ///Example 3: Waterman-Eggert with minimal score 5 on short Dna sequences.
    Align<String<Dna> > ali3;
    resize(rows(ali3), 2);
    assignSource(row(ali3, 0), "cccccc");
    assignSource(row(ali3, 1), "tttttggccccccgg");

    Score<int> scoring3(1, -1, -1, -1);
    LocalAlignmentEnumerator<Score<int>, Unbanded> enumerator3(scoring3, 5);
    while (nextLocalAlignment(ali3, enumerator3))
    {
        std::cout << "Score = " << getScore(enumerator3) << std::endl;
        std::cout << ali3;
        unsigned cBeginPos0 = clippedBeginPosition(row(ali3, 0));
        unsigned cEndPos0 = clippedEndPosition(row(ali3, 0)) - 1;
        unsigned cBeginPos1 = clippedBeginPosition(row(ali3, 1));
        unsigned cEndPos1 = clippedEndPosition(row(ali3, 1)) - 1;
        std::cout << "Aligns Seq1[" << cBeginPos0 << ":" << cEndPos0 << "]";
        std::cout << " and Seq2[" << cBeginPos1 << ":" << cEndPos1 << "]";
        std::cout << std::endl << std::endl;
    }

    ///Example 4: This program applies the banded Waterman-Eggert algorithm to compute all non-overlapping local alignments with score better or equal 5
    /// in the band from diagonal -1 to diagonal 8.
    Align<String<Dna5> > ali4;
    resize(rows(ali4), 2);
    assignSource(row(ali4, 0), "AAAAAAANAAAGGGNGGGGGGGGNGGGGGANAA");
    assignSource(row(ali4, 1), "GGGGGGCGGGGGGGA");

    // BUGFIX: use the scoring scheme declared for this example (scoring4);
    // the original passed scoring3 and left scoring4 unused. An unused
    // leftover 'LocalAlignmentFinder<> finder4(ali4);' was removed as well.
    Score<int> scoring4(1, -1, -1, -1);
    LocalAlignmentEnumerator<Score<int>, Banded> enumerator4(scoring4, -1, 8, 5);
    while (nextLocalAlignment(ali4, enumerator4))
    {
        std::cout << "Score = " << getScore(enumerator4) << std::endl;
        std::cout << ali4;
        unsigned cBeginPos0 = clippedBeginPosition(row(ali4, 0));
        unsigned cEndPos0 = clippedEndPosition(row(ali4, 0)) - 1;
        unsigned cBeginPos1 = clippedBeginPosition(row(ali4, 1));
        unsigned cEndPos1 = clippedEndPosition(row(ali4, 1)) - 1;
        std::cout << "Aligns Seq1[" << cBeginPos0 << ":" << cEndPos0 << "]";
        std::cout << " and Seq2[" << cBeginPos1 << ":" << cEndPos1 << "]";
        std::cout << std::endl << std::endl;
    }
    return 0;
}
| bsd-3-clause |
featurist/cdnjs | ajax/libs/yui/3.17.2/series-areaspline/series-areaspline-min.js | 409 | /*
YUI 3.17.2 (build 9c3c78e)
Copyright 2014 Yahoo! Inc. All rights reserved.
Licensed under the BSD License.
http://yuilibrary.com/license/
*/
YUI.add("series-areaspline",function(e,t){e.AreaSplineSeries=e.Base.create("areaSplineSeries",e.AreaSeries,[e.CurveUtil],{drawSeries:function(){this.drawAreaSpline()}},{ATTRS:{type:{value:"areaSpline"}}})},"3.17.2",{requires:["series-area","series-curve-util"]});
| mit |
deek87/concrete5 | concrete/src/Url/Resolver/PathUrlResolver.php | 4705 | <?php
namespace Concrete\Core\Url\Resolver;
use Concrete\Core\Application\ApplicationAwareInterface;
use Concrete\Core\Application\ApplicationAwareTrait;
use Concrete\Core\Application\Service\Dashboard;
use Concrete\Core\Config\Repository\Repository;
use Concrete\Core\Page\Page;
use Concrete\Core\Url\Components\Path;
use Concrete\Core\Url\UrlInterface;
use League\Url\Url;
class PathUrlResolver implements UrlResolverInterface, ApplicationAwareInterface
{
    use ApplicationAwareTrait;

    /**
     * Site configuration repository (read for the URL-rewriting settings).
     *
     * @var \Concrete\Core\Config\Repository\Repository
     */
    protected $config;

    /**
     * Resolver that produces the canonical base URL for a page.
     *
     * @var \Concrete\Core\Url\Resolver\CanonicalUrlResolver
     */
    protected $canonical;

    /**
     * Dashboard service, used to detect dashboard paths.
     *
     * @var \Concrete\Core\Application\Service\Dashboard
     */
    protected $dashboard;

    /**
     * PathUrlResolver constructor.
     *
     * @param \Concrete\Core\Config\Repository\Repository $repository
     * @param \Concrete\Core\Url\Resolver\CanonicalUrlResolver $canonical_resolver
     * @param \Concrete\Core\Application\Service\Dashboard $dashboard
     */
    public function __construct(Repository $repository, CanonicalUrlResolver $canonical_resolver, Dashboard $dashboard)
    {
        $this->config = $repository;
        $this->canonical = $canonical_resolver;
        $this->dashboard = $dashboard;
    }

    /**
     * {@inheritdoc}
     *
     * Resolves a string path (optionally accompanied by a Page object and
     * extra path segments) against the canonical URL. Returns null when the
     * first argument is not string-like, so the next resolver can try.
     *
     * @see \Concrete\Core\Url\Resolver\UrlResolverInterface::resolve()
     */
    public function resolve(array $arguments, $resolved = null)
    {
        if ($resolved) {
            // We don't need to do any post processing on urls.
            return $resolved;
        }

        // Locate the first Page object among the arguments (if any); it
        // anchors the canonical URL the string path is appended to.
        $page = null;
        foreach ($arguments as $key => $argument) {
            if ($argument instanceof Page) {
                $page = $argument;
                break;
            }
        }
        if ($page) {
            // $key still indexes the Page found by the loop above.
            unset($arguments[$key]);
        }

        $args = $arguments;
        $path = array_shift($args);

        // Only string-like paths are handled here; anything else makes this
        // resolver abstain (return null).
        if (is_scalar($path) || (is_object($path) && method_exists($path, '__toString'))) {
            $path = rtrim($path, '/');

            $url = $this->canonical->resolve([$page]);
            $url = $this->handlePath($url, $path, $args);

            return $url;
        }

        return null;
    }

    /**
     * Merges $path (and any extra $args segments) into the canonical $url.
     *
     * If $path is itself a fully-qualified URL (has a scheme), it replaces the
     * canonical URL entirely; otherwise its path/query/fragment components are
     * appended to the canonical URL after the dispatcher base path.
     *
     * @param \Concrete\Core\Url\UrlInterface $url
     * @param string $path
     * @param array $args
     *
     * @return \Concrete\Core\Url\UrlInterface|\League\Url\Url
     */
    protected function handlePath(UrlInterface $url, $path, $args)
    {
        $path_object = $this->basePath($url, $path, $args);
        $components = parse_url($path);

        $reset = false;

        // Were we passed a built URL? If so, just return it.
        if ($string = array_get($components, 'scheme')) {
            try {
                $url = Url::createFromUrl($path);
                $path_object = $url->getPath();
                $reset = true;
            } catch (\Exception $e) {
                // Best effort: an unparsable URL falls through and is treated
                // as a plain path below.
            }
        }

        if (!$reset) {
            // Merge the path, query and fragment components of the raw string.
            if ($string = array_get($components, 'path')) {
                $path_object->append($string);
            }

            if ($string = array_get($components, 'query')) {
                $url = $url->setQuery($string);
            }

            if ($string = array_get($components, 'fragment')) {
                $url = $url->setFragment($string);
            }
        }

        foreach ($args as $segment) {
            if (!is_array($segment)) {
                $segment = (string) $segment; // sometimes integers foul this up when we pass them in as URL arguments.
            }

            $path_object->append($segment);
        }

        if (!$reset) {
            $url_path = $url->getPath();
            $url_path->append($path_object);
        } else {
            $url_path = $path_object;
        }

        return $url->setPath($url_path);
    }

    /**
     * Builds the base path prefix, prepending the dispatcher filename when URL
     * rewriting does not apply to this path.
     *
     * @param \Concrete\Core\Url\UrlInterface $url
     * @param string $path
     * @param array $args
     *
     * @return \Concrete\Core\Url\Components\Path
     */
    protected function basePath($url, $path, $args)
    {
        $config = $this->config;
        $path_object = new Path('');

        $rewriting = $config->get('concrete.seo.url_rewriting');
        $rewrite_all = $config->get('concrete.seo.url_rewriting_all');
        $in_dashboard = $this->dashboard->inDashboard($path);

        // If rewriting is disabled, or all_rewriting is disabled and we're
        // in the dashboard, add the dispatcher.
        if (!$rewriting || (!$rewrite_all && $in_dashboard)) {
            $path_object->prepend(DISPATCHER_FILENAME);
        }

        return $path_object;
    }
}
| mit |
romdi/IOS | game/server/hl2/point_apc_controller.cpp | 14048 | //========= Copyright © 1996-2005, Valve Corporation, All rights reserved. ============//
//
// Purpose:
//
// $NoKeywords: $
//=============================================================================//
#include "cbase.h"
#include "basecombatweapon.h"
#include "explode.h"
#include "eventqueue.h"
#include "gamerules.h"
#include "ammodef.h"
#include "in_buttons.h"
#include "soundent.h"
#include "ndebugoverlay.h"
#include "vstdlib/random.h"
#include "engine/IEngineSound.h"
#include "player.h"
#include "entitylist.h"
#include "iservervehicle.h"
// memdbgon must be the last include file in a .cpp file!!!
#include "tier0/memdbgon.h"
#define SF_TANK_ACTIVE 0x0001
// Rocket-guidance controller for the APC: when the SF_TANK_ACTIVE spawnflag
// is set it thinks periodically, aims its offset "barrel" at a target entity
// within its yaw/pitch envelope and min/max range, and fires the
// OnFireAtTarget output with the aim vector.
class CAPCController : public CPointEntity
{
	typedef CPointEntity BaseClass;
public:
	~CAPCController( void );
	void	Spawn( void );
	void	Precache( void );
	bool	KeyValue( const char *szKeyName, const char *szValue );
	void	Think( void );

	void	TrackTarget( void );

	void	StartRotSound( void );
	void	StopRotSound( void );

	// Bmodels don't go across transitions
	virtual int	ObjectCaps( void ) { return BaseClass::ObjectCaps() & ~FCAP_ACROSS_TRANSITION; }

	inline bool IsActive( void ) { return (m_spawnflags & SF_TANK_ACTIVE)?TRUE:FALSE; }

	// Input handlers.
	void InputActivate( inputdata_t &inputdata );
	void InputDeactivate( inputdata_t &inputdata );

	void ActivateRocketGuidance(void);
	void DeactivateRocketGuidance(void);

	bool InRange( float range );

	// Barrel offset (m_barrelPos) transformed into world space.
	Vector WorldBarrelPosition( void )
	{
		EntityMatrix tmp;
		tmp.InitFromEntity( this );
		return tmp.LocalToWorld( m_barrelPos );
	}

	// Refreshes the cached parent transform (identity when unparented).
	void UpdateMatrix( void )
	{
		m_parentMatrix.InitFromEntity( GetParent() ? GetParent() : NULL );
	}

	QAngle AimBarrelAt( const Vector &parentTarget );

	bool ShouldSavePhysics() { return false; }

	DECLARE_DATADESC();

	CBaseEntity *FindTarget( string_t targetName, CBaseEntity *pActivator );

protected:
	float	m_yawCenter;		// "Center" yaw
	float	m_yawRate;		// Max turn rate to track targets
					// Zero is full rotation
	float	m_yawTolerance;		// Tolerance angle

	float	m_pitchCenter;		// "Center" pitch
	float	m_pitchRate;		// Max turn rate on pitch
	float	m_pitchTolerance;	// Tolerance angle

	float	m_minRange;		// Minimum range to aim/track
	float	m_maxRange;		// Max range to aim/track

	Vector	m_barrelPos;		// Length of the barrel
	Vector	m_sightOrigin;		// Last sight of target

	string_t m_soundStartRotate;
	string_t m_soundStopRotate;
	string_t m_soundLoopRotate;
	string_t m_targetEntityName;	// Name of the entity to track
	EHANDLE m_hTarget;
	EntityMatrix m_parentMatrix;	// Cached parent transform (see UpdateMatrix)

	COutputVector m_OnFireAtTarget;	// Fired with the aim vector when on target

	float	m_flFiringDelay;
	bool	m_bFireDelayed;
};
LINK_ENTITY_TO_CLASS( point_apc_controller, CAPCController );
// Save/restore and entity-I/O map for CAPCController: persisted fields,
// keyvalues read from the map file, the Activate/Deactivate inputs, and the
// OnFireAtTarget output.
BEGIN_DATADESC( CAPCController )
	DEFINE_FIELD( m_yawCenter, FIELD_FLOAT ),
	DEFINE_KEYFIELD( m_yawRate, FIELD_FLOAT, "yawrate" ),
	DEFINE_KEYFIELD( m_yawTolerance, FIELD_FLOAT, "yawtolerance" ),
	DEFINE_FIELD( m_pitchCenter, FIELD_FLOAT ),
	DEFINE_KEYFIELD( m_pitchRate, FIELD_FLOAT, "pitchrate" ),
	DEFINE_KEYFIELD( m_pitchTolerance, FIELD_FLOAT, "pitchtolerance" ),
	DEFINE_KEYFIELD( m_minRange, FIELD_FLOAT, "minRange" ),
	DEFINE_KEYFIELD( m_maxRange, FIELD_FLOAT, "maxRange" ),
	DEFINE_FIELD( m_barrelPos, FIELD_VECTOR ),
	DEFINE_FIELD( m_sightOrigin, FIELD_VECTOR ),
	DEFINE_KEYFIELD( m_soundStartRotate, FIELD_SOUNDNAME, "rotatestartsound" ),
	DEFINE_KEYFIELD( m_soundStopRotate, FIELD_SOUNDNAME, "rotatestopsound" ),
	DEFINE_KEYFIELD( m_soundLoopRotate, FIELD_SOUNDNAME, "rotatesound" ),
	DEFINE_KEYFIELD( m_targetEntityName, FIELD_STRING, "targetentityname" ),
	DEFINE_FIELD( m_hTarget, FIELD_EHANDLE ),
	DEFINE_FIELD( m_parentMatrix, FIELD_VMATRIX_WORLDSPACE ),
	DEFINE_FIELD( m_flFiringDelay, FIELD_FLOAT ),
	DEFINE_FIELD( m_bFireDelayed, FIELD_BOOLEAN ),

	// Inputs
	DEFINE_INPUTFUNC( FIELD_VOID, "Activate", InputActivate ),
	DEFINE_INPUTFUNC( FIELD_VOID, "Deactivate", InputDeactivate ),

	// Outputs
	DEFINE_OUTPUT(m_OnFireAtTarget, "OnFireAtTarget"),
END_DATADESC()
//-----------------------------------------------------------------------------
// Purpose: Destructor — stops the looping rotation sound if one is configured.
//-----------------------------------------------------------------------------
CAPCController::~CAPCController( void )
{
	if ( m_soundLoopRotate != NULL_STRING )
	{
		StopSound( entindex(), CHAN_STATIC, STRING(m_soundLoopRotate) );
	}
}

//------------------------------------------------------------------------------
// Purpose: Input handler for activating the tank.
//------------------------------------------------------------------------------
void CAPCController::InputActivate( inputdata_t &inputdata )
{
	ActivateRocketGuidance();
}

//-----------------------------------------------------------------------------
// Purpose: Starts tracking — sets the active spawnflag and schedules Think.
//-----------------------------------------------------------------------------
void CAPCController::ActivateRocketGuidance(void)
{
	m_spawnflags	|= SF_TANK_ACTIVE;
	SetNextThink( gpGlobals->curtime + 0.1f );
}

//-----------------------------------------------------------------------------
// Purpose: Input handler for deactivating the tank.
//-----------------------------------------------------------------------------
void CAPCController::InputDeactivate( inputdata_t &inputdata )
{
	DeactivateRocketGuidance();
}

//-----------------------------------------------------------------------------
// Purpose: Stops tracking — clears the active spawnflag and any rotate sound.
//          (No SetNextThink here, so the Think chain simply stops.)
//-----------------------------------------------------------------------------
void CAPCController::DeactivateRocketGuidance(void)
{
	m_spawnflags	&= ~SF_TANK_ACTIVE;
	StopRotSound();
}
//-----------------------------------------------------------------------------
// Purpose: Finds the entity matching targetName nearest to this controller.
// Input  : targetName - target name/classname to search for
//          pActivator - activator used when resolving special target names
// Output : the nearest matching entity, or NULL if none found
//-----------------------------------------------------------------------------
CBaseEntity *CAPCController::FindTarget( string_t targetName, CBaseEntity *pActivator )
{
	return gEntList.FindEntityGenericNearest( STRING( targetName ), GetAbsOrigin(), 0, this, pActivator );
}
//-----------------------------------------------------------------------------
// Purpose: Parses map keyvalues. Handles the barrel offset components here;
//          everything else is delegated to the base class.
// Input  : szKeyName - keyvalue name
//          szValue   - keyvalue string value
// Output : true if the key was consumed
//-----------------------------------------------------------------------------
bool CAPCController::KeyValue( const char *szKeyName, const char *szValue )
{
	if (FStrEq(szKeyName, "barrel"))
	{
		m_barrelPos.x = atof(szValue);
		return true;
	}

	if (FStrEq(szKeyName, "barrely"))
	{
		m_barrelPos.y = atof(szValue);
		return true;
	}

	if (FStrEq(szKeyName, "barrelz"))
	{
		m_barrelPos.z = atof(szValue);
		return true;
	}

	return BaseClass::KeyValue( szKeyName, szValue );
}
//-----------------------------------------
// Spawn
//-----------------------------------------
// Entity spawn: records the mapper-placed rest angles (aiming offsets are
// computed relative to these) and starts thinking if spawned active.
void CAPCController::Spawn( void )
{
	Precache();
	// Remember the initial orientation as the aim center.
	m_yawCenter	= GetLocalAngles().y;
	m_pitchCenter	= GetLocalAngles().x;
	if ( IsActive() )
	{
		SetNextThink( gpGlobals->curtime + 1.0f );
	}
	UpdateMatrix();
}
//-----------------------------------------
// Precache
//-----------------------------------------
void CAPCController::Precache( void )
{
if ( m_soundStartRotate != NULL_STRING )
PrecacheScriptSound( STRING(m_soundStartRotate) );
if ( m_soundStopRotate != NULL_STRING )
PrecacheScriptSound( STRING(m_soundStopRotate) );
if ( m_soundLoopRotate != NULL_STRING )
PrecacheScriptSound( STRING(m_soundLoopRotate) );
}
//-----------------------------------------
// InRange
//-----------------------------------------
// Returns true when the given distance is inside the engagement envelope.
// Below m_minRange we never engage; a m_maxRange of zero (or negative)
// means "no upper limit".
bool CAPCController::InRange( float range )
{
	if ( range < m_minRange )
		return false;
	return ( m_maxRange <= 0 ) || ( range <= m_maxRange );
}
//-----------------------------------------
// Think
//-----------------------------------------
void CAPCController::Think( void )
{
// refresh the matrix
UpdateMatrix();
SetLocalAngularVelocity( vec3_angle );
TrackTarget();
if ( fabs(GetLocalAngularVelocity().x) > 1 || fabs(GetLocalAngularVelocity().y) > 1 )
StartRotSound();
else
StopRotSound();
}
//-----------------------------------------------------------------------------
// Purpose: Aim the offset barrel at a position in parent space
// Input : parentTarget - the position of the target in parent space
// Output : Vector - angles in local space
//-----------------------------------------------------------------------------
QAngle CAPCController::AimBarrelAt( const Vector &parentTarget )
{
	// Target position relative to the gun's origin, still in parent space.
	Vector target = parentTarget - GetLocalOrigin();
	float quadTarget = target.LengthSqr();
	float quadTargetXY = target.x*target.x + target.y*target.y;
	// We're trying to aim the offset barrel at an arbitrary point.
	// To calculate this, I think of the target as being on a sphere with
	// it's center at the origin of the gun.
	// The rotation we need is the opposite of the rotation that moves the target
	// along the surface of that sphere to intersect with the gun's shooting direction
	// To calculate that rotation, we simply calculate the intersection of the ray
	// coming out of the barrel with the target sphere (that's the new target position)
	// and use atan2() to get angles
	// angles from target pos to center
	float targetToCenterYaw = atan2( target.y, target.x );
	// Yaw correction that accounts for the barrel's sideways (y) offset.
	float centerToGunYaw = atan2( m_barrelPos.y, sqrt( quadTarget - (m_barrelPos.y*m_barrelPos.y) ) );
	// Pitch from the horizontal plane to the target.
	float targetToCenterPitch = atan2( target.z, sqrt( quadTargetXY ) );
	// Pitch correction that accounts for the barrel's vertical (z) offset.
	float centerToGunPitch = atan2( -m_barrelPos.z, sqrt( quadTarget - (m_barrelPos.z*m_barrelPos.z) ) );
	// QAngle pitch is negated: engine pitch is positive-down while atan2 here is positive-up.
	return QAngle( -RAD2DEG(targetToCenterPitch+centerToGunPitch), RAD2DEG( targetToCenterYaw + centerToGunYaw ), 0 );
}
// Core guidance loop (called from Think): validates the current target,
// steers the turret toward it at the configured yaw/pitch rates, and —
// after a short grace delay — fires the OnFireAtTarget output with the
// aim direction when a player-driven vehicle is in the sights.
void CAPCController::TrackTarget( void )
{
	trace_t tr;
	// NOTE(review): updateTime is written below but never read afterwards —
	// looks like leftover bookkeeping from the func_tank code this derives from.
	bool updateTime = FALSE, lineOfSight;
	QAngle angles;
	Vector barrelEnd;
	CBaseEntity *pTarget = NULL;
	barrelEnd.Init();
	// Keep ticking at 10Hz only while active.
	if ( IsActive() )
	{
		SetNextThink( gpGlobals->curtime + 0.1f );
	}
	else
	{
		return;
	}
	// -----------------------------------
	// Get world target position
	// -----------------------------------
	barrelEnd = WorldBarrelPosition();
	Vector worldTargetPosition;
	CBaseEntity *pEntity = (CBaseEntity *)m_hTarget;
	// No valid target (or it became untargetable): rescan by name and retry later.
	if ( !pEntity || ( pEntity->GetFlags() & FL_NOTARGET ) )
	{
		m_hTarget = FindTarget( m_targetEntityName, NULL );
		if ( IsActive() )
		{
			SetNextThink( gpGlobals->curtime + 2 ); // wait 2 seconds before the next scan
		}
		return;
	}
	pTarget = pEntity;
	// Calculate angle needed to aim at target
	worldTargetPosition = pEntity->EyePosition();
	float range = (worldTargetPosition - barrelEnd).Length();
	if ( !InRange( range ) )
	{
		m_bFireDelayed = false;
		return;
	}
	// Line-of-sight check from the barrel tip to the target's eyes.
	UTIL_TraceLine( barrelEnd, worldTargetPosition, MASK_BLOCKLOS, this, COLLISION_GROUP_NONE, &tr );
	lineOfSight = FALSE;
	// No line of sight, don't track
	if ( tr.fraction == 1.0 || tr.m_pEnt == pTarget )
	{
		lineOfSight = TRUE;
		CBaseEntity *pInstance = pTarget;
		if ( InRange( range ) && pInstance && pInstance->IsAlive() )
		{
			updateTime = TRUE;
			// Sight position is BodyTarget with no noise (so gun doesn't bob up and down)
			m_sightOrigin = pInstance->BodyTarget( GetLocalOrigin(), false );
		}
	}
	// Convert targetPosition to parent
	angles = AimBarrelAt( m_parentMatrix.WorldToLocal( m_sightOrigin ) );
	// Force the angles to be relative to the center position
	float offsetY = UTIL_AngleDistance( angles.y, m_yawCenter );
	float offsetX = UTIL_AngleDistance( angles.x, m_pitchCenter );
	angles.y = m_yawCenter + offsetY;
	angles.x = m_pitchCenter + offsetX;
	// Move toward target at rate or less (proportional steering, clamped to m_yawRate)
	float distY = UTIL_AngleDistance( angles.y, GetLocalAngles().y );
	QAngle vecAngVel = GetLocalAngularVelocity();
	vecAngVel.y = distY * 10;
	vecAngVel.y = clamp( vecAngVel.y, -m_yawRate, m_yawRate );
	// Move toward target at rate or less (same for pitch, clamped to m_pitchRate)
	float distX = UTIL_AngleDistance( angles.x, GetLocalAngles().x );
	vecAngVel.x = distX * 10;
	vecAngVel.x = clamp( vecAngVel.x, -m_pitchRate, m_pitchRate );
	SetLocalAngularVelocity( vecAngVel );
	SetMoveDoneTime( 0.1 );
	// NOTE(review): the next two statements compute 'forward' from the current
	// local angles, then the AngleVectors(angles, ...) call immediately
	// overwrites it with the desired aim direction — the first computation is
	// effectively dead. Preserved as-is; confirm before cleaning up.
	Vector forward;
	AngleVectors( GetLocalAngles(), &forward );
	forward = m_parentMatrix.ApplyRotation( forward );
	AngleVectors(angles, &forward);
	if ( lineOfSight == TRUE )
	{
		// FIXME: This will ultimately have to deal with NPCs being in the vehicle as well
		// See if the target is in a vehicle. If so, check its relationship
		CBasePlayer *pPlayer = ToBasePlayer( pTarget );
		if ( pPlayer && pPlayer->IsInAVehicle() )
		{
			IServerVehicle *pVehicle = pPlayer->GetVehicle();
			if ( pVehicle->ClassifyPassenger( pPlayer, CLASS_PLAYER ) == CLASS_PLAYER)
			{
				// First sighting: arm a 1.5s grace period before opening fire.
				if ( !m_bFireDelayed )
				{
					m_bFireDelayed = true;
					m_flFiringDelay = gpGlobals->curtime + 1.5; // setup delay time before we start firing
					return;
				}
				if ( gpGlobals->curtime > m_flFiringDelay )
				{
					m_OnFireAtTarget.Set(forward, this, this); // tell apc to fire rockets, and what direction
				}
			}
		}
	}
	else
	{
		m_bFireDelayed = false; // reset flag since we can no longer see target
	}
}
void CAPCController::StartRotSound( void )
{
if ( m_soundLoopRotate != NULL_STRING )
{
CPASAttenuationFilter filter( this );
filter.MakeReliable();
EmitSound_t ep;
ep.m_nChannel = CHAN_STATIC;
ep.m_pSoundName = (char*)STRING(m_soundLoopRotate);
ep.m_SoundLevel = SNDLVL_NORM;
ep.m_flVolume = 0.85;
EmitSound( filter, entindex(), ep );
}
if ( m_soundStartRotate != NULL_STRING )
{
CPASAttenuationFilter filter( this );
EmitSound_t ep;
ep.m_nChannel = CHAN_BODY;
ep.m_pSoundName = (char*)STRING(m_soundStartRotate);
ep.m_SoundLevel = SNDLVL_NORM;
ep.m_flVolume = 1.0f;
EmitSound( filter, entindex(), ep );
}
}
void CAPCController::StopRotSound( void )
{
if ( m_soundLoopRotate != NULL_STRING )
{
StopSound( entindex(), CHAN_STATIC, (char*)STRING(m_soundLoopRotate) );
}
if ( m_soundStopRotate != NULL_STRING )
{
CPASAttenuationFilter filter( this );
EmitSound_t ep;
ep.m_nChannel = CHAN_BODY;
ep.m_pSoundName = (char*)STRING(m_soundStopRotate);
ep.m_SoundLevel = SNDLVL_NORM;
EmitSound( filter, entindex(), ep );
}
}
| mit |
hyonholee/azure-sdk-for-net | sdk/cognitiveservices/Language.TextAnalytics/src/Generated/TextAnalytics/Models/KeyPhraseBatchResultItem.cs | 2780 | // <auto-generated>
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
// </auto-generated>
namespace Microsoft.Azure.CognitiveServices.Language.TextAnalytics.Models
{
    using Newtonsoft.Json;
    using System.Collections;
    using System.Collections.Generic;
    using System.Linq;
    // NOTE: AutoRest-generated DTO (see file header) — edits are lost on regeneration.
    public partial class KeyPhraseBatchResultItem
    {
        /// <summary>
        /// Initializes a new instance of the KeyPhraseBatchResultItem class.
        /// </summary>
        public KeyPhraseBatchResultItem()
        {
            CustomInit();
        }
        /// <summary>
        /// Initializes a new instance of the KeyPhraseBatchResultItem class.
        /// </summary>
        /// <param name="id">Unique, non-empty document identifier.</param>
        /// <param name="keyPhrases">A list of representative words or phrases.
        /// The number of key phrases returned is proportional to the number of
        /// words in the input document.</param>
        /// <param name="statistics">(Optional) if showStats=true was specified
        /// in the request this field will contain information about the
        /// document payload.</param>
        public KeyPhraseBatchResultItem(string id = default(string), IList<string> keyPhrases = default(IList<string>), DocumentStatistics statistics = default(DocumentStatistics))
        {
            Id = id;
            KeyPhrases = keyPhrases;
            Statistics = statistics;
            CustomInit();
        }
        /// <summary>
        /// An initialization method that performs custom operations like setting defaults
        /// </summary>
        partial void CustomInit();
        /// <summary>
        /// Gets or sets unique, non-empty document identifier.
        /// </summary>
        [JsonProperty(PropertyName = "id")]
        public string Id { get; set; }
        /// <summary>
        /// Gets a list of representative words or phrases. The number of key
        /// phrases returned is proportional to the number of words in the
        /// input document.
        /// </summary>
        // Private setter: the list is populated by JSON deserialization (or the
        // generated constructor), not by callers.
        [JsonProperty(PropertyName = "keyPhrases")]
        public IList<string> KeyPhrases { get; private set; }
        /// <summary>
        /// Gets or sets (Optional) if showStats=true was specified in the
        /// request this field will contain information about the document
        /// payload.
        /// </summary>
        [JsonProperty(PropertyName = "statistics")]
        public DocumentStatistics Statistics { get; set; }
    }
}
| mit |
smischke/helix-toolkit | Source/HelixToolkit.Wpf/Controls/CameraController/CameraSetting.cs | 3479 | // --------------------------------------------------------------------------------------------------------------------
// <copyright file="CameraSetting.cs" company="Helix Toolkit">
// Copyright (c) 2014 Helix Toolkit contributors
// </copyright>
// <summary>
// Represents a camera state.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace HelixToolkit.Wpf
{
using System.Windows.Media.Media3D;
/// <summary>
/// Represents a camera state.
/// </summary>
public class CameraSetting
{
/// <summary>
/// Initializes a new instance of the <see cref="CameraSetting"/> class.
/// </summary>
/// <param name="camera">
/// The camera.
/// </param>
public CameraSetting(ProjectionCamera camera)
{
this.Position = camera.Position;
this.LookDirection = camera.LookDirection;
this.UpDirection = camera.UpDirection;
this.NearPlaneDistance = camera.NearPlaneDistance;
this.FarPlaneDistance = camera.FarPlaneDistance;
var pcamera = camera as PerspectiveCamera;
if (pcamera != null)
{
this.FieldOfView = pcamera.FieldOfView;
}
var ocamera = camera as OrthographicCamera;
if (ocamera != null)
{
this.Width = ocamera.Width;
}
}
/// <summary>
/// Gets or sets the far plane distance.
/// </summary>
public double FarPlaneDistance { get; set; }
/// <summary>
/// Gets or sets the field of view.
/// </summary>
public double FieldOfView { get; set; }
/// <summary>
/// Gets or sets the look direction.
/// </summary>
public Vector3D LookDirection { get; set; }
/// <summary>
/// Gets or sets the near plane distance.
/// </summary>
public double NearPlaneDistance { get; set; }
/// <summary>
/// Gets or sets the position.
/// </summary>
public Point3D Position { get; set; }
/// <summary>
/// Gets or sets the up direction.
/// </summary>
public Vector3D UpDirection { get; set; }
/// <summary>
/// Gets or sets the width of an orthographic camera.
/// </summary>
public double Width { get; set; }
/// <summary>
/// Updates the camera to this state.
/// </summary>
/// <param name="camera">
/// The camera to update.
/// </param>
public void UpdateCamera(ProjectionCamera camera)
{
camera.Position = this.Position;
camera.LookDirection = this.LookDirection;
camera.UpDirection = this.UpDirection;
camera.NearPlaneDistance = this.NearPlaneDistance;
camera.FarPlaneDistance = this.FarPlaneDistance;
var pcamera = camera as PerspectiveCamera;
if (pcamera != null)
{
pcamera.FieldOfView = this.FieldOfView;
}
var ocamera = camera as OrthographicCamera;
if (ocamera != null)
{
ocamera.Width = this.Width;
}
}
}
} | mit |
yangdd1205/spring-boot | spring-boot-project/spring-boot/src/test/java/org/springframework/boot/context/properties/source/SpringConfigurationPropertySourcesTests.java | 7944 | /*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.context.properties.source;
import java.util.Collections;
import java.util.Iterator;
import org.junit.jupiter.api.Test;
import org.springframework.core.env.Environment;
import org.springframework.core.env.MapPropertySource;
import org.springframework.core.env.MutablePropertySources;
import org.springframework.core.env.PropertySource;
import org.springframework.core.env.StandardEnvironment;
import org.springframework.core.env.SystemEnvironmentPropertySource;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException;
/**
* Tests for {@link SpringConfigurationPropertySources}.
*
* @author Phillip Webb
* @author Madhura Bhave
*/
class SpringConfigurationPropertySourcesTests {

	// Constructor contract: a null backing collection is rejected eagerly.
	@Test
	void createWhenPropertySourcesIsNullShouldThrowException() {
		assertThatIllegalArgumentException().isThrownBy(() -> new SpringConfigurationPropertySources(null))
				.withMessageContaining("Sources must not be null");
	}

	// A plain MapPropertySource is adapted so its entries are reachable by
	// ConfigurationPropertyName.
	@Test
	void shouldAdaptPropertySource() {
		MutablePropertySources sources = new MutablePropertySources();
		sources.addFirst(new MapPropertySource("test", Collections.singletonMap("a", "b")));
		Iterator<ConfigurationPropertySource> iterator = new SpringConfigurationPropertySources(sources).iterator();
		ConfigurationPropertyName name = ConfigurationPropertyName.of("a");
		assertThat(iterator.next().getConfigurationProperty(name).getValue()).isEqualTo("b");
		assertThat(iterator.hasNext()).isFalse();
	}

	// The standard system-environment source gets relaxed mapping:
	// SERVER_PORT is resolvable as server.port.
	@Test
	void shouldAdaptSystemEnvironmentPropertySource() {
		MutablePropertySources sources = new MutablePropertySources();
		sources.addLast(new SystemEnvironmentPropertySource(StandardEnvironment.SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME,
				Collections.singletonMap("SERVER_PORT", "1234")));
		Iterator<ConfigurationPropertySource> iterator = new SpringConfigurationPropertySources(sources).iterator();
		ConfigurationPropertyName name = ConfigurationPropertyName.of("server.port");
		assertThat(iterator.next().getConfigurationProperty(name).getValue()).isEqualTo("1234");
		assertThat(iterator.hasNext()).isFalse();
	}

	// Relaxed mapping also applies to sources whose name merely ends with the
	// standard system-environment source name (a recognized naming convention).
	@Test
	void shouldExtendedAdaptSystemEnvironmentPropertySource() {
		MutablePropertySources sources = new MutablePropertySources();
		sources.addLast(new SystemEnvironmentPropertySource(
				"test-" + StandardEnvironment.SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME,
				Collections.singletonMap("SERVER_PORT", "1234")));
		Iterator<ConfigurationPropertySource> iterator = new SpringConfigurationPropertySources(sources).iterator();
		ConfigurationPropertyName name = ConfigurationPropertyName.of("server.port");
		assertThat(iterator.next().getConfigurationProperty(name).getValue()).isEqualTo("1234");
		assertThat(iterator.hasNext()).isFalse();
	}

	// A SystemEnvironmentPropertySource with an unrelated name ("override") is
	// treated as a regular source: keys already in dotted form still resolve.
	@Test
	void shouldNotAdaptSystemEnvironmentPropertyOverrideSource() {
		MutablePropertySources sources = new MutablePropertySources();
		sources.addLast(
				new SystemEnvironmentPropertySource("override", Collections.singletonMap("server.port", "1234")));
		Iterator<ConfigurationPropertySource> iterator = new SpringConfigurationPropertySources(sources).iterator();
		ConfigurationPropertyName name = ConfigurationPropertyName.of("server.port");
		assertThat(iterator.next().getConfigurationProperty(name).getValue()).isEqualTo("1234");
		assertThat(iterator.hasNext()).isFalse();
	}

	// A bare underscore key cannot map to any configuration property name and
	// must simply be unresolvable (not an error).
	@Test
	void shouldAdaptSystemEnvironmentPropertySourceWithUnderscoreValue() {
		MutablePropertySources sources = new MutablePropertySources();
		sources.addLast(new SystemEnvironmentPropertySource(StandardEnvironment.SYSTEM_ENVIRONMENT_PROPERTY_SOURCE_NAME,
				Collections.singletonMap("_", "1234")));
		Iterator<ConfigurationPropertySource> iterator = new SpringConfigurationPropertySources(sources).iterator();
		ConfigurationPropertyName name = ConfigurationPropertyName.of("bar");
		assertThat(iterator.next().getConfigurationProperty(name)).isNull();
		assertThat(iterator.hasNext()).isFalse();
	}

	// Multiple sources are adapted in registration order, each keeping its own
	// resolution rules (relaxed env mapping vs. dashed map keys).
	@Test
	void shouldAdaptMultiplePropertySources() {
		MutablePropertySources sources = new MutablePropertySources();
		sources.addLast(new SystemEnvironmentPropertySource("system", Collections.singletonMap("SERVER_PORT", "1234")));
		sources.addLast(new MapPropertySource("test1", Collections.singletonMap("server.po-rt", "4567")));
		sources.addLast(new MapPropertySource("test2", Collections.singletonMap("a", "b")));
		Iterator<ConfigurationPropertySource> iterator = new SpringConfigurationPropertySources(sources).iterator();
		ConfigurationPropertyName name = ConfigurationPropertyName.of("server.port");
		assertThat(iterator.next().getConfigurationProperty(name).getValue()).isEqualTo("1234");
		assertThat(iterator.next().getConfigurationProperty(name).getValue()).isEqualTo("4567");
		assertThat(iterator.next().getConfigurationProperty(ConfigurationPropertyName.of("a")).getValue())
				.isEqualTo("b");
		assertThat(iterator.hasNext()).isFalse();
	}

	// A PropertySource wrapping a whole Environment is flattened into the
	// environment's individual sources rather than kept as one opaque source.
	// (2 custom + StandardEnvironment's own sources + the trailing map = 5.)
	@Test
	void shouldFlattenEnvironment() {
		StandardEnvironment environment = new StandardEnvironment();
		environment.getPropertySources().addFirst(new MapPropertySource("foo", Collections.singletonMap("foo", "bar")));
		environment.getPropertySources().addFirst(new MapPropertySource("far", Collections.singletonMap("far", "far")));
		MutablePropertySources sources = new MutablePropertySources();
		sources.addFirst(new PropertySource<Environment>("env", environment) {
			@Override
			public String getProperty(String key) {
				return this.source.getProperty(key);
			}
		});
		sources.addLast(new MapPropertySource("baz", Collections.singletonMap("baz", "barf")));
		SpringConfigurationPropertySources configurationSources = new SpringConfigurationPropertySources(sources);
		assertThat(configurationSources.iterator()).toIterable().hasSize(5);
	}

	// The adapter is a live view: sources added after construction appear on
	// subsequent iterations.
	@Test
	void shouldTrackChanges() {
		MutablePropertySources sources = new MutablePropertySources();
		SpringConfigurationPropertySources configurationSources = new SpringConfigurationPropertySources(sources);
		assertThat(configurationSources.iterator()).toIterable().hasSize(0);
		MapPropertySource source1 = new MapPropertySource("test1", Collections.singletonMap("a", "b"));
		sources.addLast(source1);
		assertThat(configurationSources.iterator()).toIterable().hasSize(1);
		MapPropertySource source2 = new MapPropertySource("test2", Collections.singletonMap("b", "c"));
		sources.addLast(source2);
		assertThat(configurationSources.iterator()).toIterable().hasSize(2);
	}

	// Replacing a source with another of the same name must not serve stale
	// values from the old instance.
	@Test
	void shouldTrackWhenSourceHasIdenticalName() {
		MutablePropertySources sources = new MutablePropertySources();
		SpringConfigurationPropertySources configurationSources = new SpringConfigurationPropertySources(sources);
		ConfigurationPropertyName name = ConfigurationPropertyName.of("a");
		MapPropertySource source1 = new MapPropertySource("test", Collections.singletonMap("a", "s1"));
		sources.addLast(source1);
		assertThat(configurationSources.iterator().next().getConfigurationProperty(name).getValue()).isEqualTo("s1");
		MapPropertySource source2 = new MapPropertySource("test", Collections.singletonMap("a", "s2"));
		sources.remove("test");
		sources.addLast(source2);
		assertThat(configurationSources.iterator().next().getConfigurationProperty(name).getValue()).isEqualTo("s2");
	}

}
| mit |
hotchandanisagar/odata.net | test/FunctionalTests/Tests/DataServices/UnitTests/DesignT4UnitTests/CodeGen/PrefixConflict.cs | 11812 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.34014
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
// Generation date: 2/10/2015 4:08:54 PM
namespace PrefixConflict
{
    /// <summary>
    /// There are no comments for EntityContainer in the schema.
    /// </summary>
    // NOTE: T4-generated OData client code (see generation date header) — edits
    // here are lost when the client is regenerated.
    public partial class EntityContainer : global::Microsoft.OData.Client.DataServiceContext
    {
        /// <summary>
        /// Initialize a new EntityContainer object.
        /// </summary>
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        public EntityContainer(global::System.Uri serviceRoot) :
                base(serviceRoot, global::Microsoft.OData.Client.ODataProtocolVersion.V4)
        {
            this.OnContextCreated();
            // Serve the embedded EDMX below instead of fetching $metadata.
            this.Format.LoadServiceModel = GeneratedEdmModel.GetInstance;
            this.Format.UseJson();
        }
        partial void OnContextCreated();
        /// <summary>
        /// There are no comments for Set1 in the schema.
        /// </summary>
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        public global::Microsoft.OData.Client.DataServiceQuery<EntityType> Set1
        {
            get
            {
                // Lazily created and cached on first access.
                if ((this._Set1 == null))
                {
                    this._Set1 = base.CreateQuery<EntityType>("Set1");
                }
                return this._Set1;
            }
        }
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        private global::Microsoft.OData.Client.DataServiceQuery<EntityType> _Set1;
        /// <summary>
        /// There are no comments for Set1 in the schema.
        /// </summary>
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        public void AddToSet1(EntityType entityType)
        {
            base.AddObject("Set1", entityType);
        }
        // Holds the client-side EDM model parsed from the inline CSDL string.
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        private abstract class GeneratedEdmModel
        {
            [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
            private static global::Microsoft.OData.Edm.IEdmModel ParsedModel = LoadModelFromString();
            [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
            private const string Edmx = @"<edmx:Edmx Version=""4.0"" xmlns:edmx=""http://docs.oasis-open.org/odata/ns/edmx"">
  <edmx:DataServices>
    <Schema Namespace=""PrefixConflict"" xmlns=""http://docs.oasis-open.org/odata/ns/edm"">
      <EntityType Name=""EntityType"">
        <Key>
          <PropertyRef Name=""Id"" />
        </Key>
        <Property Name=""Id"" Type=""Edm.Guid"" Nullable=""false"" />
        <Property Name=""Name"" Type=""Edm.Int32"" Nullable=""true"" />
        <Property Name=""_Name"" Type=""Edm.String"" Nullable=""true"" />
        <Property Name=""__Name"" Type=""Edm.Int32"" Nullable=""true"" />
      </EntityType>
      <EntityContainer Name=""EntityContainer"">
        <EntitySet Name=""Set1"" EntityType=""PrefixConflict.EntityType"" />
      </EntityContainer>
    </Schema>
  </edmx:DataServices>
</edmx:Edmx>";
            [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
            public static global::Microsoft.OData.Edm.IEdmModel GetInstance()
            {
                return ParsedModel;
            }
            [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
            private static global::Microsoft.OData.Edm.IEdmModel LoadModelFromString()
            {
                global::System.Xml.XmlReader reader = CreateXmlReader(Edmx);
                try
                {
                    return global::Microsoft.OData.Edm.Csdl.EdmxReader.Parse(reader);
                }
                finally
                {
                    ((global::System.IDisposable)(reader)).Dispose();
                }
            }
            [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
            private static global::System.Xml.XmlReader CreateXmlReader(string edmxToParse)
            {
                return global::System.Xml.XmlReader.Create(new global::System.IO.StringReader(edmxToParse));
            }
        }
    }
    /// <summary>
    /// There are no comments for EntityTypeSingle in the schema.
    /// </summary>
    // NOTE: T4-generated single-entity query wrapper — edits are lost on regeneration.
    public partial class EntityTypeSingle : global::Microsoft.OData.Client.DataServiceQuerySingle<EntityType>
    {
        /// <summary>
        /// Initialize a new EntityTypeSingle object.
        /// </summary>
        public EntityTypeSingle(global::Microsoft.OData.Client.DataServiceContext context, string path)
            : base(context, path) {}

        /// <summary>
        /// Initialize a new EntityTypeSingle object.
        /// </summary>
        public EntityTypeSingle(global::Microsoft.OData.Client.DataServiceContext context, string path, bool isComposable)
            : base(context, path, isComposable) {}

        /// <summary>
        /// Initialize a new EntityTypeSingle object.
        /// </summary>
        public EntityTypeSingle(global::Microsoft.OData.Client.DataServiceQuerySingle<EntityType> query)
            : base(query) {}
    }
    /// <summary>
    /// There are no comments for EntityType in the schema.
    /// </summary>
    /// <KeyProperties>
    /// Id
    /// </KeyProperties>
    // NOTE: T4-generated entity type — the "_Name1"/"__Name1"/"___Name" backing
    // fields are the generator's prefix-conflict resolution for the schema
    // properties Name, _Name and __Name. Edits are lost on regeneration.
    [global::Microsoft.OData.Client.Key("Id")]
    public partial class EntityType : global::Microsoft.OData.Client.BaseEntityType
    {
        /// <summary>
        /// Create a new EntityType object.
        /// </summary>
        /// <param name="ID">Initial value of Id.</param>
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        public static EntityType CreateEntityType(global::System.Guid ID)
        {
            EntityType entityType = new EntityType();
            entityType.Id = ID;
            return entityType;
        }
        /// <summary>
        /// There are no comments for Property Id in the schema.
        /// </summary>
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        public global::System.Guid Id
        {
            get
            {
                return this._Id;
            }
            set
            {
                this.OnIdChanging(value);
                this._Id = value;
                this.OnIdChanged();
            }
        }
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        private global::System.Guid _Id;
        partial void OnIdChanging(global::System.Guid value);
        partial void OnIdChanged();
        /// <summary>
        /// There are no comments for Property Name in the schema.
        /// </summary>
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        public global::System.Nullable<int> Name
        {
            get
            {
                return this._Name1;
            }
            set
            {
                this.OnNameChanging(value);
                this._Name1 = value;
                this.OnNameChanged();
            }
        }
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        private global::System.Nullable<int> _Name1;
        partial void OnNameChanging(global::System.Nullable<int> value);
        partial void OnNameChanged();
        /// <summary>
        /// There are no comments for Property _Name in the schema.
        /// </summary>
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        public string _Name
        {
            get
            {
                return this.__Name1;
            }
            set
            {
                this.On_NameChanging(value);
                this.__Name1 = value;
                this.On_NameChanged();
            }
        }
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        private string __Name1;
        partial void On_NameChanging(string value);
        partial void On_NameChanged();
        /// <summary>
        /// There are no comments for Property __Name in the schema.
        /// </summary>
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        public global::System.Nullable<int> __Name
        {
            get
            {
                return this.___Name;
            }
            set
            {
                this.On__NameChanging(value);
                this.___Name = value;
                this.On__NameChanged();
            }
        }
        [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.OData.Client.Design.T4", "2.2.0")]
        private global::System.Nullable<int> ___Name;
        partial void On__NameChanging(global::System.Nullable<int> value);
        partial void On__NameChanged();
    }
    /// <summary>
    /// Class containing all extension methods
    /// </summary>
    // NOTE: T4-generated ByKey helpers — edits are lost on regeneration.
    public static class ExtensionMethods
    {
        /// <summary>
        /// Get an entity of type global::PrefixConflict.EntityType as global::PrefixConflict.EntityTypeSingle specified by key from an entity set
        /// </summary>
        /// <param name="source">source entity set</param>
        /// <param name="keys">dictionary with the names and values of keys</param>
        public static global::PrefixConflict.EntityTypeSingle ByKey(this global::Microsoft.OData.Client.DataServiceQuery<global::PrefixConflict.EntityType> source, global::System.Collections.Generic.Dictionary<string, object> keys)
        {
            return new global::PrefixConflict.EntityTypeSingle(source.Context, source.GetKeyPath(global::Microsoft.OData.Client.Serializer.GetKeyString(source.Context, keys)));
        }
        /// <summary>
        /// Get an entity of type global::PrefixConflict.EntityType as global::PrefixConflict.EntityTypeSingle specified by key from an entity set
        /// </summary>
        /// <param name="source">source entity set</param>
        /// <param name="id">The value of id</param>
        public static global::PrefixConflict.EntityTypeSingle ByKey(this global::Microsoft.OData.Client.DataServiceQuery<global::PrefixConflict.EntityType> source,
            global::System.Guid id)
        {
            // Convenience overload: wraps the single key value and delegates to
            // the same key-path construction as the dictionary overload.
            global::System.Collections.Generic.Dictionary<string, object> keys = new global::System.Collections.Generic.Dictionary<string, object>
            {
                { "Id", id }
            };
            return new global::PrefixConflict.EntityTypeSingle(source.Context, source.GetKeyPath(global::Microsoft.OData.Client.Serializer.GetKeyString(source.Context, keys)));
        }
    }
| mit |
samchrisinger/ember-osf | tests/dummy/app/routes/users/detail.js | 271 | import Ember from 'ember';
import AuthenticatedRouteMixin from 'ember-simple-auth/mixins/authenticated-route-mixin';
export default Ember.Route.extend(AuthenticatedRouteMixin, {
    /**
     * Resolve the route model: load the user record identified by the
     * `user_id` dynamic segment.
     */
    model(params) {
        const { user_id: userId } = params;
        return this.store.findRecord('user', userId);
    }
});
| mit |
NikRimington/Umbraco-CMS | src/Umbraco.Tests/Migrations/AdvancedMigrationTests.cs | 10518 | using System;
using System.Linq;
using Moq;
using NUnit.Framework;
using Umbraco.Core.Logging;
using Umbraco.Core.Migrations;
using Umbraco.Core.Migrations.Install;
using Umbraco.Core.Migrations.Upgrade;
using Umbraco.Core.Persistence.DatabaseModelDefinitions;
using Umbraco.Core.Persistence.Dtos;
using Umbraco.Core.Services;
using Umbraco.Tests.TestHelpers;
using Umbraco.Tests.Testing;
namespace Umbraco.Tests.Migrations
{
[TestFixture]
[UmbracoTest(Database = UmbracoTestOptions.Database.NewEmptyPerTest)]
public class AdvancedMigrationTests : TestWithDatabaseBase
{
        // Runs a single-step migration plan whose migration creates the table
        // for UserDto, then verifies the "umbracoUser" table exists afterwards.
        [Test]
        public void CreateTableOfTDto()
        {
            var logger = new DebugDiagnosticsLogger();
            // Stub builder: only the one migration type used by the plan may be built.
            var builder = Mock.Of<IMigrationBuilder>();
            Mock.Get(builder)
                .Setup(x => x.Build(It.IsAny<Type>(), It.IsAny<IMigrationContext>()))
                .Returns<Type, IMigrationContext>((t, c) =>
                {
                    if (t != typeof(CreateTableOfTDtoMigration))
                        throw new NotSupportedException();
                    return new CreateTableOfTDtoMigration(c);
                });
            using (var scope = ScopeProvider.CreateScope())
            {
                var upgrader = new Upgrader(
                    new MigrationPlan("test")
                        .From(string.Empty)
                        .To<CreateTableOfTDtoMigration>("done"));
                upgrader.Execute(ScopeProvider, builder, Mock.Of<IKeyValueService>(), logger);
                var helper = new DatabaseSchemaCreator(scope.Database, logger);
                var exists = helper.TableExists("umbracoUser");
                Assert.IsTrue(exists);
                scope.Complete();
            }
        }
[Test]
public void DeleteKeysAndIndexesOfTDto()
{
var logger = new DebugDiagnosticsLogger();
var builder = Mock.Of<IMigrationBuilder>();
Mock.Get(builder)
.Setup(x => x.Build(It.IsAny<Type>(), It.IsAny<IMigrationContext>()))
.Returns<Type, IMigrationContext>((t, c) =>
{
switch (t.Name)
{
case "CreateTableOfTDtoMigration":
return new CreateTableOfTDtoMigration(c);
case "DeleteKeysAndIndexesMigration":
return new DeleteKeysAndIndexesMigration(c);
default:
throw new NotSupportedException();
}
});
using (var scope = ScopeProvider.CreateScope())
{
var upgrader = new Upgrader(
new MigrationPlan("test")
.From(string.Empty)
.To<CreateTableOfTDtoMigration>("a")
.To<DeleteKeysAndIndexesMigration>("done"));
upgrader.Execute(ScopeProvider, builder, Mock.Of<IKeyValueService>(), logger);
scope.Complete();
}
}
        // Create table -> drop keys/indexes -> re-create keys/indexes for the
        // single UserDto type; passing without exception is the assertion.
        [Test]
        public void CreateKeysAndIndexesOfTDto()
        {
            var logger = new DebugDiagnosticsLogger();
            // Stub builder: resolve each plan step by migration type name.
            var builder = Mock.Of<IMigrationBuilder>();
            Mock.Get(builder)
                .Setup(x => x.Build(It.IsAny<Type>(), It.IsAny<IMigrationContext>()))
                .Returns<Type, IMigrationContext>((t, c) =>
                {
                    switch (t.Name)
                    {
                        case "CreateTableOfTDtoMigration":
                            return new CreateTableOfTDtoMigration(c);
                        case "DeleteKeysAndIndexesMigration":
                            return new DeleteKeysAndIndexesMigration(c);
                        case "CreateKeysAndIndexesOfTDtoMigration":
                            return new CreateKeysAndIndexesOfTDtoMigration(c);
                        default:
                            throw new NotSupportedException();
                    }
                });
            using (var scope = ScopeProvider.CreateScope())
            {
                var upgrader = new Upgrader(
                    new MigrationPlan("test")
                        .From(string.Empty)
                        .To<CreateTableOfTDtoMigration>("a")
                        .To<DeleteKeysAndIndexesMigration>("b")
                        .To<CreateKeysAndIndexesOfTDtoMigration>("done"));
                upgrader.Execute(ScopeProvider, builder, Mock.Of<IKeyValueService>(), logger);
                scope.Complete();
            }
        }
[Test]
public void CreateKeysAndIndexes()
{
var logger = new DebugDiagnosticsLogger();
var builder = Mock.Of<IMigrationBuilder>();
Mock.Get(builder)
.Setup(x => x.Build(It.IsAny<Type>(), It.IsAny<IMigrationContext>()))
.Returns<Type, IMigrationContext>((t, c) =>
{
switch (t.Name)
{
case "CreateTableOfTDtoMigration":
return new CreateTableOfTDtoMigration(c);
case "DeleteKeysAndIndexesMigration":
return new DeleteKeysAndIndexesMigration(c);
case "CreateKeysAndIndexesMigration":
return new CreateKeysAndIndexesMigration(c);
default:
throw new NotSupportedException();
}
});
using (var scope = ScopeProvider.CreateScope())
{
var upgrader = new Upgrader(
new MigrationPlan("test")
.From(string.Empty)
.To<CreateTableOfTDtoMigration>("a")
.To<DeleteKeysAndIndexesMigration>("b")
.To<CreateKeysAndIndexesMigration>("done"));
upgrader.Execute(ScopeProvider, builder, Mock.Of<IKeyValueService>(), logger);
scope.Complete();
}
}
[Test]
public void CreateColumn()
{
var logger = new DebugDiagnosticsLogger();
var builder = Mock.Of<IMigrationBuilder>();
Mock.Get(builder)
.Setup(x => x.Build(It.IsAny<Type>(), It.IsAny<IMigrationContext>()))
.Returns<Type, IMigrationContext>((t, c) =>
{
switch (t.Name)
{
case "CreateTableOfTDtoMigration":
return new CreateTableOfTDtoMigration(c);
case "CreateColumnMigration":
return new CreateColumnMigration(c);
default:
throw new NotSupportedException();
}
});
using (var scope = ScopeProvider.CreateScope())
{
var upgrader = new Upgrader(
new MigrationPlan("test")
.From(string.Empty)
.To<CreateTableOfTDtoMigration>("a")
.To<CreateColumnMigration>("done"));
upgrader.Execute(ScopeProvider, builder, Mock.Of<IKeyValueService>(), logger);
scope.Complete();
}
}
public class CreateTableOfTDtoMigration : MigrationBase
{
public CreateTableOfTDtoMigration(IMigrationContext context)
: base(context)
{ }
public override void Migrate()
{
// creates User table with keys, indexes, etc
Create.Table<UserDto>().Do();
}
}
public class DeleteKeysAndIndexesMigration : MigrationBase
{
public DeleteKeysAndIndexesMigration(IMigrationContext context)
: base(context)
{ }
public override void Migrate()
{
// drops User table keys and indexes
//Execute.DropKeysAndIndexes("umbracoUser");
// drops *all* tables keys and indexes
var tables = SqlSyntax.GetTablesInSchema(Context.Database).ToList();
foreach (var table in tables)
Delete.KeysAndIndexes(table, false, true).Do();
foreach (var table in tables)
Delete.KeysAndIndexes(table, true, false).Do();
}
}
public class CreateKeysAndIndexesOfTDtoMigration : MigrationBase
{
public CreateKeysAndIndexesOfTDtoMigration(IMigrationContext context)
: base(context)
{ }
public override void Migrate()
{
// creates Node table keys and indexes
Create.KeysAndIndexes<UserDto>().Do();
}
}
public class CreateKeysAndIndexesMigration : MigrationBase
{
public CreateKeysAndIndexesMigration(IMigrationContext context)
: base(context)
{ }
public override void Migrate()
{
// creates *all* tables keys and indexes
foreach (var x in DatabaseSchemaCreator.OrderedTables)
{
// ok - for tests, restrict to Node
if (x != typeof(UserDto)) continue;
Create.KeysAndIndexes(x).Do();
}
}
}
public class CreateColumnMigration : MigrationBase
{
public CreateColumnMigration(IMigrationContext context)
: base(context)
{ }
public override void Migrate()
{
// cannot delete the column without this, of course
Delete.KeysAndIndexes("umbracoUser").Do();
Delete.Column("id").FromTable("umbracoUser").Do();
var table = DefinitionFactory.GetTableDefinition(typeof(UserDto), SqlSyntax);
var column = table.Columns.First(x => x.Name == "id");
var create = SqlSyntax.Format(column); // returns [id] INTEGER NOT NULL IDENTITY(1060,1)
Database.Execute($"ALTER TABLE {SqlSyntax.GetQuotedTableName("umbracoUser")} ADD " + create);
}
}
}
}
| mit |
codeck/XChange | xchange-lakebtc/src/main/java/com/xeiam/xchange/lakebtc/service/polling/LakeBTCTradeServiceRaw.java | 4365 | package com.xeiam.xchange.lakebtc.service.polling;
import java.io.IOException;
import com.xeiam.xchange.Exchange;
import com.xeiam.xchange.dto.trade.LimitOrder;
import com.xeiam.xchange.dto.trade.MarketOrder;
import com.xeiam.xchange.exceptions.ExchangeException;
import com.xeiam.xchange.lakebtc.LakeBTCUtil;
import com.xeiam.xchange.lakebtc.dto.trade.LakeBTCBuyOrderRequest;
import com.xeiam.xchange.lakebtc.dto.trade.LakeBTCCancelRequest;
import com.xeiam.xchange.lakebtc.dto.trade.LakeBTCCancelResponse;
import com.xeiam.xchange.lakebtc.dto.trade.LakeBTCOrderResponse;
import com.xeiam.xchange.lakebtc.dto.trade.LakeBTCOrdersRequest;
import com.xeiam.xchange.lakebtc.dto.trade.LakeBTCOrdersResponse;
import com.xeiam.xchange.lakebtc.dto.trade.LakeBTCSellOrderRequest;
import com.xeiam.xchange.lakebtc.dto.trade.LakeBTCTradeResponse;
import com.xeiam.xchange.lakebtc.dto.trade.LakeBTCTradesRequest;
/**
 * Raw trade operations against the LakeBTC exchange: market/limit order
 * placement, order cancellation, trade history and open-order listing.
 *
 * Created by cristian.lucaci on 12/19/2014.
 */
public class LakeBTCTradeServiceRaw extends LakeBTCBasePollingService {

  /**
   * Constructor
   *
   * @param exchange the exchange this raw service operates on
   */
  public LakeBTCTradeServiceRaw(Exchange exchange) {

    super(exchange);
  }

  /**
   * Encodes order parameters the way the LakeBTC API expects them:
   * a quoted, comma-separated "price,amount,pair" string.
   */
  private static String encodeOrderParams(Object unitPrice, String amount, String pair) {

    return String.format("\"%s,%s,%s\"", unitPrice, amount, pair);
  }

  /**
   * Submits a market order; a unit price of "0" tells LakeBTC to execute at market.
   *
   * @param marketOrder the order to place
   * @return the exchange's order response, or null for an unrecognized order type
   * @throws IOException never thrown directly; I/O failures are wrapped in ExchangeException
   */
  public LakeBTCOrderResponse placeLakeBTCMarketOrder(MarketOrder marketOrder) throws IOException {

    final String tradingPair = LakeBTCUtil.toPairString(marketOrder.getCurrencyPair());
    // market orders are submitted with a unit price of zero
    final String orderParams = encodeOrderParams("0", marketOrder.getTradableAmount().toString(), tradingPair);

    try {
      switch (marketOrder.getType()) {
      case BID:
        return lakeBTCAuthenticated.placeBuyOrder(signatureCreator, exchange.getNonceFactory(), new LakeBTCBuyOrderRequest(orderParams));
      case ASK:
        return lakeBTCAuthenticated.placeSellOrder(signatureCreator, exchange.getNonceFactory(), new LakeBTCSellOrderRequest(orderParams));
      default:
        return null;
      }
    } catch (IOException e) {
      throw new ExchangeException("LakeBTC returned an error: " + e.getMessage());
    }
  }

  /**
   * Submits a limit order at the order's limit price.
   *
   * @param limitOrder the order to place
   * @return the exchange's order response, or null for an unrecognized order type
   * @throws IOException never thrown directly; I/O failures are wrapped in ExchangeException
   */
  public LakeBTCOrderResponse placeLakeBTCLimitOrder(LimitOrder limitOrder) throws IOException {

    final String tradingPair = LakeBTCUtil.toPairString(limitOrder.getCurrencyPair());
    final String orderParams = encodeOrderParams(limitOrder.getLimitPrice(), limitOrder.getTradableAmount().toString(), tradingPair);

    try {
      switch (limitOrder.getType()) {
      case BID:
        return lakeBTCAuthenticated.placeBuyOrder(signatureCreator, exchange.getNonceFactory(), new LakeBTCBuyOrderRequest(orderParams));
      case ASK:
        return lakeBTCAuthenticated.placeSellOrder(signatureCreator, exchange.getNonceFactory(), new LakeBTCSellOrderRequest(orderParams));
      default:
        return null;
      }
    } catch (IOException e) {
      throw new ExchangeException("LakeBTC returned an error: " + e.getMessage());
    }
  }

  /**
   * Cancels a previously placed order.
   *
   * @param orderId the exchange-assigned order id
   * @return the cancellation response
   * @throws IOException never thrown directly; all failures are wrapped in ExchangeException
   */
  public LakeBTCCancelResponse cancelLakeBTCOrder(String orderId) throws IOException {

    try {
      return lakeBTCAuthenticated.cancelOrder(signatureCreator, exchange.getNonceFactory(), new LakeBTCCancelRequest(orderId));
    } catch (Exception e) {
      throw new ExchangeException("LakeBTC returned an error: " + e.getMessage());
    }
  }

  /**
   * Fetches past trades executed after the given timestamp.
   *
   * @param timestamp lower bound for the trade history query
   * @return the trades reported by the exchange
   * @throws IOException never thrown directly; I/O failures are wrapped in ExchangeException
   */
  public LakeBTCTradeResponse[] getLakeBTCTradeHistory(long timestamp) throws IOException {

    try {
      return lakeBTCAuthenticated.pastTrades(signatureCreator, exchange.getNonceFactory(), new LakeBTCTradesRequest(Long.toString(timestamp)));
    } catch (IOException e) {
      throw new ExchangeException("LakeBTC returned an error: " + e.getMessage());
    }
  }

  /**
   * Lists the account's open orders.
   *
   * @return the open orders reported by the exchange
   * @throws IOException if the underlying call fails
   */
  public LakeBTCOrdersResponse[] getLakeBTCOrders() throws IOException {

    return lakeBTCAuthenticated.getOrders(signatureCreator, exchange.getNonceFactory(), new LakeBTCOrdersRequest());
  }
}
| mit |
francisco-filho/Ghost | core/server/middleware/static-theme.js | 816 | var _ = require('lodash'),
express = require('express'),
path = require('path'),
config = require('../config'),
utils = require('../utils');
function isBlackListedFileType(file) {
var blackListedFileTypes = ['.hbs', '.md', '.json'],
ext = path.extname(file);
return _.contains(blackListedFileTypes, ext);
}
/**
 * Hands the request off to express.static, rooted at the currently active
 * theme's directory, with a one-year max-age cache header.
 *
 * @param {Object} req - Express request (req.app holds the active theme name)
 * @param {Object} res - Express response
 * @param {Function} next - next middleware in the chain
 */
function forwardToExpressStatic(req, res, next) {
    var activeThemeDir = path.join(config.paths.themePath, req.app.get('activeTheme')),
        staticOptions = {maxAge: utils.ONE_YEAR_MS};

    express.static(activeThemeDir, staticOptions)(req, res, next);
}
/**
 * Builds the theme static-file middleware: serves files from the active theme
 * directory, except blacklisted file types (templates, markdown, json), which
 * are passed through to the next middleware untouched.
 *
 * @returns {Function} Express middleware
 */
function staticTheme() {
    return function blackListStatic(req, res, next) {
        if (!isBlackListedFileType(req.url)) {
            return forwardToExpressStatic(req, res, next);
        }

        return next();
    };
}

module.exports = staticTheme;
ycsoft/FatCat-Server | LIBS/boost_1_58_0/libs/ratio/example/type_traits/common_type.hpp | 5661 | // common_type.hpp ---------------------------------------------------------//
// Copyright 2008 Howard Hinnant
// Copyright 2008 Beman Dawes
// Distributed under the Boost Software License, Version 1.0.
// See http://www.boost.org/LICENSE_1_0.txt
#ifndef BOOST_EX_TYPE_TRAITS_EXT_COMMON_TYPE_HPP
#define BOOST_EX_TYPE_TRAITS_EXT_COMMON_TYPE_HPP
#include <boost/config.hpp>
//----------------------------------------------------------------------------//
// Without variadic templates the emulation below is capped at 3 arguments.
#if defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES)
#define BOOST_EX_COMMON_TYPE_ARITY 3
#endif
//----------------------------------------------------------------------------//
// No decltype: fall back to Boost.Typeof unless the user explicitly opted out
// via BOOST_EX_COMMON_TYPE_DONT_USE_TYPEOF.
#if defined(BOOST_NO_CXX11_DECLTYPE) && !defined(BOOST_EX_COMMON_TYPE_DONT_USE_TYPEOF)
#define BOOST_TYPEOF_SILENT
#include <boost/typeof/typeof.hpp> // boost wonders never cease!
#endif
//----------------------------------------------------------------------------//
// Select a static-assert mechanism, best first:
//   1. native C++11 static_assert
//   2. BOOST_STATIC_ASSERT (if BOOST_EX_COMMON_TYPE_USES_STATIC_ASSERT)
//   3. BOOST_MPL_ASSERT_MSG (if BOOST_EX_COMMON_TYPE_USES_MPL_ASSERT)
//   4. the classic negative-size-array trick as a last resort
#ifndef BOOST_NO_CXX11_STATIC_ASSERT
#define BOOST_EX_COMMON_TYPE_STATIC_ASSERT(CND, MSG, TYPES) static_assert(CND,MSG)
#elif defined(BOOST_EX_COMMON_TYPE_USES_STATIC_ASSERT)
#include <boost/static_assert.hpp>
#define BOOST_EX_COMMON_TYPE_STATIC_ASSERT(CND, MSG, TYPES) BOOST_STATIC_ASSERT(CND)
#elif defined(BOOST_EX_COMMON_TYPE_USES_MPL_ASSERT)
#include <boost/mpl/assert.hpp>
#include <boost/mpl/bool.hpp>
#define BOOST_EX_COMMON_TYPE_STATIC_ASSERT(CND, MSG, TYPES) \
BOOST_MPL_ASSERT_MSG(boost::mpl::bool_< (CND) >::type::value, MSG, TYPES)
#else
//~ #elif defined(BOOST_EX_COMMON_TYPE_USES_ARRAY_ASSERT)
#define BOOST_EX_COMMON_TYPE_CONCAT(A,B) A##B
#define BOOST_EX_COMMON_TYPE_NAME(A,B) BOOST_EX_COMMON_TYPE_CONCAT(A,B)
#define BOOST_EX_COMMON_TYPE_STATIC_ASSERT(CND, MSG, TYPES) static char BOOST_EX_COMMON_TYPE_NAME(__boost_common_type_test_,__LINE__)[(CND)?1:-1]
//~ #define BOOST_EX_COMMON_TYPE_STATIC_ASSERT(CND, MSG, TYPES)
#endif
// Message used by the assertions below. Note the (intentional, historical)
// misspelling "COMPLE" in the macro name — it is referenced as-is further down.
#if !defined(BOOST_NO_CXX11_STATIC_ASSERT) || !defined(BOOST_EX_COMMON_TYPE_USES_MPL_ASSERT)
#define BOOST_EX_COMMON_TYPE_MUST_BE_A_COMPLE_TYPE "must be complete type"
#endif
// No decltype AND typeof disabled: use the hand-written fallback implementation.
#if defined(BOOST_NO_CXX11_DECLTYPE) && defined(BOOST_EX_COMMON_TYPE_DONT_USE_TYPEOF)
#include "detail/common_type.hpp"
#include <boost/type_traits/remove_cv.hpp>
#endif
#include <boost/mpl/if.hpp>
#include "declval.hpp"
// //
// C++03 implementation of //
// 20.6.7 Other transformations [meta.trans.other] //
// Written by Howard Hinnant //
// Adapted for Boost by Beman Dawes, Vicente Botet and Jeffrey Hellrung //
// //
//----------------------------------------------------------------------------//
namespace boost_ex {

// C++03-compatible emulation of std::common_type (the type both T and U can
// be converted to, as deduced from the conditional operator).

// prototype
#if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES)
template<typename... T>
struct common_type;
#else // or no specialization
// Non-variadic emulation: up to 3 arguments via defaulted void parameters.
template <class T, class U = void, class V = void>
struct common_type
{
public:
    // 3-arg case: fold left — common_type<common_type<T,U>, V>.
    typedef typename common_type<typename common_type<T, U>::type, V>::type type;
};
#endif

// 1 arg
// The common type of a single (complete) type is the type itself.
template<typename T>
#if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES)
struct common_type<T>
#else
struct common_type<T, void, void>
#endif
{
    BOOST_EX_COMMON_TYPE_STATIC_ASSERT(sizeof(T) > 0, BOOST_EX_COMMON_TYPE_MUST_BE_A_COMPLE_TYPE, (T));
public:
    typedef T type;
};

// 2 args
namespace type_traits_detail {

    // Core deduction: the type of `cond ? declval<T>() : declval<U>()`,
    // obtained via decltype, Boost.Typeof, or the hand-written fallback,
    // depending on the configuration selected above.
    template <class T, class U>
    struct common_type_2
    {
    private:
        BOOST_EX_COMMON_TYPE_STATIC_ASSERT(sizeof(T) > 0, BOOST_EX_COMMON_TYPE_MUST_BE_A_COMPLE_TYPE, (T));
        BOOST_EX_COMMON_TYPE_STATIC_ASSERT(sizeof(U) > 0, BOOST_EX_COMMON_TYPE_MUST_BE_A_COMPLE_TYPE, (U));
        static bool declval_bool(); // workaround gcc bug; not required by std
        static typename add_rvalue_reference<T>::type declval_T(); // workaround gcc bug; not required by std
        static typename add_rvalue_reference<U>::type declval_U(); // workaround gcc bug; not required by std
#if !defined(BOOST_NO_CXX11_DECLTYPE)
    public:
        typedef decltype(declval<bool>() ? declval<T>() : declval<U>()) type;
#elif defined(BOOST_EX_COMMON_TYPE_DONT_USE_TYPEOF)
    public:
        // Fallback table-driven implementation; cv-qualifiers are stripped first.
        typedef typename detail_type_traits_common_type::common_type_impl<
            typename remove_cv<T>::type,
            typename remove_cv<U>::type
        >::type type;
#else
    public:
        //~ typedef BOOST_TYPEOF_TPL(declval_bool() ? declval_T() : declval_U()) type;
        typedef BOOST_TYPEOF_TPL(declval<bool>() ? declval<T>() : declval<U>()) type;
#endif
    };

    // Trivial case: both arguments are the same type.
    template <class T>
    struct common_type_2<T, T>
    {
        typedef T type;
    };
}

// Public 2-argument specialization delegates to the detail implementation.
#if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES)
template <class T, class U>
struct common_type<T, U>
#else
template <class T, class U>
struct common_type<T, U, void>
#endif
    : type_traits_detail::common_type_2<T,U>
{ };

// 3 or more args
#if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES)
// Variadic case: fold left over the pack.
template<typename T, typename U, typename... V>
struct common_type<T, U, V...> {
public:
    typedef typename common_type<typename common_type<T, U>::type, V...>::type type;
};
#endif
} // namespace boost_ex
#endif // BOOST_TYPE_TRAITS_EXT_COMMON_TYPE_HPP
| mit |