code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
require 'spec_helper'
describe Mongo::Auth::CR do

  let(:address) do
    default_address
  end

  let(:monitoring) do
    Mongo::Monitoring.new
  end

  let(:listeners) do
    Mongo::Event::Listeners.new
  end

  # Server backed by a stubbed cluster; only needed to build the connection.
  let(:server) do
    Mongo::Server.new(address, double('cluster'), monitoring, listeners, TEST_OPTIONS)
  end

  let(:connection) do
    Mongo::Server::Connection.new(server, TEST_OPTIONS)
  end

  describe '#login' do

    context 'when the user is not authorized' do

      let(:user) do
        Mongo::Auth::User.new(
          database: 'driver',
          user: 'notauser',
          password: 'password'
        )
      end

      let(:cr) do
        described_class.new(user)
      end

      # Removed an unused `let(:login)` here: no example referenced it, and
      # evaluating it would have raised before the expectation ran anyway.
      it 'raises an exception' do
        expect {
          cr.login(connection)
        }.to raise_error(Mongo::Auth::Unauthorized)
      end
    end
  end

  context 'when the user is authorized for the database' do

    let(:cr) do
      described_class.new(root_user)
    end

    # MONGODB-CR is unavailable on servers where SCRAM-SHA-1 is enabled,
    # so only run this example when it is not.
    it 'logs the user into the connection', unless: scram_sha_1_enabled? do
      expect(cr.login(connection).documents[0]['ok']).to eq(1)
    end
  end
end
| brandonblack/mongo-ruby-driver | spec/mongo/auth/cr_spec.rb | Ruby | apache-2.0 | 1,289 |
/*
* Copyright 2012-2016 bambooCORE, greenstep of copyright Chen Xin Nien
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* -----------------------------------------------------------------------
*
* author: Chen Xin Nien
* contact: chen.xin.nien@gmail.com
*
*/
package com.netsteadfast.greenstep.bsc.model;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import com.netsteadfast.greenstep.base.AppContext;
import com.netsteadfast.greenstep.base.Constants;
import com.netsteadfast.greenstep.base.exception.ServiceException;
import com.netsteadfast.greenstep.po.hbm.TbSysCode;
import com.netsteadfast.greenstep.service.ISysCodeService;
import com.netsteadfast.greenstep.util.SimpleUtils;
import com.netsteadfast.greenstep.vo.SysCodeVO;
@SuppressWarnings("unchecked")
public class BscMeasureDataFrequency {
private static ISysCodeService<SysCodeVO, TbSysCode, String> sysCodeService;
private static Map<String, String> frequencyMap = new LinkedHashMap<String, String>();
static {
sysCodeService = (ISysCodeService<SysCodeVO, TbSysCode, String>)
AppContext.getBean("core.service.SysCodeService");
}
private static void loadMapData() {
Map<String, Object> params = new HashMap<String, Object>();
params.put("type", CODE_TYPE);
Map<String, String> orderParams = new HashMap<String, String>();
orderParams.put("code", "ASC");
try {
List<TbSysCode> codes = sysCodeService.findListByParams(params, null, orderParams);
for (TbSysCode code : codes) {
if (code.getCode().equals(FREQUENCY_DAY_CODE)) {
frequencyMap.put(FREQUENCY_DAY, code.getName());
}
if (code.getCode().equals(FREQUENCY_WEEK_CODE)) {
frequencyMap.put(FREQUENCY_WEEK, code.getName());
}
if (code.getCode().equals(FREQUENCY_MONTH_CODE)) {
frequencyMap.put(FREQUENCY_MONTH, code.getName());
}
if (code.getCode().equals(FREQUENCY_QUARTER_CODE)) {
frequencyMap.put(FREQUENCY_QUARTER, code.getName());
}
if (code.getCode().equals(FREQUENCY_HALF_OF_YEAR_CODE)) {
frequencyMap.put(FREQUENCY_HALF_OF_YEAR, code.getName());
}
if (code.getCode().equals(FREQUENCY_YEAR_CODE)) {
frequencyMap.put(FREQUENCY_YEAR, code.getName());
}
}
} catch (ServiceException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
}
public static Map<String, String> getFrequencyMap(boolean pleaseSelect) {
Map<String, String> dataMap = new LinkedHashMap<String, String>();
if (pleaseSelect) {
dataMap.put(Constants.HTML_SELECT_NO_SELECT_ID, Constants.HTML_SELECT_NO_SELECT_NAME);
}
if (frequencyMap.size() < 1) {
loadMapData();
}
dataMap.putAll(frequencyMap);
return dataMap;
}
public static String getQueryDate(String date, String frequency) {
String queryDate = date.replaceAll("-", "").replaceAll("/", "");
if (FREQUENCY_DAY.equals(frequency) || FREQUENCY_WEEK.equals(frequency) ) {
queryDate = queryDate.substring(0, 6);
}
if (FREQUENCY_MONTH.equals(frequency) || FREQUENCY_QUARTER.equals(frequency)
|| FREQUENCY_HALF_OF_YEAR.equals(frequency) || FREQUENCY_YEAR.equals(frequency) ) {
queryDate = queryDate.substring(0, 4);
}
return queryDate;
}
/**
* TB_SYS_CODE.TYPE
*/
public final static String CODE_TYPE = "KMD";
/**
* 日
*/
public static final String FREQUENCY_DAY = "1";
public static final String FREQUENCY_DAY_CODE = "KMD_MEASURE01";
/**
* 周
*/
public static final String FREQUENCY_WEEK = "2";
public static final String FREQUENCY_WEEK_CODE = "KMD_MEASURE02";
/**
* 月
*/
public static final String FREQUENCY_MONTH = "3";
public static final String FREQUENCY_MONTH_CODE = "KMD_MEASURE03";
/**
* 季
*/
public static final String FREQUENCY_QUARTER = "4";
public static final String FREQUENCY_QUARTER_CODE = "KMD_MEASURE04";
/**
* 半年
*/
public static final String FREQUENCY_HALF_OF_YEAR = "5";
public static final String FREQUENCY_HALF_OF_YEAR_CODE = "KMD_MEASURE05";
/**
* 年
*/
public static final String FREQUENCY_YEAR = "6";
public static final String FREQUENCY_YEAR_CODE = "KMD_MEASURE06";
/**
* 給報表查詢時, 在 bb_measure_data 正確的 "月", "周" 的日期起迄
* yyyyMMdd
* 2013/01/01
* 20130101
*
* @param yyyyMMdd
* @return
* @throws Exception
*/
public static Map<String, String> getWeekOrMonthStartEnd(String frequency, String startDate, String endDate) throws Exception {
if (!BscMeasureDataFrequency.FREQUENCY_WEEK.equals(frequency) && !BscMeasureDataFrequency.FREQUENCY_MONTH.equals(frequency)) {
throw new java.lang.IllegalArgumentException("frequency error.");
}
Map<String, String> dateMap=new HashMap<String, String>();
if (!SimpleUtils.isDate(startDate) || !SimpleUtils.isDate(endDate)) {
throw new java.lang.IllegalArgumentException("startDate/endDate error.");
}
if (BscMeasureDataFrequency.FREQUENCY_WEEK.equals(frequency)) {
int firstDay = Integer.parseInt( startDate.substring(startDate.length()-2, startDate.length()) );
int endDay = Integer.parseInt( endDate.substring(endDate.length()-2, endDate.length()) );
if (firstDay>=1 && firstDay<8) {
firstDay = 1;
}
if (firstDay>=8 && firstDay<15) {
firstDay = 8;
}
if (firstDay>=15 && firstDay<22) {
firstDay = 15;
}
if (firstDay>=22) {
firstDay = 22;
}
if (endDay>=1 && endDay<8) {
endDay = 7;
}
if (endDay>=8 && endDay<15) {
endDay = 14;
}
if (endDay>=15 && endDay<22) {
endDay = 21;
}
if (endDay>=22) {
endDay = SimpleUtils.getMaxDayOfMonth(
Integer.parseInt(endDate.substring(0, 4)),
Integer.parseInt(endDate.substring(5, 7)) );
}
String newStartDate = startDate.substring(0, startDate.length()-2)
+ StringUtils.leftPad(String.valueOf(firstDay), 2, "0");
String newEndDate = endDate.substring(0, endDate.length()-2)
+ StringUtils.leftPad(String.valueOf(endDay), 2, "0");
dateMap.put("startDate", newStartDate);
dateMap.put("endDate", newEndDate);
}
if (BscMeasureDataFrequency.FREQUENCY_MONTH.equals(frequency)) {
int endDay = SimpleUtils.getMaxDayOfMonth( Integer.parseInt(endDate.substring(0, 4)),
Integer.parseInt(endDate.substring(5, 7)) );
String newStartDate = startDate.substring(0, startDate.length()-2) + "01";
String newEndDate = endDate.substring(0, endDate.length()-2)
+ StringUtils.leftPad(String.valueOf(endDay), 2, "0");
dateMap.put("startDate", newStartDate);
dateMap.put("endDate", newEndDate);
}
return dateMap;
}
}
| billchen198318/bamboobsc | gsbsc-standard/src/com/netsteadfast/greenstep/bsc/model/BscMeasureDataFrequency.java | Java | apache-2.0 | 7,192 |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.firefox;
import static java.util.Arrays.stream;
import static java.util.concurrent.TimeUnit.SECONDS;
import static java.util.stream.Collectors.toList;
import static org.openqa.selenium.Platform.MAC;
import static org.openqa.selenium.Platform.UNIX;
import static org.openqa.selenium.Platform.WINDOWS;
import static org.openqa.selenium.os.WindowsUtils.getPathsInProgramFiles;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.openqa.selenium.Platform;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.firefox.internal.Executable;
import org.openqa.selenium.io.FileHandler;
import org.openqa.selenium.os.CommandLine;
import org.openqa.selenium.os.ExecutableFinder;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;
/**
 * Wrapper around the Firefox executable. Locates a firefox binary (from a
 * system property, an explicit file, a release channel, or well-known
 * platform locations), accumulates extra environment variables and
 * command-line options, and starts/stops the browser process.
 */
public class FirefoxBinary {

  /**
   * Enumerates Firefox channels, according to https://wiki.mozilla.org/RapidRelease
   */
  public enum Channel {
    ESR("esr"),
    RELEASE("release"),
    BETA("beta"),
    AURORA("aurora"),
    NIGHTLY("nightly");

    private String name;

    Channel(String name) {
      this.name = name;
    }

    public String toString() {
      return name;
    }

    /**
     * Gets a channel with the name matching the parameter.
     *
     * @param name the channel name
     * @return the Channel enum value matching the parameter
     */
    public static Channel fromString(String name) {
      final String lcName = name.toLowerCase();
      return stream(Channel.values())
          .filter(ch -> ch.name.equals(lcName))
          .findFirst().orElseThrow(() -> new WebDriverException("Unrecognized channel: " + name));
    }
  }

  // Shared library extracted next to the profile on Linux so Firefox does not
  // grab window focus (see modifyLinkLibraryPath / extractAndCheck).
  private static final String NO_FOCUS_LIBRARY_NAME = "x_ignore_nofocus.so";
  // Resource path of this package inside the jar, used to locate bundled libraries.
  private static final String PATH_PREFIX = "/" +
      FirefoxBinary.class.getPackage().getName().replace(".", "/") + "/";

  private final Map<String, String> extraEnv = Maps.newHashMap();
  private final List<String> extraOptions = Lists.newArrayList();
  private final Executable executable;
  private CommandLine process;
  private OutputStream stream;
  // NOTE(review): only accessed via getTimeout()/setTimeout() in this class;
  // nothing here consults it when starting or waiting on the process.
  private long timeout = SECONDS.toMillis(45);

  /**
   * Locates firefox via the system property first, then falls back to
   * platform-specific well-known locations.
   */
  public FirefoxBinary() {
    Executable systemBinary = locateFirefoxBinaryFromSystemProperty();
    if (systemBinary != null) {
      executable = systemBinary;
      return;
    }

    Executable platformBinary = locateFirefoxBinariesFromPlatform().findFirst().orElse(null);
    if (platformBinary != null) {
      executable = platformBinary;
      return;
    }

    throw new WebDriverException("Cannot find firefox binary in PATH. " +
        "Make sure firefox is installed. OS appears to be: " + Platform.getCurrent());
  }

  /**
   * Locates a firefox binary belonging to the given release channel; a
   * system-property override must match the channel or an exception is thrown.
   */
  public FirefoxBinary(Channel channel) {
    Executable systemBinary = locateFirefoxBinaryFromSystemProperty();
    if (systemBinary != null) {
      if (systemBinary.getChannel() == channel) {
        executable = systemBinary;
        return;
      } else {
        throw new WebDriverException(
            "Firefox executable specified by system property " + FirefoxDriver.SystemProperty.BROWSER_BINARY +
            " does not belong to channel '" + channel + "', it appears to be '" + systemBinary.getChannel() + "'");
      }
    }

    executable = locateFirefoxBinariesFromPlatform()
        .filter(e -> e.getChannel() == channel)
        .findFirst().orElseThrow(() -> new WebDriverException(
            String.format("Cannot find firefox binary for channel '%s' in PATH", channel)));
  }

  /**
   * @deprecated Perform the version check by taking a look at the version that comes back from the
   *             remote end in the returned {@link org.openqa.selenium.Capabilities}.
   */
  @Deprecated
  public FirefoxBinary(String version) {
    Executable systemBinary = locateFirefoxBinaryFromSystemProperty();
    if (systemBinary != null) {
      if (systemBinary.getVersion().startsWith(version)) {
        executable = systemBinary;
        return;
      } else {
        throw new WebDriverException(
            "Firefox executable specified by system property " + FirefoxDriver.SystemProperty.BROWSER_BINARY +
            " has version '" + systemBinary.getVersion() + "', that does not match '" + version + "'");
      }
    }

    executable = locateFirefoxBinariesFromPlatform()
        .filter(e -> e.getVersion().startsWith(version))
        .findFirst().orElseThrow(() -> new WebDriverException(
            String.format("Cannot find firefox binary version '%s' in PATH", version)));
  }

  /** Uses the given file directly as the firefox executable. */
  public FirefoxBinary(File pathToFirefoxBinary) {
    executable = new Executable(pathToFirefoxBinary);
  }

  /**
   * Adds an environment variable to pass to the firefox process.
   *
   * @throws WebDriverException if either name or value is null
   */
  public void setEnvironmentProperty(String propertyName, String value) {
    if (propertyName == null || value == null) {
      throw new WebDriverException(
          String.format("You must set both the property name and value: %s, %s", propertyName,
                        value));
    }
    extraEnv.put(propertyName, value);
  }

  /** Appends extra command-line flags passed to firefox at launch. */
  public void addCommandLineOptions(String... options) {
    extraOptions.addAll(Lists.newArrayList(options));
  }

  // Copies the accumulated extra options onto the given FirefoxOptions.
  void amendOptions(FirefoxOptions options) {
    options.addArguments(extraOptions);
  }

  protected boolean isOnLinux() {
    return Platform.getCurrent().is(Platform.LINUX);
  }

  /**
   * Starts firefox against the given profile directory. Sets the profile and
   * no-remote/crash-reporter environment variables, wires up library paths and
   * output capture, then launches asynchronously.
   *
   * @throws IOException if the process cannot be started
   */
  public void startProfile(FirefoxProfile profile, File profileDir, String... commandLineFlags)
      throws IOException {
    String profileAbsPath = profileDir.getAbsolutePath();
    setEnvironmentProperty("XRE_PROFILE_PATH", profileAbsPath);
    setEnvironmentProperty("MOZ_NO_REMOTE", "1");
    setEnvironmentProperty("MOZ_CRASHREPORTER_DISABLE", "1"); // Disable Breakpad
    setEnvironmentProperty("NO_EM_RESTART", "1"); // Prevent the binary from detaching from the
                                                  // console

    if (isOnLinux() && profile.shouldLoadNoFocusLib()) {
      modifyLinkLibraryPath(profileDir);
    }

    List<String> cmdArray = Lists.newArrayList();
    cmdArray.addAll(extraOptions);
    cmdArray.addAll(Lists.newArrayList(commandLineFlags));
    CommandLine command = new CommandLine(getPath(), Iterables.toArray(cmdArray, String.class));
    command.setEnvironmentVariables(getExtraEnv());
    command.updateDynamicLibraryPath(getExtraEnv().get(CommandLine.getLibraryPathPropertyName()));
    // On Snow Leopard, beware of problems the sqlite library
    if (! (Platform.getCurrent().is(Platform.MAC) && Platform.getCurrent().getMinorVersion() > 5)) {
      String firefoxLibraryPath = System.getProperty(
          FirefoxDriver.SystemProperty.BROWSER_LIBRARY_PATH,
          getFile().getAbsoluteFile().getParentFile().getAbsolutePath());
      command.updateDynamicLibraryPath(firefoxLibraryPath);
    }

    if (stream == null) {
      stream = getDefaultOutputStream();
    }
    command.copyOutputTo(stream);

    startFirefoxProcess(command);
  }

  protected void startFirefoxProcess(CommandLine command) throws IOException {
    process = command;
    command.executeAsync();
  }

  protected File getFile() {
    return executable.getFile();
  }

  protected String getPath() {
    return executable.getPath();
  }

  /** @return an unmodifiable view of the extra environment variables. */
  public Map<String, String> getExtraEnv() {
    return Collections.unmodifiableMap(extraEnv);
  }

  protected void modifyLinkLibraryPath(File profileDir) {
    // Extract x_ignore_nofocus.so from x86, amd64 directories inside
    // the jar into a real place in the filesystem and change LD_LIBRARY_PATH
    // to reflect that.

    String existingLdLibPath = System.getenv("LD_LIBRARY_PATH");
    // The returned new ld lib path is terminated with ':'
    String newLdLibPath =
        extractAndCheck(profileDir, NO_FOCUS_LIBRARY_NAME, PATH_PREFIX + "x86", PATH_PREFIX +
                                                                                "amd64");
    if (existingLdLibPath != null && !existingLdLibPath.equals("")) {
      newLdLibPath += existingLdLibPath;
    }

    setEnvironmentProperty("LD_LIBRARY_PATH", newLdLibPath);
    // Set LD_PRELOAD to x_ignore_nofocus.so - this will be taken automagically
    // from the LD_LIBRARY_PATH
    setEnvironmentProperty("LD_PRELOAD", NO_FOCUS_LIBRARY_NAME);
  }

  protected String extractAndCheck(File profileDir, String noFocusSoName,
                                   String jarPath32Bit, String jarPath64Bit) {

    // 1. Extract x86/x_ignore_nofocus.so to profile.getLibsDir32bit
    // 2. Extract amd64/x_ignore_nofocus.so to profile.getLibsDir64bit
    // 3. Create a new LD_LIB_PATH string to contain:
    // profile.getLibsDir32bit + ":" + profile.getLibsDir64bit

    Set<String> pathsSet = new HashSet<>();
    pathsSet.add(jarPath32Bit);
    pathsSet.add(jarPath64Bit);

    StringBuilder builtPath = new StringBuilder();

    for (String path : pathsSet) {
      try {

        FileHandler.copyResource(profileDir, getClass(), path + File.separator + noFocusSoName);

      } catch (IOException e) {
        if (Boolean.getBoolean("webdriver.development")) {
          System.err.println(
              "Exception unpacking required library, but in development mode. Continuing");
        } else {
          throw new WebDriverException(e);
        }
      } // End catch.

      String outSoPath = profileDir.getAbsolutePath() + File.separator + path;

      File file = new File(outSoPath, noFocusSoName);
      if (!file.exists()) {
        throw new WebDriverException("Could not locate " + path + ": "
                                     + "native events will not work.");
      }

      builtPath.append(outSoPath).append(":");
    }

    return builtPath.toString();
  }

  /**
   * Waits for the process to execute, returning the command output taken from the profile's
   * execution.
   *
   * @throws InterruptedException if we are interrupted while waiting for the process to launch
   * @throws IOException if there is a problem with reading the input stream of the launching
   *         process
   */
  public void waitFor() throws InterruptedException, IOException {
    process.waitFor();
  }

  /**
   * Waits for the process to execute, returning the command output taken from the profile's
   * execution.
   *
   * @param timeout the maximum time to wait in milliseconds
   * @throws InterruptedException if we are interrupted while waiting for the process to launch
   * @throws IOException if there is a problem with reading the input stream of the launching
   *         process
   */
  public void waitFor(long timeout) throws InterruptedException, IOException {
    process.waitFor(timeout);
  }

  /**
   * Gets all console output of the binary. Output retrieval is non-destructive and non-blocking.
   *
   * @return the console output of the executed binary.
   * @throws IOException IO exception reading from the output stream of the firefox process
   */
  public String getConsoleOutput() throws IOException {
    if (process == null) {
      return null;
    }

    return process.getStdOut();
  }

  public long getTimeout() {
    return timeout;
  }

  public void setTimeout(long timeout) {
    this.timeout = timeout;
  }

  @Override
  public String toString() {
    return "FirefoxBinary(" + executable.getPath() + ")";
  }

  /** Redirects firefox console output to the given stream. */
  public void setOutputWatcher(OutputStream stream) {
    this.stream = stream;
  }

  /** Destroys the firefox process, if one was started. */
  public void quit() {
    if (process != null) {
      process.destroy();
    }
  }

  // Chooses the default destination for firefox console output: System.out
  // for the literal path "/dev/stdout", a file if the log-file property is
  // set, or null (no capture) otherwise.
  private OutputStream getDefaultOutputStream() throws FileNotFoundException {
    String firefoxLogFile = System.getProperty(FirefoxDriver.SystemProperty.BROWSER_LOGFILE);
    if (firefoxLogFile != null) {
      if ("/dev/stdout".equals(firefoxLogFile)) {
        return System.out;
      }
      return new FileOutputStream(firefoxLogFile);
    }
    return null;
  }

  /**
   * Locates the firefox binary from a system property. Will throw an exception if the binary cannot
   * be found.
   */
  private static Executable locateFirefoxBinaryFromSystemProperty() {
    String binaryName = System.getProperty(FirefoxDriver.SystemProperty.BROWSER_BINARY);
    if (binaryName == null)
      return null;

    File binary = new File(binaryName);
    if (binary.exists() && !binary.isDirectory())
      return new Executable(binary);

    // Not an existing file: append platform-specific suffixes and retry.
    Platform current = Platform.getCurrent();
    if (current.is(WINDOWS)) {
      if (!binaryName.endsWith(".exe")) {
        binaryName += ".exe";
      }

    } else if (current.is(MAC)) {
      if (!binaryName.endsWith(".app")) {
        binaryName += ".app";
      }
      binaryName += "/Contents/MacOS/firefox-bin";
    }

    binary = new File(binaryName);
    if (binary.exists())
      return new Executable(binary);

    throw new WebDriverException(
        String.format("'%s' property set, but unable to locate the requested binary: %s",
                      FirefoxDriver.SystemProperty.BROWSER_BINARY, binaryName));
  }

  /**
   * Locates the firefox binary by platform.
   */
  private static Stream<Executable> locateFirefoxBinariesFromPlatform() {
    ImmutableList.Builder<Executable> executables = new ImmutableList.Builder<>();

    Platform current = Platform.getCurrent();
    if (current.is(WINDOWS)) {
      executables.addAll(Stream.of(getPathsInProgramFiles("Mozilla Firefox\\firefox.exe"),
                                   getPathsInProgramFiles("Firefox Developer Edition\\firefox.exe"),
                                   getPathsInProgramFiles("Nightly\\firefox.exe"))
                             .flatMap(List::stream)
                             .map(File::new).filter(File::exists)
                             .map(Executable::new).collect(toList()));

    } else if (current.is(MAC)) {
      // system
      File binary = new File("/Applications/Firefox.app/Contents/MacOS/firefox-bin");
      if (binary.exists()) {
        executables.add(new Executable(binary));
      }

      // user home: prefixes the user's home directory onto the system path,
      // e.g. /Users/me/Applications/Firefox.app/Contents/MacOS/firefox-bin
      binary = new File(System.getProperty("user.home") + binary.getAbsolutePath());
      if (binary.exists()) {
        executables.add(new Executable(binary));
      }

    } else if (current.is(UNIX)) {
      String systemFirefoxBin = new ExecutableFinder().find("firefox-bin");
      if (systemFirefoxBin != null) {
        executables.add(new Executable(new File(systemFirefoxBin)));
      }
    }

    // On every platform, also consider a "firefox" found on PATH; resolve
    // symlinks to the real firefox / firefox-bin next to the target.
    String systemFirefox = new ExecutableFinder().find("firefox");
    if (systemFirefox != null) {
      Path firefoxPath = new File(systemFirefox).toPath();
      if (Files.isSymbolicLink(firefoxPath)) {
        try {
          Path realPath = firefoxPath.toRealPath();
          File attempt1 = realPath.getParent().resolve("firefox").toFile();
          if (attempt1.exists()) {
            executables.add(new Executable(attempt1));
          } else {
            File attempt2 = realPath.getParent().resolve("firefox-bin").toFile();
            if (attempt2.exists()) {
              executables.add(new Executable(attempt2));
            }
          }
        } catch (IOException e) {
          // ignore this path
        }

      } else {
        executables.add(new Executable(new File(systemFirefox)));
      }
    }

    return executables.build().stream();
  }
}
| Jarob22/selenium | java/client/src/org/openqa/selenium/firefox/FirefoxBinary.java | Java | apache-2.0 | 16,114 |
#!/usr/bin/env python2.6
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the add switch command."""
import unittest
import os
import socket
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
SW_HOSTNAME = "utpgsw0.aqd-unittest.ms.com"
class TestVlan(TestBrokerCommand):
    """Tests add_vlan/del_vlan and vlan handling during switch polling.

    Tests run in numeric order (test_0xx prefixes): the switch and vlans
    created by earlier tests are used and cleaned up by later ones.
    """

    def getswip(self):
        # IP address for the test switch, from the unittest network fixture.
        return self.net.tor_net[10].usable[0]

    def test_001_addvlan714(self):
        """Add a 'user' type vlan and verify it via show_vlan."""
        command = ["add_vlan", "--vlan=714", "--name=user_714",
                   "--vlan_type=user"]
        self.noouttest(command)

        command = "show vlan --vlan 714"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Vlan: 714", command)
        self.matchoutput(out, "Name: user_714", command)

    def test_001_addutpgsw(self):
        """Add the test ToR switch, expecting a matching DSDB registration."""
        ip = self.getswip()
        self.dsdb_expect_add(SW_HOSTNAME, ip, "xge49",
                             ip.mac)
        command = ["add", "switch", "--type", "tor",
                   "--switch", SW_HOSTNAME, "--rack", "ut3",
                   "--model", "rs g8000", "--interface", "xge49",
                   "--mac", ip.mac, "--ip", ip]
        self.ignoreoutputtest(command)
        self.dsdb_verify()

    def test_010_pollutpgsw(self):
        """Polling should warn about vlan 5, which is not yet defined in AQ."""
        command = ["poll", "switch", "--vlan", "--switch",
                   SW_HOSTNAME]
        err = self.statustest(command)
        self.matchoutput(err, "Using jump host nyaqd1.ms.com from service "
                         "instance poll_helper/unittest to run CheckNet for "
                         "switch utpgsw0.aqd-unittest.ms.com.", command)
        self.matchoutput(err, "vlan 5 is not defined in AQ. Please use "
                         "add_vlan to add it.", command)

    # Adding vlan 5 as unknown will suppress poll_switch vlan warning.
    def test_012_addvlan5(self):
        command = ["add_vlan", "--vlan=5", "--name=user_5",
                   "--vlan_type=unknown"]
        self.noouttest(command)

        command = "show vlan --vlan 5"
        out = self.commandtest(command.split(" "))
        self.matchoutput(out, "Vlan: 5", command)
        self.matchoutput(out, "Name: user_5", command)

    def test_012_pollutpgsw(self):
        """After adding vlan 5, polling must no longer emit the warning."""
        command = ["poll", "switch", "--vlan", "--switch",
                   SW_HOSTNAME]
        err = self.statustest(command)
        self.matchoutput(err, "Using jump host nyaqd1.ms.com from service "
                         "instance poll_helper/unittest to run CheckNet for "
                         "switch utpgsw0.aqd-unittest.ms.com.", command)
        self.matchclean(err, "vlan 5 is not defined in AQ. Please use "
                        "add_vlan to add it.", command)

    def test_015_searchswbyvlan(self):
        """search_switch --vlan should list only switches carrying vlan 714."""
        command = ["search_switch", "--vlan=714",
                   "--format=csv"]
        out = self.commandtest(command)
        ip = self.getswip()
        self.matchoutput(out,
                         "utpgsw0.aqd-unittest.ms.com,%s,tor,ut3,ut,bnt,"
                         "rs g8000,,xge49,%s" % (ip, ip.mac), command)
        self.matchclean(out,
                        "ut3gd1r01.aqd-unittest.ms.com,4.2.5.8,bor,ut3,ut,hp,"
                        "uttorswitch,SNgd1r01,,", command)

    def test_020_faildelvlan(self):
        """Deleting an in-use vlan must fail with a clear error."""
        command = ["del_vlan", "--vlan=714"]
        errOut = self.badrequesttest(command)
        self.matchoutput(errOut,
                         "VlanInfo 714 is still in use and cannot be "
                         "deleted.", command)

    # Unknown vlans have no dependencies, can be deleted.
    def test_025_delvlan(self):
        command = ["del_vlan", "--vlan=5"]
        self.noouttest(command)

        command = ["show_vlan", "--vlan=5"]
        self.notfoundtest(command)

    def test_030_delutpgsw(self):
        """Delete the switch, verify DSDB delete and plenary file removal."""
        self.dsdb_expect_delete(self.getswip())
        command = "del switch --switch %s" % SW_HOSTNAME
        self.noouttest(command.split(" "))

        plenary = os.path.join(self.config.get("broker", "plenarydir"),
                               "switchdata", "%s.tpl" % SW_HOSTNAME)
        self.failIf(os.path.exists(plenary),
                    "Plenary file '%s' still exists" % plenary)
        self.dsdb_verify()

    def test_040_delvlan(self):
        """With the switch gone, vlan 714 can finally be deleted."""
        command = ["del_vlan", "--vlan=714"]
        self.noouttest(command)

        command = ["show_vlan", "--vlan=714"]
        self.notfoundtest(command)
if __name__ == '__main__':
    # Run only the TestVlan case, with per-test progress output.
    vlan_suite = unittest.TestLoader().loadTestsFromTestCase(TestVlan)
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(vlan_suite)
| stdweird/aquilon | tests/broker/test_vlan.py | Python | apache-2.0 | 5,203 |
# -*- coding: utf-8 -*-
#
# QOpenScienceFramework documentation build configuration file, created by
# sphinx-quickstart on Thu May 12 11:05:40 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
from recommonmark.parser import CommonMarkParser
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# Get the version from QNotifications
# (the package two levels up is imported so `version`/`release` below can
# read QOpenScienceFramework.__version__)
sys.path.insert(0, os.path.abspath("../.."))
import QOpenScienceFramework

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.viewcode',
    'sphinx.ext.githubpages',
    'sphinx.ext.autosummary',
    'numpydoc'
]

# numpydoc rendering: show class members, but not inherited ones, and do not
# generate a per-member toctree.
numpydoc_show_class_members = True
numpydoc_show_inherited_class_members = False
numpydoc_class_members_toctree = False

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# Markdown support
# NOTE(review): wiring a parser through `source_parsers` is the pre-Sphinx-1.8
# mechanism; confirm the pinned Sphinx version before upgrading.
# The suffix of source filenames.
source_suffix = ['.rst', '.md']
source_parsers = {
    '.md': CommonMarkParser,
}

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'QOpenScienceFramework'
copyright = u'2016, Daniel Schreij'
author = u'Daniel Schreij'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = QOpenScienceFramework.__version__
# The full version, including alpha/beta/rc tags.
release = QOpenScienceFramework.__version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'monokai'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#html_title = u'QOpenScienceFramework v1.1.0'

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'QOpenScienceFrameworkdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'QOpenScienceFramework.tex', u'QOpenScienceFramework Documentation',
     u'Daniel Schreij', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'qopenscienceframework', u'QOpenScienceFramework Documentation',
     [author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'QOpenScienceFramework', u'QOpenScienceFramework Documentation',
     author, 'QOpenScienceFramework', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
# Uses the named-target mapping form ({name: (base_url, inventory)}), which is
# the format recommended by the intersphinx documentation; the legacy
# {url: None} form used previously is deprecated.
intersphinx_mapping = {'python': ('https://docs.python.org/', None)}
| dschreij/osf-api-python-toolkit | docs/source/conf.py | Python | apache-2.0 | 10,111 |
// This THREEx helper makes it easy to handle window resize.
// It will update renderer and camera when window is resized.
//
// # Usage
//
// **Step 1**: Start updating renderer and camera
//
// ```var windowResize = THREEx.WindowResize(aRenderer, aCamera)```
//
// **Step 2**: Stop updating renderer and camera
//
// ```windowResize.stop()```
// # Code
// Source: https://github.com/jeromeetienne/threex/blob/master/THREEx.WindowResize.js
//
/** @namespace */
var THREEx = THREEx || {};

/**
 * Update renderer and camera when the window is resized.
 *
 * @param {Object} renderer the renderer to resize with the window
 * @param {Object} camera   the camera whose aspect ratio follows the window
 * @return {{stop: Function}} handle whose stop() detaches the resize listener
 */
THREEx.WindowResize = function(renderer, camera){
	var callback = function(){
		// notify the renderer of the size change
		renderer.setSize(window.innerWidth, window.innerHeight);
		// update the camera aspect ratio to match the new viewport
		camera.aspect = window.innerWidth / window.innerHeight;
		camera.updateProjectionMatrix();
	};
	// bind the resize event
	window.addEventListener('resize', callback, false);
	// return a handle exposing .stop() to stop watching window resize
	return {
		/**
		 * Stop watching window resize
		 */
		stop: function(){
			window.removeEventListener('resize', callback);
		}
	};
};
| hiroki-repo/hiroki-repo.github.com | game0001/public/lib/THREEx.WindowResize.js | JavaScript | apache-2.0 | 1,245 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gora.compiler.utils;
import java.util.HashMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Utility class which specifies a collection of license headers which can be
 * used within the GoraCompiler for generating alternative license headers for
 * Java interfaces and classes generated from protocols and schemas.
 */
public class LicenseHeaders {
  /**
   * Chosen license to be included within the generated classes.
   */
  private String licenseName;
  /**
   * Names of the licenses supported by the Gora compilers. Each entry matches
   * one of the license-text constants declared below.
   */
  private static final String[] supportedLicenses = {"ASLv2", "AGPLv3", "CDDLv1", "FDLv13", "GPLv1", "GPLv2", "GPLv3", "LGPLv21", "LGPLv3"};
  /**
   * Mapping from supported license names to their header text.
   */
  private HashMap<String, String> relatedLicenses;
  // ASLv2 license header
  private static final String ASLv2 =
    "/**\n" +
    " *Licensed to the Apache Software Foundation (ASF) under one\n" +
    " *or more contributor license agreements. See the NOTICE file\n" +
    " *distributed with this work for additional information\n" +
    " *regarding copyright ownership. The ASF licenses this file\n" +
    " *to you under the Apache License, Version 2.0 (the\"\n" +
    " *License\"); you may not use this file except in compliance\n" +
    " *with the License. You may obtain a copy of the License at\n" +
    " *\n " +
    " * http://www.apache.org/licenses/LICENSE-2.0\n" +
    " * \n" +
    " *Unless required by applicable law or agreed to in writing, software\n" +
    " *distributed under the License is distributed on an \"AS IS\" BASIS,\n" +
    " *WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" +
    " *See the License for the specific language governing permissions and\n" +
    " *limitations under the License.\n" +
    " */\n";
  // AGPLv3 license header
  private static final String AGPLv3 =
    "/**\n" +
    " * This program is free software: you can redistribute it and/or modify\n" +
    " * it under the terms of the GNU Affero General Public License as published by\n" +
    " * the Free Software Foundation, either version 3 of the License, or\n" +
    " * (at your option) any later version.\n" +
    " *\n " +
    " * This program is distributed in the hope that it will be useful,\n" +
    " * but WITHOUT ANY WARRANTY; without even the implied warranty of\n" +
    " * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" +
    " * GNU General Public License for more details.\n" +
    " */\n";
  // CDDLv1 license header
  // NOTE: the "Distrubtion" misspellings below are preserved verbatim from
  // the original header text so generated output stays unchanged.
  private static final String CDDLv1 =
    "/**\n" +
    " * COMMON DEVELOPMENT AND DISTRIBUTION LICENSE (CDDL) Version 1.0\n" +
    " *\n " +
    " * This program is free software: you can redistribute it and/or modify\n" +
    " * it under the terms of the Common Development and Distrubtion License as\n" +
    " * published by the Sun Microsystems, either version 1.0 of the\n" +
    " * License, or (at your option) any later version.\n" +
    " *\n " +
    " * This program is distributed in the hope that it will be useful,\n" +
    " * but WITHOUT ANY WARRANTY; without even the implied warranty of\n" +
    " * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" +
    " * GNU General Lesser Public License for more details.\n" +
    " *\n " +
    " * You should have received a copy of the Common Development and Distrubtion\n" +
    " * License along with this program. If not, see\n" +
    " * <http://www.gnu.org/licenses/gpl-1.0.html>.\n" +
    " */\n";
  // FDLv1.3 license header
  private static final String FDLv13 =
    "/**\n" +
    " * This program is free software: you can redistribute it and/or modify\n" +
    " * it under the terms of the GNU Free Documentation License as published by\n" +
    " * the Free Software Foundation, either version 1.3 of the License, or\n" +
    " * (at your option) any later version.\n" +
    " *\n " +
    " * This program is distributed in the hope that it will be useful,\n" +
    " * but WITHOUT ANY WARRANTY; without even the implied warranty of\n" +
    " * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" +
    " * GNU General Public License for more details.\n" +
    " *\n " +
    " * You should have received a copy of the GNU Free Documentation License\n" +
    " * along with this program. If not, see <http://www.gnu.org/licenses/>.\n" +
    " */\n";
  // GPLv1 license header
  private static final String GPLv1 =
    "/**\n" +
    " * This program is free software: you can redistribute it and/or modify\n" +
    " * it under the terms of the GNU General Public License as\n" +
    " * published by the Free Software Foundation, either version 1 of the\n" +
    " * License, or (at your option) any later version.\n" +
    " *\n " +
    " * This program is distributed in the hope that it will be useful,\n" +
    " * but WITHOUT ANY WARRANTY; without even the implied warranty of\n" +
    " * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" +
    " * GNU General Public License for more details.\n" +
    " *\n " +
    " * You should have received a copy of the GNU General Public\n" +
    " * License along with this program. If not, see\n" +
    " * <http://www.gnu.org/licenses/gpl-1.0.html>.\n" +
    " */\n";
  // GPLv2 license header
  private static final String GPLv2 =
    "/**\n" +
    " * This program is free software: you can redistribute it and/or modify\n" +
    " * it under the terms of the GNU General Public License as\n" +
    " * published by the Free Software Foundation, either version 2 of the\n" +
    " * License, or (at your option) any later version.\n" +
    " *\n " +
    " * This program is distributed in the hope that it will be useful,\n" +
    " * but WITHOUT ANY WARRANTY; without even the implied warranty of\n" +
    " * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" +
    " * GNU General Public License for more details.\n" +
    " *\n " +
    " * You should have received a copy of the GNU General Public\n" +
    " * License along with this program. If not, see\n" +
    " * <http://www.gnu.org/licenses/gpl-2.0.html>.\n" +
    " */\n";
  // GPLv3 license header
  private static final String GPLv3 =
    "/**\n" +
    " * This program is free software: you can redistribute it and/or modify\n" +
    " * it under the terms of the GNU General Public License as\n" +
    " * published by the Free Software Foundation, either version 3 of the\n" +
    " * License, or (at your option) any later version.\n" +
    " *\n " +
    " * This program is distributed in the hope that it will be useful,\n" +
    " * but WITHOUT ANY WARRANTY; without even the implied warranty of\n" +
    " * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" +
    " * GNU General Public License for more details.\n" +
    " *\n " +
    " * You should have received a copy of the GNU General Public\n" +
    " * License along with this program. If not, see\n" +
    " * <http://www.gnu.org/licenses/gpl-3.0.html>.\n" +
    " */\n";
  // LGPLv21 license header
  private static final String LGPLv21 =
    "/**\n" +
    " * This program is free software: you can redistribute it and/or modify\n" +
    " * it under the terms of the GNU Lesser General Public License as\n" +
    " * published by the Free Software Foundation, either version 2.1 of the\n" +
    " * License, or (at your option) any later version.\n" +
    " *\n " +
    " * This program is distributed in the hope that it will be useful,\n" +
    " * but WITHOUT ANY WARRANTY; without even the implied warranty of\n" +
    " * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" +
    " * GNU General Public License for more details.\n" +
    " *\n " +
    " * You should have received a copy of the GNU Lesser General Public\n" +
    " * License along with this program. If not, see\n" +
    " * <http://www.gnu.org/licenses/lgpl-2.1.html>.\n" +
    " */\n";
  // LGPLv3 license header
  private static final String LGPLv3 =
    "/**\n" +
    " * This program is free software: you can redistribute it and/or modify\n" +
    " * it under the terms of the GNU Lesser General Public License as\n" +
    " * published by the Free Software Foundation, either version 3 of the\n" +
    " * License, or (at your option) any later version.\n" +
    " *\n " +
    " * This program is distributed in the hope that it will be useful,\n" +
    " * but WITHOUT ANY WARRANTY; without even the implied warranty of\n" +
    " * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n" +
    " * GNU General Public License for more details.\n" +
    " *\n " +
    " * You should have received a copy of the GNU Lesser General Public\n" +
    " * License along with this program. If not, see\n" +
    " * <http://www.gnu.org/licenses/lgpl-3.0.html>.\n" +
    " */\n";
  /**
   * @param pLicenseName name of the license header to emit; should be one of
   *        the names in {@code supportedLicenses}, otherwise
   *        {@link #getLicense()} returns an empty string
   */
  public LicenseHeaders(String pLicenseName) {
    this.initializeRelations();
    this.setLicenseName(pLicenseName);
  }
  /**
   * Initializes the mapping between supported license names and license text.
   * The map is populated directly (the previous implementation copied the
   * private constants via reflection, which was fragile and could only fail
   * at runtime); this version never throws.
   */
  public void initializeRelations() {
    relatedLicenses = new HashMap<>();
    relatedLicenses.put("ASLv2", ASLv2);
    relatedLicenses.put("AGPLv3", AGPLv3);
    relatedLicenses.put("CDDLv1", CDDLv1);
    relatedLicenses.put("FDLv13", FDLv13);
    relatedLicenses.put("GPLv1", GPLv1);
    relatedLicenses.put("GPLv2", GPLv2);
    relatedLicenses.put("GPLv3", GPLv3);
    relatedLicenses.put("LGPLv21", LGPLv21);
    relatedLicenses.put("LGPLv3", LGPLv3);
  }
  /**
   * Set the license name for the LicenseHeader object.
   *
   * @param pLicenseName name of the license to use from now on
   */
  public void setLicenseName(String pLicenseName) {
    this.licenseName = pLicenseName;
  }
  /**
   * Get the license header text for the currently selected license.
   *
   * @return the license text, or an empty string when the selected name is
   *         not one of the supported licenses
   */
  public String getLicense() {
    return relatedLicenses.getOrDefault(licenseName, "");
  }
  /**
   * Get the license name for the LicenseHeader object.
   *
   * @return the currently selected license name
   */
  public String getLicenseName() {
    return licenseName;
  }
}
| NamrataM/gora | gora-compiler/src/main/java/org/apache/gora/compiler/utils/LicenseHeaders.java | Java | apache-2.0 | 11,360 |
/* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package xmlcursor.detailed;
import org.apache.xmlbeans.XmlCursor;
import org.apache.xmlbeans.XmlCursor.TokenType;
import org.apache.xmlbeans.XmlString;
import org.junit.jupiter.api.Test;
import xmlcursor.common.Common;
import static org.junit.jupiter.api.Assertions.*;
import static xmlcursor.common.BasicCursorTestCase.jcur;
/**
 * Verifies how several XmlCursor instances behave when the text node they sit
 * in is replaced via XmlString.setStringValue(): afterwards all cursors report
 * the same position and reading text through them raises IllegalStateException.
 */
public class MultipleCursorSetTest {
    @Test
    void testMultipleCursorSet() throws Exception {
        try (XmlCursor xc = jcur(Common.TRANXML_FILE_CLM)) {
            // Position the primary cursor on the EquipmentNumber element and
            // verify its initial text value.
            xc.selectPath(Common.CLM_NS_XQUERY_DEFAULT + "$this//EquipmentNumber");
            xc.toNextSelection();
            final XmlString xs = (XmlString) xc.getObject();
            assertEquals("123456", xs.getStringValue());
            assertEquals(TokenType.TEXT, xc.toNextToken());
            // Fork six cursors, each one character further into the text node
            // (nextCur advances xc by one char before forking).
            try (XmlCursor x0 = nextCur(xc);
                XmlCursor x1 = nextCur(xc);
                XmlCursor x2 = nextCur(xc);
                XmlCursor x3 = nextCur(xc);
                XmlCursor x4 = nextCur(xc);
                XmlCursor x5 = nextCur(xc);
            ) {
                xc.close();
                XmlCursor[] aCursors = { x0, x1, x2, x3, x4, x5 };
                // Before the mutation every cursor is at a distinct position.
                for (XmlCursor cur1 : aCursors) {
                    for (XmlCursor cur2 : aCursors) {
                        if (cur1 != cur2) {
                            assertFalse(cur1.isAtSamePositionAs(cur2));
                        }
                    }
                }
                // Replace the underlying text; the old text node goes away.
                xs.setStringValue("XYZ");
                // After the mutation all cursors collapse to the same position
                // and getTextValue() on any of them throws.
                for (XmlCursor cur1 : aCursors) {
                    for (XmlCursor cur2 : aCursors) {
                        assertTrue(cur1.isAtSamePositionAs(cur2));
                    }
                    assertThrows(IllegalStateException.class, cur1::getTextValue);
                }
                assertEquals("XYZ", xs.getStringValue());
            }
        }
    }
    // Advances the shared cursor by one character, then returns a new cursor
    // forked at that position.
    private static XmlCursor nextCur(XmlCursor xc) {
        xc.toNextChar(1);
        return xc.newCursor();
    }
}
| apache/xmlbeans | src/test/java/xmlcursor/detailed/MultipleCursorSetTest.java | Java | apache-2.0 | 2,633 |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.tests.net;
import com.badlogic.gdx.ApplicationAdapter;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Net.HttpMethods;
import com.badlogic.gdx.Net.HttpRequest;
import com.badlogic.gdx.Net.HttpResponse;
import com.badlogic.gdx.Net.HttpResponseListener;
import com.badlogic.gdx.tests.utils.GdxTest;
/**
 * Demonstrates how to perform a simple HTTP request.
 * Need to add internet permission to AndroidManifest.xml.
 * @author badlogic
 *
 */
public class HttpRequestExample extends GdxTest {
	@Override
	public void create() {
		// Build a GET request for the libgdx AUTHORS file.
		HttpRequest request = new HttpRequest(HttpMethods.GET);
		request.setUrl("http://libgdx.badlogicgames.com/nightlies/dist/AUTHORS");
		// Send asynchronously; exactly one of the three callbacks below fires.
		// NOTE(review): the listener presumably runs off the render thread --
		// confirm before touching scene state from these callbacks.
		Gdx.net.sendHttpRequest(request, new HttpResponseListener() {
			@Override
			public void handleHttpResponse(HttpResponse httpResponse) {
				// Success: log the response body as a string.
				Gdx.app.log("HttpRequestExample", "response: " + httpResponse.getResultAsString());
			}
			@Override
			public void failed(Throwable t) {
				// Request failed (e.g. no connectivity); log with the cause.
				Gdx.app.error("HttpRequestExample", "something went wrong", t);
			}
			@Override
			public void cancelled () {
				// Request was cancelled before it completed.
				Gdx.app.log("HttpRequestExample", "cancelled");
			}
		});
	}
} | kzganesan/libgdx | tests/gdx-tests/src/com/badlogic/gdx/tests/net/HttpRequestExample.java | Java | apache-2.0 | 1,938 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.reroute;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.allocation.RerouteExplanation;
import org.elasticsearch.cluster.routing.allocation.RoutingExplanations;
import org.elasticsearch.cluster.routing.allocation.command.AllocateReplicaAllocationCommand;
import org.elasticsearch.cluster.routing.allocation.decider.Decision;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
 * Tests the XContent (JSON) rendering of ClusterRerouteResponse. A minimal
 * one-node cluster state with a single index and one routing explanation is
 * built once, then the exact pretty-printed JSON is asserted for three
 * different request-parameter combinations.
 */
public class ClusterRerouteResponseTests extends ESTestCase {
    public void testToXContent() throws IOException {
        DiscoveryNode node0 = new DiscoveryNode("node0", new TransportAddress(TransportAddress.META_ADDRESS, 9000), Version.CURRENT);
        DiscoveryNodes nodes = new DiscoveryNodes.Builder().add(node0).masterNodeId(node0.getId()).build();
        IndexMetadata indexMetadata = IndexMetadata.builder("index").settings(Settings.builder()
            .put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), true)
            .put(IndexSettings.MAX_SCRIPT_FIELDS_SETTING.getKey(), 10)
            .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1)
            .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).build()).build();
        ImmutableOpenMap.Builder<String, IndexMetadata> openMapBuilder = ImmutableOpenMap.builder();
        openMapBuilder.put("index", indexMetadata);
        Metadata metadata = Metadata.builder().indices(openMapBuilder.build()).build();
        ClusterState clusterState = ClusterState.builder(new ClusterName("test")).nodes(nodes).metadata(metadata).build();
        RoutingExplanations routingExplanations = new RoutingExplanations();
        routingExplanations.add(new RerouteExplanation(new AllocateReplicaAllocationCommand("index", 0, "node0"), Decision.YES));
        ClusterRerouteResponse clusterRerouteResponse = new ClusterRerouteResponse(true, clusterState, routingExplanations);
        // Case 1: default params -- the complete cluster state is serialized
        // and no explanations appear.
        {
            XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
            clusterRerouteResponse.toXContent(builder, ToXContent.EMPTY_PARAMS);
            assertEquals("{\n" +
                "  \"acknowledged\" : true,\n" +
                "  \"state\" : {\n" +
                "    \"cluster_uuid\" : \"_na_\",\n" +
                "    \"version\" : 0,\n" +
                "    \"state_uuid\" : \"" + clusterState.stateUUID() + "\",\n" +
                "    \"master_node\" : \"node0\",\n" +
                "    \"blocks\" : { },\n" +
                "    \"nodes\" : {\n" +
                "      \"node0\" : {\n" +
                "        \"name\" : \"\",\n" +
                "        \"ephemeral_id\" : \"" + node0.getEphemeralId() + "\",\n" +
                "        \"transport_address\" : \"0.0.0.0:9000\",\n" +
                "        \"attributes\" : { }\n" +
                "      }\n" +
                "    },\n" +
                "    \"metadata\" : {\n" +
                "      \"cluster_uuid\" : \"_na_\",\n" +
                "      \"cluster_uuid_committed\" : false,\n" +
                "      \"cluster_coordination\" : {\n" +
                "        \"term\" : 0,\n" +
                "        \"last_committed_config\" : [ ],\n" +
                "        \"last_accepted_config\" : [ ],\n" +
                "        \"voting_config_exclusions\" : [ ]\n" +
                "      },\n" +
                "      \"templates\" : { },\n" +
                "      \"indices\" : {\n" +
                "        \"index\" : {\n" +
                "          \"version\" : 1,\n" +
                "          \"mapping_version\" : 1,\n" +
                "          \"settings_version\" : 1,\n" +
                "          \"aliases_version\" : 1,\n" +
                "          \"routing_num_shards\" : 1,\n" +
                "          \"state\" : \"open\",\n" +
                "          \"settings\" : {\n" +
                "            \"index\" : {\n" +
                "              \"shard\" : {\n" +
                "                \"check_on_startup\" : \"true\"\n" +
                "              },\n" +
                "              \"number_of_shards\" : \"1\",\n" +
                "              \"number_of_replicas\" : \"0\",\n" +
                "              \"version\" : {\n" +
                "                \"created\" : \"" + Version.CURRENT.id + "\"\n" +
                "              },\n" +
                "              \"max_script_fields\" : \"10\"\n" +
                "            }\n" +
                "          },\n" +
                "          \"mappings\" : { },\n" +
                "          \"aliases\" : [ ],\n" +
                "          \"primary_terms\" : {\n" +
                "            \"0\" : 0\n" +
                "          },\n" +
                "          \"in_sync_allocations\" : {\n" +
                "            \"0\" : [ ]\n" +
                "          },\n" +
                "          \"rollover_info\" : { },\n" +
                "          \"system\" : false\n" +
                "        }\n" +
                "      },\n" +
                "      \"index-graveyard\" : {\n" +
                "        \"tombstones\" : [ ]\n" +
                "      }\n" +
                "    },\n" +
                "    \"routing_table\" : {\n" +
                "      \"indices\" : { }\n" +
                "    },\n" +
                "    \"routing_nodes\" : {\n" +
                "      \"unassigned\" : [ ],\n" +
                "      \"nodes\" : {\n" +
                "        \"node0\" : [ ]\n" +
                "      }\n" +
                "    }\n" +
                "  }\n" +
                "}", Strings.toString(builder));
        }
        // Case 2: explain=true adds the routing explanations section, and
        // metric=version,master_node restricts the state to those parts.
        {
            XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
            Map<String, String> params = new HashMap<>();
            params.put("explain", "true");
            params.put("metric", "version,master_node");
            clusterRerouteResponse.toXContent(builder, new ToXContent.MapParams(params));
            assertEquals("{\n" +
                "  \"acknowledged\" : true,\n" +
                "  \"state\" : {\n" +
                "    \"cluster_uuid\" : \"_na_\",\n" +
                "    \"version\" : 0,\n" +
                "    \"state_uuid\" : \"" + clusterState.stateUUID() + "\",\n" +
                "    \"master_node\" : \"node0\"\n" +
                "  },\n" +
                "  \"explanations\" : [\n" +
                "    {\n" +
                "      \"command\" : \"allocate_replica\",\n" +
                "      \"parameters\" : {\n" +
                "        \"index\" : \"index\",\n" +
                "        \"shard\" : 0,\n" +
                "        \"node\" : \"node0\"\n" +
                "      },\n" +
                "      \"decisions\" : [\n" +
                "        {\n" +
                "          \"decider\" : null,\n" +
                "          \"decision\" : \"YES\",\n" +
                "          \"explanation\" : \"none\"\n" +
                "        }\n" +
                "      ]\n" +
                "    }\n" +
                "  ]\n" +
                "}", Strings.toString(builder));
        }
        // Case 3: metric=metadata with settings_filter -- the filtered
        // settings (index.number*, index.version.created) are omitted from
        // the serialized index metadata.
        {
            XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
            Map<String, String> params = new HashMap<>();
            params.put("metric", "metadata");
            params.put("settings_filter", "index.number*,index.version.created");
            clusterRerouteResponse.toXContent(builder, new ToXContent.MapParams(params));
            assertEquals("{\n" +
                "  \"acknowledged\" : true,\n" +
                "  \"state\" : {\n" +
                "    \"cluster_uuid\" : \"_na_\",\n" +
                "    \"metadata\" : {\n" +
                "      \"cluster_uuid\" : \"_na_\",\n" +
                "      \"cluster_uuid_committed\" : false,\n" +
                "      \"cluster_coordination\" : {\n" +
                "        \"term\" : 0,\n" +
                "        \"last_committed_config\" : [ ],\n" +
                "        \"last_accepted_config\" : [ ],\n" +
                "        \"voting_config_exclusions\" : [ ]\n" +
                "      },\n" +
                "      \"templates\" : { },\n" +
                "      \"indices\" : {\n" +
                "        \"index\" : {\n" +
                "          \"version\" : 1,\n" +
                "          \"mapping_version\" : 1,\n" +
                "          \"settings_version\" : 1,\n" +
                "          \"aliases_version\" : 1,\n" +
                "          \"routing_num_shards\" : 1,\n" +
                "          \"state\" : \"open\",\n" +
                "          \"settings\" : {\n" +
                "            \"index\" : {\n" +
                "              \"max_script_fields\" : \"10\",\n" +
                "              \"shard\" : {\n" +
                "                \"check_on_startup\" : \"true\"\n" +
                "              }\n" +
                "            }\n" +
                "          },\n" +
                "          \"mappings\" : { },\n" +
                "          \"aliases\" : [ ],\n" +
                "          \"primary_terms\" : {\n" +
                "            \"0\" : 0\n" +
                "          },\n" +
                "          \"in_sync_allocations\" : {\n" +
                "            \"0\" : [ ]\n" +
                "          },\n" +
                "          \"rollover_info\" : { },\n" +
                "          \"system\" : false\n" +
                "        }\n" +
                "      },\n" +
                "      \"index-graveyard\" : {\n" +
                "        \"tombstones\" : [ ]\n" +
                "      }\n" +
                "    }\n" +
                "  }\n" +
                "}", Strings.toString(builder));
        }
    }
}
| gingerwizard/elasticsearch | server/src/test/java/org/elasticsearch/action/admin/cluster/reroute/ClusterRerouteResponseTests.java | Java | apache-2.0 | 12,147 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jakarta.servlet;
/**
 * Defines a general exception a servlet can throw when it encounters
 * difficulty.
 */
public class ServletException extends Exception {

    // Fixed serialization id so serialized instances remain compatible
    // across releases of this class.
    private static final long serialVersionUID = 1L;

    /**
     * Constructs a new servlet exception.
     */
    public ServletException() {
        super();
    }

    /**
     * Constructs a new servlet exception with the specified message. The
     * message can be written to the server log and/or displayed for the user.
     *
     * @param message
     *            a <code>String</code> specifying the text of the exception
     *            message
     */
    public ServletException(String message) {
        super(message);
    }

    /**
     * Constructs a new servlet exception when the servlet needs to throw an
     * exception and include a message about the "root cause" exception that
     * interfered with its normal operation, including a description message.
     *
     * @param message
     *            a <code>String</code> containing the text of the exception
     *            message
     * @param rootCause
     *            the <code>Throwable</code> exception that interfered with the
     *            servlet's normal operation, making this servlet exception
     *            necessary
     */
    public ServletException(String message, Throwable rootCause) {
        super(message, rootCause);
    }

    /**
     * Constructs a new servlet exception when the servlet needs to throw an
     * exception and include a message about the "root cause" exception that
     * interfered with its normal operation. The exception's message is based on
     * the localized message of the underlying exception.
     * <p>
     * This method calls the <code>getLocalizedMessage</code> method on the
     * <code>Throwable</code> exception to get a localized exception message.
     * When subclassing <code>ServletException</code>, this method can be
     * overridden to create an exception message designed for a specific locale.
     *
     * @param rootCause
     *            the <code>Throwable</code> exception that interfered with the
     *            servlet's normal operation, making the servlet exception
     *            necessary
     */
    public ServletException(Throwable rootCause) {
        super(rootCause);
    }

    /**
     * Returns the exception that caused this servlet exception.
     * Simply delegates to {@link Throwable#getCause()}.
     *
     * @return the <code>Throwable</code> that caused this servlet exception
     */
    public Throwable getRootCause() {
        return getCause();
    }
}
| apache/tomcat | java/jakarta/servlet/ServletException.java | Java | apache-2.0 | 3,381 |
/**
* @author Swagatam Mitra
*/
/*jslint vars: true, plusplus: true, devel: true, nomen: true, indent: 4, maxerr: 50 */
/*global define, document, console, brackets, $, Mustache */
define(function (require, exports, module) {
    "use strict";

    // Side-effect-only requires: nothing is exported from this module.
    // Presumably each resizer module registers its resize handle with the
    // designer when loaded -- verify against the resizer implementations.
    // NOTE(review): keep these as literal require("...") calls; AMD build
    // tools (r.js) discover dependencies by statically scanning the string
    // literals, so a dynamic loop would break the build.
    require("resizer/BottomCenterResizer");
    require("resizer/BottomLeftResizer");
    require("resizer/BottomRightResizer");
    require("resizer/TopCenterResizer");
    require("resizer/TopLeftResizer");
    require("resizer/TopRightResizer");
    require("resizer/MiddleLeftResizer");
    require("resizer/MiddleRightResizer");
});
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.filter.codec;
import java.net.SocketAddress;
import java.util.Queue;
import java.util.concurrent.Semaphore;
import org.apache.mina.core.buffer.IoBuffer;
import org.apache.mina.core.file.FileRegion;
import org.apache.mina.core.filterchain.IoFilter;
import org.apache.mina.core.filterchain.IoFilterAdapter;
import org.apache.mina.core.filterchain.IoFilterChain;
import org.apache.mina.core.future.DefaultWriteFuture;
import org.apache.mina.core.future.WriteFuture;
import org.apache.mina.core.session.AttributeKey;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.write.DefaultWriteRequest;
import org.apache.mina.core.write.NothingWrittenException;
import org.apache.mina.core.write.WriteRequest;
import org.apache.mina.core.write.WriteRequestWrapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* An {@link IoFilter} which translates binary or protocol specific data into
* message objects and vice versa using {@link ProtocolCodecFactory},
* {@link ProtocolEncoder}, or {@link ProtocolDecoder}.
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
* @org.apache.xbean.XBean
*/
public class ProtocolCodecFilter extends IoFilterAdapter {
/** A logger for this class */
private static final Logger LOGGER = LoggerFactory.getLogger(ProtocolCodecFilter.class);
private static final Class<?>[] EMPTY_PARAMS = new Class[0];
private static final IoBuffer EMPTY_BUFFER = IoBuffer.wrap(new byte[0]);
private static final AttributeKey ENCODER = new AttributeKey(ProtocolCodecFilter.class, "encoder");
private static final AttributeKey DECODER = new AttributeKey(ProtocolCodecFilter.class, "decoder");
private static final AttributeKey DECODER_OUT = new AttributeKey(ProtocolCodecFilter.class, "decoderOut");
private static final AttributeKey ENCODER_OUT = new AttributeKey(ProtocolCodecFilter.class, "encoderOut");
/** The factory responsible for creating the encoder and decoder */
private final ProtocolCodecFactory factory;
private final Semaphore lock = new Semaphore(1, true);
/**
* Creates a new instance of ProtocolCodecFilter, associating a factory
* for the creation of the encoder and decoder.
*
* @param factory The associated factory
*/
public ProtocolCodecFilter(ProtocolCodecFactory factory) {
if (factory == null) {
throw new IllegalArgumentException("factory");
}
this.factory = factory;
}
/**
* Creates a new instance of ProtocolCodecFilter, without any factory.
* The encoder/decoder factory will be created as an inner class, using
* the two parameters (encoder and decoder).
*
* @param encoder The class responsible for encoding the message
* @param decoder The class responsible for decoding the message
*/
public ProtocolCodecFilter(final ProtocolEncoder encoder, final ProtocolDecoder decoder) {
if (encoder == null) {
throw new IllegalArgumentException("encoder");
}
if (decoder == null) {
throw new IllegalArgumentException("decoder");
}
// Create the inner Factory based on the two parameters
this.factory = new ProtocolCodecFactory() {
public ProtocolEncoder getEncoder(IoSession session) {
return encoder;
}
public ProtocolDecoder getDecoder(IoSession session) {
return decoder;
}
};
}
/**
* Creates a new instance of ProtocolCodecFilter, without any factory.
* The encoder/decoder factory will be created as an inner class, using
* the two parameters (encoder and decoder), which are class names. Instances
* for those classes will be created in this constructor.
*
* @param encoderClass The class responsible for encoding the message
* @param decoderClass The class responsible for decoding the message
*/
public ProtocolCodecFilter(final Class<? extends ProtocolEncoder> encoderClass,
final Class<? extends ProtocolDecoder> decoderClass) {
if (encoderClass == null) {
throw new IllegalArgumentException("encoderClass");
}
if (decoderClass == null) {
throw new IllegalArgumentException("decoderClass");
}
if (!ProtocolEncoder.class.isAssignableFrom(encoderClass)) {
throw new IllegalArgumentException("encoderClass: " + encoderClass.getName());
}
if (!ProtocolDecoder.class.isAssignableFrom(decoderClass)) {
throw new IllegalArgumentException("decoderClass: " + decoderClass.getName());
}
try {
encoderClass.getConstructor(EMPTY_PARAMS);
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("encoderClass doesn't have a public default constructor.");
}
try {
decoderClass.getConstructor(EMPTY_PARAMS);
} catch (NoSuchMethodException e) {
throw new IllegalArgumentException("decoderClass doesn't have a public default constructor.");
}
final ProtocolEncoder encoder;
try {
encoder = encoderClass.newInstance();
} catch (Exception e) {
throw new IllegalArgumentException("encoderClass cannot be initialized");
}
final ProtocolDecoder decoder;
try {
decoder = decoderClass.newInstance();
} catch (Exception e) {
throw new IllegalArgumentException("decoderClass cannot be initialized");
}
// Create the inner factory based on the two parameters.
this.factory = new ProtocolCodecFactory() {
public ProtocolEncoder getEncoder(IoSession session) throws Exception {
return encoder;
}
public ProtocolDecoder getDecoder(IoSession session) throws Exception {
return decoder;
}
};
}
    /**
     * Get the encoder instance from a given session.
     *
     * @param session The associated session we will get the encoder from
     * @return The encoder instance, if any
     */
    public ProtocolEncoder getEncoder(IoSession session) {
        // NOTE(review): nothing in this class ever stores a value under the
        // ENCODER attribute (filterWrite asks the factory directly), so this
        // returns null unless external code sets it -- verify intended usage.
        return (ProtocolEncoder) session.getAttribute(ENCODER);
    }
@Override
public void onPreAdd(IoFilterChain parent, String name, NextFilter nextFilter) throws Exception {
if (parent.contains(this)) {
throw new IllegalArgumentException(
"You can't add the same filter instance more than once. Create another instance and add it.");
}
}
@Override
public void onPostRemove(IoFilterChain parent, String name, NextFilter nextFilter) throws Exception {
// Clean everything
disposeCodec(parent.getSession());
}
    /**
     * Process the incoming message, calling the session decoder. As the incoming
     * buffer might contains more than one messages, we have to loop until the decoder
     * throws an exception.
     *
     *  while ( buffer not empty )
     *    try
     *      decode ( buffer )
     *    catch
     *      break;
     *
     * @param nextFilter the next filter, receiving each decoded message
     * @param session the session the data was read from
     * @param message the incoming data; only {@link IoBuffer}s are decoded,
     *                any other message type is forwarded untouched
     */
    @Override
    public void messageReceived(NextFilter nextFilter, IoSession session, Object message) throws Exception {
        LOGGER.debug("Processing a MESSAGE_RECEIVED for session {}", session.getId());
        if (!(message instanceof IoBuffer)) {
            nextFilter.messageReceived(session, message);
            return;
        }
        IoBuffer in = (IoBuffer) message;
        ProtocolDecoder decoder = factory.getDecoder(session);
        ProtocolDecoderOutput decoderOut = getDecoderOut(session, nextFilter);
        // Loop until we don't have anymore byte in the buffer,
        // or until the decoder throws an unrecoverable exception or
        // can't decoder a message, because there are not enough
        // data in the buffer
        while (in.hasRemaining()) {
            // Remember the read position so we can detect whether the decoder
            // consumed anything, and so the hex dump covers the failed bytes.
            int oldPos = in.position();
            try {
                // The fair semaphore serializes decode calls; a decoder never
                // runs concurrently for this filter instance.
                lock.acquire();
                // Call the decoder with the read bytes
                decoder.decode(session, in, decoderOut);
                // Finish decoding if no exception was thrown.
                decoderOut.flush(nextFilter, session);
            } catch (Exception e) {
                ProtocolDecoderException pde;
                if (e instanceof ProtocolDecoderException) {
                    pde = (ProtocolDecoderException) e;
                } else {
                    pde = new ProtocolDecoderException(e);
                }
                if (pde.getHexdump() == null) {
                    // Generate a message hex dump
                    int curPos = in.position();
                    in.position(oldPos);
                    pde.setHexdump(in.getHexDump());
                    in.position(curPos);
                }
                // Fire the exceptionCaught event.
                // Messages decoded before the failure are delivered first.
                decoderOut.flush(nextFilter, session);
                nextFilter.exceptionCaught(session, pde);
                // Retry only if the type of the caught exception is
                // recoverable and the buffer position has changed.
                // We check buffer position additionally to prevent an
                // infinite loop.
                if (!(e instanceof RecoverableProtocolDecoderException) || (in.position() == oldPos)) {
                    break;
                }
            } finally {
                lock.release();
            }
        }
    }
@Override
public void messageSent(NextFilter nextFilter, IoSession session, WriteRequest writeRequest) throws Exception {
if (writeRequest instanceof EncodedWriteRequest) {
return;
}
if (writeRequest instanceof MessageWriteRequest) {
MessageWriteRequest wrappedRequest = (MessageWriteRequest) writeRequest;
nextFilter.messageSent(session, wrappedRequest.getParentRequest());
} else {
nextFilter.messageSent(session, writeRequest);
}
}
    /**
     * Encodes the message carried by the write request and pushes each encoded
     * fragment down the chain, followed by a marker request (with an empty
     * message) used to trigger the messageSent event for the original message.
     *
     * @param nextFilter the next filter in the chain
     * @param session the session the message is written to
     * @param writeRequest the request carrying the message to encode
     * @throws ProtocolEncoderException if the encoder is missing or fails
     */
    @Override
    public void filterWrite(NextFilter nextFilter, IoSession session, WriteRequest writeRequest) throws Exception {
        Object message = writeRequest.getMessage();
        // Bypass the encoding if the message is contained in a IoBuffer,
        // as it has already been encoded before
        if ((message instanceof IoBuffer) || (message instanceof FileRegion)) {
            nextFilter.filterWrite(session, writeRequest);
            return;
        }
        // Get the encoder in the session
        ProtocolEncoder encoder = factory.getEncoder(session);
        ProtocolEncoderOutput encoderOut = getEncoderOut(session, nextFilter, writeRequest);
        if (encoder == null) {
            throw new ProtocolEncoderException("The encoder is null for the session " + session);
        }
        if (encoderOut == null) {
            throw new ProtocolEncoderException("The encoderOut is null for the session " + session);
        }
        try {
            // Now we can try to encode the response
            encoder.encode(session, message, encoderOut);
            // Send it directly
            Queue<Object> bufferQueue = ((AbstractProtocolEncoderOutput) encoderOut).getMessageQueue();
            // Write all the encoded messages now
            while (!bufferQueue.isEmpty()) {
                Object encodedMessage = bufferQueue.poll();
                if (encodedMessage == null) {
                    break;
                }
                // Flush only when the buffer has remaining.
                if (!(encodedMessage instanceof IoBuffer) || ((IoBuffer) encodedMessage).hasRemaining()) {
                    SocketAddress destination = writeRequest.getDestination();
                    WriteRequest encodedWriteRequest = new EncodedWriteRequest(encodedMessage, null, destination);
                    nextFilter.filterWrite(session, encodedWriteRequest);
                }
            }
            // Call the next filter
            // (empty-bodied marker request; see MessageWriteRequest below)
            nextFilter.filterWrite(session, new MessageWriteRequest(writeRequest));
        } catch (Exception e) {
            ProtocolEncoderException pee;
            // Generate the correct exception
            if (e instanceof ProtocolEncoderException) {
                pee = (ProtocolEncoderException) e;
            } else {
                pee = new ProtocolEncoderException(e);
            }
            throw pee;
        }
    }
    /**
     * Lets the decoder emit any final messages for the closing session, then
     * disposes the codec resources and forwards the close event. Note the
     * ordering: finishDecode runs first, disposal happens in the finally
     * block even if decoding fails, and remaining decoded messages are still
     * flushed to the chain.
     *
     * @param nextFilter the next filter in the chain
     * @param session the session being closed
     * @throws ProtocolDecoderException if finishDecode fails
     */
    @Override
    public void sessionClosed(NextFilter nextFilter, IoSession session) throws Exception {
        // Call finishDecode() first when a connection is closed.
        ProtocolDecoder decoder = factory.getDecoder(session);
        ProtocolDecoderOutput decoderOut = getDecoderOut(session, nextFilter);
        try {
            decoder.finishDecode(session, decoderOut);
        } catch (Exception e) {
            ProtocolDecoderException pde;
            if (e instanceof ProtocolDecoderException) {
                pde = (ProtocolDecoderException) e;
            } else {
                pde = new ProtocolDecoderException(e);
            }
            throw pde;
        } finally {
            // Dispose everything
            disposeCodec(session);
            decoderOut.flush(nextFilter, session);
        }
        // Call the next filter
        nextFilter.sessionClosed(session);
    }
    /**
     * Marker write request carrying an already-encoded fragment. isEncoded()
     * returning true lets messageSent() suppress per-fragment notifications.
     */
    private static class EncodedWriteRequest extends DefaultWriteRequest {
        public EncodedWriteRequest(Object encodedMessage, WriteFuture future, SocketAddress destination) {
            super(encodedMessage, future, destination);
        }

        public boolean isEncoded() {
            return true;
        }
    }
    /**
     * Wraps the user's original write request while presenting an empty
     * buffer as its message, so nothing extra goes on the wire but the
     * messageSent event can later be fired for the original request.
     */
    private static class MessageWriteRequest extends WriteRequestWrapper {
        public MessageWriteRequest(WriteRequest writeRequest) {
            super(writeRequest);
        }

        @Override
        public Object getMessage() {
            // Deliberately empty: the real payload was sent as
            // EncodedWriteRequest fragments.
            return EMPTY_BUFFER;
        }

        @Override
        public String toString() {
            return "MessageWriteRequest, parent : " + super.toString();
        }
    }
private static class ProtocolDecoderOutputImpl extends AbstractProtocolDecoderOutput {
public ProtocolDecoderOutputImpl() {
// Do nothing
}
public void flush(NextFilter nextFilter, IoSession session) {
Queue<Object> messageQueue = getMessageQueue();
while (!messageQueue.isEmpty()) {
nextFilter.messageReceived(session, messageQueue.poll());
}
}
}
    /**
     * Encoder callback that queues encoded messages and writes them to the
     * chain when flushed.
     */
    private static class ProtocolEncoderOutputImpl extends AbstractProtocolEncoderOutput {
        private final IoSession session;

        private final NextFilter nextFilter;

        /** The WriteRequest destination */
        private final SocketAddress destination;

        // NOTE(review): instances are cached per session by getEncoderOut(),
        // so the destination captured here belongs to the *first* write
        // request on the session; later writes to a different destination
        // (connectionless transports) would reuse it -- verify intended.
        public ProtocolEncoderOutputImpl(IoSession session, NextFilter nextFilter, WriteRequest writeRequest) {
            this.session = session;
            this.nextFilter = nextFilter;
            // Only store the destination, not the full WriteRequest.
            destination = writeRequest.getDestination();
        }

        public WriteFuture flush() {
            Queue<Object> bufferQueue = getMessageQueue();
            WriteFuture future = null;
            while (!bufferQueue.isEmpty()) {
                Object encodedMessage = bufferQueue.poll();
                if (encodedMessage == null) {
                    break;
                }
                // Flush only when the buffer has remaining.
                if (!(encodedMessage instanceof IoBuffer) || ((IoBuffer) encodedMessage).hasRemaining()) {
                    future = new DefaultWriteFuture(session);
                    nextFilter.filterWrite(session, new EncodedWriteRequest(encodedMessage, future, destination));
                }
            }
            if (future == null) {
                // Creates an empty writeRequest containing the destination
                WriteRequest writeRequest = new DefaultWriteRequest(
                        DefaultWriteRequest.EMPTY_MESSAGE, null, destination);
                future = DefaultWriteFuture.newNotWrittenFuture(session, new NothingWrittenException(writeRequest));
            }
            return future;
        }
    }
//----------- Helper methods ---------------------------------------------
/**
* Dispose the encoder, decoder, and the callback for the decoded
* messages.
*/
private void disposeCodec(IoSession session) {
// We just remove the two instances of encoder/decoder to release resources
// from the session
disposeEncoder(session);
disposeDecoder(session);
// We also remove the callback
disposeDecoderOut(session);
}
/**
* Dispose the encoder, removing its instance from the
* session's attributes, and calling the associated
* dispose method.
*/
private void disposeEncoder(IoSession session) {
ProtocolEncoder encoder = (ProtocolEncoder) session.removeAttribute(ENCODER);
if (encoder == null) {
return;
}
try {
encoder.dispose(session);
} catch (Exception e) {
LOGGER.warn("Failed to dispose: " + encoder.getClass().getName() + " (" + encoder + ')');
}
}
/**
* Dispose the decoder, removing its instance from the
* session's attributes, and calling the associated
* dispose method.
*/
private void disposeDecoder(IoSession session) {
ProtocolDecoder decoder = (ProtocolDecoder) session.removeAttribute(DECODER);
if (decoder == null) {
return;
}
try {
decoder.dispose(session);
} catch (Exception e) {
LOGGER.warn("Failed to dispose: " + decoder.getClass().getName() + " (" + decoder + ')');
}
}
/**
* Return a reference to the decoder callback. If it's not already created
* and stored into the session, we create a new instance.
*/
private ProtocolDecoderOutput getDecoderOut(IoSession session, NextFilter nextFilter) {
ProtocolDecoderOutput out = (ProtocolDecoderOutput) session.getAttribute(DECODER_OUT);
if (out == null) {
// Create a new instance, and stores it into the session
out = new ProtocolDecoderOutputImpl();
session.setAttribute(DECODER_OUT, out);
}
return out;
}
private ProtocolEncoderOutput getEncoderOut(IoSession session, NextFilter nextFilter, WriteRequest writeRequest) {
ProtocolEncoderOutput out = (ProtocolEncoderOutput) session.getAttribute(ENCODER_OUT);
if (out == null) {
// Create a new instance, and stores it into the session
out = new ProtocolEncoderOutputImpl(session, nextFilter, writeRequest);
session.setAttribute(ENCODER_OUT, out);
}
return out;
}
/**
* Remove the decoder callback from the session's attributes.
*/
private void disposeDecoderOut(IoSession session) {
session.removeAttribute(DECODER_OUT);
}
}
| zuoyebushiwo/apache-mina-2.0.9 | src/mina-core/src/main/java/org/apache/mina/filter/codec/ProtocolCodecFilter.java | Java | apache-2.0 | 20,118 |
import Component from './Component';
import XAxis from './XAxis';
import YAxis from './YAxis';
import Globals from '../../Globals';
import ConfidenceBand from './ConfidenceBand';
import {
map
} from 'd3';
class Statistics extends Component {

    private x: XAxis;
    private y: YAxis;

    /**
     * statistics configuration divided by 'type'
     * type -> ('confidenceBand', ..)
     * @private
     * @memberof Statistics
     */
    private statisticsConfig: { [type: string]: any[] } = {};

    /**
     * An array of Type of statistics component extracted from statisticsConfig
     * type -> ['confidenceBand']
     * @private
     * @memberof Statistics
     */
    private statisticsType: string[] = [];

    /**
     * Confidence band component
     * @private
     * @memberof Statistics
     */
    private confidenceBand: ConfidenceBand;

    // Invoked after a component update so every component added to the
    // container can run its transition().
    private statisticsCallback: Function; // call transition() in all components added to container

    constructor(x: XAxis, y: YAxis, statisticsCallback: Function) {
        super();
        this.x = x;
        this.y = y;
        this.statisticsCallback = statisticsCallback;
    }

    /**
     * Reads the 'statistics' entries from the configuration, groups them by
     * type and instantiates one subsidiary component per type.
     */
    public render() {
        let statistics = this.config.get('statistics');

        if (!statistics) {
            return;
        }

        this.svg.append('g').attr('class', 'statistics');

        // forEach (not map): these loops only mutate the per-type index;
        // the array returned by map was always discarded.
        statistics.forEach((s: any) => {
            if (this.statisticsType.indexOf(s.type) === -1) {
                this.statisticsType.push(s.type);
                this.statisticsConfig[s.type] = [];
            }
            this.statisticsConfig[s.type].push(s);
        });

        this.statisticsType.forEach((type: string) => {
            switch (type) {
                case 'confidenceBand':
                    // TODO discuss new design pattern of subsidiary component for statistics
                    this.confidenceBand = new ConfidenceBand(this.x, this.y, this.statisticsConfig[type]);
                    this.setComponent(this.confidenceBand);
                    break;
            }
        });
    }

    /**
     * Propagates new data to every instantiated statistics component.
     */
    public update(data: any[]) {
        if (!this.statisticsConfig || !this.statisticsType) {
            return;
        }
        this.statisticsType.forEach((type: string) => {
            switch (type) {
                case 'confidenceBand':
                    this.updateComponent(this.confidenceBand, data);
                    break;
            }
        });
    }

    // Wires a subsidiary component to this container's config/svg and draws it.
    private setComponent(component: Component) {
        component.configure(this.config, this.svg);
        component.render();
    }

    // Updates a subsidiary component, then notifies the container callback.
    private updateComponent(component: Component, data: any[]) {
        component.update(data);
        this.statisticsCallback();
    }

    public transition() {
        if (this.confidenceBand) {
            this.confidenceBand.transition();
        }
    }

    // Clearing is an update with an empty dataset.
    public clear() {
        this.update([]);
    }
}
| proteus-h2020/proteic | src/svg/components/Statistics.ts | TypeScript | apache-2.0 | 2,900 |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Iap.V1.Snippets
{
// [START iap_v1_generated_IdentityAwareProxyAdminService_GetIapSettings_async]
using Google.Cloud.Iap.V1;
using System.Threading.Tasks;
    public sealed partial class GeneratedIdentityAwareProxyAdminServiceClientSnippets
    {
        /// <summary>Snippet for GetIapSettingsAsync</summary>
        /// <remarks>
        /// This snippet has been automatically generated for illustrative purposes only.
        /// It may require modifications to work in your environment.
        /// </remarks>
        public async Task GetIapSettingsRequestObjectAsync()
        {
            // Create client
            IdentityAwareProxyAdminServiceClient identityAwareProxyAdminServiceClient = await IdentityAwareProxyAdminServiceClient.CreateAsync();
            // Initialize request argument(s)
            // NOTE: Name is left empty in this generated sample; set it to the
            // IAP settings resource to read before running.
            GetIapSettingsRequest request = new GetIapSettingsRequest { Name = "", };
            // Make the request
            IapSettings response = await identityAwareProxyAdminServiceClient.GetIapSettingsAsync(request);
        }
    }
}
| googleapis/google-cloud-dotnet | apis/Google.Cloud.Iap.V1/Google.Cloud.Iap.V1.GeneratedSnippets/IdentityAwareProxyAdminServiceClient.GetIapSettingsRequestObjectAsyncSnippet.g.cs | C# | apache-2.0 | 1,791 |
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#include <aws/ec2/model/Subnet.h>
#include <aws/core/utils/xml/XmlSerializer.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/memory/stl/AWSStringStream.h>
#include <utility>
using namespace Aws::Utils::Xml;
using namespace Aws::Utils;
namespace Aws
{
namespace EC2
{
namespace Model
{
// Default constructor: primitives are zeroed and every "HasBeenSet" flag
// starts false, so OutputToStream() serializes only fields explicitly set.
Subnet::Subnet() : 
    m_availabilityZoneHasBeenSet(false),
    m_availableIpAddressCount(0),
    m_availableIpAddressCountHasBeenSet(false),
    m_cidrBlockHasBeenSet(false),
    m_defaultForAz(false),
    m_defaultForAzHasBeenSet(false),
    m_mapPublicIpOnLaunch(false),
    m_mapPublicIpOnLaunchHasBeenSet(false),
    m_state(SubnetState::NOT_SET),
    m_stateHasBeenSet(false),
    m_subnetIdHasBeenSet(false),
    m_vpcIdHasBeenSet(false),
    m_assignIpv6AddressOnCreation(false),
    m_assignIpv6AddressOnCreationHasBeenSet(false),
    m_ipv6CidrBlockAssociationSetHasBeenSet(false),
    m_tagsHasBeenSet(false)
{
}
// XML constructor: establishes the same defaults as the default constructor,
// then parses the node via operator=(). Delegating (C++11) removes the
// duplicated 16-entry initializer list, which could silently drift out of
// sync with the default constructor.
Subnet::Subnet(const XmlNode& xmlNode) : 
    Subnet()
{
  *this = xmlNode;
}
// Populates this object from an EC2 "subnet" XML element. Each optional
// child element is read only when present, and the matching "HasBeenSet"
// flag is raised so serialization round-trips correctly. List-valued
// members (ipv6CidrBlockAssociationSet, tagSet) iterate their "item" nodes.
Subnet& Subnet::operator =(const XmlNode& xmlNode)
{
  XmlNode resultNode = xmlNode;
  if(!resultNode.IsNull())
  {
    XmlNode availabilityZoneNode = resultNode.FirstChild("availabilityZone");
    if(!availabilityZoneNode.IsNull())
    {
      m_availabilityZone = StringUtils::Trim(availabilityZoneNode.GetText().c_str());
      m_availabilityZoneHasBeenSet = true;
    }
    XmlNode availableIpAddressCountNode = resultNode.FirstChild("availableIpAddressCount");
    if(!availableIpAddressCountNode.IsNull())
    {
      m_availableIpAddressCount = StringUtils::ConvertToInt32(StringUtils::Trim(availableIpAddressCountNode.GetText().c_str()).c_str());
      m_availableIpAddressCountHasBeenSet = true;
    }
    XmlNode cidrBlockNode = resultNode.FirstChild("cidrBlock");
    if(!cidrBlockNode.IsNull())
    {
      m_cidrBlock = StringUtils::Trim(cidrBlockNode.GetText().c_str());
      m_cidrBlockHasBeenSet = true;
    }
    XmlNode defaultForAzNode = resultNode.FirstChild("defaultForAz");
    if(!defaultForAzNode.IsNull())
    {
      m_defaultForAz = StringUtils::ConvertToBool(StringUtils::Trim(defaultForAzNode.GetText().c_str()).c_str());
      m_defaultForAzHasBeenSet = true;
    }
    XmlNode mapPublicIpOnLaunchNode = resultNode.FirstChild("mapPublicIpOnLaunch");
    if(!mapPublicIpOnLaunchNode.IsNull())
    {
      m_mapPublicIpOnLaunch = StringUtils::ConvertToBool(StringUtils::Trim(mapPublicIpOnLaunchNode.GetText().c_str()).c_str());
      m_mapPublicIpOnLaunchHasBeenSet = true;
    }
    XmlNode stateNode = resultNode.FirstChild("state");
    if(!stateNode.IsNull())
    {
      m_state = SubnetStateMapper::GetSubnetStateForName(StringUtils::Trim(stateNode.GetText().c_str()).c_str());
      m_stateHasBeenSet = true;
    }
    XmlNode subnetIdNode = resultNode.FirstChild("subnetId");
    if(!subnetIdNode.IsNull())
    {
      m_subnetId = StringUtils::Trim(subnetIdNode.GetText().c_str());
      m_subnetIdHasBeenSet = true;
    }
    XmlNode vpcIdNode = resultNode.FirstChild("vpcId");
    if(!vpcIdNode.IsNull())
    {
      m_vpcId = StringUtils::Trim(vpcIdNode.GetText().c_str());
      m_vpcIdHasBeenSet = true;
    }
    XmlNode assignIpv6AddressOnCreationNode = resultNode.FirstChild("assignIpv6AddressOnCreation");
    if(!assignIpv6AddressOnCreationNode.IsNull())
    {
      m_assignIpv6AddressOnCreation = StringUtils::ConvertToBool(StringUtils::Trim(assignIpv6AddressOnCreationNode.GetText().c_str()).c_str());
      m_assignIpv6AddressOnCreationHasBeenSet = true;
    }
    XmlNode ipv6CidrBlockAssociationSetNode = resultNode.FirstChild("ipv6CidrBlockAssociationSet");
    if(!ipv6CidrBlockAssociationSetNode.IsNull())
    {
      XmlNode ipv6CidrBlockAssociationSetMember = ipv6CidrBlockAssociationSetNode.FirstChild("item");
      while(!ipv6CidrBlockAssociationSetMember.IsNull())
      {
        m_ipv6CidrBlockAssociationSet.push_back(ipv6CidrBlockAssociationSetMember);
        ipv6CidrBlockAssociationSetMember = ipv6CidrBlockAssociationSetMember.NextNode("item");
      }
      m_ipv6CidrBlockAssociationSetHasBeenSet = true;
    }
    XmlNode tagsNode = resultNode.FirstChild("tagSet");
    if(!tagsNode.IsNull())
    {
      XmlNode tagsMember = tagsNode.FirstChild("item");
      while(!tagsMember.IsNull())
      {
        m_tags.push_back(tagsMember);
        tagsMember = tagsMember.NextNode("item");
      }
      m_tagsHasBeenSet = true;
    }
  }
  return *this;
}
// Serializes the set fields as query-string parameters under an indexed
// prefix ("<location><index><locationValue>.Field=value&"), used when this
// object is an element of a list. Only fields whose "HasBeenSet" flag is
// true are emitted; list members get 1-based sub-indices.
void Subnet::OutputToStream(Aws::OStream& oStream, const char* location, unsigned index, const char* locationValue) const
{
  if(m_availabilityZoneHasBeenSet)
  {
      oStream << location << index << locationValue << ".AvailabilityZone=" << StringUtils::URLEncode(m_availabilityZone.c_str()) << "&";
  }
  if(m_availableIpAddressCountHasBeenSet)
  {
      oStream << location << index << locationValue << ".AvailableIpAddressCount=" << m_availableIpAddressCount << "&";
  }
  if(m_cidrBlockHasBeenSet)
  {
      oStream << location << index << locationValue << ".CidrBlock=" << StringUtils::URLEncode(m_cidrBlock.c_str()) << "&";
  }
  if(m_defaultForAzHasBeenSet)
  {
      oStream << location << index << locationValue << ".DefaultForAz=" << std::boolalpha << m_defaultForAz << "&";
  }
  if(m_mapPublicIpOnLaunchHasBeenSet)
  {
      oStream << location << index << locationValue << ".MapPublicIpOnLaunch=" << std::boolalpha << m_mapPublicIpOnLaunch << "&";
  }
  if(m_stateHasBeenSet)
  {
      oStream << location << index << locationValue << ".State=" << SubnetStateMapper::GetNameForSubnetState(m_state) << "&";
  }
  if(m_subnetIdHasBeenSet)
  {
      oStream << location << index << locationValue << ".SubnetId=" << StringUtils::URLEncode(m_subnetId.c_str()) << "&";
  }
  if(m_vpcIdHasBeenSet)
  {
      oStream << location << index << locationValue << ".VpcId=" << StringUtils::URLEncode(m_vpcId.c_str()) << "&";
  }
  if(m_assignIpv6AddressOnCreationHasBeenSet)
  {
      oStream << location << index << locationValue << ".AssignIpv6AddressOnCreation=" << std::boolalpha << m_assignIpv6AddressOnCreation << "&";
  }
  if(m_ipv6CidrBlockAssociationSetHasBeenSet)
  {
      unsigned ipv6CidrBlockAssociationSetIdx = 1;
      for(auto& item : m_ipv6CidrBlockAssociationSet)
      {
        Aws::StringStream ipv6CidrBlockAssociationSetSs;
        ipv6CidrBlockAssociationSetSs << location << index << locationValue << ".Ipv6CidrBlockAssociationSet." << ipv6CidrBlockAssociationSetIdx++;
        item.OutputToStream(oStream, ipv6CidrBlockAssociationSetSs.str().c_str());
      }
  }
  if(m_tagsHasBeenSet)
  {
      unsigned tagsIdx = 1;
      for(auto& item : m_tags)
      {
        Aws::StringStream tagsSs;
        tagsSs << location << index << locationValue << ".TagSet." << tagsIdx++;
        item.OutputToStream(oStream, tagsSs.str().c_str());
      }
  }
}
// Serializes the set fields as query-string parameters under a plain
// location prefix ("<location>.Field=value&"), used when this object is not
// part of a list. Same field order and formatting as the indexed overload.
void Subnet::OutputToStream(Aws::OStream& oStream, const char* location) const
{
  if(m_availabilityZoneHasBeenSet)
  {
      oStream << location << ".AvailabilityZone=" << StringUtils::URLEncode(m_availabilityZone.c_str()) << "&";
  }
  if(m_availableIpAddressCountHasBeenSet)
  {
      oStream << location << ".AvailableIpAddressCount=" << m_availableIpAddressCount << "&";
  }
  if(m_cidrBlockHasBeenSet)
  {
      oStream << location << ".CidrBlock=" << StringUtils::URLEncode(m_cidrBlock.c_str()) << "&";
  }
  if(m_defaultForAzHasBeenSet)
  {
      oStream << location << ".DefaultForAz=" << std::boolalpha << m_defaultForAz << "&";
  }
  if(m_mapPublicIpOnLaunchHasBeenSet)
  {
      oStream << location << ".MapPublicIpOnLaunch=" << std::boolalpha << m_mapPublicIpOnLaunch << "&";
  }
  if(m_stateHasBeenSet)
  {
      oStream << location << ".State=" << SubnetStateMapper::GetNameForSubnetState(m_state) << "&";
  }
  if(m_subnetIdHasBeenSet)
  {
      oStream << location << ".SubnetId=" << StringUtils::URLEncode(m_subnetId.c_str()) << "&";
  }
  if(m_vpcIdHasBeenSet)
  {
      oStream << location << ".VpcId=" << StringUtils::URLEncode(m_vpcId.c_str()) << "&";
  }
  if(m_assignIpv6AddressOnCreationHasBeenSet)
  {
      oStream << location << ".AssignIpv6AddressOnCreation=" << std::boolalpha << m_assignIpv6AddressOnCreation << "&";
  }
  if(m_ipv6CidrBlockAssociationSetHasBeenSet)
  {
      unsigned ipv6CidrBlockAssociationSetIdx = 1;
      for(auto& item : m_ipv6CidrBlockAssociationSet)
      {
        Aws::StringStream ipv6CidrBlockAssociationSetSs;
        ipv6CidrBlockAssociationSetSs << location << ".Ipv6CidrBlockAssociationSet." << ipv6CidrBlockAssociationSetIdx++;
        item.OutputToStream(oStream, ipv6CidrBlockAssociationSetSs.str().c_str());
      }
  }
  if(m_tagsHasBeenSet)
  {
      unsigned tagsIdx = 1;
      for(auto& item : m_tags)
      {
        Aws::StringStream tagsSs;
        tagsSs << location << ".TagSet." << tagsIdx++;
        item.OutputToStream(oStream, tagsSs.str().c_str());
      }
  }
}
} // namespace Model
} // namespace EC2
} // namespace Aws
| svagionitis/aws-sdk-cpp | aws-cpp-sdk-ec2/source/model/Subnet.cpp | C++ | apache-2.0 | 10,104 |
using Xamarin.Forms;
namespace HelloPageDialog.Views
{
public class PageDialogTabs : TabbedPage
{
public PageDialogTabs()
{
SetBinding(TitleProperty, new Binding("CurrentPage.Title", source: this));
}
}
}
| karlfl/Prism | e2e/Forms/src/HelloPageDialog/Views/PageDialogTabs.cs | C# | apache-2.0 | 257 |
package lucene4ir.predictor.pre;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Query;
import java.io.IOException;
/**
 * Created by Harry Scells on 28/8/17.
 * Maximum Collection Query Similarity MaxSCQ
 */
public class MaxSCQQPPredictor extends SumSCQQPPredictor {

    public MaxSCQQPPredictor(IndexReader ir) {
        super(ir);
    }

    public String name() {
        return "MaxSCQ";
    }

    /**
     * Scores the query as the maximum SCQ value over its terms. Lucene
     * renders each clause as "field:term"; clauses that do not split into
     * exactly two parts are ignored, matching the original behaviour.
     */
    @Override
    public double scoreQuery(String qno, Query q) {
        double maxScore = 0.0;

        for (String clause : q.toString().split(" ")) {
            String[] fieldAndTerm = clause.split(":");

            if (fieldAndTerm.length != 2) {
                continue;
            }

            try {
                maxScore = Math.max(maxScore, calculateSCQ(fieldAndTerm[1]));
            } catch (IOException e) {
                e.printStackTrace();
            }
        }

        return maxScore;
    }
}
| lucene4ir/lucene4ir | src/main/java/lucene4ir/predictor/pre/MaxSCQQPPredictor.java | Java | apache-2.0 | 1,090 |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.laf.win10;
import com.intellij.ide.ui.laf.darcula.DarculaUIUtil;
import com.intellij.ide.ui.laf.darcula.ui.DarculaPasswordFieldUI;
import javax.swing.*;
import javax.swing.plaf.ComponentUI;
import javax.swing.text.JTextComponent;
import java.awt.*;
import java.awt.event.MouseListener;
import static com.intellij.laf.win10.WinIntelliJTextBorder.MINIMUM_HEIGHT;
import static com.intellij.laf.win10.WinIntelliJTextFieldUI.HOVER_PROPERTY;
public final class WinIntelliJPasswordFieldUI extends DarculaPasswordFieldUI {
  // Set in installListeners(); toggles the HOVER_PROPERTY client property as
  // the mouse enters/leaves so the Win10 border can paint its hover state.
  private MouseListener hoverListener;
  // Swing UI-delegate factory; invoked reflectively by the look-and-feel.
  @SuppressWarnings({"MethodOverridesStaticMethodOfSuperclass", "UnusedDeclaration"})
  public static ComponentUI createUI(JComponent c) {
    return new WinIntelliJPasswordFieldUI();
  }
  @Override
  public void installListeners() {
    super.installListeners();
    JTextComponent passwordField = getComponent();
    hoverListener = new DarculaUIUtil.MouseHoverPropertyTrigger(passwordField, HOVER_PROPERTY);
    passwordField.addMouseListener(hoverListener);
  }
  @Override
  public void uninstallListeners() {
    super.uninstallListeners();
    JTextComponent passwordField = getComponent();
    // Guard: uninstall can run even if installListeners never registered one.
    if (hoverListener != null) {
      passwordField.removeMouseListener(hoverListener);
    }
  }
  @Override
  protected void paintBackground(Graphics g) {
    JTextComponent c = getComponent();
    // Paint on a copy of the Graphics so state changes never leak to callers.
    Graphics2D g2 = (Graphics2D)g.create();
    try {
      // For opaque fields, clear with the parent's background first so the
      // field blends into the surrounding container.
      Container parent = c.getParent();
      if (c.isOpaque() && parent != null) {
        g2.setColor(parent.getBackground());
        g2.fillRect(0, 0, c.getWidth(), c.getHeight());
      }
      // Delegate the actual field background to the shared Win10 text-field
      // painter, but only when the Win10 text border is installed.
      if (c.getBorder() instanceof WinIntelliJTextBorder) {
        WinIntelliJTextFieldUI.paintTextFieldBackground(c, g2);
      }
    }
    finally {
      g2.dispose(); // always release the Graphics copy
    }
  }
  @Override
  protected int getMinimumHeight(int originHeight) {
    JComponent component = getComponent();
    Insets insets = component.getInsets();
    // Fixed minimum content height plus this component's own insets;
    // originHeight is intentionally ignored.
    return MINIMUM_HEIGHT.get() + insets.top + insets.bottom;
  }
}
| jwren/intellij-community | plugins/laf/win10/src/com/intellij/laf/win10/WinIntelliJPasswordFieldUI.java | Java | apache-2.0 | 2,632 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#include <folly/io/async/test/SocketPair.h>
#include <folly/Conv.h>
#include <folly/portability/Fcntl.h>
#include <folly/portability/Sockets.h>
#include <folly/portability/Unistd.h>
#include <errno.h>
#include <stdexcept>
namespace folly {
SocketPair::SocketPair(Mode mode) {
if (socketpair(PF_UNIX, SOCK_STREAM, 0, fds_) != 0) {
throw std::runtime_error(
folly::to<std::string>("test::SocketPair: failed create socket pair",
errno));
}
if (mode == NONBLOCKING) {
if (fcntl(fds_[0], F_SETFL, O_NONBLOCK) != 0) {
throw std::runtime_error(
folly::to<std::string>("test::SocketPair: failed to set non-blocking "
"read mode", errno));
}
if (fcntl(fds_[1], F_SETFL, O_NONBLOCK) != 0) {
throw std::runtime_error(
folly::to<std::string>("test::SocketPair: failed to set non-blocking "
"write mode", errno));
}
}
}
SocketPair::~SocketPair() {
closeFD0();
closeFD1();
}
void SocketPair::closeFD0() {
if (fds_[0] >= 0) {
close(fds_[0]);
fds_[0] = -1;
}
}
void SocketPair::closeFD1() {
if (fds_[1] >= 0) {
close(fds_[1]);
fds_[1] = -1;
}
}
}
| floxard/folly | folly/io/async/test/SocketPair.cpp | C++ | apache-2.0 | 2,025 |
import { config, path, fs } from 'azk';
import h from 'spec/spec_helper';
import { Generator } from 'azk/generator';
import { Manifest } from 'azk/manifest';
var qfs = require('q-io/fs');
// Verifies that the azk Azkfile generator detects two sibling Node.js
// projects (each marked by a package.json) and emits a system for each.
describe('Azk generator generation two nodes systems', function() {
  // NOTE(review): `project` is never assigned or read below — confirm and remove.
  var project = null;
  var outputs = [];
  var UI = h.mockUI(beforeEach, outputs);
  var generator = new Generator(UI);
  var rootFolder;
  before(function() {
    // Build a temp workspace with two node projects: node1/ and node2/,
    // each containing a (possibly empty) package.json marker file.
    return h.tmp_dir().then((dir) => {
      rootFolder = dir;
      // `node 1` system folder
      var projectFolder = path.join(dir, 'node1');
      fs.mkdirSync(projectFolder);
      var packageJson = path.join(projectFolder, 'package.json');
      h.touchSync(packageJson);
      // `node 2` system folder
      projectFolder = path.join(dir, 'node2');
      fs.mkdirSync(projectFolder);
      packageJson = path.join(projectFolder, 'package.json');
      h.touchSync(packageJson);
      return qfs.write(packageJson, '');
    });
  });
  // Renders an Azkfile for `project` from the detected systems, then parses
  // it back through Manifest so assertions see the generated result.
  var generateAndReturnManifest = (project) => {
    var manifest = path.join(project, config('manifest'));
    generator.render({
      systems: generator.findSystems(project),
    }, manifest);
    return new Manifest(project);
  };
  it('should detect two node projects', function() {
    var manifest = generateAndReturnManifest(rootFolder);
    var allKeys = Object.keys(manifest.systems);
    h.expect(allKeys).to.have.contains('node1');
    h.expect(allKeys).to.have.contains('node2');
  });
}); | saitodisse/azk-travis-test | spec/generator/rules/generation/two_node_gen_spec.js | JavaScript | apache-2.0 | 1,473 |
package com.hubspot.singularity.s3uploader;
import java.util.Arrays;
import com.hubspot.singularity.runner.base.config.SingularityRunnerBaseModule;
import com.hubspot.singularity.runner.base.shared.SingularityRunner;
import com.hubspot.singularity.s3.base.config.SingularityS3ConfigurationLoader;
import com.hubspot.singularity.s3uploader.config.SingularityS3UploaderConfigurationLoader;
import com.hubspot.singularity.s3uploader.config.SingularityS3UploaderModule;
/**
 * Entry point that boots the Singularity S3 uploader inside the shared
 * SingularityRunner, wired with the base module (S3 + uploader config
 * loaders) and the uploader's own Guice module.
 */
public class SingularityS3UploaderRunner {

  private SingularityS3UploaderRunner() {
    // Instantiated only through main().
  }

  public static void main(String... args) {
    new SingularityS3UploaderRunner().run(args);
  }

  public void run(String[] args) {
    SingularityRunnerBaseModule baseModule = new SingularityRunnerBaseModule(
        new SingularityS3ConfigurationLoader(),
        new SingularityS3UploaderConfigurationLoader());
    new SingularityRunner().run(Arrays.asList(baseModule, new SingularityS3UploaderModule()));
  }
}
| mjball/Singularity | SingularityS3Uploader/src/main/java/com/hubspot/singularity/s3uploader/SingularityS3UploaderRunner.java | Java | apache-2.0 | 901 |
/*
* tangfeixiong<tangfx128@gmail.com>
*/
package cloudnativelandscape.appdefdev.streaming.kafka.batchprocessing;
import cloudnativelandscape.appdefdev.streaming.kafka.KafkaProperties;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import java.util.concurrent.ExecutionException;
/**
 * Sample Kafka producer: sends an endless stream of numbered messages to the
 * configured topic, either asynchronously (completion reported through
 * {@code DemoCallBack}) or synchronously (blocking on each ack).
 */
public class ProduceSample {
    private final KafkaProducer<Integer, String> producer;
    private final String topic;
    private final Boolean isAsync;

    // NOTE(review): field injection runs *after* the constructor, so reading
    // kafkaProps inside ProduceSample() would NPE under a DI container as
    // written; @Autowired is also referenced without an import. Confirm
    // whether this class should use constructor injection instead.
    @Autowired
    private KafkaProperties kafkaProps;

    public ProduceSample() {
        producer = new KafkaProducer<>(kafkaProps.value());
        topic = kafkaProps.value().getProperty("topic");
        // Fix: getProperty returns a String; parse it instead of assigning it
        // directly to a Boolean (the original assignment does not compile).
        // parseBoolean also yields false for a missing (null) property.
        isAsync = Boolean.parseBoolean(kafkaProps.value().getProperty("async-send"));
    }

    /** Produces "Message_<n>" records forever, n starting at 1. */
    public void run() {
        int messageNo = 1;
        while (true) {
            String messageStr = "Message_" + messageNo;
            long startTime = System.currentTimeMillis();
            if (isAsync) { // Send asynchronously
                producer.send(new ProducerRecord<>(topic,
                    messageNo,
                    messageStr), new DemoCallBack(startTime, messageNo, messageStr));
            } else { // Send synchronously
                try {
                    producer.send(new ProducerRecord<>(topic,
                        messageNo,
                        messageStr)).get();
                    System.out.println("Sent message: (" + messageNo + ", " + messageStr + ")");
                } catch (InterruptedException e) {
                    // Fix: restore the interrupt flag so callers can observe it.
                    Thread.currentThread().interrupt();
                    e.printStackTrace();
                } catch (ExecutionException e) {
                    e.printStackTrace();
                }
            }
            ++messageNo;
        }
    }
}
/**
 * Producer callback for async sends: logs the round-trip latency and the
 * partition/offset the broker assigned, or prints the failure's stack trace.
 */
class DemoCallBack implements Callback {
    private final long startTime;
    private final int key;
    private final String message;

    public DemoCallBack(long startTime, int key, String message) {
        this.startTime = startTime;
        this.key = key;
        this.message = message;
    }

    /**
     * Invoked once the broker acknowledges (or rejects) the record; exactly
     * one of {@code metadata} / {@code exception} is non-null.
     */
    public void onCompletion(RecordMetadata metadata, Exception exception) {
        long elapsedMs = System.currentTimeMillis() - startTime;
        if (metadata == null) {
            exception.printStackTrace();
            return;
        }
        String report = "message(" + key + ", " + message + ") sent to partition("
                + metadata.partition() + "), " + "offset(" + metadata.offset()
                + ") in " + elapsedMs + " ms";
        System.out.println(report);
    }
}
| tangfeixiong/go-for-kubernetes | java/console-k8s/kafka-operator-component/src/main/java/cloudnativelandscape/appdefdev/streaming/kafka/batchprocessing/ProduceSample.java | Java | apache-2.0 | 3,092 |
# frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Cloud
    module Vision
      module V1p3beta1
        module ProductSearch
          # Path helper methods for the ProductSearch API.
          module Paths
            extend self

            ##
            # Builds a fully-qualified Location resource string:
            #
            #   `projects/{project}/locations/{location}`
            #
            # @param project [String]
            # @param location [String]
            #
            # @return [::String]
            def location_path(project:, location:)
              assert_no_slash! "project", project
              format "projects/%s/locations/%s", project, location
            end

            ##
            # Builds a fully-qualified Product resource string:
            #
            #   `projects/{project}/locations/{location}/products/{product}`
            #
            # @param project [String]
            # @param location [String]
            # @param product [String]
            #
            # @return [::String]
            def product_path(project:, location:, product:)
              assert_no_slash! "project", project
              assert_no_slash! "location", location
              format "projects/%s/locations/%s/products/%s", project, location, product
            end

            ##
            # Builds a fully-qualified ProductSet resource string:
            #
            #   `projects/{project}/locations/{location}/productSets/{product_set}`
            #
            # @param project [String]
            # @param location [String]
            # @param product_set [String]
            #
            # @return [::String]
            def product_set_path(project:, location:, product_set:)
              assert_no_slash! "project", project
              assert_no_slash! "location", location
              format "projects/%s/locations/%s/productSets/%s", project, location, product_set
            end

            ##
            # Builds a fully-qualified ReferenceImage resource string:
            #
            #   `projects/{project}/locations/{location}/products/{product}/referenceImages/{reference_image}`
            #
            # @param project [String]
            # @param location [String]
            # @param product [String]
            # @param reference_image [String]
            #
            # @return [::String]
            def reference_image_path(project:, location:, product:, reference_image:)
              assert_no_slash! "project", project
              assert_no_slash! "location", location
              assert_no_slash! "product", product
              format "projects/%s/locations/%s/products/%s/referenceImages/%s",
                     project, location, product, reference_image
            end

            private

            # All path segments except the last must not contain "/", since it
            # would corrupt the resource name; raises ::ArgumentError otherwise.
            def assert_no_slash!(label, value)
              raise ::ArgumentError, "#{label} cannot contain /" if value.to_s.include? "/"
            end
          end
        end
      end
    end
  end
end
| dazuma/google-cloud-ruby | google-cloud-vision-v1p3beta1/lib/google/cloud/vision/v1p3beta1/product_search/paths.rb | Ruby | apache-2.0 | 4,057 |
package quotas_test
import (
"net/http"
"time"
"code.cloudfoundry.org/cli/cf/api/quotas"
"code.cloudfoundry.org/cli/cf/configuration/coreconfig"
"code.cloudfoundry.org/cli/cf/models"
"code.cloudfoundry.org/cli/cf/net"
"code.cloudfoundry.org/cli/cf/terminal/terminalfakes"
testconfig "code.cloudfoundry.org/cli/util/testhelpers/configuration"
"github.com/onsi/gomega/ghttp"
"encoding/json"
"code.cloudfoundry.org/cli/cf/trace/tracefakes"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Integration-style spec: a fake Cloud Controller (ghttp.Server) is primed
// with canned responses and the repository's requests, JSON bodies, and
// response parsing are verified against it.
var _ = Describe("CloudControllerQuotaRepository", func() {
	var (
		ccServer   *ghttp.Server
		configRepo coreconfig.ReadWriter
		repo       quotas.CloudControllerQuotaRepository
	)
	BeforeEach(func() {
		// Point the repository at the in-process fake Cloud Controller.
		ccServer = ghttp.NewServer()
		configRepo = testconfig.NewRepositoryWithDefaults()
		configRepo.SetAPIEndpoint(ccServer.URL())
		gateway := net.NewCloudControllerGateway(configRepo, time.Now, new(terminalfakes.FakeUI), new(tracefakes.FakePrinter), "")
		repo = quotas.NewCloudControllerQuotaRepository(configRepo, gateway)
	})
	AfterEach(func() {
		ccServer.Close()
	})
	Describe("FindByName", func() {
		BeforeEach(func() {
			// Two handlers: first page carries next_url, forcing the repo to
			// follow pagination before matching by name.
			ccServer.AppendHandlers(
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("GET", "/v2/quota_definitions"),
					ghttp.RespondWith(http.StatusOK, `{
						"next_url": "/v2/quota_definitions?page=2",
						"resources": [
							{
								"metadata": { "guid": "my-quota-guid" },
								"entity": {
									"name": "my-remote-quota",
									"memory_limit": 1024,
									"instance_memory_limit": -1,
									"total_routes": 123,
									"total_services": 321,
									"non_basic_services_allowed": true,
									"app_instance_limit": 7,
									"total_reserved_route_ports": 5
								}
							}
						]
					}`),
				),
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("GET", "/v2/quota_definitions", "page=2"),
					ghttp.RespondWith(http.StatusOK, `{
						"resources": [
							{
								"metadata": { "guid": "my-quota-guid2" },
								"entity": { "name": "my-remote-quota2", "memory_limit": 1024 }
							},
							{
								"metadata": { "guid": "my-quota-guid3" },
								"entity": { "name": "my-remote-quota3", "memory_limit": 1024 }
							}
						]
					}`),
				),
			)
		})
		It("Finds Quota definitions by name", func() {
			quota, err := repo.FindByName("my-remote-quota")
			Expect(err).NotTo(HaveOccurred())
			// Both pages must have been fetched.
			Expect(ccServer.ReceivedRequests()).To(HaveLen(2))
			Expect(quota).To(Equal(models.QuotaFields{
				GUID:                    "my-quota-guid",
				Name:                    "my-remote-quota",
				MemoryLimit:             1024,
				InstanceMemoryLimit:     -1,
				RoutesLimit:             123,
				ServicesLimit:           321,
				NonBasicServicesAllowed: true,
				AppInstanceLimit:        7,
				ReservedRoutePorts:      "5",
			}))
		})
	})
	Describe("FindAll", func() {
		// NOTE(review): this local shadows the imported `quotas` package
		// inside the closure; harmless here, but easy to trip over.
		var (
			quotas []models.QuotaFields
			err    error
		)
		BeforeEach(func() {
			ccServer.AppendHandlers(
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("GET", "/v2/quota_definitions"),
					ghttp.RespondWith(http.StatusOK, `{
						"next_url": "/v2/quota_definitions?page=2",
						"resources": [
							{
								"metadata": { "guid": "my-quota-guid" },
								"entity": {
									"name": "my-remote-quota",
									"memory_limit": 1024,
									"instance_memory_limit": -1,
									"total_routes": 123,
									"total_services": 321,
									"non_basic_services_allowed": true,
									"app_instance_limit": 7,
									"total_reserved_route_ports": 3
								}
							}
						]
					}`),
				),
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("GET", "/v2/quota_definitions", "page=2"),
					ghttp.RespondWith(http.StatusOK, `{
						"resources": [
							{
								"metadata": { "guid": "my-quota-guid2" },
								"entity": { "name": "my-remote-quota2", "memory_limit": 1024 }
							},
							{
								"metadata": { "guid": "my-quota-guid3" },
								"entity": { "name": "my-remote-quota3", "memory_limit": 1024 }
							}
						]
					}`),
				),
			)
		})
		JustBeforeEach(func() {
			quotas, err = repo.FindAll()
			Expect(err).NotTo(HaveOccurred())
		})
		It("finds all Quota definitions", func() {
			Expect(ccServer.ReceivedRequests()).To(HaveLen(2))
			Expect(quotas).To(HaveLen(3))
			Expect(quotas[0].GUID).To(Equal("my-quota-guid"))
			Expect(quotas[0].Name).To(Equal("my-remote-quota"))
			Expect(quotas[0].MemoryLimit).To(Equal(int64(1024)))
			Expect(quotas[0].RoutesLimit).To(Equal(123))
			Expect(quotas[0].ServicesLimit).To(Equal(321))
			Expect(quotas[0].AppInstanceLimit).To(Equal(7))
			Expect(quotas[0].ReservedRoutePorts).To(Equal(json.Number("3")))
			Expect(quotas[1].GUID).To(Equal("my-quota-guid2"))
			Expect(quotas[2].GUID).To(Equal("my-quota-guid3"))
		})
		// Entities on page 2 omit app_instance_limit / reserved route ports,
		// exercising the repository's defaulting behaviour.
		It("defaults missing app instance limit to -1 (unlimited)", func() {
			Expect(quotas[1].AppInstanceLimit).To(Equal(-1))
		})
		It("defaults missing reserved route ports to be empty", func() {
			Expect(quotas[1].ReservedRoutePorts).To(BeEmpty())
		})
	})
	Describe("AssignQuotaToOrg", func() {
		BeforeEach(func() {
			ccServer.AppendHandlers(
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("PUT", "/v2/organizations/my-org-guid"),
					ghttp.VerifyJSON(`{"quota_definition_guid":"my-quota-guid"}`),
					ghttp.RespondWith(http.StatusCreated, nil),
				),
			)
		})
		It("sets the quota for an organization", func() {
			err := repo.AssignQuotaToOrg("my-org-guid", "my-quota-guid")
			Expect(ccServer.ReceivedRequests()).To(HaveLen(1))
			Expect(err).NotTo(HaveOccurred())
		})
	})
	Describe("Create", func() {
		BeforeEach(func() {
			// VerifyJSON pins the exact request body the repository must send.
			ccServer.AppendHandlers(
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("POST", "/v2/quota_definitions"),
					ghttp.VerifyJSON(`{
						"name": "not-so-strict",
						"non_basic_services_allowed": false,
						"total_services": 1,
						"total_routes": 12,
						"memory_limit": 123,
						"app_instance_limit": 42,
						"instance_memory_limit": 0,
						"total_reserved_route_ports": 10
					}`),
					ghttp.RespondWith(http.StatusCreated, nil),
				),
			)
		})
		It("creates a new quota with the given name", func() {
			quota := models.QuotaFields{
				Name:               "not-so-strict",
				ServicesLimit:      1,
				RoutesLimit:        12,
				MemoryLimit:        123,
				AppInstanceLimit:   42,
				ReservedRoutePorts: "10",
			}
			err := repo.Create(quota)
			Expect(err).NotTo(HaveOccurred())
			Expect(ccServer.ReceivedRequests()).To(HaveLen(1))
		})
	})
	Describe("Update", func() {
		BeforeEach(func() {
			ccServer.AppendHandlers(
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("PUT", "/v2/quota_definitions/my-quota-guid"),
					ghttp.VerifyJSON(`{
						"guid": "my-quota-guid",
						"non_basic_services_allowed": false,
						"name": "amazing-quota",
						"total_services": 1,
						"total_routes": 12,
						"memory_limit": 123,
						"app_instance_limit": 42,
						"instance_memory_limit": 0,
						"total_reserved_route_ports": 10
					}`),
					ghttp.RespondWith(http.StatusOK, nil),
				),
			)
		})
		It("updates an existing quota", func() {
			quota := models.QuotaFields{
				GUID:               "my-quota-guid",
				Name:               "amazing-quota",
				ServicesLimit:      1,
				RoutesLimit:        12,
				MemoryLimit:        123,
				AppInstanceLimit:   42,
				ReservedRoutePorts: "10",
			}
			err := repo.Update(quota)
			Expect(err).NotTo(HaveOccurred())
			Expect(ccServer.ReceivedRequests()).To(HaveLen(1))
		})
	})
	Describe("Delete", func() {
		BeforeEach(func() {
			ccServer.AppendHandlers(
				ghttp.CombineHandlers(
					ghttp.VerifyRequest("DELETE", "/v2/quota_definitions/my-quota-guid"),
					ghttp.RespondWith(http.StatusNoContent, nil),
				),
			)
		})
		It("deletes the quota with the given name", func() {
			err := repo.Delete("my-quota-guid")
			Expect(err).NotTo(HaveOccurred())
			Expect(ccServer.ReceivedRequests()).To(HaveLen(1))
		})
	})
})
| simonleung8/cli | cf/api/quotas/quotas_test.go | GO | apache-2.0 | 7,943 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.util;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.SysInfo;
/**
* Plugin to calculate resource information on the system.
*/
@InterfaceAudience.LimitedPrivate({"YARN", "MAPREDUCE"})
@InterfaceStability.Unstable
public class ResourceCalculatorPlugin extends Configured {
  private static final Log LOG =
      LogFactory.getLog(ResourceCalculatorPlugin.class);
  // Platform-specific probe that supplies every reading; all getters below
  // are thin delegations to it.
  private final SysInfo sys;
  // Uses the SysInfo implementation selected for the current platform.
  protected ResourceCalculatorPlugin() {
    this(SysInfo.newInstance());
  }
  // Allows injecting a specific SysInfo implementation.
  public ResourceCalculatorPlugin(SysInfo sys) {
    this.sys = sys;
  }
  /**
   * Obtain the total size of the virtual memory present in the system.
   *
   * @return virtual memory size in bytes.
   */
  public long getVirtualMemorySize() {
    return sys.getVirtualMemorySize();
  }
  /**
   * Obtain the total size of the physical memory present in the system.
   *
   * @return physical memory size bytes.
   */
  public long getPhysicalMemorySize() {
    return sys.getPhysicalMemorySize();
  }
  /**
   * Obtain the total size of the available virtual memory present
   * in the system.
   *
   * @return available virtual memory size in bytes.
   */
  public long getAvailableVirtualMemorySize() {
    return sys.getAvailableVirtualMemorySize();
  }
  /**
   * Obtain the total size of the available physical memory present
   * in the system.
   *
   * @return available physical memory size bytes.
   */
  public long getAvailablePhysicalMemorySize() {
    return sys.getAvailablePhysicalMemorySize();
  }
  /**
   * Obtain the total number of logical processors present on the system.
   *
   * @return number of logical processors
   */
  public int getNumProcessors() {
    return sys.getNumProcessors();
  }
  /**
   * Obtain total number of physical cores present on the system.
   *
   * @return number of physical cores
   */
  public int getNumCores() {
    return sys.getNumCores();
  }
  /**
   * Obtain the CPU frequency of on the system.
   *
   * @return CPU frequency in kHz
   */
  public long getCpuFrequency() {
    return sys.getCpuFrequency();
  }
  /**
   * Obtain the cumulative CPU time since the system is on.
   *
   * @return cumulative CPU time in milliseconds
   */
  public long getCumulativeCpuTime() {
    return sys.getCumulativeCpuTime();
  }
  /**
   * Obtain the CPU usage % of the machine. Return -1 if it is unavailable.
   *
   * @return CPU usage in %
   */
  public float getCpuUsagePercentage() {
    return sys.getCpuUsagePercentage();
  }
  /**
   * Obtain the number of VCores used. Return -1 if it is unavailable.
   *
   * @return Number of VCores used a percentage (from 0 to #VCores)
   */
  public float getNumVCoresUsed() {
    return sys.getNumVCoresUsed();
  }
  /**
   * Obtain the aggregated number of bytes read over the network.
   * @return total number of bytes read.
   */
  public long getNetworkBytesRead() {
    return sys.getNetworkBytesRead();
  }
  /**
   * Obtain the aggregated number of bytes written to the network.
   * @return total number of bytes written.
   */
  public long getNetworkBytesWritten() {
    return sys.getNetworkBytesWritten();
  }
  /**
   * Obtain the aggregated number of bytes read from disks.
   *
   * @return total number of bytes read.
   */
  public long getStorageBytesRead() {
    return sys.getStorageBytesRead();
  }
  /**
   * Obtain the aggregated number of bytes written to disks.
   *
   * @return total number of bytes written.
   */
  public long getStorageBytesWritten() {
    return sys.getStorageBytesWritten();
  }
  /**
   * Obtain the total number of usable GPUs (in non-erroneous state)
   * @return number of GPUs
   */
  public int getNumGPUs(){
    return sys.getNumGPUs();
  }
  /**
   * Create the ResourceCalculatorPlugin from the class name and configure it. If
   * class name is null, this method will try and return a memory calculator
   * plugin available for this system.
   *
   * @param clazz ResourceCalculator plugin class-name
   * @param conf configure the plugin with this.
   * @return ResourceCalculatorPlugin or null if ResourceCalculatorPlugin is not
   * available for current system
   */
  public static ResourceCalculatorPlugin getResourceCalculatorPlugin(
      Class<? extends ResourceCalculatorPlugin> clazz, Configuration conf) {
    if (clazz != null) {
      return ReflectionUtils.newInstance(clazz, conf);
    }
    try {
      return new ResourceCalculatorPlugin();
    } catch (Throwable t) {
      // Throwable (not Exception): default construction can fail with
      // LinkageError on unsupported platforms; callers must handle null.
      LOG.warn(t + ": Failed to instantiate default resource calculator.", t);
    }
    return null;
  }
}
| robzor92/hops | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ResourceCalculatorPlugin.java | Java | apache-2.0 | 5,721 |
<?php
// One-off migration: copies each Maniphest task's legacy "attached task"
// dependencies into the generic edge store (TYPE_TASK_DEPENDS_ON_TASK).
echo "Migrating task dependencies to edges...\n";
$table = new ManiphestTask();
// Single transaction so a mid-run failure leaves the edge table untouched.
$table->openTransaction();
foreach (new LiskMigrationIterator($table) as $task) {
  $id = $task->getID();
  echo "Task {$id}: ";
  // Legacy storage: attached PHIDs of type TASK are the task's dependencies.
  $deps = $task->getAttachedPHIDs(PhabricatorPHIDConstants::PHID_TYPE_TASK);
  if (!$deps) {
    echo "-\n";
    continue;
  }
  $editor = new PhabricatorEdgeEditor();
  // Suppress events: this is a data backfill, not a user-visible mutation.
  $editor->setSuppressEvents(true);
  foreach ($deps as $dep) {
    $editor->addEdge(
      $task->getPHID(),
      PhabricatorEdgeConfig::TYPE_TASK_DEPENDS_ON_TASK,
      $dep);
  }
  $editor->save();
  echo "OKAY\n";
}
$table->saveTransaction();
echo "Done.\n";
| telerik/phabricator | resources/sql/patches/migrate-maniphest-dependencies.php | PHP | apache-2.0 | 652 |
package org.deeplearning4j.spark.iterator;
import lombok.extern.slf4j.Slf4j;
import org.apache.spark.TaskContext;
import org.apache.spark.TaskContextHelper;
import org.deeplearning4j.datasets.iterator.AsyncDataSetIterator;
import org.deeplearning4j.datasets.iterator.callbacks.DataSetCallback;
import org.deeplearning4j.datasets.iterator.callbacks.DefaultCallback;
import org.nd4j.linalg.api.memory.MemoryWorkspace;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.factory.Nd4j;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
/**
* Spark version of AsyncDataSetIterator, made separate to propagate Spark TaskContext to new background thread, for Spark block locks compatibility
*
* @author raver119@gmail.com
*/
@Slf4j
public class SparkADSI extends AsyncDataSetIterator {
    // Spark TaskContext captured on the constructing thread; re-applied on
    // the background prefetch thread (see externalCall) so Spark block locks
    // keep working there.
    protected TaskContext context;
    protected SparkADSI() {
        super();
    }
    public SparkADSI(DataSetIterator baseIterator) {
        this(baseIterator, 8);
    }
    public SparkADSI(DataSetIterator iterator, int queueSize, BlockingQueue<DataSet> queue) {
        this(iterator, queueSize, queue, true);
    }
    public SparkADSI(DataSetIterator baseIterator, int queueSize) {
        this(baseIterator, queueSize, new LinkedBlockingQueue<DataSet>(queueSize));
    }
    public SparkADSI(DataSetIterator baseIterator, int queueSize, boolean useWorkspace) {
        this(baseIterator, queueSize, new LinkedBlockingQueue<DataSet>(queueSize), useWorkspace);
    }
    public SparkADSI(DataSetIterator baseIterator, int queueSize, boolean useWorkspace, Integer deviceId) {
        this(baseIterator, queueSize, new LinkedBlockingQueue<DataSet>(queueSize), useWorkspace, new DefaultCallback(),
                        deviceId);
    }
    public SparkADSI(DataSetIterator baseIterator, int queueSize, boolean useWorkspace, DataSetCallback callback) {
        this(baseIterator, queueSize, new LinkedBlockingQueue<DataSet>(queueSize), useWorkspace, callback);
    }
    public SparkADSI(DataSetIterator iterator, int queueSize, BlockingQueue<DataSet> queue, boolean useWorkspace) {
        this(iterator, queueSize, queue, useWorkspace, new DefaultCallback());
    }
    public SparkADSI(DataSetIterator iterator, int queueSize, BlockingQueue<DataSet> queue, boolean useWorkspace,
                    DataSetCallback callback) {
        this(iterator, queueSize, queue, useWorkspace, callback, Nd4j.getAffinityManager().getDeviceForCurrentThread());
    }
    // Full constructor; all other constructors funnel here.
    public SparkADSI(DataSetIterator iterator, int queueSize, BlockingQueue<DataSet> queue, boolean useWorkspace,
                    DataSetCallback callback, Integer deviceId) {
        this();
        // Keep at least two slots so producer and consumer can overlap.
        if (queueSize < 2)
            queueSize = 2;
        this.deviceId = deviceId;
        this.callback = callback;
        this.useWorkspace = useWorkspace;
        this.buffer = queue;
        this.prefetchSize = queueSize;
        this.backedIterator = iterator;
        // Unique workspace id per iterator instance.
        this.workspaceId = "SADSI_ITER-" + java.util.UUID.randomUUID().toString();
        if (iterator.resetSupported())
            this.backedIterator.reset();
        // Capture the current Spark TaskContext for propagation to the
        // prefetch thread via externalCall().
        context = TaskContext.get();
        this.thread = new SparkPrefetchThread(buffer, iterator, terminator, null);
        /**
         * We want to ensure, that background thread will have the same thread->device affinity, as master thread
         */
        Nd4j.getAffinityManager().attachThreadToDevice(thread, deviceId);
        thread.setDaemon(true);
        thread.start();
    }
    @Override
    protected void externalCall() {
        // Runs on the prefetch thread: install the captured TaskContext there.
        TaskContextHelper.setTaskContext(context);
    }
    public class SparkPrefetchThread extends AsyncPrefetchThread {
        protected SparkPrefetchThread(BlockingQueue<DataSet> queue, DataSetIterator iterator, DataSet terminator,
                        MemoryWorkspace workspace) {
            super(queue, iterator, terminator, workspace);
        }
    }
}
| kinbod/deeplearning4j | deeplearning4j-scaleout/spark/dl4j-spark/src/main/java/org/deeplearning4j/spark/iterator/SparkADSI.java | Java | apache-2.0 | 4,031 |
#
# Author:: Adam Jacob (<adam@chef.io>)
# Author:: Tyler Cloke (<tyler@chef.io>)
# Copyright:: Copyright 2008-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/resource"
require "chef/provider/http_request"
class Chef
  class Resource
    # Sends an HTTP request (GET, PATCH, PUT, POST, DELETE, HEAD, or OPTIONS)
    # with an arbitrary message body; often useful for custom callbacks.
    class HttpRequest < Chef::Resource
      identity_attr :url

      default_action :get
      allowed_actions :get, :patch, :put, :post, :delete, :head, :options

      def initialize(name, run_context = nil)
        super
        @message = name
        @url = nil
        @headers = {}
      end

      # URL the request is sent to.
      def url(args = nil)
        set_or_return(:url, args, :kind_of => String)
      end

      # Request payload; accepts a literal value or a block for lazy evaluation.
      def message(args = nil, &block)
        args = block if block_given?
        set_or_return(:message, args, :kind_of => Object)
      end

      # Extra HTTP headers to send, as a Hash.
      def headers(args = nil)
        set_or_return(:headers, args, :kind_of => Hash)
      end
    end
  end
end
| jonlives/chef | lib/chef/resource/http_request.rb | Ruby | apache-2.0 | 1,791 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.portal.pluto.om.common;
import java.util.Collection;
import java.util.Iterator;
import java.util.Locale;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import java.util.Vector;
import org.apache.pluto.om.common.Language;
import org.apache.pluto.om.common.LanguageSet;
import org.apache.pluto.util.StringUtils;
/**
 * Implementation of the Pluto {@link LanguageSet} portlet object model
 * interface. Holds the locales a portlet supports together with its title,
 * short title and keywords, optionally backed by a resource bundle.
 * Several fields are populated externally by Castor (see the castor
 * getter/setter section below).
 *
 * @version $Id$
 */
public class LanguageSetImpl
    extends AbstractSupportSet
    implements LanguageSet, java.io.Serializable, Support {

    // keywords string as populated by Castor
    private String castorKeywords;

    // class loader used to resolve the resource bundle; may be null
    private ClassLoader classLoader;

    /**
     * contains Locale objects for locales supported by the portlet
     */
    private Vector locales;

    // true once initResourceBundle() has run; prevents repeated bundle loads
    private boolean resourceBundleInitialized;

    // resource bundle base name; null if no bundle is configured
    private String resources;

    private String shortTitle;

    private String title;

    public LanguageSetImpl() {
        locales = new Vector();
    }

    // create Language object with data from this class (title, short-title, description, keywords)
    private Language createLanguage(Locale locale, ResourceBundle bundle) {
        LanguageImpl lang = new LanguageImpl(locale, bundle, title, shortTitle, castorKeywords);
        return lang;
    }

    /* never used
    // creates a locale object from a string representation
    private Locale createLocale(String locale)
    {
        // parse locale String
        StringTokenizer tokenizer = new StringTokenizer(locale,"_");
        String[] localeDef = new String[5]; // just in case we have more than one variant
        for (int i = 0; i < localeDef.length; i++)
        {
            if (tokenizer.hasMoreTokens())
            {
                localeDef[i] = tokenizer.nextToken();
            }
            else
            {
                localeDef[i] = "";
            }
        }
        return new java.util.Locale(localeDef[0], localeDef[1], localeDef[2]+localeDef[3]+localeDef[4]);
    }
    */

    /**
     * Returns the Language for the given locale, lazily initializing the
     * resource bundles on first access. Falls back via matchLocale() when the
     * exact locale is unsupported; also returns the sole Language when only
     * one exists, regardless of its locale.
     *
     * @see org.apache.pluto.om.common.LanguageSet#get(java.util.Locale)
     */
    public Language get(Locale locale) {
        if (resources != null && !resourceBundleInitialized) {
            initResourceBundle();
            this.resourceBundleInitialized = true;
        }
        if (!locales.contains(locale)) {
            locale = matchLocale(locale);
        }
        Iterator iterator = this.iterator();
        while (iterator.hasNext()) {
            Language language = (Language)iterator.next();
            if (language.getLocale().equals(locale) || size()==1) {
                return language;
            }
        }
        return null;
    }

    /**
     * @see org.apache.pluto.om.common.LanguageSet#getLocales()
     */
    public Iterator getLocales() {
        return locales.iterator();
    }

    /**
     * Returns the first supported locale. If none is configured, "en" is
     * created, appended to the locales list as a side effect, and returned.
     *
     * @see org.apache.pluto.om.common.LanguageSet#getDefaultLocale()
     */
    public Locale getDefaultLocale() {
        Locale defLoc = null;
        if (locales != null && locales.size() > 0) {
            defLoc = (Locale)locales.firstElement();
            if (defLoc == null) {
                defLoc = new Locale("en","");
                locales.add(defLoc);
            }
        } else {
            defLoc = new Locale("en","");
            locales.add(defLoc);
        }
        return defLoc;
    }

    /**
     * @see org.apache.cocoon.portal.pluto.om.common.Support#postBuild(java.lang.Object)
     */
    public void postBuild(Object parameter) throws Exception {
        // nothing to do
    }

    /**
     * Called after loading: merges the externally supplied locales and fills
     * in default/empty values for any missing inline information.
     *
     * @see org.apache.cocoon.portal.pluto.om.common.Support#postLoad(java.lang.Object)
     */
    public void postLoad(Object parameter) throws Exception {
        locales.addAll((Collection)parameter);
        initInlinedInfos();
    }

    /**
     * @see org.apache.cocoon.portal.pluto.om.common.Support#postStore(java.lang.Object)
     */
    public void postStore(Object parameter) throws Exception {
        // nothing to do
    }

    /**
     * @see org.apache.cocoon.portal.pluto.om.common.Support#preBuild(java.lang.Object)
     */
    public void preBuild(Object parameter) throws Exception {
        // nothing to do
    }

    /**
     * @see org.apache.cocoon.portal.pluto.om.common.Support#preStore(java.lang.Object)
     */
    public void preStore(Object parameter) throws Exception {
        // nothing to do
    }

    // internal methods.

    // Normalizes inline metadata to non-null values and registers the Language
    // for the default locale.
    private void initInlinedInfos() throws Exception {
        // if resource-bundle is given
        // must be initialized later when classloader is known by initResourceBundle()
        if (locales.isEmpty()) {
            getDefaultLocale(); // the default locale gets automatically added to the locales
        }
        if (castorKeywords == null) {
            castorKeywords="";
        }
        if (shortTitle == null) {
            shortTitle="";
        }
        if (title == null) {
            title="";
        }
        add(createLanguage(getDefaultLocale(), null));
    }

    // create and add all resource bundle information as Language objects to this set
    private void initResourceBundle() {
        Iterator iter = locales.iterator();
        while (iter.hasNext()) {
            Locale locale = (Locale)iter.next();
            ResourceBundle bundle = null;
            bundle = loadResourceBundle(locale);
            if (bundle != null) {
                /*String title;
                String shortTitle;
                String keywords;

                try {
                    title=bundle.getString("javax.portlet.title");
                } catch(MissingResourceException x) {
                    title = this.title;
                }

                try {
                    shortTitle=bundle.getString("javax.portlet.short-title");
                } catch(MissingResourceException x) {
                    shortTitle = this.shortTitle;
                }

                try {
                    keywords=bundle.getString("javax.portlet.keywords");
                } catch(MissingResourceException x) {
                    keywords = this.castorKeywords;
                }*/

                // remove first so the bundle-backed Language replaces any
                // previously registered one for the same locale
                Language language = createLanguage(locale, bundle);
                remove(language);
                add(language);
            }
        }
    }

    // try to match the given locale to a supported locale
    // (progressively drops variant, then country, then falls back to default)
    private Locale matchLocale(Locale locale) {
        String variant = locale.getVariant();
        if (variant != null && variant.length() > 0) {
            locale = new Locale(locale.getLanguage(), locale.getCountry());
        }
        if (! locales.contains(locale)) {
            String country = locale.getCountry();
            if (country != null && country.length() > 0) {
                locale = new Locale(locale.getLanguage(), "");
            }
        }
        if (! locales.contains(locale)) {
            locale = getDefaultLocale();
        }
        return locale;
    }

    // additional methods.

    public String getCastorKeywords() {
        return this.castorKeywords;
    }

    // additional methods
    public String getResources() {
        return resources;
    }

    public String getShortTitle() {
        return this.shortTitle;
    }

    // internal methods used by castor
    public String getTitle() {
        return this.title;
    }

    // loads resource bundle files from WEB-INF/classes directory;
    // returns null when the bundle cannot be found
    protected ResourceBundle loadResourceBundle(Locale locale) {
        ResourceBundle resourceBundle = null;
        try {
            if (classLoader != null) {
                resourceBundle=ResourceBundle.getBundle(resources, locale, classLoader);
            } else {
                resourceBundle=ResourceBundle.getBundle(resources, locale, Thread.currentThread().getContextClassLoader());
            }
        } catch (MissingResourceException x) {
            return null;
        }
        return resourceBundle;
    }

    public void setCastorKeywords(String keywords) {
        this.castorKeywords = keywords;
    }
    // end castor methods

    public void setClassLoader(ClassLoader loader) {
        this.classLoader = loader;
    }

    public void setResources(String resources) {
        this.resources = resources;
    }

    public void setShortTitle(String shortTitle) {
        this.shortTitle = shortTitle;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * @see java.lang.Object#toString()
     */
    public String toString() {
        return toString(0);
    }

    public String toString(int indent) {
        StringBuffer buffer = new StringBuffer(50);
        StringUtils.newLine(buffer,indent);
        buffer.append(getClass().toString());
        buffer.append(": ");
        Iterator iterator = this.iterator();
        while (iterator.hasNext())
        {
            buffer.append(((LanguageImpl)iterator.next()).toString(indent+2));
        }
        return buffer.toString();
    }
}
| apache/cocoon | blocks/cocoon-portal/cocoon-portal-portlet-impl/src/main/java/org/apache/cocoon/portal/pluto/om/common/LanguageSetImpl.java | Java | apache-2.0 | 9,789 |
/*
* Copyright 2013 Cloudera Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.cdk.morphline.api;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.management.ManagementFactory;
import java.net.InetAddress;
import java.net.URI;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.management.AttributeNotFoundException;
import javax.management.InstanceNotFoundException;
import javax.management.IntrospectionException;
import javax.management.MBeanException;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import org.junit.Ignore;
import org.junit.Test;
import com.cloudera.cdk.morphline.base.Compiler;
import com.cloudera.cdk.morphline.base.FaultTolerance;
import com.cloudera.cdk.morphline.base.Fields;
import com.cloudera.cdk.morphline.base.Metrics;
import com.cloudera.cdk.morphline.base.Notifications;
import com.cloudera.cdk.morphline.shaded.com.google.code.regexp.Matcher;
import com.cloudera.cdk.morphline.shaded.com.google.code.regexp.Pattern;
import com.cloudera.cdk.morphline.shaded.com.google.common.reflect.ClassPath;
import com.cloudera.cdk.morphline.shaded.com.google.common.reflect.ClassPath.ResourceInfo;
import com.codahale.metrics.Counter;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.health.HealthCheck;
import com.codahale.metrics.health.HealthCheckRegistry;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Multimap;
import com.google.common.io.Files;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigException;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigUtil;
public class MorphlineTest extends AbstractMorphlineTest {
// Feeds input through the morphline and asserts a single output record that
// equals expected and is the same object instance as the input.
private void processAndVerifySuccess(Record input, Record expected) {
  processAndVerifySuccess(input, expected, true);
}
// Same as above; isSame controls whether the output record must be the
// identical instance as input (true) or a different instance (false).
private void processAndVerifySuccess(Record input, Record expected, boolean isSame) {
  collector.reset();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertTrue(morphline.process(input));
  assertEquals(expected, collector.getFirstRecord());
  if (isSame) {
    assertSame(input, collector.getFirstRecord());
  } else {
    assertNotSame(input, collector.getFirstRecord());
  }
}
// Feeds input through the morphline and asserts that the emitted records'
// field multimaps match expectedMaps, in order, with no extra records.
private void processAndVerifySuccess(Record input, Multimap... expectedMaps) {
  collector.reset();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertTrue(morphline.process(input));
  Iterator<Record> iter = collector.getRecords().iterator();
  for (Multimap expected : expectedMaps) {
    assertTrue(iter.hasNext());
    assertEquals(expected, iter.next().getFields());
  }
  assertFalse(iter.hasNext());
}
// Feeds input through the morphline and asserts that processing fails and
// no records are emitted.
private void processAndVerifyFailure(Record input) {
  collector.reset();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertFalse(morphline.process(input));
  assertEquals(0, collector.getRecords().size());
}
// Verifies that MorphlineContext.Builder wires through the exception handler,
// metric registry and health check registry it is given.
@Test
public void testMorphlineContext() throws Exception {
  ExceptionHandler ex = new ExceptionHandler() {
    @Override
    public void handleException(Throwable t, Record record) {
      throw new RuntimeException(t);
    }
  };
  MetricRegistry metricRegistry = new MetricRegistry();
  metricRegistry.register("myCounter", new Counter());
  HealthCheckRegistry healthChecks = new HealthCheckRegistry();
  healthChecks.register("foo", new HealthCheck() {
    @Override
    protected Result check() throws Exception {
      return Result.healthy("flawless");
    }
  });
  MorphlineContext ctx = new MorphlineContext.Builder()
    .setExceptionHandler(ex)
    .setHealthCheckRegistry(healthChecks)
    .setMetricRegistry(metricRegistry)
    .build();
  assertSame(ex, ctx.getExceptionHandler());
  assertSame(metricRegistry, ctx.getMetricRegistry());
  assertSame(healthChecks, ctx.getHealthCheckRegistry());
  ctx.getHealthCheckRegistry().runHealthChecks();
}
// Smoke test: these config files must parse without throwing.
@Test
public void testParseComplexConfig() throws Exception {
  parse("test-morphlines/parseComplexConfig");
  parse("test-morphlines/tutorialReadAvroContainer");
  parse("test-morphlines/tutorialReadJsonTestTweets");
}
// System properties act as environment-style variables that the config
// resolver substitutes; they are cleared again in the finally block.
@Test
public void testParseVariables() throws Exception {
  System.setProperty("ENV_ZK_HOST", "zk.foo.com:2181/solr");
  System.setProperty("ENV_SOLR_URL", "http://foo.com:8983/solr/myCollection");
  System.setProperty("ENV_SOLR_LOCATOR", "{ collection : collection1 }");
  try {
    Config override = ConfigFactory.parseString("SOLR_LOCATOR : { collection : fallback } ");
    Config config = parse("test-morphlines/parseVariables", override);
    //System.out.println(config.root().render());
  } finally {
    System.clearProperty("ENV_ZK_HOST");
    System.clearProperty("ENV_SOLR_URL");
    System.clearProperty("ENV_SOLR_LOCATOR");
  }
}
@Test
public void testPipeWithTwoBasicCommands() throws Exception {
  morphline = createMorphline("test-morphlines/pipeWithTwoBasicCommands");
  Record record = createBasicRecord();
  processAndVerifySuccess(record, record);
}
// Lifecycle notifications must pass through a morphline without error.
@Test
public void testNotifications() throws Exception {
  morphline = createMorphline("test-morphlines/pipeWithTwoBasicCommands");
  Notifications.notifyBeginTransaction(morphline);
  Notifications.notifyStartSession(morphline);
  Notifications.notifyCommitTransaction(morphline);
  Notifications.notifyRollbackTransaction(morphline);
}
// Compiles a config via the public Compiler API with an empty morphline id.
@Test
public void testCompile() throws Exception {
  String file = "test-morphlines/pipeWithTwoBasicCommands";
  morphline = new Compiler().compile(
      new File(RESOURCES_DIR + "/" + file + ".conf"),
      "",
      new MorphlineContext.Builder().build(),
      null);
  assertNotNull(morphline);
  new Fields();
  new Metrics();
}
@Test
public void testCompileWithExplicitMorphlineId() throws Exception {
  String file = "test-morphlines/pipeWithTwoBasicCommands";
  morphline = new Compiler().compile(
      new File(RESOURCES_DIR + "/" + file + ".conf"),
      "morphline1",
      new MorphlineContext.Builder().build(),
      null);
  assertNotNull(morphline);
}
// Compiling with a morphline id that does not exist in the config must fail.
@Test
public void testCompileWithUnknownMorphlineId() throws Exception {
  String file = "test-morphlines/pipeWithTwoBasicCommands";
  try {
    new Compiler().compile(
        new File(RESOURCES_DIR + "/" + file + ".conf"),
        "morphline2",
        new MorphlineContext.Builder().build(),
        null);
    fail();
  } catch (MorphlineCompilationException e) {
    ; // expected
  }
}
// Compiling a config that contains no morphline definitions must fail.
@Test
public void testCompileWithMissingMorphline() throws Exception {
  String file = "test-morphlines/compileWithMissingMorphline";
  try {
    new Compiler().compile(
        new File(RESOURCES_DIR + "/" + file + ".conf"),
        "morphline1",
        new MorphlineContext.Builder().build(),
        null);
    fail();
  } catch (MorphlineCompilationException e) {
    ; // expected
  }
}
// Exercises FaultTolerance: exceptions whose class is listed as recoverable
// are swallowed when recoverable handling is enabled (including when wrapped
// as a cause); otherwise handleException rethrows as
// MorphlineRuntimeException. Errors always propagate.
@Test
public void testFaultTolerance() throws Exception {
  FaultTolerance tolerance = new FaultTolerance(true, false);
  tolerance = new FaultTolerance(true, true, IOException.class.getName());
  tolerance.handleException(new IOException(), new Record());
  tolerance = new FaultTolerance(true, true, IOException.class.getName());
  tolerance.handleException(new RuntimeException(), new Record());
  // the recoverable class is matched even when nested as the cause
  tolerance = new FaultTolerance(true, true, IOException.class.getName());
  tolerance.handleException(new RuntimeException(new IOException()), new Record());
  tolerance = new FaultTolerance(true, false, IOException.class.getName());
  try {
    tolerance.handleException(new IOException(), new Record());
    fail();
  } catch (MorphlineRuntimeException e) {
    ; // expected
  }
  tolerance = new FaultTolerance(false, false, IOException.class.getName());
  try {
    tolerance.handleException(new IOException(), new Record());
    fail();
  } catch (MorphlineRuntimeException e) {
    ; // expected
  }
  tolerance = new FaultTolerance(false, false, IOException.class.getName());
  try {
    tolerance.handleException(new RuntimeException(), new Record());
    fail();
  } catch (MorphlineRuntimeException e) {
    ; // expected
  }
  // java.lang.Error is never swallowed, even if listed as recoverable
  tolerance = new FaultTolerance(true, true, Error.class.getName());
  try {
    tolerance.handleException(new Error(), new Record());
    fail();
  } catch (Error e) {
    ; // expected
  }
}
@Test
public void testLog() throws Exception {
  morphline = createMorphline("test-morphlines/log");
  Record record = createBasicRecord();
  processAndVerifySuccess(record, record);
}
// addValues appends values to fields, keeping any existing values.
@Test
public void testAddValues() throws Exception {
  morphline = createMorphline("test-morphlines/addValues");
  Record record = new Record();
  record.put("first_name", "Nadja");
  Record expected = new Record();
  expected.put("first_name", "Nadja");
  expected.put("source_type", "text/log");
  expected.put("source_type", "text/log2");
  expected.put("source_host", "123");
  expected.put("name", "Nadja");
  expected.put("names", "@{first_name}");
  expected.put("pids", 456);
  expected.put("pids", "hello");
  processAndVerifySuccess(record, expected);
}
// addValuesIfAbsent only adds a value when the field has none yet.
@Test
public void testAddValuesIfAbsent() throws Exception {
  morphline = createMorphline("test-morphlines/addValuesIfAbsent");
  Record record = new Record();
  Record expected = new Record();
  expected.put("source_type", "text/log");
  processAndVerifySuccess(record, expected);
}
// setValues replaces any pre-existing values of the target fields.
@Test
public void testSetValues() throws Exception {
  morphline = createMorphline("test-morphlines/setValues");
  Record record = new Record();
  record.put("first_name", "Nadja");
  record.put("source_type", "XXXX");
  record.put("source_type", "XXXX");
  record.put("source_host", 999);
  record.put("name", "XXXX");
  record.put("names", "XXXX");
  record.put("pids", 789);
  record.put("pids", "YYYY");
  Record expected = new Record();
  expected.put("first_name", "Nadja");
  expected.put("source_type", "text/log");
  expected.put("source_type", "text/log2");
  expected.put("source_host", "123");
  expected.put("name", "Nadja");
  expected.put("names", "@{first_name}");
  expected.put("pids", 456);
  expected.put("pids", "hello");
  processAndVerifySuccess(record, expected);
}
// First pass stamps the current time (millis) into "ts"; the second pass
// verifies that preserveExisting = true keeps the original timestamp.
@Test
public void testAddCurrentTime() throws Exception {
  morphline = createMorphline("test-morphlines/addCurrentTime");
  Record record = new Record();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  long now = System.currentTimeMillis();
  assertTrue(morphline.process(record));
  assertSame(record, collector.getFirstRecord());
  long actual = ((Long) record.getFirstValue("ts")).longValue();
  assertTrue(actual >= now);
  assertTrue(actual <= now + 1000);
  // test that preserveExisting = true preserves the existing timestamp
  collector.reset();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  Thread.sleep(1);
  assertTrue(morphline.process(record));
  assertSame(record, collector.getFirstRecord());
  long actual2 = ((Long) record.getFirstValue("ts")).longValue();
  assertEquals(actual, actual2);
}
// Skips silently when the local host cannot be resolved.
@Test
public void testAddLocalHost() throws Exception {
  morphline = createMorphline("test-morphlines/addLocalHost");
  InetAddress localHost;
  try {
    localHost = InetAddress.getLocalHost();
  } catch (UnknownHostException e) {
    return;
  }
  testAddLocalHostInternal(localHost.getHostAddress());
}
// Same as above, but expects the canonical host name instead of the address.
@Test
public void testAddLocalHostWithName() throws Exception {
  morphline = createMorphline("test-morphlines/addLocalHostWithName");
  InetAddress localHost;
  try {
    localHost = InetAddress.getLocalHost();
  } catch (UnknownHostException e) {
    return;
  }
  testAddLocalHostInternal(localHost.getCanonicalHostName());
}
// Shared assertion helper: "myhost" gets the given name, and an existing
// value is preserved because the config uses preserveExisting = true.
private void testAddLocalHostInternal(String name) throws Exception {
  Record record = new Record();
  Record expected = new Record();
  expected.put("myhost", name);
  processAndVerifySuccess(record, expected);
  // test that preserveExisting = true preserves the existing value
  record = new Record();
  record.put("myhost", "myname");
  expected = record.copy();
  processAndVerifySuccess(record, expected);
}
// toByteArray converts the field's value to its UTF-8 byte representation.
@Test
public void testToByteArray() throws Exception {
  morphline = createMorphline("test-morphlines/toByteArray");
  Record record = new Record();
  record.put("first_name", "Nadja");
  collector.reset();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertTrue(morphline.process(record));
  assertEquals(1, collector.getFirstRecord().getFields().size());
  byte[] expected = "Nadja".getBytes("UTF-8");
  assertArrayEquals(expected, (byte[]) collector.getFirstRecord().getFirstValue("first_name"));
  assertSame(record, collector.getFirstRecord());
}
// toString converts non-string values (e.g. the int 456) into strings.
@Test
public void testToString() throws Exception {
  morphline = createMorphline("test-morphlines/toString");
  Record record = new Record();
  record.put("first_name", "Nadja");
  Record expected = new Record();
  expected.put("first_name", "Nadja");
  expected.put("source_type", "text/log");
  expected.put("source_type", "text/log2");
  expected.put("source_host", "123");
  expected.put("name", "Nadja");
  expected.put("names", "@{first_name}");
  expected.put("pids", "456");
  expected.put("pids", "hello");
  processAndVerifySuccess(record, expected);
}
// With trim enabled only source_type is trimmed; source_host keeps its
// surrounding whitespace.
@Test
public void testToStringWithTrim() throws Exception {
  morphline = createMorphline("test-morphlines/toStringWithTrim");
  Record record = new Record();
  Record expected = new Record();
  expected.put("source_type", "hello world");
  expected.put("source_host", " hello world ");
  processAndVerifySuccess(record, expected);
}
// Escape sequences in the config must survive parsing as the literal chars.
@Test
public void testCharacterEscaping() throws Exception {
  morphline = createMorphline("test-morphlines/characterEscaping");
  Record record = new Record();
  Record expected = new Record();
  expected.put("foo", "\t");
  expected.put("foo", "\n");
  expected.put("foo", "\r");
  expected.put("foo", "\\.");
  expected.put("foo", String.valueOf((char)1));
  expected.put("foo", "byte[]");
  processAndVerifySuccess(record, expected);
}
@Test
public void testEqualsSuccess() throws Exception {
  morphline = createMorphline("test-morphlines/equalsSuccess");
  Record record = new Record();
  record.put("first_name", "Nadja");
//    record.put("field0", null);
  record.put("field1", "true");
//    record.put("field2", 123);
  record.put("field3", "123");
  record.put("field4", "123");
  record.put("field4", 456);
  record.put("field5", "Nadja");
  processAndVerifySuccess(record, record);
}
@Test
/* Fails because Boolean.TRUE is not equals to the String "true" */
public void testEqualsFailure() throws Exception {
  morphline = createMorphline("test-morphlines/equalsFailure");
  Record record = new Record();
  record.put("field0", true);
  processAndVerifyFailure(record);
}
// contains succeeds when any value of the field matches.
@Test
public void testContainsSuccess() throws Exception {
  morphline = createMorphline("test-morphlines/contains");
  Record record = new Record();
  record.put("food", "veggies");
  record.put("food", "cookie");
  processAndVerifySuccess(record, record);
}
@Test
public void testContainsFailure() throws Exception {
  morphline = createMorphline("test-morphlines/contains");
  Record record = new Record();
  record.put("food", "veggies");
  record.put("food", "xxxxxxxxxxxxxx");
  processAndVerifyFailure(record);
}
// separateAttachments fans one record with N attachment bodies out into N
// records, distributing the parallel mime-type/charset/name values.
@Test
public void testSeparateAttachments() throws Exception {
  morphline = createMorphline("test-morphlines/separateAttachments");
  Record record = new Record();
  record.put(Fields.ATTACHMENT_BODY, "a_foo");
  record.put(Fields.ATTACHMENT_BODY, "a_bar");
  record.put(Fields.ATTACHMENT_BODY, "a_baz");
  record.put(Fields.ATTACHMENT_MIME_TYPE, "m_foo");
  record.put(Fields.ATTACHMENT_MIME_TYPE, "m_bar");
  record.put(Fields.ATTACHMENT_CHARSET, "c_foo");
  record.put(Fields.ATTACHMENT_NAME, "n_foo");
  record.put("first_name", "Nadja");
  processAndVerifySuccess(record,
      ImmutableMultimap.of("first_name", "Nadja", Fields.ATTACHMENT_BODY, "a_foo",
      Fields.ATTACHMENT_MIME_TYPE, "m_foo", Fields.ATTACHMENT_CHARSET, "c_foo", Fields.ATTACHMENT_NAME, "n_foo"),
      ImmutableMultimap.of("first_name", "Nadja", Fields.ATTACHMENT_BODY, "a_bar", Fields.ATTACHMENT_MIME_TYPE, "m_bar"),
      ImmutableMultimap.of("first_name", "Nadja", Fields.ATTACHMENT_BODY, "a_baz")
  );
}
// tryRules executes the first rule whose commands all succeed; here the first
// rule matches and emits two records.
@Test
public void testTryRulesPass() throws Exception {
  morphline = createMorphline("test-morphlines/tryRulesPass");
  Record record = new Record();
  record.put("first_name", "Nadja");
  // parameterized type instead of the raw ArrayList (removes unchecked warning)
  List<Record> expectedList = new ArrayList<Record>();
  for (int i = 0; i < 2; i++) {
    Record expected = record.copy();
    expected.put("foo", "bar");
    expected.replaceValues("iter", i);
    expectedList.add(expected);
  }
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertTrue(morphline.process(record));
  assertEquals(expectedList, collector.getRecords());
  assertNotSame(record, collector.getRecords().get(0));
}
// The first rule fails, so the second rule runs and emits the records.
@Test
public void testTryRulesFail() throws Exception {
  morphline = createMorphline("test-morphlines/tryRulesFail");
  Record record = new Record();
  record.put("first_name", "Nadja");
  List<Record> expectedList = new ArrayList<Record>();
  for (int i = 0; i < 2; i++) {
    Record expected = record.copy();
    expected.put("foo2", "bar2");
    expected.replaceValues("iter2", i);
    expectedList.add(expected);
  }
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertTrue(morphline.process(record));
  assertEquals(expectedList, collector.getRecords());
  assertNotSame(record, collector.getRecords().get(0));
}
// A rule that throws is caught by tryRules and the next rule is attempted.
@Test
public void testTryRulesCatchException() throws Exception {
  morphline = createMorphline("test-morphlines/tryRulesCatchException");
  Record record = new Record();
  record.put("first_name", "Nadja");
  List<Record> expectedList = new ArrayList<Record>();
  for (int i = 0; i < 2; i++) {
    Record expected = record.copy();
    expected.put("foo2", "bar2");
    expected.replaceValues("iter2", i);
    expectedList.add(expected);
  }
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertTrue(morphline.process(record));
  assertEquals(expectedList, collector.getRecords());
  assertNotSame(record, collector.getRecords().get(0));
}
// When every rule fails, tryRules throws and nothing is emitted.
@Test
public void testTryRulesFailTwice() throws Exception {
  morphline = createMorphline("test-morphlines/tryRulesFailTwice");
  Record record = new Record();
  record.put("first_name", "Nadja");
  List<Record> expectedList = new ArrayList<Record>();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  try {
    morphline.process(record);
    fail();
  } catch (MorphlineRuntimeException e) {
    assertTrue(e.getMessage().startsWith("tryRules command found no successful rule for record"));
  }
  assertEquals(expectedList, collector.getRecords());
}
// isTrue is driven by a config variable resolved from a system property;
// when the property is missing, config resolution itself must fail.
@Test
public void testIsTrue() throws Exception {
  System.setProperty("MY_VARIABLE", "true");
  morphline = createMorphline("test-morphlines/isTrue");
  Record record = new Record();
  record.put("isTooYoung", "true");
  processAndVerifySuccess(record, record);
  System.setProperty("MY_VARIABLE", "false");
  morphline = createMorphline("test-morphlines/isTrue");
  processAndVerifyFailure(createBasicRecord());
  System.clearProperty("MY_VARIABLE");
  try {
    morphline = createMorphline("test-morphlines/isTrue");
    fail();
  } catch (ConfigException.UnresolvedSubstitution e) {
    ;
  }
}
@Test
public void testIfThenElseWithThen() throws Exception {
  morphline = createMorphline("test-morphlines/ifThenElseWithThen");
  Record record = createBasicRecord();
  processAndVerifySuccess(record, record);
  assertEquals("then1", collector.getFirstRecord().getFirstValue("state"));
}
// An empty then branch leaves the record untouched by the conditional.
@Test
public void testIfThenElseWithThenEmpty() throws Exception {
  morphline = createMorphline("test-morphlines/ifThenElseWithThenEmpty");
  Record record = createBasicRecord();
  processAndVerifySuccess(record, record);
  assertEquals("init1", collector.getFirstRecord().getFirstValue("state"));
}
@Test
public void testIfThenElseWithElse() throws Exception {
  morphline = createMorphline("test-morphlines/ifThenElseWithElse");
  Record record = createBasicRecord();
  processAndVerifySuccess(record, record);
  assertEquals("else1", collector.getFirstRecord().getFirstValue("state"));
}
@Test
public void testIfThenElseWithElseEmpty() throws Exception {
  morphline = createMorphline("test-morphlines/ifThenElseWithElseEmpty");
  Record record = createBasicRecord();
  processAndVerifySuccess(record, record);
  assertEquals("init1", collector.getFirstRecord().getFirstValue("state"));
}
// not inverts the child's result, so processing reports failure but the
// child's side effect ("state" -> "touched") still happened.
@Test
public void testNotWithTrue() throws Exception {
  morphline = createMorphline("test-morphlines/notWithTrue");
  Record record = createBasicRecord();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertFalse(morphline.process(record));
  assertEquals(record, collector.getFirstRecord());
  assertSame(record, collector.getFirstRecord());
  assertEquals("touched", collector.getFirstRecord().getFirstValue("state"));
}
// not over a failing child reports success, but no records are emitted.
@Test
public void testNotWithFalse() throws Exception {
  morphline = createMorphline("test-morphlines/notWithFalse");
  Record record = createBasicRecord();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertTrue(morphline.process(record));
  assertEquals(Arrays.asList(), collector.getRecords());
}
// readClob slurps the whole attachment into a single message string.
@Test
public void testReadClob() throws Exception {
  morphline = createMorphline("test-morphlines/readClob");
  for (int i = 0; i < 3; i++) {
    Record record = new Record();
    String msg = "foo";
    record.put(Fields.ATTACHMENT_BODY, msg.getBytes("UTF-8"));
    collector.reset();
    startSession();
    assertEquals(1, collector.getNumStartEvents());
    assertTrue(morphline.process(record));
    Record expected = new Record();
    expected.put(Fields.MESSAGE, msg);
    assertEquals(expected, collector.getFirstRecord());
    assertNotSame(record, collector.getFirstRecord());
  }
}
// readCSV with quoting enabled: quoted fields may contain commas and
// embedded newlines, and a missing trailing column is simply absent.
@Test
public void testReadCSV() throws Exception {
  morphline = createMorphline("test-morphlines/readCSV");
  for (int i = 0; i < 3; i++) {
    InputStream in = new FileInputStream(new File(RESOURCES_DIR + "/test-documents/cars2.csv"));
    try {
      Record record = new Record();
      record.put(Fields.ATTACHMENT_BODY, in);
      processAndVerifySuccess(record,
          ImmutableMultimap.of("Age", "Age", "Extras", "Extras", "Type", "Type", "column4", "Used"),
          ImmutableMultimap.of("Age", "2", "Extras", "GPS", "Type", "Gas, with electric", "column4", ""),
          ImmutableMultimap.of("Age", "10", "Extras", "Labeled \"Vintage, 1913\"", "Type", "", "column4", "yes"),
          ImmutableMultimap.of("Age", "100", "Extras", "Labeled \"Vintage 1913\"", "Type", "yes"),
          ImmutableMultimap.of("Age", "5", "Extras", "none", "Type", "This is a\nmulti, line text", "column4", "no\""),
          ImmutableMultimap.of("Age", "6", "Extras", "many", "Type", "Another multi, line text", "column4", "maybe")
      );
    } finally {
      in.close(); // close even if an assertion above fails (was leaked before)
    }
  }
}
// readCSV with quoting disabled: quote characters are passed through verbatim.
@Test
public void testReadCSVWithoutQuoting() throws Exception {
  morphline = createMorphline("test-morphlines/readCSVWithoutQuoting");
  for (int i = 0; i < 3; i++) {
    InputStream in = new FileInputStream(new File(RESOURCES_DIR + "/test-documents/cars.csv"));
    try {
      Record record = new Record();
      record.put(Fields.ATTACHMENT_BODY, in);
      processAndVerifySuccess(record,
          ImmutableMultimap.of("Age", "2", "Extras", "GPS", "Type", "\"Gas", "column4", " with electric\"", "column5", "\"\""),
          ImmutableMultimap.of("Age", "10", "Extras", "\"Labeled \"\"Vintage", "Type", " 1913\"\"\"", "column4", "", "column5", "yes"),
          ImmutableMultimap.of("Age", "100", "Extras", "\"Labeled \"\"Vintage 1913\"\"\"", "Type", "yes"),
          ImmutableMultimap.of("Age", "5", "Extras", "none", "Type", "\"This is a"),
          ImmutableMultimap.of("Age", "multi", "Extras", "no")
      );
    } finally {
      in.close(); // close even if an assertion above fails (was leaked before)
    }
  }
}
// Data-driven CSV test: each line of the expected-values file is
// "recordNumber:col0:col1:..." (lines starting with '#' are comments, "\n"
// and "\r" are unescaped). The test is skipped when the fixture is absent.
@Test
public void testReadCSVDetail() throws Exception {
  File expectedValuesFile = new File(RESOURCES_DIR + "/test-documents/csvdetails-expected-values.txt");
  if (!expectedValuesFile.exists()) {
    return;
  }
  morphline = createMorphline("test-morphlines/readCSVDetails");
  InputStream in = new FileInputStream(new File(RESOURCES_DIR + "/test-documents/csvdetails.csv"));
  try {
    Record record = new Record();
    record.put(Fields.ATTACHMENT_BODY, in);
    startSession();
    assertEquals(1, collector.getNumStartEvents());
    assertTrue(morphline.process(record));
  } finally {
    in.close(); // previously never closed; close even on assertion failure
  }
  Iterator<Record> iter = collector.getRecords().iterator();
  BufferedReader expectedReader = new BufferedReader(new InputStreamReader(new FileInputStream(expectedValuesFile), "UTF-8"));
  long recNum = 0;
  try {
    String line;
    while ((line = expectedReader.readLine()) != null) {
      String[] expectedCols = line.split(":");
      if (line.endsWith(":")) {
        expectedCols = concat(expectedCols, new String[]{""});
      }
      assertTrue("cols.length: " + expectedCols.length, expectedCols.length >= 1);
      if (expectedCols[0].startsWith("#")) {
        continue; // comment line in the fixture
      }
      int expectedRecNum = Integer.parseInt(expectedCols[0]);
      expectedCols = Arrays.copyOfRange(expectedCols, 1, expectedCols.length);
      Record expectedRecord = new Record();
      for (int i = 0; i < expectedCols.length; i++) {
        expectedCols[i] = expectedCols[i].replace("\\n", "\n");
        expectedCols[i] = expectedCols[i].replace("\\r", "\r");
        expectedRecord.put("column" + i, expectedCols[i]);
      }
      // advance through the actual records until the numbered one is found
      while (iter.hasNext()) {
        Record actualRecord = iter.next();
        recNum++;
        //System.out.println("recNum:" + recNum + ":" + actualRecord);
        if (recNum == expectedRecNum) {
          //System.out.println("expect="+expectedRecord);
          //System.out.println("actual="+actualRecord);
          assertEquals(expectedRecord, actualRecord);
          break;
        }
      }
    }
  } finally {
    expectedReader.close(); // close even if an assertion above fails
  }
  assertEquals(30, recNum);
}
@Test
public void testReadLine() throws Exception {
  // Three input lines; the readLine config ignores the first one.
  byte[] payload = "first\nsecond\nthird".getBytes("UTF-8");
  morphline = createMorphline("test-morphlines/readLine"); // uses ignoreFirstLine : true
  Record input = new Record();
  input.put(Fields.ATTACHMENT_BODY, payload);
  processAndVerifySuccess(input,
      ImmutableMultimap.of(Fields.MESSAGE, "second"),
      ImmutableMultimap.of(Fields.MESSAGE, "third")
      );
  // The readLine command must have registered a meter counting the two emitted records.
  boolean counterSeen = false;
  for (Entry<String, Meter> meter : morphContext.getMetricRegistry().getMeters().entrySet()) {
    if (meter.getKey().equals("morphline.readLine." + Metrics.NUM_RECORDS)) {
      assertEquals(2, meter.getValue().getCount());
      counterSeen = true;
    }
  }
  assertTrue(counterSeen);
}
@Test
public void testReadLineWithMimeType() throws Exception {
  // Same three-line payload as testReadLine, but the config also declares a MIME type.
  byte[] payload = "first\nsecond\nthird".getBytes("UTF-8");
  morphline = createMorphline("test-morphlines/readLineWithMimeType"); // uses ignoreFirstLine : true
  Record input = new Record();
  input.put(Fields.ATTACHMENT_BODY, payload);
  processAndVerifySuccess(input,
      ImmutableMultimap.of(Fields.MESSAGE, "second"),
      ImmutableMultimap.of(Fields.MESSAGE, "third")
      );
}
@Test
public void testReadLineWithMimeTypeWildcard() throws Exception {
  // Same as testReadLineWithMimeType but the config matches via a MIME type wildcard.
  byte[] payload = "first\nsecond\nthird".getBytes("UTF-8");
  morphline = createMorphline("test-morphlines/readLineWithMimeTypeWildcard"); // uses ignoreFirstLine : true
  Record input = new Record();
  input.put(Fields.ATTACHMENT_BODY, payload);
  processAndVerifySuccess(input,
      ImmutableMultimap.of(Fields.MESSAGE, "second"),
      ImmutableMultimap.of(Fields.MESSAGE, "third")
      );
}
@Test
public void testReadLineWithMimeTypeMismatch() throws Exception {
  // The config declares a MIME type that does not match the record, so processing must fail.
  byte[] payload = "first\nsecond\nthird".getBytes("UTF-8");
  morphline = createMorphline("test-morphlines/readLineWithMimeTypeMismatch"); // uses ignoreFirstLine : true
  Record input = new Record();
  input.put(Fields.ATTACHMENT_BODY, payload);
  processAndVerifyFailure(input);
}
@Test
public void testReadMultiLineWithWhatPrevious() throws Exception {
  // Folds continuation lines (what : previous) into the preceding event; the full
  // stack trace must come out as one long multi-line message.
  morphline = createMorphline("test-morphlines/readMultiLine");
  InputStream in = new FileInputStream(new File(RESOURCES_DIR + "/test-documents/multiline-stacktrace.log"));
  try {
    Record record = new Record();
    record.put(Fields.ATTACHMENT_BODY, in);
    String multiLineEvent = Files.toString(new File(RESOURCES_DIR + "/test-documents/multiline-stacktrace-expected-long-event.log"), Charsets.UTF_8);
    processAndVerifySuccess(record,
        ImmutableMultimap.of(Fields.MESSAGE, "juil. 25, 2012 10:49:46 AM hudson.triggers.SafeTimerTask run"),
        ImmutableMultimap.of(Fields.MESSAGE, multiLineEvent),
        ImmutableMultimap.of(Fields.MESSAGE, "juil. 25, 2012 10:49:54 AM hudson.slaves.SlaveComputer tryReconnect"),
        ImmutableMultimap.of(Fields.MESSAGE, "Infos: Attempting to reconnect CentosVagrant")
        );
  } finally {
    in.close(); // close even when an assertion above throws, so the fd is not leaked
  }
}
@Test
public void testReadMultiLineWithWhatNext() throws Exception {
  // Folds lines forward (what : next): each event absorbs its following continuation lines.
  morphline = createMorphline("test-morphlines/readMultiLineWithWhatNext");
  InputStream in = new FileInputStream(new File(RESOURCES_DIR + "/test-documents/multiline-sessions.log"));
  try {
    Record record = new Record();
    record.put(Fields.ATTACHMENT_BODY, in);
    processAndVerifySuccess(record,
        ImmutableMultimap.of(Fields.MESSAGE, "Started GET /foo" + "\n Foo Started GET as HTML" + "\nCompleted 401 Unauthorized in 0ms" + "\n\n"),
        ImmutableMultimap.of(Fields.MESSAGE, "Started GET /bar" + "\n Bar as HTML" + "\nCompleted 200 OK in 339ms"),
        ImmutableMultimap.of(Fields.MESSAGE, "Started GET /baz")
        );
  } finally {
    in.close(); // close even when an assertion above throws, so the fd is not leaked
  }
}
@Test
public void testJavaHelloWorld() throws Exception {
  // The inline Java command appends a "world" value to the existing "hello" tag.
  morphline = createMorphline("test-morphlines/javaHelloWorld");
  Record input = new Record();
  input.put("tags", "hello");
  Record want = new Record();
  want.put("tags", "hello");
  want.put("tags", "world");
  processAndVerifySuccess(input, want);
}
@Test
public void testJavaRuntimeException() throws Exception {
  // The inline Java script throws at runtime; process() must surface it as a
  // MorphlineRuntimeException and emit no records.
  morphline = createMorphline("test-morphlines/javaRuntimeException");
  Record input = new Record();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  try {
    morphline.process(input);
    fail();
  } catch (MorphlineRuntimeException e) {
    assertTrue(e.getMessage().startsWith("Cannot execute script"));
  }
  assertEquals(Arrays.asList(), collector.getRecords());
}
@Test
public void testJavaCompilationException() throws Exception {
  // A morphline whose inline Java does not compile must be rejected when the
  // morphline itself is built, not at process() time.
  Config cfg = parse("test-morphlines/javaCompilationException");
  try {
    createMorphline(cfg);
    fail();
  } catch (MorphlineCompilationException e) {
    assertTrue(e.getMessage().startsWith("Cannot compile script"));
  }
}
@Test
public void testGenerateUUID() throws Exception {
// default variant: uses the "generateUUID" morphline config (empty suffix)
testGenerateUUID("");
}
@Test
public void testGenerateUUIDSecure() throws Exception {
// variant selected by the "generateUUIDSecure" morphline config
testGenerateUUID("Secure");
}
// Shared driver for the generateUUID tests: runs the config variant selected by
// the suffix and verifies exactly one 36-character "id" value was produced.
private void testGenerateUUID(String suffix) throws Exception {
  morphline = createMorphline("test-morphlines/generateUUID" + suffix);
  Record input = new Record();
  collector.reset();
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertTrue(morphline.process(input));
  Record output = collector.getFirstRecord();
  assertEquals(1, output.get("id").size());
  String generated = (String) output.getFirstValue("id");
  assertEquals(36, generated.length()); // canonical UUID string length
}
@Test
public void testGrokSyslogMatch() throws Exception {
// plain variant: output goes into a copied record
testGrokSyslogMatchInternal(false, false);
}
@Test
public void testGrokSyslogMatchInplace() throws Exception {
// inplace variant: the input record itself is modified
testGrokSyslogMatchInternal(true, false);
}
@Test
public void testGrokSyslogMatchInplaceTwoExpressions() throws Exception {
// inplace variant with two grok expressions; only the "atLeastOnce" policy is exercised
testGrokSyslogMatchInternal(true, true, "atLeastOnce");
}
// Shared driver for the grokSyslogMatch* tests. For each numRequiredMatches policy it
// runs the morphline variant selected by inplace/twoExpressions against three scenarios:
// a clean match, a non-matching message, and a record whose fields each occur twice.
private void testGrokSyslogMatchInternal(boolean inplace, boolean twoExpressions, String... numRequiredMatchesParams) throws Exception {
if (numRequiredMatchesParams.length == 0) {
// default: exercise all three match policies
numRequiredMatchesParams = new String[] {"atLeastOnce", "all", "once"};
}
for (String numRequiredMatches : numRequiredMatchesParams) {
// config file name is assembled from the variant flags, e.g. grokSyslogMatchInplaceTwoExpressions
morphline = createMorphline(
"test-morphlines/grokSyslogMatch"
+ (inplace ? "Inplace" : "")
+ (twoExpressions ? "TwoExpressions" : "")
+ "",
ConfigFactory.parseMap(ImmutableMap.of("numRequiredMatches", numRequiredMatches)));
Record record = new Record();
String msg = "<164>Feb 4 10:46:14 syslog sshd[607]: Server listening on 0.0.0.0 port 22.";
record.put(Fields.MESSAGE, msg);
String id = "myid";
record.put(Fields.ID, id);
collector.reset();
startSession();
assertEquals(1, collector.getNumStartEvents());
assertTrue(morphline.process(record));
// scenario 1: clean match — all syslog sub-fields must be extracted
Record expected = new Record();
expected.put(Fields.MESSAGE, msg);
expected.put(Fields.ID, id);
expected.put("syslog_pri", "164");
expected.put("syslog_timestamp", "Feb 4 10:46:14");
expected.put("syslog_hostname", "syslog");
expected.put("syslog_program", "sshd");
expected.put("syslog_pid", "607");
expected.put("syslog_message", "Server listening on 0.0.0.0 port 22.");
assertEquals(expected, collector.getFirstRecord());
if (inplace) {
// inplace variant mutates and forwards the very same record instance
assertSame(record, collector.getFirstRecord());
} else {
assertNotSame(record, collector.getFirstRecord());
}
// mismatch
collector.reset();
record = new Record();
record.put(Fields.MESSAGE, "foo" + msg);
startSession();
assertEquals(1, collector.getNumStartEvents());
assertFalse(morphline.process(record));
assertEquals(Arrays.asList(), collector.getRecords());
// double match
collector.reset();
record = new Record();
record.put(Fields.MESSAGE, msg);
record.put(Fields.MESSAGE, msg);
record.put(Fields.ID, id);
record.put(Fields.ID, id);
startSession();
assertEquals(1, collector.getNumStartEvents());
if ("once".equals(numRequiredMatches)) {
// "once" demands exactly one match per field, so duplicated fields must fail
assertFalse(morphline.process(record));
} else {
assertTrue(morphline.process(record));
// duplicate the expected values to mirror the doubled input fields
Record tmp = expected.copy();
for (Map.Entry<String, Object> entry : tmp.getFields().entries()) {
expected.put(entry.getKey(), entry.getValue());
}
assertEquals(expected, collector.getFirstRecord());
if (inplace) {
assertSame(record, collector.getFirstRecord());
} else {
assertNotSame(record, collector.getFirstRecord());
}
}
}
}
@Test
public void testGrokFindSubstrings() throws Exception {
// plain variant: output goes into a copied record
testGrokFindSubstringsInternal(false, false);
}
@Test
public void testGrokFindSubstringsInplace() throws Exception {
// inplace variant: the input record itself is modified
testGrokFindSubstringsInternal(true, false);
}
@Test
public void testGrokFindSubstringsInplaceTwoExpressions() throws Exception {
// inplace variant with two grok expressions
testGrokFindSubstringsInternal(true, true);
}
// Shared driver for the grokFindSubstrings* tests: extracts every word of a
// tab-separated message into repeated "word" values, then verifies that an
// empty message fails to match.
private void testGrokFindSubstringsInternal(boolean inplace, boolean twoExpressions) throws Exception {
// match
// config file name is assembled from the variant flags
morphline = createMorphline(
"test-morphlines/grokFindSubstrings"
+ (inplace ? "Inplace" : "")
+ (twoExpressions ? "TwoExpressions" : "")
+ "");
Record record = new Record();
String msg = "hello\t\tworld\tfoo";
record.put(Fields.MESSAGE, msg);
String id = "myid";
record.put(Fields.ID, id);
startSession();
assertEquals(1, collector.getNumStartEvents());
assertTrue(morphline.process(record));
Record expected = new Record();
expected.put(Fields.MESSAGE, msg);
expected.put(Fields.ID, id);
expected.put("word", "hello");
expected.put("word", "world");
expected.put("word", "foo");
assertEquals(expected, collector.getFirstRecord());
if (inplace) {
// inplace variant forwards the same record instance it was given
assertSame(record, collector.getFirstRecord());
} else {
assertNotSame(record, collector.getFirstRecord());
}
// mismatch
collector.reset();
record = new Record();
record.put(Fields.MESSAGE, "");
record.put(Fields.ID, id);
startSession();
assertEquals(1, collector.getNumStartEvents());
assertFalse(morphline.process(record));
assertEquals(Arrays.asList(), collector.getRecords());
}
@Test
public void testGrokSeparatedValues() throws Exception {
  // Sanity-checks the regex idiom used by grok configs to split a tab-separated
  // string into its values via a named capture group.
  String msg = "hello\tworld\tfoo";
  Pattern pattern = Pattern.compile("(?<word>.+?)(\\t|\\z)");
  Matcher matcher = pattern.matcher(msg);
  List<String> results = new ArrayList<String>(); // was a raw ArrayList (unchecked warning)
  while (matcher.find()) {
    results.add(matcher.group(1));
  }
  assertEquals(Arrays.asList("hello", "world", "foo"), results);
}
@Test
public void testGrokSyslogNgCisco() throws Exception {
  // A Cisco-flavoured syslog-ng line; the grok config must decompose the
  // %product-level-subfacility-id message code into its parts.
  morphline = createMorphline("test-morphlines/grokSyslogNgCisco");
  String line = "<179>Jun 10 04:42:51 www.foo.com Jun 10 2013 04:42:51 : %myproduct-3-mysubfacility-251010: " +
      "Health probe failed for server 1.2.3.4 on port 8083, connection refused by server";
  Record input = new Record();
  input.put(Fields.MESSAGE, line);
  assertTrue(morphline.process(input));
  Record want = new Record();
  want.put(Fields.MESSAGE, line);
  want.put("cisco_message_code", "%myproduct-3-mysubfacility-251010");
  want.put("cisco_product", "myproduct");
  want.put("cisco_level", "3");
  want.put("cisco_subfacility", "mysubfacility");
  want.put("cisco_message_id", "251010");
  want.put("syslog_message", "%myproduct-3-mysubfacility-251010: Health probe failed for server 1.2.3.4 " +
      "on port 8083, connection refused by server");
  assertEquals(want, collector.getFirstRecord());
  assertNotSame(input, collector.getFirstRecord()); // output is a copy, not the input record
}
// NOTE(review): this method has no @Test annotation, so JUnit never runs it — it is
// dead code. It also looks internally inconsistent: despite the "WithoutSubFacility"
// name, the input message DOES contain "mysubfacility", while the expected
// cisco_message_code omits it and cisco_subfacility is commented out below.
// Before enabling it, the fixture message and expectations need to be reconciled.
public void testGrokSyslogNgCiscoWithoutSubFacility() throws Exception {
morphline = createMorphline("test-morphlines/grokSyslogNgCisco");
Record record = new Record();
String msg = "<179>Jun 10 04:42:51 www.foo.com Jun 10 2013 04:42:51 : %myproduct-3-mysubfacility-251010: " +
"Health probe failed for server 1.2.3.4 on port 8083, connection refused by server";
record.put(Fields.MESSAGE, msg);
assertTrue(morphline.process(record));
Record expected = new Record();
expected.put(Fields.MESSAGE, msg);
// NOTE(review): code below lacks the subfacility segment present in msg — confirm intent
expected.put("cisco_message_code", "%myproduct-3-251010");
expected.put("cisco_product", "myproduct");
expected.put("cisco_level", "3");
// expected.put("cisco_subfacility", "mysubfacility");
expected.put("cisco_message_id", "251010");
expected.put("syslog_message", "%myproduct-3-mysubfacility-251010: Health probe failed for server 1.2.3.4 " +
"on port 8083, connection refused by server");
assertEquals(expected, collector.getFirstRecord());
assertNotSame(record, collector.getFirstRecord());
}
@Test
public void testGrokEmail() throws Exception {
// Extracts RFC-822 style headers and a body snippet from a raw email fixture file.
morphline = createMorphline("test-morphlines/grokEmail");
Record record = new Record();
byte[] bytes = Files.toByteArray(new File(RESOURCES_DIR + "/test-documents/email.txt"));
record.put(Fields.ATTACHMENT_BODY, bytes);
assertTrue(morphline.process(record));
Record expected = new Record();
// the whole raw email also becomes the message field, decoded as UTF-8
String msg = new String(bytes, "UTF-8"); //.replaceAll("(\r)?\n", "\n");
expected.put(Fields.MESSAGE, msg);
expected.put("message_id", "12345.6789.JavaMail.foo@bar");
expected.put("date", "Wed, 6 Feb 2012 06:06:05 -0800");
expected.put("from", "foo@bar.com");
expected.put("to", "baz@bazoo.com");
expected.put("subject", "WEDNESDAY WEATHER HEADLINES");
expected.put("from_names", "Foo Bar <foo@bar.com>@xxx");
expected.put("to_names", "'Weather News Distribution' <wfoo@bar.com>");
expected.put("text",
"Average 1 to 3- degrees above normal: Mid-Atlantic, Southern Plains.." +
"\nAverage 4 to 6-degrees above normal: Ohio Valley, Rockies, Central Plains");
assertEquals(expected, collector.getFirstRecord());
assertNotSame(record, collector.getFirstRecord());
}
@Test
public void testConvertTimestamp() throws Exception {
// Parses ts1 values in several input formats and reformats them.
// NOTE(review): the expected values carry a fixed -0700 offset, so this test
// presumably depends on the morphline config's output timezone (looks like
// US Pacific) rather than the host default — confirm against the config file.
morphline = createMorphline("test-morphlines/convertTimestamp");
Record record = new Record();
record.put("ts1", "2011-09-06T14:14:34.789Z"); // "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'"
record.put("ts1", "2012-09-06T14:14:34");
record.put("ts1", "2013-09-06");
Record expected = new Record();
expected.put("ts1", "2011-09-06T07:14:34.789-0700");
expected.put("ts1", "2012-09-06T07:14:34.000-0700");
expected.put("ts1", "2013-09-05T17:00:00.000-0700");
processAndVerifySuccess(record, expected);
}
@Test
public void testDecodeBase64() throws Exception {
  // "SGVsbG8gV29ybGQ=" is the base64 encoding of "Hello World".
  morphline = createMorphline("test-morphlines/decodeBase64");
  Record input = new Record();
  input.put("data", "SGVsbG8gV29ybGQ=");
  startSession();
  assertEquals(1, collector.getNumStartEvents());
  assertTrue(morphline.process(input));
  byte[] decoded = (byte[]) collector.getFirstRecord().getFirstValue("data");
  assertArrayEquals("Hello World".getBytes(Charsets.UTF_8), decoded);
  assertSame(input, collector.getFirstRecord()); // decoding happens in place on the input record
}
@Test
public void testFindReplace() throws Exception {
  checkFindReplace("test-morphlines/findReplace", "replaceFirst : false", "hello I see world I see");
}
@Test
public void testFindReplaceWithReplaceFirst() throws Exception {
  checkFindReplace("test-morphlines/findReplace", "replaceFirst : true", "hello I see world ic");
}
@Test
public void testFindReplaceWithGrok() throws Exception {
  checkFindReplace("test-morphlines/findReplaceWithGrok", "replaceFirst : false", "hello! ic! world! ic!");
}
@Test
public void testFindReplaceWithRegex() throws Exception {
  checkFindReplace("test-morphlines/findReplaceWithRegex", null, "hello! ic! world! ic!");
}
@Test
public void testFindReplaceWithGrokWithReplaceFirst() throws Exception {
  checkFindReplace("test-morphlines/findReplaceWithGrok", "replaceFirst : true", "hello! ic world ic");
}
// Shared driver for the findReplace tests: runs the given morphline config
// (optionally with a HOCON override such as "replaceFirst : true") over the
// fixed input "hello ic world ic" and verifies the replaced text.
private void checkFindReplace(String configName, String overrideStr, String expectedText) throws Exception {
  if (overrideStr == null) {
    morphline = createMorphline(configName);
  } else {
    morphline = createMorphline(configName, ConfigFactory.parseString(overrideStr));
  }
  Record input = new Record();
  input.put("text", "hello ic world ic");
  Record want = new Record();
  want.put("text", expectedText);
  processAndVerifySuccess(input, want);
}
@Test
public void testSplit() throws Exception {
  // trims whitespace around the separator
  checkSplit("test-morphlines/split", " _a ,_b_ ,c__", "_a", "_b_", "c__");
}
@Test
public void testSplitWithMultipleChars() throws Exception {
  // multi-character separator; leading blank of the first token is preserved
  checkSplit("test-morphlines/splitWithMultipleChars", " _a ,_b_ ,c__", " _a", "_b_", "c__");
}
@Test
public void testSplitWithEdgeCases() throws Exception {
  // leading/trailing separators yield empty tokens
  checkSplit("test-morphlines/splitWithEdgeCases", ",, _a ,_b_ ,,", "", "", "_a", "_b_", "", "");
}
@Test
public void testSplitWithGrok() throws Exception {
  // same splitting expressed through a grok pattern
  checkSplit("test-morphlines/splitWithGrok", " _a ,_b_ ,c__", " _a", "_b_", "c__");
}
@Test
public void testSplitWithOutputFields() throws Exception {
  // the config maps selected columns to named fields and drops the rest
  morphline = createMorphline("test-morphlines/splitWithOutputFields");
  String line = " _a ,_b_ , ,c__,d";
  Record input = new Record();
  input.put(Fields.MESSAGE, line);
  Record want = new Record();
  want.put(Fields.MESSAGE, line);
  want.put("col0", "_a");
  want.put("col2", "c__");
  processAndVerifySuccess(input, want);
}
// Shared driver: runs the given split morphline over msg and verifies the
// resulting "output" values in order.
private void checkSplit(String configName, String msg, String... outputs) throws Exception {
  morphline = createMorphline(configName);
  Record input = new Record();
  input.put(Fields.MESSAGE, msg);
  Record want = new Record();
  want.put(Fields.MESSAGE, msg);
  for (String output : outputs) {
    want.put("output", output);
  }
  processAndVerifySuccess(input, want);
}
@Test
public void testSplitKeyValue() throws Exception {
  // Splits "key=value" pairs, trimming whitespace around key and value;
  // entries without '=' produce no output field.
  morphline = createMorphline("test-morphlines/splitKeyValue");
  Record input = new Record();
  input.put("params", "foo=x");
  input.put("params", " foo = y ");
  input.put("params", "foo ");
  input.put("params", "fragment=z");
  Record want = new Record();
  want.getFields().putAll("params", input.get("params"));
  want.put("/foo", "x");
  want.put("/foo", "y");
  want.put("/fragment", "z");
  processAndVerifySuccess(input, want);
}
@Test
public void testSplitKeyValueWithIPTables() throws Exception {
  // An iptables-style log line: KEY=VALUE tokens become fields, flag tokens
  // without '=' (ACK, DF) are dropped.
  morphline = createMorphline("test-morphlines/splitKeyValueWithIPTables");
  String line = "Feb 6 12:04:42 IN=eth1 OUT=eth0 SRC=1.2.3.4 DST=6.7.8.9 ACK DF WINDOW=0";
  Record input = new Record();
  input.put(Fields.ATTACHMENT_BODY, line.getBytes("UTF-8"));
  Record want = new Record();
  want.put(Fields.MESSAGE, line);
  want.put("timestamp", "Feb 6 12:04:42");
  want.put("IN", "eth1");
  want.put("OUT", "eth0");
  want.put("SRC", "1.2.3.4");
  want.put("DST", "6.7.8.9");
  want.put("WINDOW", "0");
  // NOTE(review): the trailing false presumably relaxes one of the helper's
  // extra checks (e.g. the not-same-instance comparison) — confirm its semantics
  processAndVerifySuccess(input, want, false);
}
@Test
public void testStartReportingMetricsToCSV() throws Exception {
// Verifies that the startReportingMetricsToCSV command writes growing CSV files
// to both configured output directories while active, and stops writing after
// shutdown (which must also be idempotent).
File testMetricsOutput1 = new File("target/testMetricsOutput1/morphline.logDebug.numProcessCalls.csv");
File testMetricsOutput2 = new File("target/testMetricsOutput2/morphline.logDebug.numProcessCalls.csv");
// start from a clean slate so stale files from earlier runs cannot pass the test
FileUtils.deleteDirectory(testMetricsOutput1.getParentFile());
FileUtils.deleteDirectory(testMetricsOutput2.getParentFile());
assertFalse(testMetricsOutput1.getParentFile().exists());
assertFalse(testMetricsOutput2.getParentFile().exists());
morphline = createMorphline("test-morphlines/startReportingMetricsToCSV");
try {
Record record = new Record();
String msg = "foo";
record.put(Fields.MESSAGE, msg);
Record expected = new Record();
expected.put(Fields.MESSAGE, msg);
processAndVerifySuccess(record, expected);
// verify reporter is active, i.e. verify reporter is writing to file
waitForFileLengthGreaterThan(testMetricsOutput1, 0);
waitForFileLengthGreaterThan(testMetricsOutput2, 0);
assertTrue(testMetricsOutput1.isFile());
assertTrue(testMetricsOutput2.isFile());
long len1 = testMetricsOutput1.length();
long len2 = testMetricsOutput2.length();
assertTrue(len1 > 0);
assertTrue(len2 > 0);
// both files must keep growing while the reporter runs
for (int i = 0; i < 2; i++) {
waitForFileLengthGreaterThan(testMetricsOutput1, len1);
waitForFileLengthGreaterThan(testMetricsOutput2, len2);
long len1b = testMetricsOutput1.length();
long len2b = testMetricsOutput2.length();
assertTrue(len1b > len1);
assertTrue(len2b > len2);
len1 = len1b;
len2 = len2b;
}
Notifications.notifyShutdown(morphline);
// verify reporter is shutdown, i.e. verify reporter isn't writing to file anymore
len1 = testMetricsOutput1.length();
len2 = testMetricsOutput2.length();
for (int i = 0; i < 2; i++) {
Thread.sleep(200); // give a still-running reporter a chance to (wrongly) write
assertEquals(len1, testMetricsOutput1.length());
assertEquals(len2, testMetricsOutput2.length());
Notifications.notifyShutdown(morphline); // repeated shutdown must be a no-op
}
} finally {
Notifications.notifyShutdown(morphline);
}
}
// Polls (for up to ~1 second) until the file grows beyond minLength; returns
// either way — callers assert on the resulting length themselves.
private void waitForFileLengthGreaterThan(File file, long minLength) throws InterruptedException {
  long deadline = System.currentTimeMillis() + 1000;
  while (file.length() <= minLength) {
    if (System.currentTimeMillis() > deadline) {
      return; // timed out
    }
    Thread.sleep(10);
  }
}
@Test
public void testStartReportingMetricsToJMX() throws Exception {
// Verifies that the startReportingMetricsToJMX command registers the expected
// MBeans on compile, updates their counters during processing, and unregisters
// them on shutdown (idempotently).
MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
ObjectName obj1Name = new ObjectName("domain1", "name", "morphline.logDebug.numProcessCalls");
ObjectName obj2Name = new ObjectName("domain2", "name", "morphline.logDebug.numProcessCalls");
ObjectName timerName = new ObjectName("domain1", "name", "myMetrics.myTimer");
ObjectName timer2Name = new ObjectName("domain1", "name", "myMetrics.myTimer2");
// precondition: none of the MBeans exist before the morphline is compiled
assertMBeanInstanceNotFound(obj1Name, mBeanServer);
assertMBeanInstanceNotFound(obj2Name, mBeanServer);
assertMBeanInstanceNotFound(timerName, mBeanServer);
assertMBeanInstanceNotFound(timer2Name, mBeanServer);
morphline = createMorphline("test-morphlines/startReportingMetricsToJMX");
// compiling registers the meters (count 0); the timers appear only after processing
assertEquals(0L, mBeanServer.getAttribute(obj1Name, "Count"));
mBeanServer.getMBeanInfo(obj1Name);
assertEquals(0L, mBeanServer.getAttribute(obj2Name, "Count"));
mBeanServer.getMBeanInfo(obj2Name);
assertMBeanInstanceNotFound(timerName, mBeanServer);
assertMBeanInstanceNotFound(timer2Name, mBeanServer);
Record record = new Record();
String msg = "foo";
record.put(Fields.MESSAGE, msg);
Record expected = new Record();
expected.put(Fields.MESSAGE, msg);
processAndVerifySuccess(record, expected);
// verify reporter is active
assertEquals(2L, mBeanServer.getAttribute(obj1Name, "Count"));
assertEquals("events/second", mBeanServer.getAttribute(obj1Name, "RateUnit"))
assertEquals("events/second", mBeanServer.getAttribute(obj1Name, "RateUnit"));
mBeanServer.getMBeanInfo(obj1Name);
assertEquals(2L, mBeanServer.getAttribute(obj2Name, "Count"));
assertEquals("events/second", mBeanServer.getAttribute(obj2Name, "RateUnit"));
mBeanServer.getMBeanInfo(obj2Name);
assertEquals(1L, mBeanServer.getAttribute(timerName, "Count"));
assertEquals("milliseconds", mBeanServer.getAttribute(timerName, "DurationUnit"));
assertEquals("events/millisecond", mBeanServer.getAttribute(timerName, "RateUnit"));
mBeanServer.getMBeanInfo(timerName);
assertEquals(1L, mBeanServer.getAttribute(timer2Name, "Count"));
assertEquals("milliseconds", mBeanServer.getAttribute(timer2Name, "DurationUnit"));
assertEquals("events/minute", mBeanServer.getAttribute(timer2Name, "RateUnit"));
mBeanServer.getMBeanInfo(timerName);
// verify reporter is shutdown; a second shutdown must remain a no-op
for (int i = 0; i < 2; i++) {
Notifications.notifyShutdown(morphline);
assertMBeanInstanceNotFound(obj1Name, mBeanServer);
assertMBeanInstanceNotFound(obj2Name, mBeanServer);
assertMBeanInstanceNotFound(timerName, mBeanServer);
assertMBeanInstanceNotFound(timer2Name, mBeanServer);
}
}
@Test
public void testStartReportingMetricsToSLF4J() throws Exception {
  // Smoke test: processing works with the SLF4J reporter attached, and
  // shutting the morphline down twice is harmless.
  morphline = createMorphline("test-morphlines/startReportingMetricsToSLF4J");
  Record input = new Record();
  input.put(Fields.MESSAGE, "foo");
  Record want = new Record();
  want.put(Fields.MESSAGE, "foo");
  processAndVerifySuccess(input, want);
  Notifications.notifyShutdown(morphline);
  Notifications.notifyShutdown(morphline);
}
// Asserts that objName is not registered on the given MBean server: both the
// metadata lookup and an attribute read must fail with InstanceNotFoundException.
private void assertMBeanInstanceNotFound(ObjectName objName, MBeanServer mBeanServer)
throws IntrospectionException, ReflectionException, AttributeNotFoundException, MBeanException {
  try {
    mBeanServer.getMBeanInfo(objName);
    fail();
  } catch (InstanceNotFoundException expectedException) {
    // expected: the MBean must be absent
  }
  try {
    mBeanServer.getAttribute(objName, "Count");
    fail();
  } catch (InstanceNotFoundException expectedException) {
    // expected: the MBean must be absent
  }
}
@Test
public void testTranslate() throws Exception {
  morphline = createMorphline("test-morphlines/translate");
  // Each pair: input "level" value (String or Integer) -> expected translated value.
  // 999 has no mapping and falls back to "unknown".
  Object[][] cases = {
      {"0", "Emergency"},
      {0, "Emergency"},
      {"1", "Alert"},
      {1, "Alert"},
      {999, "unknown"},
  };
  Record input = new Record();
  Record want = new Record();
  for (Object[] testCase : cases) {
    input.replaceValues("level", testCase[0]);
    want.replaceValues("level", testCase[1]);
    processAndVerifySuccess(input, want);
  }
}
@Test
public void testTranslateFailure() throws Exception {
  // 999 has no mapping and this config defines no fallback, so the record is rejected.
  morphline = createMorphline("test-morphlines/translateFailure");
  Record input = new Record();
  input.replaceValues("level", 999);
  processAndVerifyFailure(input);
}
@Test
public void testConvertTimestampEmpty() throws Exception {
  // A record without any ts1 field passes through unchanged.
  morphline = createMorphline("test-morphlines/convertTimestamp");
  processAndVerifySuccess(new Record(), new Record());
}
@Test
public void testConvertTimestampBad() throws Exception {
  // An unparseable ts1 value must route the record to the failure path.
  morphline = createMorphline("test-morphlines/convertTimestamp");
  Record input = new Record();
  input.put("ts1", "this is an invalid timestamp");
  processAndVerifyFailure(input);
}
@Test
public void testConvertTimestampWithDefaults() throws Exception {
  // With default settings each input/output pair normalizes to full
  // millisecond-precision UTC ISO-8601.
  morphline = createMorphline("test-morphlines/convertTimestampWithDefaults");
  Record input = new Record();
  Record want = new Record();
  input.put(Fields.TIMESTAMP, "2011-09-06T14:14:34.789Z");
  want.put(Fields.TIMESTAMP, "2011-09-06T14:14:34.789Z");
  input.put(Fields.TIMESTAMP, "2012-09-06T14:14:34");
  want.put(Fields.TIMESTAMP, "2012-09-06T14:14:34.000Z");
  input.put(Fields.TIMESTAMP, "2013-09-06");
  want.put(Fields.TIMESTAMP, "2013-09-06T00:00:00.000Z");
  processAndVerifySuccess(input, want);
}
@Test
public void testConvertTimestampWithBadTimezone() throws Exception {
  // An unknown timezone id must be rejected when the morphline is compiled,
  // not later at process() time.
  Config cfg = parse("test-morphlines/convertTimestampWithBadTimezone");
  try {
    createMorphline(cfg);
    fail();
  } catch (MorphlineCompilationException e) {
    assertTrue(e.getMessage().startsWith("Unknown timezone"));
  }
}
@Test
public void testConvertTimestampWithInputFormatUnixTimeInMillis() throws Exception {
  checkTimestampConversion("test-morphlines/convertTimestampWithInputFormatUnixTimeInMillis",
      "0", "1370636123501",
      "1970-01-01T00:00:00.000Z", "2013-06-07T20:15:23.501Z");
}
@Test
public void testConvertTimestampWithInputFormatUnixTimeInSeconds() throws Exception {
  checkTimestampConversion("test-morphlines/convertTimestampWithInputFormatUnixTimeInSeconds",
      "0", "1370636123",
      "1970-01-01T00:00:00.000Z", "2013-06-07T20:15:23.000Z");
}
@Test
public void testConvertTimestampWithOutputFormatUnixTimeInMillis() throws Exception {
  checkTimestampConversion("test-morphlines/convertTimestampWithOutputFormatUnixTimeInMillis",
      "1970-01-01T00:00:00.000Z", "2013-06-07T20:15:23.501Z",
      "0", "1370636123501");
}
@Test
public void testConvertTimestampWithOutputFormatUnixTimeInSeconds() throws Exception {
  // seconds granularity: the .501 millis fraction is truncated
  checkTimestampConversion("test-morphlines/convertTimestampWithOutputFormatUnixTimeInSeconds",
      "1970-01-01T00:00:00.000Z", "2013-06-07T20:15:23.501Z",
      "0", "1370636123");
}
// Shared driver for the unix-time conversion tests: feeds two ts1 values through
// the given morphline config and verifies both converted outputs in order.
private void checkTimestampConversion(String configName, String in1, String in2, String out1, String out2) throws Exception {
  morphline = createMorphline(configName);
  Record input = new Record();
  input.put("ts1", in1);
  input.put("ts1", in2);
  Record want = new Record();
  want.put("ts1", out1);
  want.put("ts1", out2);
  processAndVerifySuccess(input, want);
}
@Test
public void testExtractURIComponents() throws Exception {
  // Every java.net.URI accessor must surface as a uri_component_* field.
  String uriStr = "http://user-info@www.fool.com:8080/errors.log?foo=x&foo=y&foo=z#fragment";
  morphline = createMorphline("test-morphlines/extractURIComponents");
  URI uri = new URI(uriStr);
  String prefix = "uri_component_";
  Record input = new Record();
  input.put("uri", uriStr);
  Record want = new Record();
  want.put("uri", uriStr);
  want.put(prefix + "scheme", uri.getScheme());
  want.put(prefix + "authority", uri.getAuthority());
  want.put(prefix + "path", uri.getPath());
  want.put(prefix + "query", uri.getQuery());
  want.put(prefix + "fragment", uri.getFragment());
  want.put(prefix + "host", uri.getHost());
  want.put(prefix + "port", uri.getPort());
  want.put(prefix + "schemeSpecificPart", uri.getSchemeSpecificPart());
  want.put(prefix + "userInfo", uri.getUserInfo());
  processAndVerifySuccess(input, want);
  // a syntactically invalid URI must route the record to the failure path
  Record bad = new Record();
  bad.put("uri", "invalidURI:");
  processAndVerifyFailure(bad);
}
@Test
public void testExtractURIComponent() throws Exception {
// Runs the single-component extractor once per supported component name and
// checks the morphline output against the matching java.net.URI accessor.
String uriStr = "http://user-info@www.fool.com:8080/errors.log?foo=x&foo=y&foo=z#fragment";
URI uri = new URI(uriStr);
testExtractURIComponent2(uriStr, "scheme", uri.getScheme());
testExtractURIComponent2(uriStr, "authority", uri.getAuthority());
testExtractURIComponent2(uriStr, "path", uri.getPath());
testExtractURIComponent2(uriStr, "query", uri.getQuery());
testExtractURIComponent2(uriStr, "fragment", uri.getFragment());
testExtractURIComponent2(uriStr, "host", uri.getHost());
testExtractURIComponent2(uriStr, "port", uri.getPort());
testExtractURIComponent2(uriStr, "schemeSpecificPart", uri.getSchemeSpecificPart());
testExtractURIComponent2(uriStr, "userInfo", uri.getUserInfo());
// an unsupported component name must fail at morphline compile time
try {
testExtractURIComponent2(uriStr, "illegalType", uri.getUserInfo());
fail();
} catch (MorphlineCompilationException e) {
; // expected
}
// a malformed URI must route the record to the failure path
testExtractURIComponent2("invalidURI:", "host", uri.getHost(), false);
}
// Convenience overload: expects successful extraction.
private void testExtractURIComponent2(String uriStr, String component, Object expectedComponent) throws Exception {
  testExtractURIComponent2(uriStr, component, expectedComponent, true);
}
// Compiles the extractURIComponent morphline with the component name injected as a
// config variable, then verifies either the extracted value or the failure path.
private void testExtractURIComponent2(String uriStr, String component, Object expectedComponent, boolean success) throws Exception {
  morphline = createMorphline(
      "test-morphlines/extractURIComponent",
      ConfigFactory.parseMap(ImmutableMap.of("component", component)));
  Record input = new Record();
  input.put("uri", uriStr);
  if (!success) {
    processAndVerifyFailure(input);
    return;
  }
  Record want = new Record();
  want.put("uri", uriStr);
  want.put("output", expectedComponent);
  processAndVerifySuccess(input, want);
}
@Test
public void testExtractURIQueryParameters() throws Exception {
// Table of cases for the query-parameter extractor: repeated params, fragments,
// missing params, maxParams limits, malformed URIs, and percent-encoding in both
// parameter names and values.
String host = "http://www.fool.com/errors.log";
internalExtractURIQueryParams("foo", host + "?foo=x&foo=y&foo=z", Arrays.asList("x", "y", "z"));
internalExtractURIQueryParams("foo", host + "?foo=x&foo=y&foo=z#fragment", Arrays.asList("x", "y", "z"));
internalExtractURIQueryParams("foo", host + "?boo=x&foo=y&boo=z", Arrays.asList("y"));
internalExtractURIQueryParams("foo", host + "?boo=x&bar=y&baz=z", Arrays.asList());
// trailing int caps the number of extracted values (maxParams)
internalExtractURIQueryParams("foo", host + "?foo=x&foo=y&foo=z", Arrays.asList("x"), 1);
internalExtractURIQueryParams("foo", host + "?foo=x&foo=y&foo=z", Arrays.asList(), 0);
// degenerate and malformed inputs must yield no values rather than throw
internalExtractURIQueryParams("foo", "", Arrays.asList());
internalExtractURIQueryParams("foo", "?", Arrays.asList());
internalExtractURIQueryParams("foo", "::", Arrays.asList()); // syntax error in URI
internalExtractURIQueryParams("foo", new String(new byte[10], "ASCII"), Arrays.asList());
internalExtractURIQueryParams("foo", host + "", Arrays.asList());
internalExtractURIQueryParams("foo", host + "?", Arrays.asList());
// values are percent-decoded; param names are matched against their decoded form
internalExtractURIQueryParams("foo", host + "?foo=hello%26%3D%23&bar=world", Arrays.asList("hello&=#"));
internalExtractURIQueryParams("foo&=#", host + "?foo%26%3D%23=hello%26%3D%23&bar=world", Arrays.asList("hello&=#"));
internalExtractURIQueryParams("foo&=#", host + "?foo&=#=hello%26%3D%23&bar=world", Arrays.asList());
internalExtractURIQueryParams("foo%26%3D%23", host + "?foo%26%3D%23=hello%26%3D%23&bar=world", Arrays.asList());
internalExtractURIQueryParams("bar", host + "?foo=hello%26%3D%23&bar=world", Arrays.asList("world"));
internalExtractURIQueryParams("bar", host + "?foo%26%3D%23=hello%26%3D%23&bar=world", Arrays.asList("world"));
internalExtractURIQueryParams("bar", host + "?foo&===hello%26%3D%23&bar=world", Arrays.asList("world"));
}
  // Convenience overload: no maxParameters limit (-1 selects the unlimited config).
  private void internalExtractURIQueryParams(String paramName, String url, List expected) throws Exception {
    internalExtractURIQueryParams(paramName, url, expected, -1);
  }
private void internalExtractURIQueryParams(String paramName, String url, List expected, int maxParams) throws Exception {
String fileName = "test-morphlines/extractURIQueryParameters";
String overridesStr = "queryParam : " + ConfigUtil.quoteString(paramName);
if (maxParams >= 0) {
fileName += "WithMaxParameters";
overridesStr += "\nmaxParameters : " + maxParams;
}
Config override = ConfigFactory.parseString(overridesStr);
morphline = createMorphline(fileName, override);
Record record = new Record();
record.put("in", url);
Record expectedRecord = new Record();
expectedRecord.put("in", url);
expectedRecord.getFields().putAll("out", expected);
processAndVerifySuccess(record, expectedRecord);
}
  @Test
  public void testImportSpecs() {
    // Globbed import specs: '**' matches recursively, '*' one package level,
    // and a fully-qualified class name matches exactly.
    List<String> importSpecs = Arrays.asList("com.cloudera.**", "org.apache.solr.**", "net.*", getClass().getName());
    for (Class clazz : new MorphlineContext().getTopLevelClasses(importSpecs, CommandBuilder.class)) {
      //System.out.println("found " + clazz);
    }
    MorphlineContext ctx = new MorphlineContext.Builder().build();
    // Called twice — presumably to exercise repeated/idempotent imports; TODO confirm.
    ctx.importCommandBuilders(importSpecs);
    ctx.importCommandBuilders(importSpecs);
  }
  @Test
  @Ignore
  public void testHugeImportSpecs() {
    // Manual benchmark (hence @Ignore): measures classpath scanning time for
    // very broad import specs and prints the elapsed seconds.
    long start = System.currentTimeMillis();
    List<String> importSpecs = Arrays.asList("com.**", "org.**", "net.*", getClass().getName());
    for (Class clazz : new MorphlineContext().getTopLevelClasses(importSpecs, CommandBuilder.class)) {
      System.out.println("found " + clazz);
    }
    float secs = (System.currentTimeMillis() - start) / 1000.0f;
    System.out.println("secs=" + secs);
  }
  @Test
  @Ignore
  public void testFindResources() throws Exception {
    // Exploratory (hence @Ignore): enumerates classpath resources under
    // test-morphlines/ via Guava ClassPath and prints their URLs.
    // TODO maybe expose as Resources.copyClassPathFilesToCWD("test-morphlines/")
    // or importClassPathFiles(...)
    for (ResourceInfo info : ClassPath.from(getClass().getClassLoader()).getResources()) {
      if (info.getResourceName().startsWith("test-morphlines/")) {
        System.out.println("info=" + info.url());
        // ByteStreams.toByteArray(info.url().openStream());
      }
    }
//    Enumeration<URL> iter = getClass().getClassLoader().getResources("test-morphlines");
//    while (iter.hasMoreElements()) {
//      URL url = iter.nextElement();
//      System.out.println("url=" + url);
//    }
  }
private Record createBasicRecord() {
Record record = new Record();
record.put("first_name", "Nadja");
record.put("age", 8);
record.put("tags", "one");
record.put("tags", 2);
record.put("tags", "three");
return record;
}
  @Test
  @Ignore
  // Before running this disable debug logging
  // via log4j.logger.com.cloudera.cdk.morphline=INFO in log4j.properties
  public void benchmark() throws Exception {
    // Manual throughput benchmark (hence @Ignore): repeatedly pushes the same
    // file bytes through the chosen morphline for |durationSecs| seconds and
    // reports iterations per second.
    String morphlineConfigFile = "test-morphlines/readCSVWithoutQuoting";
    //String morphlineConfigFile = "test-morphlines/grokEmail";
    //String morphlineConfigFile = "test-morphlines/grokSyslogNgCisco";
    long durationSecs = 20;
    //File file = new File(RESOURCES_DIR + "/test-documents/email.txt");
    //File file = new File(RESOURCES_DIR + "/test-documents/emails.txt");
    File file = new File(RESOURCES_DIR + "/test-documents/cars3.csv");
    // Sample syslog line for the grok configs above (unused by the CSV config).
    String msg = "<179>Jun 10 04:42:51 www.foo.com Jun 10 2013 04:42:51 : %myproduct-3-mysubfacility-251010: " +
        "Health probe failed for server 1.2.3.4 on port 8083, connection refused by server";
    System.out.println("Now benchmarking " + morphlineConfigFile + " ...");
    morphline = createMorphline(morphlineConfigFile);
    byte[] bytes = Files.toByteArray(file);
    long start = System.currentTimeMillis();
    long duration = durationSecs * 1000;
    int iters = 0;
    while (System.currentTimeMillis() < start + duration) {
      Record record = new Record();
      record.put(Fields.ATTACHMENT_BODY, bytes);
      // record.put(Fields.MESSAGE, msg);
      collector.reset();
      startSession();
      assertEquals(1, collector.getNumStartEvents());
      assertTrue(morphline.process(record));
      iters++;
    }
    float secs = (System.currentTimeMillis() - start) / 1000.0f;
    System.out.println("Results: iters=" + iters + ", took[secs]=" + secs + ", iters/secs=" + (iters/secs));
  }
}
| yuzhu712/cdk | cdk-morphlines/cdk-morphlines-core/src/test/java/com/cloudera/cdk/morphline/api/MorphlineTest.java | Java | apache-2.0 | 70,467 |
package _16_figRefMethodExtractionFromIntact;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.xml.sax.Attributes;
import org.xml.sax.helpers.DefaultHandler;
import bioc.BioCAnnotation;
/**
 * SAX handler that walks an IntAct PSI-MI XML document and collects, per
 * experiment description: the interaction-detection method code/label, the
 * participant-identification method code/label, the PMID, and — when an
 * interaction carries a figure-legend attribute (MI:0599) — the figure code.
 *
 * Results are exposed via {@link #getIntactData()}.
 */
class SimpleXmlHandler extends DefaultHandler {

  boolean error = false;

  // Accumulated experiment records, in document order.
  private List<SimpleIntactExperimentCode> intactData;
  // Same records keyed by experiment id, for figure-code back-patching.
  private Map<Integer, SimpleIntactExperimentCode> experiments;

  BioCAnnotation currentAnnotation;
  String currentAttribute = "";
  // Dot-joined path of currently open elements, e.g. ".entrySet.entry...".
  String currentMatch = "";
  int pos = 0;
  String content = "";
  Pattern whiteReplace = Pattern.compile("\\s+");
  List<Exception> exceptions = new ArrayList<Exception>();

  // Scratch state for the experiment description currently being parsed.
  private String interactionCode;
  private String participantCode;
  private String pmid;
  private Integer exptId;
  private String interactionLabel;
  private String participantLabel;
  // Experiment id referenced by the current <interaction> element.
  private Integer lookupId;
  private String fig;
  // True while inside an <attribute> whose name ac is MI:0599 (figure legend).
  private boolean figFlag;

  public void startDocument() {
    intactData = new ArrayList<SimpleIntactExperimentCode>();
    experiments = new HashMap<Integer, SimpleIntactExperimentCode>();
  }

  public void endDocument() {
    // Nothing to finalize; records are completed in endElement().
  }

  public void startElement(String uri, String localName, String qName,
      Attributes attributes) {
    this.currentMatch += "." + qName;
    this.currentAttribute = attributes.getValue("IdType");
    if (currentMatch.endsWith("interactionDetectionMethod.xref.primaryRef")) {
      interactionCode = attributes.getValue("id");
    } else if (currentMatch.endsWith("participantIdentificationMethod.xref.primaryRef")) {
      participantCode = attributes.getValue("id");
    } else if (currentMatch.endsWith("experimentList.experimentDescription.bibref.xref.primaryRef")) {
      pmid = attributes.getValue("id");
    } else if (currentMatch.endsWith("experimentList.experimentDescription")) {
      exptId = Integer.valueOf(attributes.getValue("id"));
    } else if (currentMatch.endsWith("attributeList.attribute")) {
      // MI:0599 marks a figure-legend attribute; remember that for characters().
      figFlag = false;
      for (int i = 0; i < attributes.getLength(); i++) {
        if (attributes.getValue(i).equals("MI:0599")) {
          figFlag = true;
          break;
        }
      }
    }
  }

  public void endElement(String uri, String localName, String qName) {
    if (currentMatch.endsWith("experimentList.experimentDescription")) {
      // Flush the scratch state into a new experiment record.
      SimpleIntactExperimentCode intactRecord = new SimpleIntactExperimentCode();
      intactRecord.exptId = this.exptId;
      intactRecord.interactionCode = this.interactionCode;
      intactRecord.partipantCode = this.participantCode; // field name typo is in the model class
      intactRecord.pmid = this.pmid;
      intactRecord.interactionLabel = this.interactionLabel;
      intactRecord.participantLabel = this.participantLabel;
      intactData.add(intactRecord);
      experiments.put(exptId, intactRecord);
      this.exptId = null;
      this.interactionCode = null;
      this.participantCode = null;
      this.pmid = null;
      pos++;
    } else if (currentMatch.endsWith("interactionList.interaction")) {
      // Back-patch the figure code onto the experiment this interaction references.
      if (lookupId != null && experiments.containsKey(lookupId)) {
        SimpleIntactExperimentCode intactRecord = experiments.get(lookupId);
        intactRecord.figCode = fig;
      }
      // else: interaction references an experiment we never saw; nothing to update.
    }
    // Pop this element off the path.
    String c = this.currentMatch;
    this.currentMatch = c.substring(0, c.lastIndexOf("." + qName));
  }

  public void characters(char[] ch, int start, int length) {
    // NOTE(review): SAX may deliver element text in multiple characters() calls;
    // this keeps only the latest chunk — confirm inputs keep these values small.
    String value = new String(ch, start, length);
    if (currentMatch.endsWith("interactionDetectionMethod.names.fullName")) {
      interactionLabel = value;
    } else if (currentMatch.endsWith("participantIdentificationMethod.names.fullName")) {
      participantLabel = value;
    } else if (currentMatch.endsWith("interactionList.interaction.experimentList.experimentRef")) {
      lookupId = Integer.valueOf(value);
    } else if (currentMatch.endsWith("attributeList.attribute") && figFlag) {
      fig = value;
    }
  }

  public List<SimpleIntactExperimentCode> getIntactData() {
    return intactData;
  }

  public void setIntactData(List<SimpleIntactExperimentCode> intactData) {
    this.intactData = intactData;
  }
}
| BMKEG/bigMechKefedJavaWorkbook | src/main/java/_16_figRefMethodExtractionFromIntact/SimpleXmlHandler.java | Java | apache-2.0 | 4,202 |
// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --expose-wasm
load("test/mjsunit/wasm/wasm-constants.js");
load("test/mjsunit/wasm/wasm-module-builder.js");
// Builds a wasm module importing |func| with signature (f64, f64) -> i32,
// exports a "main" that forwards both arguments to the import, then drives
// main over a range of inputs and lets |check| verify each result.
function testCallFFI(func, check) {
  var builder = new WasmModuleBuilder();

  var sig_index = builder.addType(kSig_i_dd);
  builder.addImport("func", sig_index);
  builder.addFunction("main", sig_index)
    .addBody([
      kExprGetLocal, 0,            // --
      kExprGetLocal, 1,            // --
      kExprCallImport, kArity2, 0  // --
    ])                             // --
    .exportFunc();

  var main = builder.instantiate({func: func}).exports.main;

  for (var i = 0; i < 100000; i += 10003) {
    var a = 22.5 + i, b = 10.5 + i;
    var r = main(a, b);
    check(r, a, b);
  }
}
// Shared scratch state: the imported JS functions below record their receiver
// and arguments here so the check functions can verify argument marshalling.
var global = (function() { return this; })();
var params = [-99, -99, -99, -99, -99];
var was_called = false;
var length = -1;

// Import that records this/a/b and returns (a - b) | 0.
function FOREIGN_SUB(a, b) {
  print("FOREIGN_SUB(" + a + ", " + b + ")");
  was_called = true;
  params[0] = this;
  params[1] = a;
  params[2] = b;
  return (a - b) | 0;
}

// Verifies FOREIGN_SUB ran and saw the expected arguments.
function check_FOREIGN_SUB(r, a, b) {
  assertEquals(a - b | 0, r);
  assertTrue(was_called);
//  assertEquals(global, params[0]);  // sloppy mode
  assertEquals(a, params[1]);
  assertEquals(b, params[2]);
  was_called = false; // reset for the next iteration
}

testCallFFI(FOREIGN_SUB, check_FOREIGN_SUB);
// Import declaring four formals though wasm passes only two: c and d must
// arrive as undefined.
function FOREIGN_ABCD(a, b, c, d) {
  print("FOREIGN_ABCD(" + a + ", " + b + ", " + c + ", " + d + ")");
  was_called = true;
  params[0] = this;
  params[1] = a;
  params[2] = b;
  params[3] = c;
  params[4] = d;
  return (a * b * 6) | 0;
}

// Verifies the two real arguments arrived and the extra formals were undefined.
function check_FOREIGN_ABCD(r, a, b) {
  assertEquals((a * b * 6) | 0, r);
  assertTrue(was_called);
//  assertEquals(global, params[0]);  // sloppy mode.
  assertEquals(a, params[1]);
  assertEquals(b, params[2]);
  assertEquals(undefined, params[3]);
  assertEquals(undefined, params[4]);
  was_called = false; // reset for the next iteration
}

testCallFFI(FOREIGN_ABCD, check_FOREIGN_ABCD);
// The FOREIGN_ARGUMENTSn functions all compute the same value but deliberately
// declare different formal parameter counts (0..4) and read actuals through
// the |arguments| object; the wasm module always calls with exactly two
// arguments, exercising the FFI's arity adaptation. The duplication is
// intentional — collapsing them would lose the declared-arity variation.
function FOREIGN_ARGUMENTS0() {
  print("FOREIGN_ARGUMENTS0");
  was_called = true;
  length = arguments.length;
  for (var i = 0; i < arguments.length; i++) {
    params[i] = arguments[i];
  }
  return (arguments[0] * arguments[1] * 7) | 0;
}

function FOREIGN_ARGUMENTS1(a) {
  print("FOREIGN_ARGUMENTS1", a);
  was_called = true;
  length = arguments.length;
  for (var i = 0; i < arguments.length; i++) {
    params[i] = arguments[i];
  }
  return (arguments[0] * arguments[1] * 7) | 0;
}

function FOREIGN_ARGUMENTS2(a, b) {
  print("FOREIGN_ARGUMENTS2", a, b);
  was_called = true;
  length = arguments.length;
  for (var i = 0; i < arguments.length; i++) {
    params[i] = arguments[i];
  }
  return (a * b * 7) | 0;
}

function FOREIGN_ARGUMENTS3(a, b, c) {
  print("FOREIGN_ARGUMENTS3", a, b, c);
  was_called = true;
  length = arguments.length;
  for (var i = 0; i < arguments.length; i++) {
    params[i] = arguments[i];
  }
  return (a * b * 7) | 0;
}

function FOREIGN_ARGUMENTS4(a, b, c, d) {
  print("FOREIGN_ARGUMENTS4", a, b, c, d);
  was_called = true;
  length = arguments.length;
  for (var i = 0; i < arguments.length; i++) {
    params[i] = arguments[i];
  }
  return (a * b * 7) | 0;
}

// Regardless of declared arity, arguments.length must be exactly 2.
function check_FOREIGN_ARGUMENTS(r, a, b) {
  assertEquals((a * b * 7) | 0, r);
  assertTrue(was_called);
  assertEquals(2, length);
  assertEquals(a, params[0]);
  assertEquals(b, params[1]);
  was_called = false; // reset for the next iteration
}

// Check a bunch of uses of the arguments object.
testCallFFI(FOREIGN_ARGUMENTS0, check_FOREIGN_ARGUMENTS);
testCallFFI(FOREIGN_ARGUMENTS1, check_FOREIGN_ARGUMENTS);
testCallFFI(FOREIGN_ARGUMENTS2, check_FOREIGN_ARGUMENTS);
testCallFFI(FOREIGN_ARGUMENTS3, check_FOREIGN_ARGUMENTS);
testCallFFI(FOREIGN_ARGUMENTS4, check_FOREIGN_ARGUMENTS);
// Builds an FFI callee that ignores its arguments and always yields |val|.
function returnValue(val) {
  var callee = function(a, b) {
    print("RETURN_VALUE ", val);
    return val;
  };
  return callee;
}

// Builds a checker asserting the wasm-side result equals |expected|.
function checkReturn(expected) {
  return function(r, a, b) {
    assertEquals(expected, r);
  };
}
// Check that returning weird values doesn't crash:
// undefined/null coerce to 0, strings go through ToNumber, objects via valueOf.
testCallFFI(returnValue(undefined), checkReturn(0));
testCallFFI(returnValue(null), checkReturn(0));
testCallFFI(returnValue("0"), checkReturn(0));
testCallFFI(returnValue("-77"), checkReturn(-77));

var objWithValueOf = {valueOf: function() { return 198; }}

testCallFFI(returnValue(objWithValueOf), checkReturn(198));
// Builds a module importing a void (type, type) JS function; the exported
// "main" calls it and then returns the sentinel 99. Verifies the JS side saw
// exactly two arguments, coerced per wasm type (i32 truncation / f32 rounding
// / f64 passthrough).
// NOTE(review): the |func| and |check| parameters are unused — call sites
// below pass only |type|.
function testCallBinopVoid(type, func, check) {
  var passed_length = -1;
  var passed_a = -1;
  var passed_b = -1;
  var args_a = -1;
  var args_b = -1;

  // Records both the formals and the arguments object entries.
  ffi = {func: function(a, b) {
    passed_length = arguments.length;
    passed_a = a;
    passed_b = b;
    args_a = arguments[0];
    args_b = arguments[1];
  }};

  var builder = new WasmModuleBuilder();

  builder.addImport("func", makeSig_v_xx(type));
  builder.addFunction("main", makeSig_r_xx(kAstI32, type))
    .addBody([
      kExprGetLocal, 0,             // --
      kExprGetLocal, 1,             // --
      kExprCallImport, kArity2, 0,  // --
      kExprI8Const, 99              // --
    ])                              // --
    .exportFunc()

  var main = builder.instantiate(ffi).exports.main;

  print("testCallBinopVoid", type);

  for (var i = 0; i < 100000; i += 10003.1) {
    var a = 22.5 + i, b = 10.5 + i;
    var r = main(a, b);
    assertEquals(99, r);
    assertEquals(2, passed_length);
    var expected_a, expected_b;
    switch (type) {
      case kAstI32: {
        expected_a = a | 0;
        expected_b = b | 0;
        break;
      }
      case kAstF32: {
        expected_a = Math.fround(a);
        expected_b = Math.fround(b);
        break;
      }
      case kAstF64: {
        expected_a = a;
        expected_b = b;
        break;
      }
    }

    assertEquals(expected_a, args_a);
    assertEquals(expected_b, args_b);
    assertEquals(expected_a, passed_a);
    assertEquals(expected_b, passed_b);
  }
}

testCallBinopVoid(kAstI32);
// TODO testCallBinopVoid(kAstI64);
testCallBinopVoid(kAstF32);
testCallBinopVoid(kAstF64);
// Imports the host's print twice (i32 and f64 overloads); "main" prints the
// constant 97 via the i32 import and its argument via the f64 import.
// Instantiated twice to check repeated instantiation works.
function testCallPrint() {
  var builder = new WasmModuleBuilder();

  builder.addImport("print", makeSig_v_x(kAstI32));
  builder.addImport("print", makeSig_v_x(kAstF64));
  builder.addFunction("main", makeSig_v_x(kAstF64))
    .addBody([
      kExprI8Const, 97,             // --
      kExprCallImport, kArity1, 0,  // --
      kExprGetLocal, 0,             // --
      kExprCallImport, kArity1, 1   // --
    ])                              // --
    .exportFunc()

  var main = builder.instantiate({print: print}).exports.main;

  for (var i = -9; i < 900; i += 6.125) main(i);
}

testCallPrint();
testCallPrint();
| runtimejs/runtime | deps/v8/test/mjsunit/wasm/ffi.js | JavaScript | apache-2.0 | 6,653 |
/*
* Copyright (C) 2015-2016 Emanuel Moecklin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.onegravity.rteditor;
import com.onegravity.rteditor.effects.Effect;
import com.onegravity.rteditor.spans.RTSpan;
/**
 * Listener interface for toolbar events (e.g. bold selected/unselected).
 * The methods are fairly self-explanatory.
 */
public interface RTToolbarListener {

    /**
     * A character or paragraph effect was selected in the toolbar.
     *
     * @param effect the selected effect
     * @param value the value to apply for that effect
     */
    <V, C extends RTSpan<V>> void onEffectSelected(Effect<V, C> effect, V value);

    /** The undo button was clicked. */
    void onUndo();

    /** The redo button was clicked. */
    void onRedo();

    /** The clear-formatting button was clicked. */
    void onClearFormatting();

    /** The create-link button was clicked. */
    void onCreateLink();

    /** The pick-image button was clicked. */
    void onPickImage();

    /** The capture-image button was clicked. */
    void onCaptureImage();
}
| Ronak-LM/Memoir | RTEditor/src/main/java/com/onegravity/rteditor/RTToolbarListener.java | Java | apache-2.0 | 1,196 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Store backing the user-manager grid. Loads UserModel rows on demand
 * (autoLoad is false) from admin/userManager/userStore.json via an AJAX
 * proxy with a JSON reader.
 */
Ext.define("Wdesktop.app.UserManager.store.UserStore", {
    extend : "Ext.data.Store",
    requires : [ 'Wdesktop.app.UserManager.model.UserModel',
            'Ext.data.proxy.Ajax', 'Ext.data.reader.Json' ],

    constructor : function(cfg) {
        var me = this;
        cfg = cfg || {};
        // Caller-supplied cfg overrides the defaults below.
        me.callParent([ Ext.apply({
            autoLoad : false,
            model : 'Wdesktop.app.UserManager.model.UserModel',
            proxy : {
                type : "ajax",
                url : "admin/userManager/userStore.json",
                reader : {
                    type : "json",
                    // Records live under the "users" property of the response.
                    root : "users"
                }
            }
        }, cfg) ]);
    }
});
| wu560130911/MultimediaDesktop | Client/src/main/webapp/resources/desktop/app/UserManager/store/UserStore.js | JavaScript | apache-2.0 | 1,100 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.*;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import com.google.common.collect.MapMaker;
import org.apache.commons.collections.PredicateUtils;
import org.apache.commons.collections.iterators.CollatingIterator;
import org.apache.commons.collections.iterators.FilterIterator;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.cache.AutoSavingCache;
import org.apache.cassandra.concurrent.DebuggableThreadPoolExecutor;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.*;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.io.sstable.*;
import org.apache.cassandra.io.util.BufferedRandomAccessFile;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.service.AntiEntropyService;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.streaming.OperationType;
import org.apache.cassandra.utils.*;
import org.cliffc.high_scale_lib.NonBlockingHashMap;
/**
* A singleton which manages a private executor of ongoing compactions. A readwrite lock
* controls whether compactions can proceed: an external consumer can completely stop
* compactions by acquiring the write half of the lock via getCompactionLock().
*
* Scheduling for compaction is accomplished by swapping sstables to be compacted into
* a set via DataTracker. New scheduling attempts will ignore currently compacting
* sstables.
*/
public class CompactionManager implements CompactionManagerMBean
{
    public static final String MBEAN_OBJECT_NAME = "org.apache.cassandra.db:type=CompactionManager";
    private static final Logger logger = LoggerFactory.getLogger(CompactionManager.class);
    public static final CompactionManager instance;

    // acquire as read to perform a compaction, and as write to prevent compactions
    private final ReentrantReadWriteLock compactionLock = new ReentrantReadWriteLock();

    static
    {
        instance = new CompactionManager();
        // Register the singleton with JMX so operational tooling can reach it.
        MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
        try
        {
            mbs.registerMBean(instance, new ObjectName(MBEAN_OBJECT_NAME));
        }
        catch (Exception e)
        {
            throw new RuntimeException(e);
        }
    }

    private CompactionExecutor executor = new CompactionExecutor();
    private CompactionExecutor validationExecutor = new ValidationExecutor();
    // Weak-keyed so dropped column families do not leak their estimates.
    private Map<ColumnFamilyStore, Integer> estimatedCompactions = new MapMaker().concurrencyLevel(1).weakKeys().makeMap();
    /**
     * @return A lock, for which acquisition means no compactions can run.
     */
    // Compactions take the read half of this lock, so holding the write half
    // excludes all of them at once.
    public Lock getCompactionLock()
    {
        return compactionLock.writeLock();
    }
    /**
     * Call this whenever a compaction might be needed on the given columnfamily.
     * It's okay to over-call (within reason) since the compactions are single-threaded,
     * and if a call is unnecessary, it will just be no-oped in the bucketing phase.
     *
     * Returns a future yielding the number of sstables compacted (0 if nothing was done).
     */
    public Future<Integer> submitMinorIfNeeded(final ColumnFamilyStore cfs)
    {
        Callable<Integer> callable = new Callable<Integer>()
        {
            public Integer call() throws IOException
            {
                // Read half of the lock: allows concurrent compactions but yields
                // to anyone holding the write half (getCompactionLock()).
                compactionLock.readLock().lock();
                try 
                {
                    if (cfs.isInvalid())
                        return 0;
                    Integer minThreshold = cfs.getMinimumCompactionThreshold();
                    Integer maxThreshold = cfs.getMaximumCompactionThreshold();

                    if (minThreshold == 0 || maxThreshold == 0)
                    {
                        logger.debug("Compaction is currently disabled.");
                        return 0;
                    }
                    logger.debug("Checking to see if compaction of " + cfs.columnFamily + " would be useful");
                    // Group sstables into buckets of similar size (50 MB granularity).
                    Set<List<SSTableReader>> buckets = getBuckets(convertSSTablesToPairs(cfs.getSSTables()), 50L * 1024L * 1024L);
                    updateEstimateFor(cfs, buckets);
                    int gcBefore = getDefaultGcBefore(cfs);
                    
                    for (List<SSTableReader> sstables : buckets)
                    {
                        if (sstables.size() < minThreshold)
                            continue;
                        // if we have too many to compact all at once, compact older ones first -- this avoids
                        // re-compacting files we just created.
                        Collections.sort(sstables);
                        Collection<SSTableReader> tocompact = cfs.getDataTracker().markCompacting(sstables, minThreshold, maxThreshold);
                        if (tocompact == null)
                            // enough threads are busy in this bucket
                            continue;
                        try
                        {
                            // Only the first eligible bucket is compacted per call.
                            return doCompaction(cfs, tocompact, gcBefore);
                        }
                        finally
                        {
                            cfs.getDataTracker().unmarkCompacting(tocompact);
                        }
                    }
                }
                finally 
                {
                    compactionLock.readLock().unlock();
                }
                return 0;
            }
        };
        return executor.submit(callable);
    }
private void updateEstimateFor(ColumnFamilyStore cfs, Set<List<SSTableReader>> buckets)
{
Integer minThreshold = cfs.getMinimumCompactionThreshold();
Integer maxThreshold = cfs.getMaximumCompactionThreshold();
if (minThreshold > 0 && maxThreshold > 0)
{
int n = 0;
for (List<SSTableReader> sstables : buckets)
{
if (sstables.size() >= minThreshold)
{
n += Math.ceil((double)sstables.size() / maxThreshold);
}
}
estimatedCompactions.put(cfs, n);
}
else
{
logger.debug("Compaction is currently disabled.");
}
}
    /**
     * Runs a blocking cleanup compaction over all sstables of the given column
     * family. Takes the write lock to atomically mark every sstable as
     * compacting, then downgrades to the read lock for the actual work so other
     * read-lock holders may proceed.
     */
    public void performCleanup(final ColumnFamilyStore cfStore, final NodeId.OneShotRenewer renewer) throws InterruptedException, ExecutionException
    {
        Callable<Object> runnable = new Callable<Object>()
        {
            public Object call() throws IOException
            {
                // acquire the write lock to schedule all sstables
                compactionLock.writeLock().lock();
                try 
                {
                    if (cfStore.isInvalid())
                        return this;
                    Collection<SSTableReader> tocleanup = cfStore.getDataTracker().markCompacting(cfStore.getSSTables(), 1, Integer.MAX_VALUE);
                    if (tocleanup == null || tocleanup.isEmpty())
                        return this;
                    try
                    {
                        // downgrade the lock acquisition
                        compactionLock.readLock().lock();
                        compactionLock.writeLock().unlock();
                        try
                        {
                            doCleanupCompaction(cfStore, tocleanup, renewer);
                        }
                        finally
                        {
                            compactionLock.readLock().unlock();
                        }
                    }
                    finally
                    {
                        cfStore.getDataTracker().unmarkCompacting(tocleanup);
                    }
                    return this;
                }
                finally 
                {
                    // we probably already downgraded
                    if (compactionLock.writeLock().isHeldByCurrentThread())
                        compactionLock.writeLock().unlock();
                }
            }
        };
        // Blocks until the cleanup completes (or propagates its exception).
        executor.submit(runnable).get();
    }
    /**
     * Runs a blocking scrub over all sstables of the given column family.
     * Mirrors performCleanup: write lock to mark the sstables, then lock
     * downgrade to read for the actual scrub.
     */
    public void performScrub(final ColumnFamilyStore cfStore) throws InterruptedException, ExecutionException
    {
        Callable<Object> runnable = new Callable<Object>()
        {
            public Object call() throws IOException
            {
                // acquire the write lock to schedule all sstables
                compactionLock.writeLock().lock();
                try 
                {
                    if (cfStore.isInvalid())
                        return this;
                    Collection<SSTableReader> toscrub = cfStore.getDataTracker().markCompacting(cfStore.getSSTables(), 1, Integer.MAX_VALUE);
                    if (toscrub == null || toscrub.isEmpty())
                        return this;
                    try
                    {
                        // downgrade the lock acquisition
                        compactionLock.readLock().lock();
                        compactionLock.writeLock().unlock();
                        try
                        {
                            doScrub(cfStore, toscrub);
                        }
                        finally
                        {
                            compactionLock.readLock().unlock();
                        }
                    }
                    finally
                    {
                        cfStore.getDataTracker().unmarkCompacting(toscrub);
                    }
                    return this;
                }
                finally 
                {
                    // we probably already downgraded
                    if (compactionLock.writeLock().isHeldByCurrentThread())
                        compactionLock.writeLock().unlock();
                }
            }
        };
        executor.submit(runnable).get();
    }
    /** Blocking major compaction: all sstables (skip == 0), default GC grace. */
    public void performMajor(final ColumnFamilyStore cfStore) throws InterruptedException, ExecutionException
    {
        submitMajor(cfStore, 0, getDefaultGcBefore(cfStore)).get();
    }
    /**
     * Schedules a major compaction. When skip > 0, sstables of skip GB or more
     * are excluded from the compaction. Uses the same write-lock-then-downgrade
     * choreography as performCleanup.
     */
    public Future<Object> submitMajor(final ColumnFamilyStore cfStore, final long skip, final int gcBefore)
    {
        Callable<Object> callable = new Callable<Object>()
        {
            public Object call() throws IOException
            {
                // acquire the write lock long enough to schedule all sstables
                compactionLock.writeLock().lock();
                try
                {
                    if (cfStore.isInvalid())
                        return this;

                    Collection<SSTableReader> sstables;
                    if (skip > 0)
                    {
                        // Keep only sstables smaller than |skip| GB.
                        sstables = new ArrayList<SSTableReader>();
                        for (SSTableReader sstable : cfStore.getSSTables())
                        {
                            if (sstable.length() < skip * 1024L * 1024L * 1024L)
                            {
                                sstables.add(sstable);
                            }
                        }
                    }
                    else
                    {
                        sstables = cfStore.getSSTables();
                    }

                    Collection<SSTableReader> tocompact = cfStore.getDataTracker().markCompacting(sstables, 0, Integer.MAX_VALUE);
                    if (tocompact == null || tocompact.isEmpty())
                        return this;
                    try
                    {
                        // downgrade the lock acquisition
                        compactionLock.readLock().lock();
                        compactionLock.writeLock().unlock();
                        try
                        {
                            doCompaction(cfStore, tocompact, gcBefore);
                        }
                        finally
                        {
                            compactionLock.readLock().unlock();
                        }
                    }
                    finally
                    {
                        cfStore.getDataTracker().unmarkCompacting(tocompact);
                    }
                    return this;
                }
                finally
                {
                    // we probably already downgraded
                    if (compactionLock.writeLock().isHeldByCurrentThread())
                        compactionLock.writeLock().unlock();
                }
            }
        };
        return executor.submit(callable);
    }
    /**
     * JMX entry point: compacts an explicit comma-separated list of data files
     * belonging to one column family of the given keyspace. Validates that each
     * name is a -Data component and that all files share one column family.
     */
    public void forceUserDefinedCompaction(String ksname, String dataFiles)
    {
        if (!DatabaseDescriptor.getTables().contains(ksname))
            throw new IllegalArgumentException("Unknown keyspace " + ksname);

        // NOTE(review): the directory is literally the keyspace name here;
        // Descriptor.fromFilename presumably only uses it as a parse context — confirm.
        File directory = new File(ksname);
        String[] filenames = dataFiles.split(",");
        Collection<Descriptor> descriptors = new ArrayList<Descriptor>(filenames.length);

        String cfname = null;
        for (String filename : filenames)
        {
            Pair<Descriptor, String> p = Descriptor.fromFilename(directory, filename.trim());
            if (!p.right.equals(Component.DATA.name()))
            {
                throw new IllegalArgumentException(filename + " does not appear to be a data file");
            }
            if (cfname == null)
            {
                cfname = p.left.cfname;
            }
            else if (!cfname.equals(p.left.cfname))
            {
                throw new IllegalArgumentException("All provided sstables should be for the same column family");
            }
            descriptors.add(p.left);
        }

        ColumnFamilyStore cfs = Table.open(ksname).getColumnFamilyStore(cfname);
        submitUserDefined(cfs, descriptors, getDefaultGcBefore(cfs));
    }
    /**
     * Schedules a compaction of an explicit set of sstables (by descriptor).
     * Descriptors that no longer map to active sstables are skipped with a log
     * message; forces deserialization so expired columns become tombstones.
     */
    public Future<Object> submitUserDefined(final ColumnFamilyStore cfs, final Collection<Descriptor> dataFiles, final int gcBefore)
    {
        Callable<Object> callable = new Callable<Object>()
        {
            public Object call() throws IOException
            {
                compactionLock.readLock().lock();
                try
                {
                    if (cfs.isInvalid())
                        return this;
                    // look up the sstables now that we're on the compaction executor, so we don't try to re-compact
                    // something that was already being compacted earlier.
                    Collection<SSTableReader> sstables = new ArrayList<SSTableReader>();
                    for (Descriptor desc : dataFiles)
                    {
                        // inefficient but not in a performance sensitive path
                        SSTableReader sstable = lookupSSTable(cfs, desc);
                        if (sstable == null)
                        {
                            logger.info("Will not compact {}: it is not an active sstable", desc);
                        }
                        else
                        {
                            sstables.add(sstable);
                        }
                    }

                    if (sstables.isEmpty())
                    {
                        logger.error("No file to compact for user defined compaction");
                    }
                    // attempt to schedule the set
                    else if ((sstables = cfs.getDataTracker().markCompacting(sstables, 1, Integer.MAX_VALUE)) != null)
                    {
                        String location = cfs.table.getDataFileLocation(1);
                        // success: perform the compaction
                        try
                        {
                            // Forcing deserialization because in case the user wants expired columns to be transformed to tombstones
                            doCompactionWithoutSizeEstimation(cfs, sstables, gcBefore, location, true);
                        }
                        finally
                        {
                            cfs.getDataTracker().unmarkCompacting(sstables);
                        }
                    }
                    else
                    {
                        logger.error("SSTables for user defined compaction are already being compacted.");
                    }

                    return this;
                }
                finally
                {
                    compactionLock.readLock().unlock();
                }
            }
        };
        return executor.submit(callable);
    }
    /**
     * Finds the active SSTableReader matching the given descriptor, or null if
     * the descriptor names no currently-active sstable.
     */
    private SSTableReader lookupSSTable(final ColumnFamilyStore cfs, Descriptor descriptor)
    {
        for (SSTableReader sstable : cfs.getSSTables())
        {
            // .equals() with no other changes won't work because in sstable.descriptor, the directory is an absolute path.
            // We could construct descriptor with an absolute path too but I haven't found any satisfying way to do that
            // (DB.getDataFileLocationForTable() may not return the right path if you have multiple volumes). Hence the
            // endsWith.
            if (sstable.descriptor.toString().endsWith(descriptor.toString()))
                return sstable;
        }
        return null;
    }
    /**
     * Does not mutate data, so is not scheduled.
     *
     * Runs an anti-entropy validation compaction on the validation executor,
     * holding only the read half of the compaction lock.
     */
    public Future<Object> submitValidation(final ColumnFamilyStore cfStore, final AntiEntropyService.Validator validator)
    {
        Callable<Object> callable = new Callable<Object>()
        {
            public Object call() throws IOException
            {
                compactionLock.readLock().lock();
                try
                {
                    if (!cfStore.isInvalid())
                        doValidationCompaction(cfStore, validator);
                    return this;
                }
                finally
                {
                    compactionLock.readLock().unlock();
                }
            }
        };
        return validationExecutor.submit(callable);
    }
    /* Used in tests. */
    // Disables automatic compaction on every column family of every non-system keyspace.
    public void disableAutoCompaction()
    {
        for (String ksname : DatabaseDescriptor.getNonSystemTables())
        {
            for (ColumnFamilyStore cfs : Table.open(ksname).getColumnFamilyStores())
                cfs.disableAutoCompaction();
        }
    }
    /**
     * Compacts the given sstables, retrying with the largest file dropped while
     * there is not enough disk space for the estimated compacted size.
     * Returns the number of sstables compacted, or 0 if nothing could be done.
     */
    int doCompaction(ColumnFamilyStore cfs, Collection<SSTableReader> sstables, int gcBefore) throws IOException
    {
        if (sstables.size() < 2)
        {
            logger.info("Nothing to compact in " + cfs.getColumnFamilyName() + "; use forceUserDefinedCompaction if you wish to force compaction of single sstables (e.g. for tombstone collection)");
            return 0;
        }

        Table table = cfs.table;

        // If the compaction file path is null that means we have no space left for this compaction.
        // try again w/o the largest one.
        Set<SSTableReader> smallerSSTables = new HashSet<SSTableReader>(sstables);
        while (smallerSSTables.size() > 1)
        {
            String compactionFileLocation = table.getDataFileLocation(cfs.getExpectedCompactedFileSize(smallerSSTables));
            if (compactionFileLocation != null)
                return doCompactionWithoutSizeEstimation(cfs, smallerSSTables, gcBefore, compactionFileLocation, false);

            logger.warn("insufficient space to compact all requested files " + StringUtils.join(smallerSSTables, ", "));
            smallerSSTables.remove(cfs.getMaxSizeFile(smallerSSTables));
        }

        logger.error("insufficient space to compact even the two smallest files, aborting");
        return 0;
    }
/**
 * For internal use and testing only. The rest of the system should go through the submit* methods,
 * which are properly serialized.
 *
 * Merges the given sstables into one new sstable at compactionFileLocation and
 * atomically replaces the inputs in the CFS.
 *
 * @return the number of input sstables compacted (0 if everything merged away to nothing)
 */
int doCompactionWithoutSizeEstimation(ColumnFamilyStore cfs, Collection<SSTableReader> sstables, int gcBefore, String compactionFileLocation, boolean forceDeserialize) throws IOException
{
    // The collection of sstables passed may be empty (but not null); even if
    // it is not empty, it may compact down to nothing if all rows are deleted.
    assert sstables != null;
    Table table = cfs.table;
    if (DatabaseDescriptor.isSnapshotBeforeCompaction())
        table.snapshot(System.currentTimeMillis() + "-" + "compact-" + cfs.columnFamily);
    // sanity check: all sstables must belong to the same cfs
    for (SSTableReader sstable : sstables)
        assert sstable.descriptor.cfname.equals(cfs.columnFamily);
    CompactionController controller = new CompactionController(cfs, sstables, gcBefore, forceDeserialize);
    // new sstables from flush can be added during a compaction, but only the compaction can remove them,
    // so in our single-threaded compaction world this is a valid way of determining if we're compacting
    // all the sstables (that existed when we started)
    CompactionType type = controller.isMajor()
                        ? CompactionType.MAJOR
                        : CompactionType.MINOR;
    logger.info("Compacting {}: {}", type, sstables);
    long startTime = System.currentTimeMillis();
    long totalkeysWritten = 0;
    // TODO the int cast here is potentially buggy
    int expectedBloomFilterSize = Math.max(DatabaseDescriptor.getIndexInterval(), (int)SSTableReader.getApproximateKeyCount(sstables));
    if (logger.isDebugEnabled())
        logger.debug("Expected bloom filter size : " + expectedBloomFilterSize);
    SSTableWriter writer;
    CompactionIterator ci = new CompactionIterator(type, sstables, controller); // retain a handle so we can call close()
    // skip nulls produced by the merge (rows that compacted away entirely)
    Iterator<AbstractCompactedRow> nni = new FilterIterator(ci, PredicateUtils.notNullPredicate());
    // new positions of keys that were cached in the inputs; re-primed into the output below
    Map<DecoratedKey, Long> cachedKeys = new HashMap<DecoratedKey, Long>();
    executor.beginCompaction(ci);
    try
    {
        if (!nni.hasNext())
        {
            // don't mark compacted in the finally block, since if there _is_ nondeleted data,
            // we need to sync it (via closeAndOpen) first, so there is no period during which
            // a crash could cause data loss.
            cfs.markCompacted(sstables);
            return 0;
        }
        writer = cfs.createCompactionWriter(expectedBloomFilterSize, compactionFileLocation, sstables);
        while (nni.hasNext())
        {
            AbstractCompactedRow row = nni.next();
            if (row.isEmpty())
                continue;
            long position = writer.append(row);
            totalkeysWritten++;
            if (DatabaseDescriptor.getPreheatKeyCache())
            {
                // remember the new position of any key that was cached in an input sstable
                for (SSTableReader sstable : sstables)
                {
                    if (sstable.getCachedPosition(row.key) != null)
                    {
                        cachedKeys.put(row.key, position);
                        break;
                    }
                }
            }
        }
    }
    finally
    {
        ci.close();
        executor.finishCompaction(ci);
    }
    SSTableReader ssTable = writer.closeAndOpenReader(getMaxDataAge(sstables));
    cfs.replaceCompactedSSTables(sstables, Arrays.asList(ssTable));
    for (Entry<DecoratedKey, Long> entry : cachedKeys.entrySet()) // empty if preheat is off
        ssTable.cacheKey(entry.getKey(), entry.getValue());
    // the merged result may itself qualify for another minor compaction
    submitMinorIfNeeded(cfs);
    long dTime = System.currentTimeMillis() - startTime;
    long startsize = SSTable.getTotalBytes(sstables);
    long endsize = ssTable.length();
    double ratio = (double)endsize / (double)startsize;
    logger.info(String.format("Compacted to %s. %,d to %,d (~%d%% of original) bytes for %,d keys. Time: %,dms.",
                              writer.getFilename(), startsize, endsize, (int) (ratio * 100), totalkeysWritten, dTime));
    return sstables.size();
}
/**
 * Returns the largest maxDataAge among the given sstables (0 for an empty collection).
 */
private static long getMaxDataAge(Collection<SSTableReader> sstables)
{
    long result = 0;
    for (SSTableReader sstable : sstables)
        result = Math.max(result, sstable.maxDataAge);
    return result;
}
/**
 * Deserialize everything in the CFS and re-serialize w/ the newest version. Also attempts to recover
 * from bogus row keys / sizes using data from the index, and skips rows with garbage columns that resulted
 * from early ByteBuffer bugs.
 *
 * @param cfs the column family being scrubbed; must not be a secondary index (asserted)
 * @param sstables the sstables to scrub, processed one at a time
 * @throws IOException
 */
private void doScrub(ColumnFamilyStore cfs, Collection<SSTableReader> sstables) throws IOException
{
    assert !cfs.isIndex();
    for (final SSTableReader sstable : sstables)
        scrubOne(cfs, sstable);
}
/**
 * Scrubs a single sstable: re-reads every row, writing recoverable rows to a new
 * sstable and skipping unreadable ones. The primary index is read in lockstep and
 * used as a fallback to re-locate rows whose on-disk key or size header is corrupt.
 * The original sstable is replaced only if at least one row was written; otherwise
 * it is simply marked compacted.
 *
 * @throws IOException on unrecoverable problems, including a full disk
 */
private void scrubOne(ColumnFamilyStore cfs, SSTableReader sstable) throws IOException
{
    logger.info("Scrubbing " + sstable);
    CompactionController controller = new CompactionController(cfs, Collections.singletonList(sstable), getDefaultGcBefore(cfs), true);
    // counter CFs must not lose rows silently (see CASSANDRA-2759 handling below)
    boolean isCommutative = cfs.metadata.getDefaultValidator().isCommutative();
    // Calculate the expected compacted filesize
    String compactionFileLocation = cfs.table.getDataFileLocation(sstable.length());
    if (compactionFileLocation == null)
        throw new IOException("disk full");
    int expectedBloomFilterSize = Math.max(DatabaseDescriptor.getIndexInterval(),
                                           (int)(SSTableReader.getApproximateKeyCount(Arrays.asList(sstable))));
    // loop through each row, deserializing to check for damage.
    // we'll also loop through the index at the same time, using the position from the index to recover if the
    // row header (key or data size) is corrupt. (This means our position in the index file will be one row
    // "ahead" of the data file.)
    final BufferedRandomAccessFile dataFile = BufferedRandomAccessFile.getUncachingReader(sstable.getFilename());
    String indexFilename = sstable.descriptor.filenameFor(Component.PRIMARY_INDEX);
    BufferedRandomAccessFile indexFile = BufferedRandomAccessFile.getUncachingReader(indexFilename);
    try
    {
        ByteBuffer nextIndexKey = ByteBufferUtil.readWithShortLength(indexFile);
        {
            // throw away variable so we don't have a side effect in the assert
            long firstRowPositionFromIndex = indexFile.readLong();
            assert firstRowPositionFromIndex == 0 : firstRowPositionFromIndex;
        }
        SSTableWriter writer = maybeCreateWriter(cfs, compactionFileLocation, expectedBloomFilterSize, null, Collections.singletonList(sstable));
        executor.beginCompaction(new ScrubInfo(dataFile, sstable));
        int goodRows = 0, badRows = 0, emptyRows = 0;
        while (!dataFile.isEOF())
        {
            long rowStart = dataFile.getFilePointer();
            if (logger.isDebugEnabled())
                logger.debug("Reading row at " + rowStart);
            // attempt to read the row header (key + size) straight from the data file
            DecoratedKey key = null;
            long dataSize = -1;
            try
            {
                key = SSTableReader.decodeKey(sstable.partitioner, sstable.descriptor, ByteBufferUtil.readWithShortLength(dataFile));
                dataSize = sstable.descriptor.hasIntRowSize ? dataFile.readInt() : dataFile.readLong();
                if (logger.isDebugEnabled())
                    logger.debug(String.format("row %s is %s bytes", ByteBufferUtil.bytesToHex(key.key), dataSize));
            }
            catch (Throwable th)
            {
                throwIfFatal(th);
                // check for null key below
            }
            // advance the index in lockstep; if the index itself is damaged, fall back
            // to treating the rest of the data file as the last region
            ByteBuffer currentIndexKey = nextIndexKey;
            long nextRowPositionFromIndex;
            try
            {
                nextIndexKey = indexFile.isEOF() ? null : ByteBufferUtil.readWithShortLength(indexFile);
                nextRowPositionFromIndex = indexFile.isEOF() ? dataFile.length() : indexFile.readLong();
            }
            catch (Throwable th)
            {
                logger.warn("Error reading index file", th);
                nextIndexKey = null;
                nextRowPositionFromIndex = dataFile.length();
            }
            long dataStart = dataFile.getFilePointer();
            // per-index data start: rowStart + 2-byte key length prefix + key bytes
            // + row size field (4 or 8 bytes depending on the descriptor version)
            long dataStartFromIndex = currentIndexKey == null
                                    ? -1
                                    : rowStart + 2 + currentIndexKey.remaining() + (sstable.descriptor.hasIntRowSize ? 4 : 8);
            long dataSizeFromIndex = nextRowPositionFromIndex - dataStartFromIndex;
            assert currentIndexKey != null || indexFile.isEOF();
            if (logger.isDebugEnabled() && currentIndexKey != null)
                logger.debug(String.format("Index doublecheck: row %s is %s bytes", ByteBufferUtil.bytesToHex(currentIndexKey), dataSizeFromIndex));
            // mark the writer position so a partially-appended bad row can be rolled back
            writer.mark();
            try
            {
                if (key == null)
                    throw new IOError(new IOException("Unable to read row key from data file"));
                if (dataSize > dataFile.length())
                    throw new IOError(new IOException("Impossible row size " + dataSize));
                SSTableIdentityIterator row = new SSTableIdentityIterator(sstable, dataFile, key, dataStart, dataSize, true);
                AbstractCompactedRow compactedRow = controller.getCompactedRow(row);
                if (compactedRow.isEmpty())
                {
                    emptyRows++;
                }
                else
                {
                    writer.append(compactedRow);
                    goodRows++;
                }
                if (!key.key.equals(currentIndexKey) || dataStart != dataStartFromIndex)
                    logger.warn("Index file contained a different key or row size; using key from data file");
            }
            catch (Throwable th)
            {
                throwIfFatal(th);
                logger.warn("Non-fatal error reading row (stacktrace follows)", th);
                writer.reset();
                // retry from the index position when it disagrees with what we read from the data file
                if (currentIndexKey != null
                    && (key == null || !key.key.equals(currentIndexKey) || dataStart != dataStartFromIndex || dataSize != dataSizeFromIndex))
                {
                    logger.info(String.format("Retrying from row index; data is %s bytes starting at %s",
                                              dataSizeFromIndex, dataStartFromIndex));
                    key = SSTableReader.decodeKey(sstable.partitioner, sstable.descriptor, currentIndexKey);
                    try
                    {
                        SSTableIdentityIterator row = new SSTableIdentityIterator(sstable, dataFile, key, dataStartFromIndex, dataSizeFromIndex, true);
                        AbstractCompactedRow compactedRow = controller.getCompactedRow(row);
                        if (compactedRow.isEmpty())
                        {
                            emptyRows++;
                        }
                        else
                        {
                            writer.append(compactedRow);
                            goodRows++;
                        }
                    }
                    catch (Throwable th2)
                    {
                        throwIfFatal(th2);
                        // Skipping rows is dangerous for counters (see CASSANDRA-2759)
                        if (isCommutative)
                            throw new IOError(th2);
                        logger.warn("Retry failed too. Skipping to next row (retry's stacktrace follows)", th2);
                        writer.reset();
                        dataFile.seek(nextRowPositionFromIndex);
                        badRows++;
                    }
                }
                else
                {
                    // Skipping rows is dangerous for counters (see CASSANDRA-2759)
                    if (isCommutative)
                        throw new IOError(th);
                    logger.warn("Row at " + dataStart + " is unreadable; skipping to next");
                    if (currentIndexKey != null)
                        dataFile.seek(nextRowPositionFromIndex);
                    badRows++;
                }
            }
        }
        if (writer.getFilePointer() > 0)
        {
            // at least one good row: swap the new sstable in for the old one
            SSTableReader newSstable = writer.closeAndOpenReader(sstable.maxDataAge);
            cfs.replaceCompactedSSTables(Arrays.asList(sstable), Arrays.asList(newSstable));
            logger.info("Scrub of " + sstable + " complete: " + goodRows + " rows in new sstable and " + emptyRows + " empty (tombstoned) rows dropped");
            if (badRows > 0)
                logger.warn("Unable to recover " + badRows + " rows that were skipped. You can attempt manual recovery from the pre-scrub snapshot. You can also run nodetool repair to transfer the data from a healthy replica, if any");
        }
        else
        {
            // nothing salvageable: just retire the old sstable
            cfs.markCompacted(Arrays.asList(sstable));
            if (badRows > 0)
                logger.warn("No valid rows found while scrubbing " + sstable + "; it is marked for deletion now. If you want to attempt manual recovery, you can find a copy in the pre-scrub snapshot");
            else
                logger.info("Scrub of " + sstable + " complete; looks like all " + emptyRows + " rows were tombstoned");
        }
    }
    finally
    {
        FileUtils.closeQuietly(dataFile);
        FileUtils.closeQuietly(indexFile);
    }
}
/**
 * Rethrows th when it is a fatal Error. AssertionError and IOError are treated
 * as recoverable by the scrub/cleanup paths and are not rethrown.
 */
private void throwIfFatal(Throwable th)
{
    if (!(th instanceof Error))
        return;
    if (th instanceof AssertionError || th instanceof IOError)
        return;
    throw (Error) th;
}
/**
 * This function goes over each file and removes the keys that the node is not responsible for
 * and only keeps keys that this node is responsible for.
 *
 * For each sstable, a new sstable is written containing only rows whose token falls
 * inside this node's ranges; dropped rows have their row cache, counter NodeId, and
 * secondary index entries cleaned up instead.
 *
 * @throws IOException
 */
private void doCleanupCompaction(ColumnFamilyStore cfs, Collection<SSTableReader> sstables, NodeId.OneShotRenewer renewer) throws IOException
{
    assert !cfs.isIndex();
    Table table = cfs.table;
    Collection<Range> ranges = StorageService.instance.getLocalRanges(table.name);
    // counter (commutative) CFs need NodeId renewal for discarded counter columns
    boolean isCommutative = cfs.metadata.getDefaultValidator().isCommutative();
    if (ranges.isEmpty())
    {
        logger.info("Cleanup cannot run before a node has joined the ring");
        return;
    }
    for (SSTableReader sstable : sstables)
    {
        CompactionController controller = new CompactionController(cfs, Collections.singletonList(sstable), getDefaultGcBefore(cfs), false);
        long startTime = System.currentTimeMillis();
        long totalkeysWritten = 0;
        int expectedBloomFilterSize = Math.max(DatabaseDescriptor.getIndexInterval(),
                                               (int)(SSTableReader.getApproximateKeyCount(Arrays.asList(sstable))));
        if (logger.isDebugEnabled())
            logger.debug("Expected bloom filter size : " + expectedBloomFilterSize);
        // created lazily via maybeCreateWriter, only if at least one row is kept
        SSTableWriter writer = null;
        logger.info("Cleaning up " + sstable);
        // Calculate the expected compacted filesize
        long expectedRangeFileSize = cfs.getExpectedCompactedFileSize(Arrays.asList(sstable)) / 2;
        String compactionFileLocation = table.getDataFileLocation(expectedRangeFileSize);
        if (compactionFileLocation == null)
            throw new IOException("disk full");
        SSTableScanner scanner = sstable.getDirectScanner(CompactionIterator.FILE_BUFFER_SIZE);
        SortedSet<ByteBuffer> indexedColumns = cfs.getIndexedColumns();
        CleanupInfo ci = new CleanupInfo(sstable, scanner);
        executor.beginCompaction(ci);
        try
        {
            while (scanner.hasNext())
            {
                SSTableIdentityIterator row = (SSTableIdentityIterator) scanner.next();
                if (Range.isTokenInRanges(row.getKey().token, ranges))
                {
                    // row belongs to this node: copy it into the new sstable
                    AbstractCompactedRow compactedRow = controller.getCompactedRow(row);
                    if (compactedRow.isEmpty())
                        continue;
                    writer = maybeCreateWriter(cfs, compactionFileLocation, expectedBloomFilterSize, writer, Collections.singletonList(sstable));
                    writer.append(compactedRow);
                    totalkeysWritten++;
                }
                else
                {
                    // row is being dropped: invalidate caches and clean up index/counter state
                    cfs.invalidateCachedRow(row.getKey());
                    if (!indexedColumns.isEmpty() || isCommutative)
                    {
                        while (row.hasNext())
                        {
                            IColumn column = row.next();
                            if (column instanceof CounterColumn)
                                renewer.maybeRenew((CounterColumn) column);
                            if (indexedColumns.contains(column.name()))
                                Table.cleanupIndexEntry(cfs, row.getKey().key, column);
                        }
                    }
                }
            }
        }
        finally
        {
            scanner.close();
            executor.finishCompaction(ci);
        }
        List<SSTableReader> results = new ArrayList<SSTableReader>();
        if (writer != null)
        {
            SSTableReader newSstable = writer.closeAndOpenReader(sstable.maxDataAge);
            results.add(newSstable);
            String format = "Cleaned up to %s. %,d to %,d (~%d%% of original) bytes for %,d keys. Time: %,dms.";
            long dTime = System.currentTimeMillis() - startTime;
            long startsize = sstable.length();
            long endsize = newSstable.length();
            double ratio = (double)endsize / (double)startsize;
            logger.info(String.format(format, writer.getFilename(), startsize, endsize, (int)(ratio*100), totalkeysWritten, dTime));
        }
        // flush to ensure we don't lose the tombstones on a restart, since they are not commitlog'd
        for (ByteBuffer columnName : cfs.getIndexedColumns())
        {
            try
            {
                cfs.getIndexedColumnFamilyStore(columnName).forceBlockingFlush();
            }
            catch (ExecutionException e)
            {
                throw new RuntimeException(e);
            }
            catch (InterruptedException e)
            {
                throw new AssertionError(e);
            }
        }
        // results is empty if every row was out of range; the old sstable is retired either way
        cfs.replaceCompactedSSTables(Arrays.asList(sstable), results);
    }
}
/**
 * Lazily creates a compaction writer: returns the given writer if one already
 * exists, otherwise creates the destination directory and opens a new writer.
 */
private SSTableWriter maybeCreateWriter(ColumnFamilyStore cfs, String compactionFileLocation, int expectedBloomFilterSize, SSTableWriter writer, Collection<SSTableReader> sstables)
throws IOException
{
    if (writer != null)
        return writer;
    FileUtils.createDirectory(compactionFileLocation);
    return cfs.createCompactionWriter(expectedBloomFilterSize, compactionFileLocation, sstables);
}
/**
 * Performs a readonly "compaction" of all sstables in order to validate complete rows,
 * but without writing the merge result
 *
 * Every merged row is fed to the given validator (anti-entropy service).
 */
private void doValidationCompaction(ColumnFamilyStore cfs, AntiEntropyService.Validator validator) throws IOException
{
    // flush first so everyone is validating data that is as similar as possible
    try
    {
        StorageService.instance.forceTableFlush(cfs.table.name, cfs.getColumnFamilyName());
    }
    catch (ExecutionException e)
    {
        throw new IOException(e);
    }
    catch (InterruptedException e)
    {
        throw new AssertionError(e);
    }
    CompactionIterator ci = new ValidationCompactionIterator(cfs, validator.request.range);
    validationExecutor.beginCompaction(ci);
    try
    {
        // skip nulls produced by the merge (rows that compacted away entirely)
        Iterator<AbstractCompactedRow> nni = new FilterIterator(ci, PredicateUtils.notNullPredicate());
        // validate the CF as we iterate over it
        validator.prepare(cfs);
        while (nni.hasNext())
        {
            AbstractCompactedRow row = nni.next();
            validator.add(row);
        }
        validator.complete();
    }
    finally
    {
        ci.close();
        validationExecutor.finishCompaction(ci);
    }
}
/*
 * Group files of similar size into buckets.
 *
 * A file joins an existing bucket when its size is within 50% of the bucket's
 * running average, or when both the file and the bucket average are "small"
 * (below `min`); otherwise it starts a new bucket.
 */
static <T> Set<List<T>> getBuckets(Collection<Pair<T, Long>> files, long min)
{
    // Sort the list in order to get deterministic results during the grouping below
    List<Pair<T, Long>> sortedFiles = new ArrayList<Pair<T, Long>>(files);
    Collections.sort(sortedFiles, new Comparator<Pair<T, Long>>()
    {
        public int compare(Pair<T, Long> p1, Pair<T, Long> p2)
        {
            return p1.right.compareTo(p2.right);
        }
    });
    // Buckets and their running averages are kept in two parallel lists rather than a
    // Map<List<T>, Long>: a mutable List must not be used as a HashMap key (mutating it
    // while mapped corrupts the map), and the previous remove/mutate/re-add workaround
    // also scanned buckets in nondeterministic HashMap order, defeating the sort above.
    List<List<T>> buckets = new ArrayList<List<T>>();
    List<Long> averages = new ArrayList<Long>();
    for (Pair<T, Long> pair : sortedFiles)
    {
        long size = pair.right;
        boolean bFound = false;
        // look for a bucket containing similar-sized files:
        // group in the same bucket if it's w/in 50% of the average for this bucket,
        // or this file and the bucket are all considered "small" (less than `min`)
        for (int i = 0; i < buckets.size(); i++)
        {
            long averageSize = averages.get(i);
            if ((size > (averageSize / 2) && size < (3 * averageSize) / 2)
                || (size < min && averageSize < min))
            {
                List<T> bucket = buckets.get(i);
                // fold the new file into the bucket's running average
                long totalSize = bucket.size() * averageSize;
                averages.set(i, (totalSize + size) / (bucket.size() + 1));
                bucket.add(pair.left);
                bFound = true;
                break;
            }
        }
        // no similar bucket found; put it in a new one
        if (!bFound)
        {
            List<T> bucket = new ArrayList<T>();
            bucket.add(pair.left);
            buckets.add(bucket);
            averages.add(size);
        }
    }
    return new HashSet<List<T>>(buckets);
}
/**
 * Pairs each sstable with its on-disk length, in a form accepted by getBuckets().
 */
private static Collection<Pair<SSTableReader, Long>> convertSSTablesToPairs(Collection<SSTableReader> collection)
{
    Collection<Pair<SSTableReader, Long>> pairs = new ArrayList<Pair<SSTableReader, Long>>(collection.size());
    for (SSTableReader sstable : collection)
        pairs.add(new Pair<SSTableReader, Long>(sstable, sstable.length()));
    return pairs;
}
/**
 * Is not scheduled, because it is performing disjoint work from sstable compaction.
 *
 * Builds a secondary index via the given builder, under the compaction read lock.
 */
public Future submitIndexBuild(final ColumnFamilyStore cfs, final Table.IndexBuilder builder)
{
    Runnable runnable = new Runnable()
    {
        public void run()
        {
            compactionLock.readLock().lock();
            try
            {
                // the CFS may have been dropped while this task was queued
                if (cfs.isInvalid())
                    return;
                executor.beginCompaction(builder);
                try
                {
                    builder.build();
                }
                finally
                {
                    executor.finishCompaction(builder);
                }
            }
            finally
            {
                compactionLock.readLock().unlock();
            }
        }
    };
    // don't submit to the executor if the compaction lock is held by the current thread. Instead return a simple
    // future that will be immediately get()ed and executed. Happens during a migration, which locks
    // the compaction thread and then reinitializes a ColumnFamilyStore. Under normal circumstances, CFS spawns
    // index jobs to the compaction manager (this) and blocks on them.
    if (compactionLock.isWriteLockedByCurrentThread())
        return new SimpleFuture(runnable);
    else
        return executor.submit(runnable);
}
/**
 * Submits an sstable to be rebuilt: is not scheduled, since the sstable must not exist.
 *
 * @param desc descriptor of the sstable to build
 * @param type the operation type recorded for the build
 * @return a future yielding the newly built SSTableReader
 */
public Future<SSTableReader> submitSSTableBuild(final Descriptor desc, OperationType type)
{
    // invalid descriptions due to missing or dropped CFS are handled by SSTW and StreamInSession.
    final SSTableWriter.Builder builder = SSTableWriter.createBuilder(desc, type);
    Callable<SSTableReader> callable = new Callable<SSTableReader>()
    {
        public SSTableReader call() throws IOException
        {
            // read lock: excluded by write-lock holders (e.g. migrations, see submitIndexBuild)
            compactionLock.readLock().lock();
            try
            {
                // register for progress reporting while the build runs
                executor.beginCompaction(builder);
                try
                {
                    return builder.build();
                }
                finally
                {
                    executor.finishCompaction(builder);
                }
            }
            finally
            {
                compactionLock.readLock().unlock();
            }
        }
    };
    return executor.submit(callable);
}
/**
 * Saves a cache to disk via the given writer. Only one cache flush may be in
 * progress at a time; a concurrent request is skipped rather than queued.
 */
public Future<?> submitCacheWrite(final AutoSavingCache.Writer writer)
{
    Runnable runnable = new WrappedRunnable()
    {
        public void runMayThrow() throws IOException
        {
            // global single-flight guard: bail out if another cache flush is already running
            if (!AutoSavingCache.flushInProgress.compareAndSet(false, true))
            {
                logger.debug("Cache flushing was already in progress: skipping {}", writer.getCompactionInfo());
                return;
            }
            try
            {
                executor.beginCompaction(writer);
                try
                {
                    writer.saveCache();
                }
                finally
                {
                    executor.finishCompaction(writer);
                }
            }
            finally
            {
                // always clear the guard, even if saving failed
                AutoSavingCache.flushInProgress.set(false);
            }
        }
    };
    return executor.submit(runnable);
}
/**
 * For the given CFS and all its secondary indexes, marks as compacted every
 * sstable that is not newer than the truncation timestamp, then invalidates the
 * row cache (cached rows may refer to data that was just truncated).
 */
public Future<?> submitTruncate(final ColumnFamilyStore main, final long truncatedAt)
{
    Runnable runnable = new WrappedRunnable()
    {
        public void runMayThrow() throws InterruptedException, IOException
        {
            for (ColumnFamilyStore cfs : main.concatWithIndexes())
            {
                List<SSTableReader> truncatedSSTables = new ArrayList<SSTableReader>();
                for (SSTableReader sstable : cfs.getSSTables())
                {
                    // keep only sstables written entirely after the truncation point
                    if (!sstable.newSince(truncatedAt))
                        truncatedSSTables.add(sstable);
                }
                cfs.markCompacted(truncatedSSTables);
            }
            main.invalidateRowCache();
        }
    };
    return executor.submit(runnable);
}
/**
 * Default tombstone GC horizon in epoch seconds: now minus gc_grace_seconds.
 * Index column families use Integer.MAX_VALUE (tombstones are never GC'd here).
 */
private static int getDefaultGcBefore(ColumnFamilyStore cfs)
{
    if (cfs.isIndex())
        return Integer.MAX_VALUE;
    int nowInSeconds = (int) (System.currentTimeMillis() / 1000);
    return nowInSeconds - cfs.metadata.getGcGraceSeconds();
}
/**
 * CompactionIterator over all sstables of a CFS, restricted to a token range,
 * used for the read-only VALIDATION compaction. Passes forceDeserialize=true
 * to the controller.
 */
private static class ValidationCompactionIterator extends CompactionIterator
{
    public ValidationCompactionIterator(ColumnFamilyStore cfs, Range range) throws IOException
    {
        super(CompactionType.VALIDATION,
              getCollatingIterator(cfs.getSSTables(), range),
              new CompactionController(cfs, cfs.getSSTables(), getDefaultGcBefore(cfs), true));
    }

    // builds one collating iterator over per-sstable scanners limited to the given range
    protected static CollatingIterator getCollatingIterator(Iterable<SSTableReader> sstables, Range range) throws IOException
    {
        CollatingIterator iter = FBUtilities.getCollatingIterator();
        for (SSTableReader sstable : sstables)
        {
            iter.addIterator(sstable.getDirectScanner(FILE_BUFFER_SIZE, range));
        }
        return iter;
    }
}
/**
 * Number of currently executing tasks across the compaction and validation executors.
 */
public int getActiveCompactions()
{
    int active = executor.getActiveCount();
    active += validationExecutor.getActiveCount();
    return active;
}
/**
 * Thread pool for compaction-like tasks. Also maintains a static registry of the
 * CompactionInfo.Holders of all running tasks, for progress reporting.
 */
private static class CompactionExecutor extends DebuggableThreadPoolExecutor
{
    // a synchronized identity set of running tasks to their compaction info
    private static final Set<CompactionInfo.Holder> compactions = Collections.synchronizedSet(Collections.newSetFromMap(new IdentityHashMap<CompactionInfo.Holder, Boolean>()));

    protected CompactionExecutor(int minThreads, int maxThreads, String name, BlockingQueue<Runnable> queue)
    {
        // 60s keep-alive for threads above the core size; thread priority comes from config
        super(minThreads,
              maxThreads,
              60,
              TimeUnit.SECONDS,
              queue,
              new NamedThreadFactory(name, DatabaseDescriptor.getCompactionThreadPriority()));
    }

    private CompactionExecutor(int threadCount, String name)
    {
        // fixed-size pool with an unbounded queue
        this(threadCount, threadCount, name, new LinkedBlockingQueue<Runnable>());
    }

    public CompactionExecutor()
    {
        this(Math.max(1, DatabaseDescriptor.getConcurrentCompactors()), "CompactionExecutor");
    }

    // register a task as running; callers pair this with finishCompaction in a finally block
    void beginCompaction(CompactionInfo.Holder ci)
    {
        compactions.add(ci);
    }

    void finishCompaction(CompactionInfo.Holder ci)
    {
        compactions.remove(ci);
    }

    // returns a snapshot copy, safe to iterate without holding the set's lock
    public static List<CompactionInfo.Holder> getCompactions()
    {
        return new ArrayList<CompactionInfo.Holder>(compactions);
    }
}
/**
 * Executor for validation compactions. SynchronousQueue plus an unbounded max
 * thread count means each submitted validation gets its own thread instead of
 * queueing behind running ones.
 */
private static class ValidationExecutor extends CompactionExecutor
{
    public ValidationExecutor()
    {
        super(1, Integer.MAX_VALUE, "ValidationExecutor", new SynchronousQueue<Runnable>());
    }
}
/**
 * Snapshot of CompactionInfo for every in-flight compaction task.
 */
public List<CompactionInfo> getCompactions()
{
    List<CompactionInfo> infos = new ArrayList<CompactionInfo>();
    for (CompactionInfo.Holder holder : CompactionExecutor.getCompactions())
        infos.add(holder.getCompactionInfo());
    return infos;
}
/**
 * Human-readable one-line summaries of every in-flight compaction task.
 */
public List<String> getCompactionSummary()
{
    List<String> summaries = new ArrayList<String>();
    for (CompactionInfo.Holder holder : CompactionExecutor.getCompactions())
        summaries.add(holder.getCompactionInfo().toString());
    return summaries;
}
/**
 * Estimate of not-yet-finished work: executor backlog (submitted minus completed
 * on both executors) plus the per-CFS estimated compaction counts.
 */
public int getPendingTasks()
{
    int estimated = 0;
    for (Integer count : estimatedCompactions.values())
        estimated += count;
    long submitted = executor.getTaskCount() + validationExecutor.getTaskCount();
    long completed = executor.getCompletedTaskCount() + validationExecutor.getCompletedTaskCount();
    return (int) (submitted - completed) + estimated;
}
/**
 * Total tasks completed by the compaction and validation executors combined.
 */
public long getCompletedTasks()
{
    long completed = executor.getCompletedTaskCount();
    return completed + validationExecutor.getCompletedTaskCount();
}
/**
 * A Future that runs its task synchronously on the first call to get(). Used when
 * the compaction write lock is already held by the current thread and submitting
 * to the executor would deadlock (see submitIndexBuild). Only the no-argument
 * get() and isDone()/isCancelled() are supported; cancel() and the timed get()
 * throw IllegalStateException.
 */
private static class SimpleFuture implements Future
{
    // the pending task; nulled once executed, which is what isDone() keys off
    private Runnable runnable;

    private SimpleFuture(Runnable r)
    {
        runnable = r;
    }

    public boolean cancel(boolean mayInterruptIfRunning)
    {
        throw new IllegalStateException("May not call SimpleFuture.cancel()");
    }

    public boolean isCancelled()
    {
        return false;
    }

    public boolean isDone()
    {
        return runnable == null;
    }

    public Object get() throws InterruptedException, ExecutionException
    {
        // Run the task exactly once, then always return null. The original nulled
        // the field and returned it (so the first call already returned null), but
        // a second call to get() threw NullPointerException instead of being
        // idempotent as Future.get() callers expect.
        if (runnable != null)
        {
            runnable.run();
            runnable = null;
        }
        return null;
    }

    public Object get(long timeout, TimeUnit unit) throws InterruptedException, ExecutionException, TimeoutException
    {
        throw new IllegalStateException("May not call SimpleFuture.get(long, TimeUnit)");
    }
}
/**
 * Progress reporting for cleanup: reports how far through the sstable the
 * scanner has read, as bytes processed out of total file length.
 */
private static class CleanupInfo implements CompactionInfo.Holder
{
    private final SSTableReader sstable;
    private final SSTableScanner scanner;

    public CleanupInfo(SSTableReader sstable, SSTableScanner scanner)
    {
        this.sstable = sstable;
        this.scanner = scanner;
    }

    public CompactionInfo getCompactionInfo()
    {
        try
        {
            return new CompactionInfo(sstable.descriptor.ksname,
                                      sstable.descriptor.cfname,
                                      CompactionType.CLEANUP,
                                      scanner.getFilePointer(),
                                      scanner.getFileLength());
        }
        catch (Exception e)
        {
            // chain the cause; the original threw a bare RuntimeException and lost it
            throw new RuntimeException(e);
        }
    }
}
/**
 * Progress reporting for scrub: reports the data file position out of its total
 * length for the sstable being scrubbed.
 */
private static class ScrubInfo implements CompactionInfo.Holder
{
    private final BufferedRandomAccessFile dataFile;
    private final SSTableReader sstable;

    public ScrubInfo(BufferedRandomAccessFile dataFile, SSTableReader sstable)
    {
        this.dataFile = dataFile;
        this.sstable = sstable;
    }

    public CompactionInfo getCompactionInfo()
    {
        try
        {
            return new CompactionInfo(sstable.descriptor.ksname,
                                      sstable.descriptor.cfname,
                                      CompactionType.SCRUB,
                                      dataFile.getFilePointer(),
                                      dataFile.length());
        }
        catch (Exception e)
        {
            // chain the cause; the original threw a bare RuntimeException and lost it
            throw new RuntimeException(e);
        }
    }
}
}
| devdattakulkarni/Cassandra-KVPM | src/java/org/apache/cassandra/db/compaction/CompactionManager.java | Java | apache-2.0 | 56,464 |
package com.huawei.esdk.fusioncompute.local.impl.resources.alarm;
import java.util.ArrayList;
import java.util.List;
import org.junit.BeforeClass;
import org.junit.Test;
import com.google.gson.Gson;
import com.huawei.esdk.fusioncompute.local.ServiceFactory;
import com.huawei.esdk.fusioncompute.local.model.ClientProviderBean;
import com.huawei.esdk.fusioncompute.local.model.FCSDKResponse;
import com.huawei.esdk.fusioncompute.local.model.PageList;
import com.huawei.esdk.fusioncompute.local.model.alarm.ActiveAlarmQueryParams;
import com.huawei.esdk.fusioncompute.local.model.alarm.Alarm;
import com.huawei.esdk.fusioncompute.local.model.alarm.HistoryAlarmQueryParams;
import com.huawei.esdk.fusioncompute.local.model.common.LoginResp;
import com.huawei.esdk.fusioncompute.local.resources.alarm.AlarmResource;
import com.huawei.esdk.fusioncompute.local.resources.common.AuthenticateResource;
@SuppressWarnings(value = {"all"})
public class AlarmResourceTest
{
    // Used only to pretty-print service responses in test output.
    private static Gson gson = new Gson();

    // Resource under test; initialised once in setUpBeforeClass.
    private static AlarmResource alarmResource;

    /**
     * Logs in against a live FusionCompute server and obtains the AlarmResource client.
     * NOTE(review): this is an integration test wired to a hardcoded server address
     * (110.168.10.8:7443) and hardcoded credentials; it cannot run outside that
     * environment, and committing credentials in source is a security concern.
     */
    @BeforeClass
    public static void setUpBeforeClass()
    {
        // System.setProperty("javax.net.ssl.trustStore", "D:\\workspace\\esdk_fc_neadp_1.3_native_java\\src\\main\\java");
        ClientProviderBean bean = new ClientProviderBean();
        bean.setProtocol("https");
        bean.setServerIp("110.168.10.8");
        bean.setServerPort("7443");
        bean.setUserName("");
        AuthenticateResource a = null;
        a = ServiceFactory.getService(AuthenticateResource.class, bean);
        FCSDKResponse<LoginResp> r = a.login("suxin", "Huawei@123");
        System.out.println(r.getErrorCode());
        System.out.println(gson.toJson(r));
        alarmResource = ServiceFactory.getService(AlarmResource.class, bean);
    }

    /**
     * Smoke test for queryActiveAlarms: requests one page (offset 10, limit 10) of
     * active alarms for a fixed site, ordered by cleartype/occurtime, and prints
     * each alarm name.
     * NOTE(review): no assertions — failures only surface as exceptions (e.g. an
     * NPE if getResult() is null on an error response).
     */
    @Test
    public void testQueryActiveAlarms()
    {
        ActiveAlarmQueryParams par = new ActiveAlarmQueryParams();
        String siteUri = "/service/sites/400E07BB";
        par.setLimit(10);
        par.setOffset(10);
        List<String> or = new ArrayList<String>();
        or.add("cleartype ASC");
        or.add("occurtime DESC");
        par.setOrder(or);
        List<String> eventType = new ArrayList<String>();
        par.setEventType(eventType);
        FCSDKResponse<PageList<Alarm>> lists = alarmResource.queryActiveAlarms(par, siteUri);
        List<Alarm> list = lists.getResult().getList();
        for (Alarm alarm : list)
        {
            System.out.println(alarm.getSvAlarmName());
        }
    }

    /**
     * Smoke test for queryHistoryAlarms: queries a fixed occurrence-time window,
     * ordered by cleartype/occurtime, and prints each alarm name.
     * NOTE(review): no assertions, same caveat as testQueryActiveAlarms.
     */
    @Test
    public void testQueryHistoryAlarms()
    {
        HistoryAlarmQueryParams par = new HistoryAlarmQueryParams();
        String siteUri = "/service/sites/400E07BB";
        List<String> al = new ArrayList<String>();
        par.setAlarmLevel(al);
        par.setLimit(10);
        par.setOffset(10);
        List<String> or = new ArrayList<String>();
        or.add("cleartype ASC");
        or.add("occurtime DESC");
        par.setOrder(or);
        par.setDisplay(0);
        par.setOccurStartTime("1307790455960");
        par.setOccurStopTime("1407844379863");
        FCSDKResponse<PageList<Alarm>> lists = alarmResource.queryHistoryAlarms(par, siteUri);
        List<Alarm> list = lists.getResult().getList();
        for (Alarm alarm : list)
        {
            System.out.println(alarm.getSvAlarmName());
        }
    }
}
| eSDK/esdk_cloud_fc_native_java | source/src/test/java/com/huawei/esdk/fusioncompute/local/impl/resources/alarm/AlarmResourceTest.java | Java | apache-2.0 | 3,531 |
//
// Copyright 2015 Blu Age Corporation - Plano, Texas
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This file has been modified.
// Original copyright notice :
/*
* Copyright 2006-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using NLog;
using Summer.Batch.Infrastructure.Repeat;
using Summer.Batch.Common.Util;
using System;
using System.Collections.Concurrent;
namespace Summer.Batch.Core.Scope.Context
{
/// <summary>
/// Does the work required for one chunk of the step. The <see cref="ChunkContext"/>
/// provided is managed by the caller, so that if there is still work to
/// do for the task in hand, state can be stored here. In a multi-threaded
/// client, the caller ensures that only one thread at a time can be
/// working on each instance of <see cref="ChunkContext"/>. Workers should signal
/// that they are finished with a context by removing all the attributes they
/// have added. If a worker does not remove them, another thread might see
/// stale state.
/// NOTE : moved from abstract method to delegate
/// </summary>
/// <param name="context">the repeat context supplied by the repeat framework</param>
/// <param name="chunkContext">per-chunk state holder for the work in hand</param>
/// <exception cref="Exception">any exception thrown by the chunk processing</exception>
/// <returns>a <see cref="RepeatStatus"/> indicating whether processing should continue</returns>
public delegate RepeatStatus DoInChunkContext(IRepeatContext context, ChunkContext chunkContext);
/// <summary>
/// Static factory for repeat callbacks that manage the StepContext lifecycle around
/// chunk processing inside an IStep. (In the original Spring Batch this was an
/// abstract base class; here the business logic is injected as a delegate instead.)
/// </summary>
public static class StepContextRepeatCallback
{
    /// <summary>
    /// Manage the StepContext lifecycle. Business processing should be
    /// delegated to <see cref="DoInChunkContext"/>. This
    /// is to ensure that the current thread has a reference to the context, even
    /// if the callback is executed in a pooled thread. Handles the registration
    /// and unregistration of the step context, so clients should not duplicate
    /// those calls.
    /// </summary>
    /// <param name="stepExecution">the step execution used to register and look up the step context</param>
    /// <param name="doInChunkContext">the business logic to execute for each chunk</param>
    /// <returns>a <see cref="RepeatCallback"/> wrapping doInChunkContext with context management</returns>
    public static RepeatCallback GetRepeatCallback(StepExecution stepExecution, DoInChunkContext doInChunkContext)
    {
        // Holds chunk contexts that still have pending work. It is captured by the
        // closure below and therefore shared by all invocations of the returned callback.
        BlockingCollection<ChunkContext> attributeQueue = new BlockingCollection<ChunkContext>();
        return context =>
        {
            // The StepContext has to be the same for all chunks,
            // otherwise step-scoped beans will be re-initialised for each chunk.
            StepContext stepContext = StepSynchronizationManager.Register(stepExecution);
            if (Logger.IsDebugEnabled)
            {
                Logger.Debug("Preparing chunk execution for StepContext: {0}",
                    ObjectUtils.IdentityToString(stepContext));
            }
            // Reuse a chunk context with leftover work if one is queued; otherwise start fresh.
            ChunkContext chunkContext;
            attributeQueue.TryTake(out chunkContext);
            if (chunkContext == null)
            {
                chunkContext = new ChunkContext(stepContext);
            }
            try
            {
                Logger.Debug("Chunk execution starting: queue size= {0}", attributeQueue.Count);
                return doInChunkContext(context, chunkContext); //Delegation
            }
            finally
            {
                // Still some stuff to do with the data in this chunk,
                // pass it back.
                if (!chunkContext.Complete)
                {
                    attributeQueue.Add(chunkContext);
                }
                StepSynchronizationManager.Close();
            }
        };
    }

    /// <summary>
    /// Logger
    /// </summary>
    private static readonly Logger Logger = LogManager.GetCurrentClassLogger();
}
}
| pkubryk/SummerBatch | Summer.Batch.Core/Core/Scope/Context/StepContextRepeatCallback.cs | C# | apache-2.0 | 5,156 |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.platform.templates;
import com.intellij.facet.frameworks.beans.Artifact;
import com.intellij.ide.util.projectWizard.ModuleBuilder;
import com.intellij.ide.util.projectWizard.ProjectTemplateParameterFactory;
import com.intellij.ide.util.projectWizard.WizardInputField;
import com.intellij.openapi.module.ModuleType;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.openapi.util.io.StreamUtil;
import com.intellij.platform.ProjectTemplate;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xmlb.XmlSerializer;
import com.intellij.util.xmlb.annotations.Property;
import com.intellij.util.xmlb.annotations.Tag;
import com.intellij.util.xmlb.annotations.XCollection;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.zip.ZipInputStream;
/**
* @author Dmitry Avdeev
*/
@Tag("template")
public abstract class ArchivedProjectTemplate implements ProjectTemplate {
  // XML element/attribute names used when deserializing a template definition
  // in populateFromElement()/getFields().
  public static final String INPUT_FIELD = "input-field";
  public static final String TEMPLATE = "template";
  public static final String INPUT_DEFAULT = "default";
  protected final String myDisplayName;
  @Nullable private final String myCategory;
  // Wizard input fields parsed from <input-field> children; empty until
  // populateFromElement() is called.
  private List<WizardInputField> myInputFields = Collections.emptyList();
  private List<String> myFrameworks = new ArrayList<>();
  private List<Artifact> myArtifacts = new ArrayList<>();
  public ArchivedProjectTemplate(@NotNull String displayName, @Nullable String category) {
    myDisplayName = displayName;
    myCategory = category;
  }
  @NotNull
  @Override
  public String getName() {
    return myDisplayName;
  }
  @Override
  public Icon getIcon() {
    // Delegates to the module type so the template shows the icon of the
    // module kind it produces.
    return getModuleType().getIcon();
  }
  /** Module type the archived template produces; also determines the icon. */
  protected abstract ModuleType getModuleType();
  @NotNull
  @Override
  public ModuleBuilder createModuleBuilder() {
    return new TemplateModuleBuilder(this, getModuleType(), getInputFields());
  }
  /** Input fields to show in the new-project wizard for this template. */
  @NotNull
  public List<WizardInputField> getInputFields() {
    return myInputFields;
  }
  // Serialized as a flat list of <artifact> elements (no surrounding tag).
  @Property(surroundWithTag = false)
  @XCollection(elementName = "artifact")
  public List<Artifact> getArtifacts() {
    return myArtifacts;
  }
  public void setArtifacts(List<Artifact> artifacts) {
    myArtifacts = artifacts;
  }
  // Serialized as a flat list of <framework> elements whose text is the value.
  @NotNull
  @Property(surroundWithTag = false)
  @XCollection(elementName = "framework", valueAttributeName = "")
  public List<String> getFrameworks() {
    return myFrameworks;
  }
  public void setFrameworks(List<String> frameworks) {
    myFrameworks = frameworks;
  }
  @Nullable
  @Override
  public ValidationInfo validateSettings() {
    // Archived templates have no settings of their own to validate.
    return null;
  }
  /**
   * Hook called after the archive has been unpacked into {@code dir};
   * by default just schedules the directory for VFS refresh.
   */
  public void handleUnzippedDirectories(@NotNull File dir, @NotNull List<? super File> filesToRefresh) throws IOException {
    filesToRefresh.add(dir);
  }
  public static abstract class StreamProcessor<T> {
    public abstract T consume(@NotNull ZipInputStream stream) throws IOException;
  }
  /** Opens the template archive and feeds it to {@code consumer}. */
  public abstract <T> T processStream(@NotNull StreamProcessor<T> consumer) throws IOException;
  @Nullable
  public String getCategory() {
    return myCategory;
  }
  /** Deserializes bean properties and wizard input fields from a <template> element. */
  public void populateFromElement(@NotNull Element element) {
    XmlSerializer.deserializeInto(this, element);
    myInputFields = getFields(element);
  }
  private static List<WizardInputField> getFields(Element templateElement) {
    // Each <input-field> element's text is a factory id; unknown ids are skipped.
    return ContainerUtil
      .mapNotNull(templateElement.getChildren(INPUT_FIELD), element -> {
        ProjectTemplateParameterFactory factory = WizardInputField.getFactoryById(element.getText());
        return factory == null ? null : factory.createField(element.getAttributeValue(INPUT_DEFAULT));
      });
  }
  /** Runs {@code consumer} over {@code stream}, always closing the stream afterwards. */
  protected static <T> T consumeZipStream(@NotNull StreamProcessor<T> consumer, @NotNull ZipInputStream stream) throws IOException {
    try {
      return consumer.consume(stream);
    }
    finally {
      StreamUtil.closeStream(stream);
    }
  }
}
| msebire/intellij-community | platform/lang-impl/src/com/intellij/platform/templates/ArchivedProjectTemplate.java | Java | apache-2.0 | 4,228 |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.xml.impl.schema;
import com.intellij.psi.PsiElement;
import com.intellij.psi.xml.XmlDocument;
import com.intellij.psi.xml.XmlElement;
import com.intellij.psi.xml.XmlFile;
import com.intellij.psi.xml.XmlTag;
import com.intellij.xml.XmlNSDescriptor;
import com.intellij.xml.util.XmlUtil;
import org.jetbrains.annotations.NonNls;
/**
* @author ik
*/
public class XmlElementDescriptorByType extends XmlElementDescriptorImpl {
  // The complex type this descriptor is built from (identity of the descriptor;
  // equals()/hashCode() are based solely on this field).
  private ComplexTypeDescriptor myType;
  @NonNls
  public static final String QUALIFIED_ATTR_VALUE = "qualified";
  public XmlElementDescriptorByType(XmlTag instanceTag, ComplexTypeDescriptor descriptor) {
    myDescriptorTag = instanceTag;
    myType = descriptor;
  }
  // No-arg constructor; leaves fields unset. Presumably required for
  // reflective instantiation — TODO(review): confirm against callers.
  public XmlElementDescriptorByType() {}
  public XmlTag getDeclaration(){
    return myDescriptorTag;
  }
  public String getName(PsiElement context){
    return myDescriptorTag.getName();
  }
  public XmlNSDescriptor getNSDescriptor() {
    // Lazily resolved from the file containing the type declaration and cached
    // in the inherited NSDescriptor field; returns null if the containing file
    // or its document cannot be found.
    XmlNSDescriptor nsDescriptor = NSDescriptor;
    if (nsDescriptor ==null) {
      final XmlFile file = XmlUtil.getContainingFile(getType(null).getDeclaration());
      if(file == null) return null;
      final XmlDocument document = file.getDocument();
      if(document == null) return null;
      NSDescriptor = nsDescriptor = (XmlNSDescriptor)document.getMetaData();
    }
    return nsDescriptor;
  }
  public ComplexTypeDescriptor getType(XmlElement context) {
    return myType;
  }
  public String getDefaultName() {
    // NOTE(review): getDocument()/getRootTag() may return null for malformed
    // files; this chain would then NPE — verify upstream guarantees.
    XmlTag rootTag = ((XmlFile)getType(null).getDeclaration().getContainingFile()).getDocument().getRootTag();
    // Per XML Schema, elementFormDefault="qualified" means local elements must
    // be namespace-qualified, so the qualified name is the default.
    if (QUALIFIED_ATTR_VALUE.equals(rootTag.getAttributeValue("elementFormDefault"))) {
      return getQualifiedName();
    }
    return getName();
  }
  protected boolean askParentDescriptorViaXsi() {
    return false;
  }
  public boolean equals(final Object o) {
    if (this == o) return true;
    if (!(o instanceof XmlElementDescriptorByType)) return false;
    final XmlElementDescriptorByType that = (XmlElementDescriptorByType)o;
    if (myType != null ? !myType.equals(that.myType) : that.myType != null) return false;
    return true;
  }
  public int hashCode() {
    // Consistent with equals(): both consider only myType.
    return (myType != null ? myType.hashCode() : 0);
  }
}
| IllusionRom-deprecated/android_platform_tools_idea | xml/xml-psi-impl/src/com/intellij/xml/impl/schema/XmlElementDescriptorByType.java | Java | apache-2.0 | 2,851 |
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Consul.Net;
namespace Consul.Net.TestSuite
{
[TestClass]
public class AgentTests
{
[TestInitialize]
public void Setup()
{
client = new ConsulClient();
}
private ConsulClient client;
[TestMethod]
public void GetAgentChecks()
{
}
[TestMethod]
public void GetAgentSelf()
{
var self = client.AgentSelf();
var config = self.Item1;
var member = self.Item2;
Assert.IsNotNull(config);
Assert.IsNotNull(member);
}
}
}
| yonglehou/consul-net | src/Consul.Net.TestSuite/AgentTests.cs | C# | apache-2.0 | 746 |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cli;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.facebook.buck.io.MorePathsForTests;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.testutil.integration.DebuggableTemporaryFolder;
import com.facebook.buck.testutil.integration.ProjectWorkspace;
import com.facebook.buck.testutil.integration.ProjectWorkspace.ProcessResult;
import com.facebook.buck.testutil.integration.TestDataHelper;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.environment.Architecture;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.easymock.EasyMock;
import org.hamcrest.Matchers;
import org.junit.Rule;
import org.junit.Test;
import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import java.nio.file.Path;
import java.nio.file.Paths;
public class BuckConfigTest {
  @Rule
  public DebuggableTemporaryFolder temporaryFolder = new DebuggableTemporaryFolder();
  /**
   * Ensure that whichever alias is listed first in the file is the one used in the reverse map if
   * the value appears multiple times.
   */
  @Test
  public void testGetBasePathToAliasMap() throws IOException, NoSuchBuildTargetException {
    Reader reader1 = new StringReader(Joiner.on('\n').join(
        "[alias]",
        "fb4a   =   //java/com/example:fbandroid",
        "katana =   //java/com/example:fbandroid"));
    BuckConfig config1 = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        reader1);
    assertEquals(
        ImmutableMap.of(Paths.get("java/com/example"), "fb4a"),
        config1.getBasePathToAliasMap());
    assertEquals(
        ImmutableMap.of(
            "fb4a", "//java/com/example:fbandroid",
            "katana", "//java/com/example:fbandroid"),
        config1.getEntriesForSection("alias"));
    Reader reader2 = new StringReader(Joiner.on('\n').join(
        "[alias]",
        "katana =   //java/com/example:fbandroid",
        "fb4a   =   //java/com/example:fbandroid"));
    BuckConfig config2 = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        reader2);
    assertEquals(
        ImmutableMap.of(Paths.get("java/com/example"), "katana"),
        config2.getBasePathToAliasMap());
    assertEquals(
        ImmutableMap.of(
            "fb4a", "//java/com/example:fbandroid",
            "katana", "//java/com/example:fbandroid"),
        config2.getEntriesForSection("alias"));
    Reader noAliasesReader = new StringReader("");
    BuckConfig noAliasesConfig = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        noAliasesReader);
    assertEquals(ImmutableMap.of(), noAliasesConfig.getBasePathToAliasMap());
    assertEquals(ImmutableMap.of(), noAliasesConfig.getEntriesForSection("alias"));
  }
  @Test
  public void testConstructorThrowsForMalformedBuildTarget() throws IOException {
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[alias]",
        "fb4a = :fb4a"));
    ProjectFilesystem projectFilesystem = EasyMock.createMock(ProjectFilesystem.class);
    EasyMock.replay(projectFilesystem);
    try {
      BuckConfigTestUtils.createWithDefaultFilesystem(temporaryFolder, reader);
      fail("Should have thrown HumanReadableException.");
    } catch (HumanReadableException e) {
      assertEquals("Path in :fb4a must start with //", e.getHumanReadableErrorMessage());
    }
    EasyMock.verify(projectFilesystem);
  }
  @Test
  public void testConstructorWithNonExistentBasePath() throws IOException {
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[alias]",
        "katana = //java/com/example:fb4a"));
    // BuckConfig should allow nonexistent targets without throwing.
    BuckConfigTestUtils.createWithDefaultFilesystem(temporaryFolder, reader);
  }
  @Test
  public void testGetBuildTargetForAlias() throws IOException, NoSuchBuildTargetException {
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[alias]",
        "foo = //java/com/example:foo",
        "bar = //java/com/example:bar"));
    BuckConfig config = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        reader);
    assertEquals("//java/com/example:foo", config.getBuildTargetForAliasAsString("foo"));
    assertEquals("//java/com/example:bar", config.getBuildTargetForAliasAsString("bar"));
    // Flavors on alias.
    assertEquals("//java/com/example:foo#src_jar", config.getBuildTargetForAliasAsString(
        "foo#src_jar"));
    assertEquals("//java/com/example:bar#fl1,fl2", config.getBuildTargetForAliasAsString(
        "bar#fl1,fl2"));
    assertNull(
        "Invalid alias names, such as build targets, should be tolerated by this method.",
        config.getBuildTargetForAliasAsString("//java/com/example:foo"));
    assertNull(config.getBuildTargetForAliasAsString("baz"));
    assertNull(config.getBuildTargetForAliasAsString("baz#src_jar"));
    Reader noAliasesReader = new StringReader("");
    BuckConfig noAliasesConfig = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        noAliasesReader);
    assertNull(noAliasesConfig.getBuildTargetForAliasAsString("foo"));
    assertNull(noAliasesConfig.getBuildTargetForAliasAsString("bar"));
    assertNull(noAliasesConfig.getBuildTargetForAliasAsString("baz"));
  }
  @Test
  public void testGetBuildTargetListResolvesAliases()
      throws IOException, NoSuchBuildTargetException {
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[alias]",
        "foo = //java/com/example:foo",
        "[section]",
        "some_list = \\",
        "foo, \\",
        "//java/com/example:bar"));
    BuckConfig config = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        reader);
    ImmutableList<String> expected = ImmutableList.<String>of(
        "//java/com/example:foo",
        "//java/com/example:bar");
    ImmutableList<String> result = ImmutableList.copyOf(FluentIterable
        .from(config.getBuildTargetList("section", "some_list"))
        .transform(Functions.toStringFunction()));
    assertThat(result, Matchers.equalTo(expected));
  }
  /**
   * Ensures that all public methods of BuckConfig return reasonable values for an empty config.
   */
  @Test
  public void testEmptyConfig() {
    BuckConfig emptyConfig = FakeBuckConfig.builder().build();
    assertEquals(ImmutableMap.<String, String>of(), emptyConfig.getEntriesForSection("alias"));
    assertNull(emptyConfig.getBuildTargetForAliasAsString("fb4a"));
    assertEquals(ImmutableMap.<Path, String>of(), emptyConfig.getBasePathToAliasMap());
  }
  @Test
  public void testValidateAliasName() {
    BuckConfig.validateAliasName("f");
    BuckConfig.validateAliasName("_");
    BuckConfig.validateAliasName("F");
    BuckConfig.validateAliasName("fb4a");
    BuckConfig.validateAliasName("FB4A");
    BuckConfig.validateAliasName("FB4_");
    try {
      BuckConfig.validateAliasName("");
      fail("Should have thrown HumanReadableException");
    } catch (HumanReadableException e) {
      assertEquals("Alias cannot be the empty string.", e.getHumanReadableErrorMessage());
    }
    try {
      BuckConfig.validateAliasName("42meaningOfLife");
      fail("Should have thrown HumanReadableException");
    } catch (HumanReadableException e) {
      assertEquals("Not a valid alias: 42meaningOfLife.", e.getHumanReadableErrorMessage());
    }
  }
  @Test
  public void testReferentialAliases() throws IOException, NoSuchBuildTargetException {
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[alias]",
        "foo            = //java/com/example:foo",
        "bar            = //java/com/example:bar",
        "foo_codename   = foo",
        "",
        "# Do not delete these: automation builds require these aliases to exist!",
        "automation_foo = foo_codename",
        "automation_bar = bar"));
    BuckConfig config = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        reader);
    assertEquals("//java/com/example:foo", config.getBuildTargetForAliasAsString("foo"));
    assertEquals("//java/com/example:bar", config.getBuildTargetForAliasAsString("bar"));
    assertEquals("//java/com/example:foo", config.getBuildTargetForAliasAsString("foo_codename"));
    assertEquals("//java/com/example:foo", config.getBuildTargetForAliasAsString("automation_foo"));
    assertEquals("//java/com/example:bar", config.getBuildTargetForAliasAsString("automation_bar"));
    assertNull(config.getBuildTargetForAliasAsString("baz"));
  }
  @Test
  public void testUnresolvedAliasThrows() throws IOException, NoSuchBuildTargetException {
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[alias]",
        "foo = //java/com/example:foo",
        "bar = food"));
    try {
      BuckConfigTestUtils.createWithDefaultFilesystem(temporaryFolder, reader);
      fail("Should have thrown HumanReadableException.");
    } catch (HumanReadableException e) {
      assertEquals("No alias for: food.", e.getHumanReadableErrorMessage());
    }
  }
  @Test
  public void testDuplicateAliasDefinitionThrows() throws IOException, NoSuchBuildTargetException {
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[alias]",
        "foo = //java/com/example:foo",
        "foo = //java/com/example:foo"));
    try {
      BuckConfigTestUtils.createWithDefaultFilesystem(temporaryFolder, reader);
      fail("Should have thrown HumanReadableException.");
    } catch (HumanReadableException e) {
      assertEquals(
          "Throw an exception if there are duplicate definitions for an alias, " +
              "even if the values are the same.",
          "Duplicate definition for foo in the [alias] section of your .buckconfig or " +
              ".buckconfig.local.",
          e.getHumanReadableErrorMessage());
    }
  }
  @Test
  public void testExcludedLabels() throws IOException {
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[test]",
        "excluded_labels = windows, linux"));
    BuckConfig config = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        reader);
    assertEquals(
        ImmutableList.of("windows", "linux"),
        config.getDefaultRawExcludedLabelSelectors());
  }
  @Test
  public void testResolveNullPathThatMayBeOutsideTheProjectFilesystem() throws IOException {
    BuckConfig config = createFromText("");
    assertNull(config.resolvePathThatMayBeOutsideTheProjectFilesystem(null));
  }
  @Test
  public void testResolveAbsolutePathThatMayBeOutsideTheProjectFilesystem() throws IOException {
    BuckConfig config = createFromText("");
    assertEquals(
        MorePathsForTests.rootRelativePath("foo/bar"),
        config.resolvePathThatMayBeOutsideTheProjectFilesystem(
            MorePathsForTests.rootRelativePath("foo/bar")));
  }
  @Test
  public void testResolveRelativePathThatMayBeOutsideTheProjectFilesystem() throws IOException {
    BuckConfig config = createFromText("");
    assertEquals(
        MorePathsForTests.rootRelativePath("project/foo/bar"),
        config.resolvePathThatMayBeOutsideTheProjectFilesystem(Paths.get("../foo/bar")));
  }
  @Test
  public void testBuckPyIgnorePaths() throws IOException {
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(
        this, "buck_py_ignore_paths", temporaryFolder);
    workspace.setUp();
    ProcessResult result = workspace.runBuckCommand("test", "--all");
    result.assertSuccess("buck test --all should exit cleanly");
  }
  @Test
  public void testGetDefaultTestTimeoutMillis() throws IOException {
    assertEquals(0L, FakeBuckConfig.builder().build().getDefaultTestTimeoutMillis());
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[test]",
        "timeout = 54321"));
    BuckConfig config = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        reader);
    assertEquals(54321L, config.getDefaultTestTimeoutMillis());
  }
  @Test
  public void testGetMaxTraces() throws IOException {
    assertEquals(25, FakeBuckConfig.builder().build().getMaxTraces());
    Reader reader = new StringReader(Joiner.on('\n').join(
        "[log]",
        "max_traces = 42"));
    BuckConfig config = BuckConfigTestUtils.createWithDefaultFilesystem(
        temporaryFolder,
        reader);
    assertEquals(42, config.getMaxTraces());
  }
  @Test
  public void testGetAndroidTargetSdkWithSpaces() throws IOException {
    BuckConfig config = createFromText(
        "[android]",
        "target = Google Inc.:Google APIs:16");
    assertEquals(
        "Google Inc.:Google APIs:16",
        config.getValue("android", "target").get());
  }
  @Test
  public void testCreateAnsi() {
    BuckConfig windowsConfig = FakeBuckConfig.builder()
        .setArchitecture(Architecture.X86_64)
        .setPlatform(Platform.WINDOWS)
        .build();
    // "auto" on Windows is equivalent to "never".
    assertFalse(windowsConfig.createAnsi(Optional.<String>absent()).isAnsiTerminal());
    assertFalse(windowsConfig.createAnsi(Optional.of("auto")).isAnsiTerminal());
    assertTrue(windowsConfig.createAnsi(Optional.of("always")).isAnsiTerminal());
    assertFalse(windowsConfig.createAnsi(Optional.of("never")).isAnsiTerminal());
    BuckConfig linuxConfig = FakeBuckConfig.builder()
        .setArchitecture(Architecture.I386)
        .setPlatform(Platform.LINUX)
        .build();
    // We don't test "auto" on Linux, because the behavior would depend on how the test was run.
    assertTrue(linuxConfig.createAnsi(Optional.of("always")).isAnsiTerminal());
    assertFalse(linuxConfig.createAnsi(Optional.of("never")).isAnsiTerminal());
  }
  @Test
  public void getEnvUsesSuppliedEnvironment() {
    String name = "SOME_ENVIRONMENT_VARIABLE";
    String value = "SOME_VALUE";
    BuckConfig config = FakeBuckConfig.builder()
        .setEnvironment(ImmutableMap.of(name, value))
        .build();
    String[] expected = {value};
    assertArrayEquals("Should match value in environment.", expected, config.getEnv(name, ":"));
  }
  /** Builds a BuckConfig from raw .buckconfig lines on a fake filesystem rooted at project/root. */
  private BuckConfig createFromText(String... lines) throws IOException {
    ProjectFilesystem projectFilesystem = new FakeProjectFilesystem() {
      @Override
      public Path getRootPath() {
        return MorePathsForTests.rootRelativePath("project/root");
      }
    };
    StringReader reader = new StringReader(Joiner.on('\n').join(lines));
    return BuckConfigTestUtils.createFromReader(
        reader,
        projectFilesystem,
        Architecture.detect(),
        Platform.detect(),
        ImmutableMap.copyOf(System.getenv()));
  }
  @Test
  public void testShouldSetNumberOfThreadsFromBuckConfig() {
    BuckConfig buckConfig = FakeBuckConfig.builder().setSections(ImmutableMap.of(
        "build",
        ImmutableMap.of("threads", "3"))).build();
    assertThat(buckConfig.getNumThreads(), Matchers.equalTo(3));
  }
  @Test
  public void testDefaultsNumberOfBuildThreadsToOneAndAQuarterTheNumberOfAvailableProcessors() {
    // NOTE(review): the method name says "one and a quarter" but the assertion
    // expects exactly availableProcessors(); one of the two is stale — confirm
    // against BuckConfig.getNumThreads() and rename or fix accordingly.
    BuckConfig buckConfig = FakeBuckConfig.builder().build();
    assertThat(
        buckConfig.getNumThreads(),
        Matchers.equalTo(Runtime.getRuntime().availableProcessors()));
  }
  @Test
  public void testDefaultsNumberOfBuildThreadsSpecified() {
    BuckConfig buckConfig = FakeBuckConfig.builder().build();
    assertThat(buckConfig.getNumThreads(42), Matchers.equalTo(42));
  }
  @Test
  public void testBuildThreadsRatioSanityCheck() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build", ImmutableMap.of("thread_core_ratio", "1")))
        .build();
    assertThat(buckConfig.getDefaultMaximumNumberOfThreads(10), Matchers.equalTo(10));
  }
  @Test
  public void testBuildThreadsRatioGreaterThanZero() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build", ImmutableMap.of("thread_core_ratio", "0.00001")))
        .build();
    // Tiny positive ratios still round up to at least one thread.
    assertThat(buckConfig.getDefaultMaximumNumberOfThreads(1), Matchers.equalTo(1));
  }
  @Test
  public void testBuildThreadsRatioRoundsUp() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build", ImmutableMap.of("thread_core_ratio", "0.3")))
        .build();
    // 0.3 * 4 = 1.2, which rounds up to 2.
    assertThat(buckConfig.getDefaultMaximumNumberOfThreads(4), Matchers.equalTo(2));
  }
  @Test
  public void testNonZeroBuildThreadsRatio() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build", ImmutableMap.of("thread_core_ratio", "0.1")))
        .build();
    assertThat(buckConfig.getDefaultMaximumNumberOfThreads(1), Matchers.equalTo(1));
  }
  @Test
  public void testZeroBuildThreadsRatio() {
    try {
      BuckConfig buckConfig = FakeBuckConfig.builder()
          .setSections(
              ImmutableMap.of(
                  "build", ImmutableMap.of("thread_core_ratio", "0")))
          .build();
      buckConfig.getDefaultMaximumNumberOfThreads(1);
      // Bug fix: previously the test passed vacuously when no exception was thrown.
      fail("Should have thrown HumanReadableException.");
    } catch (HumanReadableException e) {
      assertThat(
          e.getHumanReadableErrorMessage(),
          Matchers.startsWith("thread_core_ratio must be greater than zero"));
    }
  }
  @Test
  public void testLessThanZeroBuildThreadsRatio() {
    try {
      BuckConfig buckConfig = FakeBuckConfig.builder()
          .setSections(
              ImmutableMap.of(
                  "build", ImmutableMap.of("thread_core_ratio", "-0.1")))
          .build();
      buckConfig.getDefaultMaximumNumberOfThreads(1);
      // Bug fix: previously the test passed vacuously when no exception was thrown.
      fail("Should have thrown HumanReadableException.");
    } catch (HumanReadableException e) {
      assertThat(
          e.getHumanReadableErrorMessage(),
          Matchers.startsWith("thread_core_ratio must be greater than zero"));
    }
  }
  @Test
  public void testBuildThreadsRatioWithReservedCores() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build",
                ImmutableMap.of(
                    "thread_core_ratio", "1",
                    "thread_core_ratio_reserved_cores", "2"
                )
            )
        )
        .build();
    assertThat(buckConfig.getDefaultMaximumNumberOfThreads(10), Matchers.equalTo(8));
  }
  @Test
  public void testCappedBuildThreadsRatio() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build",
                ImmutableMap.of(
                    "thread_core_ratio", "0.5",
                    "thread_core_ratio_max_threads", "4"
                )
            )
        )
        .build();
    assertThat(buckConfig.getDefaultMaximumNumberOfThreads(10), Matchers.equalTo(4));
  }
  @Test
  public void testFloorLimitedBuildThreadsRatio() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build",
                ImmutableMap.of(
                    "thread_core_ratio", "0.25",
                    "thread_core_ratio_min_threads", "6"
                )
            )
        )
        .build();
    assertThat(buckConfig.getDefaultMaximumNumberOfThreads(10), Matchers.equalTo(6));
  }
  @Test
  public void testEqualsForDaemonRestart() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build", ImmutableMap.of("threads", "3"),
                "cxx", ImmutableMap.of("cc", "/some_location/gcc")))
        .build();
    BuckConfig buckConfigMoreThreads = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build", ImmutableMap.of("threads", "4"),
                "cxx", ImmutableMap.of("cc", "/some_location/gcc")))
        .build();
    BuckConfig buckConfigDifferentCompiler = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "build", ImmutableMap.of("threads", "3"),
                "cxx", ImmutableMap.of("cc", "/some_location/clang")))
        .build();
    // Thread count differences should not force a daemon restart, but a
    // compiler change should.
    assertFalse(buckConfig.equals(buckConfigMoreThreads));
    assertFalse(buckConfig.equals(buckConfigDifferentCompiler));
    assertTrue(buckConfig.equalsForDaemonRestart(buckConfigMoreThreads));
    assertFalse(buckConfig.equalsForDaemonRestart(buckConfigDifferentCompiler));
    assertFalse(buckConfigMoreThreads.equalsForDaemonRestart(buckConfigDifferentCompiler));
  }
  @Test
  public void hasUserDefinedValueReturnsTrueForEmptySetting() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(
            ImmutableMap.of(
                "cache", ImmutableMap.of("mode", "")))
        .build();
    assertTrue(buckConfig.hasUserDefinedValue("cache", "mode"));
  }
  @Test
  public void hasUserDefinedValueReturnsFalseForNoSetting() {
    BuckConfig buckConfig = FakeBuckConfig.builder()
        .setSections(ImmutableMap.<String, ImmutableMap<String, String>>of())
        .build();
    assertFalse(buckConfig.hasUserDefinedValue("cache", "mode"));
  }
}
| bocon13/buck | test/com/facebook/buck/cli/BuckConfigTest.java | Java | apache-2.0 | 22,432 |
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <windows.h>
#include <audiopolicy.h>
#include <mmdeviceapi.h>
#include "nsIStringBundle.h"
#include "nsIUUIDGenerator.h"
#include "nsIXULAppInfo.h"
//#include "AudioSession.h"
#include "nsCOMPtr.h"
#include "nsAutoPtr.h"
#include "nsServiceManagerUtils.h"
#include "nsString.h"
#include "nsThreadUtils.h"
#include "nsXULAppAPI.h"
#include "mozilla/Attributes.h"
#include <objbase.h>
namespace mozilla {
namespace widget {
/*
* To take advantage of what Vista+ have to offer with respect to audio,
* we need to maintain an audio session. This class wraps IAudioSessionControl
* and implements IAudioSessionEvents (for callbacks from Windows)
*/
class AudioSession MOZ_FINAL : public IAudioSessionEvents {
private:
  // Constructed only through GetSingleton(); see sService below.
  AudioSession();
  ~AudioSession();
public:
  static AudioSession* GetSingleton();
  // COM IUnknown
  STDMETHODIMP_(ULONG) AddRef();
  STDMETHODIMP QueryInterface(REFIID, void**);
  STDMETHODIMP_(ULONG) Release();
  // IAudioSessionEvents
  STDMETHODIMP OnChannelVolumeChanged(DWORD aChannelCount,
                                      float aChannelVolumeArray[],
                                      DWORD aChangedChannel,
                                      LPCGUID aContext);
  STDMETHODIMP OnDisplayNameChanged(LPCWSTR aDisplayName, LPCGUID aContext);
  STDMETHODIMP OnGroupingParamChanged(LPCGUID aGroupingParam, LPCGUID aContext);
  STDMETHODIMP OnIconPathChanged(LPCWSTR aIconPath, LPCGUID aContext);
  STDMETHODIMP OnSessionDisconnected(AudioSessionDisconnectReason aReason);
private:
  nsresult OnSessionDisconnectedInternal();
public:
  STDMETHODIMP OnSimpleVolumeChanged(float aVolume,
                                     BOOL aMute,
                                     LPCGUID aContext);
  STDMETHODIMP OnStateChanged(AudioSessionState aState);
  nsresult Start();
  nsresult Stop();
  void StopInternal();
  // Accessors used to ship session identity (id/name/icon) between processes;
  // see the free-function wrappers below.
  nsresult GetSessionData(nsID& aID,
                          nsString& aSessionName,
                          nsString& aIconPath);
  nsresult SetSessionData(const nsID& aID,
                          const nsString& aSessionName,
                          const nsString& aIconPath);
  // Lifecycle of the audio session; transitions are driven by
  // Start()/Stop()/SetSessionData() and Windows disconnect callbacks.
  enum SessionState {
    UNINITIALIZED, // Has not been initialized yet
    STARTED, // Started
    CLONED, // SetSessionInfoCalled, Start not called
    FAILED, // The autdio session failed to start
    STOPPED, // Stop called
    AUDIO_SESSION_DISCONNECTED // Audio session disconnected
  };
protected:
  nsRefPtr<IAudioSessionControl> mAudioSessionControl;
  nsString mDisplayName;
  nsString mIconPath;
  nsID mSessionGroupingParameter;
  SessionState mState;
  nsAutoRefCnt mRefCnt;
  NS_DECL_OWNINGTHREAD
  // Process-wide singleton; created lazily by GetSingleton() and never
  // refcounted from the Gecko side (see comment in GetSingleton()).
  static AudioSession* sService;
};
// Public C++ entry points: thin wrappers that forward to the AudioSession
// singleton so callers don't need the class definition.
nsresult
StartAudioSession()
{
  return AudioSession::GetSingleton()->Start();
}
nsresult
StopAudioSession()
{
  return AudioSession::GetSingleton()->Stop();
}
// Reads the session identity (id, display name, icon path) from the singleton.
nsresult
GetAudioSessionData(nsID& aID,
                    nsString& aSessionName,
                    nsString& aIconPath)
{
  return AudioSession::GetSingleton()->GetSessionData(aID,
                                                      aSessionName,
                                                      aIconPath);
}
// Installs session identity received from another process into the singleton.
nsresult
RecvAudioSessionData(const nsID& aID,
                     const nsString& aSessionName,
                     const nsString& aIconPath)
{
  return AudioSession::GetSingleton()->SetSessionData(aID,
                                                      aSessionName,
                                                      aIconPath);
}
AudioSession* AudioSession::sService = NULL;
AudioSession::AudioSession()
{
mState = UNINITIALIZED;
}
AudioSession::~AudioSession()
{
}
AudioSession*
AudioSession::GetSingleton()
{
if (!(AudioSession::sService)) {
nsRefPtr<AudioSession> service = new AudioSession();
service.forget(&AudioSession::sService);
}
// We don't refcount AudioSession on the Gecko side, we hold one single ref
// as long as the appshell is running.
return AudioSession::sService;
}
// It appears Windows will use us on a background thread ...
NS_IMPL_THREADSAFE_ADDREF(AudioSession)
NS_IMPL_THREADSAFE_RELEASE(AudioSession)
// Minimal COM QueryInterface: we only answer for IUnknown and
// IAudioSessionEvents (the interface Windows registered us for).
STDMETHODIMP
AudioSession::QueryInterface(REFIID iid, void **ppv)
{
const IID IID_IAudioSessionEvents = __uuidof(IAudioSessionEvents);
if ((IID_IUnknown == iid) ||
    (IID_IAudioSessionEvents == iid)) {
*ppv = static_cast<IAudioSessionEvents*>(this);
// COM contract: a successful QI returns an AddRef'd pointer.
AddRef();
return S_OK;
}
return E_NOINTERFACE;
}
// Once we are started Windows will hold a reference to us through our
// IAudioSessionEvents interface that will keep us alive until the appshell
// calls Stop.
//
// Valid entry states: UNINITIALIZED (first run in the chrome process),
// CLONED (content process that received session data over IPC), or
// AUDIO_SESSION_DISCONNECTED (restart after device change). On any failure
// mState is left at FAILED.
nsresult
AudioSession::Start()
{
NS_ABORT_IF_FALSE(mState == UNINITIALIZED ||
                  mState == CLONED ||
                  mState == AUDIO_SESSION_DISCONNECTED,
                  "State invariants violated");
const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
const IID IID_IAudioSessionManager = __uuidof(IAudioSessionManager);
HRESULT hr;
// Don't check for errors in case something already initialized COM
// on this thread.
CoInitialize(NULL);
// First run (chrome process only): gather the branding name, the icon
// (the application binary itself) and a fresh grouping UUID.
if (mState == UNINITIALIZED) {
mState = FAILED;
// XXXkhuey implement this for content processes
if (XRE_GetProcessType() == GeckoProcessType_Content)
return NS_ERROR_FAILURE;
NS_ABORT_IF_FALSE(XRE_GetProcessType() == GeckoProcessType_Default,
                  "Should only get here in a chrome process!");
nsCOMPtr<nsIStringBundleService> bundleService =
do_GetService(NS_STRINGBUNDLE_CONTRACTID);
NS_ENSURE_TRUE(bundleService, NS_ERROR_FAILURE);
nsCOMPtr<nsIStringBundle> bundle;
bundleService->CreateBundle("chrome://branding/locale/brand.properties",
                            getter_AddRefs(bundle));
NS_ENSURE_TRUE(bundle, NS_ERROR_FAILURE);
bundle->GetStringFromName(NS_LITERAL_STRING("brandFullName").get(),
                          getter_Copies(mDisplayName));
PRUnichar *buffer;
mIconPath.GetMutableData(&buffer, MAX_PATH);
// XXXkhuey we should provide a way for a xulrunner app to specify an icon
// that's not in the product binary.
::GetModuleFileNameW(NULL, buffer, MAX_PATH);
nsCOMPtr<nsIUUIDGenerator> uuidgen =
do_GetService("@mozilla.org/uuid-generator;1");
NS_ENSURE_TRUE(uuidgen, NS_ERROR_FAILURE);
uuidgen->GenerateUUIDInPlace(&mSessionGroupingParameter);
}
// Pessimistically mark FAILED; flipped to STARTED only if the full COM
// pipeline below succeeds.
mState = FAILED;
NS_ABORT_IF_FALSE(!mDisplayName.IsEmpty() || !mIconPath.IsEmpty(),
                  "Should never happen ...");
// Device enumerator -> default render endpoint -> session manager ->
// session control; each step can fail independently.
nsRefPtr<IMMDeviceEnumerator> enumerator;
hr = ::CoCreateInstance(CLSID_MMDeviceEnumerator,
                        NULL,
                        CLSCTX_ALL,
                        IID_IMMDeviceEnumerator,
                        getter_AddRefs(enumerator));
if (FAILED(hr))
return NS_ERROR_NOT_AVAILABLE;
nsRefPtr<IMMDevice> device;
hr = enumerator->GetDefaultAudioEndpoint(EDataFlow::eRender,
                                         ERole::eMultimedia,
                                         getter_AddRefs(device));
if (FAILED(hr)) {
// E_NOTFOUND: no audio output device present (not an internal error).
if (hr == E_NOTFOUND)
return NS_ERROR_NOT_AVAILABLE;
return NS_ERROR_FAILURE;
}
nsRefPtr<IAudioSessionManager> manager;
hr = device->Activate(IID_IAudioSessionManager,
                      CLSCTX_ALL,
                      NULL,
                      getter_AddRefs(manager));
if (FAILED(hr))
return NS_ERROR_FAILURE;
hr = manager->GetAudioSessionControl(NULL,
                                     FALSE,
                                     getter_AddRefs(mAudioSessionControl));
if (FAILED(hr))
return NS_ERROR_FAILURE;
hr = mAudioSessionControl->SetGroupingParam((LPCGUID)&mSessionGroupingParameter,
                                            NULL);
if (FAILED(hr)) {
StopInternal();
return NS_ERROR_FAILURE;
}
hr = mAudioSessionControl->SetDisplayName(mDisplayName.get(), NULL);
if (FAILED(hr)) {
StopInternal();
return NS_ERROR_FAILURE;
}
hr = mAudioSessionControl->SetIconPath(mIconPath.get(), NULL);
if (FAILED(hr)) {
StopInternal();
return NS_ERROR_FAILURE;
}
// From here on Windows holds a reference to us via IAudioSessionEvents.
hr = mAudioSessionControl->RegisterAudioSessionNotification(this);
if (FAILED(hr)) {
StopInternal();
return NS_ERROR_FAILURE;
}
mState = STARTED;
return NS_OK;
}
// Detaches from the Windows session control: resets the grouping parameter to
// the null GUID, unregisters our event callback and drops the COM reference.
// Safe to call when no session control is held.
void
AudioSession::StopInternal()
{
  static const nsID blankId = {0, 0, 0, {0, 0, 0, 0, 0, 0, 0, 0} };

  if (!mAudioSessionControl)
    return;

  mAudioSessionControl->SetGroupingParam((LPCGUID)&blankId, NULL);
  mAudioSessionControl->UnregisterAudioSessionNotification(this);
  mAudioSessionControl = nullptr;
}
nsresult
AudioSession::Stop()
{
NS_ABORT_IF_FALSE(mState == STARTED ||
                  mState == UNINITIALIZED || // XXXremove this
                  mState == FAILED,
                  "State invariants violated");
mState = STOPPED;
// Move the single strong reference out of the static pointer into a stack
// nsRefPtr so the object is released when this function returns.
nsRefPtr<AudioSession> kungFuDeathGrip;
kungFuDeathGrip.swap(sService);
if (XRE_GetProcessType() != GeckoProcessType_Content)
StopInternal();
// At this point kungFuDeathGrip should be the only reference to AudioSession
::CoUninitialize();
return NS_OK;
}
// Field-wise copy of an nsID (GUID) from rhs into lhs.
void CopynsID(nsID& lhs, const nsID& rhs)
{
  lhs.m0 = rhs.m0;
  lhs.m1 = rhs.m1;
  lhs.m2 = rhs.m2;
  // m3 is a fixed 8-byte array; copy element by element.
  for (int idx = 0; idx != 8; ++idx)
    lhs.m3[idx] = rhs.m3[idx];
}
// Copies the cached session identity (grouping id, display name, icon path)
// into the caller's out-parameters. Returns NS_ERROR_FAILURE when the session
// previously failed to start; the out-parameters are still filled in.
nsresult
AudioSession::GetSessionData(nsID& aID,
                             nsString& aSessionName,
                             nsString& aIconPath)
{
  NS_ABORT_IF_FALSE(mState == FAILED ||
                    mState == STARTED ||
                    mState == CLONED,
                    "State invariants violated");

  CopynsID(aID, mSessionGroupingParameter);
  aSessionName = mDisplayName;
  aIconPath = mIconPath;

  return mState == FAILED ? NS_ERROR_FAILURE : NS_OK;
}
// Caches session identity received from the chrome process (content side of
// the IPC pair with GetSessionData); transitions UNINITIALIZED -> CLONED.
nsresult
AudioSession::SetSessionData(const nsID& aID,
                             const nsString& aSessionName,
                             const nsString& aIconPath)
{
NS_ABORT_IF_FALSE(mState == UNINITIALIZED,
                  "State invariants violated");
NS_ABORT_IF_FALSE(XRE_GetProcessType() != GeckoProcessType_Default,
                  "Should never get here in a chrome process!");
mState = CLONED;
CopynsID(mSessionGroupingParameter, aID);
mDisplayName = aSessionName;
mIconPath = aIconPath;
return NS_OK;
}
// The following IAudioSessionEvents notifications carry no information we
// act on; each simply acknowledges the event.
STDMETHODIMP
AudioSession::OnChannelVolumeChanged(DWORD aChannelCount,
                                     float aChannelVolumeArray[],
                                     DWORD aChangedChannel,
                                     LPCGUID aContext)
{
return S_OK; // NOOP
}
STDMETHODIMP
AudioSession::OnDisplayNameChanged(LPCWSTR aDisplayName,
                                   LPCGUID aContext)
{
return S_OK; // NOOP
}
STDMETHODIMP
AudioSession::OnGroupingParamChanged(LPCGUID aGroupingParam,
                                     LPCGUID aContext)
{
return S_OK; // NOOP
}
STDMETHODIMP
AudioSession::OnIconPathChanged(LPCWSTR aIconPath,
                                LPCGUID aContext)
{
return S_OK; // NOOP
}
STDMETHODIMP
AudioSession::OnSessionDisconnected(AudioSessionDisconnectReason aReason)
{
// Run our code asynchronously. Per MSDN we can't do anything interesting
// in this callback.
nsCOMPtr<nsIRunnable> runnable =
NS_NewRunnableMethod(this, &AudioSession::OnSessionDisconnectedInternal);
NS_DispatchToMainThread(runnable);
return S_OK;
}
// Main-thread handler for a session disconnect (e.g. audio device change):
// drop the dead session control and attempt to start a fresh session.
nsresult
AudioSession::OnSessionDisconnectedInternal()
{
// Already torn down (e.g. Stop() raced with the dispatched runnable).
if (!mAudioSessionControl)
return NS_OK;
mAudioSessionControl->UnregisterAudioSessionNotification(this);
mAudioSessionControl = nullptr;
mState = AUDIO_SESSION_DISCONNECTED;
// Balances the CoInitialize from the previous Start(); Start() below
// re-initializes COM.
CoUninitialize();
Start(); // If it fails there's not much we can do.
return NS_OK;
}
// Remaining IAudioSessionEvents notifications we intentionally ignore.
STDMETHODIMP
AudioSession::OnSimpleVolumeChanged(float aVolume,
                                    BOOL aMute,
                                    LPCGUID aContext)
{
return S_OK; // NOOP
}
STDMETHODIMP
AudioSession::OnStateChanged(AudioSessionState aState)
{
return S_OK; // NOOP
}
} // namespace widget
} // namespace mozilla
| sergecodd/FireFox-OS | B2G/gecko/widget/windows/AudioSession.cpp | C++ | apache-2.0 | 12,568 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.clerezza.sparql.query;
import org.apache.clerezza.Literal;
/**
* Wraps a {@link Literal} in an {@link Expression}.
*
* @author hasan
*/
public class LiteralExpression implements Expression {

    /** The wrapped literal; immutable once constructed. */
    private final Literal literal;

    /**
     * Creates an expression node that wraps the given literal value.
     *
     * @param literal the literal to wrap
     */
    public LiteralExpression(Literal literal) {
        this.literal = literal;
    }

    /**
     * Returns the literal wrapped by this expression.
     *
     * @return the wrapped {@link Literal}
     */
    public Literal getLiteral() {
        return literal;
    }
}
| apache/clerezza | sparql/src/main/java/org/apache/clerezza/sparql/query/LiteralExpression.java | Java | apache-2.0 | 1,215 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.wan.serial;
import org.apache.geode.internal.cache.wan.AbstractGatewaySender;
import org.apache.geode.internal.monitoring.ThreadsMonitoring;
/**
 * Test-only subclass of {@link SerialGatewaySenderEventProcessor} that skips
 * creating the region queue so tests can inspect processor state without a
 * live queue.
 */
public class TestSerialGatewaySenderEventProcessor extends SerialGatewaySenderEventProcessor {
  public TestSerialGatewaySenderEventProcessor(AbstractGatewaySender sender, String id,
      ThreadsMonitoring tMonitoring) {
    super(sender, id, tMonitoring);
  }
  @Override
  protected void initializeMessageQueue(String id) {
    // Overridden to not create the RegionQueue in the constructor.
  }
  // Exposes the size of the superclass's unprocessedTokens map for assertions.
  protected int getUnprocessedTokensSize() {
    return this.unprocessedTokens.size();
  }
}
| davebarnes97/geode | geode-core/src/test/java/org/apache/geode/internal/cache/wan/serial/TestSerialGatewaySenderEventProcessor.java | Java | apache-2.0 | 1,468 |
"""
mbed CMSIS-DAP debugger
Copyright (c) 2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import conversion
| molejar/pyOCD | pyOCD/utility/__init__.py | Python | apache-2.0 | 619 |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.bookmarks;
import com.intellij.codeInsight.daemon.GutterMark;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.structureView.StructureViewBuilder;
import com.intellij.ide.structureView.StructureViewModel;
import com.intellij.ide.structureView.TreeBasedStructureViewBuilder;
import com.intellij.lang.LanguageStructureViewBuilder;
import com.intellij.navigation.ItemPresentation;
import com.intellij.navigation.NavigationItem;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.EditorColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.ex.MarkupModelEx;
import com.intellij.openapi.editor.ex.RangeHighlighterEx;
import com.intellij.openapi.editor.impl.DocumentMarkupModel;
import com.intellij.openapi.editor.markup.GutterIconRenderer;
import com.intellij.openapi.editor.markup.HighlighterLayer;
import com.intellij.openapi.editor.markup.RangeHighlighter;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.fileEditor.FileDocumentManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.DumbAware;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.pom.Navigatable;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.ui.ColorUtil;
import com.intellij.ui.JBColor;
import com.intellij.ui.RetrievableIcon;
import com.intellij.util.NotNullProducer;
import com.intellij.util.PlatformIcons;
import com.intellij.util.Processor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
/**
 * A single editor bookmark: a position (file + line) with an optional
 * description and an optional one-character mnemonic. Renders itself as a
 * gutter icon via a persistent range highlighter and supports navigation to
 * its target.
 */
public class Bookmark implements Navigatable, Comparable<Bookmark> {
  public static final Icon DEFAULT_ICON = new MyCheckedIcon();
  private final VirtualFile myFile;
  // Navigation target; recreated lazily in getTarget() when the bookmarked
  // line moves (see getLine()).
  @NotNull private OpenFileDescriptor myTarget;
  private final Project myProject;
  private String myDescription;
  // 0 means "no mnemonic assigned".
  private char myMnemonic = 0;
  public static final Font MNEMONIC_FONT = new Font("Monospaced", Font.PLAIN, 11);
  public Bookmark(@NotNull Project project, @NotNull VirtualFile file, int line, @NotNull String description) {
    myFile = file;
    myProject = project;
    myDescription = description;
    myTarget = new OpenFileDescriptor(project, file, line, -1, true);
    addHighlighter();
  }
  // Order: bookmarks with mnemonics first (by mnemonic), then by project
  // name, file name, and finally target position.
  @Override
  public int compareTo(Bookmark o) {
    int i = myMnemonic != 0 ? o.myMnemonic != 0 ? myMnemonic - o.myMnemonic : -1: o.myMnemonic != 0 ? 1 : 0;
    if (i != 0) return i;
    i = myProject.getName().compareTo(o.myProject.getName());
    if (i != 0) return i;
    i = myFile.getName().compareTo(o.getFile().getName());
    if (i != 0) return i;
    return getTarget().compareTo(o.getTarget());
  }
  /** Recreates the gutter highlighter (e.g. after the icon/mnemonic changed). */
  public void updateHighlighter() {
    release();
    addHighlighter();
  }
  // Attaches the highlighter only if the document is already loaded; for
  // unloaded files the highlighter is presumably created elsewhere when the
  // document opens.
  private void addHighlighter() {
    Document document = FileDocumentManager.getInstance().getCachedDocument(getFile());
    if (document != null) {
      createHighlighter((MarkupModelEx)DocumentMarkupModel.forDocument(document, myProject, true));
    }
  }
  /**
   * Creates a persistent line highlighter carrying this bookmark's gutter
   * icon, error-stripe mark and colors. Returns null for line-less bookmarks
   * (line < 0, i.e. whole-file bookmarks).
   */
  public RangeHighlighter createHighlighter(@NotNull MarkupModelEx markup) {
    final RangeHighlighterEx myHighlighter;
    int line = getLine();
    if (line >= 0) {
      myHighlighter = markup.addPersistentLineHighlighter(line, HighlighterLayer.ERROR + 1, null);
      if (myHighlighter != null) {
        myHighlighter.setGutterIconRenderer(new MyGutterIconRenderer(this));
        TextAttributes textAttributes =
          EditorColorsManager.getInstance().getGlobalScheme().getAttributes(CodeInsightColors.BOOKMARKS_ATTRIBUTES);
        Color stripeColor = textAttributes.getErrorStripeColor();
        myHighlighter.setErrorStripeMarkColor(stripeColor != null ? stripeColor : Color.black);
        myHighlighter.setErrorStripeTooltip(getBookmarkTooltip());
        TextAttributes attributes = myHighlighter.getTextAttributes();
        if (attributes == null) {
          attributes = new TextAttributes();
        }
        attributes.setBackgroundColor(textAttributes.getBackgroundColor());
        attributes.setForegroundColor(textAttributes.getForegroundColor());
        myHighlighter.setTextAttributes(attributes);
      }
    }
    else {
      myHighlighter = null;
    }
    return myHighlighter;
  }
  @Nullable
  public Document getDocument() {
    return FileDocumentManager.getInstance().getCachedDocument(getFile());
  }
  /** Disposes this bookmark's highlighter, if any is currently attached. */
  public void release() {
    int line = getLine();
    if (line < 0) {
      return;
    }
    final Document document = getDocument();
    if (document == null) return;
    MarkupModelEx markup = (MarkupModelEx)DocumentMarkupModel.forDocument(document, myProject, true);
    final Document markupDocument = markup.getDocument();
    // Line no longer exists in the (possibly shortened) document.
    if (markupDocument.getLineCount() <= line) return;
    RangeHighlighterEx highlighter = findMyHighlighter();
    if (highlighter != null) {
      highlighter.dispose();
    }
  }
  // Scans the document's markup model for the highlighter whose gutter
  // renderer points back at this bookmark instance.
  private RangeHighlighterEx findMyHighlighter() {
    final Document document = getDocument();
    if (document == null) return null;
    MarkupModelEx markup = (MarkupModelEx)DocumentMarkupModel.forDocument(document, myProject, true);
    final Document markupDocument = markup.getDocument();
    final int startOffset = 0;
    final int endOffset = markupDocument.getTextLength();
    final Ref<RangeHighlighterEx> found = new Ref<RangeHighlighterEx>();
    markup.processRangeHighlightersOverlappingWith(startOffset, endOffset, new Processor<RangeHighlighterEx>() {
      @Override
      public boolean process(RangeHighlighterEx highlighter) {
        GutterMark renderer = highlighter.getGutterIconRenderer();
        if (renderer instanceof MyGutterIconRenderer && ((MyGutterIconRenderer)renderer).myBookmark == Bookmark.this) {
          found.set(highlighter);
          // Found it; stop processing.
          return false;
        }
        return true;
      }
    });
    return found.get();
  }
  /** Checkmark icon, or the mnemonic character icon when a mnemonic is set. */
  public Icon getIcon() {
    return myMnemonic == 0 ? DEFAULT_ICON : MnemonicIcon.getIcon(myMnemonic);
  }
  public String getDescription() {
    return myDescription;
  }
  public void setDescription(String description) {
    myDescription = description;
  }
  public char getMnemonic() {
    return myMnemonic;
  }
  public void setMnemonic(char mnemonic) {
    myMnemonic = Character.toUpperCase(mnemonic);
  }
  @NotNull
  public VirtualFile getFile() {
    return myFile;
  }
  /** @return the description, or null when it is empty/blank. */
  @Nullable
  public String getNotEmptyDescription() {
    return StringUtil.isEmpty(myDescription) ? null : myDescription;
  }
  public boolean isValid() {
    if (!getFile().isValid()) {
      return false;
    }
    // Whole-file bookmark: valid as long as the file is.
    if (getLine() ==-1) {
      return true;
    }
    RangeHighlighterEx highlighter = findMyHighlighter();
    return highlighter != null && highlighter.isValid();
  }
  @Override
  public boolean canNavigate() {
    return getTarget().canNavigate();
  }
  @Override
  public boolean canNavigateToSource() {
    return getTarget().canNavigateToSource();
  }
  @Override
  public void navigate(boolean requestFocus) {
    getTarget().navigate(requestFocus);
  }
  /**
   * Current 0-based line of the bookmark. Prefers the live highlighter (which
   * tracks edits), then the target's range marker, then the stored line.
   */
  public int getLine() {
    int targetLine = myTarget.getLine();
    if (targetLine == -1) return targetLine;
    //What user sees in gutter
    RangeHighlighterEx highlighter = findMyHighlighter();
    if (highlighter != null && highlighter.isValid()) {
      Document document = getDocument();
      if (document != null) {
        return document.getLineNumber(highlighter.getStartOffset());
      }
    }
    RangeMarker marker = myTarget.getRangeMarker();
    if (marker != null && marker.isValid()) {
      Document document = marker.getDocument();
      return document.getLineNumber(marker.getStartOffset());
    }
    return targetLine;
  }
  // Rebuilds the descriptor when the line drifted from the stored target.
  private OpenFileDescriptor getTarget() {
    int line = getLine();
    if (line != myTarget.getLine()) {
      myTarget = new OpenFileDescriptor(myProject, myFile, line, -1, true);
    }
    return myTarget;
  }
  @Override
  public String toString() {
    StringBuilder result = new StringBuilder(getQualifiedName());
    String description = StringUtil.escapeXml(getNotEmptyDescription());
    if (description != null) {
      result.append(": ").append(description);
    }
    return result.toString();
  }
  /**
   * Human-readable location: for files with a structure view, the enclosing
   * element's name and location; otherwise "file, line N" (1-based).
   */
  public String getQualifiedName() {
    String presentableUrl = myFile.getPresentableUrl();
    if (myFile.isDirectory()) return presentableUrl;
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();
    final PsiFile psiFile = PsiManager.getInstance(myProject).findFile(myFile);
    if (psiFile == null) return presentableUrl;
    StructureViewBuilder builder = LanguageStructureViewBuilder.INSTANCE.getStructureViewBuilder(psiFile);
    if (builder instanceof TreeBasedStructureViewBuilder) {
      StructureViewModel model = ((TreeBasedStructureViewBuilder)builder).createStructureViewModel(null);
      Object element;
      try {
        element = model.getCurrentEditorElement();
      }
      finally {
        // Always dispose the model, even if element lookup throws.
        model.dispose();
      }
      if (element instanceof NavigationItem) {
        ItemPresentation presentation = ((NavigationItem)element).getPresentation();
        if (presentation != null) {
          presentableUrl = ((NavigationItem)element).getName() + " " + presentation.getLocationString();
        }
      }
    }
    return IdeBundle.message("bookmark.file.X.line.Y", presentableUrl, getLine() + 1);
  }
  // Tooltip shown on the gutter icon and the error stripe mark.
  private String getBookmarkTooltip() {
    StringBuilder result = new StringBuilder("Bookmark");
    if (myMnemonic != 0) {
      result.append(" ").append(myMnemonic);
    }
    String description = StringUtil.escapeXml(getNotEmptyDescription());
    if (description != null) {
      result.append(": ").append(description);
    }
    return result.toString();
  }
  /** Icon that paints a single mnemonic character (0-9, A-Z) on a badge. */
  static class MnemonicIcon implements Icon {
    private static final MnemonicIcon[] cache = new MnemonicIcon[36];//0..9 + A..Z
    private final char myMnemonic;
    // Returns a cached icon for '0'..'9'/'A'..'Z'; other characters get a
    // fresh (uncached) instance.
    @NotNull
    static MnemonicIcon getIcon(char mnemonic) {
      int index = mnemonic - 48;
      if (index > 9)
        index -= 7;
      if (index < 0 || index > cache.length-1)
        return new MnemonicIcon(mnemonic);
      if (cache[index] == null)
        cache[index] = new MnemonicIcon(mnemonic);
      return cache[index];
    }
    private MnemonicIcon(char mnemonic) {
      myMnemonic = mnemonic;
    }
    @Override
    public void paintIcon(Component c, Graphics g, int x, int y) {
      // Background adapts to the editor gutter's light/dark theme.
      g.setColor(new JBColor(new NotNullProducer<Color>() {
        @NotNull
        @Override
        public Color produce() {
          //noinspection UseJBColor
          return !darkBackground() ? new Color(0xffffcc) : new Color(0x675133);
        }
      }));
      g.fillRect(x, y, getIconWidth(), getIconHeight());
      g.setColor(JBColor.GRAY);
      g.drawRect(x, y, getIconWidth(), getIconHeight());
      g.setColor(EditorColorsManager.getInstance().getGlobalScheme().getDefaultForeground());
      final Font oldFont = g.getFont();
      g.setFont(MNEMONIC_FONT);
      ((Graphics2D)g).drawString(Character.toString(myMnemonic), x + 3, y + getIconHeight() - 1.5F);
      g.setFont(oldFont);
    }
    @Override
    public int getIconWidth() {
      return DEFAULT_ICON.getIconWidth();
    }
    @Override
    public int getIconHeight() {
      return DEFAULT_ICON.getIconHeight();
    }
    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      MnemonicIcon that = (MnemonicIcon)o;
      return myMnemonic == that.myMnemonic;
    }
    @Override
    public int hashCode() {
      return (int)myMnemonic;
    }
  }
  /** Default bookmark icon: a checkmark tuned for light vs. dark gutters. */
  private static class MyCheckedIcon implements Icon, RetrievableIcon {
    @Nullable
    @Override
    public Icon retrieveIcon() {
      return PlatformIcons.CHECK_ICON;
    }
    @Override
    public void paintIcon(Component c, Graphics g, int x, int y) {
      (darkBackground() ? AllIcons.Actions.CheckedGrey : AllIcons.Actions.CheckedBlack).paintIcon(c, g, x, y);
    }
    @Override
    public int getIconWidth() {
      return PlatformIcons.CHECK_ICON.getIconWidth();
    }
    @Override
    public int getIconHeight() {
      return PlatformIcons.CHECK_ICON.getIconHeight();
    }
  }
  // True when the editor gutter background is dark (dark theme).
  private static boolean darkBackground() {
    Color gutterBackground = EditorColorsManager.getInstance().getGlobalScheme().getColor(EditorColors.GUTTER_BACKGROUND);
    if (gutterBackground == null) {
      gutterBackground = EditorColors.GUTTER_BACKGROUND.getDefaultColor();
    }
    return ColorUtil.isDark(gutterBackground);
  }
  /** Gutter renderer delegating icon and tooltip to the owning bookmark. */
  private static class MyGutterIconRenderer extends GutterIconRenderer implements DumbAware {
    private final Bookmark myBookmark;
    public MyGutterIconRenderer(@NotNull Bookmark bookmark) {
      myBookmark = bookmark;
    }
    @Override
    @NotNull
    public Icon getIcon() {
      return myBookmark.getIcon();
    }
    @Override
    public String getTooltipText() {
      return myBookmark.getBookmarkTooltip();
    }
    @Override
    public boolean equals(Object obj) {
      return obj instanceof MyGutterIconRenderer &&
             Comparing.equal(getTooltipText(), ((MyGutterIconRenderer)obj).getTooltipText()) &&
             Comparing.equal(getIcon(), ((MyGutterIconRenderer)obj).getIcon());
    }
    @Override
    public int hashCode() {
      return getIcon().hashCode();
    }
  }
}
| retomerz/intellij-community | platform/lang-impl/src/com/intellij/ide/bookmarks/Bookmark.java | Java | apache-2.0 | 14,452 |
<?php
use App\Models\VRPagesCategories;
use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;
class PagesCategoriesSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * Inserts the default VR pages categories inside a single transaction,
     * skipping any category that already exists.
     *
     * @return void
     *
     * @throws Exception if any lookup/insert fails (transaction rolled back)
     */
    public function run()
    {
        $list = [
            ["id" => "without_categories_id", "name" => "Does not have a category"],
            ["id" => "vr_categories_id", "name" => "virtualus_kambariai_kategorija"],
        ];

        DB::beginTransaction();
        try {
            foreach ($list as $single) {
                // Only create the record when it does not exist yet.
                if (!VRPagesCategories::find($single['id'])) {
                    VRPagesCategories::create($single);
                }
            }
        } catch (Exception $e) {
            DB::rollback();
            // Rethrow the original exception so its type, message and stack
            // trace are preserved (`throw new Exception($e)` stringified it
            // into the message and discarded the original type).
            throw $e;
        }
        DB::commit();
    }
}
| CodeAcademyDreamTeam/atraskvr | database/seeds/PagesCategoriesSeeder.php | PHP | apache-2.0 | 959 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*global module:false,require:false */
// Grunt build configuration for BrowserMap: lint, test (karma + qunit),
// minify, generate demo pages and docs, and package source/dist tarballs.
module.exports = function (grunt) {
    grunt.initConfig({
        pkg: grunt.file.readJSON('package.json'),
        // Unit tests: 'unit' for interactive watching, 'continuous' for CI
        // (single headless PhantomJS run).
        karma: {
            unit: {
                configFile: 'src/test/karma.conf.js',
                autoWatch: true
            },
            continuous: {
                configFile: 'src/test/karma.conf.js',
                singleRun: true,
                browsers: ['PhantomJS']
            }
        },
        jshint: {
            // only check BrowserMap files and Gruntfile.js
            files: {
                src: [
                    'Gruntfile.js',
                    'src/main/js/*.js'
                ]
            },
            options: {
                browser: true,
                curly: true,
                forin: true,
                camelcase: true,
                quotmark: true,
                undef: true,
                unused: true,
                trailing: true,
                maxlen: 140,
                multistr: true
            }
        },
        // Stage sources, demo assets and bundled third-party libs under target/.
        copy: {
            browsermap: {
                files: [
                    {src: ['src/main/js/*.js'], dest: 'target/libs/browsermap/', expand: true, flatten: true},
                    {cwd: 'src/main/resources/demo/', src: ['**'], dest: 'target/demo/', expand: true},
                    {cwd: 'src/main/lib/', src: ['**'], dest: 'target/libs/externals/', expand: true},
                    {src: ['NOTICE'], dest: 'target/', expand: true}
                ]
            },
            minified: {
                files: [
                    {src: ['target/libs/min/browsermap.min.js'], dest: 'target/demo/js/browsermap.min.js'}
                ]
            }
        },
        // Concatenate + minify into a single browsermap.min.js; public API
        // names are excluded from mangling.
        uglify: {
            options: {
                banner: '/*! <%= pkg.name %> - v<%= pkg.version %> - <%= grunt.template.today("yyyy-mm-dd") %> */\n',
                mangle: {
                    except: ['BrowserMap', 'BrowserMapUtil', 'Modernizr']
                }
            },
            target: {
                files: {
                    'target/libs/min/browsermap.min.js': [
                        'target/libs/browsermap/bmaputil.js',
                        'target/libs/browsermap/bmap.js',
                        'target/libs/externals/modernizr/modernizr.custom.js',
                        'target/libs/externals/matchMedia/matchMedia.js',
                        'target/libs/browsermap/probes.js',
                        'target/libs/browsermap/devicegroups.js'
                    ]
                }
            }
        },
        jsdoc: {
            dist: {
                src: ['src/main/js/*.js', 'README.md'],
                dest: 'target/doc'
            }
        },
        // Release tarballs: 'source' ships the repo sources, 'dist' ships the
        // built demo/docs/minified artifacts.
        compress: {
            source: {
                options: {
                    archive: 'target/browsermap-<%= pkg.version %>-incubating.tar.gz',
                    mode: 'tgz',
                    pretty: true
                },
                files: [
                    // the following entries provide the source files in the archive
                    {cwd: 'src/', src: ['**/*.js'], dest: 'browsermap-<%= pkg.version %>-incubating/src/', expand: true},
                    {cwd: 'src/', src: ['**/*.css'], dest: 'browsermap-<%= pkg.version %>-incubating/src/', expand: true},
                    {cwd: 'src/', src: ['**/*.html'], dest: 'browsermap-<%= pkg.version %>-incubating/src/', expand: true},
                    {cwd: 'target/', src: ['NOTICE'], dest: 'browsermap-<%= pkg.version %>-incubating/', expand: true},
                    {
                        src: [
                            '.gitignore', '.travis.yml', 'Gruntfile.js', 'package.json', 'README.md', 'LICENSE', 'DISCLAIMER', 'rat.exclude'
                        ],
                        dest: 'browsermap-<%= pkg.version %>-incubating/'
                    },
                    {src: ['ci/**'], dest: 'browsermap-<%= pkg.version %>-incubating/'}
                ]
            },
            dist: {
                options: {
                    archive: 'target/browsermap-<%= pkg.version %>-incubating-dist.tar.gz',
                    mode: 'tgz',
                    pretty: true
                },
                files: [
                    {src: ['LICENSE', 'README.md', 'DISCLAIMER'], dest: 'browsermap-<%= pkg.version %>-incubating-dist/'},
                    {cwd: 'target/demo', src: ['**'], dest: 'browsermap-<%= pkg.version %>-incubating-dist/demo/', expand: true},
                    {cwd: 'target/doc/', src: ['**'], dest: 'browsermap-<%= pkg.version %>-incubating-dist/doc/', expand: true},
                    {cwd: 'target/', src: ['NOTICE'], dest: 'browsermap-<%= pkg.version %>-incubating-dist/', expand: true},
                    {cwd: 'target/libs/min/', src: ['*.js'], dest: 'browsermap-<%= pkg.version %>-incubating-dist/', expand: true}
                ]
            }
        },
        // QUnit integration tests with istanbul coverage reporting.
        qunit: {
            options: {
                '--web-security': 'no',
                coverage: {
                    disposeCollector: true,
                    src: ['src/main/js/bmap.js', 'src/main/js/bmaputil.js'],
                    instrumentedFiles: 'target/report/ins/',
                    htmlReport: 'target/report/coverage',
                    coberturaReport: 'target/report/',
                    linesThresholdPct: 50
                }
            },
            all: ['src/test/resources/**/*.html']
        },
        clean: ['target/'],
        // Config consumed by the custom 'demo' task below.
        demo: {
            demoFolder: 'target/demo/',
            templateFile: 'index.html',
            selectors: [
                'browser',
                'highResolutionDisplay',
                'oldBrowser',
                'smartphone.highResolutionDisplay',
                'smartphone',
                'tablet.highResolutionDisplay',
                'tablet'
            ]
        },
        // Config consumed by the custom 'sourcetemplates' task below.
        sourcetemplates: {
            files: ['target/libs/browsermap/bmap.js', 'target/NOTICE']
        }
    });
    // Custom task: expand the demo index template once per device-group
    // selector (index.browser.html, index.tablet.html, ...).
    grunt.registerTask('demo', 'Provides the demo pages', function() {
        grunt.task.requires('clean', 'test', 'copy:browsermap', 'minify', 'copy:minified');
        var data = grunt.config('demo'),
            evaluatedContent,
            path = require('path');
        if (data) {
            if (!data.demoFolder) {
                grunt.log.error('No demo folder has been defined (demo.demoFolder).');
                return;
            }
            if (!data.templateFile) {
                grunt.log.error('No template file has been defined (demo.templateFile).');
                return;
            }
            if (!data.selectors || data.selectors.length < 1) {
                grunt.log.error('No selectors have been defined (demo.selectors).');
                return;
            }
            var templateFile = path.join(data.demoFolder, data.templateFile);
            // The template is also rewritten in place with its variables expanded.
            evaluatedContent = grunt.template.process(grunt.file.read(templateFile));
            grunt.file.write(templateFile, evaluatedContent);
            for (var i = 0; i < data.selectors.length; i++) {
                var fileName = data.templateFile.replace('.html', '.' + data.selectors[i] + '.html');
                grunt.file.write(path.join(data.demoFolder, fileName), evaluatedContent);
            }
            grunt.log.writeln('Generated demo site at ' + data.demoFolder);
        } else {
            grunt.log.error('Cannot find a configuration for the demo task!');
            return;
        }
    });
    // Custom task: expand grunt template variables (e.g. version) inside the
    // staged source files listed in sourcetemplates.files.
    grunt.registerTask('sourcetemplates', 'Replaces templates from source files', function() {
        grunt.task.requires('clean', 'test', 'copy:browsermap');
        var data = grunt.config('sourcetemplates'),
            path = require('path'),
            files,
            file,
            content;
        if (data) {
            files = data.files;
            if (!files || !(files instanceof Array)) {
                grunt.log.error('No files array defined.');
                return;
            }
            for (var i = 0; i < files.length; i++) {
                file = path.normalize(files[i]);
                content = grunt.template.process(grunt.file.read(file));
                grunt.file.write(file, content);
                grunt.log.writeln('Replaced template variables at ' + file);
            }
        } else {
            grunt.log.error('Cannot find a configuration for the sourcetemplates task!');
            return;
        }
    });
    grunt.loadNpmTasks('grunt-karma');
    grunt.loadNpmTasks('grunt-contrib-jshint');
    grunt.loadNpmTasks('grunt-contrib-uglify');
    grunt.loadNpmTasks('grunt-jsdoc');
    grunt.loadNpmTasks('grunt-contrib-clean');
    grunt.loadNpmTasks('grunt-contrib-copy');
    grunt.loadNpmTasks('grunt-contrib-compress');
    grunt.loadNpmTasks('grunt-qunit-istanbul');
    // Aggregate tasks; 'package' is the full release pipeline.
    grunt.registerTask('minify', ['uglify']);
    grunt.registerTask('coverage', ['qunit-cov']);
    grunt.registerTask('test', ['jshint', 'karma:continuous', 'qunit']);
    grunt.registerTask('package', ['clean', 'test', 'copy:browsermap', 'sourcetemplates', 'minify', 'copy:minified', 'demo', 'jsdoc',
        'compress:source', 'compress:dist']);
};
| apache/devicemap-browsermap | Gruntfile.js | JavaScript | apache-2.0 | 10,049 |
/*
The MIT License
Copyright (c) 2004-2015 Paul R. Holser, Jr.
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package joptsimple;
import java.io.File;
import static java.util.Arrays.*;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Verifies that option arguments declared with {@code withValuesSeparatedBy}
 * are split on the separator character and converted to the target type,
 * whether the argument is passed as a separate token, abutted with '=',
 * or abutted directly to a short option.
 *
 * @author <a href="mailto:pholser@alumni.rice.edu">Paul Holser</a>
 */
public class ParsingSeparatedTypedOptionValuesTest extends AbstractOptionParserFixture {
    @Test
    public void parsesSeparatedValuesAsSeparateArgument() {
        assertCorrectParse( "classpath", ':', "--classpath", "/usr:/opt:/var" );
    }

    @Test
    public void parsesSeparatedValuesWhenArgumentAbuttedWithEquals() {
        assertCorrectParse( "classpath", ':', "--classpath=/usr:/opt:/var" );
    }

    @Test
    public void parsesEqualsSeparatedValuesWhenArgumentAbuttedWithEquals() {
        assertCorrectParse( "classpath", '=', "--classpath=/usr=/opt=/var" );
    }

    @Test
    public void parsesSeparatedValuesAbutting() {
        assertCorrectParse( "c", ':', "-c/usr:/opt:/var" );
    }

    // Declares an option of type File whose argument is split on the given
    // separator, parses the supplied command line, and asserts the values.
    private void assertCorrectParse( String option, char separator, String... args ) {
        parser.accepts( option )
              .withRequiredArg()
              .ofType( File.class )
              .withValuesSeparatedBy( separator );

        OptionSet parsed = parser.parse( args );

        assertEquals(
            asList( new File( "/usr" ), new File( "/opt" ), new File( "/var" ) ),
            parsed.valuesOf( option ) );
    }
}
| juhalindfors/bazel-patches | third_party/java/jopt-simple/src/test/java/joptsimple/ParsingSeparatedTypedOptionValuesTest.java | Java | apache-2.0 | 2,436 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { render } from '@testing-library/react';
import React from 'react';
import { ShowValue } from './show-value';
describe('rule editor', () => {
  it('matches snapshot', () => {
    // Render the component inline and snapshot its root DOM node.
    const { container } = render(<ShowValue endpoint="test" downloadFilename="test" />);
    expect(container.firstChild).toMatchSnapshot();
  });
});
| nishantmonu51/druid | web-console/src/components/show-value/show-value.spec.tsx | TypeScript | apache-2.0 | 1,174 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2019 the original author or authors.
*/
package org.assertj.core.util;
import static org.assertj.core.util.DateUtil.toCalendar;
import static org.assertj.core.api.Assertions.*;
import java.text.*;
import java.util.*;
import org.junit.jupiter.api.Test;
/**
 * Tests for <code>{@link DateUtil#toCalendar(java.util.Date)}</code>.
 *
 * @author Joel Costigliola
 */
public class DateUtil_toCalendar_Test {

  @Test
  public void should_convert_date_to_calendar() throws ParseException {
    String dateAsString = "26/08/1994";
    Date date = new SimpleDateFormat("dd/MM/yyyy").parse(dateAsString);
    Calendar calendar = new GregorianCalendar();
    // clear all fields to have a Date without time (no hours, minutes...).
    calendar.clear();
    // Use the Calendar constant for the 0-based month index rather than the
    // octal-looking literal 07 (same value, but 0-prefixed int literals are
    // parsed as octal in Java and are an easy source of bugs).
    calendar.set(1994, Calendar.AUGUST, 26);
    assertThat(toCalendar(date)).isEqualTo(calendar);
  }

  @Test
  public void should_return_null_if_date_to_convert_is_null() {
    assertThat(toCalendar(null)).isNull();
  }
}
| xasx/assertj-core | src/test/java/org/assertj/core/util/DateUtil_toCalendar_Test.java | Java | apache-2.0 | 1,555 |
package org.javarosa.xpath.expr;
import org.javarosa.core.model.condition.EvaluationContext;
import org.javarosa.core.model.instance.DataInstance;
import org.javarosa.xpath.parser.XPathSyntaxException;
/**
 * XPath abs(value) function: evaluates its single argument, coerces it to a
 * double via {@link FunctionUtils#toDouble(Object)}, and returns the absolute
 * value.
 */
public class XPathAbsFunc extends XPathFuncExpr {
    public static final String NAME = "abs";
    // abs() accepts exactly one argument.
    private static final int EXPECTED_ARG_COUNT = 1;
    /** No-arg constructor; wires the function name and expected arity. */
    public XPathAbsFunc() {
        name = NAME;
        expectedArgCount = EXPECTED_ARG_COUNT;
    }
    /**
     * @param args parsed argument expressions for this function call
     * @throws XPathSyntaxException if the argument count is not exactly 1
     */
    public XPathAbsFunc(XPathExpression[] args) throws XPathSyntaxException {
        super(NAME, args, EXPECTED_ARG_COUNT, true);
    }
    @Override
    public Object evalBody(DataInstance model, EvaluationContext evalContext, Object[] evaluatedArgs) {
        // evaluatedArgs[0] is already evaluated; coerce to double and take |x|.
        return Math.abs(FunctionUtils.toDouble(evaluatedArgs[0]));
    }
}
| dimagi/commcare | src/main/java/org/javarosa/xpath/expr/XPathAbsFunc.java | Java | apache-2.0 | 788 |
/*
* Copyright (C) 2008 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.zxing.client.android.result;
import com.google.zxing.Result;
import com.google.zxing.client.android.R;
import com.google.zxing.client.result.ParsedResult;
import android.app.Activity;
/**
* This class handles TextParsedResult as well as unknown formats. It's the fallback handler.
*
* @author dswitkin@google.com (Daniel Switkin)
*/
public final class TextResultHandler extends ResultHandler {
  // Button labels in display order. NOTE: the indices of this array must stay
  // in sync with the switch in handleButtonPress below; the custom product
  // search entry is last so getButtonCount can hide it when not configured.
  private static final int[] buttons = {
      R.string.zxing_button_web_search,
      R.string.zxing_button_share_by_email,
      R.string.zxing_button_share_by_sms,
      R.string.zxing_button_custom_product_search,
  };
  public TextResultHandler(Activity activity, ParsedResult result, Result rawResult) {
    super(activity, result, rawResult);
  }
  @Override
  public int getButtonCount() {
    // Hide the last button (custom product search) unless a custom search
    // URL has been configured.
    return hasCustomProductSearch() ? buttons.length : buttons.length - 1;
  }
  @Override
  public int getButtonText(int index) {
    return buttons[index];
  }
  @Override
  public void handleButtonPress(int index) {
    // Dispatch on the button index; cases mirror the buttons array above.
    String text = getResult().getDisplayResult();
    switch (index) {
      case 0:
        webSearch(text);
        break;
      case 1:
        shareByEmail(text);
        break;
      case 2:
        shareBySMS(text);
        break;
      case 3:
        openURL(fillInCustomSearchURL(text));
        break;
    }
  }
  @Override
  public int getDisplayTitle() {
    return R.string.zxing_result_text;
  }
}
| jonzl/sample-zxing | zxing-android-complete/src/com/google/zxing/client/android/result/TextResultHandler.java | Java | apache-2.0 | 2,058 |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.plugin.configrepo.contract;
import org.junit.jupiter.api.Test;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class CRTimerTest extends AbstractCRTest<CRTimer> {
    // Timer with a valid cron spec and only_on_changes = false.
    private final CRTimer timer;
    // Timer built without a cron spec; expected to fail validation.
    private final CRTimer invalidNoTimerSpec;
    public CRTimerTest() {
        timer = new CRTimer("0 15 10 * * ? *", false);
        invalidNoTimerSpec = new CRTimer();
    }
    @Override
    public void addGoodExamples(Map<String, CRTimer> examples) {
        examples.put("timer",timer);
    }
    @Override
    public void addBadExamples(Map<String, CRTimer> examples) {
        examples.put("invalidNoTimerSpec",invalidNoTimerSpec);
    }
    /**
     * Deserializes a server-API-shaped JSON payload and checks that the cron
     * spec and only_on_changes flag round-trip and that the result validates.
     */
    @Test
    public void shouldDeserializeFromAPILikeObject() {
        String json = "{\n" +
                "    \"spec\": \"0 0 22 ? * MON-FRI\",\n" +
                "    \"only_on_changes\": true\n" +
                "  }";
        CRTimer deserializedValue = gson.fromJson(json,CRTimer.class);
        assertThat(deserializedValue.getSpec(),is("0 0 22 ? * MON-FRI"));
        assertThat(deserializedValue.isOnlyOnChanges(),is(true));
        ErrorCollection errors = deserializedValue.getErrors();
        assertTrue(errors.isEmpty());
    }
}
| gocd/gocd | plugin-infra/go-plugin-config-repo/src/test/java/com/thoughtworks/go/plugin/configrepo/contract/CRTimerTest.java | Java | apache-2.0 | 1,969 |
/*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.lienzo.toolbox;
import java.util.function.BiConsumer;
import com.ait.lienzo.client.core.animation.AnimationCallback;
import com.ait.lienzo.client.core.animation.AnimationProperties;
import com.ait.lienzo.client.core.animation.AnimationProperty;
import com.ait.lienzo.client.core.animation.AnimationTweener;
import com.ait.lienzo.client.core.animation.IAnimation;
import com.ait.lienzo.client.core.animation.IAnimationHandle;
import com.ait.lienzo.client.core.shape.Group;
import com.ait.lienzo.client.core.types.Point2D;
import org.uberfire.mvp.Command;
/**
 * Factory methods and executor implementations that show/hide a toolbox
 * {@link Group} by animating either its alpha or its X/Y scale. Each executor
 * is a {@code BiConsumer<Group, Command>}: it animates the group and invokes
 * the supplied {@link Command} when the animation completes.
 */
public class ToolboxVisibilityExecutors {
    private static final double ANIMATION_SCALE_DELAY_MILLIS = 150;
    private static final double ANIMATION_ALPHA_DELAY_MILLIS = 150;
    /** Executor that animates the group's alpha towards the given target. */
    public static AnimatedAlphaGroupExecutor alpha(final double targetAlphaValue) {
        return new AnimatedAlphaGroupExecutor(targetAlphaValue);
    }
    /** Grows the group horizontally from scale 0.1 to 1 (show on X axis). */
    public static AnimatedScaleXGroupExecutor upScaleX() {
        return scaleX(1,
                      0.1,
                      1);
    }
    /** Shrinks the group horizontally from scale 1 to 0.1 (hide on X axis). */
    public static AnimatedScaleXGroupExecutor downScaleX() {
        return scaleX(0,
                      1,
                      0.1);
    }
    /** Grows the group vertically from scale 0.1 to 1 (show on Y axis). */
    public static AnimatedScaleYGroupExecutor upScaleY() {
        return scaleY(1,
                      0.1,
                      1);
    }
    /** Shrinks the group vertically from scale 1 to 0.1 (hide on Y axis). */
    public static AnimatedScaleYGroupExecutor downScaleY() {
        return scaleY(0,
                      1,
                      0.1);
    }
    public static AnimatedScaleXGroupExecutor scaleX(final double targetAlphaValue,
                                                     final double startScale,
                                                     final double endScale) {
        return new AnimatedScaleXGroupExecutor(targetAlphaValue,
                                               startScale,
                                               endScale);
    }
    private static AnimatedScaleYGroupExecutor scaleY(final double targetAlphaValue,
                                                      final double startScale,
                                                      final double endScale) {
        return new AnimatedScaleYGroupExecutor(targetAlphaValue,
                                               startScale,
                                               endScale);
    }
    /**
     * Base executor: runs a Lienzo animation on the group with configurable
     * tweener and duration, and fires the callback from the animation's
     * onClose hook (i.e. only after the animation has finished).
     */
    public abstract static class AnimatedGroupExecutor<T extends AnimatedGroupExecutor>
            implements BiConsumer<Group, Command> {
        private double animationDuration;
        private AnimationTweener animationTweener;
        public AnimatedGroupExecutor(final double duration) {
            this.animationTweener = AnimationTweener.LINEAR;
            this.animationDuration = duration;
        }
        /** Animation properties (which attribute to animate and to what value). */
        protected abstract AnimationProperties getProperties();
        @Override
        public void accept(final Group group,
                           final Command callback) {
            animate(group,
                    callback);
        }
        private void animate(final Group group,
                             final Command callback) {
            group.animate(animationTweener,
                          getProperties(),
                          animationDuration,
                          new AnimationCallback() {
                              @Override
                              public void onClose(IAnimation animation,
                                                  IAnimationHandle handle) {
                                  super.onClose(animation,
                                                handle);
                                  // Notify the caller once the animation ends.
                                  callback.execute();
                              }
                          });
        }
        public T setAnimationTweener(final AnimationTweener animationTweener) {
            this.animationTweener = animationTweener;
            return cast();
        }
        public T setAnimationDuration(final double millis) {
            this.animationDuration = millis;
            return cast();
        }
        @SuppressWarnings("unchecked")
        private T cast() {
            return (T) this;
        }
    }
    /** Animates only the group's alpha towards a fixed target value. */
    public static class AnimatedAlphaGroupExecutor extends AnimatedGroupExecutor<AnimatedAlphaGroupExecutor> {
        private double alpha;
        protected AnimatedAlphaGroupExecutor(final double alpha) {
            super(ANIMATION_ALPHA_DELAY_MILLIS);
            this.alpha = alpha;
        }
        @Override
        protected AnimationProperties getProperties() {
            return AnimationProperties.toPropertyList(AnimationProperty.Properties.ALPHA(alpha));
        }
        public AnimatedAlphaGroupExecutor setAlpha(final double alpha) {
            this.alpha = alpha;
            return this;
        }
    }
    /**
     * Scale-based executor: before animating it snaps the group to the start
     * scale with alpha 1, animates to the end scale, then applies the target
     * alpha and fires the original callback.
     */
    public abstract static class AnimatedScaleGroupExecutor<T extends AnimatedScaleGroupExecutor>
            extends AnimatedGroupExecutor<T> {
        private final double alpha;
        protected AnimatedScaleGroupExecutor(final double alpha) {
            super(ANIMATION_SCALE_DELAY_MILLIS);
            this.alpha = alpha;
        }
        protected abstract Point2D getInitialScale();
        protected abstract Point2D getEndScale();
        @Override
        protected AnimationProperties getProperties() {
            return AnimationProperties.toPropertyList(AnimationProperty.Properties.SCALE(getEndScale()));
        }
        @Override
        public void accept(final Group group,
                           final Command callback) {
            // Snap to the starting scale, fully opaque, before animating.
            group
                    .setScale(getInitialScale())
                    .setAlpha(1);
            super.accept(group,
                         () -> {
                             // Apply the final alpha once the scale animation is done.
                             group.setAlpha(alpha);
                             callback.execute();
                         });
        }
    }
    /** Animates the X scale while keeping Y fixed at 1. */
    public static class AnimatedScaleXGroupExecutor extends AnimatedScaleGroupExecutor<AnimatedScaleXGroupExecutor> {
        private final double start;
        private final double end;
        protected AnimatedScaleXGroupExecutor(final double alpha,
                                              final double start,
                                              final double end) {
            super(alpha);
            this.start = start;
            this.end = end;
        }
        @Override
        protected Point2D getInitialScale() {
            return new Point2D(start,
                               1);
        }
        @Override
        protected Point2D getEndScale() {
            return new Point2D(end,
                               1);
        }
    }
    /** Animates the Y scale while keeping X fixed at 1. */
    public static class AnimatedScaleYGroupExecutor extends AnimatedScaleGroupExecutor<AnimatedScaleYGroupExecutor> {
        private final double start;
        private final double end;
        protected AnimatedScaleYGroupExecutor(final double alpha,
                                              final double start,
                                              final double end) {
            super(alpha);
            this.start = start;
            this.end = end;
        }
        @Override
        protected Point2D getInitialScale() {
            return new Point2D(1,
                               start);
        }
        @Override
        protected Point2D getEndScale() {
            return new Point2D(1,
                               end);
        }
    }
}
| etirelli/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-extensions/kie-wb-common-stunner-lienzo-extensions/src/main/java/org/kie/workbench/common/stunner/lienzo/toolbox/ToolboxVisibilityExecutors.java | Java | apache-2.0 | 8,074 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.shacl.engine.constraint;
import static org.apache.jena.shacl.compact.writer.CompactOut.compact;
import java.util.Objects;
import org.apache.jena.atlas.io.IndentedWriter;
import org.apache.jena.graph.Node;
import org.apache.jena.riot.out.NodeFormatter;
import org.apache.jena.shacl.engine.ValidationContext;
import org.apache.jena.shacl.validation.ReportItem;
import org.apache.jena.sparql.expr.Expr;
import org.apache.jena.sparql.expr.ExprNotComparableException;
import org.apache.jena.sparql.expr.NodeValue;
import org.apache.jena.sparql.expr.ValueSpaceClassification;
/**
 * A constraint that tests the value of a node against a fixed comparison
 * value (e.g. sh:minInclusive and friends). Subclasses supply the comparison
 * predicate via {@link #test(int)} and the SHACL component/name.
 */
public abstract class ValueRangeConstraint extends ConstraintTerm {

    protected final NodeValue nodeValue;
    private final Node constraintComponent;

    protected ValueRangeConstraint(Node value, Node constraintComponent) {
        this.nodeValue = NodeValue.makeNode(value);
        this.constraintComponent = constraintComponent;
    }

    public NodeValue getNodeValue() {
        return nodeValue;
    }

    /**
     * Compares the constraint value with the focus node and delegates the
     * sign of the comparison to {@link #test(int)}.
     *
     * @return null on success, otherwise a ReportItem describing the failure
     *         (including incomparable or indeterminate value pairs).
     */
    @Override
    final public ReportItem validate(ValidationContext vCxt, Node n) {
        NodeValue nv = NodeValue.makeNode(n);
        // (Removed an unused call to NodeValue.classifyValueOp whose result
        // was never read.)
        try {
            int r = NodeValue.compare(nodeValue, nv);
            if ( r == Expr.CMP_INDETERMINATE )
                return new ReportItem(toString()+" indeterminant to "+n, n);
            boolean b = test(r);
            if ( b )
                return null;
            return new ReportItem(getErrorMessage(n), n);
        } catch (ExprNotComparableException ex) {
            return new ReportItem(toString()+" can't compare to "+n, n);
        }
    }

    /** Message used when the comparison succeeds syntactically but the test fails. */
    protected abstract String getErrorMessage(Node n);

    /** Predicate over the sign of NodeValue.compare(constraint, focus). */
    protected abstract boolean test(int r);

    /** Short constraint name used in compact syntax and toString. */
    protected abstract String getName();

    @Override
    final
    public Node getComponent() {
        return constraintComponent ;
    }

    @Override
    public void printCompact(IndentedWriter out, NodeFormatter nodeFmt) {
        compact(out, nodeFmt, getName(), nodeValue.asNode());
    }

    @Override
    public String toString() {
        return getName()+"["+nodeValue+"]";
    }

    @Override
    public int hashCode() {
        return Objects.hash(constraintComponent, nodeValue);
    }

    @Override
    public boolean equals(Object obj) {
        if ( this == obj )
            return true;
        if ( obj == null )
            return false;
        if ( ! this.getClass().equals(obj.getClass()) )
            return false;
        ValueRangeConstraint other = (ValueRangeConstraint)obj;
        return Objects.equals(constraintComponent, other.constraintComponent) && Objects.equals(nodeValue, other.nodeValue);
    }
}
| apache/jena | jena-shacl/src/main/java/org/apache/jena/shacl/engine/constraint/ValueRangeConstraint.java | Java | apache-2.0 | 3,604 |
/* Copyright (C) 2013-2015 Computer Sciences Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
package ezbake.groups.graph;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.tinkerpop.blueprints.Vertex;
import ezbake.groups.graph.exception.AccessDeniedException;
import ezbake.groups.graph.frames.edge.BaseEdge;
import ezbake.groups.graph.frames.vertex.Group;
import ezbake.groups.graph.frames.vertex.User;
import ezbake.groups.graph.query.BaseQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.List;
import java.util.Set;
/**
* PermissionQuery class enforces permissions on EzGroups
*
* Permissions on EzGroups are enforced with "admin" edges, these queries can determine whether a user has the requested
* permission or not.
*/
public class PermissionEnforcer {
    private static final Logger logger = LoggerFactory.getLogger(PermissionEnforcer.class);
    private BaseQuery query;
    @Inject
    public PermissionEnforcer(BaseQuery query) {
        this.query = query;
    }
    /**
     * Definitions:
     *
     * DISCOVER: Able to "discover" that the group exists, but not view anything else
     * READ: View the group, child groups, and group members
     * WRITE: Add/Remove users to/from the group
     * MANAGE: Update group inheritance, change group name
     * CREATE_CHILD: Add new child groups
     */
    public enum Permission {
        // NOTE: DISCOVER is currently granted by the same A_READ edge as READ.
        DISCOVER(BaseEdge.EdgeType.A_READ),
        READ(BaseEdge.EdgeType.A_READ),
        WRITE(BaseEdge.EdgeType.A_WRITE),
        MANAGE(BaseEdge.EdgeType.A_MANAGE),
        CREATE_CHILD(BaseEdge.EdgeType.A_CREATE_CHILD);
        // Edge type whose presence on a path grants this permission.
        private BaseEdge.EdgeType grantingEdge;
        Permission(BaseEdge.EdgeType grantingEdge) {
            this.grantingEdge = grantingEdge;
        }
        public BaseEdge.EdgeType getGrantingEdge() {
            return grantingEdge;
        }
        public String getGrantingEdgePropertyName() {
            return grantingEdge.toString();
        }
    }
    /**
     * Validate the user has the required permissions
     *
     * @param requester requester whose auths to check
     * @param group vertex against which an user must have auths
     * @param requiredPermissions array of required edges. Each one will be checked for a path to the requestor
     * @throws ezbake.groups.graph.exception.AccessDeniedException if the user does not have permission to manage all of the groups in the given
     * pipe.
     */
    public void validateAuthorized(User requester, Group group, Permission... requiredPermissions) throws AccessDeniedException {
        validateAuthorized(requester, Lists.newArrayList(group), requiredPermissions);
    }
    /**
     * Validate the user has the required permissions
     *
     * @param requester requester whose auths to check
     * @param group vertex against which an user must have auths
     * @param requiredPermissions array of required edges. Each one will be checked for a path to the requestor
     * @throws ezbake.groups.graph.exception.AccessDeniedException if the user does not have permission to manage all of the groups in the given
     * pipe.
     */
    public void validateAuthorized(User requester, Vertex group, Permission... requiredPermissions) throws AccessDeniedException {
        validateAuthorized(requester, Lists.newArrayList(group), requiredPermissions);
    }
    /**
     * Validate that an User has manage auths on all groups in a given pipe.
     *
     * NOTE(review): the parameter is a raw List because callers pass either
     * Group or Vertex instances; the instanceof dispatch below handles both.
     *
     * @param requester requester whose auths to check
     * @param vertices list containing group-vertices against which an user must have auths
     * @param requiredPermissions array of required edges. Each one will be checked for a path to the requestor
     * @throws ezbake.groups.graph.exception.AccessDeniedException if the user does not have permission to manage all of the groups in the given
     * pipe.
     */
    public void validateAuthorized(User requester, List vertices, Permission... requiredPermissions) throws AccessDeniedException {
        for (Object object : vertices) {
            Vertex vertex;
            if (object instanceof Group) {
                vertex = ((Group) object).asVertex();
            } else if (object instanceof Vertex) {
                vertex = (Vertex) object;
            } else {
                throw new IllegalStateException("Invalid object type passed in list: " + object.getClass());
            }
            for (Permission permission : requiredPermissions) {
                if (!query.pathExists(
                        requester.asVertex().getId(), vertex.getId(), permission.getGrantingEdgePropertyName())) {
                    // Roll back the open graph transaction before denying access.
                    query.getGraph().getBaseGraph().rollback();
                    final String errMsg = String.format(
                            "requester '%s' does not have the required permission '%s'", requester.getPrincipal(),
                            permission);
                    logger.error(errMsg);
                    throw new AccessDeniedException(errMsg);
                }
            }
        }
    }
    /**
     * Determines if an user has any permission on a group including DATA_ACCESS.
     *
     * @param user user for which to determine if they have any permission against a group
     * @param group group to determine if the given user has any permission on
     * @return true if the user has any permission, false if not
     */
    public boolean hasAnyPermission(User user, Group group) {
        // Collect all admin-permission edge labels plus DATA_ACCESS, then
        // check for a path along any one of them.
        final Set<String> permissionEdgeLabels = Sets.newHashSet();
        for (Permission permission : Permission.values()) {
            permissionEdgeLabels.add(permission.getGrantingEdgePropertyName());
        }
        permissionEdgeLabels.add(BaseEdge.EdgeType.DATA_ACCESS.toString());
        for (String label : permissionEdgeLabels) {
            if (query.pathExists(user.asVertex().getId(), group.asVertex().getId(), label)) {
                return true;
            }
        }
        return false;
    }
}
| infochimps-forks/ezbake-platform-services | groups/graph/src/main/java/ezbake/groups/graph/PermissionEnforcer.java | Java | apache-2.0 | 6,597 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.cost;
import com.google.common.collect.ImmutableMap;
import io.trino.Session;
import io.trino.metadata.Metadata;
import io.trino.plugin.tpch.TpchConnectorFactory;
import io.trino.sql.planner.PlanNodeIdAllocator;
import io.trino.sql.planner.iterative.rule.test.PlanBuilder;
import io.trino.sql.planner.plan.PlanNode;
import io.trino.testing.LocalQueryRunner;
import java.util.function.Function;
import static io.trino.testing.TestingSession.testSessionBuilder;
/**
 * Test harness for exercising the stats calculator against plans built with
 * a {@link PlanBuilder}; owns a {@link LocalQueryRunner} with the TPCH
 * connector registered, and must be closed to release it.
 */
public class StatsCalculatorTester
        implements AutoCloseable
{
    private final StatsCalculator statsCalculator;
    private final Metadata metadata;
    private final Session session;
    private final LocalQueryRunner queryRunner;
    public StatsCalculatorTester()
    {
        this(testSessionBuilder().build());
    }
    public StatsCalculatorTester(Session session)
    {
        this(createQueryRunner(session));
    }
    // All public constructors funnel through here so every field is derived
    // from the same query runner.
    private StatsCalculatorTester(LocalQueryRunner queryRunner)
    {
        this.statsCalculator = queryRunner.getStatsCalculator();
        this.session = queryRunner.getDefaultSession();
        this.metadata = queryRunner.getMetadata();
        this.queryRunner = queryRunner;
    }
    public Metadata getMetadata()
    {
        return metadata;
    }
    // Creates a runner with the TPCH connector registered under the session's
    // catalog name.
    private static LocalQueryRunner createQueryRunner(Session session)
    {
        LocalQueryRunner queryRunner = LocalQueryRunner.create(session);
        queryRunner.createCatalog(session.getCatalog().get(),
                new TpchConnectorFactory(1),
                ImmutableMap.of());
        return queryRunner;
    }
    /**
     * Builds a plan via the supplied provider and returns an assertion object
     * over the stats computed for that plan.
     */
    public StatsCalculatorAssertion assertStatsFor(Function<PlanBuilder, PlanNode> planProvider)
    {
        PlanBuilder planBuilder = new PlanBuilder(new PlanNodeIdAllocator(), metadata, session);
        PlanNode planNode = planProvider.apply(planBuilder);
        return new StatsCalculatorAssertion(statsCalculator, session, planNode, planBuilder.getTypes());
    }
    @Override
    public void close()
    {
        queryRunner.close();
    }
}
| ebyhr/presto | core/trino-main/src/test/java/io/trino/cost/StatsCalculatorTester.java | Java | apache-2.0 | 2,625 |
package ru.evgenyhodz.controller;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import ru.evgenyhodz.models.Advertisement;
import ru.evgenyhodz.service.AdvertisementService;
/**
 * Front controller for the car-store pages.
 *
 * @author Evgeny Khodzitskiy (evgeny.hodz@gmail.com)
 * @since 04.10.2017
 */
@Controller
public class MainController {
    /**
     * Advertisement service.
     */
    private AdvertisementService advertisementService;
    /**
     * Setter for spring autowire.
     * NOTE: the method name says "person" but it wires the advertisement
     * service; the name is kept unchanged for compatibility with existing
     * configuration/callers.
     *
     * @param as advertisement service.
     */
    @Autowired
    @Qualifier(value = "advertisementService")
    public void setPersonService(AdvertisementService as) {
        this.advertisementService = as;
    }
    /**
     * Shows basic page with one Bootstrap card per advertisement, three cards
     * per row. (Translated from the original Russian note: "a bit of a hack
     * here, since the DB from the previous assignment is reused".)
     *
     * @param model MVC model receiving the generated markup.
     * @return basic page.
     */
    @RequestMapping(value = "/basic", method = RequestMethod.GET)
    public String showBasicPage(Model model) {
        // StringBuilder instead of repeated String '+' concatenation: the
        // original allocated a new String per append inside the loop (O(n^2)).
        StringBuilder htmlResponse = new StringBuilder(" <div class=\"row\">");
        int count = 0;
        for (Advertisement adv : this.advertisementService.getAllAds()) {
            htmlResponse.append("<div class=\"col-md-4 center-block\">\n")
                    .append("<h2>").append(adv.getCar().getBrand().getName()).append(" ")
                    .append(adv.getCar().getModel().getModel()).append("</h2>\n")
                    .append("<p><img src=\"/carstore/resources/")
                    .append(adv.getImage().getUrl().substring(3)).append("\"/></p>\n")
                    .append("<p><a class=\"btn btn-secondary\" ")
                    .append("role=\"button\" id=\"").append(adv.getId())
                    .append("\" onclick=\"redirect_by_id()\">View details</a></p>\n")
                    .append("</div>");
            count++;
            // Close the current row and open a new one after every 3rd card.
            if (count == 3) {
                htmlResponse.append("</div><div class=\"row\">");
                count = 0;
            }
        }
        htmlResponse.append("</div>");
        model.addAttribute("advertisements", htmlResponse.toString());
        return "basic";
    }
    /**
     * Shows car details page.
     * NOTE(review): the advertisement id is hard-coded to 1 — presumably a
     * placeholder; confirm whether it should come from a request parameter.
     *
     * @param model MVC model receiving the advertisement.
     * @return car details page.
     */
    @RequestMapping(value = "/car", method = RequestMethod.GET)
    public String showCarDetailsPage(Model model) {
        int id = 1;
        Advertisement advertisement = advertisementService.findById(id);
        model.addAttribute("advertisement", advertisement);
        return "car";
    }
    /**
     * Shows sign in page.
     *
     * @return sign in page.
     */
    @RequestMapping(value = "/signin", method = RequestMethod.GET)
    public String showSignInPage() {
        return "sign_in";
    }
    /**
     * Shows sign up page.
     *
     * @return sign up page.
     */
    @RequestMapping(value = "/signup", method = RequestMethod.GET)
    public String showSignUpPage() {
        return "sign_up";
    }
    /**
     * Shows add_new_adv page.
     *
     * @return advertisement's creation page.
     */
    @RequestMapping(value = "/add_new_adv", method = RequestMethod.GET)
    public String showAddAdvPage() {
        return "add_new_adv";
    }
}
| alittlemind/git_test | chapter_011/mvc/src/main/java/ru/evgenyhodz/controller/MainController.java | Java | apache-2.0 | 3,394 |
// Web-app context name (e.g. "ACS") extracted from the current URL. This call
// works even though getContextName is declared below, because function
// declarations are hoisted.
var contextName = getContextName(); //return page context such as ACS
// Persian day-of-week names, starting with Sunday.
var week = new Array('یك شنبه', 'دوشنبه', 'سه شنبه', 'چهار شنبه', 'پنج شنبه', 'جمعه', 'شنبه');
// Returns the first path segment of location.href after the host part
// (indexOf('/', 8) skips past the "http://" scheme prefix).
function getContextName() {
    var i = location.href.indexOf('/', 8);
    var j = location.href.indexOf('/', i + 1);
    return location.href.substring(i + 1, j);
}
// Global DWR error handler: maps known server-side error strings to
// user-facing Persian messages. Gives the page a chance to handle the error
// first via an optional page-defined localErrh(msg).
function errh(msg) {
    try {
        if (localErrh(msg))//If has local errh function and error locally handled dont continue
            return;
    } catch (e) { }
    switch (msg) {
        // Session expired on the server: send the top frame to the login page.
        case 'User expired':
            //window.showModalDialog('/' + contextName +'/login.jsp', null, 'dialogHeight=200px;dialogWidth=300px;dialogTop=' + (window.screen.height / 2 - 200) + ';dialogLeft=' + (window.screen.width / 2 - 100));
            top.location.assign('/' + contextName + '/index.jsp');
            break;
        case 'Image big size':
            showErrorMessage('اندازه فایل بیش از حد مجاز است', 4);
            break;
        case 'Image big dim':
            showErrorMessage('حد اكثر اندازه عكس 1024 در 1024 است', 4);
            break;
        case 'File big size':
            showErrorMessage('اندازه فایل بیش از حد مجاز است', 4);
            break;
        case 'Organ diff':
            showErrorMessage('شما مجاز به تغییر اطلاعات مركز دیگر نیستید', 4);
            break;
        case 'The input file was not found':
            showErrorMessage('فايلي براي بارگذاري انتخاب نشده است', 4);
            break;
        default:
            // Unrecognized messages: classify by substring; the clickable
            // link shows the raw server message as the "details" view.
            if (msg.indexOf('Access denied') > -1) {
                showErrorMessage('دسترسی غیر مجاز <a href="javascript:{showErrorMessage(\'' + msg + '\')}">جزئيات<a>');
            } else
                if (msg.indexOf('Entity does not exist for user') > -1) {
                    showErrorMessage('مجاز به انجام عمليات درخواست شده نيستيد <a href="javascript:{showErrorMessage(\'' + msg + '\')}">جزئيات<a>');
                } else
                    if (msg.indexOf('integrity constraint') > -1) {
                        showErrorMessage('به علت ارتباط با سایر اطلاعات قابل حذف نیست', 5);
                    }
                    else if (msg.indexOf('unique constraint') > -1) {
                        showErrorMessage('این مورد تكراری است و قابل ذخیره نمی باشد', 5);
                    } else if (msg.indexOf('DatabaseException') > -1) {
                        showErrorMessage('خطای مرتبط با بانك داده! عملیات انجام نشد', 10);
                    }
                    else {
                        showErrorMessage('خطا! عمیلات انجام نشد <a href="javascript:{showErrorMessage(\'' + msg + '\')}">جزئيات<a>');
                    }
    }
}
// Sets the message DWR displays while asynchronous calls are in flight.
function useLoadingMessage(msg) {
    dwr.util.useLoadingMessage(msg);
}
// Scratch slot used by highLight/normLight to remember the original
// background color of the currently highlighted element.
var tmp;
// Page bootstrap: installs the DWR error handler, resets paging, picks a
// page size suited to the screen height, then runs the page-specific init()
// and wires up the tab widgets.
function pageLoad() {
    dwr.engine.setErrorHandler(errh);
    pageNo = 0;
    if (window.screen.height < 800)
        dwr.util.setValue('pageSize', '10');
    else if (window.screen.height < 1000)
        dwr.util.setValue('pageSize', '14');
    else
        dwr.util.setValue('pageSize', '18');
    //showElements(new Array('wait'));
    init();
    Tabs.init('tabList', 'tabContents');
    SearchTabs.init('searchTabList', 'searchTabContents');
}
// Highlights an element with a green background; the previous color is
// stashed in the shared global `tmp` so normLight can restore it. Only one
// element can be highlighted at a time.
function highLight(id) {
    tmp = getById(id).style.backgroundColor;
    getById(id).style.backgroundColor = '#ADEBAB';
}
// Shorthand for document.getElementById.
function getById(id) {
    return document.getElementById(id);
}
// Navigates one step back. window.event exists only on legacy IE; on other
// browsers this calls back() — NOTE(review): back() is not defined in this
// file; presumably the legacy window.back() was intended. Confirm.
function backPage() {
    if (window.event != null)
        history.back();
    else
        back();
}
// Restores the background color saved by highLight.
function normLight(id) {
    getById(id).style.backgroundColor = tmp;
}
// Ids of the page's mutually exclusive sections.
var ids = new Array('table_content', 'edit_form', 'search_form', 'wait', 'parentTitle');
// Hides every known section, then shows only the sections whose ids are
// passed in (missing elements are silently skipped).
function showElements(showIds) {
    for (var i = 0; i < ids.length; i++) {
        if (getById(ids[i]))
            getById(ids[i]).style.display = 'none';
    }
    for (i = 0; i < showIds.length; i++) {
        if (getById(showIds[i]))
            getById(showIds[i]).style.display = 'block';
    }
}
// DWR-based file upload is supported everywhere except Internet Explorer.
function dwrUploadSupport() {
    return !isIE();
}
// True when the browser identifies itself as MS Internet Explorer.
function isIE() {
    return navigator.appVersion.indexOf('MSIE') > -1;
}
// Cross-browser event registration: tries the W3C addEventListener API
// first, then legacy IE attachEvent; returns false when neither exists.
function addEvent(obj, evType, fn) {
    if (obj.addEventListener) {
        obj.addEventListener(evType, fn, false);
        return true;
    }
    if (obj.attachEvent) {
        return obj.attachEvent("on" + evType, fn);
    }
    return false;
}
// Cancels the current event cross-browser (IE's returnValue vs W3C's
// preventDefault) and returns false so inline handlers abort as well.
function cancelEvent(e) {
    if (window.event)
        window.event.returnValue = false;
    else
        e.preventDefault();
    return false;
}
// Toggles a CalendarPopup date picker for the given date input. The popup
// div is shared between a '@from'/'@to' input pair, hence the name rewrite
// to always address the '@from' variant's div.
function showDate(dateInputName, anchorName) {
    if (getById(dateInputName.replace('@to', '@from') + 'Div').style.visibility == 'visible') {
        // picker already open -> close it
        getById(dateInputName.replace('@to', '@from') + 'Div').style.visibility = 'hidden';
        return;
    }
    var cal = new CalendarPopup(dateInputName.replace('@to', '@from') + 'Div');
    cal.setCssPrefix('TEST');
    cal.showNavigationDropdowns();
    cal.select(window.getById(dateInputName), anchorName, 'yyyy/MM/dd');
    return false;
}
// Flips an arrow image between its 'downArrow' and 'upArrow' variants.
function changeDownUpArrow(id) {
    var img = getById(id);
    img.src = img.src.indexOf('downArrow') > -1
        ? img.src.replace('downArrow', 'upArrow')
        : img.src.replace('upArrow', 'downArrow');
}
// Toggles an element between hidden and its default display mode.
function showHide(id) {
    var elm = getById(id);
    if (elm) {
        if (elm.style.display == 'block' || elm.style.display == '')
            elm.style.display = 'none';
        else
            elm.style.display = '';
    }
}
// Makes the element with the given id visible (display:block); no-op when
// the element does not exist.
function showElement(id) {
    var elm = getById(id);
    if (elm)
        elm.style.display = 'block';
}
// Hides the element with the given id; no-op when the element is missing.
function hideElement(id) {
    var elm = getById(id);
    if (elm)
        // FIX: reuse the node fetched above instead of a redundant second
        // getById(id) lookup (consistency with showElement/showHide).
        elm.style.display = 'none';
}
// Advances to the next result page; only possible when the current page is
// full (resultNum == pageSize implies more rows may exist).
function nextPage() {
    if (resultNum == pageSize) {
        pageNo++;
        init();
    }
}
// Steps back one result page, never below page 0.
function prevPage() {
    if (pageNo > 0) {
        pageNo--;
        init();
    }
}
// Jumps straight to the given (0-based) page number and reloads the grid.
function showPage(pNo) {
    pageNo = pNo;
    init();
}
// Rebuilds the pager links under #navigateNums: renders a window of up to
// 10 page numbers centered near `current` (0-based), highlights the current
// page in red, and shows/hides the next/prev arrow icons.
function createNavigation(resultNum, current, pageSize) {
    var template = '<A class="noborder" href="javascript:{}" onclick="showPage(pageNo)" >num </A>';
    var nav = getById('navigateNums');
    nav.innerHTML = '';
    // total page count, rounded up
    var pageCount = Math.floor(resultNum / pageSize);
    if (pageCount * pageSize < resultNum)
        pageCount++;
    if (current > pageCount)
        current = pageCount;
    // sliding window of at most 10 page links
    var start = Math.max(1, current - 3);
    var end = Math.min(pageCount, start + 9);
    if (end - start < 10)
        start = Math.max(1, end - 9);
    if (current + 1 < pageCount)
        showElement('nextIcon');
    else
        hideElement('nextIcon');
    if (current == 0)
        hideElement('prevIcon');
    else
        showElement('prevIcon');
    var childs = '';
    for (i = start; i <= end; i++) {
        if (i == current + 1)
            // current page: plain bold red text, not a link
            childs += '<font face="tahoma" size="2" color="red"><b>' + i + '</b> </font>';
        else
            childs += template.replace('num', ' ' + i).replace('pageNo', i - 1);
    }
    if (isIE()) {
        try {
            nav.insertAdjacentHTML('BeforeEnd', childs); //IE
        } catch (e) { }
    } else {
        nav.innerHTML = childs;
    }
    dwr.util.setValue('resultNum', resultNum);
}
// Re-displays the currently selected entity's edit form, if any.
function refreshForm() {
    if (currentId != -1)
        showCurrent(currentId);
}
// Replaces every '.' with '$' (used to build DOM-safe element ids from
// dotted entity field names); null/empty input is returned unchanged.
function dotToDolar(str) {
    return str ? str.replace(/\./gi, "$") : str;
}
// Inverse of dotToDolar: turns every '$' back into '.'.
function dolarToDot(str) {
    return str ? str.replace(/\$/gi, ".") : str;
}
// Normalizes Arabic kaf/yeh characters to their Persian equivalents.
function toFarsi(str) {
    return str.replace(/ك/gi, "ک").replace(/ي/gi, "ی");
}
// Returns the id of the selected item in the search grid.
// Scans the <input> elements under the given container for a checked input
// whose id starts with 'selectedItem' and extracts the entity id from it.
function selectedRowId(parentId) {
    var selected = null;
    var inputs = getById(parentId).getElementsByTagName('input');
    for (i = 0; i < inputs.length; i++) {
        if (inputs[i].id && inputs[i].id.indexOf('selectedItem') == 0 && inputs[i].checked) {
            // NOTE(review): substring(14) skips 'selectedItem' (12 chars)
            // plus what is presumably a 2-char separator in the generated
            // id -- confirm against the grid markup generator.
            selected = inputs[i].id.substring(14);
            break;
        }
    }
    return selected;
}
// Switches the page to the advanced search form.
function showFilter() {
    showElements(new Array('search_form'));
}
// Accumulated search filter. Entries are joined with '@;@'; each entry is
// 'template@@fieldName@@value'.
var filter = '';
// Appends one criterion to the global filter string.
function addFilter(template, fname, value) {
    if (filter.length > 0)
        filter += '@;@';
    filter += template + '@@' + fname + '@@' + value;
}
var initFilter = ''; //In some entities we need initially filter list, this field use for this
// Builds the filter string from the page's search fields: for every name in
// searchFields, reads 'search_<name>' (dots encoded as '$'), skips empty or
// '-1' values, and appends the field's SQL-like template. Also toggles the
// "filtered" icon / simple-search box depending on whether any criterion is
// active. Returns the filter with Arabic characters normalized to Persian.
function getSearchFilter() {
    filter = initFilter;
    for (i = 0; i < searchFields.length; i++) {
        var fname = 'search_' + dotToDolar(searchFields[i]);
        if (getById(fname) == null) {
            continue;
        }
        var val = dwr.util.getValue(fname);
        if (val != null && val != '-1' && val != '') {
            var template = dwr.util.getValue(fname + '_template');
            // fall back to a simple equality template
            if (template == null || template.length < 2)
                template = "fname = 'value'";
            addFilter(template, searchFields[i], val);
        }
    }
    if (filter.length == 0) {
        getById('filteredIcon').style.visibility = 'hidden';
        if (getById('simpleSearch') != null)
            getById('simpleSearch').style.display = 'block';
    }
    else {
        getById('filteredIcon').style.visibility = 'visible';
        if (getById('simpleSearch') != null)
            getById('simpleSearch').style.display = 'none';
    }
    return toFarsi(filter);
}
// True after the user has explicitly run a search.
var isSearchState = false;
// Runs a fresh search: drops any inherited top filter and reloads page 0.
function search() {
    isSearchState = true;
    resetTopFilter(false);
    pageNo = 0;
    init();
}
// Clears every search criterion: the inherited parent topFilter, the global
// filter string, all inputs/selects/textareas of the advanced search form,
// and the simple-search widgets. Hidden metadata inputs (templates, entity
// and field names) are deliberately preserved.
function clearFilter() {
    parent.topFilter = '';
    filter = '';
    var inputs = getById('search_form').getElementsByTagName('input');
    for (i = 0; i < inputs.length; i++) {
        // keep buttons and the hidden search metadata fields
        if (inputs[i].type == 'button' ||
            (inputs[i].type == 'hidden' && inputs[i].id.indexOf('search_') == 0 && (inputs[i].id.indexOf('EntityName') > -1 || inputs[i].id.indexOf('FieldName') > -1 || inputs[i].id.indexOf('Filter') > -1))
            || inputs[i].id.indexOf('SimpleSearch') > -1)
            continue;
        dwr.util.setValue(inputs[i], null);
    }
    var selects = getById('search_form').getElementsByTagName('select');
    for (i = 0; i < selects.length; i++) {
        if (selects[i].getAttribute('class') != 'template' || (selects[i].outerHTML && selects[i].outerHTML.indexOf('template') == -1)) {
            selects[i].selectedIndex = 0;
            // reset the auto-complete's companion hidden id field
            var autoCompletId = selects[i].id.substring(0, selects[i].id.length - 6) + '$id';
            dwr.util.setValue(autoCompletId, '-1');
        }
    }
    var inputs = getById('search_form').getElementsByTagName('textarea');
    for (i = 0; i < inputs.length; i++) {
        dwr.util.setValue(inputs[i], null);
    }
    dwr.util.setValue('parentTitle', null);
    if (getById('simpleSearch') != null) {
        var simpleSearchs = getById('simpleSearch').getElementsByTagName('input');
        for (i = 0; i < simpleSearchs.length; i++) {
            if (simpleSearchs[i].type != 'hidden') {//all filter query and order fileld in auto complete keeped in hidden inputs. For autocomplets only id must be -1 to clear
                dwr.util.setValue(simpleSearchs[i], null);
            }
        }
        simpleSearchs = getById('simpleSearch').getElementsByTagName('select');
        for (i = 0; i < simpleSearchs.length; i++) {
            simpleSearchs[i].selectedIndex = 0;
            var autoCompletId = simpleSearchs[i].id.substring(0, simpleSearchs[i].id.length - 6) + 'Id';
            dwr.util.setValue(autoCompletId, '-1');
        }
    }
    // var inputs = getById('search_form').getElementsByTagName('select');
    // for(i = 0; i < inputs.length; i++){
    //     inputs[i].selectedIndex = 0;
    // }
}
// Clears every search criterion and reloads the unfiltered list.
function delFilter() {
    clearFilter();
    init();
}
// Delegates an info message to the enclosing frame's message bar (no-op
// when this page is the top frame).
function showMessage(msg, delay) {
    if (parent != this)
        parent.showMessage(msg, delay);
    // else
    //     alert(msg);
}
// Delegates an error message to the enclosing frame's message bar.
function showErrorMessage(msg, delay) {
    if (parent != this)
        parent.showErrorMessage(msg, delay);
    // else
    //     alert(msg);
}
// Shows the <tr> whose id equals `shown` and hides every other row whose id
// appears in the comma-delimited `cats` list (ids are matched as ',id,').
// NOTE(review): display 'block' on a <tr> is non-standard; '' or
// 'table-row' would be the conventional value -- confirm browser targets.
function showCategory(cats, shown) {
    var tags = document.getElementsByTagName('tr');
    for (i = 0; i < tags.length; i++) {
        if (tags[i].id == shown)
            tags[i].style.display = 'block';
        else
        if (cats.indexOf(',' + tags[i].id + ',') > -1) {
            tags[i].style.display = 'none';
        }
    }
}
//------------------------------------- Base ---------------------------------------------------------------------
//parent node is a filter and is parent name such as 'fldPersonCure' and childNode is field name of this entity that is equal to parent
// Copies the id/title pairs stored in parent.topFilter into this page's
// search form, simple-search widgets and auto-complete caches.
// parentNodes/childNodes are parallel comma-separated lists; topFilter is a
// flat '@@'-joined sequence of (key, id, title) triples.
function baseSetTopFilter(parentNodes, childNodes) {
    var pnodes = parentNodes.split(',');
    var cnodes = childNodes.split(',');
    var parentNodeSetted = false;
    var fields = parent.topFilter.split('@@');
    // BUG FIX: the original reused one implicitly-global counter `i` for
    // both this loop and the nested loop over `fields`, so the outer loop
    // exited after the first parent/child pair had been processed.
    for (var n = 0; n < pnodes.length; n++) {
        var parentNode = pnodes[n];
        var childNode = cnodes[n];
        for (var i = 0; i < fields.length; i += 3) {
            if (fields[i].length < 2)
                continue;
            if (fields[i] == 'search_e$id')
                dwr.util.setValue(fields[i], fields[i + 1]);
            else if (fields[i] == parentNode) {
                dwr.util.setValue('search_e$' + childNode + '$id', fields[i + 1]);
                dwr.util.setValue(childNode + 'SimpleSearchId', fields[i + 1]);
                if (getById('search_e$' + childNode + 'Title'))
                    dwr.util.setValue('search_e$' + childNode + 'Title', fields[i + 2]);
                if (fields[i + 2] && fields[i + 2].length > 0 && getById(childNode + 'SimpleSearchTitle'))
                    dwr.util.setValue(childNode + 'SimpleSearchTitle', fields[i + 2].split('-')[0]);
                addToAutoComplete(childNode, fields[i + 1], fields[i + 2]);
                addToAutoComplete("search_e$" + childNode, fields[i + 1], fields[i + 2]);
                addToAutoComplete(childNode + 'SimpleSearch', fields[i + 1], fields[i + 2]);
                dwr.util.setValue(childNode + 'Id', fields[i + 1]);
                if (fields[i + 2] && fields[i + 2].length > 0)
                    dwr.util.setValue(childNode + 'Title', fields[i + 2].split('-')[0]);
                if (fields[i + 2] && fields[i + 2].length > 4)
                    dwr.util.setValue('parentTitle', fields[i + 2]);
                parentNodeSetted = true;
            }
        }
    }
    return true;
}
// field name such as fldCureCase. this field and his value set as top filter of next page
// Opens the detail page for the selected grid row: the fieldName plus the
// selected entity's id/title become the next page's topFilter. URLs of the
// form 'command:<js>' execute the embedded snippet instead of navigating.
function baseGo(fieldName, noItemSelectMsg) {
    var entityId = selectedRowId('table_content');
    var url = getById('selectAction').value;
    if (entityId || url.indexOf("command:") > -1) {
        if (url.length < 2) {
            // no target configured for this grid
            showMessage(noItemSelectMsg, 2);
        }
        else {
            if (url.indexOf("command:") == 0) {
                // run the embedded command asynchronously
                setTimeout(url.substring(8), 1);
            } else {//go to url
                var path = location.href.split('/');
                var currURL = path[path.length - 2];
                parent.addHistory(currURL, entityId);
                parent.topFilter = fieldName + '@@' + entityId + '@@' + entitiesCache[entityId].title;
                parent.setContent(url);
            }
        }
    }
    else {
        showMessage('ردیفی انتخاب نشده است', 4);
    }
}
// Intentionally empty default implementation; entity pages may override it
// to clear their inherited top filter.
function baseResetTopFilter(parentNodes, childNodes) {
}
//------------------------------------- Base -------------------------------------------------------------------- end -
// Shows only the <optgroup> whose id matches `id` inside the given select,
// hiding all of its sibling groups.
function filterStaticCombo(elmId, id) {
    var groups = getById(elmId).getElementsByTagName("optgroup");
    for (i = 0; i < groups.length; i++) {
        groups[i].style.display = (groups[i].id == id) ? 'block' : 'none';
    }
}
// Validates every mandatory form field (marked by name="*") across inputs,
// textareas and selects; returns false on the first missing value.
function testMandatories() {
    var tags = new Array('input', 'textarea', 'select');
    for (j = 0; j < tags.length; j++) {
        var elm = document.getElementsByTagName(tags[j]);
        for (i = 0; i < elm.length; i++) {
            if (elm[i].name == "*") {
                if (!testMandatory(elm[i].id))
                    return false;
            }
        }
    }
    return true;
}
// Checks one mandatory field: empty text or an unselected combo (index 0)
// triggers an error message built from the field's caption element
// ('<id>Caption'), focuses the field and returns false.
function testMandatory(id) {
    var elm = getById(id);
    var val = dwr.util.getValue(id);
    // for selects, the first option is the "nothing chosen" placeholder
    if (elm.tagName == 'SELECT' && elm.selectedIndex == 0) {
        val = '';
    }
    if (getById(id + 'Caption') == null)
        alert(id + 'Caption is null');
    if (val == null || val.length == 0) {
        var title = getById(id + 'Caption').innerHTML;
        // strip a leading </font> fragment from the caption markup
        var i = title.indexOf('</font>');
        title = title.substring(i == -1 ? 0 : i + 7);
        showErrorMessage('<font color=\"red\">"</font><font color=\"navy\">' + title.replace(':', '</font><font color=\"red\">"</font>') + ' اجباری است', 4);
        elm.focus();
        return false;
    }
    return true;
}
// Asks for confirmation, then logs the user out of the system.
function logout() {
    if (!confirm('آیا مطمئنید؟ شما قصد خروج از سامانه را دارید'))
        return;
    exitSystem();
}
// Window-close hook; the automatic server-side logout is currently
// disabled (kept for reference).
function winClosed() {
    /*if(!logoutSelected){
        dwr.util.useLoadingMessage();
        dwr.engine.beginBatch();
        SecurityCreator.logout( function(done) {
        });
        dwr.engine.endBatch();
        alert('كاربر از سامانه خارج شد');
        document.location.replace('index.jsp');
    }*/
}
// Set once the user has explicitly logged out via exitSystem().
var logoutSelected = false;
// Performs the server-side logout through DWR and, on success, redirects
// to the login page after a short delay.
function exitSystem() {
    dwr.util.useLoadingMessage();
    dwr.engine.beginBatch();
    SecurityCreator.logout(function (done) {
        logoutSelected = true;
        showMessage('خروج با موفقیت انجام شد', 4);
        window.setTimeout("document.location.replace('index.jsp')", 1500);
    });
    dwr.engine.endBatch();
}
// Today's date string as maintained by the enclosing frame.
function getToday() {
    return parent.today;
}
// Fills the given date input with today's date, but only when the field is
// still empty (fewer than 5 characters); failures are silently ignored.
function setToday(id) {
    try {
        var oldVal = dwr.util.getValue(id);
        if (oldVal == null || oldVal.length < 5) {
            dwr.util.setValue(id, getToday());
        }
    } catch (e) { }
}
// Replaces a <select>'s options with the given HTML. On IE, innerHTML of a
// select is read-only, so the whole element is rebuilt via outerHTML and
// re-inserted into its parent; other browsers assign innerHTML directly.
function setSelectOptions(selectElmId, optionsHTML) {
    var selElm = getById(selectElmId);
    if (isIE()) {
        var parentELM = selElm.parentNode;
        // ensure there is some inner content to substitute against
        if (selElm.innerHTML.length < 5)
            selElm.innerHTML = 'aa';
        var x = selElm.outerHTML.replace(selElm.innerHTML, optionsHTML);
        //var id = selElm.getAttribute('id');
        parentELM.removeChild(selElm);
        parentELM.insertAdjacentHTML('BeforeEnd', x); //IE
    } else {
        selElm.innerHTML = optionsHTML;
    }
}
// ---------------------------- cookie --------------------------------------------
function setCookie(name, value) {
document.cookie += '||' + name + '=' + value;
}
function getCookie(name) {
if (document.cookie.indexOf('||' + name) == -1)
return null;
var i = document.cookie.indexOf('||' + name) + name.length + 3;
var j = document.cookie.indexOf(';', i);
return document.cookie.substring(i, j);
}
// ---------------------------- cookie end------------------------------------------
// Sorts the result grid ascending on the given field and reloads it.
function orderAsc(fieldName) {
    order = fieldName + ' asc';
    init();
}
// Sorts the result grid descending on the given field and reloads it.
function orderDesc(fieldName) {
    order = fieldName + ' desc';
    init();
}
// Encodes a string as '$<charCode>' pairs, e.g. 'ab' -> '$97$98';
// null input yields null.
function encode(str) {
    if (str == null)
        return null;
    var ret = '';
    // FIX: declare the loop counter; the original leaked `i` as a global.
    for (var i = 0; i < str.length; i++) {
        ret += '$' + str.charCodeAt(i);
    }
    return ret;
}
// Inverse of encode(): '$97$98' -> 'ab'; null input yields null.
function decode(str) {
    if (str == null)
        return null;
    var ret = '';
    var parts = str.split('$');
    // FIX: declared loop counter (was an implicit global).
    for (var i = 0; i < parts.length; i++) {
        if (parts[i] != null && parts[i].length > 0)
            // String.fromCharCode coerces the numeric string to a number
            ret += String.fromCharCode(parts[i]);
    }
    return ret;
}
from collections import namedtuple
from copy import copy, deepcopy
from datetime import datetime, timedelta
from textwrap import dedent
from distutils.version import LooseVersion
import numpy as np
import pandas as pd
from xray import Variable, Dataset, DataArray
from xray.core import indexing
from xray.core.variable import (Coordinate, as_variable, _as_compatible_data)
from xray.core.indexing import (NumpyIndexingAdapter, PandasIndexAdapter,
LazilyIndexedArray)
from xray.core.pycompat import PY3, OrderedDict
from . import TestCase, source_ndarray
class VariableSubclassTestCases(object):
    """Shared test battery for Variable-like classes.

    Subclasses set ``cls`` to the class under test (e.g. ``Variable`` or
    ``Coordinate``); every ``test_*`` method defined here then runs against
    that class via the inherited unittest machinery.
    """

    def test_properties(self):
        data = 0.5 * np.arange(10)
        v = self.cls(['time'], data, {'foo': 'bar'})
        self.assertEqual(v.dims, ('time',))
        self.assertArrayEqual(v.values, data)
        self.assertEqual(v.dtype, float)
        self.assertEqual(v.shape, (10,))
        self.assertEqual(v.size, 10)
        self.assertEqual(v.nbytes, 80)
        self.assertEqual(v.ndim, 1)
        self.assertEqual(len(v), 10)
        self.assertEqual(v.attrs, {'foo': u'bar'})

    def test_attrs(self):
        v = self.cls(['time'], 0.5 * np.arange(10))
        self.assertEqual(v.attrs, {})
        attrs = {'foo': 'bar'}
        v.attrs = attrs
        self.assertEqual(v.attrs, attrs)
        self.assertIsInstance(v.attrs, OrderedDict)
        v.attrs['foo'] = 'baz'
        self.assertEqual(v.attrs['foo'], 'baz')

    def test_getitem_dict(self):
        v = self.cls(['x'], np.random.randn(5))
        actual = v[{'x': 0}]
        expected = v[0]
        self.assertVariableIdentical(expected, actual)

    def assertIndexedLikeNDArray(self, variable, expected_value0,
                                 expected_dtype=None):
        """Given a 1-dimensional variable, verify that the variable is indexed
        like a numpy.ndarray.
        """
        self.assertEqual(variable[0].shape, ())
        self.assertEqual(variable[0].ndim, 0)
        self.assertEqual(variable[0].size, 1)
        # test identity
        self.assertTrue(variable.equals(variable.copy()))
        self.assertTrue(variable.identical(variable.copy()))
        # check value is equal for both ndarray and Variable
        self.assertEqual(variable.values[0], expected_value0)
        self.assertEqual(variable[0].values, expected_value0)
        # check type or dtype is consistent for both ndarray and Variable
        if expected_dtype is None:
            # check output type instead of array dtype
            self.assertEqual(type(variable.values[0]), type(expected_value0))
            self.assertEqual(type(variable[0].values), type(expected_value0))
        else:
            self.assertEqual(variable.values[0].dtype, expected_dtype)
            self.assertEqual(variable[0].values.dtype, expected_dtype)

    def test_index_0d_int(self):
        for value, dtype in [(0, np.int_),
                             (np.int32(0), np.int32)]:
            x = self.cls(['x'], [value])
            self.assertIndexedLikeNDArray(x, value, dtype)

    def test_index_0d_float(self):
        for value, dtype in [(0.5, np.float_),
                             (np.float32(0.5), np.float32)]:
            x = self.cls(['x'], [value])
            self.assertIndexedLikeNDArray(x, value, dtype)

    def test_index_0d_string(self):
        for value, dtype in [('foo', np.dtype('U3' if PY3 else 'S3')),
                             (u'foo', np.dtype('U3'))]:
            x = self.cls(['x'], [value])
            self.assertIndexedLikeNDArray(x, value, dtype)

    def test_index_0d_datetime(self):
        d = datetime(2000, 1, 1)
        x = self.cls(['x'], [d])
        self.assertIndexedLikeNDArray(x, np.datetime64(d))
        x = self.cls(['x'], [np.datetime64(d)])
        self.assertIndexedLikeNDArray(x, np.datetime64(d), 'datetime64[ns]')
        x = self.cls(['x'], pd.DatetimeIndex([d]))
        self.assertIndexedLikeNDArray(x, np.datetime64(d), 'datetime64[ns]')

    def test_index_0d_timedelta64(self):
        td = timedelta(hours=1)
        x = self.cls(['x'], [np.timedelta64(td)])
        self.assertIndexedLikeNDArray(x, np.timedelta64(td), 'timedelta64[ns]')
        x = self.cls(['x'], pd.to_timedelta([td]))
        self.assertIndexedLikeNDArray(x, np.timedelta64(td), 'timedelta64[ns]')

    def test_index_0d_not_a_time(self):
        d = np.datetime64('NaT')
        x = self.cls(['x'], [d])
        self.assertIndexedLikeNDArray(x, d, None)

    def test_index_0d_object(self):
        # arbitrary hashable objects must survive 0d indexing unchanged
        class HashableItemWrapper(object):
            def __init__(self, item):
                self.item = item

            def __eq__(self, other):
                return self.item == other.item

            def __hash__(self):
                return hash(self.item)

            def __repr__(self):
                return '%s(item=%r)' % (type(self).__name__, self.item)

        item = HashableItemWrapper((1, 2, 3))
        x = self.cls('x', [item])
        self.assertIndexedLikeNDArray(x, item)

    def test_index_and_concat_datetime(self):
        # regression test for #125
        date_range = pd.date_range('2011-09-01', periods=10)
        for dates in [date_range, date_range.values,
                      date_range.to_pydatetime()]:
            expected = self.cls('t', dates)
            for times in [[expected[i] for i in range(10)],
                          [expected[i:(i + 1)] for i in range(10)],
                          [expected[[i]] for i in range(10)]]:
                actual = Variable.concat(times, 't')
                self.assertEqual(expected.dtype, actual.dtype)
                self.assertArrayEqual(expected, actual)

    def test_0d_time_data(self):
        # regression test for #105
        x = self.cls('time', pd.date_range('2000-01-01', periods=5))
        expected = np.datetime64('2000-01-01T00Z', 'ns')
        self.assertEqual(x[0].values, expected)

    def test_datetime64_conversion(self):
        times = pd.date_range('2000-01-01', periods=3)
        for values, preserve_source in [
                (times, False),
                (times.values, True),
                (times.values.astype('datetime64[s]'), False),
                (times.to_pydatetime(), False),
                ]:
            v = self.cls(['t'], values)
            self.assertEqual(v.dtype, np.dtype('datetime64[ns]'))
            self.assertArrayEqual(v.values, times.values)
            self.assertEqual(v.values.dtype, np.dtype('datetime64[ns]'))
            # only ns-precision ndarray input may be kept without copying
            same_source = source_ndarray(v.values) is source_ndarray(values)
            if preserve_source and self.cls is Variable:
                self.assertTrue(same_source)
            else:
                self.assertFalse(same_source)

    def test_timedelta64_conversion(self):
        times = pd.timedelta_range(start=0, periods=3)
        for values, preserve_source in [
                (times, False),
                (times.values, True),
                (times.values.astype('timedelta64[s]'), False),
                (times.to_pytimedelta(), False),
                ]:
            v = self.cls(['t'], values)
            self.assertEqual(v.dtype, np.dtype('timedelta64[ns]'))
            self.assertArrayEqual(v.values, times.values)
            self.assertEqual(v.values.dtype, np.dtype('timedelta64[ns]'))
            same_source = source_ndarray(v.values) is source_ndarray(values)
            if preserve_source and self.cls is Variable:
                self.assertTrue(same_source)
            else:
                self.assertFalse(same_source)

    def test_object_conversion(self):
        data = np.arange(5).astype(str).astype(object)
        actual = self.cls('x', data)
        self.assertEqual(actual.dtype, data.dtype)

    def test_pandas_data(self):
        v = self.cls(['x'], pd.Series([0, 1, 2], index=[3, 2, 1]))
        self.assertVariableIdentical(v, v[[0, 1, 2]])
        v = self.cls(['x'], pd.Index([0, 1, 2]))
        self.assertEqual(v[0].values, v.values[0])

    def test_1d_math(self):
        x = 1.0 * np.arange(5)
        y = np.ones(5)
        v = self.cls(['x'], x)
        # unary ops
        self.assertVariableIdentical(v, +v)
        self.assertVariableIdentical(v, abs(v))
        self.assertArrayEqual((-v).values, -x)
        # binary ops with numbers
        self.assertVariableIdentical(v, v + 0)
        self.assertVariableIdentical(v, 0 + v)
        self.assertVariableIdentical(v, v * 1)
        self.assertArrayEqual((v > 2).values, x > 2)
        self.assertArrayEqual((0 == v).values, 0 == x)
        self.assertArrayEqual((v - 1).values, x - 1)
        self.assertArrayEqual((1 - v).values, 1 - x)
        # binary ops with numpy arrays
        self.assertArrayEqual((v * x).values, x ** 2)
        self.assertArrayEqual((x * v).values, x ** 2)
        self.assertArrayEqual(v - y, v - 1)
        self.assertArrayEqual(y - v, 1 - v)
        # verify attributes are dropped
        v2 = self.cls(['x'], x, {'units': 'meters'})
        self.assertVariableIdentical(v, +v2)
        # binary ops with all variables
        self.assertArrayEqual(v + v, 2 * v)
        w = self.cls(['x'], y, {'foo': 'bar'})
        self.assertVariableIdentical(v + w, self.cls(['x'], x + y))
        self.assertArrayEqual((v * w).values, x * y)
        # something complicated
        self.assertArrayEqual((v ** 2 * w - 1 + x).values, x ** 2 * y - 1 + x)
        # make sure dtype is preserved (for Index objects)
        self.assertEqual(float, (+v).dtype)
        self.assertEqual(float, (+v).values.dtype)
        self.assertEqual(float, (0 + v).dtype)
        self.assertEqual(float, (0 + v).values.dtype)
        # check types of returned data
        self.assertIsInstance(+v, Variable)
        self.assertNotIsInstance(+v, Coordinate)
        self.assertIsInstance(0 + v, Variable)
        self.assertNotIsInstance(0 + v, Coordinate)

    def test_1d_reduce(self):
        x = np.arange(5)
        v = self.cls(['x'], x)
        actual = v.sum()
        expected = Variable((), 10)
        self.assertVariableIdentical(expected, actual)
        self.assertIs(type(actual), Variable)

    def test_array_interface(self):
        x = np.arange(5)
        v = self.cls(['x'], x)
        self.assertArrayEqual(np.asarray(v), x)
        # test patched in methods
        self.assertArrayEqual(v.astype(float), x.astype(float))
        self.assertVariableIdentical(v.argsort(), v)
        self.assertVariableIdentical(v.clip(2, 3), self.cls('x', x.clip(2, 3)))
        # test ufuncs
        self.assertVariableIdentical(np.sin(v), self.cls(['x'], np.sin(x)))
        self.assertIsInstance(np.sin(v), Variable)
        self.assertNotIsInstance(np.sin(v), Coordinate)

    def example_1d_objects(self):
        # yields (variable, raw data) pairs covering several 1d dtypes
        for data in [range(3),
                     0.5 * np.arange(3),
                     0.5 * np.arange(3, dtype=np.float32),
                     pd.date_range('2000-01-01', periods=3),
                     np.array(['a', 'b', 'c'], dtype=object)]:
            yield (self.cls('x', data), data)

    def test___array__(self):
        for v, data in self.example_1d_objects():
            self.assertArrayEqual(v.values, np.asarray(data))
            self.assertArrayEqual(np.asarray(v), np.asarray(data))
            self.assertEqual(v[0].values, np.asarray(data)[0])
            self.assertEqual(np.asarray(v[0]), np.asarray(data)[0])

    def test_equals_all_dtypes(self):
        for v, _ in self.example_1d_objects():
            v2 = v.copy()
            self.assertTrue(v.equals(v2))
            self.assertTrue(v.identical(v2))
            self.assertTrue(v[0].equals(v2[0]))
            self.assertTrue(v[0].identical(v2[0]))
            self.assertTrue(v[:2].equals(v2[:2]))
            self.assertTrue(v[:2].identical(v2[:2]))

    def test_eq_all_dtypes(self):
        # ensure that we don't choke on comparisons for which numpy returns
        # scalars
        expected = self.cls('x', 3 * [False])
        for v, _ in self.example_1d_objects():
            actual = 'z' == v
            self.assertVariableIdentical(expected, actual)
            actual = ~('z' != v)
            self.assertVariableIdentical(expected, actual)

    def test_concat(self):
        x = np.arange(5)
        y = np.arange(5, 10)
        v = self.cls(['a'], x)
        w = self.cls(['a'], y)
        self.assertVariableIdentical(Variable(['b', 'a'], np.array([x, y])),
                                     Variable.concat([v, w], 'b'))
        self.assertVariableIdentical(Variable(['b', 'a'], np.array([x, y])),
                                     Variable.concat((v, w), 'b'))
        self.assertVariableIdentical(Variable(['b', 'a'], np.array([x, y])),
                                     Variable.concat((v, w), 'b'))
        with self.assertRaisesRegexp(ValueError, 'inconsistent dimensions'):
            Variable.concat([v, Variable(['c'], y)], 'b')
        # test indexers
        actual = Variable.concat([v, w], indexers=[range(0, 10, 2), range(1, 10, 2)], dim='a')
        expected = Variable('a', np.array([x, y]).ravel(order='F'))
        self.assertVariableIdentical(expected, actual)
        # test concatenating along a dimension
        v = Variable(['time', 'x'], np.random.random((10, 8)))
        self.assertVariableIdentical(v, Variable.concat([v[:5], v[5:]], 'time'))
        self.assertVariableIdentical(v, Variable.concat([v[:5], v[5:6], v[6:]], 'time'))
        self.assertVariableIdentical(v, Variable.concat([v[:1], v[1:]], 'time'))
        # test dimension order
        self.assertVariableIdentical(v, Variable.concat([v[:, :5], v[:, 5:]], 'x'))
        with self.assertRaisesRegexp(ValueError, 'same number of dimensions'):
            Variable.concat([v[:, 0], v[:, 1:]], 'x')

    def test_concat_attrs(self):
        # different or conflicting attributes should be removed
        v = self.cls('a', np.arange(5), {'foo': 'bar'})
        w = self.cls('a', np.ones(5))
        expected = self.cls('a', np.concatenate([np.arange(5), np.ones(5)]))
        self.assertVariableIdentical(expected, Variable.concat([v, w], 'a'))
        w.attrs['foo'] = 2
        self.assertVariableIdentical(expected, Variable.concat([v, w], 'a'))
        w.attrs['foo'] = 'bar'
        expected.attrs['foo'] = 'bar'
        self.assertVariableIdentical(expected, Variable.concat([v, w], 'a'))

    def test_concat_fixed_len_str(self):
        # regression test for #217
        for kind in ['S', 'U']:
            x = self.cls('animal', np.array(['horse'], dtype=kind))
            y = self.cls('animal', np.array(['aardvark'], dtype=kind))
            actual = Variable.concat([x, y], 'animal')
            expected = Variable(
                'animal', np.array(['horse', 'aardvark'], dtype=kind))
            self.assertVariableEqual(expected, actual)

    def test_concat_number_strings(self):
        # regression test for #305
        a = self.cls('x', ['0', '1', '2'])
        b = self.cls('x', ['3', '4'])
        actual = Variable.concat([a, b], dim='x')
        expected = Variable('x', np.arange(5).astype(str).astype(object))
        self.assertVariableIdentical(expected, actual)
        self.assertEqual(expected.dtype, object)
        self.assertEqual(type(expected.values[0]), str)

    def test_copy(self):
        v = self.cls('x', 0.5 * np.arange(10), {'foo': 'bar'})
        for deep in [True, False]:
            w = v.copy(deep=deep)
            self.assertIs(type(v), type(w))
            self.assertVariableIdentical(v, w)
            self.assertEqual(v.dtype, w.dtype)
            if self.cls is Variable:
                # deep copies must not share the underlying buffer
                if deep:
                    self.assertIsNot(source_ndarray(v.values),
                                     source_ndarray(w.values))
                else:
                    self.assertIs(source_ndarray(v.values),
                                  source_ndarray(w.values))
        self.assertVariableIdentical(v, copy(v))
class TestVariable(TestCase, VariableSubclassTestCases):
cls = staticmethod(Variable)
    def setUp(self):
        # shared random float64 fixture, shape (10, 3)
        self.d = np.random.random((10, 3)).astype(np.float64)

    def test_data_and_values(self):
        # .data and .values are interchangeable views of the same ndarray
        v = Variable(['time', 'x'], self.d)
        self.assertArrayEqual(v.data, self.d)
        self.assertArrayEqual(v.values, self.d)
        self.assertIs(source_ndarray(v.values), self.d)
        with self.assertRaises(ValueError):
            # wrong size
            v.values = np.random.random(5)
        d2 = np.random.random((10, 3))
        v.values = d2
        self.assertIs(source_ndarray(v.values), d2)
        d3 = np.random.random((10, 3))
        v.data = d3
        self.assertIs(source_ndarray(v.data), d3)

    def test_numpy_same_methods(self):
        v = Variable([], np.float32(0.0))
        self.assertEqual(v.item(), 0)
        self.assertIs(type(v.item()), float)
        v = Coordinate('x', np.arange(5))
        self.assertEqual(2, v.searchsorted(2))

    def test_datetime64_conversion_scalar(self):
        # all scalar datetime flavors normalize to ns-precision datetime64
        expected = np.datetime64('2000-01-01T00:00:00Z', 'ns')
        for values in [
                np.datetime64('2000-01-01T00Z'),
                pd.Timestamp('2000-01-01T00'),
                datetime(2000, 1, 1),
                ]:
            v = Variable([], values)
            self.assertEqual(v.dtype, np.dtype('datetime64[ns]'))
            self.assertEqual(v.values, expected)
            self.assertEqual(v.values.dtype, np.dtype('datetime64[ns]'))

    def test_timedelta64_conversion_scalar(self):
        # all scalar timedelta flavors normalize to ns-precision timedelta64
        expected = np.timedelta64(24 * 60 * 60 * 10 ** 9, 'ns')
        for values in [
                np.timedelta64(1, 'D'),
                pd.Timedelta('1 day'),
                timedelta(days=1),
                ]:
            v = Variable([], values)
            self.assertEqual(v.dtype, np.dtype('timedelta64[ns]'))
            self.assertEqual(v.values, expected)
            self.assertEqual(v.values.dtype, np.dtype('timedelta64[ns]'))
    def test_0d_str(self):
        v = Variable([], u'foo')
        self.assertEqual(v.dtype, np.dtype('U3'))
        self.assertEqual(v.values, 'foo')
        v = Variable([], np.string_('foo'))
        self.assertEqual(v.dtype, np.dtype('S3'))
        self.assertEqual(v.values, bytes('foo', 'ascii') if PY3 else 'foo')

    def test_0d_datetime(self):
        v = Variable([], pd.Timestamp('2000-01-01'))
        self.assertEqual(v.dtype, np.dtype('datetime64[ns]'))
        self.assertEqual(v.values, np.datetime64('2000-01-01T00Z', 'ns'))

    def test_0d_timedelta(self):
        for td in [pd.to_timedelta('1s'), np.timedelta64(1, 's')]:
            v = Variable([], td)
            self.assertEqual(v.dtype, np.dtype('timedelta64[ns]'))
            self.assertEqual(v.values, np.timedelta64(10 ** 9, 'ns'))

    def test_equals_and_identical(self):
        # equals ignores attrs/names; identical requires them to match too
        d = np.random.rand(10, 3)
        d[0, 0] = np.nan
        v1 = Variable(('dim1', 'dim2'), data=d,
                      attrs={'att1': 3, 'att2': [1, 2, 3]})
        v2 = Variable(('dim1', 'dim2'), data=d,
                      attrs={'att1': 3, 'att2': [1, 2, 3]})
        self.assertTrue(v1.equals(v2))
        self.assertTrue(v1.identical(v2))
        v3 = Variable(('dim1', 'dim3'), data=d)
        self.assertFalse(v1.equals(v3))
        v4 = Variable(('dim1', 'dim2'), data=d)
        self.assertTrue(v1.equals(v4))
        self.assertFalse(v1.identical(v4))
        v5 = deepcopy(v1)
        v5.values[:] = np.random.rand(10, 3)
        self.assertFalse(v1.equals(v5))
        self.assertFalse(v1.equals(None))
        self.assertFalse(v1.equals(d))
        self.assertFalse(v1.identical(None))
        self.assertFalse(v1.identical(d))

    def test_broadcast_equals(self):
        # broadcast_equals compares after broadcasting shapes together
        v1 = Variable((), np.nan)
        v2 = Variable(('x'), [np.nan, np.nan])
        self.assertTrue(v1.broadcast_equals(v2))
        self.assertFalse(v1.equals(v2))
        self.assertFalse(v1.identical(v2))
        v3 = Variable(('x'), [np.nan])
        self.assertTrue(v1.broadcast_equals(v3))
        self.assertFalse(v1.equals(v3))
        self.assertFalse(v1.identical(v3))
        self.assertFalse(v1.broadcast_equals(None))
        v4 = Variable(('x'), [np.nan] * 3)
        self.assertFalse(v2.broadcast_equals(v4))
    def test_as_variable(self):
        # as_variable accepts Variables, DataArrays, duck-typed objects with
        # .values/.dims, (dims, values) tuples and 0d scalars
        data = np.arange(10)
        expected = Variable('x', data)
        self.assertVariableIdentical(expected, as_variable(expected))
        ds = Dataset({'x': expected})
        self.assertVariableIdentical(expected, as_variable(ds['x']))
        self.assertNotIsInstance(ds['x'], Variable)
        self.assertIsInstance(as_variable(ds['x']), Variable)
        self.assertIsInstance(as_variable(ds['x'], strict=False), DataArray)
        FakeVariable = namedtuple('FakeVariable', 'values dims')
        fake_xarray = FakeVariable(expected.values, expected.dims)
        self.assertVariableIdentical(expected, as_variable(fake_xarray))
        xarray_tuple = (expected.dims, expected.values)
        self.assertVariableIdentical(expected, as_variable(xarray_tuple))
        with self.assertRaisesRegexp(TypeError, 'cannot convert arg'):
            as_variable(tuple(data))
        with self.assertRaisesRegexp(TypeError, 'cannot infer .+ dimensions'):
            as_variable(data)
        actual = as_variable(data, key='x')
        self.assertVariableIdentical(expected, actual)
        actual = as_variable(0)
        expected = Variable([], 0)
        self.assertVariableIdentical(expected, actual)

    def test_repr(self):
        v = Variable(['time', 'x'], [[1, 2, 3], [4, 5, 6]], {'foo': 'bar'})
        expected = dedent("""
        <xray.Variable (time: 2, x: 3)>
        array([[1, 2, 3],
               [4, 5, 6]])
        Attributes:
            foo: bar
        """).strip()
        self.assertEqual(expected, repr(v))

    def test_repr_lazy_data(self):
        # repr must not materialize lazily indexed data
        v = Variable('x', LazilyIndexedArray(np.arange(2e5)))
        self.assertIn('200000 values with dtype', repr(v))
        self.assertIsInstance(v._data, LazilyIndexedArray)
def test_items(self):
data = np.random.random((10, 11))
v = Variable(['x', 'y'], data)
# test slicing
self.assertVariableIdentical(v, v[:])
self.assertVariableIdentical(v, v[...])
self.assertVariableIdentical(Variable(['y'], data[0]), v[0])
self.assertVariableIdentical(Variable(['x'], data[:, 0]), v[:, 0])
self.assertVariableIdentical(Variable(['x', 'y'], data[:3, :2]),
v[:3, :2])
# test array indexing
x = Variable(['x'], np.arange(10))
y = Variable(['y'], np.arange(11))
self.assertVariableIdentical(v, v[x.values])
self.assertVariableIdentical(v, v[x])
self.assertVariableIdentical(v[:3], v[x < 3])
self.assertVariableIdentical(v[:, 3:], v[:, y >= 3])
self.assertVariableIdentical(v[:3, 3:], v[x < 3, y >= 3])
self.assertVariableIdentical(v[:3, :2], v[x[:3], y[:2]])
self.assertVariableIdentical(v[:3, :2], v[range(3), range(2)])
# test iteration
for n, item in enumerate(v):
self.assertVariableIdentical(Variable(['y'], data[n]), item)
with self.assertRaisesRegexp(TypeError, 'iteration over a 0-d'):
iter(Variable([], 0))
# test setting
v.values[:] = 0
self.assertTrue(np.all(v.values == 0))
# test orthogonal setting
v[range(10), range(11)] = 1
self.assertArrayEqual(v.values, np.ones((10, 11)))
    def test_isel(self):
        """isel() by dimension name matches positional indexing; bad dims raise."""
        v = Variable(['time', 'x'], self.d)
        self.assertVariableIdentical(v.isel(time=slice(None)), v)
        self.assertVariableIdentical(v.isel(time=0), v[0])
        self.assertVariableIdentical(v.isel(time=slice(0, 3)), v[:3])
        self.assertVariableIdentical(v.isel(x=0), v[:, 0])
        with self.assertRaisesRegexp(ValueError, 'do not exist'):
            v.isel(not_a_dim=0)
    def test_index_0d_numpy_string(self):
        """Indexing a 0-d string Variable with () returns an identical Variable."""
        # regression test to verify our work around for indexing 0d strings
        v = Variable([], np.string_('asdf'))
        self.assertVariableIdentical(v[()], v)
    def test_transpose(self):
        """transpose()/T reorder dimensions; named-order transposes round-trip."""
        v = Variable(['time', 'x'], self.d)
        v2 = Variable(['x', 'time'], self.d.T)
        self.assertVariableIdentical(v, v2.transpose())
        self.assertVariableIdentical(v.transpose(), v.T)
        x = np.random.randn(2, 3, 4, 5)
        w = Variable(['a', 'b', 'c', 'd'], x)
        # einsum gives the expected data layout for each dimension order
        w2 = Variable(['d', 'b', 'c', 'a'], np.einsum('abcd->dbca', x))
        self.assertEqual(w2.shape, (5, 3, 4, 2))
        self.assertVariableIdentical(w2, w.transpose('d', 'b', 'c', 'a'))
        self.assertVariableIdentical(w, w2.transpose('a', 'b', 'c', 'd'))
        w3 = Variable(['b', 'c', 'd', 'a'], np.einsum('abcd->bcda', x))
        self.assertVariableIdentical(w, w3.transpose('a', 'b', 'c', 'd'))
    def test_squeeze(self):
        """squeeze() drops length-1 dims (all, by name, or by list); len>1 raises."""
        v = Variable(['x', 'y'], [[1]])
        self.assertVariableIdentical(Variable([], 1), v.squeeze())
        self.assertVariableIdentical(Variable(['y'], [1]), v.squeeze('x'))
        self.assertVariableIdentical(Variable(['y'], [1]), v.squeeze(['x']))
        self.assertVariableIdentical(Variable(['x'], [1]), v.squeeze('y'))
        self.assertVariableIdentical(Variable([], 1), v.squeeze(['x', 'y']))
        v = Variable(['x', 'y'], [[1, 2]])
        self.assertVariableIdentical(Variable(['y'], [1, 2]), v.squeeze())
        self.assertVariableIdentical(Variable(['y'], [1, 2]), v.squeeze('x'))
        # squeezing a dimension of length > 1 is an error
        with self.assertRaisesRegexp(ValueError, 'cannot select a dimension'):
            v.squeeze('y')
    def test_get_axis_num(self):
        """get_axis_num() maps a dim name (or list of names) to axis position(s)."""
        v = Variable(['x', 'y', 'z'], np.random.randn(2, 3, 4))
        self.assertEqual(v.get_axis_num('x'), 0)
        # a list of names returns a tuple of axes, preserving request order
        self.assertEqual(v.get_axis_num(['x']), (0,))
        self.assertEqual(v.get_axis_num(['x', 'y']), (0, 1))
        self.assertEqual(v.get_axis_num(['z', 'y', 'x']), (2, 1, 0))
        with self.assertRaisesRegexp(ValueError, 'not found in array dim'):
            v.get_axis_num('foobar')
    def test_expand_dims(self):
        """expand_dims() adds dims from a list (size 1) or dict (given sizes)."""
        v = Variable(['x'], [0, 1])
        actual = v.expand_dims(['x', 'y'])
        expected = Variable(['x', 'y'], [[0], [1]])
        self.assertVariableIdentical(actual, expected)
        # the order of the requested dims determines the result's dim order
        actual = v.expand_dims(['y', 'x'])
        self.assertVariableIdentical(actual, expected.T)
        # a dict maps new dim names to their sizes (values are broadcast)
        actual = v.expand_dims(OrderedDict([('x', 2), ('y', 2)]))
        expected = Variable(['x', 'y'], [[0, 0], [1, 1]])
        self.assertVariableIdentical(actual, expected)
        v = Variable(['foo'], [0, 1])
        actual = v.expand_dims('foo')
        expected = v
        self.assertVariableIdentical(actual, expected)
        # requested dims must include all existing dims
        with self.assertRaisesRegexp(ValueError, 'must be a superset'):
            v.expand_dims(['z'])
    def test_broadcasting_math(self):
        """Arithmetic broadcasts by dimension name; einsum supplies expected data."""
        x = np.random.randn(2, 3)
        v = Variable(['a', 'b'], x)
        # 1d to 2d broadcasting
        self.assertVariableIdentical(
            v * v,
            Variable(['a', 'b'], np.einsum('ab,ab->ab', x, x)))
        self.assertVariableIdentical(
            v * v[0],
            Variable(['a', 'b'], np.einsum('ab,b->ab', x, x[0])))
        # dim order of the result follows the left operand
        self.assertVariableIdentical(
            v[0] * v,
            Variable(['b', 'a'], np.einsum('b,ab->ba', x[0], x)))
        self.assertVariableIdentical(
            v[0] * v[:, 0],
            Variable(['b', 'a'], np.einsum('b,a->ba', x[0], x[:, 0])))
        # higher dim broadcasting
        y = np.random.randn(3, 4, 5)
        w = Variable(['b', 'c', 'd'], y)
        self.assertVariableIdentical(
            v * w, Variable(['a', 'b', 'c', 'd'],
                            np.einsum('ab,bcd->abcd', x, y)))
        self.assertVariableIdentical(
            w * v, Variable(['b', 'c', 'd', 'a'],
                            np.einsum('bcd,ab->bcda', y, x)))
        self.assertVariableIdentical(
            v * w[0], Variable(['a', 'b', 'c', 'd'],
                               np.einsum('ab,cd->abcd', x, y[0])))
    def test_broadcasting_failures(self):
        """Mismatched lengths on a shared dim, or duplicate dims, raise ValueError."""
        a = Variable(['x'], np.arange(10))
        b = Variable(['x'], np.arange(5))
        c = Variable(['x', 'x'], np.arange(100).reshape(10, 10))
        with self.assertRaisesRegexp(ValueError, 'mismatched lengths'):
            a + b
        with self.assertRaisesRegexp(ValueError, 'duplicate dimensions'):
            a + c
    def test_inplace_math(self):
        """In-place ops keep identity and mutate the backing ndarray in place."""
        x = np.arange(5)
        v = Variable(['x'], x)
        v2 = v
        v2 += 1
        self.assertIs(v, v2)
        # since we provided an ndarray for data, it is also modified in-place
        self.assertIs(source_ndarray(v.values), x)
        self.assertArrayEqual(v.values, np.arange(5) + 1)
        # in-place ops may not introduce new dimensions
        with self.assertRaisesRegexp(ValueError, 'dimensions cannot change'):
            v += Variable('y', np.arange(5))
    def test_reduce(self):
        """reduce() accepts dim names or axis numbers and drops attributes."""
        v = Variable(['x', 'y'], self.d, {'ignored': 'attributes'})
        self.assertVariableIdentical(v.reduce(np.std, 'x'),
                                     Variable(['y'], self.d.std(axis=0)))
        # dim name and axis number are interchangeable
        self.assertVariableIdentical(v.reduce(np.std, axis=0),
                                     v.reduce(np.std, dim='x'))
        self.assertVariableIdentical(v.reduce(np.std, ['y', 'x']),
                                     Variable([], self.d.std(axis=(0, 1))))
        self.assertVariableIdentical(v.reduce(np.std),
                                     Variable([], self.d.std()))
        self.assertVariableIdentical(
            v.reduce(np.mean, 'x').reduce(np.std, 'y'),
            Variable([], self.d.mean(axis=0).std()))
        # bound reduction methods delegate to reduce()
        self.assertVariableIdentical(v.mean('x'), v.reduce(np.mean, 'x'))
        with self.assertRaisesRegexp(ValueError, 'cannot supply both'):
            v.mean(dim='x', axis=0)
    def test_reduce_funcs(self):
        """Named reductions (mean/prod/var/median/any/all/max) and skipna handling."""
        v = Variable('x', np.array([1, np.nan, 2, 3]))
        # NaNs are skipped by default
        self.assertVariableIdentical(v.mean(), Variable([], 2))
        self.assertVariableIdentical(v.mean(skipna=True), Variable([], 2))
        self.assertVariableIdentical(v.mean(skipna=False), Variable([], np.nan))
        self.assertVariableIdentical(np.mean(v), Variable([], 2))
        self.assertVariableIdentical(v.prod(), Variable([], 6))
        self.assertVariableIdentical(v.var(), Variable([], 2.0 / 3))
        # nan-skipping median requires numpy >= 1.9
        if LooseVersion(np.__version__) < '1.9':
            with self.assertRaises(NotImplementedError):
                v.median()
        else:
            self.assertVariableIdentical(v.median(), Variable([], 2))
        v = Variable('x', [True, False, False])
        self.assertVariableIdentical(v.any(), Variable([], True))
        self.assertVariableIdentical(v.all(dim='x'), Variable([], False))
        v = Variable('t', pd.date_range('2000-01-01', periods=3))
        # skipna is not supported for datetime64 data
        with self.assertRaises(NotImplementedError):
            v.max(skipna=True)
        self.assertVariableIdentical(
            v.max(), Variable([], pd.Timestamp('2000-01-03')))
    def test_reduce_keep_attrs(self):
        """Reductions drop attrs by default and preserve them with keep_attrs=True."""
        _attrs = {'units': 'test', 'long_name': 'testing'}
        v = Variable(['x', 'y'], self.d, _attrs)
        # Test dropped attrs
        vm = v.mean()
        self.assertEqual(len(vm.attrs), 0)
        self.assertEqual(vm.attrs, OrderedDict())
        # Test kept attrs
        vm = v.mean(keep_attrs=True)
        self.assertEqual(len(vm.attrs), len(_attrs))
        self.assertEqual(vm.attrs, _attrs)
    def test_count(self):
        """count() tallies non-missing values for numeric, object and bool data."""
        expected = Variable([], 3)
        actual = Variable(['x'], [1, 2, 3, np.nan]).count()
        self.assertVariableIdentical(expected, actual)
        # NaN also counts as missing inside object arrays
        v = Variable(['x'], np.array(['1', '2', '3', np.nan], dtype=object))
        actual = v.count()
        self.assertVariableIdentical(expected, actual)
        actual = Variable(['x'], [True, False, True]).count()
        self.assertVariableIdentical(expected, actual)
        self.assertEqual(actual.dtype, int)
        # counting along one dimension keeps the others
        expected = Variable(['x'], [2, 3])
        actual = Variable(['x', 'y'], [[1, 0, np.nan], [1, 1, 1]]).count('y')
        self.assertVariableIdentical(expected, actual)
class TestCoordinate(TestCase, VariableSubclassTestCases):
    """Coordinate-specific behavior on top of the shared Variable test suite."""
    # the shared VariableSubclassTestCases use `cls` to build instances
    cls = staticmethod(Coordinate)
    def test_init(self):
        """Coordinates must be exactly one-dimensional."""
        with self.assertRaisesRegexp(ValueError, 'must be 1-dimensional'):
            Coordinate((), 0)
    def test_to_index(self):
        """to_index() yields a pandas.Index named after the dimension."""
        data = 0.5 * np.arange(10)
        v = Coordinate(['time'], data, {'foo': 'bar'})
        self.assertTrue(pd.Index(data, name='time').identical(v.to_index()))
    def test_data(self):
        """Backing data converts lazily from ndarray to an immutable Index."""
        x = Coordinate('x', np.arange(3.0))
        # data should be initially saved as an ndarray
        self.assertIs(type(x._data), np.ndarray)
        self.assertEqual(float, x.dtype)
        self.assertArrayEqual(np.arange(3), x)
        self.assertEqual(float, x.values.dtype)
        # after inspecting x.values, the Coordinate value will be saved as an Index
        self.assertIsInstance(x._data, PandasIndexAdapter)
        with self.assertRaisesRegexp(TypeError, 'cannot be modified'):
            x[:] = 0
    def test_name(self):
        """The name property mirrors the dimension and is read-only."""
        coord = Coordinate('x', [10.0])
        self.assertEqual(coord.name, 'x')
        with self.assertRaises(AttributeError):
            coord.name = 'y'
class TestAsCompatibleData(TestCase):
    def test_unchanged_types(self):
        """Already-compatible wrappers pass through without copying the data."""
        types = (np.asarray, PandasIndexAdapter, indexing.LazilyIndexedArray)
        for t in types:
            for data in [np.arange(3),
                         pd.date_range('2000-01-01', periods=3),
                         pd.date_range('2000-01-01', periods=3).values]:
                x = t(data)
                # same underlying buffer => no copy was made
                self.assertIs(source_ndarray(x),
                              source_ndarray(_as_compatible_data(x)))
    def test_converted_types(self):
        """Lists and DataFrames are converted to plain ndarrays, dtype preserved."""
        for input_array in [[[0, 1, 2]], pd.DataFrame([[0, 1, 2]])]:
            actual = _as_compatible_data(input_array)
            self.assertArrayEqual(np.asarray(input_array), actual)
            self.assertEqual(np.ndarray, type(actual))
            self.assertEqual(np.asarray(input_array).dtype, actual.dtype)
    def test_masked_array(self):
        """Masked arrays convert to ndarrays; masked entries become float NaN."""
        original = np.ma.MaskedArray(np.arange(5))
        expected = np.arange(5)
        actual = _as_compatible_data(original)
        self.assertArrayEqual(expected, actual)
        # no mask set => integer dtype is preserved
        self.assertEqual(np.dtype(int), actual.dtype)
        original = np.ma.MaskedArray(np.arange(5), mask=4 * [False] + [True])
        expected = np.arange(5.0)
        expected[-1] = np.nan
        actual = _as_compatible_data(original)
        self.assertArrayEqual(expected, actual)
        # masked values force promotion to float so NaN can represent them
        self.assertEqual(np.dtype(float), actual.dtype)
def test_datetime(self):
expected = np.datetime64('2000-01-01T00Z')
actual = _as_compatible_data(expected)
self.assertEqual(expected, actual)
self.assertEqual(np.ndarray, type(actual))
self.assertEqual(np.dtype('datetime64[ns]'), actual.dtype)
expected = np.array([np.datetime64('2000-01-01T00Z')])
actual = _as_compatible_data(expected)
self.assertEqual(np.asarray(expected), actual)
self.assertEqual(np.ndarray, type(actual))
self.assertEqual(np.dtype('datetime64[ns]'), actual.dtype)
expected = np.array([np.datetime64('2000-01-01T00Z', 'ns')])
actual = _as_compatible_data(expected)
self.assertEqual(np.asarray(expected), actual)
self.assertEqual(np.ndarray, type(actual))
self.assertEqual(np.dtype('datetime64[ns]'), actual.dtype)
self.assertIs(expected, source_ndarray(np.asarray(actual)))
expected = np.datetime64('2000-01-01T00Z', 'ns')
actual = _as_compatible_data(datetime(2000, 1, 1))
self.assertEqual(np.asarray(expected), actual)
self.assertEqual(np.ndarray, type(actual))
self.assertEqual(np.dtype('datetime64[ns]'), actual.dtype)
| clarkfitzg/xray | xray/test/test_variable.py | Python | apache-2.0 | 35,943 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse.spark;
import java.util.Stack;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.parse.SemanticException;
/**
* FileSinkProcessor handles addition of merge, move and stats tasks for filesinks.
* Cloned from tez's FileSinkProcessor.
*/
public class SparkFileSinkProcessor implements NodeProcessor {
  private static final Logger LOGGER =
      LoggerFactory.getLogger(SparkFileSinkProcessor.class.getName());

  /*
   * (non-Javadoc)
   * We should ideally not modify the tree we traverse. However, since we need
   * to walk the tree at any time when we modify the operator, we might as well
   * do it here.
   */
  @Override
  public Object process(Node nd, Stack<Node> stack,
      NodeProcessorCtx procCtx, Object... nodeOutputs)
      throws SemanticException {
    // Defer all real work to a later pass: simply record the visited file
    // sink operator in the shared traversal context.
    final GenSparkProcContext sparkCtx = (GenSparkProcContext) procCtx;
    final FileSinkOperator sinkOp = (FileSinkOperator) nd;
    sparkCtx.fileSinkSet.add(sinkOp);
    return true;
  }
}
| b-slim/hive | ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkFileSinkProcessor.java | Java | apache-2.0 | 2,090 |
/*
* arcus-java-client : Arcus Java client
* Copyright 2010-2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.spy.memcached.collection;
public class ListGet extends CollectionGet {

  /** Index of the first element of a list. */
  public static final int FIRST = 0;
  /** Index of the last element of a list. */
  public static final int LAST = -1;

  private static final String command = "lop get";

  /**
   * Fetches the single element at {@code index}, optionally deleting it.
   */
  public ListGet(int index, boolean delete) {
    this.headerCount = 1;
    this.range = String.valueOf(index);
    this.delete = delete;
  }

  /**
   * Fetches the single element at {@code index}; optionally deletes it and
   * drops the list once it becomes empty.
   */
  public ListGet(int index, boolean delete, boolean dropIfEmpty) {
    this(index, delete);
    this.dropIfEmpty = dropIfEmpty;
  }

  /**
   * Fetches the inclusive index range {@code from..to}, optionally deleting it.
   */
  public ListGet(int from, int to, boolean delete) {
    this.headerCount = 1;
    this.range = from + ".." + to;
    this.delete = delete;
  }

  /**
   * Fetches the inclusive index range {@code from..to}; optionally deletes it
   * and drops the list once it becomes empty.
   */
  public ListGet(int from, int to, boolean delete, boolean dropIfEmpty) {
    this(from, to, delete);
    this.dropIfEmpty = dropIfEmpty;
  }

  public String getRange() {
    return range;
  }

  public void setRange(String range) {
    this.range = range;
  }

  @Override
  public byte[] getAddtionalArgs() {
    // "lop get" carries no additional argument bytes.
    return null;
  }

  /**
   * Renders the argument portion of the command line; the rendered string is
   * cached after the first call.
   */
  public String stringify() {
    if (str == null) {
      StringBuilder args = new StringBuilder(range);
      if (delete) {
        // "drop" also removes the list when it becomes empty; "delete" keeps it.
        args.append(dropIfEmpty ? " drop" : " delete");
      }
      str = args.toString();
    }
    return str;
  }

  public String getCommand() {
    return command;
  }

  public void decodeItemHeader(String itemHeader) {
    // The item header of "lop get" is just the value length in bytes.
    this.dataLength = Integer.parseInt(itemHeader);
  }
}
| whchoi83/arcus-java-client | src/main/java/net/spy/memcached/collection/ListGet.java | Java | apache-2.0 | 2,022 |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.skyframe;
import com.google.common.base.Preconditions;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
/**
* A node in the graph without the means to access its value. All operations on this class are
* thread-safe (note, however, the warning on the return value of {@link #markDirty}).
*
* <p>This interface is public only for the benefit of alternative graph implementations outside of
* the package.
*/
public interface ThinNodeEntry {
  /** Returns whether the entry has been built and is finished evaluating. */
  @ThreadSafe
  boolean isDone();
  /**
   * Returns true if the entry is marked dirty, meaning that at least one of its transitive
   * dependencies is marked changed.
   */
  @ThreadSafe
  boolean isDirty();
  /**
   * Returns true if the entry is marked changed, meaning that it must be re-evaluated even if its
   * dependencies' values have not changed.
   */
  @ThreadSafe
  boolean isChanged();
  /**
   * Marks this node dirty, or changed if {@code isChanged} is true.
   *
   * <p>A dirty node P is re-evaluated during the evaluation phase if it's requested and directly
   * depends on some node C whose value changed since the last evaluation of P. If it's requested
   * and there is no such node C, P is marked clean.
   *
   * <p>A changed node is re-evaluated during the evaluation phase if it's requested (regardless of
   * the state of its dependencies).
   *
   * @return a {@link MarkedDirtyResult} indicating whether the call was redundant and which may
   *     include the node's reverse deps
   */
  @ThreadSafe
  MarkedDirtyResult markDirty(boolean isChanged) throws InterruptedException;
  /** Returned by {@link #markDirty}. */
  interface MarkedDirtyResult {
    /** Returns true iff the node was clean prior to the {@link #markDirty} call. */
    boolean wasClean();
    /**
     * Returns true iff the call to {@link #markDirty} was the same as some previous call to {@link
     * #markDirty} (i.e., sharing the same {@code isChanged} parameter value) since the last time
     * the node was clean.
     *
     * <p>More specifically, this returns true iff the call was {@code n.markDirty(b)} and prior to
     * the call {@code n.isDirty() && n.isChanged() == b}).
     */
    boolean wasCallRedundant();
    /**
     * If {@code wasClean()}, this returns an iterable of the node's reverse deps for efficiency,
     * because the {@link #markDirty} caller may be doing graph invalidation, and after dirtying a
     * node, the invalidation process may want to dirty the node's reverse deps.
     *
     * <p>If {@code !wasClean()}, this must not be called. It will throw {@link
     * IllegalStateException}.
     *
     * <p>Warning: the returned iterable may be a live view of the reverse deps collection of the
     * marked-dirty node. The consumer of this data must be careful only to iterate over and consume
     * its values while that collection is guaranteed not to change. This is true during
     * invalidation, because reverse deps don't change during invalidation.
     */
    Iterable<SkyKey> getReverseDepsUnsafeIfWasClean();
  }
  /** A {@link MarkedDirtyResult} returned when {@link #markDirty} is called on a clean node. */
  class FromCleanMarkedDirtyResult implements MarkedDirtyResult {
    // May be a live view of the node's reverse deps; see the warning on
    // getReverseDepsUnsafeIfWasClean().
    private final Iterable<SkyKey> reverseDepsUnsafe;
    public FromCleanMarkedDirtyResult(Iterable<SkyKey> reverseDepsUnsafe) {
      this.reverseDepsUnsafe = Preconditions.checkNotNull(reverseDepsUnsafe);
    }
    @Override
    public boolean wasClean() {
      return true; // by construction: this result type is only used for clean nodes
    }
    @Override
    public boolean wasCallRedundant() {
      return false; // dirtying a clean node is never redundant
    }
    @Override
    public Iterable<SkyKey> getReverseDepsUnsafeIfWasClean() {
      return reverseDepsUnsafe;
    }
  }
  /** A {@link MarkedDirtyResult} returned when {@link #markDirty} is called on a dirty node. */
  class FromDirtyMarkedDirtyResult implements MarkedDirtyResult {
    // The result carries no state beyond the redundancy flag, so two shared
    // singletons cover all cases.
    static final FromDirtyMarkedDirtyResult REDUNDANT = new FromDirtyMarkedDirtyResult(true);
    static final FromDirtyMarkedDirtyResult NOT_REDUNDANT = new FromDirtyMarkedDirtyResult(false);
    private final boolean redundant;
    private FromDirtyMarkedDirtyResult(boolean redundant) {
      this.redundant = redundant;
    }
    @Override
    public boolean wasClean() {
      return false; // by construction: this result type is only used for dirty nodes
    }
    @Override
    public boolean wasCallRedundant() {
      return redundant;
    }
    @Override
    public Iterable<SkyKey> getReverseDepsUnsafeIfWasClean() {
      // Contract: only legal when wasClean() is true, which it never is here.
      throw new IllegalStateException();
    }
  }
}
| dropbox/bazel | src/main/java/com/google/devtools/build/skyframe/ThinNodeEntry.java | Java | apache-2.0 | 5,222 |
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package ch.boye.httpclientandroidlib.entity;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import ch.boye.httpclientandroidlib.HttpEntity;
import ch.boye.httpclientandroidlib.util.EntityUtils;
/**
* A wrapping entity that buffers it content if necessary.
* The buffered entity is always repeatable.
* If the wrapped entity is repeatable itself, calls are passed through.
* If the wrapped entity is not repeatable, the content is read into a
* buffer once and provided from there as often as required.
*
* @since 4.0
*/
public class BufferedHttpEntity extends HttpEntityWrapper {

    /**
     * Buffered copy of the wrapped content, or {@code null} when the wrapped
     * entity can already be replayed on its own.
     */
    private final byte[] buffer;

    /**
     * Creates a new buffered entity wrapper.
     *
     * @param entity the entity to wrap, not null
     * @throws IllegalArgumentException if wrapped is null
     */
    public BufferedHttpEntity(final HttpEntity entity) throws IOException {
        super(entity);
        if (entity.isRepeatable() && entity.getContentLength() >= 0) {
            // Already repeatable with a known length: pass calls through.
            this.buffer = null;
        } else {
            // Read the content once so it can be served repeatedly.
            this.buffer = EntityUtils.toByteArray(entity);
        }
    }

    public long getContentLength() {
        return this.buffer != null
                ? this.buffer.length
                : wrappedEntity.getContentLength();
    }

    public InputStream getContent() throws IOException {
        return this.buffer != null
                ? new ByteArrayInputStream(this.buffer)
                : wrappedEntity.getContent();
    }

    /**
     * Tells that this entity does not have to be chunked.
     *
     * @return <code>false</code>
     */
    public boolean isChunked() {
        return (buffer == null) && wrappedEntity.isChunked();
    }

    /**
     * Tells that this entity is repeatable.
     *
     * @return <code>true</code>
     */
    public boolean isRepeatable() {
        return true;
    }

    public void writeTo(final OutputStream outstream) throws IOException {
        if (outstream == null) {
            throw new IllegalArgumentException("Output stream may not be null");
        }
        if (this.buffer == null) {
            wrappedEntity.writeTo(outstream);
        } else {
            outstream.write(this.buffer);
        }
    }

    // non-javadoc, see interface HttpEntity
    public boolean isStreaming() {
        return (buffer == null) && wrappedEntity.isStreaming();
    }

} // class BufferedHttpEntity
| wilebeast/FireFox-OS | B2G/gecko/mobile/android/base/httpclientandroidlib/entity/BufferedHttpEntity.java | Java | apache-2.0 | 3,714 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.giraph.graph;
/**
* Simple immutable structure for storing a final vertex and edge count.
*/
public class VertexEdgeCount {
  /** Vertex total; never modified after construction. */
  private final long vertexCount;
  /** Edge total; never modified after construction. */
  private final long edgeCount;

  /** Creates an empty count (zero vertices, zero edges). */
  public VertexEdgeCount() {
    this(0, 0);
  }

  /**
   * Creates a count with the given totals.
   *
   * @param vertexCount number of vertices
   * @param edgeCount number of edges
   */
  public VertexEdgeCount(long vertexCount, long edgeCount) {
    this.vertexCount = vertexCount;
    this.edgeCount = edgeCount;
  }

  /** @return the vertex total */
  public long getVertexCount() {
    return vertexCount;
  }

  /** @return the edge total */
  public long getEdgeCount() {
    return edgeCount;
  }

  /**
   * Returns a new immutable count equal to this one plus the given count.
   *
   * @param vertexEdgeCount counts to add
   * @return combined count
   */
  public VertexEdgeCount incrVertexEdgeCount(
      VertexEdgeCount vertexEdgeCount) {
    return incrVertexEdgeCount(
        vertexEdgeCount.getVertexCount(), vertexEdgeCount.getEdgeCount());
  }

  /**
   * Returns a new immutable count equal to this one plus the given totals.
   *
   * @param vertexCount vertices to add
   * @param edgeCount edges to add
   * @return combined count
   */
  public VertexEdgeCount incrVertexEdgeCount(
      long vertexCount, long edgeCount) {
    return new VertexEdgeCount(
        this.vertexCount + vertexCount,
        this.edgeCount + edgeCount);
  }

  @Override
  public String toString() {
    return String.format("(v=%d, e=%d)", getVertexCount(), getEdgeCount());
  }
}
| sscdotopen/giraph-compensations | src/main/java/org/apache/giraph/graph/VertexEdgeCount.java | Java | apache-2.0 | 2,054 |
/* RefinedScope
*
* $Id: RefinedScope.java 4651 2006-09-25 18:31:13Z paul_jack $
*
* Created on Jul 16, 2004
*
* Copyright (C) 2004 Internet Archive.
*
* This file is part of the Heritrix web crawler (crawler.archive.org).
*
* Heritrix is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* any later version.
*
* Heritrix is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser Public License for more details.
*
* You should have received a copy of the GNU Lesser Public License
* along with Heritrix; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package org.archive.crawler.scope;
import org.archive.crawler.framework.Filter;
/**
* Superclass for Scopes which make use of "additional focus"
* to add items by pattern, or want to swap in alternative
* transitive filter.
*
* @author gojomo
*/
public abstract class RefinedScope extends ClassicScope {
    /** Settings key under which the transitive filter is registered. */
    public static final String ATTR_TRANSITIVE_FILTER = "transitiveFilter";
    /** Settings key under which the additional-focus filter is registered. */
    public static final String ATTR_ADDITIONAL_FOCUS_FILTER =
        "additionalScopeFocus";
    // Filters registered in the constructor; both come from the deprecated
    // org.archive.crawler.filter package.
    Filter additionalFocusFilter;
    Filter transitiveFilter;
    /**
     * Constructor: registers the additional-focus (file pattern) and
     * transitive (transclusion) filters on this scope's settings definition.
     *
     * @param name scope name passed to {@link ClassicScope}
     */
    @SuppressWarnings("deprecation")
    public RefinedScope(String name) {
        super(name);
        this.additionalFocusFilter = (Filter) addElementToDefinition(
            new org.archive.crawler.filter.FilePatternFilter(
                ATTR_ADDITIONAL_FOCUS_FILTER));
        this.transitiveFilter = (Filter) addElementToDefinition(
            new org.archive.crawler.filter.TransclusionFilter(
                ATTR_TRANSITIVE_FILTER));
    }
    /**
     * @param o
     * @return True if transitive filter accepts passed object.
     */
    protected boolean transitiveAccepts(Object o) {
        return this.transitiveFilter.accepts(o);
    }
    /**
     * @param o object to test
     * @return True if the additional-focus filter accepts the passed object.
     */
    protected boolean additionalFocusAccepts(Object o) {
        return additionalFocusFilter.accepts(o);
    }
}
| gaowangyizu/myHeritrix | myHeritrix/src/org/archive/crawler/scope/RefinedScope.java | Java | apache-2.0 | 2,250 |
# Adds lookup indexes to the PayPal Express responses table so rows can be
# found efficiently by Kill Bill account id or by payer email.
class AddIndexes < ActiveRecord::Migration
  def change
    # Reversible: `change` lets ActiveRecord derive the down migration.
    add_index :paypal_express_responses, :kb_account_id,
              name: 'idx_paypal_express_responses_on_kb_account_id'
    add_index :paypal_express_responses, :payer_email,
              name: 'idx_paypal_express_responses_on_payer_email'
  end
end
| daliwei/killbill-paypal-express-plugin | db/migrate/20161209090000_add_indexes.rb | Ruby | apache-2.0 | 293 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.core import exceptions
from telemetry.core import util
from telemetry import decorators
from telemetry.internal.actions import page_action
from telemetry.page import action_runner as action_runner_module
from telemetry.testing import tab_test_case
from telemetry.timeline import model
from telemetry.timeline import tracing_category_filter
from telemetry.timeline import tracing_options
from telemetry.web_perf import timeline_interaction_record as tir_module
util.AddDirToPythonPath(util.GetTelemetryDir(), 'third_party', 'mock')
import mock
class ActionRunnerInteractionTest(tab_test_case.TabTestCase):
  """Verifies that ActionRunner interactions appear on the tracing timeline."""
  def GetInteractionRecords(self, trace_data):
    """Extracts TimelineInteractionRecords for this tab from trace data."""
    timeline_model = model.TimelineModel(trace_data)
    renderer_thread = timeline_model.GetRendererThreadFromTabId(self._tab.id)
    return [
        tir_module.TimelineInteractionRecord.FromAsyncEvent(e)
        for e in renderer_thread.async_slices
        if tir_module.IsTimelineInteractionRecord(e.name)
    ]
  def VerifyIssuingInteractionRecords(self, **interaction_kwargs):
    """Traces one CreateInteraction() and asserts exactly one record is issued.

    Each keyword in interaction_kwargs must come back as a truthy attribute of
    the issued record.
    """
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('interaction_enabled_page.html')
    action_runner.Wait(1)
    options = tracing_options.TracingOptions()
    options.enable_chrome_trace = True
    self._browser.platform.tracing_controller.Start(
        options, tracing_category_filter.CreateNoOverheadFilter())
    with action_runner.CreateInteraction('InteractionName',
                                         **interaction_kwargs):
      pass
    trace_data = self._browser.platform.tracing_controller.Stop()
    records = self.GetInteractionRecords(trace_data)
    self.assertEqual(
        1, len(records),
        'Failed to issue the interaction record on the tracing timeline.'
        ' Trace data:\n%s' % repr(trace_data._raw_data))
    self.assertEqual('InteractionName', records[0].label)
    for attribute_name in interaction_kwargs:
      self.assertTrue(getattr(records[0], attribute_name))
  # Test disabled for android: crbug.com/437057
  @decorators.Disabled('android', 'chromeos')
  def testIssuingMultipleMeasurementInteractionRecords(self):
    """A repeatable interaction should produce a record flagged repeatable."""
    self.VerifyIssuingInteractionRecords(repeatable=True)
class ActionRunnerTest(tab_test_case.TabTestCase):
  def testExecuteJavaScript(self):
    """ExecuteJavaScript runs statements in the page's JS context."""
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('blank.html')
    action_runner.ExecuteJavaScript('var testing = 42;')
    self.assertEqual(42, self._tab.EvaluateJavaScript('testing'))
  def testWaitForNavigate(self):
    """WaitForNavigate blocks until a click-triggered navigation completes."""
    self.Navigate('page_with_link.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    action_runner.ClickElement('#clickme')
    action_runner.WaitForNavigate()
    # the new document must be at least interactive once the wait returns
    self.assertTrue(self._tab.EvaluateJavaScript(
        'document.readyState == "interactive" || '
        'document.readyState == "complete"'))
    self.assertEqual(
        self._tab.EvaluateJavaScript('document.location.pathname;'),
        '/blank.html')
  def testWait(self):
    """Wait(seconds) sleeps long enough for page timers to fire."""
    action_runner = action_runner_module.ActionRunner(self._tab)
    self.Navigate('blank.html')
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() { window.testing = 101; }, 50);')
    action_runner.Wait(0.1)
    self.assertEqual(101, self._tab.EvaluateJavaScript('window.testing'))
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() { window.testing = 102; }, 100);')
    action_runner.Wait(0.2)
    self.assertEqual(102, self._tab.EvaluateJavaScript('window.testing'))
  def testWaitForJavaScriptCondition(self):
    """WaitForJavaScriptCondition returns for both immediate and delayed truth."""
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('blank.html')
    action_runner.ExecuteJavaScript('window.testing = 219;')
    # condition already true: should return promptly
    action_runner.WaitForJavaScriptCondition(
        'window.testing == 219', timeout_in_seconds=0.1)
    # condition becomes true via a timer within the timeout
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() { window.testing = 220; }, 50);')
    action_runner.WaitForJavaScriptCondition(
        'window.testing == 220', timeout_in_seconds=0.1)
    self.assertEqual(220, self._tab.EvaluateJavaScript('window.testing'))
  def testWaitForElement(self):
    """WaitForElement finds nodes by selector, text and element_function."""
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('blank.html')
    # element already present before the wait
    action_runner.ExecuteJavaScript(
        '(function() {'
        '  var el = document.createElement("div");'
        '  el.id = "test1";'
        '  el.textContent = "foo";'
        '  document.body.appendChild(el);'
        '})()')
    action_runner.WaitForElement('#test1', timeout_in_seconds=0.1)
    action_runner.WaitForElement(text='foo', timeout_in_seconds=0.1)
    action_runner.WaitForElement(
        element_function='document.getElementById("test1")')
    # element appears asynchronously within the timeout
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() {'
        '  var el = document.createElement("div");'
        '  el.id = "test2";'
        '  document.body.appendChild(el);'
        '}, 50)')
    action_runner.WaitForElement('#test2', timeout_in_seconds=0.1)
    # text content appears asynchronously
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() {'
        '  document.getElementById("test2").textContent = "bar";'
        '}, 50)')
    action_runner.WaitForElement(text='bar', timeout_in_seconds=0.1)
    # element_function lookup also works for late-added elements
    action_runner.ExecuteJavaScript(
        'window.setTimeout(function() {'
        '  var el = document.createElement("div");'
        '  el.id = "test3";'
        '  document.body.appendChild(el);'
        '}, 50)')
    action_runner.WaitForElement(
        element_function='document.getElementById("test3")')
  def testWaitForElementWithWrongText(self):
    """Text matching must be exact: a substring ('oo' of 'foo') times out."""
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    self.Navigate('blank.html')
    # Insert <div id="test1">foo</div>.
    action_runner.ExecuteJavaScript(
        '(function() {'
        '  var el = document.createElement("div");'
        '  el.id = "test1";'
        '  el.textContent = "foo";'
        '  document.body.appendChild(el);'
        '})()')
    action_runner.WaitForElement('#test1', timeout_in_seconds=0.2)
    # Waiting for text 'oo' (a substring, not the full text) must time out.
    def WaitForElement():
      action_runner.WaitForElement(text='oo', timeout_in_seconds=0.2)
    self.assertRaises(exceptions.TimeoutException, WaitForElement)
  def testClickElement(self):
    """ClickElement works by selector, text, and element_function.

    The page's click handler copies valueSettableByTest into valueToTest,
    so each assertion proves the corresponding click actually fired.
    A click on a missing selector must raise EvaluateException.
    """
    self.Navigate('page_with_clickables.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    action_runner.ExecuteJavaScript('valueSettableByTest = 1;')
    action_runner.ClickElement('#test')
    self.assertEqual(1, action_runner.EvaluateJavaScript('valueToTest'))
    action_runner.ExecuteJavaScript('valueSettableByTest = 2;')
    action_runner.ClickElement(text='Click/tap me')
    self.assertEqual(2, action_runner.EvaluateJavaScript('valueToTest'))
    action_runner.ExecuteJavaScript('valueSettableByTest = 3;')
    action_runner.ClickElement(
        element_function='document.body.firstElementChild;')
    self.assertEqual(3, action_runner.EvaluateJavaScript('valueToTest'))
    # Clicking a non-existent element must surface as an EvaluateException.
    def WillFail():
      action_runner.ClickElement('#notfound')
    self.assertRaises(exceptions.EvaluateException, WillFail)
  @decorators.Disabled('android', 'debug', # crbug.com/437068
                       'chromeos') # crbug.com/483212
  def testTapElement(self):
    """TapElement works by selector, text, and element_function.

    Mirrors testClickElement but uses synthetic tap gestures; the page's
    handler copies valueSettableByTest into valueToTest on each tap.
    """
    self.Navigate('page_with_clickables.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    action_runner.ExecuteJavaScript('valueSettableByTest = 1;')
    action_runner.TapElement('#test')
    self.assertEqual(1, action_runner.EvaluateJavaScript('valueToTest'))
    action_runner.ExecuteJavaScript('valueSettableByTest = 2;')
    action_runner.TapElement(text='Click/tap me')
    self.assertEqual(2, action_runner.EvaluateJavaScript('valueToTest'))
    action_runner.ExecuteJavaScript('valueSettableByTest = 3;')
    action_runner.TapElement(
        element_function='document.body.firstElementChild')
    self.assertEqual(3, action_runner.EvaluateJavaScript('valueToTest'))
    # Tapping a non-existent element must surface as an EvaluateException.
    def WillFail():
      action_runner.TapElement('#notfound')
    self.assertRaises(exceptions.EvaluateException, WillFail)
  @decorators.Disabled('android', # crbug.com/437065.
                       'chromeos') # crbug.com/483212.
  def testScroll(self):
    """ScrollElement/ScrollPage move scroll offsets in the given direction.

    Skipped early when the platform cannot synthesize touch gestures.
    The >75 checks only assert "scrolled a meaningful distance", not an
    exact offset, since gesture distances vary by platform.
    """
    if not page_action.IsGestureSourceTypeSupported(
        self._tab, 'touch'):
      return
    self.Navigate('page_with_swipeables.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    action_runner.ScrollElement(
        selector='#left-right', direction='right', left_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.querySelector("#left-right").scrollLeft') > 75)
    action_runner.ScrollElement(
        selector='#top-bottom', direction='down', top_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.querySelector("#top-bottom").scrollTop') > 75)
    action_runner.ScrollPage(direction='right', left_start_ratio=0.9,
                             distance=100)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.body.scrollLeft') > 75)
  @decorators.Disabled('android', # crbug.com/437065.
                       'chromeos') # crbug.com/483212.
  def testSwipe(self):
    """SwipeElement/SwipePage scroll content via swipe gestures.

    Note the inversion versus testScroll: swiping content left/up moves the
    scroll offset right/down, so the same >75 offset checks apply.
    Skipped early when touch gestures are unsupported.
    """
    if not page_action.IsGestureSourceTypeSupported(
        self._tab, 'touch'):
      return
    self.Navigate('page_with_swipeables.html')
    action_runner = action_runner_module.ActionRunner(self._tab,
                                                      skip_waits=True)
    action_runner.SwipeElement(
        selector='#left-right', direction='left', left_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.querySelector("#left-right").scrollLeft') > 75)
    action_runner.SwipeElement(
        selector='#top-bottom', direction='up', top_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.querySelector("#top-bottom").scrollTop') > 75)
    action_runner.SwipePage(direction='left', left_start_ratio=0.9)
    self.assertTrue(action_runner.EvaluateJavaScript(
        'document.body.scrollLeft') > 75)
class InteractionTest(unittest.TestCase):
  """Unit tests for the Interaction context manager.

  Uses a mocked ActionRunner, so these tests verify only which JavaScript
  console.time/console.timeEnd commands Interaction issues, not browser
  behavior.
  """
  def setUp(self):
    # Mock speccing on ActionRunner so mock_calls records the JS commands.
    self.mock_action_runner = mock.Mock(action_runner_module.ActionRunner)
  def testIssuingInteractionRecordCommand(self):
    """A clean with-block issues both console.time and console.timeEnd."""
    with action_runner_module.Interaction(
        self.mock_action_runner, label='ABC', flags=[]):
      pass
    expected_calls = [
        mock.call.ExecuteJavaScript('console.time("Interaction.ABC");'),
        mock.call.ExecuteJavaScript('console.timeEnd("Interaction.ABC");')]
    self.assertEqual(expected_calls, self.mock_action_runner.mock_calls)
  def testExceptionRaisedInWithInteraction(self):
    """An exception inside the with-block propagates and skips timeEnd."""
    class FooException(Exception):
      pass
    # Test that the Foo exception raised in the with block is propagated to the
    # caller.
    with self.assertRaises(FooException):
      with action_runner_module.Interaction(
          self.mock_action_runner, label='ABC', flags=[]):
        raise FooException()
    # Test that the end console.timeEnd(...) isn't called because exception was
    # raised.
    expected_calls = [
        mock.call.ExecuteJavaScript('console.time("Interaction.ABC");')]
    self.assertEqual(expected_calls, self.mock_action_runner.mock_calls)
| googlearchive/big-rig | app/src/thirdparty/telemetry/internal/actions/action_runner_unittest.py | Python | apache-2.0 | 12,077 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.knightsoftnet.validators.shared;
import de.knightsoftnet.validators.shared.impl.EmptyIfOtherHasValueValidator;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import javax.validation.Constraint;
import javax.validation.Payload;
/**
 * The annotated bean must contain at least two properties:
 * <ul>
 * <li>a field that has to be checked (option <code>field</code>)</li>
 * <li>a field which entry is compared against a value (option <code>fieldCompare</code> and
 * <code>valueCompare</code>)</li>
 * </ul>
 * if the entry of <code>fieldCompare</code> matches <code>valueCompare</code>, <code>field</code>
 * must be empty (null or "").<br>
 * Supported types are beans, <code>null</code> elements are considered valid.<br>
 *
 * @author Manfred Tremmel
 *
 */
@Documented
@Constraint(validatedBy = EmptyIfOtherHasValueValidator.class)
@Target({ElementType.TYPE, ElementType.ANNOTATION_TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface EmptyIfOtherHasValue {
  /**
   * localized message.
   *
   * @return localized validation message
   */
  String message() default "{deKnightsoftnetValidatorsSharedValidationEmptyIfOtherHasValueMessage}";
  /**
   * groups to use.
   *
   * @return array of validation groups
   */
  Class<?>[] groups() default {};
  /**
   * field name to check.
   *
   * @return field/path of the value
   */
  String field();
  /**
   * field name to compare.
   *
   * @return field/path of the value to compare
   */
  String fieldCompare();
  /**
   * value to compare with field name to compare.
   *
   * @return value to compare
   */
  String valueCompare();
  /**
   * payload whatever.
   *
   * @return payload class
   */
  Class<? extends Payload>[] payload() default {};
  /**
   * Defines several {@code @EmptyIfOtherHasValue} annotations on the same element.
   */
  @Target({ElementType.TYPE, ElementType.ANNOTATION_TYPE})
  @Retention(RetentionPolicy.RUNTIME)
  @Documented
  @interface List {
    /**
     * must be empty if other has the given value.
     *
     * @return value
     */
    EmptyIfOtherHasValue[] value();
  }
}
| ManfredTremmel/mt-bean-validators | src/main/java/de/knightsoftnet/validators/shared/EmptyIfOtherHasValue.java | Java | apache-2.0 | 3,057 |
package com.at.androidtraining1.webservices;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.json.JSONArray;
import org.json.JSONException;
import android.util.Log;
public class JSONParser {

    static InputStream iStream = null;
    static JSONArray jarray = null;
    static String json = "";

    public JSONParser() {
    }

    /**
     * Downloads the content of the given URL via HTTP GET and parses the
     * response body as a JSON array.
     *
     * <p>Fix over the previous version: the response reader is now closed in
     * a {@code finally} block, so the connection's input stream is no longer
     * leaked on early return or exception.
     *
     * <p>NOTE(review): on download/parse failure this still returns the
     * shared static {@code jarray}, i.e. potentially the result of a
     * previous call (or {@code null}). Kept for backward compatibility.
     *
     * @param url absolute URL expected to return a JSON array payload
     * @return the parsed {@link JSONArray}, or the previous/{@code null}
     *         value when the request or parsing fails
     */
    public JSONArray getJSONFromUrl(String url) {
        StringBuilder builder = new StringBuilder();
        HttpClient client = new DefaultHttpClient();
        HttpGet httpGet = new HttpGet(url);
        BufferedReader reader = null;
        try {
            HttpResponse response = client.execute(httpGet);
            StatusLine statusLine = response.getStatusLine();
            int statusCode = statusLine.getStatusCode();
            if (statusCode == 200) {
                HttpEntity entity = response.getEntity();
                InputStream content = entity.getContent();
                reader = new BufferedReader(new InputStreamReader(content));
                String line;
                while ((line = reader.readLine()) != null) {
                    builder.append(line);
                }
            } else {
                Log.e("==>", "Failed to download file");
            }
        } catch (ClientProtocolException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Always release the underlying connection stream.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        // Parse the accumulated body into a JSON array.
        try {
            jarray = new JSONArray(builder.toString());
        } catch (JSONException e) {
            Log.e("JSON Parser", "Error parsing data " + e.toString());
        }
        // return JSON Object
        return jarray;
    }
}
| aniXification/AndroidTraining | AndroidTrainingFinal/src/com/at/androidtraining1/webservices/JSONParser.java | Java | apache-2.0 | 2,115 |
//========================================================================
// Accessor properties (getters/setters)
//========================================================================
var p = {
    // Ordinary data properties.
    x: 1.0,
    y: 1.0,
    // The $ prefix merely hints that this property is private;
    // nothing enforces it.
    $n: 0,
    // Accessor properties: reading r derives it from x and y; writing r
    // rescales x and y proportionally so the new magnitude equals r.
    get r() {
        return Math.sqrt(this.x * this.x + this.y * this.y);
    },
    set r(newValue) {
        // `this` refers to the point object the setter is invoked on.
        var oldValue = Math.sqrt(this.x * this.x + this.y * this.y);
        var ratio = newValue / oldValue;
        this.x *= ratio;
        this.y *= ratio;
    }
};
// Accessor properties are inherited through the prototype chain:
// q inherits the r getter/setter from p.
var q = Object.create(p);
q.x = 3;
q.y = 4;
console.log(q.r);//->5
| Ztiany/CodeRepository | Web/JavaScriptDefinitiveGuide-Core/06_对象/03_getter_setter.js | JavaScript | apache-2.0 | 732 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.server.ui;
import com.google.common.collect.ImmutableList;
import io.trino.dispatcher.DispatchManager;
import io.trino.execution.QueryInfo;
import io.trino.execution.QueryState;
import io.trino.security.AccessControl;
import io.trino.server.BasicQueryInfo;
import io.trino.server.ProtocolConfig;
import io.trino.server.security.ResourceSecurity;
import io.trino.spi.QueryId;
import io.trino.spi.TrinoException;
import io.trino.spi.security.AccessDeniedException;
import io.trino.spi.security.GroupProvider;
import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.ForbiddenException;
import javax.ws.rs.GET;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import java.util.List;
import java.util.Locale;
import java.util.NoSuchElementException;
import java.util.Optional;
import static io.trino.connector.system.KillQueryProcedure.createKillQueryException;
import static io.trino.connector.system.KillQueryProcedure.createPreemptQueryException;
import static io.trino.security.AccessControlUtil.checkCanKillQueryOwnedBy;
import static io.trino.security.AccessControlUtil.checkCanViewQueryOwnedBy;
import static io.trino.security.AccessControlUtil.filterQueries;
import static io.trino.server.HttpRequestSessionContext.extractAuthorizedIdentity;
import static io.trino.server.security.ResourceSecurity.AccessType.WEB_UI;
import static java.util.Objects.requireNonNull;
@Path("/ui/api/query")
public class UiQueryResource
{
private final DispatchManager dispatchManager;
private final AccessControl accessControl;
private final GroupProvider groupProvider;
private final Optional<String> alternateHeaderName;
@Inject
public UiQueryResource(DispatchManager dispatchManager, AccessControl accessControl, GroupProvider groupProvider, ProtocolConfig protocolConfig)
{
this.dispatchManager = requireNonNull(dispatchManager, "dispatchManager is null");
this.accessControl = requireNonNull(accessControl, "accessControl is null");
this.groupProvider = requireNonNull(groupProvider, "groupProvider is null");
this.alternateHeaderName = protocolConfig.getAlternateHeaderName();
}
@ResourceSecurity(WEB_UI)
@GET
public List<BasicQueryInfo> getAllQueryInfo(@QueryParam("state") String stateFilter, @Context HttpServletRequest servletRequest, @Context HttpHeaders httpHeaders)
{
QueryState expectedState = stateFilter == null ? null : QueryState.valueOf(stateFilter.toUpperCase(Locale.ENGLISH));
List<BasicQueryInfo> queries = dispatchManager.getQueries();
queries = filterQueries(extractAuthorizedIdentity(servletRequest, httpHeaders, alternateHeaderName, accessControl, groupProvider), queries, accessControl);
ImmutableList.Builder<BasicQueryInfo> builder = new ImmutableList.Builder<>();
for (BasicQueryInfo queryInfo : queries) {
if (stateFilter == null || queryInfo.getState() == expectedState) {
builder.add(queryInfo);
}
}
return builder.build();
}
@ResourceSecurity(WEB_UI)
@GET
@Path("{queryId}")
public Response getQueryInfo(@PathParam("queryId") QueryId queryId, @Context HttpServletRequest servletRequest, @Context HttpHeaders httpHeaders)
{
requireNonNull(queryId, "queryId is null");
Optional<QueryInfo> queryInfo = dispatchManager.getFullQueryInfo(queryId);
if (queryInfo.isPresent()) {
try {
checkCanViewQueryOwnedBy(extractAuthorizedIdentity(servletRequest, httpHeaders, alternateHeaderName, accessControl, groupProvider), queryInfo.get().getSession().getUser(), accessControl);
return Response.ok(queryInfo.get()).build();
}
catch (AccessDeniedException e) {
throw new ForbiddenException();
}
}
return Response.status(Status.GONE).build();
}
@ResourceSecurity(WEB_UI)
@PUT
@Path("{queryId}/killed")
public Response killQuery(@PathParam("queryId") QueryId queryId, String message, @Context HttpServletRequest servletRequest, @Context HttpHeaders httpHeaders)
{
return failQuery(queryId, createKillQueryException(message), servletRequest, httpHeaders);
}
@ResourceSecurity(WEB_UI)
@PUT
@Path("{queryId}/preempted")
public Response preemptQuery(@PathParam("queryId") QueryId queryId, String message, @Context HttpServletRequest servletRequest, @Context HttpHeaders httpHeaders)
{
return failQuery(queryId, createPreemptQueryException(message), servletRequest, httpHeaders);
}
private Response failQuery(QueryId queryId, TrinoException queryException, HttpServletRequest servletRequest, @Context HttpHeaders httpHeaders)
{
requireNonNull(queryId, "queryId is null");
try {
BasicQueryInfo queryInfo = dispatchManager.getQueryInfo(queryId);
checkCanKillQueryOwnedBy(extractAuthorizedIdentity(servletRequest, httpHeaders, alternateHeaderName, accessControl, groupProvider), queryInfo.getSession().getUser(), accessControl);
// check before killing to provide the proper error code (this is racy)
if (queryInfo.getState().isDone()) {
return Response.status(Status.CONFLICT).build();
}
dispatchManager.failQuery(queryId, queryException);
return Response.status(Status.ACCEPTED).build();
}
catch (AccessDeniedException e) {
throw new ForbiddenException();
}
catch (NoSuchElementException e) {
return Response.status(Status.GONE).build();
}
}
}
| 11xor6/presto | core/trino-main/src/main/java/io/trino/server/ui/UiQueryResource.java | Java | apache-2.0 | 6,476 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.omg.CORBA;
//
// IDL:omg.org/CORBA/AbstractInterfaceDef:1.0
//
// Generated-style CORBA Helper class for the AbstractInterfaceDef interface:
// provides the standard Any insert/extract, TypeCode, repository id,
// CDR read/write and (unchecked_)narrow operations required by the
// IDL-to-Java mapping. The code intentionally follows the generator's
// layout; do not restructure by hand.
final public class AbstractInterfaceDefHelper
{
    // Insert the object reference into the Any, tagged with this helper's
    // interface TypeCode.
    public static void
    insert(org.omg.CORBA.Any any, AbstractInterfaceDef val)
    {
        any.insert_Object(val, type());
    }
    // Extract the reference from the Any; BAD_OPERATION when the Any does
    // not hold an equivalent TypeCode.
    public static AbstractInterfaceDef
    extract(org.omg.CORBA.Any any)
    {
        if(any.type().equivalent(type()))
            return narrow(any.extract_Object());
        throw new org.omg.CORBA.BAD_OPERATION();
    }
    // Lazily-created, cached interface TypeCode (not thread-safe, but
    // duplicate creation is harmless and the last writer wins).
    private static org.omg.CORBA.TypeCode typeCode_;
    public static org.omg.CORBA.TypeCode
    type()
    {
        if(typeCode_ == null)
        {
            org.omg.CORBA.ORB orb = org.omg.CORBA.ORB.init();
            typeCode_ = orb.create_interface_tc(id(), "AbstractInterfaceDef");
        }
        return typeCode_;
    }
    // Interface repository id for AbstractInterfaceDef.
    public static String
    id()
    {
        return "IDL:omg.org/CORBA/AbstractInterfaceDef:1.0";
    }
    // Unmarshal a reference from the stream; falls back to wrapping the
    // delegate in a local stub when the unmarshalled object is not already
    // of the expected type.
    public static AbstractInterfaceDef
    read(org.omg.CORBA.portable.InputStream in)
    {
        org.omg.CORBA.Object _ob_v = in.read_Object();
        try
        {
            return (AbstractInterfaceDef)_ob_v;
        }
        catch(ClassCastException ex)
        {
        }
        org.omg.CORBA.portable.ObjectImpl _ob_impl;
        _ob_impl = (org.omg.CORBA.portable.ObjectImpl)_ob_v;
        _AbstractInterfaceDefStub _ob_stub = new _AbstractInterfaceDefStub();
        _ob_stub._set_delegate(_ob_impl._get_delegate());
        return _ob_stub;
    }
    // Marshal the reference onto the stream.
    public static void
    write(org.omg.CORBA.portable.OutputStream out, AbstractInterfaceDef val)
    {
        out.write_Object(val);
    }
    // Checked narrow: verifies _is_a(id()) before wrapping in a stub;
    // BAD_PARAM when the object does not support the interface. Null maps
    // to null.
    public static AbstractInterfaceDef
    narrow(org.omg.CORBA.Object val)
    {
        if(val != null)
        {
            try
            {
                return (AbstractInterfaceDef)val;
            }
            catch(ClassCastException ex)
            {
            }
            if(val._is_a(id()))
            {
                org.omg.CORBA.portable.ObjectImpl _ob_impl;
                _AbstractInterfaceDefStub _ob_stub = new _AbstractInterfaceDefStub();
                _ob_impl = (org.omg.CORBA.portable.ObjectImpl)val;
                _ob_stub._set_delegate(_ob_impl._get_delegate());
                return _ob_stub;
            }
            throw new org.omg.CORBA.BAD_PARAM();
        }
        return null;
    }
    // Unchecked narrow: wraps without the remote _is_a check (caller takes
    // responsibility for type correctness). Null maps to null.
    public static AbstractInterfaceDef
    unchecked_narrow(org.omg.CORBA.Object val)
    {
        if(val != null)
        {
            try
            {
                return (AbstractInterfaceDef)val;
            }
            catch(ClassCastException ex)
            {
            }
            org.omg.CORBA.portable.ObjectImpl _ob_impl;
            _AbstractInterfaceDefStub _ob_stub = new _AbstractInterfaceDefStub();
            _ob_impl = (org.omg.CORBA.portable.ObjectImpl)val;
            _ob_stub._set_delegate(_ob_impl._get_delegate());
            return _ob_stub;
        }
        return null;
    }
}
| apache/geronimo-yoko | yoko-spec-corba/src/main/java/org/omg/CORBA/AbstractInterfaceDefHelper.java | Java | apache-2.0 | 3,817 |
import hashlib
from waterbutler import settings
# Server-specific configuration namespace.
config = settings.child('SERVER_CONFIG')
# Bind address/port for the server and the externally visible domain used
# when constructing URLs.
ADDRESS = config.get('ADDRESS', 'localhost')
PORT = config.get('PORT', 7777)
DOMAIN = config.get('DOMAIN', "http://localhost:7777")
DEBUG = config.get_bool('DEBUG', True)
# TLS key material; both files are needed to serve HTTPS.
SSL_CERT_FILE = config.get_nullable('SSL_CERT_FILE', None)
SSL_KEY_FILE = config.get_nullable('SSL_KEY_FILE', None)
# Honor X-Forwarded-* headers when behind a reverse proxy.
XHEADERS = config.get_bool('XHEADERS', False)
CORS_ALLOW_ORIGIN = config.get('CORS_ALLOW_ORIGIN', '*')
# Streaming chunk size and the maximum accepted request body.
CHUNK_SIZE = int(config.get('CHUNK_SIZE', 65536))  # 64KB
MAX_BODY_SIZE = int(config.get('MAX_BODY_SIZE', int(4.9 * (1024 ** 3))))  # 4.9 GB
AUTH_HANDLERS = config.get('AUTH_HANDLERS', [
    'osf',
])
# HMAC algorithm name is resolved against hashlib (e.g. 'sha256').
HMAC_ALGORITHM = getattr(hashlib, config.get('HMAC_ALGORITHM', 'sha256'))
HMAC_SECRET = config.get('HMAC_SECRET')
# NOTE(review): this guard reads the global settings.DEBUG, not the
# SERVER_CONFIG DEBUG defined above -- confirm that is intentional. Also
# note `assert` is stripped under `python -O`.
if not settings.DEBUG:
    assert HMAC_SECRET, 'HMAC_SECRET must be specified when not in debug mode'
# Fall back to a placeholder secret in debug mode; always store as bytes.
HMAC_SECRET = (HMAC_SECRET or 'changeme').encode('utf-8')
# Configs for WB API Rate-limiting with Redis
ENABLE_RATE_LIMITING = config.get_bool('ENABLE_RATE_LIMITING', False)
REDIS_HOST = config.get('REDIS_HOST', '192.168.168.167')
REDIS_PORT = config.get('REDIS_PORT', '6379')
REDIS_PASSWORD = config.get('REDIS_PASSWORD', None)
# Number of seconds until the redis key expires
RATE_LIMITING_FIXED_WINDOW_SIZE = int(config.get('RATE_LIMITING_FIXED_WINDOW_SIZE', 3600))
# number of reqests permitted while the redis key is active
RATE_LIMITING_FIXED_WINDOW_LIMIT = int(config.get('RATE_LIMITING_FIXED_WINDOW_LIMIT', 3600))
| CenterForOpenScience/waterbutler | waterbutler/server/settings.py | Python | apache-2.0 | 1,532 |
using UnityEngine;
public class CharacterAnimatorController : MonoBehaviour
{
    // Cached Animator reference; resolved once instead of calling
    // GetComponent on every collision (a well-known Unity hot-path cost).
    private Animator animator;

    void Awake()
    {
        animator = GetComponent<Animator>();
    }

    // Fire the "Hit" animation trigger whenever this character's 2D
    // collider touches anything.
    void OnCollisionEnter2D(Collision2D col)
    {
        animator.SetTrigger("Hit");
    }
}
| unity3d-jp/AngryChicken | Assets/script/CharacterAnimatorController.cs | C# | apache-2.0 | 178 |
/*
* Copyright © 2015 Cask Data, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package co.cask.cdap.data2.dataset2.lib.table;
import co.cask.cdap.api.annotation.Beta;
import co.cask.cdap.api.common.Bytes;
import co.cask.cdap.api.data.batch.RecordScanner;
import co.cask.cdap.api.data.batch.Split;
import co.cask.cdap.api.data.batch.SplitReader;
import co.cask.cdap.api.data.format.StructuredRecord;
import co.cask.cdap.api.data.schema.Schema;
import co.cask.cdap.api.dataset.DataSetException;
import co.cask.cdap.api.dataset.lib.AbstractCloseableIterator;
import co.cask.cdap.api.dataset.lib.AbstractDataset;
import co.cask.cdap.api.dataset.lib.CloseableIterator;
import co.cask.cdap.api.dataset.lib.KeyValue;
import co.cask.cdap.api.dataset.lib.ObjectMappedTable;
import co.cask.cdap.api.dataset.table.Put;
import co.cask.cdap.api.dataset.table.Row;
import co.cask.cdap.api.dataset.table.Scanner;
import co.cask.cdap.api.dataset.table.Table;
import co.cask.cdap.internal.io.ReflectionPutWriter;
import co.cask.cdap.internal.io.ReflectionRowReader;
import co.cask.cdap.internal.io.TypeRepresentation;
import com.google.common.base.Preconditions;
import com.google.common.reflect.TypeToken;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.lang.reflect.Type;
import java.util.List;
import javax.annotation.Nullable;
/**
 * Default implementation for {@link ObjectMappedTable}.
 *
 * <p>Persists objects of type {@code T} in an underlying {@link Table} by
 * reflectively mapping object fields to columns ({@link ReflectionPutWriter})
 * and rows back to objects ({@link ReflectionRowReader}).
 *
 * @param <T> the type of objects in the table
 */
@Beta
public class ObjectMappedTableDataset<T> extends AbstractDataset implements ObjectMappedTable<T> {
  private static final Logger LOG = LoggerFactory.getLogger(ObjectMappedTableDataset.class);
  private final Table table;
  private final Schema objectSchema;
  private final TypeRepresentation typeRepresentation;
  private final ReflectionPutWriter<T> putWriter;
  // we get this lazily, since we may not have the actual Type when using this as a RecordScannable,
  // but we do expect to have it when using it in a program context
  private ReflectionRowReader<T> rowReader;
  // schema is passed in as an argument because it is a required dataset property for validation purposes, so
  // the ObjectMappedTableDefinition will always have it. We could always derive the schema from the type,
  // but it is simpler to just pass it in.
  public ObjectMappedTableDataset(String name, Table table, TypeRepresentation typeRep,
                                  Schema objectSchema, @Nullable ClassLoader classLoader) {
    super(name, table);
    this.table = table;
    this.objectSchema = objectSchema;
    this.typeRepresentation = typeRep;
    this.typeRepresentation.setClassLoader(classLoader);
    this.putWriter = new ReflectionPutWriter<>(objectSchema);
  }
  // Lazily builds (and caches) the row reader. Resolving the Type can throw a
  // runtime exception wrapping ClassNotFoundException when T's class is not in
  // the dataset's classloader; that case is logged with guidance before rethrow.
  @SuppressWarnings("unchecked")
  private ReflectionRowReader<T> getReflectionRowReader() {
    if (rowReader == null) {
      try {
        // this can throw a runtime exception from a ClassNotFoundException
        Type type = typeRepresentation.toType();
        this.rowReader = new ReflectionRowReader<>(objectSchema, (TypeToken<T>) TypeToken.of(type));
      } catch (RuntimeException e) {
        String missingClass = isClassNotFoundException(e);
        if (missingClass != null) {
          LOG.error("Cannot load dataset because class {} could not be found. This is probably because the " +
                      "type parameter of the dataset is not present in the dataset's jar file. See the developer " +
                      "guide for more information.", missingClass);
        }
        throw e;
      }
    }
    return this.rowReader;
  }
  // Walks the cause chain; returns the missing class name if any cause is a
  // ClassNotFoundException, otherwise null. (NOTE: despite the is* name this
  // returns a String, not a boolean.)
  private String isClassNotFoundException(Throwable e) {
    if (e instanceof ClassNotFoundException) {
      return e.getMessage();
    }
    if (e.getCause() != null) {
      return isClassNotFoundException(e.getCause());
    }
    return null;
  }
  @Override
  public void write(String key, T object) {
    write(Bytes.toBytes(key), object);
  }
  // Serializes the object's fields into a Put via reflection. An IOException
  // from encoding is unexpected and surfaced as a DataSetException.
  @Override
  public void write(byte[] key, T object) {
    Put put = new Put(key);
    try {
      putWriter.write(object, put);
      table.put(put);
    } catch (IOException e) {
      // should never happen
      throw new DataSetException("Failed to encode object to be written: " + e.getMessage(), e);
    }
  }
  @Override
  public T read(String key) {
    return read(Bytes.toBytes(key));
  }
  // Returns null when no row exists for the key (see readRow).
  @Override
  public T read(byte[] key) {
    return readRow(table.get(key));
  }
  @Override
  public CloseableIterator<KeyValue<byte[], T>> scan(@Nullable String startRow, @Nullable String stopRow) {
    return scan(startRow == null ? null : Bytes.toBytes(startRow), stopRow == null ? null : Bytes.toBytes(stopRow));
  }
  // Scans [startRow, stopRow); null bounds are open-ended.
  @Override
  public CloseableIterator<KeyValue<byte[], T>> scan(byte[] startRow, byte[] stopRow) {
    return new ObjectIterator(table.scan(startRow, stopRow));
  }
  @Override
  public void delete(String key) {
    delete(Bytes.toBytes(key));
  }
  @Override
  public void delete(byte[] key) {
    table.delete(key);
  }
  @Override
  public Type getRecordType() {
    return table.getRecordType();
  }
  @Override
  public List<Split> getSplits() {
    return table.getSplits();
  }
  @Override
  public List<Split> getSplits(int numSplits, byte[] start, byte[] stop) {
    return table.getSplits(numSplits, start, stop);
  }
  @Override
  public RecordScanner<StructuredRecord> createSplitRecordScanner(Split split) {
    return table.createSplitRecordScanner(split);
  }
  @Override
  public SplitReader<byte[], T> createSplitReader(Split split) {
    return new ObjectSplitReader(table.createSplitReader(split));
  }
  // Iterator adapter that decodes each scanned Row into (rowKey, T) pairs and
  // closes the underlying Scanner when exhausted or closed explicitly.
  private class ObjectIterator extends AbstractCloseableIterator<KeyValue<byte[], T>> {
    private final Scanner scanner;
    private boolean closed = false;
    private ObjectIterator(Scanner scanner) {
      this.scanner = scanner;
    }
    @Override
    protected KeyValue<byte[], T> computeNext() {
      Preconditions.checkState(!closed);
      Row row = scanner.next();
      if (row != null) {
        return new KeyValue<>(row.getRow(), readRow(row));
      }
      close();
      return endOfData();
    }
    @Override
    public void close() {
      scanner.close();
      endOfData();
      closed = true;
    }
  }
  /**
   * The split reader for objects is reading a table split using the underlying Table's split reader.
   */
  private class ObjectSplitReader extends SplitReader<byte[], T> {
    // the underlying Table's split reader
    private SplitReader<byte[], Row> reader;
    public ObjectSplitReader(SplitReader<byte[], Row> reader) {
      this.reader = reader;
    }
    @Override
    public void initialize(Split split) throws InterruptedException {
      this.reader.initialize(split);
    }
    @Override
    public boolean nextKeyValue() throws InterruptedException {
      return this.reader.nextKeyValue();
    }
    @Override
    public byte[] getCurrentKey() throws InterruptedException {
      return this.reader.getCurrentKey();
    }
    @Override
    public T getCurrentValue() throws InterruptedException {
      return readRow(this.reader.getCurrentValue());
    }
    @Override
    public void close() {
      this.reader.close();
    }
  }
  // Decodes a Row into an instance of T; empty rows map to null. Decoding
  // failures (e.g. incompatible type changes) surface as DataSetException.
  private T readRow(Row row) {
    try {
      if (row.isEmpty()) {
        return null;
      }
      return getReflectionRowReader().read(row, objectSchema);
    } catch (Exception e) {
      // should not happen. Can happen if somebody changes the type in an incompatible way?
      throw new DataSetException("Failed to decode object: " + e.getMessage(), e);
    }
  }
}
| hsaputra/cdap | cdap-data-fabric/src/main/java/co/cask/cdap/data2/dataset2/lib/table/ObjectMappedTableDataset.java | Java | apache-2.0 | 8,119 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.operator.scalar;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import io.prestosql.spi.type.ArrayType;
import org.testng.annotations.Test;
import java.util.HashMap;
import java.util.Map;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.BooleanType.BOOLEAN;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.IntegerType.INTEGER;
import static io.prestosql.spi.type.TimestampType.createTimestampType;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static io.prestosql.spi.type.VarcharType.createVarcharType;
import static io.prestosql.type.UnknownType.UNKNOWN;
import static io.prestosql.util.StructuralTestUtil.mapType;
public class TestMapTransformValuesFunction
extends AbstractTestFunctions
{
    @Test
    public void testRetainedSizeBounded()
    {
        // Repeated invocations of a cached transform_values instance must not
        // grow retained memory without bound.
        assertCachedInstanceHasBoundedRetainedSize("transform_values(map(ARRAY [25, 26, 27], ARRAY [25, 26, 27]), (k, v) -> k + v)");
    }
    @Test
    public void testEmpty()
    {
        // transform_values on an empty map yields an empty map; only the value
        // type changes according to the lambda's result type (key type is kept,
        // UNKNOWN for untyped empty literals).
        assertFunction("transform_values(map(ARRAY[], ARRAY[]), (k, v) -> NULL)", mapType(UNKNOWN, UNKNOWN), ImmutableMap.of());
        assertFunction("transform_values(map(ARRAY[], ARRAY[]), (k, v) -> k)", mapType(UNKNOWN, UNKNOWN), ImmutableMap.of());
        assertFunction("transform_values(map(ARRAY[], ARRAY[]), (k, v) -> v)", mapType(UNKNOWN, UNKNOWN), ImmutableMap.of());
        assertFunction("transform_values(map(ARRAY[], ARRAY[]), (k, v) -> 0)", mapType(UNKNOWN, INTEGER), ImmutableMap.of());
        assertFunction("transform_values(map(ARRAY[], ARRAY[]), (k, v) -> true)", mapType(UNKNOWN, BOOLEAN), ImmutableMap.of());
        assertFunction("transform_values(map(ARRAY[], ARRAY[]), (k, v) -> 'value')", mapType(UNKNOWN, createVarcharType(5)), ImmutableMap.of());
        // Typed empty maps: lambda result type is derived from the declared
        // key/value types.
        assertFunction("transform_values(CAST (map(ARRAY[], ARRAY[]) AS MAP(BIGINT,VARCHAR)), (k, v) -> k + CAST(v as BIGINT))", mapType(BIGINT, BIGINT), ImmutableMap.of());
        assertFunction("transform_values(CAST (map(ARRAY[], ARRAY[]) AS MAP(BIGINT,VARCHAR)), (k, v) -> CAST(k AS VARCHAR) || v)", mapType(BIGINT, VARCHAR), ImmutableMap.of());
    }
    @Test
    public void testNullValue()
    {
        // NULL results from the lambda are preserved as NULL map values.
        Map<Integer, Void> sequenceToNullMap = new HashMap<>();
        sequenceToNullMap.put(1, null);
        sequenceToNullMap.put(2, null);
        sequenceToNullMap.put(3, null);
        assertFunction("transform_values(map(ARRAY[1, 2, 3], ARRAY ['a', 'b', 'c']), (k, v) -> NULL)", mapType(INTEGER, UNKNOWN), sequenceToNullMap);
        // NULL inputs and NULL-producing expressions (TRY_CAST, element_at on a
        // missing key) flow through to NULL output values for the same key.
        Map<Integer, String> mapWithNullValue = new HashMap<>();
        mapWithNullValue.put(1, "a");
        mapWithNullValue.put(2, "b");
        mapWithNullValue.put(3, null);
        assertFunction("transform_values(map(ARRAY[1, 2, 3], ARRAY ['a', 'b', NULL]), (k, v) -> v)", mapType(INTEGER, createVarcharType(1)), mapWithNullValue);
        assertFunction("transform_values(map(ARRAY[1, 2, 3], ARRAY [10, 11, NULL]), (k, v) -> to_base(v, 16))", mapType(INTEGER, createVarcharType(64)), mapWithNullValue);
        assertFunction("transform_values(map(ARRAY[1, 2, 3], ARRAY ['10', '11', 'Invalid']), (k, v) -> to_base(TRY_CAST(v as BIGINT), 16))", mapType(INTEGER, createVarcharType(64)), mapWithNullValue);
        assertFunction(
                "transform_values(map(ARRAY[1, 2, 3], ARRAY [0, 0, 0]), (k, v) -> element_at(map(ARRAY[1, 2], ARRAY['a', 'b']), k + v))",
                mapType(INTEGER, createVarcharType(1)),
                mapWithNullValue);
        // The lambda can also branch on NULL values explicitly.
        assertFunction(
                "transform_values(map(ARRAY[1, 2, 3], ARRAY ['a', 'b', NULL]), (k, v) -> IF(v IS NULL, k + 1.0E0, k + 0.5E0))",
                mapType(INTEGER, DOUBLE),
                ImmutableMap.of(1, 1.5, 2, 2.5, 3, 4.0));
    }
@Test
public void testBasic()
{
assertFunction(
"transform_values(map(ARRAY [1, 2, 3, 4], ARRAY [10, 20, 30, 40]), (k, v) -> k + v)",
mapType(INTEGER, INTEGER),
ImmutableMap.of(1, 11, 2, 22, 3, 33, 4, 44));
assertFunction(
"transform_values(map(ARRAY ['a', 'b', 'c', 'd'], ARRAY [1, 2, 3, 4]), (k, v) -> v * v)",
mapType(createVarcharType(1), INTEGER),
ImmutableMap.of("a", 1, "b", 4, "c", 9, "d", 16));
assertFunction(
"transform_values(map(ARRAY ['a', 'b', 'c', 'd'], ARRAY [1, 2, 3, 4]), (k, v) -> k || CAST(v as VARCHAR))",
mapType(createVarcharType(1), VARCHAR),
ImmutableMap.of("a", "a1", "b", "b2", "c", "c3", "d", "d4"));
assertFunction(
"transform_values(map(ARRAY[1, 2, 3], ARRAY [1.0E0, 1.4E0, 1.7E0]), (k, v) -> map(ARRAY[1, 2, 3], ARRAY['one', 'two', 'three'])[k] || '_' || CAST(v AS VARCHAR))",
mapType(INTEGER, VARCHAR),
ImmutableMap.of(1, "one_1.0", 2, "two_1.4", 3, "three_1.7"));
assertFunction(
"transform_values(map(ARRAY[1, 2], ARRAY [TIMESTAMP '2020-05-10 12:34:56.123456789', TIMESTAMP '2010-05-10 12:34:56.123456789']), (k, v) -> date_add('year', 1, v))",
mapType(INTEGER, createTimestampType(9)),
ImmutableMap.of(1, legacyTimestamp(9, "2021-05-10 12:34:56.123456789"), 2, legacyTimestamp(9, "2011-05-10 12:34:56.123456789")));
}
@Test
public void testTypeCombinations()
{
assertFunction(
"transform_values(map(ARRAY [25, 26, 27], ARRAY [25, 26, 27]), (k, v) -> k + v)",
mapType(INTEGER, INTEGER),
ImmutableMap.of(25, 50, 26, 52, 27, 54));
assertFunction(
"transform_values(map(ARRAY [25, 26, 27], ARRAY [26.1E0, 31.2E0, 37.1E0]), (k, v) -> CAST(v - k AS BIGINT))",
mapType(INTEGER, BIGINT),
ImmutableMap.of(25, 1L, 26, 5L, 27, 10L));
assertFunction(
"transform_values(map(ARRAY [25, 27], ARRAY [false, true]), (k, v) -> if(v, k + 1, k + 2))",
mapType(INTEGER, INTEGER),
ImmutableMap.of(25, 27, 27, 28));
assertFunction(
"transform_values(map(ARRAY [25, 26, 27], ARRAY ['abc', 'd', 'xy']), (k, v) -> k + length(v))",
mapType(INTEGER, BIGINT),
ImmutableMap.of(25, 28L, 26, 27L, 27, 29L));
assertFunction(
"transform_values(map(ARRAY [25, 26, 27], ARRAY [ARRAY ['a'], ARRAY ['a', 'c'], ARRAY ['a', 'b', 'c']]), (k, v) -> k + cardinality(v))",
mapType(INTEGER, BIGINT),
ImmutableMap.of(25, 26L, 26, 28L, 27, 30L));
assertFunction(
"transform_values(map(ARRAY [25.5E0, 26.75E0, 27.875E0], ARRAY [25, 26, 27]), (k, v) -> k - v)",
mapType(DOUBLE, DOUBLE),
ImmutableMap.of(25.5, 0.5, 26.75, 0.75, 27.875, 0.875));
assertFunction(
"transform_values(map(ARRAY [25.5E0, 26.75E0, 27.875E0], ARRAY [25.0E0, 26.0E0, 27.0E0]), (k, v) -> k - v)",
mapType(DOUBLE, DOUBLE),
ImmutableMap.of(25.5, 0.5, 26.75, 0.75, 27.875, 0.875));
assertFunction(
"transform_values(map(ARRAY [25.5E0, 27.5E0], ARRAY [false, true]), (k, v) -> if(v, k + 0.1E0, k + 0.2E0))",
mapType(DOUBLE, DOUBLE),
ImmutableMap.of(25.5, 25.7, 27.5, 27.6));
assertFunction(
"transform_values(map(ARRAY [25.5E0, 26.5E0, 27.5E0], ARRAY ['a', 'def', 'xy']), (k, v) -> k + length(v))",
mapType(DOUBLE, DOUBLE),
ImmutableMap.of(25.5, 26.5, 26.5, 29.5, 27.5, 29.5));
assertFunction(
"transform_values(map(ARRAY [25.5E0, 26.5E0, 27.5E0], ARRAY [ARRAY ['a'], ARRAY ['a', 'c'], ARRAY ['a', 'b', 'c']]), (k, v) -> k + cardinality(v))",
mapType(DOUBLE, DOUBLE),
ImmutableMap.of(25.5, 26.5, 26.5, 28.5, 27.5, 30.5));
assertFunction(
"transform_values(map(ARRAY [true, false], ARRAY [25, 26]), (k, v) -> k AND v = 25)",
mapType(BOOLEAN, BOOLEAN),
ImmutableMap.of(true, true, false, false));
assertFunction(
"transform_values(map(ARRAY [false, true], ARRAY [25.5E0, 26.5E0]), (k, v) -> k OR v > 100)",
mapType(BOOLEAN, BOOLEAN),
ImmutableMap.of(false, false, true, true));
assertFunction(
"transform_values(map(ARRAY [true, false], ARRAY [false, null]), (k, v) -> NOT k OR v)",
mapType(BOOLEAN, BOOLEAN),
ImmutableMap.of(false, true, true, false));
assertFunction(
"transform_values(map(ARRAY [false, true], ARRAY ['abc', 'def']), (k, v) -> NOT k AND v = 'abc')",
mapType(BOOLEAN, BOOLEAN),
ImmutableMap.of(false, true, true, false));
assertFunction(
"transform_values(map(ARRAY [true, false], ARRAY [ARRAY ['a', 'b'], ARRAY ['a', 'b', 'c']]), (k, v) -> k OR cardinality(v) = 3)",
mapType(BOOLEAN, BOOLEAN),
ImmutableMap.of(false, true, true, true));
assertFunction(
"transform_values(map(ARRAY ['s0', 's1', 's2'], ARRAY [25, 26, 27]), (k, v) -> k || ':' || CAST(v as VARCHAR))",
mapType(createVarcharType(2), VARCHAR),
ImmutableMap.of("s0", "s0:25", "s1", "s1:26", "s2", "s2:27"));
assertFunction(
"transform_values(map(ARRAY ['s0', 's1', 's2'], ARRAY [25.5E0, 26.5E0, 27.5E0]), (k, v) -> k || ':' || CAST(v as VARCHAR))",
mapType(createVarcharType(2), VARCHAR),
ImmutableMap.of("s0", "s0:25.5", "s1", "s1:26.5", "s2", "s2:27.5"));
assertFunction(
"transform_values(map(ARRAY ['s0', 's2'], ARRAY [false, true]), (k, v) -> if(v, k, CAST(v AS VARCHAR)))",
mapType(createVarcharType(2), VARCHAR),
ImmutableMap.of("s0", "false", "s2", "s2"));
assertFunction(
"transform_values(map(ARRAY ['s0', 's1', 's2'], ARRAY ['abc', 'def', 'xyz']), (k, v) -> k || ':' || v)",
mapType(createVarcharType(2), VARCHAR),
ImmutableMap.of("s0", "s0:abc", "s1", "s1:def", "s2", "s2:xyz"));
assertFunction(
"transform_values(map(ARRAY ['s0', 's1', 's2'], ARRAY [ARRAY ['a', 'b'], ARRAY ['a', 'c'], ARRAY ['a', 'b', 'c']]), (k, v) -> k || ':' || array_max(v))",
mapType(createVarcharType(2), VARCHAR),
ImmutableMap.of("s0", "s0:b", "s1", "s1:c", "s2", "s2:c"));
assertFunction(
"transform_values(map(ARRAY [ARRAY [1, 2], ARRAY [3, 4]], ARRAY [25, 26]), (k, v) -> if(v % 2 = 0, reverse(k), k))",
mapType(new ArrayType(INTEGER), new ArrayType(INTEGER)),
ImmutableMap.of(ImmutableList.of(1, 2), ImmutableList.of(1, 2), ImmutableList.of(3, 4), ImmutableList.of(4, 3)));
assertFunction(
"transform_values(map(ARRAY [ARRAY [1, 2], ARRAY [3, 4]], ARRAY [25.5E0, 26.5E0]), (k, v) -> CAST(k AS ARRAY(DOUBLE)) || v)",
mapType(new ArrayType(INTEGER), new ArrayType(DOUBLE)),
ImmutableMap.of(ImmutableList.of(1, 2), ImmutableList.of(1., 2., 25.5), ImmutableList.of(3, 4), ImmutableList.of(3., 4., 26.5)));
assertFunction(
"transform_values(map(ARRAY [ARRAY [1, 2], ARRAY [3, 4]], ARRAY [false, true]), (k, v) -> if(v, reverse(k), k))",
mapType(new ArrayType(INTEGER), new ArrayType(INTEGER)),
ImmutableMap.of(ImmutableList.of(1, 2), ImmutableList.of(1, 2), ImmutableList.of(3, 4), ImmutableList.of(4, 3)));
assertFunction(
"transform_values(map(ARRAY [ARRAY [1, 2], ARRAY []], ARRAY ['a', 'ff']), (k, v) -> k || from_base(v, 16))",
mapType(new ArrayType(INTEGER), new ArrayType(BIGINT)),
ImmutableMap.of(ImmutableList.of(1, 2), ImmutableList.of(1L, 2L, 10L), ImmutableList.of(), ImmutableList.of(255L)));
assertFunction(
"transform_values(map(ARRAY [ARRAY [3, 4], ARRAY []], ARRAY [ARRAY ['a', 'b', 'c'], ARRAY ['a', 'c']]), (k, v) -> transform(k, x -> CAST(x AS VARCHAR)) || v)",
mapType(new ArrayType(INTEGER), new ArrayType(VARCHAR)),
ImmutableMap.of(ImmutableList.of(3, 4), ImmutableList.of("3", "4", "a", "b", "c"), ImmutableList.of(), ImmutableList.of("a", "c")));
}
}
| hgschmie/presto | presto-main/src/test/java/io/prestosql/operator/scalar/TestMapTransformValuesFunction.java | Java | apache-2.0 | 13,148 |
package com.thinkgem.jeesite.modules.sys.service.workflow;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.activiti.engine.IdentityService;
import org.activiti.engine.RepositoryService;
import org.activiti.engine.RuntimeService;
import org.activiti.engine.TaskService;
import org.activiti.engine.delegate.Expression;
import org.activiti.engine.identity.Group;
import org.activiti.engine.identity.User;
import org.activiti.engine.impl.RepositoryServiceImpl;
import org.activiti.engine.impl.bpmn.behavior.UserTaskActivityBehavior;
import org.activiti.engine.impl.persistence.entity.ProcessDefinitionEntity;
import org.activiti.engine.impl.pvm.delegate.ActivityBehavior;
import org.activiti.engine.impl.pvm.process.ActivityImpl;
import org.activiti.engine.impl.task.TaskDefinition;
import org.activiti.engine.runtime.Execution;
import org.activiti.engine.runtime.ProcessInstance;
import org.activiti.engine.task.Task;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.thinkgem.jeesite.common.service.BaseService;
import com.thinkgem.jeesite.modules.sys.utils.workflow.WorkflowUtils;
/**
* 工作流跟踪相关Service
* @author HenryYan
*/
@Service
public class WorkflowTraceService extends BaseService {
@Autowired
protected RuntimeService runtimeService;
@Autowired
protected TaskService taskService;
@Autowired
protected RepositoryService repositoryService;
@Autowired
protected IdentityService identityService;
/**
* 流程跟踪图
* @param processInstanceId 流程实例ID
* @return 封装了各种节点信息
*/
public List<Map<String, Object>> traceProcess(String processInstanceId) throws Exception {
Execution execution = runtimeService.createExecutionQuery().executionId(processInstanceId).singleResult();//执行实例
Object property = PropertyUtils.getProperty(execution, "activityId");
String activityId = "";
if (property != null) {
activityId = property.toString();
}
ProcessInstance processInstance = runtimeService.createProcessInstanceQuery().processInstanceId(processInstanceId)
.singleResult();
ProcessDefinitionEntity processDefinition = (ProcessDefinitionEntity) ((RepositoryServiceImpl) repositoryService)
.getDeployedProcessDefinition(processInstance.getProcessDefinitionId());
List<ActivityImpl> activitiList = processDefinition.getActivities();//获得当前任务的所有节点
List<Map<String, Object>> activityInfos = new ArrayList<Map<String, Object>>();
for (ActivityImpl activity : activitiList) {
boolean currentActiviti = false;
String id = activity.getId();
// 当前节点
if (id.equals(activityId)) {
currentActiviti = true;
}
Map<String, Object> activityImageInfo = packageSingleActivitiInfo(activity, processInstance, currentActiviti);
activityInfos.add(activityImageInfo);
}
return activityInfos;
}
/**
* 封装输出信息,包括:当前节点的X、Y坐标、变量信息、任务类型、任务描述
* @param activity
* @param processInstance
* @param currentActiviti
* @return
*/
private Map<String, Object> packageSingleActivitiInfo(ActivityImpl activity, ProcessInstance processInstance,
boolean currentActiviti) throws Exception {
Map<String, Object> vars = new HashMap<String, Object>();
Map<String, Object> activityInfo = new HashMap<String, Object>();
activityInfo.put("currentActiviti", currentActiviti);
setPosition(activity, activityInfo);
setWidthAndHeight(activity, activityInfo);
Map<String, Object> properties = activity.getProperties();
vars.put("任务类型", WorkflowUtils.parseToZhType(properties.get("type").toString()));
ActivityBehavior activityBehavior = activity.getActivityBehavior();
logger.debug("activityBehavior={}", activityBehavior);
if (activityBehavior instanceof UserTaskActivityBehavior) {
Task currentTask = null;
/*
* 当前节点的task
*/
if (currentActiviti) {
currentTask = getCurrentTaskInfo(processInstance);
}
/*
* 当前任务的分配角色
*/
UserTaskActivityBehavior userTaskActivityBehavior = (UserTaskActivityBehavior) activityBehavior;
TaskDefinition taskDefinition = userTaskActivityBehavior.getTaskDefinition();
Set<Expression> candidateGroupIdExpressions = taskDefinition.getCandidateGroupIdExpressions();
if (!candidateGroupIdExpressions.isEmpty()) {
// 任务的处理角色
setTaskGroup(vars, candidateGroupIdExpressions);
// 当前处理人
if (currentTask != null) {
setCurrentTaskAssignee(vars, currentTask);
}
}
}
vars.put("节点说明", properties.get("documentation"));
String description = activity.getProcessDefinition().getDescription();
vars.put("描述", description);
logger.debug("trace variables: {}", vars);
activityInfo.put("vars", vars);
return activityInfo;
}
private void setTaskGroup(Map<String, Object> vars, Set<Expression> candidateGroupIdExpressions) {
String roles = "";
for (Expression expression : candidateGroupIdExpressions) {
String expressionText = expression.getExpressionText();
if (expressionText.startsWith("$")) {
expressionText = expressionText.replace("${insuranceType}", "life");
}
String roleName = identityService.createGroupQuery().groupId(expressionText).singleResult().getName();
roles += roleName;
}
vars.put("任务所属角色", roles);
}
/**
* 设置当前处理人信息
* @param vars
* @param currentTask
*/
private void setCurrentTaskAssignee(Map<String, Object> vars, Task currentTask) {
String assignee = currentTask.getAssignee();
if (assignee != null) {
User assigneeUser = identityService.createUserQuery().userId(assignee).singleResult();
String userInfo = assigneeUser.getFirstName() + " " + assigneeUser.getLastName();
vars.put("当前处理人", userInfo);
}
}
/**
* 获取当前节点信息
* @param processInstance
* @return
*/
private Task getCurrentTaskInfo(ProcessInstance processInstance) {
Task currentTask = null;
try {
String activitiId = (String) PropertyUtils.getProperty(processInstance, "activityId");
logger.debug("current activity id: {}", activitiId);
currentTask = taskService.createTaskQuery().processInstanceId(processInstance.getId()).taskDefinitionKey(activitiId)
.singleResult();
logger.debug("current task for processInstance: {}", ToStringBuilder.reflectionToString(currentTask));
} catch (Exception e) {
logger.error("can not get property activityId from processInstance: {}", processInstance);
}
return currentTask;
}
/**
* 设置宽度、高度属性
* @param activity
* @param activityInfo
*/
private void setWidthAndHeight(ActivityImpl activity, Map<String, Object> activityInfo) {
activityInfo.put("width", activity.getWidth());
activityInfo.put("height", activity.getHeight());
}
/**
* 设置坐标位置
* @param activity
* @param activityInfo
*/
private void setPosition(ActivityImpl activity, Map<String, Object> activityInfo) {
activityInfo.put("x", activity.getX());
activityInfo.put("y", activity.getY());
}
}
| zhangzuoqiang/jeesite | src/main/java/com/thinkgem/jeesite/modules/sys/service/workflow/WorkflowTraceService.java | Java | apache-2.0 | 7,301 |
package org.sagebionetworks.web.client.widget.docker;
import java.util.Date;
import org.gwtbootstrap3.client.ui.html.Span;
import org.sagebionetworks.web.client.SynapseJSNIUtils;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.i18n.client.DateTimeFormat;
import com.google.gwt.i18n.client.DateTimeFormat.PredefinedFormat;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.client.ui.FocusPanel;
import com.google.gwt.user.client.ui.TextBox;
import com.google.gwt.user.client.ui.Widget;
import com.google.inject.Inject;
/**
 * GWT UiBinder view for a single Docker commit row (tag, digest, creation date).
 * Fields annotated with {@code @UiField} are bound by name from the companion
 * .ui.xml template, so their names must not change. Clicking the row delegates
 * to the presenter; clicking the digest text box selects its content for copying.
 */
public class DockerCommitRowWidgetViewImpl implements DockerCommitRowWidgetView {
// Short date/time format used to render the commit's creation timestamp.
public static final DateTimeFormat DATE_FORMAT = DateTimeFormat.getFormat(PredefinedFormat.DATE_TIME_SHORT);
@UiField
Span tag;
@UiField
TextBox createdOn;
@UiField
TextBox digest;
@UiField
FocusPanel row;
private Widget widget;
private Presenter presenter;
public interface Binder extends UiBinder<Widget, DockerCommitRowWidgetViewImpl> {
}
// NOTE(review): jsniUtils is injected but never used in this class — presumably
// kept for Gin wiring or a removed feature; confirm before removing.
@Inject
public DockerCommitRowWidgetViewImpl(Binder binder, final SynapseJSNIUtils jsniUtils) {
this.widget = binder.createAndBindUi(this);
// Select the full digest on click so the user can copy it easily.
digest.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
digest.selectAll();
}
});
// Any click on the row is forwarded to the presenter.
row.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent arg0) {
presenter.onClick();
}
});
}
@Override
public Widget asWidget() {
return widget;
}
@Override
public void setPresenter(Presenter presenter) {
this.presenter = presenter;
}
@Override
public void setTag(String tag) {
this.tag.setText(tag);
}
@Override
public void setDigest(String digest) {
this.digest.setText(digest);
}
@Override
public void setCreatedOn(Date createdOn) {
this.createdOn.setText(DATE_FORMAT.format(createdOn));
}
}
| jay-hodgson/SynapseWebClient | src/main/java/org/sagebionetworks/web/client/widget/docker/DockerCommitRowWidgetViewImpl.java | Java | apache-2.0 | 1,963 |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.3)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.airavata.cloud.aurora.client.sdk;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2016-10-21")
public class ResourceAggregate implements org.apache.thrift.TBase<ResourceAggregate, ResourceAggregate._Fields>, java.io.Serializable, Cloneable, Comparable<ResourceAggregate> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ResourceAggregate");
private static final org.apache.thrift.protocol.TField NUM_CPUS_FIELD_DESC = new org.apache.thrift.protocol.TField("numCpus", org.apache.thrift.protocol.TType.DOUBLE, (short)1);
private static final org.apache.thrift.protocol.TField RAM_MB_FIELD_DESC = new org.apache.thrift.protocol.TField("ramMb", org.apache.thrift.protocol.TType.I64, (short)2);
private static final org.apache.thrift.protocol.TField DISK_MB_FIELD_DESC = new org.apache.thrift.protocol.TField("diskMb", org.apache.thrift.protocol.TType.I64, (short)3);
private static final org.apache.thrift.protocol.TField RESOURCES_FIELD_DESC = new org.apache.thrift.protocol.TField("resources", org.apache.thrift.protocol.TType.SET, (short)4);
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
static {
schemes.put(StandardScheme.class, new ResourceAggregateStandardSchemeFactory());
schemes.put(TupleScheme.class, new ResourceAggregateTupleSchemeFactory());
}
/**
* Number of CPU cores allotted.
*/
public double numCpus; // required
/**
* Megabytes of RAM allotted.
*/
public long ramMb; // required
/**
* Megabytes of disk space allotted.
*/
public long diskMb; // required
/**
* Aggregated resource values.
*/
public Set<Resource> resources; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
/**
* Number of CPU cores allotted.
*/
NUM_CPUS((short)1, "numCpus"),
/**
* Megabytes of RAM allotted.
*/
RAM_MB((short)2, "ramMb"),
/**
* Megabytes of disk space allotted.
*/
DISK_MB((short)3, "diskMb"),
/**
* Aggregated resource values.
*/
RESOURCES((short)4, "resources");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // NUM_CPUS
return NUM_CPUS;
case 2: // RAM_MB
return RAM_MB;
case 3: // DISK_MB
return DISK_MB;
case 4: // RESOURCES
return RESOURCES;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
private static final int __NUMCPUS_ISSET_ID = 0;
private static final int __RAMMB_ISSET_ID = 1;
private static final int __DISKMB_ISSET_ID = 2;
private byte __isset_bitfield = 0;
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.NUM_CPUS, new org.apache.thrift.meta_data.FieldMetaData("numCpus", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE)));
tmpMap.put(_Fields.RAM_MB, new org.apache.thrift.meta_data.FieldMetaData("ramMb", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.DISK_MB, new org.apache.thrift.meta_data.FieldMetaData("diskMb", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
tmpMap.put(_Fields.RESOURCES, new org.apache.thrift.meta_data.FieldMetaData("resources", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.SetMetaData(org.apache.thrift.protocol.TType.SET,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Resource.class))));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ResourceAggregate.class, metaDataMap);
}
public ResourceAggregate() {
}
public ResourceAggregate(
double numCpus,
long ramMb,
long diskMb,
Set<Resource> resources)
{
this();
this.numCpus = numCpus;
setNumCpusIsSet(true);
this.ramMb = ramMb;
setRamMbIsSet(true);
this.diskMb = diskMb;
setDiskMbIsSet(true);
this.resources = resources;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public ResourceAggregate(ResourceAggregate other) {
__isset_bitfield = other.__isset_bitfield;
this.numCpus = other.numCpus;
this.ramMb = other.ramMb;
this.diskMb = other.diskMb;
if (other.isSetResources()) {
Set<Resource> __this__resources = new HashSet<Resource>(other.resources.size());
for (Resource other_element : other.resources) {
__this__resources.add(new Resource(other_element));
}
this.resources = __this__resources;
}
}
public ResourceAggregate deepCopy() {
return new ResourceAggregate(this);
}
@Override
public void clear() {
setNumCpusIsSet(false);
this.numCpus = 0.0;
setRamMbIsSet(false);
this.ramMb = 0;
setDiskMbIsSet(false);
this.diskMb = 0;
this.resources = null;
}
/**
* Number of CPU cores allotted.
*/
public double getNumCpus() {
return this.numCpus;
}
/**
* Number of CPU cores allotted.
*/
public ResourceAggregate setNumCpus(double numCpus) {
this.numCpus = numCpus;
setNumCpusIsSet(true);
return this;
}
public void unsetNumCpus() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __NUMCPUS_ISSET_ID);
}
/** Returns true if field numCpus is set (has been assigned a value) and false otherwise */
public boolean isSetNumCpus() {
return EncodingUtils.testBit(__isset_bitfield, __NUMCPUS_ISSET_ID);
}
public void setNumCpusIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __NUMCPUS_ISSET_ID, value);
}
/**
* Megabytes of RAM allotted.
*/
public long getRamMb() {
return this.ramMb;
}
/**
* Megabytes of RAM allotted.
*/
public ResourceAggregate setRamMb(long ramMb) {
this.ramMb = ramMb;
setRamMbIsSet(true);
return this;
}
public void unsetRamMb() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __RAMMB_ISSET_ID);
}
/** Returns true if field ramMb is set (has been assigned a value) and false otherwise */
public boolean isSetRamMb() {
return EncodingUtils.testBit(__isset_bitfield, __RAMMB_ISSET_ID);
}
public void setRamMbIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __RAMMB_ISSET_ID, value);
}
/**
* Megabytes of disk space allotted.
*/
public long getDiskMb() {
return this.diskMb;
}
/**
* Megabytes of disk space allotted.
*/
public ResourceAggregate setDiskMb(long diskMb) {
this.diskMb = diskMb;
setDiskMbIsSet(true);
return this;
}
public void unsetDiskMb() {
__isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __DISKMB_ISSET_ID);
}
/** Returns true if field diskMb is set (has been assigned a value) and false otherwise */
public boolean isSetDiskMb() {
return EncodingUtils.testBit(__isset_bitfield, __DISKMB_ISSET_ID);
}
public void setDiskMbIsSet(boolean value) {
__isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __DISKMB_ISSET_ID, value);
}
public int getResourcesSize() {
return (this.resources == null) ? 0 : this.resources.size();
}
public java.util.Iterator<Resource> getResourcesIterator() {
return (this.resources == null) ? null : this.resources.iterator();
}
public void addToResources(Resource elem) {
if (this.resources == null) {
this.resources = new HashSet<Resource>();
}
this.resources.add(elem);
}
/**
* Aggregated resource values.
*/
public Set<Resource> getResources() {
return this.resources;
}
/**
* Aggregated resource values.
*/
public ResourceAggregate setResources(Set<Resource> resources) {
this.resources = resources;
return this;
}
public void unsetResources() {
this.resources = null;
}
/** Returns true if field resources is set (has been assigned a value) and false otherwise */
public boolean isSetResources() {
return this.resources != null;
}
public void setResourcesIsSet(boolean value) {
if (!value) {
this.resources = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case NUM_CPUS:
if (value == null) {
unsetNumCpus();
} else {
setNumCpus((Double)value);
}
break;
case RAM_MB:
if (value == null) {
unsetRamMb();
} else {
setRamMb((Long)value);
}
break;
case DISK_MB:
if (value == null) {
unsetDiskMb();
} else {
setDiskMb((Long)value);
}
break;
case RESOURCES:
if (value == null) {
unsetResources();
} else {
setResources((Set<Resource>)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case NUM_CPUS:
return getNumCpus();
case RAM_MB:
return getRamMb();
case DISK_MB:
return getDiskMb();
case RESOURCES:
return getResources();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case NUM_CPUS:
return isSetNumCpus();
case RAM_MB:
return isSetRamMb();
case DISK_MB:
return isSetDiskMb();
case RESOURCES:
return isSetResources();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof ResourceAggregate)
return this.equals((ResourceAggregate)that);
return false;
}
public boolean equals(ResourceAggregate that) {
if (that == null)
return false;
boolean this_present_numCpus = true;
boolean that_present_numCpus = true;
if (this_present_numCpus || that_present_numCpus) {
if (!(this_present_numCpus && that_present_numCpus))
return false;
if (this.numCpus != that.numCpus)
return false;
}
boolean this_present_ramMb = true;
boolean that_present_ramMb = true;
if (this_present_ramMb || that_present_ramMb) {
if (!(this_present_ramMb && that_present_ramMb))
return false;
if (this.ramMb != that.ramMb)
return false;
}
boolean this_present_diskMb = true;
boolean that_present_diskMb = true;
if (this_present_diskMb || that_present_diskMb) {
if (!(this_present_diskMb && that_present_diskMb))
return false;
if (this.diskMb != that.diskMb)
return false;
}
boolean this_present_resources = true && this.isSetResources();
boolean that_present_resources = true && that.isSetResources();
if (this_present_resources || that_present_resources) {
if (!(this_present_resources && that_present_resources))
return false;
if (!this.resources.equals(that.resources))
return false;
}
return true;
}
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_numCpus = true;
list.add(present_numCpus);
if (present_numCpus)
list.add(numCpus);
boolean present_ramMb = true;
list.add(present_ramMb);
if (present_ramMb)
list.add(ramMb);
boolean present_diskMb = true;
list.add(present_diskMb);
if (present_diskMb)
list.add(diskMb);
boolean present_resources = true && (isSetResources());
list.add(present_resources);
if (present_resources)
list.add(resources);
return list.hashCode();
}
@Override
public int compareTo(ResourceAggregate other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetNumCpus()).compareTo(other.isSetNumCpus());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetNumCpus()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.numCpus, other.numCpus);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetRamMb()).compareTo(other.isSetRamMb());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetRamMb()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.ramMb, other.ramMb);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetDiskMb()).compareTo(other.isSetDiskMb());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetDiskMb()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.diskMb, other.diskMb);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetResources()).compareTo(other.isSetResources());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetResources()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.resources, other.resources);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
  // Resolves a Thrift wire field id to its _Fields enum constant
  // (null when the id is unknown to this struct version).
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  // Deserializes this struct from the protocol, dispatching to whichever
  // scheme (standard or tuple) matches the protocol's scheme type.
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  // Serializes this struct to the protocol, dispatching to whichever
  // scheme (standard or tuple) matches the protocol's scheme type.
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    // Generated debug representation: "ResourceAggregate(field:value, ...)".
    StringBuilder sb = new StringBuilder("ResourceAggregate(");
    // The "first" flag is part of the generator's generic template; for the
    // leading field it is never consulted, so no comma precedes numCpus.
    boolean first = true;
    sb.append("numCpus:");
    sb.append(this.numCpus);
    first = false;
    if (!first) sb.append(", ");
    sb.append("ramMb:");
    sb.append(this.ramMb);
    first = false;
    if (!first) sb.append(", ");
    sb.append("diskMb:");
    sb.append(this.diskMb);
    first = false;
    if (!first) sb.append(", ");
    sb.append("resources:");
    // resources is a reference type, so render "null" explicitly when unset.
    if (this.resources == null) {
      sb.append("null");
    } else {
      sb.append(this.resources);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  // This struct declares no required fields, so validation is a no-op;
  // the method exists to satisfy the generated serialization contract.
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization hook: delegates to the Thrift compact protocol so the
  // serialized form matches the wire format, wrapping TException in IOException.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  // Java deserialization hook: mirrors writeObject via the compact protocol.
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      // (Java serialization bypasses the constructor, so the primitive-field
      // "is set" bitfield must be reset by hand before reading.)
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  // Factory registered in the schemes map; produces the field-tagged
  // (standard) serialization scheme for this struct.
  private static class ResourceAggregateStandardSchemeFactory implements SchemeFactory {
    public ResourceAggregateStandardScheme getScheme() {
      return new ResourceAggregateStandardScheme();
    }
  }
  // Standard (field-tagged) protocol scheme: each field is written with an id
  // and type tag, so fields may arrive in any order and unknown ids are skipped.
  private static class ResourceAggregateStandardScheme extends StandardScheme<ResourceAggregate> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, ResourceAggregate struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      // Consume fields until the STOP marker; dispatch on field id, skipping
      // any field whose wire type does not match the expected type.
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // NUM_CPUS
            if (schemeField.type == org.apache.thrift.protocol.TType.DOUBLE) {
              struct.numCpus = iprot.readDouble();
              struct.setNumCpusIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // RAM_MB
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.ramMb = iprot.readI64();
              struct.setRamMbIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // DISK_MB
            if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
              struct.diskMb = iprot.readI64();
              struct.setDiskMbIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // RESOURCES
            if (schemeField.type == org.apache.thrift.protocol.TType.SET) {
              {
                // Pre-size the HashSet at 2x the element count to keep the
                // load factor low while the set is filled.
                org.apache.thrift.protocol.TSet _set84 = iprot.readSetBegin();
                struct.resources = new HashSet<Resource>(2*_set84.size);
                Resource _elem85;
                for (int _i86 = 0; _i86 < _set84.size; ++_i86)
                {
                  _elem85 = new Resource();
                  _elem85.read(iprot);
                  struct.resources.add(_elem85);
                }
                iprot.readSetEnd();
              }
              struct.setResourcesIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            // Unknown field id (schema evolution): skip its payload.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, ResourceAggregate struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      // Primitive fields are written unconditionally; resources only when
      // non-null (null reference marks an unset optional field).
      oprot.writeFieldBegin(NUM_CPUS_FIELD_DESC);
      oprot.writeDouble(struct.numCpus);
      oprot.writeFieldEnd();
      oprot.writeFieldBegin(RAM_MB_FIELD_DESC);
      oprot.writeI64(struct.ramMb);
      oprot.writeFieldEnd();
      oprot.writeFieldBegin(DISK_MB_FIELD_DESC);
      oprot.writeI64(struct.diskMb);
      oprot.writeFieldEnd();
      if (struct.resources != null) {
        oprot.writeFieldBegin(RESOURCES_FIELD_DESC);
        {
          oprot.writeSetBegin(new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRUCT, struct.resources.size()));
          for (Resource _iter87 : struct.resources)
          {
            _iter87.write(oprot);
          }
          oprot.writeSetEnd();
        }
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  // Factory registered in the schemes map; produces the compact bitset-based
  // (tuple) serialization scheme for this struct.
  private static class ResourceAggregateTupleSchemeFactory implements SchemeFactory {
    public ResourceAggregateTupleScheme getScheme() {
      return new ResourceAggregateTupleScheme();
    }
  }
  // Tuple protocol scheme: instead of per-field id/type tags, a leading
  // 4-bit BitSet records which fields follow, then set fields are written
  // in declaration order. Reader and writer must agree on field order.
  private static class ResourceAggregateTupleScheme extends TupleScheme<ResourceAggregate> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, ResourceAggregate struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      // Bit i corresponds to field i+1: numCpus, ramMb, diskMb, resources.
      BitSet optionals = new BitSet();
      if (struct.isSetNumCpus()) {
        optionals.set(0);
      }
      if (struct.isSetRamMb()) {
        optionals.set(1);
      }
      if (struct.isSetDiskMb()) {
        optionals.set(2);
      }
      if (struct.isSetResources()) {
        optionals.set(3);
      }
      oprot.writeBitSet(optionals, 4);
      if (struct.isSetNumCpus()) {
        oprot.writeDouble(struct.numCpus);
      }
      if (struct.isSetRamMb()) {
        oprot.writeI64(struct.ramMb);
      }
      if (struct.isSetDiskMb()) {
        oprot.writeI64(struct.diskMb);
      }
      if (struct.isSetResources()) {
        {
          // Sets are encoded as a size followed by the elements; element
          // type is implied by the schema rather than tagged on the wire.
          oprot.writeI32(struct.resources.size());
          for (Resource _iter88 : struct.resources)
          {
            _iter88.write(oprot);
          }
        }
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, ResourceAggregate struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      // Mirror of write(): consult the presence bitset, then read the set
      // fields in the same declaration order.
      BitSet incoming = iprot.readBitSet(4);
      if (incoming.get(0)) {
        struct.numCpus = iprot.readDouble();
        struct.setNumCpusIsSet(true);
      }
      if (incoming.get(1)) {
        struct.ramMb = iprot.readI64();
        struct.setRamMbIsSet(true);
      }
      if (incoming.get(2)) {
        struct.diskMb = iprot.readI64();
        struct.setDiskMbIsSet(true);
      }
      if (incoming.get(3)) {
        {
          org.apache.thrift.protocol.TSet _set89 = new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
          struct.resources = new HashSet<Resource>(2*_set89.size);
          Resource _elem90;
          for (int _i91 = 0; _i91 < _set89.size; ++_i91)
          {
            _elem90 = new Resource();
            _elem90.read(iprot);
            struct.resources.add(_elem90);
          }
        }
        struct.setResourcesIsSet(true);
      }
    }
  }
}
| machristie/airavata | modules/cloud/aurora-client/src/main/java/org/apache/airavata/cloud/aurora/client/sdk/ResourceAggregate.java | Java | apache-2.0 | 25,535 |
/**
* Copyright 2013-2020 the original author or authors from the JHipster project.
*
* This file is part of the JHipster project, see https://www.jhipster.tech/
* for more information.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
module.exports = {
stringify: data => JSON.stringify(data, (key, value) => (key === 'otherEntity' && value ? `[${value.name} entity]` : value), 4),
};
| PierreBesson/generator-jhipster | utils/index.js | JavaScript | apache-2.0 | 909 |
package org.ansj.recognition;
import org.ansj.dic.LearnTool;
import org.ansj.domain.Nature;
import org.ansj.domain.NewWord;
import org.ansj.domain.Term;
import org.ansj.util.TermUtil;
import org.nlpcn.commons.lang.tire.domain.SmartForest;
/**
 * New-word recognition: walks the segmented term lattice against the trie of
 * learned candidate words ({@link LearnTool#getForest()}) and, whenever a
 * complete learned word is matched, splices it back into the lattice as a
 * new {@link Term}.
 *
 * @author ansj
 *
 */
public class NewWordRecognition {
    // Term lattice produced by segmentation; indexed by character offset.
    private Term[] terms = null;
    // Score of the learned word currently being matched.
    private double score;
    // Accumulates the surface form of the candidate word.
    private StringBuilder sb = new StringBuilder();
    // Root of the learned-word trie (never reassigned after construction).
    private SmartForest<NewWord> forest = null;
    // Current position while walking down the trie.
    private SmartForest<NewWord> branch = null;
    // private int offe = -1;
    // private int endOffe = -1;
    // Nature (part of speech) of the matched learned word.
    private Nature tempNature;
    // Lattice terms immediately before and after the matched span.
    private Term from;
    private Term to;
    // Offset (lattice index) where the current match started.
    private int offe;
    public NewWordRecognition(Term[] terms, LearnTool learn) {
        this.terms = terms;
        forest = learn.getForest();
        branch = learn.getForest();
    }
    /**
     * Scans the lattice once. For each starting term, follows trie branches
     * term-by-term; branch status codes: 1 = prefix only, 2 = word that may
     * extend further, 3 = word with no extensions (leaf).
     */
    public void recognition() {
        if (branch == null) {
            return;
        }
        int length = terms.length - 1;
        Term term = null;
        for (int i = 0; i < length; i++) {
            if (terms[i] == null) {
                continue;
            } else {
                from = terms[i].from();
                terms[i].score(0);
                terms[i].selfScore(0);
            }
            branch = branch.getBranch(terms[i].getName());
            // Status 3 at the first character means the branch cannot start a
            // longer word here, so restart from the trie root.
            if (branch == null || branch.getStatus() == 3) {
                reset();
                continue;
            }
            offe = i;
            // Follow the chain of terms, extending the match and emitting a
            // new term each time a complete word is reached.
            term = terms[i];
            sb.append(term.getName());
            if (branch.getStatus() == 2) {
                term.selfScore(branch.getParam().getScore());
            }
            boolean flag = true;
            while (flag) {
                term = term.to();
                branch = branch.getBranch(term.getName());
                // No deeper branch: the match cannot be extended, stop here.
                if (branch == null) {
                    break;
                }
                switch (branch.getStatus()) {
                case 1:
                    sb.append(term.getName());
                    continue;
                case 2:
                    // Complete word that may still extend: splice it in and
                    // keep matching.
                    sb.append(term.getName());
                    score = branch.getParam().getScore();
                    tempNature = branch.getParam().getNature();
                    to = term.to();
                    makeNewTerm();
                    continue;
                case 3:
                    // Leaf word: splice it in and end this match.
                    sb.append(term.getName());
                    score = branch.getParam().getScore();
                    tempNature = branch.getParam().getNature();
                    to = term.to();
                    makeNewTerm();
                    flag = false;
                    break;
                default:
                    // Status 0 should be unreachable for a visited branch;
                    // debug print kept from upstream (message: "how can 0 appear here?").
                    System.out.println("怎么能出现0呢?");
                    break;
                }
            }
            reset();
        }
    }
    /**
     * Builds a Term for the matched word spanning [from, to), links it into
     * the lattice, and records sub-terms for words longer than three chars.
     */
    private void makeNewTerm() {
        Term term = new Term(sb.toString(), offe, tempNature.natureStr, 1);
        term.selfScore(score);
        term.setNature(tempNature);
        if (sb.length() > 3) {
            term.setSubTerm(TermUtil.getSubTerm(from, to));
        }
        TermUtil.termLink(from, term);
        TermUtil.termLink(term, to);
        TermUtil.insertTerm(terms, term,2);
        TermUtil.parseNature(term);
    }
    /**
     * Resets the match state so the next scan starts from the trie root.
     */
    private void reset() {
        offe = -1;
        tempNature = null;
        branch = forest;
        score = 0;
        sb = new StringBuilder();
    }
}
| hitscs/ansj_seg | src/main/java/org/ansj/recognition/NewWordRecognition.java | Java | apache-2.0 | 2,876 |
def install(job):
    """Open this service's TCP port on the parent node (idempotent).

    Acquires a JWT, stores it in the job context, and only touches nftables
    when no rule for the port exists yet. On success the service status is
    recorded as "opened" and the service model is persisted.
    """
    from zeroos.orchestrator.sal.Node import Node
    from zeroos.orchestrator.configuration import get_jwt_token

    service = job.service
    token = get_jwt_token(service.aysrepo)
    job.context['token'] = token

    node = Node.from_ays(service.parent, token)
    port = service.model.data.port
    if not node.client.nft.rule_exists(port):
        node.client.nft.open_port(port)
        service.model.data.status = "opened"
        service.saveAll()
def drop(job):
    """Drop this service's TCP port on the parent node (idempotent).

    Mirror image of install(): acquires a JWT, stores it in the job context,
    and only removes the nftables rule when one exists. On success the
    service status is recorded as "dropped" and the model is persisted.
    """
    from zeroos.orchestrator.sal.Node import Node
    from zeroos.orchestrator.configuration import get_jwt_token

    service = job.service
    token = get_jwt_token(service.aysrepo)
    job.context['token'] = token

    node = Node.from_ays(service.parent, token)
    port = service.model.data.port
    if node.client.nft.rule_exists(port):
        node.client.nft.drop_port(port)
        service.model.data.status = "dropped"
        service.saveAll()
def monitor(job):
    """Re-assert the firewall rule for a port that should be open.

    Services whose status is not "opened" are left alone; otherwise a fresh
    JWT is stored and install() re-applies the (idempotent) rule.
    """
    from zeroos.orchestrator.configuration import get_jwt_token

    if job.service.model.data.status != "opened":
        return
    job.context['token'] = get_jwt_token(job.service.aysrepo)
    install(job)
| zero-os/0-orchestrator | templates/tcp/actions.py | Python | apache-2.0 | 1,208 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.infra.federation.optimizer.context.parser.dialect.impl;
import org.apache.calcite.config.CalciteConnectionProperty;
import org.apache.calcite.config.Lex;
import org.apache.calcite.sql.fun.SqlLibrary;
import org.apache.calcite.sql.validate.SqlConformanceEnum;
import org.apache.shardingsphere.infra.federation.optimizer.context.parser.dialect.OptimizerSQLDialectBuilder;
import java.util.Properties;
/**
 * Builds the Calcite connection properties (lexical rules, SQL conformance
 * and function library) used by the federation optimizer when parsing
 * openGauss statements.
 */
public final class OpenGaussOptimizerBuilder implements OptimizerSQLDialectBuilder {
    
    @Override
    public Properties build() {
        Properties props = new Properties();
        // openGauss follows the PostgreSQL dialect family: Java-style lexing,
        // permissive BABEL conformance, and the PostgreSQL function library.
        props.setProperty(CalciteConnectionProperty.LEX.camelName(), Lex.JAVA.name());
        props.setProperty(CalciteConnectionProperty.CONFORMANCE.camelName(), SqlConformanceEnum.BABEL.name());
        props.setProperty(CalciteConnectionProperty.FUN.camelName(), SqlLibrary.POSTGRESQL.fun);
        return props;
    }
    
    @Override
    public String getType() {
        return "openGauss";
    }
}
| apache/incubator-shardingsphere | shardingsphere-infra/shardingsphere-infra-federation/shardingsphere-infra-federation-optimizer/src/main/java/org/apache/shardingsphere/infra/federation/optimizer/context/parser/dialect/impl/OpenGaussOptimizerBuilder.java | Java | apache-2.0 | 1,888 |
/*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.actuate.autoconfigure.beans;
import org.springframework.boot.actuate.autoconfigure.endpoint.condition.ConditionalOnEnabledEndpoint;
import org.springframework.boot.actuate.autoconfigure.endpoint.condition.ConditionalOnExposedEndpoint;
import org.springframework.boot.actuate.beans.BeansEndpoint;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * {@link EnableAutoConfiguration Auto-configuration} for the {@link BeansEndpoint}.
 *
 * @author Phillip Webb
 * @since 2.0.0
 */
@Configuration(proxyBeanMethods = false)
@ConditionalOnEnabledEndpoint(endpoint = BeansEndpoint.class)
@ConditionalOnExposedEndpoint(endpoint = BeansEndpoint.class)
public class BeansEndpointAutoConfiguration {

	// Only registered when the application has not defined its own BeansEndpoint.
	@Bean
	@ConditionalOnMissingBean
	public BeansEndpoint beansEndpoint(ConfigurableApplicationContext context) {
		return new BeansEndpoint(context);
	}

}
| lburgazzoli/spring-boot | spring-boot-project/spring-boot-actuator-autoconfigure/src/main/java/org/springframework/boot/actuate/autoconfigure/beans/BeansEndpointAutoConfiguration.java | Java | apache-2.0 | 1,816 |
class AddPrevEpisodeIdToEpisodes < ActiveRecord::Migration
  # Adds a self-referential prev_episode_id to episodes, and the analogous
  # column on draft_episodes (also pointing at episodes), each with an index
  # and a foreign key. Statement order matters: the column must exist before
  # its index and foreign key can be created.
  def change
    add_column :episodes, :prev_episode_id, :integer
    add_index :episodes, :prev_episode_id
    add_foreign_key :episodes, :episodes, column: :prev_episode_id
    add_column :draft_episodes, :prev_episode_id, :integer
    add_index :draft_episodes, :prev_episode_id
    add_foreign_key :draft_episodes, :episodes, column: :prev_episode_id
  end
end
| flada-auxv/annict | db/migrate/20150626152128_add_prev_episode_id_to_episodes.rb | Ruby | apache-2.0 | 425 |
/**
* Copyright (C) 2014-2018 LinkedIn Corp. (pinot-core@linkedin.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.linkedin.thirdeye.anomalydetection.performanceEvaluation;
import com.linkedin.thirdeye.api.DimensionMap;
import com.linkedin.thirdeye.datalayer.dto.MergedAnomalyResultDTO;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.joda.time.Interval;
public abstract class BasePerformanceEvaluate implements PerformanceEvaluate {

  /**
   * Indexes merged anomalies by their dimension combination.
   *
   * @param mergedAnomalyResultDTOList merged anomalies to index
   * @return map from each DimensionMap to the [start, end) intervals of its anomalies
   */
  public Map<DimensionMap, List<Interval>> mergedAnomalyResultsToIntervalMap (List<MergedAnomalyResultDTO> mergedAnomalyResultDTOList) {
    Map<DimensionMap, List<Interval>> intervalsByDimension = new HashMap<>();
    for (MergedAnomalyResultDTO anomaly : mergedAnomalyResultDTOList) {
      List<Interval> intervals = intervalsByDimension.get(anomaly.getDimensions());
      if (intervals == null) {
        intervals = new ArrayList<Interval>();
        intervalsByDimension.put(anomaly.getDimensions(), intervals);
      }
      intervals.add(new Interval(anomaly.getStartTime(), anomaly.getEndTime()));
    }
    return intervalsByDimension;
  }

  /**
   * Flattens merged anomalies into a single interval list, ignoring dimensions.
   *
   * @param mergedAnomalyResultDTOList merged anomalies to convert
   * @return one Interval per anomaly, in input order
   */
  public List<Interval> mergedAnomalyResultsToIntervals (List<MergedAnomalyResultDTO> mergedAnomalyResultDTOList) {
    List<Interval> intervals = new ArrayList<>(mergedAnomalyResultDTOList.size());
    for (MergedAnomalyResultDTO anomaly : mergedAnomalyResultDTOList) {
      intervals.add(new Interval(anomaly.getStartTime(), anomaly.getEndTime()));
    }
    return intervals;
  }
}
| apucher/pinot | thirdeye/thirdeye-pinot/src/main/java/com/linkedin/thirdeye/anomalydetection/performanceEvaluation/BasePerformanceEvaluate.java | Java | apache-2.0 | 2,335 |
package com.fanlehai.java.exception;
import static com.fanlehai.java.util.Print.*;
// Teaching example (Thinking in Java): demonstrates that a finally clause
// executes no matter which of several return statements exits the method.
public class MultipleReturns {
	// Returns early from a different point depending on i (1..3); i >= 4
	// falls through to "End". In every case "Performing cleanup" prints,
	// proving finally runs on each exit path.
	public static void f(int i) {
		print("Initialization that requires cleanup");
		try {
			print("Point 1");
			if (i == 1)
				return;
			print("Point 2");
			if (i == 2)
				return;
			print("Point 3");
			if (i == 3)
				return;
			print("End");
			return;
		} finally {
			print("Performing cleanup");
		}
	}
	public static void main(String[] args) {
		// Exercise each exit path once; expected output is the comment below.
		for (int i = 1; i <= 4; i++)
			f(i);
	}
}
/* Output:
Initialization that requires cleanup
Point 1
Performing cleanup
Initialization that requires cleanup
Point 1
Point 2
Performing cleanup
Initialization that requires cleanup
Point 1
Point 2
Point 3
Performing cleanup
Initialization that requires cleanup
Point 1
Point 2
Point 3
End
Performing cleanup
*///:~
| fanlehai/CodePractice | java/src/main/java/com/fanlehai/java/exception/MultipleReturns.java | Java | apache-2.0 | 885 |
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "google/cloud/errors"
module Google
module Cloud
module PubSub
##
# # Policy
#
# Represents a Cloud IAM Policy for the Pub/Sub service.
#
# A common pattern for updating a resource's metadata, such as its Policy,
# is to read the current data from the service, update the data locally,
# and then send the modified data for writing. This pattern may result in
# a conflict if two or more processes attempt the sequence simultaneously.
# IAM solves this problem with the {Google::Cloud::PubSub::Policy#etag}
# property, which is used to verify whether the policy has changed since
# the last request. When you make a request to with an `etag` value, Cloud
# IAM compares the `etag` value in the request with the existing `etag`
# value associated with the policy. It writes the policy only if the
# `etag` values match.
#
# When you update a policy, first read the policy (and its current `etag`)
# from the service, then modify the policy locally, and then write the
# modified policy to the service. See
# {Google::Cloud::PubSub::Topic#policy} and
# {Google::Cloud::PubSub::Topic#policy=}.
#
# @see https://cloud.google.com/iam/docs/managing-policies Managing
# policies
# @see https://cloud.google.com/pubsub/docs/reference/rpc/google.iam.v1#iampolicy
# google.iam.v1.IAMPolicy
#
# @attr [String] etag Used to verify whether the policy has changed since
# the last request. The policy will be written only if the `etag` values
# match.
# @attr [Hash{String => Array<String>}] roles The bindings that associate
# roles with an array of members. See [Understanding
# Roles](https://cloud.google.com/iam/docs/understanding-roles) for a
# listing of primitive and curated roles.
# See [Binding](https://cloud.google.com/pubsub/docs/reference/rpc/google.iam.v1#binding)
# for a listing of values and patterns for members.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "my-topic"
#
# topic.policy do |p|
# p.remove "roles/owner", "user:owner@example.com"
# p.add "roles/owner", "user:newowner@example.com"
# p.roles["roles/viewer"] = ["allUsers"]
# end
#
class Policy
attr_reader :etag, :roles
##
# @private Creates a Policy object.
def initialize etag, roles
@etag = etag
@roles = roles
end
##
# Convenience method for adding a member to a binding on this policy.
# See [Understanding
# Roles](https://cloud.google.com/iam/docs/understanding-roles) for a
# listing of primitive and curated roles.
# See [Binding](https://cloud.google.com/pubsub/docs/reference/rpc/google.iam.v1#binding)
# for a listing of values and patterns for members.
#
# @param [String] role_name A Cloud IAM role, such as
# `"roles/pubsub.admin"`.
# @param [String] member A Cloud IAM identity, such as
# `"user:owner@example.com"`.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "my-topic"
#
# topic.policy do |p|
# p.add "roles/owner", "user:newowner@example.com"
# end
#
def add role_name, member
role(role_name) << member
end
##
# Convenience method for removing a member from a binding on this
# policy. See [Understanding
# Roles](https://cloud.google.com/iam/docs/understanding-roles) for a
# listing of primitive and curated roles. See
# [Binding](https://cloud.google.com/pubsub/docs/reference/rpc/google.iam.v1#binding)
# for a listing of values and patterns for members.
#
# @param [String] role_name A Cloud IAM role, such as
# `"roles/pubsub.admin"`.
# @param [String] member A Cloud IAM identity, such as
# `"user:owner@example.com"`.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "my-topic"
#
# topic.policy do |p|
# p.remove "roles/owner", "user:owner@example.com"
# end
#
def remove role_name, member
role(role_name).delete member
end
##
# Convenience method returning the array of members bound to a role in
# this policy, or an empty array if no value is present for the role in
# {#roles}. See [Understanding
# Roles](https://cloud.google.com/iam/docs/understanding-roles) for a
# listing of primitive and curated roles. See
# [Binding](https://cloud.google.com/pubsub/docs/reference/rpc/google.iam.v1#binding)
# for a listing of values and patterns for members.
#
# @return [Array<String>] The members strings, or an empty array.
#
# @example
# require "google/cloud/pubsub"
#
# pubsub = Google::Cloud::PubSub.new
# topic = pubsub.topic "my-topic"
#
# topic.policy do |p|
# p.role("roles/viewer") << "user:viewer@example.com"
# end
#
def role role_name
roles[role_name] ||= []
end
##
# @private Convert the Policy to a Google::Iam::V1::Policy object.
def to_grpc
Google::Iam::V1::Policy.new(
etag: etag,
bindings: roles.keys.map do |role_name|
next if roles[role_name].empty?
Google::Iam::V1::Binding.new(
role: role_name,
members: roles[role_name]
)
end
)
end
##
# @private New Policy from a Google::Iam::V1::Policy object.
def self.from_grpc grpc
roles = grpc.bindings.each_with_object({}) do |binding, memo|
memo[binding.role] = binding.members.to_a
end
new grpc.etag, roles
end
end
end
Pubsub = PubSub unless const_defined? :Pubsub
end
end
| blowmage/gcloud-ruby | google-cloud-pubsub/lib/google/cloud/pubsub/policy.rb | Ruby | apache-2.0 | 7,039 |
package be.kdg.jsp.controller;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.*;
@WebServlet("/Catalogus")
public class CatalogusServlet extends HttpServlet
{
	/**
	 * Stores the submitted customer number (request parameter "klantnr") on
	 * the HTTP session under the same name, then forwards to the catalogue
	 * view so it can render for that customer.
	 */
	@Override
	protected void doPost(HttpServletRequest request,
			HttpServletResponse response) throws ServletException, IOException
	{
		String customerNumber = request.getParameter("klantnr");
		request.getSession().setAttribute("klantnr", customerNumber);
		request.getRequestDispatcher("/toonCatalogus.jsp").forward(request, response);
	}
}
| thebillkidy/KdG_IAO301A | JavaWebapps/java2web/JSP/src/main/java/be/kdg/jsp/controller/CatalogusServlet.java | Java | apache-2.0 | 679 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import base64
import json
import os
import sys
import time
from marionette import MarionetteTestCase
from marionette import Marionette
from marionette import MarionetteTouchMixin
from marionette.errors import NoSuchElementException
from marionette.errors import ElementNotVisibleException
from marionette.errors import TimeoutException
import mozdevice
class LockScreen(object):
    # Drives the Gaia lock screen through Marionette by importing the
    # gaia_lock_screen.js atom into the session at construction time.

    def __init__(self, marionette):
        self.marionette = marionette
        js = os.path.abspath(os.path.join(__file__, os.path.pardir, 'atoms', "gaia_lock_screen.js"))
        self.marionette.import_script(js)

    @property
    def is_locked(self):
        # Reads LockScreen.locked from the current frame's window object.
        return self.marionette.execute_script('window.wrappedJSObject.LockScreen.locked')

    def lock(self):
        # Async: the atom resolves once the lock animation has completed.
        result = self.marionette.execute_async_script('GaiaLockScreen.lock()')
        assert result, 'Unable to lock screen'

    def unlock(self):
        result = self.marionette.execute_async_script('GaiaLockScreen.unlock()')
        assert result, 'Unable to unlock screen'
class GaiaApp(object):
    """Plain record describing a launched Gaia app.

    Holds the iframe reference (exposed both as ``frame`` and the alias
    ``frame_id``), the frame's src URL, and the app's name and origin.
    """

    def __init__(self, origin=None, name=None, frame=None, src=None):
        self.origin = origin
        self.name = name
        self.src = src
        # frame_id is kept as an alias of frame for callers that use either.
        self.frame = frame
        self.frame_id = frame
class GaiaApps(object):
    # Launches, kills and inspects Gaia apps through the gaia_apps.js
    # Marionette atom imported at construction time.

    def __init__(self, marionette):
        self.marionette = marionette
        js = os.path.abspath(os.path.join(__file__, os.path.pardir, 'atoms', "gaia_apps.js"))
        self.marionette.import_script(js)

    def get_permission(self, app_name, permission_name):
        # NOTE(review): names are interpolated into JS source unescaped; a
        # quote in a name would break the script — assumed trusted test input.
        return self.marionette.execute_async_script("return GaiaApps.getPermission('%s', '%s')" % (app_name, permission_name))

    def set_permission(self, app_name, permission_name, value):
        return self.marionette.execute_async_script("return GaiaApps.setPermission('%s', '%s', '%s')" %
                                                    (app_name, permission_name, value))

    def launch(self, name, switch_to_frame=True, url=None):
        # Launch from the root (system) frame; the atom returns the new app's
        # frame reference plus identifying metadata.
        self.marionette.switch_to_frame()
        result = self.marionette.execute_async_script("GaiaApps.launchWithName('%s')" % name)
        assert result, "Failed to launch app with name '%s'" % name
        app = GaiaApp(frame=result.get('frame'),
                      src=result.get('src'),
                      name=result.get('name'),
                      origin=result.get('origin'))
        if app.frame_id is None:
            raise Exception("App failed to launch; there is no app frame")
        if switch_to_frame:
            self.switch_to_frame(app.frame_id, url)
        return app

    def uninstall(self, name):
        self.marionette.switch_to_frame()
        self.marionette.execute_async_script("GaiaApps.uninstallWithName('%s')" % name)

    def kill(self, app):
        # Kill targets the app's origin, not its name; re-imports the atom
        # because the frame context may have changed since __init__.
        self.marionette.switch_to_frame()
        js = os.path.abspath(os.path.join(__file__, os.path.pardir, 'atoms', "gaia_apps.js"))
        self.marionette.import_script(js)
        result = self.marionette.execute_async_script("GaiaApps.kill('%s');" % app.origin)
        assert result, "Failed to kill app with name '%s'" % app.name

    def kill_all(self):
        self.marionette.switch_to_frame()
        js = os.path.abspath(os.path.join(__file__, os.path.pardir, 'atoms', "gaia_apps.js"))
        self.marionette.import_script(js)
        self.marionette.execute_async_script("GaiaApps.killAll()")

    def runningApps(self):
        return self.marionette.execute_script("return GaiaApps.getRunningApps()")

    def switch_to_frame(self, app_frame, url=None, timeout=30):
        # Switch into the app frame, then poll (every 2s, up to `timeout`
        # seconds) until the frame has navigated away from about:blank — or,
        # when `url` is given, until the current URL contains that substring.
        self.marionette.switch_to_frame(app_frame)
        start = time.time()
        if not url:
            def check(now):
                return "about:blank" not in now
        else:
            def check(now):
                return url in now
        while (time.time() - start < timeout):
            if check(self.marionette.get_url()):
                return
            time.sleep(2)
        raise TimeoutException('Could not switch to app frame %s in time' % app_frame)
class GaiaData(object):
def __init__(self, marionette):
self.marionette = marionette
js = os.path.abspath(os.path.join(__file__, os.path.pardir, 'atoms', "gaia_data_layer.js"))
self.marionette.import_script(js)
self.marionette.set_search_timeout(10000)
def set_time(self, date_number):
self.marionette.set_context(self.marionette.CONTEXT_CHROME)
self.marionette.execute_script("window.navigator.mozTime.set(%s);" % date_number)
self.marionette.set_context(self.marionette.CONTEXT_CONTENT)
def insert_contact(self, contact):
self.marionette.execute_script("GaiaDataLayer.insertContact(%s)" % contact.json())
def remove_contact(self, contact):
self.marionette.execute_script("GaiaDataLayer.findAndRemoveContact(%s)" % contact.json())
def get_setting(self, name):
self.marionette.switch_to_frame()
return self.marionette.execute_async_script('return GaiaDataLayer.getSetting("%s")' % name)
@property
def all_settings(self):
return self.get_setting('*')
def set_setting(self, name, value):
import json
value = json.dumps(value)
self.marionette.switch_to_frame()
result = self.marionette.execute_async_script('return GaiaDataLayer.setSetting("%s", %s)' % (name, value))
assert result, "Unable to change setting with name '%s' to '%s'" % (name, value)
def set_volume(self, value):
self.set_setting('audio.volume.master', value)
def enable_cell_data(self):
self.marionette.switch_to_frame()
result = self.marionette.execute_async_script("return GaiaDataLayer.enableCellData()")
assert result, 'Unable to enable cell data'
def disable_cell_data(self):
self.marionette.switch_to_frame()
result = self.marionette.execute_async_script("return GaiaDataLayer.disableCellData()")
assert result, 'Unable to disable cell data'
def enable_cell_roaming(self):
self.set_setting('ril.data.roaming_enabled', True)
def disable_cell_roaming(self):
self.set_setting('ril.data.roaming_enabled', False)
def enable_wifi(self):
self.marionette.switch_to_frame()
result = self.marionette.execute_async_script("return GaiaDataLayer.enableWiFi()")
assert result, 'Unable to enable WiFi'
def disable_wifi(self):
self.marionette.switch_to_frame()
result = self.marionette.execute_async_script("return GaiaDataLayer.disableWiFi()")
assert result, 'Unable to disable WiFi'
def connect_to_wifi(self, network):
result = self.marionette.execute_async_script("return GaiaDataLayer.connectToWiFi(%s)" % json.dumps(network))
assert result, 'Unable to connect to WiFi network'
def forget_all_networks(self):
self.marionette.execute_async_script('return GaiaDataLayer.forgetAllNetworks()')
def is_wifi_connected(self, network):
    """Return whether the device is currently connected to `network`."""
    # Import locally (as set_setting does) so this method does not depend on
    # a module-level json import that is not guaranteed by this file.
    import json
    return self.marionette.execute_script("return GaiaDataLayer.isWiFiConnected(%s)" % json.dumps(network))
@property
def known_networks(self):
    """Networks reported by GaiaDataLayer.getKnownNetworks()."""
    return self.marionette.execute_async_script('return GaiaDataLayer.getKnownNetworks()')
@property
def active_telephony_state(self):
    """State of the currently active call, or None if there is no active call."""
    # Returns the state of only the currently active call or None if no active call
    return self.marionette.execute_script("return GaiaDataLayer.getMozTelephonyState()")
@property
def is_antenna_available(self):
    """Value of window.navigator.mozFMRadio.antennaAvailable."""
    return self.marionette.execute_script('return window.navigator.mozFMRadio.antennaAvailable')
@property
def is_fm_radio_enabled(self):
    """Value of window.navigator.mozFMRadio.enabled."""
    return self.marionette.execute_script('return window.navigator.mozFMRadio.enabled')
@property
def fm_radio_frequency(self):
    """Current FM radio frequency (window.navigator.mozFMRadio.frequency)."""
    return self.marionette.execute_script('return window.navigator.mozFMRadio.frequency')
@property
def media_files(self):
    """File names from GaiaDataLayer.getAllMediaFiles().

    cleanUp() treats these as sdcard-relative paths when deleting them.
    """
    return self.marionette.execute_async_script('return GaiaDataLayer.getAllMediaFiles();')
class GaiaTestCase(MarionetteTestCase):
    """Base Marionette test case for Gaia UI tests.

    Mixes touch support into the marionette session, wires up the lock
    screen / apps / data-layer / keyboard helpers, cleans device state
    before each test, and provides explicit wait utilities.
    """

    def setUp(self):
        MarionetteTestCase.setUp(self)
        # Graft the touch mixin onto the live marionette instance.
        self.marionette.__class__ = type('Marionette', (Marionette, MarionetteTouchMixin), {})
        self.marionette.setup_touch()

        # the emulator can be really slow!
        self.marionette.set_script_timeout(60000)
        self.marionette.set_search_timeout(10000)

        self.lockscreen = LockScreen(self.marionette)
        self.apps = GaiaApps(self.marionette)
        self.data_layer = GaiaData(self.marionette)
        self.keyboard = Keyboard(self.marionette)

        # wifi is true if testvars includes wifi details and wifi manager is defined
        self.wifi = self.testvars and \
            'wifi' in self.testvars and \
            self.marionette.execute_script('return window.navigator.mozWifiManager !== undefined')

        self.cleanUp()

    @property
    def is_android_build(self):
        """True when the session reports an Android platform (i.e. a device)."""
        return 'Android' in self.marionette.session_capabilities['platform']

    @property
    def device_manager(self):
        """Lazily-created mozdevice manager; only valid on device builds.

        The transport is chosen via the DM_TRANS env var ('adb' default or
        'sut', which additionally requires TEST_DEVICE).
        """
        if not self.is_android_build:
            raise Exception('Device manager is only available for devices.')
        if hasattr(self, '_device_manager') and self._device_manager:
            return self._device_manager
        else:
            dm_type = os.environ.get('DM_TRANS', 'adb')
            if dm_type == 'adb':
                self._device_manager = mozdevice.DeviceManagerADB()
            elif dm_type == 'sut':
                host = os.environ.get('TEST_DEVICE')
                if not host:
                    raise Exception('Must specify host with SUT!')
                self._device_manager = mozdevice.DeviceManagerSUT(host=host)
            else:
                raise Exception('Unknown device manager type: %s' % dm_type)
            return self._device_manager

    def cleanUp(self):
        """Reset device state: media, lock screen, apps, volume, wifi, home."""
        # remove media
        if self.is_android_build and self.data_layer.media_files:
            for filename in self.data_layer.media_files:
                self.device_manager.removeFile('/'.join(['sdcard', filename]))

        # unlock
        self.lockscreen.unlock()

        # kill any open apps
        self.apps.kill_all()

        # disable sound completely
        self.data_layer.set_volume(0)

        if self.wifi:
            # forget any known networks
            self.data_layer.enable_wifi()
            self.data_layer.forget_all_networks()
            self.data_layer.disable_wifi()

        # reset to home screen
        self.marionette.execute_script("window.wrappedJSObject.dispatchEvent(new Event('home'));")

    def push_resource(self, filename, destination=''):
        """Push a bundled test resource file to the device sdcard."""
        local = os.path.abspath(os.path.join(os.path.dirname(__file__), 'resources', filename))
        remote = '/'.join(['sdcard', destination, filename])
        self.device_manager.mkDirs(remote)
        self.device_manager.pushFile(local, remote)

    def wait_for_element_present(self, by, locator, timeout=10):
        """Poll until the element exists; return it or raise TimeoutException."""
        timeout = float(timeout) + time.time()

        while time.time() < timeout:
            time.sleep(0.5)
            try:
                return self.marionette.find_element(by, locator)
            except NoSuchElementException:
                pass
        else:
            raise TimeoutException(
                'Element %s not found before timeout' % locator)

    def wait_for_element_not_present(self, by, locator, timeout=10):
        """Poll until the element no longer exists, or raise TimeoutException."""
        timeout = float(timeout) + time.time()

        while time.time() < timeout:
            time.sleep(0.5)
            try:
                self.marionette.find_element(by, locator)
            except NoSuchElementException:
                break
        else:
            raise TimeoutException(
                'Element %s still present after timeout' % locator)

    def wait_for_element_displayed(self, by, locator, timeout=10):
        """Poll until the element exists and is displayed, or raise."""
        timeout = float(timeout) + time.time()

        while time.time() < timeout:
            time.sleep(0.5)
            try:
                if self.marionette.find_element(by, locator).is_displayed():
                    break
            except NoSuchElementException:
                pass
        else:
            raise TimeoutException(
                'Element %s not visible before timeout' % locator)

    def wait_for_element_not_displayed(self, by, locator, timeout=10):
        """Poll until the element is hidden or gone, or raise."""
        timeout = float(timeout) + time.time()

        while time.time() < timeout:
            time.sleep(0.5)
            try:
                if not self.marionette.find_element(by, locator).is_displayed():
                    break
            except NoSuchElementException:
                break
        else:
            raise TimeoutException(
                'Element %s still visible after timeout' % locator)

    def wait_for_condition(self, method, timeout=10,
                           message="Condition timed out"):
        """Calls the method provided with the driver as an argument until the \
        return value is not False."""
        end_time = time.time() + timeout
        while time.time() < end_time:
            try:
                value = method(self.marionette)
                if value:
                    return value
            except NoSuchElementException:
                pass
            time.sleep(0.5)
        else:
            raise TimeoutException(message)

    def is_element_present(self, by, locator):
        """True if the element can be found right now, without raising."""
        try:
            self.marionette.find_element(by, locator)
            return True
        except Exception:
            # was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit
            return False

    def tearDown(self):
        if any(sys.exc_info()):
            # test has failed, gather debug
            test_class, test_name = self.marionette.test_name.split()[-1].split('.')
            xml_output = self.testvars.get('xml_output', None)
            debug_path = os.path.join(xml_output and os.path.dirname(xml_output) or 'debug', test_class)
            if not os.path.exists(debug_path):
                os.makedirs(debug_path)

            # screenshot -- open in binary mode so the PNG bytes are not
            # newline-mangled on Windows (was 'w')
            with open(os.path.join(debug_path, '%s_screenshot.png' % test_name), 'wb') as f:
                # TODO: Bug 818287 - Screenshots include data URL prefix
                screenshot = self.marionette.screenshot()[22:]
                f.write(base64.decodestring(screenshot))

        self.lockscreen = None
        self.apps = None
        self.data_layer = None
        MarionetteTestCase.tearDown(self)
class Keyboard(object):
    """Driver for the Gaia on-screen keyboard app.

    Keys are located by their data-keycode attribute; single characters are
    addressed by their ordinal, special keys by fixed codes below.
    """

    # data-keycode values of the special layout-switching keys
    _upper_case_key = '20'
    _numeric_sign_key = '-2'
    _alpha_key = '-1'
    _alt_key = '18'

    # Keyboard app
    _keyboard_frame_locator = ('css selector', '#keyboard-frame iframe')
    _button_locator = ('css selector', 'button.keyboard-key[data-keycode="%s"]')

    def __init__(self, marionette):
        self.marionette = marionette

    def _switch_to_keyboard(self):
        # Enter the keyboard iframe without stealing focus from the app
        # under test (focus=False).
        self.marionette.switch_to_frame()
        keybframe = self.marionette.find_element(*self._keyboard_frame_locator)
        self.marionette.switch_to_frame(keybframe, focus=False)

    def _key_locator(self, val):
        """Locator for a key; single characters are mapped to their ordinal."""
        if len(val) == 1:
            val = ord(val)
        return (self._button_locator[0], self._button_locator[1] % val)

    def _press(self, val):
        self.marionette.find_element(*self._key_locator(val)).click()

    def is_element_present(self, by, locator):
        """True if the element is found within a short 500ms search window."""
        try:
            self.marionette.set_search_timeout(500)
            self.marionette.find_element(by, locator)
            return True
        except Exception:
            # was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit
            return False
        finally:
            # set the search timeout to the default value
            self.marionette.set_search_timeout(10000)

    def send(self, string):
        """Type a string, switching layouts (shift/numeric/alt) as needed."""
        self._switch_to_keyboard()

        for val in string:
            if val.isalnum():
                if val.islower():
                    self._press(val)
                elif val.isupper():
                    # shift into upper case for this single key press
                    self._press(self._upper_case_key)
                    self._press(val)
                elif val.isdigit():
                    # digits live on the numeric layout
                    self._press(self._numeric_sign_key)
                    self._press(val)
                    self._press(self._alpha_key)
            else:
                # punctuation: try the numeric layout first, then its alt page
                self._press(self._numeric_sign_key)
                if self.is_element_present(*self._key_locator(val)):
                    self._press(val)
                else:
                    self._press(self._alt_key)
                    if self.is_element_present(*self._key_locator(val)):
                        self._press(val)
                    else:
                        assert False, 'Key %s not found on the keyboard' % val
                self._press(self._alpha_key)

        self.marionette.switch_to_frame()
| sergecodd/FireFox-OS | B2G/gaia/tests/python/gaiatest/gaia_test.py | Python | apache-2.0 | 16,942 |
// Unit tests for maptalks.Coordinate: accepted constructor forms and the
// arithmetic/equality/serialization operations.
describe('Coordinate', function () {

    describe('has various constructors', function () {
        it('can be created by a coordinate array', function () {
            var c = new maptalks.Coordinate([0, 0]);
            expect(c.x).to.be.eql(0);
            expect(c.y).to.be.eql(0);
        });

        it('can be created by x,y', function () {
            var c = new maptalks.Coordinate(0, 0);
            expect(c.x).to.be.eql(0);
            expect(c.y).to.be.eql(0);
        });

        it('can be created by a object with x,y', function () {
            var c = new maptalks.Coordinate({ x:0, y:0 });
            expect(c.x).to.be.eql(0);
            expect(c.y).to.be.eql(0);
        });

        it('can be created by another coordinate', function () {
            var c = new maptalks.Coordinate(new maptalks.Coordinate(0, 0));
            expect(c.x).to.be.eql(0);
            expect(c.y).to.be.eql(0);
        });

        // NaN components are rejected at construction time.
        it('throws a error with NaN', function () {
            expect(function () {
                new maptalks.Coordinate(NaN, 0);
            }).to.throwException();
        });
    });

    describe('has operations', function () {
        // add/substract/multi are non-destructive; the _-prefixed variants
        // mutate the receiver in place.
        it('can add', function () {
            var c = new maptalks.Coordinate(new maptalks.Coordinate(0, 0));
            var t = c.add(new maptalks.Coordinate(1, 1));
            expect(c.x).to.be.eql(0);
            expect(c.y).to.be.eql(0);
            expect(t.x).to.be.eql(1);
            expect(t.y).to.be.eql(1);
        });

        it('can _add which is destructive', function () {
            var c = new maptalks.Coordinate(new maptalks.Coordinate(0, 0));
            var t = c._add(new maptalks.Coordinate(1, 1));
            expect(c.x).to.be.eql(1);
            expect(c.y).to.be.eql(1);
            expect(t.x).to.be.eql(1);
            expect(t.y).to.be.eql(1);
        });

        it('can substract', function () {
            var c = new maptalks.Coordinate(new maptalks.Coordinate(0, 0));
            var t = c.substract(new maptalks.Coordinate(1, 1));
            expect(c.x).to.be.eql(0);
            expect(c.y).to.be.eql(0);
            expect(t.x).to.be.eql(-1);
            expect(t.y).to.be.eql(-1);
        });

        it('can _substract which is destructive', function () {
            var c = new maptalks.Coordinate(new maptalks.Coordinate(0, 0));
            var t = c._substract(new maptalks.Coordinate(1, 1));
            expect(c.x).to.be.eql(-1);
            expect(c.y).to.be.eql(-1);
            expect(t.x).to.be.eql(-1);
            expect(t.y).to.be.eql(-1);
        });

        it('can multi', function () {
            var c = new maptalks.Coordinate(new maptalks.Coordinate(2, 3));
            var t = c.multi(3);
            expect(c.x).to.be.eql(2);
            expect(c.y).to.be.eql(3);
            expect(t.x).to.be.eql(6);
            expect(t.y).to.be.eql(9);
        });

        it('can decide whether is equal', function () {
            var c1 = new maptalks.Coordinate(new maptalks.Coordinate(2, 3));
            var c2 = new maptalks.Coordinate(new maptalks.Coordinate(2, 3));
            expect(c1.equals(c2)).to.be.ok();
            // non-coordinates and differing values are not equal
            expect(c1.equals([])).not.to.be.ok();
            expect(c1.equals(new maptalks.Coordinate(2, 3.1))).not.to.be.ok();
        });

        it('can toArray', function () {
            var c1 = new maptalks.Coordinate(new maptalks.Coordinate(2, 3));
            expect(c1.toArray()).to.be.eql([2, 3]);
        });

        it('can toJSON', function () {
            var c = new maptalks.Coordinate(-2, -3);
            var t = c.toJSON();
            expect(t).to.be.eql({
                x : -2,
                y : -3
            });
        });
    });
});
| MapTalks/layertalks | test/geo/CoordinateSpec.js | JavaScript | apache-2.0 | 3,726 |
package uk.ac.ebi.subs.api.config;
import org.mockito.Mockito;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import uk.ac.ebi.subs.api.services.SubmittableValidationDispatcher;
/**
 * Spring test configuration (active only under the
 * "SubmittableValidationDispatcherTest" profile) that exposes a Mockito spy
 * of {@link SubmittableValidationDispatcher} so tests can verify dispatch
 * interactions.
 *
 * Created by rolando on 20/06/2017.
 */
@Profile("SubmittableValidationDispatcherTest")
@Configuration
public class SubmittableValidationDispatcherTestConfig {

    /** Spy bean: real dispatcher behaviour, but invocations are recordable. */
    @Bean
    public SubmittableValidationDispatcher submittableValidationDispatcher() {
        return Mockito.spy(SubmittableValidationDispatcher.class);
    }
}
| EMBL-EBI-SUBS/subs | subs-api/src/test/java/uk/ac/ebi/subs/api/config/SubmittableValidationDispatcherTestConfig.java | Java | apache-2.0 | 631 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudata.core.commitlog.pipe;
import java.io.IOException;
import org.cloudata.core.commitlog.UnmatchedLogException;
import org.cloudata.core.commitlog.pipe.Pipe.Context;
public interface InitStateTestIF {
    /** Clears any in-progress write state from the given pipe context. */
    public void clearWriteProcess(Context ctx) throws IOException;

    /**
     * Handles an incoming pipe connection for the context.
     * NOTE(review): the meaning of the returned int is defined by the
     * implementing state class — confirm against InitState.
     */
    public int processPipeConnection(Context ctx, PipeConnectionInfo connInfo) throws IOException, UnmatchedLogException;
}
| gruter/cloudata | src/java/org/cloudata/core/commitlog/pipe/InitStateTestIF.java | Java | apache-2.0 | 1,218 |
/*
* Copyright 2005 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.felix.jmood.compendium;
public interface ConfigAdminManagerMBean {
    /**
     * Lists the configurations selected by an LDAP filter.
     *
     * @param filter LDAP filter expression selecting configurations
     */
    public abstract String[] listConfigurations(String filter)
        throws Exception;

    /**
     * Gets the configuration associated with a persistent id.
     *
     * @param pid Persistent ID
     */
    public abstract String getConfiguration(String pid) throws Exception;

    /**
     * This method gets a configuration object related to a pid and a bundle location
     * @param pid Persistent ID
     * @param location Bundle location of the service
     */
    public abstract String getConfiguration(String pid, String location)
        throws Exception;

    /**
     * Creates a new factory configuration for the given factory pid.
     *
     * @param pid factory Persistent ID
     */
    public abstract String createFactoryConfiguration(String pid)
        throws Exception;

    /**
     * Creates a new factory configuration bound to a bundle location.
     *
     * @param pid factory Persistent ID
     * @param location Bundle location of the service
     */
    public abstract String createFactoryConfiguration(
        String pid,
        String location)
        throws Exception;

    /**
     * Delete the configurations identified by the LDAP filter
     * @param filter LDAP String representing the configurations that want to be deleted
     */
    public abstract void deleteConfigurations(String filter) throws Exception;

    /**
     * Removes a property from all the configurations selected by an LDAP expression
     *
     * @param filter LDAP filter expression selecting configurations
     * @param name name of the property to remove
     */
    public abstract void removePropertyFromConfigurations(
        String filter,
        String name)
        throws Exception;

    /**
     * Updates or adds a property to configurations selected by an LDAP expression
     * Arrays and vectors not supported
     *
     * @param filter LDAP filter expression selecting configurations
     * @param name property name
     * @param value property value, encoded as a string
     * @param type declared type of the value
     */
    public abstract void addPropertyToConfigurations(
        String filter,
        String name,
        String value,
        String type)
        throws Exception;

    /** Refreshes the manager's view of the configuration data. */
    public abstract void refresh() throws Exception;

    /** True when the underlying ConfigurationAdmin service is available. */
    public abstract boolean isAvailable() throws Exception;
} | boneman1231/org.apache.felix | trunk/jmood/src/main/java/org/apache/felix/jmood/compendium/ConfigAdminManagerMBean.java | Java | apache-2.0 | 3,231 |
import mock
import unittest
import uuid
from vnc_api import vnc_api
from svc_monitor.vrouter_instance_manager import VRouterInstanceManager
class DBObjMatcher(object):
    """
    Equality matcher for assert_called_with: accepts any mapping that carries
    the expected prefixed DB fields and a real (non-"None") vrouter value.
    """
    def __init__(self, prefix):
        self.prefix = prefix

    def _has_field(self, name, ob):
        # A field counts as present when its prefixed key exists in the object.
        return (self.prefix + name) in ob

    def __eq__(self, other):
        # Every mandatory prefixed key must be present...
        for field in ("name", "uuid", "state", "vrouter"):
            if not self._has_field(field, other):
                return False
        # ...and the instance must actually be scheduled on a vrouter.
        return other[self.prefix + "vrouter"] != "None"
class VRouterInstanceManagerTest(unittest.TestCase):
    """Unit tests for VRouterInstanceManager create/delete flows,
    with the VNC API, DB, scheduler and nova client all mocked out."""

    # Fixed identifiers shared by the fixtures below.
    VM_UUID = str(uuid.uuid4())
    VR_UUID = str(uuid.uuid4())
    DB_PREFIX = "test"
    MOCKED_VR_BACK_REF = [{
        "uuid": VR_UUID
    }]

    def setUp(self):
        # VM object returned by the mocked VNC API: reports a virtual-router
        # back-ref and a fixed uuid.
        mocked_vnc = mock.MagicMock()
        mocked_vm_ob = mock.MagicMock()
        mocked_vm_ob.get_virtual_router_back_refs\
            .return_value = self.MOCKED_VR_BACK_REF
        mocked_vm_ob.uuid = self.VM_UUID
        mocked_vnc.virtual_machine_read.return_value = mocked_vm_ob
        self.mocked_vm_ob = mocked_vm_ob

        # DB mock only needs to provide the per-VM key prefix.
        mocked_db = mock.MagicMock()
        mocked_db.get_vm_db_prefix.return_value = self.DB_PREFIX
        self.vrouter_manager = VRouterInstanceManager(
            db=mocked_db, logger=mock.MagicMock(),
            vnc_lib=mocked_vnc, vrouter_scheduler=mock.MagicMock(),
            nova_client=mock.MagicMock())

    def test_create(self):
        # Build a vrouter-instance service template with three ordered
        # interfaces (management/left/right) of type docker.
        st_obj = vnc_api.ServiceTemplate(name="test-template")
        svc_properties = vnc_api.ServiceTemplateType()
        svc_properties.set_service_virtualization_type('vrouter-instance')
        svc_properties.set_image_name('test')
        svc_properties.set_ordered_interfaces(True)
        if_list = [['management', False], ['left', False], ['right', False]]
        for itf in if_list:
            if_type = vnc_api.ServiceTemplateInterfaceType(shared_ip=itf[1])
            if_type.set_service_interface_type(itf[0])
            svc_properties.add_interface_type(if_type)
        svc_properties.set_vrouter_instance_type("docker")
        st_obj.set_service_template_properties(svc_properties)

        # Matching service instance bound to a virtual router.
        si_obj = vnc_api.ServiceInstance("test2")
        si_prop = vnc_api.ServiceInstanceType(
            left_virtual_network="left", right_virtual_network="right",
            management_virtual_network="management")
        si_prop.set_interface_list(
            [vnc_api.ServiceInstanceInterfaceType(virtual_network="left"),
             vnc_api.ServiceInstanceInterfaceType(virtual_network="right"),
             vnc_api.ServiceInstanceInterfaceType(
                 virtual_network="management")])
        si_prop.set_virtual_router_id(uuid.uuid4())
        si_obj.set_service_instance_properties(si_prop)
        si_obj.set_service_template(st_obj)
        si_obj.uuid = str(uuid.uuid4())
        st_obj.uuid = str(uuid.uuid4())

        self.vrouter_manager.create_service(st_obj, si_obj)

        # The DB row must contain the prefixed name/uuid/state/vrouter fields
        # with a real vrouter assignment (checked by DBObjMatcher).
        self.vrouter_manager.db.service_instance_insert.assert_called_with(
            si_obj.get_fq_name_str(), DBObjMatcher(self.DB_PREFIX)
        )

    def test_delete(self):
        # Deleting the service must delete the VM and detach it from its
        # virtual router.
        mocked_vr = mock.MagicMock()
        mocked_vr.uuid = self.VR_UUID

        self.vrouter_manager._vnc_lib.virtual_router_read.\
            return_value = mocked_vr

        self.vrouter_manager.delete_service(mock.MagicMock(), self.VM_UUID)

        self.vrouter_manager._vnc_lib.virtual_machine_delete\
            .assert_called_with(id=self.VM_UUID)
        mocked_vr.del_virtual_machine.assert_called_with(
            self.mocked_vm_ob)
| cloudwatt/contrail-controller | src/config/svc-monitor/svc_monitor/tests/test_vrouter_instance_manager.py | Python | apache-2.0 | 3,797 |
package org.apache.lucene.search;
/**
* Copyright 2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.Iterator;
import java.util.List;
import org.apache.lucene.analysis.SimpleAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.spans.SpanTermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.English;
import org.apache.lucene.util.LuceneTestCase;
public class TestSpanQueryFilter extends LuceneTestCase {

    public TestSpanQueryFilter(String s) {
        super(s);
    }

    /**
     * Indexes 500 docs of the form "<n> equals <n>", filters on the term for
     * doc 10, and checks that the resulting DocIdSet and per-document span
     * positions are consistent (each matching doc contributes two positions,
     * one per occurrence of the term).
     */
    public void testFilterWorks() throws Exception {
        Directory dir = new RAMDirectory();
        IndexWriter writer = new IndexWriter(dir, new SimpleAnalyzer(), true,
            IndexWriter.MaxFieldLength.LIMITED);
        for (int i = 0; i < 500; i++) {
            Document document = new Document();
            document.add(new Field("field", English.intToEnglish(i) + " equals " + English.intToEnglish(i),
                Field.Store.NO, Field.Index.ANALYZED));
            writer.addDocument(document);
        }
        writer.close();

        IndexReader reader = IndexReader.open(dir);

        SpanTermQuery query = new SpanTermQuery(new Term("field", English.intToEnglish(10).trim()));
        SpanQueryFilter filter = new SpanQueryFilter(query);
        SpanFilterResult result = filter.bitSpans(reader);
        DocIdSet docIdSet = result.getDocIdSet();
        assertTrue("docIdSet is null and it shouldn't be", docIdSet != null);
        assertContainsDocId("docIdSet doesn't contain docId 10", docIdSet, 10);
        List spans = result.getPositions();
        assertTrue("spans is null and it shouldn't be", spans != null);
        // one PositionInfo entry per matching document
        int size = getDocIdSetSize(docIdSet);
        assertTrue("spans Size: " + spans.size() + " is not: " + size, spans.size() == size);
        for (Iterator iterator = spans.iterator(); iterator.hasNext();) {
            SpanFilterResult.PositionInfo info = (SpanFilterResult.PositionInfo) iterator.next();
            assertTrue("info is null and it shouldn't be", info != null);
            //The doc should indicate the bit is on
            assertContainsDocId("docIdSet doesn't contain docId " + info.getDoc(), docIdSet, info.getDoc());
            //There should be two positions in each
            assertTrue("info.getPositions() Size: " + info.getPositions().size() + " is not: " + 2, info.getPositions().size() == 2);
        }
        reader.close();
    }

    /** Counts the documents in a DocIdSet by exhausting its iterator. */
    int getDocIdSetSize(DocIdSet docIdSet) throws Exception {
        int size = 0;
        DocIdSetIterator it = docIdSet.iterator();
        while (it.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
            size++;
        }
        return size;
    }

    /** Asserts that advancing the set's iterator to docId lands exactly on it. */
    public void assertContainsDocId(String msg, DocIdSet docIdSet, int docId) throws Exception {
        DocIdSetIterator it = docIdSet.iterator();
        assertTrue(msg, it.advance(docId) != DocIdSetIterator.NO_MORE_DOCS);
        assertTrue(msg, it.docID() == docId);
    }
}
| Photobucket/Solbase-Lucene | src/test/org/apache/lucene/search/TestSpanQueryFilter.java | Java | apache-2.0 | 3,622 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/*
* This package is based on the work done by Keiron Liddle, Aftex Software
* <keiron@aftexsw.com> to whom the Ant project is very grateful for his
* great code.
*/
package org.apache.tools.bzip2;
import java.io.IOException;
import java.io.InputStream;
/**
* An input stream that decompresses from the BZip2 format (without the file header chars) to be read as any other stream.
*
* <p>
* The decompression requires large amounts of memory. Thus you should call the {@link #close() close()} method as soon as possible, to force
* <tt>CBZip2InputStream</tt> to release the allocated memory. See {@link CBZip2OutputStream CBZip2OutputStream} for information about memory usage.
* </p>
*
* <p>
* <tt>CBZip2InputStream</tt> reads bytes from the compressed source stream via the single byte {@link java.io.InputStream#read() read()} method exclusively.
* Thus you should consider to use a buffered source stream.
* </p>
*
* <p>
* Instances of this class are not threadsafe.
* </p>
*/
public class CBZip2InputStream extends InputStream implements BZip2Constants {
/**
 * Signals a CRC mismatch. For compatibility with previous versions of this
 * class the error is only printed to stderr instead of being thrown.
 *
 * @throws IOException declared for a future stricter mode; not currently thrown
 */
private static void reportCRCError() throws IOException {
    // The clean way would be to throw an exception.
    // throw new IOException("crc error");

    // Just print a message, like the previous versions of this class did
    System.err.println("BZip2 CRC error");
}
/**
 * Rebuilds the seqToUnseq symbol map from the inUse flags and records the
 * number of symbols actually used in this block in nInUse.
 */
private void makeMaps() {
    final boolean[] inUse = this.data.inUse;
    final byte[] seqToUnseq = this.data.seqToUnseq;

    int nInUseShadow = 0;

    // Collect every byte value whose inUse flag is set, in ascending order.
    for (int i = 0; i < 256; i++) {
        if (inUse[i])
            seqToUnseq[nInUseShadow++] = (byte) i;
    }

    this.nInUse = nInUseShadow;
}
/**
 * Index of the last char in the block, so the block size == last + 1.
 */
private int last;

/**
 * Index in zptr[] of original string after sorting.
 */
private int origPtr;

/**
 * always: in the range 0 .. 9. The current block size is 100000 * this number.
 */
private int blockSize100k;

/** True if the current block was randomised (bit read in initBlock()). */
private boolean blockRandomised;

/** Bit buffer for the bitstream reader (see bsR / bsGetBit). */
private int bsBuff;

/** Number of valid bits currently held in bsBuff. */
private int bsLive;

/** Running CRC of the block being decoded (checked in endBlock()). */
private final CRC crc = new CRC();

/** Number of symbols in use in the current block (set by makeMaps()). */
private int nInUse;

/** Underlying compressed input stream; null after close(). */
private InputStream in;

/** Last decoded byte handed out by read0(); -1 before the first byte. */
private int currentChar = -1;

// Decoder state-machine values for currentState, dispatched in read0().

/** Terminal state: end of stream reached. */
private static final int EOF = 0;

/** */
private static final int START_BLOCK_STATE = 1;

/** */
private static final int RAND_PART_A_STATE = 2;

/** */
private static final int RAND_PART_B_STATE = 3;

/** */
private static final int RAND_PART_C_STATE = 4;

/** */
private static final int NO_RAND_PART_A_STATE = 5;

/** */
private static final int NO_RAND_PART_B_STATE = 6;

/** */
private static final int NO_RAND_PART_C_STATE = 7;

/** Current decoder state; one of the *_STATE constants above. */
private int currentState = START_BLOCK_STATE;

/** Block CRC as stored in the block header. */
private int storedBlockCRC;

/** Combined stream CRC as stored in the end-of-stream footer. */
private int storedCombinedCRC;

/** Block CRC computed while decoding (see endBlock()). */
private int computedBlockCRC;

/** Combined CRC computed over all decoded blocks (see endBlock()). */
private int computedCombinedCRC;

// Variables used by setup* methods exclusively
// NOTE(review): the setup* decode stages live outside this excerpt; these
// fields carry their inter-call state.

/** */
private int su_count;

/** */
private int su_ch2;

/** */
private int su_chPrev;

/** */
private int su_i2;

/** */
private int su_j2;

/** */
private int su_rNToGo;

/** */
private int su_rTPos;

/** */
private int su_tPos;

/** */
private char su_z;

/**
 * All memory intensive stuff. This field is initialized by initBlock().
 */
private CBZip2InputStream.Data data;
/**
 * Constructs a new CBZip2InputStream which decompresses bytes read from the specified stream.
 *
 * <p>
 * Although BZip2 headers are marked with the magic <tt>"Bz"</tt> this constructor expects the next byte in the stream to be the first one after the magic.
 * Thus callers have to skip the first two bytes. Otherwise this constructor will throw an exception.
 * </p>
 *
 * @param in the compressed input stream, positioned just after the "Bz" magic
 *
 * @throws IOException if the stream content is malformed, empty or null, or an I/O error occurs.
 *         (Note: a null stream is reported as an IOException by init(), not as a NullPointerException.)
 */
public CBZip2InputStream(final InputStream in) throws IOException {
    super();
    this.in = in;
    // Validates the 'h' marker and block-size digit, then reads the first block.
    init();
}
/**
 * Reads the next decompressed byte.
 *
 * @return the next byte, or -1 at end of stream
 * @throws IOException if the stream has been closed or the input is corrupt
 */
@Override
public int read() throws IOException {
    // Guard clause: reject use after close().
    if (this.in == null) {
        throw new IOException("stream closed");
    }
    return read0();
}
/**
 * Reads up to <tt>len</tt> decompressed bytes into <tt>dest</tt> starting at
 * <tt>offs</tt>.
 *
 * @return the number of bytes actually read, or -1 at end of stream
 * @throws IOException if the stream is closed or the input is corrupt
 * @throws IndexOutOfBoundsException if offs/len do not describe a valid range
 */
@Override
public int read(final byte[] dest, final int offs, final int len) throws IOException {
    if (offs < 0) {
        throw new IndexOutOfBoundsException("offs(" + offs + ") < 0.");
    }
    if (len < 0) {
        throw new IndexOutOfBoundsException("len(" + len + ") < 0.");
    }
    // Compare without computing offs + len, which could overflow int for
    // huge arguments and silently pass the old (offs + len > dest.length) check.
    if (len > dest.length - offs) {
        throw new IndexOutOfBoundsException("offs(" + offs + ") + len(" + len + ") > dest.length(" + dest.length + ").");
    }
    if (this.in == null) {
        throw new IOException("stream closed");
    }

    final int hi = offs + len; // safe: range validated above
    int destOffs = offs;
    // Pull decoded bytes one at a time until the range is filled or EOF.
    for (int b; (destOffs < hi) && ((b = read0()) >= 0);) {
        dest[destOffs++] = (byte) b;
    }

    // Per the InputStream contract, return -1 only when nothing was read.
    return (destOffs == offs) ? -1 : (destOffs - offs);
}
/**
 * Returns the current decoded byte and advances the decoder state machine
 * so the next byte is prepared.
 *
 * @return the previously decoded byte, or -1 at end of stream
 * @throws IOException if the input is malformed
 */
private int read0() throws IOException {
    final int retChar = this.currentChar;

    switch (this.currentState) {
    case EOF:
        return -1;

    case START_BLOCK_STATE:
        // setupBlock() must already have moved the machine past this state.
        throw new IllegalStateException();

    case RAND_PART_A_STATE:
        throw new IllegalStateException();

    case RAND_PART_B_STATE:
        setupRandPartB();
        break;

    case RAND_PART_C_STATE:
        setupRandPartC();
        break;

    case NO_RAND_PART_A_STATE:
        throw new IllegalStateException();

    case NO_RAND_PART_B_STATE:
        setupNoRandPartB();
        break;

    case NO_RAND_PART_C_STATE:
        setupNoRandPartC();
        break;

    default:
        throw new IllegalStateException();
    }

    return retChar;
}
/**
 * Reads and validates the stream header. The caller must already have
 * consumed the leading "Bz" magic (see class javadoc); this expects the
 * 'h' marker followed by a block-size digit '1'..'9'.
 *
 * @throws IOException if the stream is null, empty or not BZip2 formatted
 */
private void init() throws IOException {
    if (null == in) {
        throw new IOException("No InputStream");
    }
    if (in.available() == 0) {
        throw new IOException("Empty InputStream");
    }
    int magic2 = this.in.read();
    if (magic2 != 'h') {
        throw new IOException("Stream is not BZip2 formatted: expected 'h'" + " as first byte but got '" + (char) magic2 + "'");
    }

    int blockSize = this.in.read();
    if ((blockSize < '1') || (blockSize > '9')) {
        throw new IOException("Stream is not BZip2 formatted: illegal " + "blocksize " + (char) blockSize);
    }

    // '1'..'9' maps to a block size of 100000 * digit bytes.
    this.blockSize100k = blockSize - '0';

    initBlock();
    setupBlock();
}
/**
 * Reads the next block header: either the end-of-stream footer
 * (0x177245385090) or the block magic "1AY&SY" (0x314159265359), followed
 * by the stored block CRC and the randomisation bit.
 *
 * @throws IOException on a bad block header or premature end of stream
 */
private void initBlock() throws IOException {
    char magic0 = bsGetUByte();
    char magic1 = bsGetUByte();
    char magic2 = bsGetUByte();
    char magic3 = bsGetUByte();
    char magic4 = bsGetUByte();
    char magic5 = bsGetUByte();

    if (magic0 == 0x17 && magic1 == 0x72 && magic2 == 0x45 && magic3 == 0x38 && magic4 == 0x50 && magic5 == 0x90) {
        complete(); // end of file
    } else if (magic0 != 0x31 || // '1'
            magic1 != 0x41 || // 'A' (the previous comment said ')', which was wrong)
            magic2 != 0x59 || // 'Y'
            magic3 != 0x26 || // '&'
            magic4 != 0x53 || // 'S'
            magic5 != 0x59 // 'Y'
    ) {
        this.currentState = EOF;
        throw new IOException("bad block header");
    } else {
        this.storedBlockCRC = bsGetInt();

        // Single header bit: was this block randomised by the compressor?
        this.blockRandomised = bsR(1) == 1;

        /**
         * Allocate data here instead in constructor, so we do not allocate it if the input file is empty.
         */
        if (this.data == null) {
            this.data = new Data(this.blockSize100k);
        }

        // currBlockNo++;
        getAndMoveToFrontDecode();

        this.crc.initialiseCRC();
        this.currentState = START_BLOCK_STATE;
    }
}
/**
 * Finishes the current block: verifies its CRC against the stored value and
 * folds it into the combined stream CRC.
 *
 * @throws IOException from reportCRCError (declared; see that method)
 */
private void endBlock() throws IOException {
    this.computedBlockCRC = this.crc.getFinalCRC();

    // A bad CRC is considered a fatal error.
    if (this.storedBlockCRC != this.computedBlockCRC) {
        // make next blocks readable without error
        // (repair feature, not yet documented, not tested)
        this.computedCombinedCRC = (this.storedCombinedCRC << 1) | (this.storedCombinedCRC >>> 31);
        this.computedCombinedCRC ^= this.storedBlockCRC;

        reportCRCError();
    }

    // Combined CRC: rotate left by one, then xor in this block's CRC.
    this.computedCombinedCRC = (this.computedCombinedCRC << 1) | (this.computedCombinedCRC >>> 31);
    this.computedCombinedCRC ^= this.computedBlockCRC;
}
/**
 * Handles the end-of-stream footer: reads the stored combined CRC, switches
 * to the EOF state, releases the decode buffers and verifies the CRC.
 *
 * @throws IOException on premature end of stream
 */
private void complete() throws IOException {
    this.storedCombinedCRC = bsGetInt();
    this.currentState = EOF;
    // Release the large decode buffers as early as possible.
    this.data = null;

    if (this.storedCombinedCRC != this.computedCombinedCRC) {
        reportCRCError();
    }
}
/**
 * Closes the underlying stream (unless it is System.in) and frees the
 * decode buffers. Subsequent calls are no-ops.
 */
@Override
public void close() throws IOException {
    InputStream inShadow = this.in;
    if (inShadow != null) {
        try {
            if (inShadow != System.in) {
                inShadow.close();
            }
        } finally {
            // Always drop references, even if close() above threw.
            this.data = null;
            this.in = null;
        }
    }
}
/**
 * Reads the next n bits from the bitstream, MSB first.
 *
 * @param n number of bits to read (must fit the buffer refill logic, n <= 24)
 * @return the bits as the low-order bits of an int
 * @throws IOException on premature end of stream
 */
private int bsR(final int n) throws IOException {
    int bsLiveShadow = this.bsLive;
    int bsBuffShadow = this.bsBuff;

    // Refill the bit buffer one byte at a time until n bits are available.
    if (bsLiveShadow < n) {
        final InputStream inShadow = this.in;
        do {
            int thech = inShadow.read();

            if (thech < 0) {
                throw new IOException("unexpected end of stream");
            }

            bsBuffShadow = (bsBuffShadow << 8) | thech;
            bsLiveShadow += 8;
        } while (bsLiveShadow < n);

        this.bsBuff = bsBuffShadow;
    }

    this.bsLive = bsLiveShadow - n;
    // Extract the top n unread bits.
    return (bsBuffShadow >> (bsLiveShadow - n)) & ((1 << n) - 1);
}
/**
 * Reads a single bit from the bitstream.
 *
 * @return true for a 1 bit, false for a 0 bit
 * @throws IOException on premature end of stream
 */
private boolean bsGetBit() throws IOException {
    int bsLiveShadow = this.bsLive;
    int bsBuffShadow = this.bsBuff;

    // Refill the buffer with one byte when empty.
    if (bsLiveShadow < 1) {
        int thech = this.in.read();

        if (thech < 0) {
            throw new IOException("unexpected end of stream");
        }

        bsBuffShadow = (bsBuffShadow << 8) | thech;
        bsLiveShadow += 8;
        this.bsBuff = bsBuffShadow;
    }

    this.bsLive = bsLiveShadow - 1;
    return ((bsBuffShadow >> (bsLiveShadow - 1)) & 1) != 0;
}
/**
 * Reads one unsigned byte (8 bits) from the bit stream.
 *
 * @return the byte value (0..255) as a char
 * @throws IOException if the underlying stream is exhausted
 */
private char bsGetUByte() throws IOException {
    final int byteValue = bsR( 8 );
    return (char) byteValue;
}
/**
 * Reads a 32-bit big-endian integer from the bit stream, one byte at a time.
 *
 * @return the int assembled from four consecutive bytes, most significant first
 * @throws IOException if the underlying stream is exhausted
 */
private int bsGetInt() throws IOException {
    int result = 0;
    for ( int i = 0; i < 4; i++ ) {
        result = ( result << 8 ) | bsR( 8 );
    }
    return result;
}
/**
 * Builds the canonical-Huffman decoding tables for one group.
 * Called by createHuffmanDecodingTables() exclusively.
 *
 * After this call, a code of length {@code zn} with numeric value
 * {@code zvec} decodes to symbol {@code perm[zvec - base[zn]]}, and
 * {@code zvec <= limit[zn]} tells whether {@code zn} bits are enough.
 *
 * @param limit per-length largest code value (output)
 * @param base per-length decoding offset (output)
 * @param perm symbols ordered by code length then symbol index (output)
 * @param length code length per symbol (input)
 * @param minLen smallest code length in {@code length}
 * @param maxLen largest code length in {@code length}
 * @param alphaSize number of symbols in the alphabet
 */
private static void hbCreateDecodeTables( final int[] limit, final int[] base, final int[] perm, final char[] length, final int minLen, final int maxLen,
    final int alphaSize ) {
    // Sort symbols by code length (stable in symbol order) into perm.
    for ( int i = minLen, pp = 0; i <= maxLen; i++ ) {
        for ( int j = 0; j < alphaSize; j++ ) {
            if ( length[ j ] == i ) {
                perm[ pp++ ] = j;
            }
        }
    }
    for ( int i = MAX_CODE_LEN; --i > 0; ) {
        base[ i ] = 0;
        limit[ i ] = 0;
    }
    // Histogram of code lengths, shifted by one so the running sum below
    // yields the number of codes shorter than each length.
    for ( int i = 0; i < alphaSize; i++ ) {
        base[ length[ i ] + 1 ]++;
    }
    for ( int i = 1, b = base[ 0 ]; i < MAX_CODE_LEN; i++ ) {
        b += base[ i ];
        base[ i ] = b;
    }
    // Assign canonical code ranges: vec is the first code value of each
    // length; limit[i] is the last code value used at length i.
    for ( int i = minLen, vec = 0, b = base[ i ]; i <= maxLen; i++ ) {
        final int nb = base[ i + 1 ];
        vec += nb - b;
        b = nb;
        limit[ i ] = vec - 1;
        vec <<= 1;
    }
    // Rewrite base[] as the decode offset: symbolIndex = code - base[len].
    for ( int i = minLen + 1; i <= maxLen; i++ ) {
        base[ i ] = ( ( limit[ i - 1 ] + 1 ) << 1 ) - base[ i ];
    }
}
/**
 * Reads the per-block coding metadata from the stream: the 256-entry symbol
 * usage map, the MTF-encoded group selectors, and the delta-encoded Huffman
 * code lengths for each group, then builds the decoding tables.
 *
 * @throws IOException if the stream ends prematurely, or if the table sizes
 *         encoded in the stream are out of range (corrupted input)
 */
private void recvDecodingTables() throws IOException {
    final Data dataShadow = this.data;
    final boolean[] inUse = dataShadow.inUse;
    final byte[] pos = dataShadow.recvDecodingTables_pos;
    final byte[] selector = dataShadow.selector;
    final byte[] selectorMtf = dataShadow.selectorMtf;
    int inUse16 = 0;
    /* Receive the mapping table: one bit per 16-symbol bucket, then one bit
     * per symbol for each non-empty bucket. */
    for ( int i = 0; i < 16; i++ ) {
        if ( bsGetBit() ) {
            inUse16 |= 1 << i;
        }
    }
    for ( int i = 256; --i >= 0; ) {
        inUse[ i ] = false;
    }
    for ( int i = 0; i < 16; i++ ) {
        if ( ( inUse16 & ( 1 << i ) ) != 0 ) {
            final int i16 = i << 4;
            for ( int j = 0; j < 16; j++ ) {
                if ( bsGetBit() ) {
                    inUse[ i16 + j ] = true;
                }
            }
        }
    }
    makeMaps();
    final int alphaSize = this.nInUse + 2;
    /* Now the selectors */
    final int nGroups = bsR( 3 );
    final int nSelectors = bsR( 15 );
    // Validate sizes taken from the stream before using them as array
    // indices. A valid bzip2 block has 2..6 groups and at most
    // MAX_SELECTORS selectors; without this check, corrupted input would
    // surface as an ArrayIndexOutOfBoundsException instead of IOException.
    if ( ( nGroups < 2 ) || ( nGroups > N_GROUPS ) || ( nSelectors < 1 ) || ( nSelectors > MAX_SELECTORS ) ) {
        throw new IOException( "stream corrupted" );
    }
    for ( int i = 0; i < nSelectors; i++ ) {
        int j = 0;
        while ( bsGetBit() ) {
            j++;
            // The MTF index must reference one of the nGroups tables.
            if ( j >= nGroups ) {
                throw new IOException( "stream corrupted" );
            }
        }
        selectorMtf[ i ] = (byte) j;
    }
    /* Undo the MTF values for the selectors. */
    for ( int v = nGroups; --v >= 0; ) {
        pos[ v ] = (byte) v;
    }
    for ( int i = 0; i < nSelectors; i++ ) {
        int v = selectorMtf[ i ] & 0xff;
        final byte tmp = pos[ v ];
        while ( v > 0 ) {
            // nearly all times v is zero, 4 in most other cases
            pos[ v ] = pos[ v - 1 ];
            v--;
        }
        pos[ 0 ] = tmp;
        selector[ i ] = tmp;
    }
    final char[][] len = dataShadow.temp_charArray2d;
    /* Now the coding tables: each length is delta-coded from the previous
     * one (a run of 1-bits, each followed by a down/up flag). */
    for ( int t = 0; t < nGroups; t++ ) {
        int curr = bsR( 5 );
        final char[] len_t = len[ t ];
        for ( int i = 0; i < alphaSize; i++ ) {
            while ( bsGetBit() ) {
                curr += bsGetBit() ? -1 : 1;
            }
            len_t[ i ] = (char) curr;
        }
    }
    // finally create the Huffman tables
    createHuffmanDecodingTables( alphaSize, nGroups );
}
/**
 * Builds the limit/base/perm decoding tables for every Huffman group of the
 * current block from the code lengths received in recvDecodingTables().
 * Called by recvDecodingTables() exclusively.
 *
 * @param alphaSize number of symbols in the alphabet (nInUse + 2)
 * @param nGroups number of Huffman groups in this block
 */
private void createHuffmanDecodingTables( final int alphaSize, final int nGroups ) {
    final Data dataShadow = this.data;
    final char[][] len = dataShadow.temp_charArray2d;
    final int[] minLens = dataShadow.minLens;
    final int[][] limit = dataShadow.limit;
    final int[][] base = dataShadow.base;
    final int[][] perm = dataShadow.perm;
    for ( int t = 0; t < nGroups; t++ ) {
        // Find the shortest and longest code length used by this group.
        int minLen = 32;
        int maxLen = 0;
        final char[] len_t = len[ t ];
        for ( int i = alphaSize; --i >= 0; ) {
            final char lent = len_t[ i ];
            if ( lent > maxLen ) {
                maxLen = lent;
            }
            if ( lent < minLen ) {
                minLen = lent;
            }
        }
        hbCreateDecodeTables( limit[ t ], base[ t ], perm[ t ], len[ t ], minLen, maxLen, alphaSize );
        // Cached so the decoder can start each code at the minimum length.
        minLens[ t ] = minLen;
    }
}
/**
 * Decodes one complete block: reads the original BWT pointer and the coding
 * tables, then Huffman-decodes the MTF/RLE symbol stream into {@code ll8},
 * undoing move-to-front on the fly and expanding RUNA/RUNB run-length pairs.
 *
 * The Huffman bit reader (see bsR) is manually inlined in two places here
 * for speed; the buffer state is kept in the locals bsBuffShadow/bsLiveShadow
 * and written back to the fields at the end.
 *
 * @throws IOException if the stream ends prematurely or a decoded run
 *         overflows the block buffer ("block overrun")
 */
private void getAndMoveToFrontDecode() throws IOException {
    this.origPtr = bsR( 24 );
    recvDecodingTables();
    final InputStream inShadow = this.in;
    final Data dataShadow = this.data;
    final byte[] ll8 = dataShadow.ll8;
    final int[] unzftab = dataShadow.unzftab;
    final byte[] selector = dataShadow.selector;
    final byte[] seqToUnseq = dataShadow.seqToUnseq;
    final char[] yy = dataShadow.getAndMoveToFrontDecode_yy;
    final int[] minLens = dataShadow.minLens;
    final int[][] limit = dataShadow.limit;
    final int[][] base = dataShadow.base;
    final int[][] perm = dataShadow.perm;
    final int limitLast = this.blockSize100k * 100000;
    /*
     * Setting up the unzftab entries here is not strictly necessary, but it does save having to do it later in a separate pass, and so saves a block's
     * worth of cache misses.
     */
    for ( int i = 256; --i >= 0; ) {
        yy[ i ] = (char) i; // initial MTF list: identity
        unzftab[ i ] = 0;
    }
    int groupNo = 0;
    int groupPos = G_SIZE - 1; // symbols remaining before switching to the next selector
    final int eob = this.nInUse + 1; // end-of-block symbol
    int nextSym = getAndMoveToFrontDecode0( 0 );
    int bsBuffShadow = this.bsBuff;
    int bsLiveShadow = this.bsLive;
    int lastShadow = -1; // index of the last byte written to ll8
    // Cache the decode tables of the currently selected Huffman group.
    int zt = selector[ groupNo ] & 0xff;
    int[] base_zt = base[ zt ];
    int[] limit_zt = limit[ zt ];
    int[] perm_zt = perm[ zt ];
    int minLens_zt = minLens[ zt ];
    while ( nextSym != eob ) {
        if ( ( nextSym == RUNA ) || ( nextSym == RUNB ) ) {
            // Run-length decoding: RUNA/RUNB digits form the run length s
            // in a bijective base-2 encoding (RUNA adds n, RUNB adds 2n).
            int s = -1;
            for ( int n = 1; true; n <<= 1 ) {
                if ( nextSym == RUNA ) {
                    s += n;
                } else if ( nextSym == RUNB ) {
                    s += n << 1;
                } else {
                    break;
                }
                // Advance to the next symbol, switching group tables every
                // G_SIZE symbols according to the selector sequence.
                if ( groupPos == 0 ) {
                    groupPos = G_SIZE - 1;
                    zt = selector[ ++groupNo ] & 0xff;
                    base_zt = base[ zt ];
                    limit_zt = limit[ zt ];
                    perm_zt = perm[ zt ];
                    minLens_zt = minLens[ zt ];
                } else {
                    groupPos--;
                }
                int zn = minLens_zt;
                // Inlined:
                // int zvec = bsR(zn);
                while ( bsLiveShadow < zn ) {
                    final int thech = inShadow.read();
                    if ( thech >= 0 ) {
                        bsBuffShadow = ( bsBuffShadow << 8 ) | thech;
                        bsLiveShadow += 8;
                        continue;
                    } else {
                        throw new IOException( "unexpected end of stream" );
                    }
                }
                int zvec = ( bsBuffShadow >> ( bsLiveShadow - zn ) ) & ( ( 1 << zn ) - 1 );
                bsLiveShadow -= zn;
                // Extend the code one bit at a time until it falls inside
                // the canonical range for its length (see hbCreateDecodeTables).
                while ( zvec > limit_zt[ zn ] ) {
                    zn++;
                    while ( bsLiveShadow < 1 ) {
                        final int thech = inShadow.read();
                        if ( thech >= 0 ) {
                            bsBuffShadow = ( bsBuffShadow << 8 ) | thech;
                            bsLiveShadow += 8;
                            continue;
                        } else {
                            throw new IOException( "unexpected end of stream" );
                        }
                    }
                    bsLiveShadow--;
                    zvec = ( zvec << 1 ) | ( ( bsBuffShadow >> bsLiveShadow ) & 1 );
                }
                nextSym = perm_zt[ zvec - base_zt[ zn ] ];
            }
            // Emit the run: s+1 copies of the byte currently at the front
            // of the MTF list.
            final byte ch = seqToUnseq[ yy[ 0 ] ];
            unzftab[ ch & 0xff ] += s + 1;
            while ( s-- >= 0 ) {
                ll8[ ++lastShadow ] = ch;
            }
            if ( lastShadow >= limitLast ) {
                throw new IOException( "block overrun" );
            }
        } else {
            if ( ++lastShadow >= limitLast ) {
                throw new IOException( "block overrun" );
            }
            // Literal symbol: move the referenced byte to the front of the
            // MTF list and emit it.
            final char tmp = yy[ nextSym - 1 ];
            unzftab[ seqToUnseq[ tmp ] & 0xff ]++;
            ll8[ lastShadow ] = seqToUnseq[ tmp ];
            /*
             * This loop is hammered during decompression, hence avoid native method call overhead of System.arraycopy for very small ranges to copy.
             */
            if ( nextSym <= 16 ) {
                for ( int j = nextSym - 1; j > 0; ) {
                    yy[ j ] = yy[ --j ];
                }
            } else {
                System.arraycopy( yy, 0, yy, 1, nextSym - 1 );
            }
            yy[ 0 ] = tmp;
            // Fetch the next symbol (same inlined Huffman decode as above).
            if ( groupPos == 0 ) {
                groupPos = G_SIZE - 1;
                zt = selector[ ++groupNo ] & 0xff;
                base_zt = base[ zt ];
                limit_zt = limit[ zt ];
                perm_zt = perm[ zt ];
                minLens_zt = minLens[ zt ];
            } else {
                groupPos--;
            }
            int zn = minLens_zt;
            // Inlined:
            // int zvec = bsR(zn);
            while ( bsLiveShadow < zn ) {
                final int thech = inShadow.read();
                if ( thech >= 0 ) {
                    bsBuffShadow = ( bsBuffShadow << 8 ) | thech;
                    bsLiveShadow += 8;
                    continue;
                } else {
                    throw new IOException( "unexpected end of stream" );
                }
            }
            int zvec = ( bsBuffShadow >> ( bsLiveShadow - zn ) ) & ( ( 1 << zn ) - 1 );
            bsLiveShadow -= zn;
            while ( zvec > limit_zt[ zn ] ) {
                zn++;
                while ( bsLiveShadow < 1 ) {
                    final int thech = inShadow.read();
                    if ( thech >= 0 ) {
                        bsBuffShadow = ( bsBuffShadow << 8 ) | thech;
                        bsLiveShadow += 8;
                        continue;
                    } else {
                        throw new IOException( "unexpected end of stream" );
                    }
                }
                bsLiveShadow--;
                zvec = ( zvec << 1 ) | ( ( bsBuffShadow >> bsLiveShadow ) & 1 );
            }
            nextSym = perm_zt[ zvec - base_zt[ zn ] ];
        }
    }
    this.last = lastShadow;
    // Write the bit-buffer shadows back to the fields.
    this.bsLive = bsLiveShadow;
    this.bsBuff = bsBuffShadow;
}
/**
 * Decodes the very first Huffman symbol of a block, using the decode tables
 * of the given group. Same canonical-Huffman walk as the inlined code in
 * getAndMoveToFrontDecode(), but starting from the field-held bit buffer.
 *
 * @param groupNo index of the selector/group to use (always 0 at block start)
 * @return the decoded symbol
 * @throws IOException if the underlying stream ends prematurely
 */
private int getAndMoveToFrontDecode0( final int groupNo ) throws IOException {
    final InputStream inShadow = this.in;
    final Data dataShadow = this.data;
    final int zt = dataShadow.selector[ groupNo ] & 0xff;
    final int[] limit_zt = dataShadow.limit[ zt ];
    int zn = dataShadow.minLens[ zt ];
    // Start with the minimum code length for this group.
    int zvec = bsR( zn );
    int bsLiveShadow = this.bsLive;
    int bsBuffShadow = this.bsBuff;
    // Extend the code bit by bit until it is within range for its length.
    while ( zvec > limit_zt[ zn ] ) {
        zn++;
        while ( bsLiveShadow < 1 ) {
            final int thech = inShadow.read();
            if ( thech >= 0 ) {
                bsBuffShadow = ( bsBuffShadow << 8 ) | thech;
                bsLiveShadow += 8;
                continue;
            } else {
                throw new IOException( "unexpected end of stream" );
            }
        }
        bsLiveShadow--;
        zvec = ( zvec << 1 ) | ( ( bsBuffShadow >> bsLiveShadow ) & 1 );
    }
    this.bsLive = bsLiveShadow;
    this.bsBuff = bsBuffShadow;
    return dataShadow.perm[ zt ][ zvec - dataShadow.base[ zt ][ zn ] ];
}
/**
 * Prepares the inverse Burrows-Wheeler transform for the decoded block:
 * builds the cumulative frequency table and the transformation vector
 * {@code tt}, then hands control to the (possibly randomised) output
 * state machine.
 *
 * @throws IOException if the stored BWT origin pointer is out of range
 *         ("stream corrupted")
 */
private void setupBlock() throws IOException {
    if ( this.data == null ) {
        return;
    }
    final int[] cftab = this.data.cftab;
    final int[] tt = this.data.initTT( this.last + 1 );
    final byte[] ll8 = this.data.ll8;
    // Turn the byte-frequency table into cumulative counts:
    // cftab[b] = number of bytes < b in the block.
    cftab[ 0 ] = 0;
    System.arraycopy( this.data.unzftab, 0, cftab, 1, 256 );
    for ( int i = 1, c = cftab[ 0 ]; i <= 256; i++ ) {
        c += cftab[ i ];
        cftab[ i ] = c;
    }
    // Build the BWT transformation vector: tt[k] is the position in ll8
    // of the k-th occurrence when bytes are taken in sorted order.
    for ( int i = 0, lastShadow = this.last; i <= lastShadow; i++ ) {
        tt[ cftab[ ll8[ i ] & 0xff ]++ ] = i;
    }
    if ( ( this.origPtr < 0 ) || ( this.origPtr >= tt.length ) ) {
        throw new IOException( "stream corrupted" );
    }
    this.su_tPos = tt[ this.origPtr ];
    this.su_count = 0;
    this.su_i2 = 0;
    this.su_ch2 = 256; /* not a char and not EOF */
    if ( this.blockRandomised ) {
        this.su_rNToGo = 0;
        this.su_rTPos = 0;
        setupRandPartA();
    } else {
        setupNoRandPartA();
    }
}
/**
 * State A of the output machine for *randomised* blocks: emits the next
 * BWT-reconstructed byte, undoing the legacy bzip2 randomisation by XORing
 * with 1 whenever the rNums-driven counter hits 1. Moves to RAND_PART_B,
 * or finishes the block and starts the next one when the block is exhausted.
 *
 * @throws IOException propagated from endBlock()/initBlock()/setupBlock()
 */
private void setupRandPartA() throws IOException {
    if ( this.su_i2 <= this.last ) {
        this.su_chPrev = this.su_ch2;
        int su_ch2Shadow = this.data.ll8[ this.su_tPos ] & 0xff;
        // Follow the transformation vector to the next output position.
        this.su_tPos = this.data.tt[ this.su_tPos ];
        if ( this.su_rNToGo == 0 ) {
            // Reload the de-randomisation counter from the rNums table.
            this.su_rNToGo = BZip2Constants.rNums[ this.su_rTPos ] - 1;
            if ( ++this.su_rTPos == 512 ) {
                this.su_rTPos = 0;
            }
        } else {
            this.su_rNToGo--;
        }
        // XOR with 1 exactly when the counter reaches 1 (randomisation undo).
        this.su_ch2 = su_ch2Shadow ^= ( this.su_rNToGo == 1 ) ? 1 : 0;
        this.su_i2++;
        this.currentChar = su_ch2Shadow;
        this.currentState = RAND_PART_B_STATE;
        this.crc.updateCRC( su_ch2Shadow );
    } else {
        endBlock();
        initBlock();
        setupBlock();
    }
}
/**
 * State A of the output machine for non-randomised blocks: emits the next
 * BWT-reconstructed byte and moves to NO_RAND_PART_B, or finishes the block
 * and starts the next one when the block is exhausted.
 *
 * @throws IOException propagated from endBlock()/initBlock()/setupBlock()
 */
private void setupNoRandPartA() throws IOException {
    if ( this.su_i2 <= this.last ) {
        this.su_chPrev = this.su_ch2;
        int su_ch2Shadow = this.data.ll8[ this.su_tPos ] & 0xff;
        this.su_ch2 = su_ch2Shadow;
        // Follow the transformation vector to the next output position.
        this.su_tPos = this.data.tt[ this.su_tPos ];
        this.su_i2++;
        this.currentChar = su_ch2Shadow;
        this.currentState = NO_RAND_PART_B_STATE;
        this.crc.updateCRC( su_ch2Shadow );
    } else {
        this.currentState = NO_RAND_PART_A_STATE;
        endBlock();
        initBlock();
        setupBlock();
    }
}
/**
 * State B (randomised): detects the final run-length-encoding layer. After
 * four identical bytes in a row, the next byte is a repeat count (su_z,
 * also subject to randomisation undo) handled in RAND_PART_C; otherwise
 * control returns to RAND_PART_A.
 *
 * @throws IOException propagated from the continuation states
 */
private void setupRandPartB() throws IOException {
    if ( this.su_ch2 != this.su_chPrev ) {
        // Run broken: restart run counting at 1.
        this.currentState = RAND_PART_A_STATE;
        this.su_count = 1;
        setupRandPartA();
    } else if ( ++this.su_count >= 4 ) {
        // Four equal bytes seen: next value is the RLE repeat count.
        this.su_z = (char) ( this.data.ll8[ this.su_tPos ] & 0xff );
        this.su_tPos = this.data.tt[ this.su_tPos ];
        if ( this.su_rNToGo == 0 ) {
            this.su_rNToGo = BZip2Constants.rNums[ this.su_rTPos ] - 1;
            if ( ++this.su_rTPos == 512 ) {
                this.su_rTPos = 0;
            }
        } else {
            this.su_rNToGo--;
        }
        this.su_j2 = 0;
        this.currentState = RAND_PART_C_STATE;
        // The count byte itself is de-randomised the same way as data bytes.
        if ( this.su_rNToGo == 1 ) {
            this.su_z ^= 1;
        }
        setupRandPartC();
    } else {
        this.currentState = RAND_PART_A_STATE;
        setupRandPartA();
    }
}
/**
 * State C (randomised): replays the current byte su_z times (the RLE repeat
 * count read in RAND_PART_B), then resumes normal output via RAND_PART_A.
 *
 * @throws IOException propagated from setupRandPartA()
 */
private void setupRandPartC() throws IOException {
    if ( this.su_j2 < this.su_z ) {
        this.currentChar = this.su_ch2;
        this.crc.updateCRC( this.su_ch2 );
        this.su_j2++;
    } else {
        // Repeat exhausted: reset run counting and continue with state A.
        this.currentState = RAND_PART_A_STATE;
        this.su_i2++;
        this.su_count = 0;
        setupRandPartA();
    }
}
/**
 * State B (non-randomised): detects the final run-length-encoding layer.
 * After four identical bytes in a row, the next byte is a repeat count
 * handled in NO_RAND_PART_C; otherwise control returns to NO_RAND_PART_A.
 *
 * @throws IOException propagated from the continuation states
 */
private void setupNoRandPartB() throws IOException {
    if ( this.su_ch2 != this.su_chPrev ) {
        // Run broken: restart run counting at 1.
        this.su_count = 1;
        setupNoRandPartA();
    } else if ( ++this.su_count >= 4 ) {
        // Four equal bytes seen: next value is the RLE repeat count.
        this.su_z = (char) ( this.data.ll8[ this.su_tPos ] & 0xff );
        this.su_tPos = this.data.tt[ this.su_tPos ];
        this.su_j2 = 0;
        setupNoRandPartC();
    } else {
        setupNoRandPartA();
    }
}
/**
 * State C (non-randomised): replays the current byte su_z times (the RLE
 * repeat count read in NO_RAND_PART_B), then resumes normal output via
 * NO_RAND_PART_A.
 *
 * @throws IOException propagated from setupNoRandPartA()
 */
private void setupNoRandPartC() throws IOException {
    if ( this.su_j2 < this.su_z ) {
        int su_ch2Shadow = this.su_ch2;
        this.currentChar = su_ch2Shadow;
        this.crc.updateCRC( su_ch2Shadow );
        this.su_j2++;
        this.currentState = NO_RAND_PART_C_STATE;
    } else {
        // Repeat exhausted: reset run counting and continue with state A.
        this.su_i2++;
        this.su_count = 0;
        setupNoRandPartA();
    }
}
/**
 * Per-stream scratch buffers for decompression, grouped in one object so
 * they can be allocated together and released (set to null) at EOF/close.
 * The byte counts in the comments assume the maximum block size (900k).
 */
private static final class Data extends Object {
    /** Which of the 256 byte values occur in the current block. */
    // (with blockSize 900k)
    final boolean[] inUse = new boolean[ 256 ]; // 256 byte
    /** Maps dense symbol indices back to the actual byte values. */
    final byte[] seqToUnseq = new byte[ 256 ]; // 256 byte
    /** Huffman group selector for each run of G_SIZE symbols. */
    final byte[] selector = new byte[ MAX_SELECTORS ]; // 18002 byte
    /** Selectors as read from the stream, before MTF decoding. */
    final byte[] selectorMtf = new byte[ MAX_SELECTORS ]; // 18002 byte
    /**
     * Freq table collected to save a pass over the data during decompression.
     */
    final int[] unzftab = new int[ 256 ]; // 1024 byte
    /** Per-group canonical-Huffman limit tables (see hbCreateDecodeTables). */
    final int[][] limit = new int[ N_GROUPS ][ MAX_ALPHA_SIZE ]; // 6192 byte
    /** Per-group canonical-Huffman base tables. */
    final int[][] base = new int[ N_GROUPS ][ MAX_ALPHA_SIZE ]; // 6192 byte
    /** Per-group symbol permutation tables (code index -> symbol). */
    final int[][] perm = new int[ N_GROUPS ][ MAX_ALPHA_SIZE ]; // 6192 byte
    /** Minimum code length of each Huffman group. */
    final int[] minLens = new int[ N_GROUPS ]; // 24 byte
    /** Cumulative byte-frequency table for the inverse BWT (257 entries). */
    final int[] cftab = new int[ 257 ]; // 1028 byte
    /** Move-to-front list used while decoding the symbol stream. */
    final char[] getAndMoveToFrontDecode_yy = new char[ 256 ]; // 512 byte
    /** Per-group Huffman code lengths received from the stream. */
    final char[][] temp_charArray2d = new char[ N_GROUPS ][ MAX_ALPHA_SIZE ]; // 3096 byte
    /** Scratch position list for undoing the selector MTF encoding. */
    final byte[] recvDecodingTables_pos = new byte[ N_GROUPS ]; // 6 byte
    // ---------------
    // 60798 byte
    /** Inverse-BWT transformation vector; lazily sized via initTT(). */
    int[] tt; // 3600000 byte
    /** Decoded block bytes (pre inverse BWT). */
    byte[] ll8; // 900000 byte
    // ---------------
    // 4560782 byte
    // ===============
    /**
     * Creates a new {@link Data}.
     *
     * @param blockSize100k block size in units of 100000 bytes (1..9)
     */
    Data( int blockSize100k ) {
        super();
        this.ll8 = new byte[ blockSize100k * BZip2Constants.baseBlockSize ];
    }
    /**
     * Initializes the {@link #tt} array.
     *
     * This method is called when the required length of the array is known. I don't initialize it at construction time to avoid unneccessary memory
     * allocation when compressing small files.
     *
     * @param length required capacity (decoded block length + 1)
     * @return the (possibly newly allocated) tt array, at least length long
     */
    final int[] initTT( int length ) {
        int[] ttShadow = this.tt;
        // tt.length should always be >= length, but theoretically
        // it can happen, if the compressor mixed small and large
        // blocks. Normally only the last block will be smaller
        // than others.
        if ( ( ttShadow == null ) || ( ttShadow.length < length ) ) {
            this.tt = ttShadow = new int[ length ];
        }
        return ttShadow;
    }
}
}
| icza/scelight | src-app-libs/org/apache/tools/bzip2/CBZip2InputStream.java | Java | apache-2.0 | 31,065 |