gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright (c) 2011-2014, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.factory.filter.kernel; import boofcv.alg.filter.kernel.KernelMath; import boofcv.core.image.GeneralizedImageOps; import boofcv.struct.convolve.*; import boofcv.struct.image.ImageSingleBand; import org.ddogleg.stats.UtilGaussian; /** * @author Peter Abeles */ // todo add size heuristic for derivative that is different from regular kernel public class FactoryKernelGaussian { // when converting to integer kernels what is the minimum size of the an element relative to the maximum public static float MIN_FRAC = 1.0f/100f; public static double MIN_FRACD = 1.0/100; /** * Creates a Gaussian kernel of the specified type. * * @param kernelType The type of kernel which is to be created. * @param sigma The distributions stdev. If <= 0 then the sigma will be computed from the radius. * @param radius Number of pixels in the kernel's radius. If <= 0 then the sigma will be computed from the sigma. * @return The computed Gaussian kernel. 
*/ public static <T extends KernelBase> T gaussian(Class<T> kernelType, double sigma, int radius ) { if (Kernel1D_F32.class == kernelType) { return gaussian(1,true, 32, sigma,radius); } else if (Kernel1D_I32.class == kernelType) { return gaussian(1,false, 32, sigma,radius); } else if (Kernel2D_I32.class == kernelType) { return gaussian(2,false, 32, sigma,radius); } else if (Kernel2D_F32.class == kernelType) { return gaussian(2,true, 32, sigma,radius); } else { throw new RuntimeException("Unknown kernel type"); } } /** * Creates a 1D Gaussian kernel of the specified type. * * @param imageType The type of image which is to be convolved by this kernel. * @param sigma The distributions stdev. If <= 0 then the sigma will be computed from the radius. * @param radius Number of pixels in the kernel's radius. If <= 0 then the sigma will be computed from the sigma. * @return The computed Gaussian kernel. */ public static <T extends ImageSingleBand, K extends Kernel1D> K gaussian1D(Class<T> imageType, double sigma, int radius ) { boolean isFloat = GeneralizedImageOps.isFloatingPoint(imageType); int numBits = GeneralizedImageOps.getNumBits(imageType); if( numBits < 32 ) numBits = 32; return gaussian(1,isFloat, numBits, sigma,radius); } /** * Creates a 2D Gaussian kernel of the specified type. * * @param imageType The type of image which is to be convolved by this kernel. * @param sigma The distributions stdev. If <= 0 then the sigma will be computed from the radius. * @param radius Number of pixels in the kernel's radius. If <= 0 then the sigma will be computed from the sigma. * @return The computed Gaussian kernel. 
*/ public static <T extends ImageSingleBand, K extends Kernel2D> K gaussian2D(Class<T> imageType, double sigma, int radius ) { boolean isFloat = GeneralizedImageOps.isFloatingPoint(imageType); int numBits = Math.max(32, GeneralizedImageOps.getNumBits(imageType)); return gaussian(2,isFloat, numBits, sigma,radius); } /** * Creates a Gaussian kernel with the specified properties. * * @param DOF 1 for 1D kernel and 2 for 2D kernel. * @param isFloat True for F32 kernel and false for I32. * @param numBits Number of bits in each data element. 32 or 64 * @param sigma The distributions stdev. If <= 0 then the sigma will be computed from the radius. * @param radius Number of pixels in the kernel's radius. If <= 0 then the sigma will be computed from the sigma. @return The computed Gaussian kernel. */ public static <T extends KernelBase> T gaussian(int DOF, boolean isFloat, int numBits, double sigma, int radius) { if( radius <= 0 ) radius = FactoryKernelGaussian.radiusForSigma(sigma,0); else if( sigma <= 0 ) sigma = FactoryKernelGaussian.sigmaForRadius(radius,0); if( DOF == 2 ) { if( numBits == 32 ) { Kernel2D_F32 k = gaussian2D_F32(sigma,radius, isFloat); if( isFloat ) return (T)k; return (T) KernelMath.convert(k,MIN_FRAC); } else if( numBits == 64 ) { Kernel2D_F64 k = gaussian2D_F64(sigma,radius, isFloat); if( isFloat ) return (T)k; else throw new IllegalArgumentException("64bit int kernels supported"); } else { throw new IllegalArgumentException("Bits must be 32 or 64"); } } else if( DOF == 1 ) { if( numBits == 32 ) { Kernel1D_F32 k = gaussian1D_F32(sigma,radius, isFloat); if( isFloat ) return (T)k; return (T)KernelMath.convert(k,MIN_FRAC); } else if( numBits == 64 ) { Kernel1D_F64 k = gaussian1D_F64(sigma, radius, isFloat); if( isFloat ) return (T)k; return (T)KernelMath.convert(k,MIN_FRACD); } else { throw new IllegalArgumentException("Bits must be 32 or 64 not "+numBits); } } else { throw new IllegalArgumentException("DOF not supported"); } } public static <T extends 
ImageSingleBand, K extends Kernel1D> K derivativeI( Class<T> imageType , int order, double sigma, int radius ) { boolean isFloat = GeneralizedImageOps.isFloatingPoint(imageType); return derivative(order,isFloat,sigma,radius); } public static <T extends Kernel1D> T derivativeK( Class<T> kernelType , int order, double sigma, int radius ) { if (Kernel1D_F32.class == kernelType) return derivative(order,true,sigma,radius); else return derivative(order,false,sigma,radius); } /** * Creates a 1D Gaussian kernel with the specified properties. * * @param order The order of the gaussian derivative. * @param isFloat True for F32 kernel and false for I32. * @param sigma The distributions stdev. If <= 0 then the sigma will be computed from the radius. * @param radius Number of pixels in the kernel's radius. If <= 0 then the sigma will be computed from the sigma. * @return The computed Gaussian kernel. */ public static <T extends Kernel1D> T derivative( int order, boolean isFloat, double sigma, int radius ) { // zero order is a regular gaussian if( order == 0 ) { return gaussian(1,isFloat, 32, sigma,radius); } if( radius <= 0 ) radius = FactoryKernelGaussian.radiusForSigma(sigma,order); else if( sigma <= 0 ) { sigma = FactoryKernelGaussian.sigmaForRadius(radius,order); } Kernel1D_F32 k = derivative1D_F32(order,sigma,radius, true); if( isFloat ) return (T)k; return (T)KernelMath.convert(k,MIN_FRAC); } /** * <p> * Creates a floating point Gaussian kernel with the sigma and radius. * If normalized is set to true then the elements in the kernel will sum up to one. * </p> * * @param sigma Distributions standard deviation. * @param radius Kernel's radius. * @param normalize If the kernel should be normalized to one or not. 
*/ protected static Kernel1D_F32 gaussian1D_F32(double sigma, int radius, boolean normalize) { Kernel1D_F32 ret = new Kernel1D_F32(radius * 2 + 1); float[] gaussian = ret.data; int index = 0; for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.computePDF(0, sigma, i); } if (normalize) { KernelMath.normalizeSumToOne(ret); } return ret; } protected static Kernel1D_F64 gaussian1D_F64(double sigma, int radius, boolean normalize) { Kernel1D_F64 ret = new Kernel1D_F64(radius * 2 + 1); double[] gaussian = ret.data; int index = 0; for (int i = radius; i >= -radius; i--) { gaussian[index++] = UtilGaussian.computePDF(0, sigma, i); } if (normalize) { KernelMath.normalizeSumToOne(ret); } return ret; } /** * Creates a kernel for a 2D convolution. This should only be used for validation purposes. * * @param sigma Distributions standard deviation. * @param radius Kernel's radius. * @param normalize If the kernel should be normalized to one or not. */ public static Kernel2D_F32 gaussian2D_F32(double sigma, int radius, boolean normalize) { Kernel1D_F32 kernel1D = gaussian1D_F32(sigma,radius,false); Kernel2D_F32 ret = KernelMath.convolve(kernel1D,kernel1D); if (normalize) { KernelMath.normalizeSumToOne(ret); } return ret; } public static Kernel2D_F64 gaussian2D_F64(double sigma, int radius, boolean normalize) { Kernel1D_F64 kernel1D = gaussian1D_F64(sigma,radius,false); Kernel2D_F64 ret = KernelMath.convolve(kernel1D,kernel1D); if (normalize) { KernelMath.normalizeSumToOne(ret); } return ret; } /** * Computes the derivative of a Gaussian kernel. * * @param sigma Distributions standard deviation. * @param radius Kernel's radius. 
* @param normalize * @return The derivative of the gaussian */ protected static Kernel1D_F32 derivative1D_F32(int order, double sigma, int radius, boolean normalize) { Kernel1D_F32 ret = new Kernel1D_F32(radius * 2 + 1); float[] gaussian = ret.data; int index = 0; switch( order ) { case 1: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative1(0, sigma, i); } break; case 2: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative2(0, sigma, i); } break; case 3: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative3(0, sigma, i); } break; case 4: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative4(0, sigma, i); } break; default: throw new IllegalArgumentException("Only derivatives of order 1 to 4 are supported"); } // multiply by the same factor as the gaussian would be normalized by // otherwise it will effective change the intensity of the input image if( normalize ) { double sum = 0; for (int i = radius; i >= -radius; i--) { sum += UtilGaussian.computePDF(0, sigma, i); } for (int i = 0; i < gaussian.length; i++) { gaussian[i] /= sum; } } return ret; } /** * <p> * Given the the radius of a Gaussian distribution and the order of its derivative, choose an appropriate sigma. * </p> * @param radius Kernel's radius * @param order Order of the derivative. 0 original distribution * @return Default sigma */ public static double sigmaForRadius(double radius , int order ) { if( radius <= 0 ) throw new IllegalArgumentException("Radius must be > 0"); return (radius* 2.0 + 1.0 ) / (5.0+0.8*order); } /** * <p> * Given the the sigma of a Gaussian distribution and the order of its derivative, choose an appropriate radius. * </p> * * @param sigma Distribution's sigma * @param order Order of the derivative. 
0 original distribution * @return Default sigma */ public static int radiusForSigma(double sigma, int order ) { if( sigma <= 0 ) throw new IllegalArgumentException("Sigma must be > 0"); return (int)Math.ceil((((5+0.8*order)*sigma)-1)/2); } /** * Create a gaussian kernel based on its width. Supports kernels of even or odd widths * . * @param sigma Sigma of the Gaussian distribution. If <= 0 then the width will be used. * @param width How wide the kernel is. Can be even or odd. * @return Gaussian convolution kernel. */ public static Kernel2D_F64 gaussianWidth( double sigma , int width ) { if( sigma <= 0 ) sigma = sigmaForRadius(width/2,0); else if( width <= 0 ) throw new IllegalArgumentException("Must specify the width since it doesn't know if it should be even or odd"); if( width % 2 == 0 ) { int r = width/2-1; Kernel2D_F64 ret = new Kernel2D_F64(width); double sum = 0; for( int y = 0; y < width; y++ ) { double dy = y <= r ? Math.abs(y-r)+0.5 : Math.abs(y-r-1)+0.5; for( int x = 0; x < width; x++ ) { double dx = x <= r ? Math.abs(x-r)+0.5 : Math.abs(x-r-1)+0.5; double d = Math.sqrt(dx*dx + dy*dy); double val = UtilGaussian.computePDF(0,sigma,d); ret.set(x,y,val); sum += val; } } for( int i = 0; i < ret.data.length; i++ ) { ret.data[i] /= sum; } return ret; } else { return gaussian2D_F64(sigma,width/2,true); } } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.sql.parser;

import com.google.common.io.Resources;
import io.prestosql.sql.SqlFormatter;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.Statement;
import org.testng.annotations.Test;

import java.io.IOException;
import java.util.Optional;
import java.util.regex.Matcher;

import static com.google.common.base.Strings.repeat;
import static io.prestosql.sql.parser.ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE;
import static io.prestosql.sql.testing.TreeAssertions.assertFormattedSql;
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;

/**
 * Round-trip smoke tests for the SQL parser: each statement is parsed,
 * formatted with {@link SqlFormatter}, and re-parsed to verify the two agree.
 */
public class TestStatementBuilder
{
    private static final SqlParser SQL_PARSER = new SqlParser();

    @Test
    public void testStatementBuilder()
    {
        printStatement("select * from foo");
        printStatement("explain select * from foo");
        printStatement("explain (type distributed, format graphviz) select * from foo");

        printStatement("select * from foo /* end */");
        printStatement("/* start */ select * from foo");
        printStatement("/* start */ select * /* middle */ from foo /* end */");
        printStatement("-- start\nselect * -- junk\n-- hi\nfrom foo -- done");

        printStatement("select * from foo a (x, y, z)");

        printStatement("select *, 123, * from foo");

        printStatement("select show from foo");
        printStatement("select extract(day from x), extract(dow from x) from y");

        printStatement("select 1 + 13 || '15' from foo");

        printStatement("select x is distinct from y from foo where a is not distinct from b");

        printStatement("select x[1] from my_table");
        printStatement("select x[1][2] from my_table");
        printStatement("select x[cast(10 * sin(x) as bigint)] from my_table");

        printStatement("select * from unnest(t.my_array)");
        printStatement("select * from unnest(array[1, 2, 3])");
        printStatement("select x from unnest(array[1, 2, 3]) t(x)");
        printStatement("select * from users cross join unnest(friends)");
        printStatement("select id, friend from users cross join unnest(friends) t(friend)");
        printStatement("select * from unnest(t.my_array) with ordinality");
        printStatement("select * from unnest(array[1, 2, 3]) with ordinality");
        printStatement("select x from unnest(array[1, 2, 3]) with ordinality t(x)");
        printStatement("select * from users cross join unnest(friends) with ordinality");
        printStatement("select id, friend from users cross join unnest(friends) with ordinality t(friend)");

        printStatement("select count(*) x from src group by k, v");
        printStatement("select count(*) x from src group by cube (k, v)");
        printStatement("select count(*) x from src group by rollup (k, v)");
        printStatement("select count(*) x from src group by grouping sets ((k, v))");
        printStatement("select count(*) x from src group by grouping sets ((k, v), (v))");
        printStatement("select count(*) x from src group by grouping sets (k, v, k)");

        printStatement("select count(*) filter (where x > 4) y from t");
        printStatement("select sum(x) filter (where x > 4) y from t");
        printStatement("select sum(x) filter (where x > 4) y, sum(x) filter (where x < 2) z from t");
        printStatement("select sum(distinct x) filter (where x > 4) y, sum(x) filter (where x < 2) z from t");
        printStatement("select sum(x) filter (where x > 4) over (partition by y) z from t");

        printStatement("" +
                "select depname, empno, salary\n" +
                ", count(*) over ()\n" +
                ", avg(salary) over (partition by depname)\n" +
                ", rank() over (partition by depname order by salary desc)\n" +
                ", sum(salary) over (order by salary rows unbounded preceding)\n" +
                ", sum(salary) over (partition by depname order by salary rows between current row and 3 following)\n" +
                ", sum(salary) over (partition by depname range unbounded preceding)\n" +
                ", sum(salary) over (rows between 2 preceding and unbounded following)\n" +
                "from emp");

        printStatement("" +
                "with a (id) as (with x as (select 123 from z) select * from x) " +
                "   , b (id) as (select 999 from z) " +
                "select * from a join b using (id)");

        printStatement("with recursive t as (select * from x) select * from t");

        printStatement("select * from information_schema.tables");

        printStatement("show catalogs");

        printStatement("show schemas");
        printStatement("show schemas from sys");

        printStatement("show tables");
        printStatement("show tables from information_schema");
        printStatement("show tables like '%'");
        printStatement("show tables from information_schema like '%'");

        printStatement("show functions");

        printStatement("select cast('123' as bigint), try_cast('foo' as bigint)");

        printStatement("select * from a.b.c");
        printStatement("select * from a.b.c.e.f.g");

        printStatement("select \"TOTALPRICE\" \"my price\" from \"$MY\"\"ORDERS\"");

        printStatement("select * from foo tablesample system (10+1)");
        printStatement("select * from foo tablesample system (10) join bar tablesample bernoulli (30) on a.id = b.id");
        printStatement("select * from foo tablesample system (10) join bar tablesample bernoulli (30) on not(a.id > b.id)");

        printStatement("create table foo as (select * from abc)");
        printStatement("create table if not exists foo as (select * from abc)");
        printStatement("create table foo with (a = 'apple', b = 'banana') as select * from abc");
        printStatement("create table foo comment 'test' with (a = 'apple') as select * from abc");
        printStatement("create table foo as select * from abc WITH NO DATA");

        printStatement("create table foo as (with t(x) as (values 1) select x from t)");
        printStatement("create table if not exists foo as (with t(x) as (values 1) select x from t)");
        printStatement("create table foo as (with t(x) as (values 1) select x from t) WITH DATA");
        printStatement("create table if not exists foo as (with t(x) as (values 1) select x from t) WITH DATA");
        printStatement("create table foo as (with t(x) as (values 1) select x from t) WITH NO DATA");
        printStatement("create table if not exists foo as (with t(x) as (values 1) select x from t) WITH NO DATA");

        printStatement("create table foo(a) as (with t(x) as (values 1) select x from t)");
        printStatement("create table if not exists foo(a) as (with t(x) as (values 1) select x from t)");
        printStatement("create table foo(a) as (with t(x) as (values 1) select x from t) WITH DATA");
        printStatement("create table if not exists foo(a) as (with t(x) as (values 1) select x from t) WITH DATA");
        printStatement("create table foo(a) as (with t(x) as (values 1) select x from t) WITH NO DATA");
        printStatement("create table if not exists foo(a) as (with t(x) as (values 1) select x from t) WITH NO DATA");

        printStatement("drop table foo");

        printStatement("insert into foo select * from abc");

        printStatement("delete from foo");
        printStatement("delete from foo where a = b");

        printStatement("values ('a', 1, 2.2), ('b', 2, 3.3)");

        printStatement("table foo");
        printStatement("table foo order by x limit 10");
        printStatement("(table foo)");
        printStatement("(table foo) limit 10");
        printStatement("(table foo limit 5) limit 10");

        printStatement("select * from a limit all");
        printStatement("select * from a order by x limit all");

        printStatement("select * from a union select * from b");
        printStatement("table a union all table b");
        printStatement("(table foo) union select * from foo union (table foo order by x)");

        printStatement("table a union table b intersect table c");
        printStatement("(table a union table b) intersect table c");
        printStatement("table a union table b except table c intersect table d");
        printStatement("(table a union table b except table c) intersect table d");
        printStatement("((table a union table b) except table c) intersect table d");
        printStatement("(table a union (table b except table c)) intersect table d");
        printStatement("table a intersect table b union table c");
        printStatement("table a intersect (table b union table c)");

        printStatement("alter table foo rename to bar");
        printStatement("alter table a.b.c rename to d.e.f");
        printStatement("alter table a.b.c rename column x to y");
        printStatement("alter table a.b.c add column x bigint");
        printStatement("alter table a.b.c add column x bigint comment 'large x'");
        printStatement("alter table a.b.c add column x bigint with (weight = 2)");
        printStatement("alter table a.b.c add column x bigint comment 'xtra' with (compression = 'LZ4', special = true)");
        printStatement("alter table a.b.c drop column x");

        printStatement("create schema test");
        printStatement("create schema if not exists test");
        printStatement("create schema test with (a = 'apple', b = 123)");

        printStatement("drop schema test");
        printStatement("drop schema test cascade");
        printStatement("drop schema if exists test");
        printStatement("drop schema if exists test restrict");

        printStatement("alter schema foo rename to bar");
        printStatement("alter schema foo.bar rename to baz");

        printStatement("create table test (a boolean, b bigint, c double, d varchar, e timestamp)");
        printStatement("create table test (a boolean, b bigint comment 'test')");
        printStatement("create table if not exists baz (a timestamp, b varchar)");
        printStatement("create table test (a boolean, b bigint) with (a = 'apple', b = 'banana')");
        printStatement("create table test (a boolean, b bigint) comment 'test' with (a = 'apple')");
        printStatement("create table test (a boolean with (a = 'apple', b = 'banana'), b bigint comment 'bla' with (c = 'cherry')) comment 'test' with (a = 'apple')");
        printStatement("drop table test");

        printStatement("create view foo as with a as (select 123) select * from a");
        printStatement("create or replace view foo as select 123 from t");

        printStatement("drop view foo");

        printStatement("insert into t select * from t");
        printStatement("insert into t (c1, c2) select * from t");

        printStatement("start transaction");
        printStatement("start transaction isolation level read uncommitted");
        printStatement("start transaction isolation level read committed");
        printStatement("start transaction isolation level repeatable read");
        printStatement("start transaction isolation level serializable");
        printStatement("start transaction read only");
        printStatement("start transaction read write");
        printStatement("start transaction isolation level read committed, read only");
        printStatement("start transaction read only, isolation level read committed");
        printStatement("start transaction read write, isolation level serializable");
        printStatement("commit");
        printStatement("commit work");
        printStatement("rollback");
        printStatement("rollback work");

        printStatement("call foo()");
        printStatement("call foo(123, a => 1, b => 'go', 456)");

        printStatement("grant select on foo to alice with grant option");
        printStatement("grant all privileges on foo to alice");
        printStatement("grant delete, select on foo to public");
        printStatement("revoke grant option for select on foo from alice");
        printStatement("revoke all privileges on foo from alice");
        printStatement("revoke insert, delete on foo from public"); //check support for public
        printStatement("show grants on table t");
        printStatement("show grants on t");
        printStatement("show grants");

        printStatement("prepare p from select * from (select * from T) \"A B\"");

        printStatement("SELECT * FROM table1 WHERE a >= ALL (VALUES 2, 3, 4)");
        printStatement("SELECT * FROM table1 WHERE a <> ANY (SELECT 2, 3, 4)");
        printStatement("SELECT * FROM table1 WHERE a = SOME (SELECT id FROM table2)");
    }

    @Test
    public void testStringFormatter()
    {
        assertSqlFormatter("U&'hello\\6d4B\\8Bd5\\+10FFFFworld\\7F16\\7801'",
                "U&'hello\\6D4B\\8BD5\\+10FFFFworld\\7F16\\7801'");
        assertSqlFormatter("'hello world'", "'hello world'");
        assertSqlFormatter("U&'!+10FFFF!6d4B!8Bd5ABC!6d4B!8Bd5' UESCAPE '!'",
                "U&'\\+10FFFF\\6D4B\\8BD5ABC\\6D4B\\8BD5'");
        assertSqlFormatter("U&'\\+10FFFF\\6D4B\\8BD5\\0041\\0042\\0043\\6D4B\\8BD5'",
                "U&'\\+10FFFF\\6D4B\\8BD5ABC\\6D4B\\8BD5'");
        assertSqlFormatter("U&'\\\\abc\\6D4B'''", "U&'\\\\abc\\6D4B'''");
    }

    @Test
    public void testStatementBuilderTpch()
            throws Exception
    {
        printTpchQuery(1, 3);
        printTpchQuery(2, 33, "part type like", "region name");
        printTpchQuery(3, "market segment", "2013-03-05");
        printTpchQuery(4, "2013-03-05");
        printTpchQuery(5, "region name", "2013-03-05");
        printTpchQuery(6, "2013-03-05", 33, 44);
        printTpchQuery(7, "nation name 1", "nation name 2");
        printTpchQuery(8, "nation name", "region name", "part type");
        printTpchQuery(9, "part name like");
        printTpchQuery(10, "2013-03-05");
        printTpchQuery(11, "nation name", 33);
        printTpchQuery(12, "ship mode 1", "ship mode 2", "2013-03-05");
        printTpchQuery(13, "comment like 1", "comment like 2");
        printTpchQuery(14, "2013-03-05");
        // query 15: views not supported
        printTpchQuery(16, "part brand", "part type like", 3, 4, 5, 6, 7, 8, 9, 10);
        printTpchQuery(17, "part brand", "part container");
        printTpchQuery(18, 33);
        printTpchQuery(19, "part brand 1", "part brand 2", "part brand 3", 11, 22, 33);
        printTpchQuery(20, "part name like", "2013-03-05", "nation name");
        printTpchQuery(21, "nation name");
        printTpchQuery(22, "phone 1", "phone 2", "phone 3", "phone 4", "phone 5", "phone 6", "phone 7");
    }

    /**
     * Parses {@code sql}, formats it, and asserts that formatting round-trips
     * through the parser.  Intermediate output is printed only when the
     * {@code printParse} system property is set.
     */
    private static void printStatement(String sql)
    {
        println(sql.trim());
        println("");

        ParsingOptions parsingOptions = new ParsingOptions(AS_DOUBLE /* anything */);
        Statement statement = SQL_PARSER.createStatement(sql, parsingOptions);
        println(statement.toString());
        println("");

        println(SqlFormatter.formatSql(statement, Optional.empty()));
        println("");

        assertFormattedSql(SQL_PARSER, statement);

        println(repeat("=", 60));
        println("");
    }

    private static void assertSqlFormatter(String expression, String formatted)
    {
        Expression originalExpression = SQL_PARSER.createExpression(expression, new ParsingOptions());
        String real = SqlFormatter.formatSql(originalExpression, Optional.empty());
        assertEquals(real, formatted);
    }

    // output is opt-in via -DprintParse=true so the test runs quietly by default
    private static void println(String s)
    {
        if (Boolean.parseBoolean(System.getProperty("printParse"))) {
            System.out.println(s);
        }
    }

    private static String getTpchQuery(int q)
            throws IOException
    {
        return readResource("tpch/queries/" + q + ".sql");
    }

    /**
     * Loads TPC-H query {@code query}, substitutes its {@code :N} bind
     * parameters with {@code values}, and round-trips it through the parser.
     */
    private static void printTpchQuery(int query, Object... values)
            throws IOException
    {
        String sql = getTpchQuery(query);

        // substitute highest-numbered parameters first so ":1" cannot clobber the prefix of ":11"
        for (int i = values.length - 1; i >= 0; i--) {
            // quoteReplacement prevents '$' or '\' in a substituted value from being
            // interpreted as a regex group reference by replaceAll
            sql = sql.replaceAll(format(":%s", i + 1), Matcher.quoteReplacement(String.valueOf(values[i])));
        }

        assertFalse(sql.matches("(?s).*:[0-9].*"), "Not all bind parameters were replaced: " + sql);

        sql = fixTpchQuery(sql);
        printStatement(sql);
    }

    private static String readResource(String name)
            throws IOException
    {
        return Resources.toString(Resources.getResource(name), UTF_8);
    }

    // strips TPC-H generator directives (":x", ":o", ":n ...") that are not valid SQL
    private static String fixTpchQuery(String s)
    {
        s = s.replaceFirst("(?m);$", "");
        s = s.replaceAll("(?m)^:[xo]$", "");
        s = s.replaceAll("(?m)^:n -1$", "");
        s = s.replaceAll("(?m)^:n ([0-9]+)$", "LIMIT $1");
        s = s.replace("day (3)", "day"); // for query 1
        return s;
    }
}
/**
 * ProphetFilteredSpectraDistributionCalculator.java
 * @author Vagisha Sharma
 * Aug 7, 2011
 * @version 1.0
 */
package org.yeastrc.ms.service.pepxml.stats;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

import org.apache.log4j.Logger;
import org.yeastrc.ms.dao.DAOFactory;
import org.yeastrc.ms.dao.analysis.MsRunSearchAnalysisDAO;
import org.yeastrc.ms.dao.analysis.peptideProphet.PeptideProphetResultDAO;
import org.yeastrc.ms.dao.run.MsScanDAO;
import org.yeastrc.ms.domain.analysis.MsRunSearchAnalysis;
import org.yeastrc.ms.domain.analysis.peptideProphet.PeptideProphetResult;
import org.yeastrc.ms.domain.analysis.peptideProphet.impl.ProphetBinnedSpectraResult;
import org.yeastrc.ms.domain.analysis.peptideProphet.impl.ProphetFilteredSpectraResult;
import org.yeastrc.ms.domain.run.MsScan;
import org.yeastrc.ms.domain.search.Program;
import org.yeastrc.ms.util.TimeUtils;

/**
 * Builds retention-time histograms for a PeptideProphet search analysis: one
 * histogram of all acquired MS/MS spectra and one of spectra whose best
 * PeptideProphet probability passes the score cutoff, binned by retention time.
 */
public class ProphetFilteredSpectraDistributionCalculator {

    private int analysisId;
    private double scoreCutoff;

    // per-run histograms, re-initialized for each runSearchAnalysis
    private int[] allSpectraCounts;
    private int[] filteredSpectraCounts;

    // bin width in the same units as MsScan.getRetentionTime()
    public static double BIN_SIZE = 1.0;
    private int numBins;

    private List<ProphetFilteredSpectraResult> filteredResults;

    private static final Logger log = Logger.getLogger(ProphetFilteredSpectraDistributionCalculator.class.getName());

    public ProphetFilteredSpectraDistributionCalculator(int analysisId, double scoreCutoff) {
        this.analysisId = analysisId;
        this.scoreCutoff = scoreCutoff;
        filteredResults = new ArrayList<ProphetFilteredSpectraResult>();
    }

    public List<ProphetFilteredSpectraResult> getFilteredResults() {
        return filteredResults;
    }

    public double getScoreCutoff() {
        return this.scoreCutoff;
    }

    /**
     * Runs the calculation for the analysis given to the constructor.  Only
     * PeptideProphet analyses are supported; results are accumulated in
     * {@link #getFilteredResults()}.
     */
    public void calculate() {

        Program analysisProgram = DAOFactory.instance().getMsSearchAnalysisDAO().load(analysisId).getAnalysisProgram();

        if(analysisProgram == Program.PEPTIDE_PROPHET) {

            if(!initBins()) {
                log.error("There was an error initializing bins for searchAnalysisID: "+analysisId);
                return;
            }

            // we will calculate two things:
            // 1. RT distribution of all acquired MS/MS spectra for the analysis
            // 2. RT distribution of spectra with IDs >= given qvalue cutoff
            MsRunSearchAnalysisDAO rsDao = DAOFactory.instance().getMsRunSearchAnalysisDAO();
            List<Integer> runSearchAnalysisIds = rsDao.getRunSearchAnalysisIdsForAnalysis(analysisId);

            long s = System.currentTimeMillis();
            log.info("Binning data..");
            for(int runSearchAnalysisId: runSearchAnalysisIds) {

                MsRunSearchAnalysis rsAnalysis = rsDao.load(runSearchAnalysisId);
                int runId = DAOFactory.instance().getMsRunSearchDAO().loadRunSearch(rsAnalysis.getRunSearchId()).getRunId();

                log.info("Getting data for runSearchAnalysis: "+runSearchAnalysisId+"; runId: "+runId);
                binUsingMsLib(scoreCutoff, runSearchAnalysisId, runId);
                //binUsingJDBC(scoreCutoff, runSearchAnalysisId, runId);
                log.info("Calculated results for runSearchAnalysisID: "+runSearchAnalysisId);
            }
            long e = System.currentTimeMillis();
            log.info("Binned data in: "+TimeUtils.timeElapsedSeconds(s, e)+"seconds");
        }
        else {
            log.error("Don't know how to build RT distribution for analysis program: "+analysisProgram);
        }
    }

    /**
     * Bins the spectra of one run using the DAO layer.  A spectrum counts as
     * "filtered" when any of its PeptideProphet results has probability >= the
     * score cutoff.
     */
    private void binUsingMsLib(double scoreCutoff, int runSearchAnalysisId, int runId) {

        long s = System.currentTimeMillis();

        allSpectraCounts = new int[numBins];
        filteredSpectraCounts = new int[numBins];

        int scanCnt = 0;
        int goodScanCnt = 0;

        DAOFactory daoFactory = DAOFactory.instance();
        MsScanDAO scanDao = daoFactory.getMsScanDAO();
        PeptideProphetResultDAO prophetResDao = daoFactory.getPeptideProphetResultDAO();

        List<Integer> scanIds = scanDao.loadScanIdsForRun(runId);
        for(Integer scanId: scanIds) {

            MsScan scan = scanDao.loadScanLite(scanId);
            scanCnt++;

            boolean filtered = false;
            List<Integer> prophetResultIds = prophetResDao.loadIdsForRunSearchAnalysisScan(runSearchAnalysisId, scanId);
            if(prophetResultIds != null && prophetResultIds.size() > 0) {
                for(Integer prophetResultId: prophetResultIds) {
                    PeptideProphetResult pres = prophetResDao.loadForProphetResultId(prophetResultId);
                    if(pres.getProbability() >= scoreCutoff) {
                        filtered = true;
                        goodScanCnt++;
                        break; // one passing result is enough for this scan
                    }
                }
            }

            // If we don't have retention time for a scan skip the whole runSearchAnalysis
            if(scan.getRetentionTime() == null) {
                log.debug("!!!RETENTION TIME NOT FOUND for runSearchAnalysisID: "+runSearchAnalysisId+". Will not be binned....");
            }
            else {
                double rt = scan.getRetentionTime().doubleValue();
                putScanInBin(rt, filtered);
            }
        }

        addResult(runSearchAnalysisId, scanCnt, goodScanCnt);

        long e = System.currentTimeMillis();
        log.info("Binned data in: "+TimeUtils.timeElapsedSeconds(s, e)+" seconds");
    }

    /**
     * Alternative binning implementation that reads directly over JDBC.
     * Currently unused (see the commented-out call in {@link #calculate()}).
     */
    private void binUsingJDBC(double scoreCutoff, int runSearchAnalysisId, int runId) {

        long s = System.currentTimeMillis();

        Connection conn = null;
        Statement stmt = null;
        ResultSet rs = null;

        allSpectraCounts = new int[numBins];
        filteredSpectraCounts = new int[numBins];

        int scanCnt = 0;
        int goodScanCnt = 0;

        try {
            conn = DAOFactory.instance().getConnection();
            // NOTE(review): ORDER BY references "probability" but the select list exposes
            // pres.qvalue — confirm the column name against the schema
            String sql = "SELECT scan.id, scan.retentionTime, pres.qvalue "+
                "FROM msScan AS scan "+
                "LEFT JOIN (msRunSearchResult AS res, PeptideProphetResult AS pres) "+
                "ON (scan.id = res.scanID AND res.id = pres.resultID AND pres.runSearchAnalysisID="+runSearchAnalysisId+") "+
                "WHERE scan.runID = "+runId+" "+
                "ORDER BY scan.id,probability ASC";

            log.info(sql);

            stmt = conn.createStatement();
            rs = stmt.executeQuery(sql);

            int lastScan = -1;
            while(rs.next()) {

                int scanId = rs.getInt("id");
                if(scanId == lastScan)
                    continue; // only count each scan once; rows are ordered by scan.id
                lastScan = scanId;

                boolean filtered = false;
                if(rs.getObject("qvalue") != null) {
                    // NOTE(review): this uses qvalue <= cutoff while binUsingMsLib uses
                    // probability >= cutoff — verify which score this path should filter on
                    filtered = rs.getDouble("qvalue") <= scoreCutoff;
                }

                // If we don't have retention time for a scan skip the whole runSearchAnalysis
                if(rs.getObject("retentionTime") == null) {
                    log.debug("!!!RETENTION TIME NOT FOUND for runSearchAnalysisID: "+runSearchAnalysisId+". Will not be binned....");
                }
                else {
                    double rt = rs.getBigDecimal("retentionTime").doubleValue();
                    putScanInBin(rt, filtered);
                }

                scanCnt++;
                if(filtered)
                    goodScanCnt++;
            }
        }
        catch(SQLException ex) {
            log.error("Error binning data",ex);
        }
        finally {
            // close in reverse order of acquisition: ResultSet, Statement, Connection.
            // (the original closed the connection first, invalidating the others)
            if(rs != null)   try {rs.close();}   catch(SQLException ignored){}
            if(stmt != null) try {stmt.close();} catch(SQLException ignored){}
            if(conn != null) try {conn.close();} catch(SQLException ignored){}
        }

        addResult(runSearchAnalysisId, scanCnt, goodScanCnt);

        long e = System.currentTimeMillis();
        log.info("Binned data in: "+TimeUtils.timeElapsedSeconds(s, e)+" seconds");
    }

    /**
     * Packages the current per-run histograms into a ProphetFilteredSpectraResult
     * and appends it to {@link #filteredResults}.  Shared by both binning paths.
     */
    private void addResult(int runSearchAnalysisId, int scanCnt, int goodScanCnt) {
        ProphetFilteredSpectraResult stat = new ProphetFilteredSpectraResult();
        stat.setRunSearchAnalysisId(runSearchAnalysisId);
        stat.setTotal(scanCnt);
        stat.setFiltered(goodScanCnt);
        stat.setProbability(scoreCutoff);

        List<ProphetBinnedSpectraResult> binnedResults = new ArrayList<ProphetBinnedSpectraResult>();
        for(int i = 0; i < numBins; i++) {
            ProphetBinnedSpectraResult bin = new ProphetBinnedSpectraResult();
            bin.setBinStart(i*BIN_SIZE);
            bin.setBinEnd(bin.getBinStart() + BIN_SIZE);
            bin.setTotal(allSpectraCounts[i]);
            bin.setFiltered(filteredSpectraCounts[i]);
            binnedResults.add(bin);
        }
        stat.setBinnedResults(binnedResults);
        filteredResults.add(stat);
    }

    public int getNumBins() {
        return numBins;
    }

    public double getBinSize() {
        return BIN_SIZE;
    }

    public List<ProphetFilteredSpectraResult> getResult() {
        return this.filteredResults;
    }

    // NOTE(review): assumes rt <= the max RT seen by initBins(); an RT beyond
    // that would throw ArrayIndexOutOfBoundsException — confirm runs can't exceed it
    private void putScanInBin(double rt, boolean isFiltered) {
        int binIndex = (int)(rt / BIN_SIZE);
        allSpectraCounts[binIndex]++;
        if(isFiltered) {
            filteredSpectraCounts[binIndex]++;
        }
    }

    /**
     * Sizes the histogram from the maximum retention time over all runs in the
     * analysis.  Returns false if no runs were found.
     */
    private boolean initBins() {

        long s = System.currentTimeMillis();
        log.info("Initializing bins...");

        // get the runIDs for this analysis
        List<Integer> searchIds = DAOFactory.instance().getMsSearchAnalysisDAO().getSearchIdsForAnalysis(analysisId);
        List<Integer> runIds = new ArrayList<Integer>();
        for(int searchId: searchIds) {
            int experimentId = DAOFactory.instance().getMsSearchDAO().loadSearch(searchId).getExperimentId();
            List<Integer> rIds = DAOFactory.instance().getMsExperimentDAO().getRunIdsForExperiment(experimentId);
            runIds.addAll(rIds);
        }
        if(runIds.size() == 0) {
            log.warn("No runIds found for searchAnalysisID: "+analysisId);
            return false;
        }

        // get max RT and create bins
        double maxRT = DAOFactory.instance().getMsRunDAO().getMaxRetentionTimeForRuns(runIds);
        numBins = (int)(maxRT / BIN_SIZE) + 1;

        long e = System.currentTimeMillis();
        log.info("Initialized bins in "+TimeUtils.timeElapsedSeconds(s, e)+"seconds");
        return true;
    }
}
/*
 * Copyright (C) 2009 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */

package com.google.common.escape;

import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import java.util.HashMap;
import java.util.Map;
import org.checkerframework.checker.index.qual.NonNegative;
import org.checkerframework.checker.nullness.qual.Nullable;

/**
 * Static utility methods pertaining to {@link Escaper} instances.
 *
 * @author Sven Mawson
 * @author David Beaumont
 * @since 15.0
 */
@Beta
@GwtCompatible
public final class Escapers {
  // Static utility class; not instantiable.
  private Escapers() {}

  /**
   * Returns an {@link Escaper} that does no escaping, passing all character data through unchanged.
   */
  public static Escaper nullEscaper() {
    return NULL_ESCAPER;
  }

  // An Escaper that efficiently performs no escaping.
  // Extending CharEscaper (instead of Escaper) makes Escapers.compose() easier.
  private static final Escaper NULL_ESCAPER =
      new CharEscaper() {
        @Override
        public String escape(String string) {
          // Returning the input unchanged is the fast path; null input is rejected.
          return checkNotNull(string);
        }

        @Override
        protected char[] escape(char c) {
          // TODO: Fix tests not to call this directly and make it throw an error.
          // Returning null means "no replacement needed" in the CharEscaper contract.
          return null;
        }
      };

  /**
   * Returns a builder for creating simple, fast escapers. A builder instance can be reused and each
   * escaper that is created will be a snapshot of the current builder state. Builders are not
   * thread safe.
   *
   * <p>The initial state of the builder is such that:
   *
   * <ul>
   *   <li>There are no replacement mappings
   *   <li>{@code safeMin == Character.MIN_VALUE}
   *   <li>{@code safeMax == Character.MAX_VALUE}
   *   <li>{@code unsafeReplacement == null}
   * </ul>
   *
   * <p>For performance reasons escapers created by this builder are not Unicode aware and will not
   * validate the well-formedness of their input.
   */
  public static Builder builder() {
    return new Builder();
  }

  /**
   * A builder for simple, fast escapers.
   *
   * <p>Typically an escaper needs to deal with the escaping of high valued characters or code
   * points. In these cases it is necessary to extend either {@link ArrayBasedCharEscaper} or {@link
   * ArrayBasedUnicodeEscaper} to provide the desired behavior. However this builder is suitable for
   * creating escapers that replace a relative small set of characters.
   *
   * @author David Beaumont
   * @since 15.0
   */
  @Beta
  public static final class Builder {
    // Per-character replacement strings; mutated by addEscape(), snapshotted by build().
    private final Map<Character, String> replacementMap = new HashMap<>();
    private char safeMin = Character.MIN_VALUE;
    private char safeMax = Character.MAX_VALUE;
    private String unsafeReplacement = null;

    // The constructor is exposed via the builder() method above.
    private Builder() {}

    /**
     * Sets the safe range of characters for the escaper. Characters in this range that have no
     * explicit replacement are considered 'safe' and remain unescaped in the output. If {@code
     * safeMax < safeMin} then the safe range is empty.
     *
     * @param safeMin the lowest 'safe' character
     * @param safeMax the highest 'safe' character
     * @return the builder instance
     */
    @CanIgnoreReturnValue
    public Builder setSafeRange(char safeMin, char safeMax) {
      this.safeMin = safeMin;
      this.safeMax = safeMax;
      return this;
    }

    /**
     * Sets the replacement string for any characters outside the 'safe' range that have no explicit
     * replacement. If {@code unsafeReplacement} is {@code null} then no replacement will occur, if
     * it is {@code ""} then the unsafe characters are removed from the output.
     *
     * @param unsafeReplacement the string to replace unsafe characters
     * @return the builder instance
     */
    @CanIgnoreReturnValue
    public Builder setUnsafeReplacement(@Nullable String unsafeReplacement) {
      this.unsafeReplacement = unsafeReplacement;
      return this;
    }

    /**
     * Adds a replacement string for the given input character. The specified character will be
     * replaced by the given string whenever it occurs in the input, irrespective of whether it lies
     * inside or outside the 'safe' range.
     *
     * @param c the character to be replaced
     * @param replacement the string to replace the given character
     * @return the builder instance
     * @throws NullPointerException if {@code replacement} is null
     */
    @CanIgnoreReturnValue
    public Builder addEscape(char c, String replacement) {
      checkNotNull(replacement);
      // This can replace an existing character (the builder is re-usable).
      replacementMap.put(c, replacement);
      return this;
    }

    /** Returns a new escaper based on the current state of the builder. */
    public Escaper build() {
      return new ArrayBasedCharEscaper(replacementMap, safeMin, safeMax) {
        // Snapshot of unsafeReplacement taken at build() time, converted once.
        private final char[] replacementChars =
            unsafeReplacement != null ? unsafeReplacement.toCharArray() : null;

        @Override
        protected char[] escapeUnsafe(char c) {
          return replacementChars;
        }
      };
    }
  }

  /**
   * Returns a {@link UnicodeEscaper} equivalent to the given escaper instance. If the escaper is
   * already a UnicodeEscaper then it is simply returned, otherwise it is wrapped in a
   * UnicodeEscaper.
   *
   * <p>When a {@link CharEscaper} escaper is wrapped by this method it acquires extra behavior with
   * respect to the well-formedness of Unicode character sequences and will throw {@link
   * IllegalArgumentException} when given bad input.
   *
   * @param escaper the instance to be wrapped
   * @return a UnicodeEscaper with the same behavior as the given instance
   * @throws NullPointerException if escaper is null
   * @throws IllegalArgumentException if escaper is not a UnicodeEscaper or a CharEscaper
   */
  static UnicodeEscaper asUnicodeEscaper(Escaper escaper) {
    checkNotNull(escaper);
    if (escaper instanceof UnicodeEscaper) {
      return (UnicodeEscaper) escaper;
    } else if (escaper instanceof CharEscaper) {
      return wrap((CharEscaper) escaper);
    }
    // In practice this shouldn't happen because it would be very odd not to
    // extend either CharEscaper or UnicodeEscaper for non trivial cases.
    throw new IllegalArgumentException(
        "Cannot create a UnicodeEscaper from: " + escaper.getClass().getName());
  }

  /**
   * Returns a string that would replace the given character in the specified escaper, or {@code
   * null} if no replacement should be made. This method is intended for use in tests through the
   * {@code EscaperAsserts} class; production users of {@link CharEscaper} should limit themselves
   * to its public interface.
   *
   * @param c the character to escape if necessary
   * @return the replacement string, or {@code null} if no escaping was needed
   */
  public static String computeReplacement(CharEscaper escaper, char c) {
    return stringOrNull(escaper.escape(c));
  }

  /**
   * Returns a string that would replace the given character in the specified escaper, or {@code
   * null} if no replacement should be made. This method is intended for use in tests through the
   * {@code EscaperAsserts} class; production users of {@link UnicodeEscaper} should limit
   * themselves to its public interface.
   *
   * @param cp the Unicode code point to escape if necessary
   * @return the replacement string, or {@code null} if no escaping was needed
   */
  public static String computeReplacement(UnicodeEscaper escaper, @NonNegative int cp) {
    return stringOrNull(escaper.escape(cp));
  }

  /** Converts a CharEscaper/UnicodeEscaper escape result to a String (null stays null). */
  private static String stringOrNull(char[] in) {
    return (in == null) ? null : new String(in);
  }

  @SuppressWarnings("upperbound:array.access.unsafe.high")
  /*
   * Index-checker suppression rationale for the array writes in wrap() below:
   *
   * (1) output[n]: when hiChars != null, output.length = hiChars.length + loCount
   *     with loCount >= 1, so every n in [0, hiChars.length) is in bounds.
   * (2) output[hiCount + n]: output.length = hiCount + loChars.length, so every
   *     n in [0, loChars.length) keeps hiCount + n in bounds.
   * (3) output[0] / output[hiCount]: when a side is null its count is 1, so
   *     output.length >= 1 (resp. >= hiCount + 1) and the constant index is safe.
   */
  /** Private helper to wrap a CharEscaper as a UnicodeEscaper. */
  private static UnicodeEscaper wrap(final CharEscaper escaper) {
    return new UnicodeEscaper() {
      @Override
      protected char[] escape(int cp) {
        // If a code point maps to a single character, just escape that.
        if (cp < Character.MIN_SUPPLEMENTARY_CODE_POINT) {
          return escaper.escape((char) cp);
        }

        // Convert the code point to a surrogate pair and escape them both.
        // Note: This code path is horribly slow and typically allocates 4 new
        // char[] each time it is invoked. However this avoids any
        // synchronization issues and makes the escaper thread safe.
        char[] surrogateChars = new char[2];
        Character.toChars(cp, surrogateChars, 0);
        char[] hiChars = escaper.escape(surrogateChars[0]);
        char[] loChars = escaper.escape(surrogateChars[1]);

        // If either hiChars or lowChars are non-null, the CharEscaper is trying
        // to escape the characters of a surrogate pair separately. This is
        // uncommon and applies only to escapers that assume UCS-2 rather than
        // UTF-16. See: http://en.wikipedia.org/wiki/UTF-16/UCS-2
        if (hiChars == null && loChars == null) {
          // We expect this to be the common code path for most escapers.
          return null;
        }

        // Combine the characters and/or escaped sequences into a single array.
        // A null side contributes its single unescaped surrogate char (count 1).
        int hiCount = hiChars != null ? hiChars.length : 1;
        int loCount = loChars != null ? loChars.length : 1;
        char[] output = new char[hiCount + loCount];
        if (hiChars != null) {
          // TODO: Is this faster than System.arraycopy() for small arrays?
          for (int n = 0; n < hiChars.length; ++n) {
            output[n] = hiChars[n]; // (1)
          }
        } else {
          output[0] = surrogateChars[0];
        }
        if (loChars != null) {
          for (int n = 0; n < loChars.length; ++n) {
            output[hiCount + n] = loChars[n]; // (2)
          }
        } else {
          output[hiCount] = surrogateChars[1]; // (3)
        }
        return output;
      }
    };
  }
}
package io.github.interestinglab.waterdrop.config.impl;

import io.github.interestinglab.waterdrop.config.ConfigException;
import io.github.interestinglab.waterdrop.config.ConfigResolveOptions;
import io.github.interestinglab.waterdrop.config.impl.AbstractConfigValue.NotPossibleToResolve;

import java.util.ArrayList;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Set;

/**
 * Immutable state carried through substitution resolution.  Every mutating
 * operation (memoize, push/pop trace, add/remove cycle marker, restrict)
 * returns a NEW ResolveContext; instances are never modified in place.
 */
final class ResolveContext {
    // cache of already-resolved values, keyed by (value, restrictToChild)
    final private ResolveMemos memos;

    final private ConfigResolveOptions options;

    // the current path restriction, used to ensure lazy
    // resolution and avoid gratuitous cycles. without this,
    // any sibling of an object we're traversing could
    // cause a cycle "by side effect"
    // CAN BE NULL for a full resolve.
    final private Path restrictToChild;

    // This is used for tracing and debugging and nice error messages;
    // contains every node as we call resolve on it.
    final private List<AbstractConfigValue> resolveStack;

    // identity-based set of values currently being resolved; membership
    // means "resolving this again is a cycle"
    final private Set<AbstractConfigValue> cycleMarkers;

    ResolveContext(ResolveMemos memos, ConfigResolveOptions options, Path restrictToChild,
            List<AbstractConfigValue> resolveStack, Set<AbstractConfigValue> cycleMarkers) {
        this.memos = memos;
        this.options = options;
        this.restrictToChild = restrictToChild;
        // wrap defensively so this context can never be mutated through the collections
        this.resolveStack = Collections.unmodifiableList(resolveStack);
        this.cycleMarkers = Collections.unmodifiableSet(cycleMarkers);
    }

    // identity semantics (not equals) are required: two equal config values at
    // different positions must be tracked separately
    private static Set<AbstractConfigValue> newCycleMarkers() {
        return Collections.newSetFromMap(new IdentityHashMap<AbstractConfigValue, Boolean>());
    }

    ResolveContext(ConfigResolveOptions options, Path restrictToChild) {
        // LinkedHashSet keeps the traversal order which is at least useful
        // in error messages if nothing else
        this(new ResolveMemos(), options, restrictToChild, new ArrayList<AbstractConfigValue>(), newCycleMarkers());
        if (ConfigImpl.traceSubstitutionsEnabled())
            ConfigImpl.trace(depth(), "ResolveContext restrict to child " + restrictToChild);
    }

    /** Returns a copy with {@code value} marked as in-progress; throws if already marked. */
    ResolveContext addCycleMarker(AbstractConfigValue value) {
        if (ConfigImpl.traceSubstitutionsEnabled())
            ConfigImpl.trace(depth(), "++ Cycle marker " + value + "@" + System.identityHashCode(value));
        if (cycleMarkers.contains(value))
            throw new ConfigException.BugOrBroken("Added cycle marker twice " + value);
        Set<AbstractConfigValue> copy = newCycleMarkers();
        copy.addAll(cycleMarkers);
        copy.add(value);
        return new ResolveContext(memos, options, restrictToChild, resolveStack, copy);
    }

    /** Returns a copy with {@code value} no longer marked as in-progress. */
    ResolveContext removeCycleMarker(AbstractConfigValue value) {
        if (ConfigImpl.traceSubstitutionsEnabled())
            ConfigImpl.trace(depth(), "-- Cycle marker " + value + "@" + System.identityHashCode(value));
        Set<AbstractConfigValue> copy = newCycleMarkers();
        copy.addAll(cycleMarkers);
        copy.remove(value);
        return new ResolveContext(memos, options, restrictToChild, resolveStack, copy);
    }

    /** Returns a copy whose memo table additionally maps {@code key} to {@code value}. */
    private ResolveContext memoize(MemoKey key, AbstractConfigValue value) {
        ResolveMemos changed = memos.put(key, value);
        return new ResolveContext(changed, options, restrictToChild, resolveStack, cycleMarkers);
    }

    ConfigResolveOptions options() {
        return options;
    }

    boolean isRestrictedToChild() {
        return restrictToChild != null;
    }

    Path restrictToChild() {
        return restrictToChild;
    }

    // restrictTo may be null to unrestrict
    ResolveContext restrict(Path restrictTo) {
        if (restrictTo == restrictToChild)
            return this;
        else
            return new ResolveContext(memos, options, restrictTo, resolveStack, cycleMarkers);
    }

    ResolveContext unrestricted() {
        return restrict(null);
    }

    /** Comma-joined substitution expressions of the current resolve stack, for error messages. */
    String traceString() {
        String separator = ", ";
        StringBuilder sb = new StringBuilder();
        for (AbstractConfigValue value : resolveStack) {
            if (value instanceof ConfigReference) {
                sb.append(((ConfigReference) value).expression().toString());
                sb.append(separator);
            }
        }
        if (sb.length() > 0)
            sb.setLength(sb.length() - separator.length());
        return sb.toString();
    }

    private ResolveContext pushTrace(AbstractConfigValue value) {
        if (ConfigImpl.traceSubstitutionsEnabled())
            ConfigImpl.trace(depth(), "pushing trace " + value);
        List<AbstractConfigValue> copy = new ArrayList<AbstractConfigValue>(resolveStack);
        copy.add(value);
        return new ResolveContext(memos, options, restrictToChild, copy, cycleMarkers);
    }

    ResolveContext popTrace() {
        List<AbstractConfigValue> copy = new ArrayList<AbstractConfigValue>(resolveStack);
        AbstractConfigValue old = copy.remove(resolveStack.size() - 1);
        if (ConfigImpl.traceSubstitutionsEnabled())
            ConfigImpl.trace(depth() - 1, "popped trace " + old);
        return new ResolveContext(memos, options, restrictToChild, copy, cycleMarkers);
    }

    // depth doubles as a runaway-recursion guard
    int depth() {
        if (resolveStack.size() > 30)
            throw new ConfigException.BugOrBroken("resolve getting too deep");
        return resolveStack.size();
    }

    /** Resolves {@code original}, tracking it on the trace stack for the duration. */
    ResolveResult<? extends AbstractConfigValue> resolve(AbstractConfigValue original, ResolveSource source)
            throws NotPossibleToResolve {
        if (ConfigImpl.traceSubstitutionsEnabled())
            ConfigImpl
                    .trace(depth(), "resolving " + original + " restrictToChild=" + restrictToChild + " in " + source);
        return pushTrace(original).realResolve(original, source).popTrace();
    }

    private ResolveResult<? extends AbstractConfigValue> realResolve(AbstractConfigValue original, ResolveSource source)
            throws NotPossibleToResolve {
        // a fully-resolved (no restrictToChild) object can satisfy a
        // request for a restricted object, so always check that first.
        final MemoKey fullKey = new MemoKey(original, null);
        MemoKey restrictedKey = null;

        AbstractConfigValue cached = memos.get(fullKey);

        // but if there was no fully-resolved object cached, we'll only
        // compute the restrictToChild object so use a more limited
        // memo key
        if (cached == null && isRestrictedToChild()) {
            restrictedKey = new MemoKey(original, restrictToChild());
            cached = memos.get(restrictedKey);
        }

        if (cached != null) {
            if (ConfigImpl.traceSubstitutionsEnabled())
                ConfigImpl.trace(depth(), "using cached resolution " + cached + " for " + original
                        + " restrictToChild " + restrictToChild());
            return ResolveResult.make(this, cached);
        } else {
            if (ConfigImpl.traceSubstitutionsEnabled())
                ConfigImpl.trace(depth(),
                        "not found in cache, resolving " + original + "@" + System.identityHashCode(original));

            // cycle check must precede the recursive resolveSubstitutions call
            if (cycleMarkers.contains(original)) {
                if (ConfigImpl.traceSubstitutionsEnabled())
                    ConfigImpl.trace(depth(),
                            "Cycle detected, can't resolve; " + original + "@" + System.identityHashCode(original));
                throw new NotPossibleToResolve(this);
            }

            ResolveResult<? extends AbstractConfigValue> result = original.resolveSubstitutions(this, source);
            AbstractConfigValue resolved = result.value;

            if (ConfigImpl.traceSubstitutionsEnabled())
                // NOTE(review): the "from" part hashes `resolved`, not `original` —
                // looks like a copy/paste slip in the trace message; confirm upstream.
                ConfigImpl.trace(depth(), "resolved to " + resolved + "@" + System.identityHashCode(resolved)
                        + " from " + original + "@" + System.identityHashCode(resolved));

            ResolveContext withMemo = result.context;

            if (resolved == null || resolved.resolveStatus() == ResolveStatus.RESOLVED) {
                // if the resolved object is fully resolved by resolving
                // only the restrictToChildOrNull, then it can be cached
                // under fullKey since the child we were restricted to
                // turned out to be the only unresolved thing.
                if (ConfigImpl.traceSubstitutionsEnabled())
                    ConfigImpl.trace(depth(), "caching " + fullKey + " result " + resolved);

                withMemo = withMemo.memoize(fullKey, resolved);
            } else {
                // if we have an unresolved object then either we did a
                // partial resolve restricted to a certain child, or we are
                // allowing incomplete resolution, or it's a bug.
                if (isRestrictedToChild()) {
                    if (restrictedKey == null) {
                        throw new ConfigException.BugOrBroken(
                                "restrictedKey should not be null here");
                    }
                    if (ConfigImpl.traceSubstitutionsEnabled())
                        ConfigImpl.trace(depth(), "caching " + restrictedKey + " result " + resolved);

                    withMemo = withMemo.memoize(restrictedKey, resolved);
                } else if (options().getAllowUnresolved()) {
                    if (ConfigImpl.traceSubstitutionsEnabled())
                        ConfigImpl.trace(depth(), "caching " + fullKey + " result " + resolved);

                    withMemo = withMemo.memoize(fullKey, resolved);
                } else {
                    throw new ConfigException.BugOrBroken(
                            "resolveSubstitutions() did not give us a resolved object");
                }
            }

            return ResolveResult.make(withMemo, resolved);
        }
    }

    /** Entry point: resolves {@code value} against {@code root} with a fresh, unrestricted context. */
    static AbstractConfigValue resolve(AbstractConfigValue value, AbstractConfigObject root,
            ConfigResolveOptions options) {
        ResolveSource source = new ResolveSource(root);
        ResolveContext context = new ResolveContext(options, null /* restrictToChild */);

        try {
            return context.resolve(value, source).value;
        } catch (NotPossibleToResolve e) {
            // ConfigReference was supposed to catch NotPossibleToResolve
            throw new ConfigException.BugOrBroken(
                    "NotPossibleToResolve was thrown from an outermost resolve", e);
        }
    }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.bookkeeper.test; import java.io.File; import java.io.IOException; import java.net.InetAddress; import java.net.InetSocketAddress; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.concurrent.CountDownLatch; import junit.framework.TestCase; import org.apache.bookkeeper.bookie.Bookie; import org.apache.bookkeeper.bookie.BookieException; import org.apache.bookkeeper.client.BookKeeperTestClient; import org.apache.bookkeeper.conf.AbstractConfiguration; import org.apache.bookkeeper.conf.ClientConfiguration; import org.apache.bookkeeper.conf.ServerConfiguration; import org.apache.bookkeeper.metastore.InMemoryMetaStore; import org.apache.bookkeeper.proto.BookieServer; import org.apache.bookkeeper.replication.AutoRecoveryMain; import org.apache.bookkeeper.replication.ReplicationException.CompatibilityException; import org.apache.bookkeeper.replication.ReplicationException.UnavailableException; import org.apache.commons.io.FileUtils; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZooKeeper; import org.junit.After; import org.junit.Before; import org.slf4j.Logger; 
import org.slf4j.LoggerFactory; /** * A class runs several bookie servers for testing. */ public abstract class BookKeeperClusterTestCase extends TestCase { static final Logger LOG = LoggerFactory.getLogger(BookKeeperClusterTestCase.class); // ZooKeeper related variables protected ZooKeeperUtil zkUtil = new ZooKeeperUtil(); protected ZooKeeper zkc; // BookKeeper related variables protected List<File> tmpDirs = new LinkedList<File>(); protected List<BookieServer> bs = new LinkedList<BookieServer>(); protected List<ServerConfiguration> bsConfs = new LinkedList<ServerConfiguration>(); protected int numBookies; protected BookKeeperTestClient bkc; protected ServerConfiguration baseConf = new ServerConfiguration(); protected ClientConfiguration baseClientConf = new ClientConfiguration(); private Map<BookieServer, AutoRecoveryMain> autoRecoveryProcesses = new HashMap<BookieServer, AutoRecoveryMain>(); private boolean isAutoRecoveryEnabled; public BookKeeperClusterTestCase(int numBookies) { this.numBookies = numBookies; } @Before @Override public void setUp() throws Exception { LOG.info("Setting up test {}", getName()); InMemoryMetaStore.reset(); setMetastoreImplClass(baseConf); setMetastoreImplClass(baseClientConf); try { // start zookeeper service startZKCluster(); // start bookkeeper service startBKCluster(); } catch (Exception e) { LOG.error("Error setting up", e); throw e; } } @After @Override public void tearDown() throws Exception { LOG.info("TearDown"); // stop bookkeeper service stopBKCluster(); // stop zookeeper service stopZKCluster(); LOG.info("Tearing down test {}", getName()); } /** * Start zookeeper cluster * * @throws Exception */ protected void startZKCluster() throws Exception { zkUtil.startServer(); zkc = zkUtil.getZooKeeperClient(); } /** * Stop zookeeper cluster * * @throws Exception */ protected void stopZKCluster() throws Exception { zkUtil.killServer(); } /** * Start cluster. 
Also, starts the auto recovery process for each bookie, if * isAutoRecoveryEnabled is true. * * @throws Exception */ protected void startBKCluster() throws Exception { baseClientConf.setZkServers(zkUtil.getZooKeeperConnectString()); if (numBookies > 0) { bkc = new BookKeeperTestClient(baseClientConf); } // Create Bookie Servers (B1, B2, B3) for (int i = 0; i < numBookies; i++) { startNewBookie(); } } /** * Stop cluster. Also, stops all the auto recovery processes for the bookie * cluster, if isAutoRecoveryEnabled is true. * * @throws Exception */ protected void stopBKCluster() throws Exception { if (bkc != null) { bkc.close();; } for (BookieServer server : bs) { server.shutdown(); AutoRecoveryMain autoRecovery = autoRecoveryProcesses.get(server); if (autoRecovery != null && isAutoRecoveryEnabled()) { autoRecovery.shutdown(); LOG.debug("Shutdown auto recovery for bookieserver:" + server.getLocalAddress()); } } bs.clear(); for (File f : tmpDirs) { FileUtils.deleteDirectory(f); } } protected ServerConfiguration newServerConfiguration() throws IOException { File f = File.createTempFile("bookie", "test"); tmpDirs.add(f); f.delete(); f.mkdir(); int port = PortManager.nextFreePort(); return newServerConfiguration(port, zkUtil.getZooKeeperConnectString(), f, new File[] { f }); } protected ServerConfiguration newServerConfiguration(int port, String zkServers, File journalDir, File[] ledgerDirs) { ServerConfiguration conf = new ServerConfiguration(baseConf); conf.setBookiePort(port); conf.setZkServers(zkServers); conf.setJournalDirName(journalDir.getPath()); String[] ledgerDirNames = new String[ledgerDirs.length]; for (int i=0; i<ledgerDirs.length; i++) { ledgerDirNames[i] = ledgerDirs[i].getPath(); } conf.setLedgerDirNames(ledgerDirNames); return conf; } /** * Get bookie address for bookie at index */ public InetSocketAddress getBookie(int index) throws IllegalArgumentException { if (bs.size() <= index || index < 0) { throw new IllegalArgumentException("Invalid index, there 
are only " + bs.size() + " bookies. Asked for " + index); } return bs.get(index).getLocalAddress(); } /** * Kill a bookie by its socket address. Also, stops the autorecovery process * for the corresponding bookie server, if isAutoRecoveryEnabled is true. * * @param addr * Socket Address * @return the configuration of killed bookie * @throws InterruptedException */ public ServerConfiguration killBookie(InetSocketAddress addr) throws InterruptedException { BookieServer toRemove = null; int toRemoveIndex = 0; for (BookieServer server : bs) { if (server.getLocalAddress().equals(addr)) { server.shutdown(); toRemove = server; break; } ++toRemoveIndex; } if (toRemove != null) { stopAutoRecoveryService(toRemove); bs.remove(toRemove); return bsConfs.remove(toRemoveIndex); } return null; } /** * Kill a bookie by index. Also, stops the respective auto recovery process * for this bookie, if isAutoRecoveryEnabled is true. * * @param index * Bookie Index * @return the configuration of killed bookie * @throws InterruptedException * @throws IOException */ public ServerConfiguration killBookie(int index) throws InterruptedException, IOException { if (index >= bs.size()) { throw new IOException("Bookie does not exist"); } BookieServer server = bs.get(index); server.shutdown(); stopAutoRecoveryService(server); bs.remove(server); return bsConfs.remove(index); } /** * Sleep a bookie * * @param addr * Socket Address * @param seconds * Sleep seconds * @return Count Down latch which will be counted down when sleep finishes * @throws InterruptedException * @throws IOException */ public CountDownLatch sleepBookie(InetSocketAddress addr, final int seconds) throws InterruptedException, IOException { for (final BookieServer bookie : bs) { if (bookie.getLocalAddress().equals(addr)) { final CountDownLatch l = new CountDownLatch(1); Thread sleeper = new Thread() { @Override public void run() { try { bookie.suspendProcessing(); l.countDown(); Thread.sleep(seconds*1000); bookie.resumeProcessing(); 
} catch (Exception e) { LOG.error("Error suspending bookie", e); } } }; sleeper.start(); return l; } } throw new IOException("Bookie not found"); } /** * Sleep a bookie until I count down the latch * * @param addr * Socket Address * @param latch * Latch to wait on * @throws InterruptedException * @throws IOException */ public void sleepBookie(InetSocketAddress addr, final CountDownLatch l) throws InterruptedException, IOException { for (final BookieServer bookie : bs) { if (bookie.getLocalAddress().equals(addr)) { Thread sleeper = new Thread() { public void run() { try { bookie.suspendProcessing(); l.await(); bookie.resumeProcessing(); } catch (Exception e) { LOG.error("Error suspending bookie", e); } } }; sleeper.start(); return; } } throw new IOException("Bookie not found"); } /** * Restart bookie servers. Also restarts all the respective auto recovery * process, if isAutoRecoveryEnabled is true. * * @throws InterruptedException * @throws IOException * @throws KeeperException * @throws BookieException */ public void restartBookies() throws InterruptedException, IOException, KeeperException, BookieException, UnavailableException, CompatibilityException { restartBookies(null); } /** * Restart bookie servers using new configuration settings. Also restart the * respective auto recovery process, if isAutoRecoveryEnabled is true. 
* * @param newConf * New Configuration Settings * @throws InterruptedException * @throws IOException * @throws KeeperException * @throws BookieException */ public void restartBookies(ServerConfiguration newConf) throws InterruptedException, IOException, KeeperException, BookieException, UnavailableException, CompatibilityException { // shut down bookie server for (BookieServer server : bs) { server.shutdown(); stopAutoRecoveryService(server); } bs.clear(); Thread.sleep(1000); // restart them to ensure we can't int j = 0; for (ServerConfiguration conf : bsConfs) { if (null != newConf) { conf.loadConf(newConf); } bs.add(startBookie(conf)); j++; } } /** * Helper method to startup a new bookie server with the indicated port * number. Also, starts the auto recovery process, if the * isAutoRecoveryEnabled is set true. * * @param port * Port to start the new bookie server on * @throws IOException */ public int startNewBookie() throws IOException, InterruptedException, KeeperException, BookieException, UnavailableException, CompatibilityException { ServerConfiguration conf = newServerConfiguration(); bsConfs.add(conf); bs.add(startBookie(conf)); return conf.getBookiePort(); } /** * Helper method to startup a bookie server using a configuration object. * Also, starts the auto recovery process if isAutoRecoveryEnabled is true. 
* * @param conf * Server Configuration Object * */ protected BookieServer startBookie(ServerConfiguration conf) throws IOException, InterruptedException, KeeperException, BookieException, UnavailableException, CompatibilityException { BookieServer server = new BookieServer(conf); server.start(); int port = conf.getBookiePort(); while(bkc.getZkHandle().exists("/ledgers/available/" + InetAddress.getLocalHost().getHostAddress() + ":" + port, false) == null) { Thread.sleep(500); } bkc.readBookiesBlocking(); LOG.info("New bookie on port " + port + " has been created."); try { startAutoRecovery(server, conf); } catch (CompatibilityException ce) { LOG.error("Exception while starting AutoRecovery!", ce); } catch (UnavailableException ue) { LOG.error("Exception while starting AutoRecovery!", ue); } return server; } /** * Start a bookie with the given bookie instance. Also, starts the auto * recovery for this bookie, if isAutoRecoveryEnabled is true. */ protected BookieServer startBookie(ServerConfiguration conf, final Bookie b) throws IOException, InterruptedException, KeeperException, BookieException, UnavailableException, CompatibilityException { BookieServer server = new BookieServer(conf) { @Override protected Bookie newBookie(ServerConfiguration conf) { return b; } }; server.start(); int port = conf.getBookiePort(); while(bkc.getZkHandle().exists("/ledgers/available/" + InetAddress.getLocalHost().getHostAddress() + ":" + port, false) == null) { Thread.sleep(500); } bkc.readBookiesBlocking(); LOG.info("New bookie on port " + port + " has been created."); try { startAutoRecovery(server, conf); } catch (CompatibilityException ce) { LOG.error("Exception while starting AutoRecovery!", ce); } catch (UnavailableException ue) { LOG.error("Exception while starting AutoRecovery!", ue); } return server; } public void setMetastoreImplClass(AbstractConfiguration conf) { conf.setMetastoreImplClass(InMemoryMetaStore.class.getName()); } /** * Flags used to enable/disable the auto 
recovery process. If it is enabled, * starting the bookie server will starts the auto recovery process for that * bookie. Also, stopping bookie will stops the respective auto recovery * process. * * @param isAutoRecoveryEnabled * Value true will enable the auto recovery process. Value false * will disable the auto recovery process */ public void setAutoRecoveryEnabled(boolean isAutoRecoveryEnabled) { this.isAutoRecoveryEnabled = isAutoRecoveryEnabled; } /** * Flag used to check whether auto recovery process is enabled/disabled. By * default the flag is false. * * @return true, if the auto recovery is enabled. Otherwise return false. */ public boolean isAutoRecoveryEnabled() { return isAutoRecoveryEnabled; } private void startAutoRecovery(BookieServer bserver, ServerConfiguration conf) throws CompatibilityException, KeeperException, InterruptedException, IOException, UnavailableException { if (isAutoRecoveryEnabled()) { AutoRecoveryMain autoRecoveryProcess = new AutoRecoveryMain(conf); autoRecoveryProcess.start(); autoRecoveryProcesses.put(bserver, autoRecoveryProcess); LOG.debug("Starting Auditor Recovery for the bookie:" + bserver.getLocalAddress()); } } private void stopAutoRecoveryService(BookieServer toRemove) { AutoRecoveryMain autoRecoveryMain = autoRecoveryProcesses .remove(toRemove); if (null != autoRecoveryMain && isAutoRecoveryEnabled()) { autoRecoveryMain.shutdown(); LOG.debug("Shutdown auto recovery for bookieserver:" + toRemove.getLocalAddress()); } } /** * Will starts the auto recovery process for the bookie servers. One auto * recovery process per each bookie server, if isAutoRecoveryEnabled is * enabled. 
* * @throws CompatibilityException * - Compatibility error * @throws KeeperException * - ZK exception * @throws InterruptedException * - interrupted exception * @throws IOException * - IOException * @throws UnavailableException * - replication service has become unavailable */ public void startReplicationService() throws CompatibilityException, KeeperException, InterruptedException, IOException, UnavailableException { int index = -1; for (BookieServer bserver : bs) { startAutoRecovery(bserver, bsConfs.get(++index)); } } /** * Will stops all the auto recovery processes for the bookie cluster, if * isAutoRecoveryEnabled is true. */ public void stopReplicationService() { if(false == isAutoRecoveryEnabled()){ return; } for (Entry<BookieServer, AutoRecoveryMain> autoRecoveryProcess : autoRecoveryProcesses .entrySet()) { autoRecoveryProcess.getValue().shutdown(); LOG.debug("Shutdown Auditor Recovery for the bookie:" + autoRecoveryProcess.getKey().getLocalAddress()); } } }
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.ads.googleads.v9.services.stub;

import com.google.ads.googleads.v9.resources.BiddingStrategy;
import com.google.ads.googleads.v9.services.GetBiddingStrategyRequest;
import com.google.ads.googleads.v9.services.MutateBiddingStrategiesRequest;
import com.google.ads.googleads.v9.services.MutateBiddingStrategiesResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.stub.GrpcOperationsStub;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;

// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * gRPC stub implementation for the BiddingStrategyService service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator-java")
public class GrpcBiddingStrategyServiceStub extends BiddingStrategyServiceStub {
  // Wire-level descriptor for the unary GetBiddingStrategy RPC
  // (full method name plus protobuf request/response marshallers).
  private static final MethodDescriptor<GetBiddingStrategyRequest, BiddingStrategy>
      getBiddingStrategyMethodDescriptor =
          MethodDescriptor.<GetBiddingStrategyRequest, BiddingStrategy>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.ads.googleads.v9.services.BiddingStrategyService/GetBiddingStrategy")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetBiddingStrategyRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(BiddingStrategy.getDefaultInstance()))
              .build();

  // Wire-level descriptor for the unary MutateBiddingStrategies RPC.
  private static final MethodDescriptor<
          MutateBiddingStrategiesRequest, MutateBiddingStrategiesResponse>
      mutateBiddingStrategiesMethodDescriptor =
          MethodDescriptor
              .<MutateBiddingStrategiesRequest, MutateBiddingStrategiesResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.ads.googleads.v9.services.BiddingStrategyService/MutateBiddingStrategies")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(MutateBiddingStrategiesRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(MutateBiddingStrategiesResponse.getDefaultInstance()))
              .build();

  private final UnaryCallable<GetBiddingStrategyRequest, BiddingStrategy>
      getBiddingStrategyCallable;
  private final UnaryCallable<MutateBiddingStrategiesRequest, MutateBiddingStrategiesResponse>
      mutateBiddingStrategiesCallable;

  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  public static final GrpcBiddingStrategyServiceStub create(
      BiddingStrategyServiceStubSettings settings) throws IOException {
    return new GrpcBiddingStrategyServiceStub(settings, ClientContext.create(settings));
  }

  public static final GrpcBiddingStrategyServiceStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcBiddingStrategyServiceStub(
        BiddingStrategyServiceStubSettings.newBuilder().build(), clientContext);
  }

  public static final GrpcBiddingStrategyServiceStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcBiddingStrategyServiceStub(
        BiddingStrategyServiceStubSettings.newBuilder().build(), clientContext, callableFactory);
  }

  /**
   * Constructs an instance of GrpcBiddingStrategyServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcBiddingStrategyServiceStub(
      BiddingStrategyServiceStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new GrpcBiddingStrategyServiceCallableFactory());
  }

  /**
   * Constructs an instance of GrpcBiddingStrategyServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcBiddingStrategyServiceStub(
      BiddingStrategyServiceStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // The params extractors populate implicit routing headers from request fields.
    GrpcCallSettings<GetBiddingStrategyRequest, BiddingStrategy>
        getBiddingStrategyTransportSettings =
            GrpcCallSettings.<GetBiddingStrategyRequest, BiddingStrategy>newBuilder()
                .setMethodDescriptor(getBiddingStrategyMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("resource_name", String.valueOf(request.getResourceName()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<MutateBiddingStrategiesRequest, MutateBiddingStrategiesResponse>
        mutateBiddingStrategiesTransportSettings =
            GrpcCallSettings
                .<MutateBiddingStrategiesRequest, MutateBiddingStrategiesResponse>newBuilder()
                .setMethodDescriptor(mutateBiddingStrategiesMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("customer_id", String.valueOf(request.getCustomerId()));
                      return params.build();
                    })
                .build();

    this.getBiddingStrategyCallable =
        callableFactory.createUnaryCallable(
            getBiddingStrategyTransportSettings, settings.getBiddingStrategySettings(), clientContext);
    this.mutateBiddingStrategiesCallable =
        callableFactory.createUnaryCallable(
            mutateBiddingStrategiesTransportSettings,
            settings.mutateBiddingStrategiesSettings(),
            clientContext);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  @Override
  public UnaryCallable<GetBiddingStrategyRequest, BiddingStrategy> getBiddingStrategyCallable() {
    return getBiddingStrategyCallable;
  }

  @Override
  public UnaryCallable<MutateBiddingStrategiesRequest, MutateBiddingStrategiesResponse>
      mutateBiddingStrategiesCallable() {
    return mutateBiddingStrategiesCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.persistent; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractNamedDiffable; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.NamedObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.Task.Status; 
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import static org.elasticsearch.cluster.metadata.MetaData.ALL_CONTEXTS;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

/**
 * A cluster state record that contains a list of all running persistent tasks
 */
public final class PersistentTasksCustomMetaData extends AbstractNamedDiffable<MetaData.Custom> implements MetaData.Custom {

    public static final String TYPE = "persistent_tasks";
    private static final String API_CONTEXT = MetaData.XContentContext.API.toString();

    // TODO: Implement custom Diff for tasks
    // Tasks keyed by task id.
    private final Map<String, PersistentTask<?>> tasks;
    // Highest allocation id handed out so far; monotonically increasing.
    private final long lastAllocationId;

    public PersistentTasksCustomMetaData(long lastAllocationId, Map<String, PersistentTask<?>> tasks) {
        this.lastAllocationId = lastAllocationId;
        this.tasks = tasks;
    }

    // Top-level parser for the whole "persistent_tasks" section.
    private static final ObjectParser<Builder, Void> PERSISTENT_TASKS_PARSER = new ObjectParser<>(TYPE, Builder::new);

    // Parser for a single entry of the "tasks" array.
    private static final ObjectParser<TaskBuilder<PersistentTaskParams>, Void> PERSISTENT_TASK_PARSER =
            new ObjectParser<>("tasks", TaskBuilder::new);

    public static final ConstructingObjectParser<Assignment, Void> ASSIGNMENT_PARSER =
            new ConstructingObjectParser<>("assignment",
                    objects -> new Assignment((String) objects[0], (String) objects[1]));

    // Parses the task-name-keyed "task" object into params + status; wired up in the
    // static initializer below because it needs the local ObjectParser instance.
    private static final NamedObjectParser<TaskDescriptionBuilder<PersistentTaskParams>, Void> TASK_DESCRIPTION_PARSER;

    static {
        // Tasks parser initialization
        PERSISTENT_TASKS_PARSER.declareLong(Builder::setLastAllocationId, new ParseField("last_allocation_id"));
        PERSISTENT_TASKS_PARSER.declareObjectArray(Builder::setTasks, PERSISTENT_TASK_PARSER, new ParseField("tasks"));
        // Task description parser initialization
        ObjectParser<TaskDescriptionBuilder<PersistentTaskParams>, String> parser = new ObjectParser<>("named");
        parser.declareObject(TaskDescriptionBuilder::setParams,
                (p, c) -> p.namedObject(PersistentTaskParams.class, c, null), new ParseField("params"));
        parser.declareObject(TaskDescriptionBuilder::setStatus,
                (p, c) -> p.namedObject(Status.class, c, null), new ParseField("status"));
        TASK_DESCRIPTION_PARSER =
                (XContentParser p, Void c, String name) -> parser.parse(p, new TaskDescriptionBuilder<>(name), name);
        // Assignment parser
        ASSIGNMENT_PARSER.declareStringOrNull(constructorArg(), new ParseField("executor_node"));
        ASSIGNMENT_PARSER.declareStringOrNull(constructorArg(), new ParseField("explanation"));
        // Task parser initialization
        PERSISTENT_TASK_PARSER.declareString(TaskBuilder::setId, new ParseField("id"));
        PERSISTENT_TASK_PARSER.declareString(TaskBuilder::setTaskName, new ParseField("name"));
        PERSISTENT_TASK_PARSER.declareLong(TaskBuilder::setAllocationId, new ParseField("allocation_id"));
        PERSISTENT_TASK_PARSER.declareNamedObjects(
                (TaskBuilder<PersistentTaskParams> taskBuilder,
                 List<TaskDescriptionBuilder<PersistentTaskParams>> objects) -> {
                    // Exactly one named task description (keyed by task name) is expected.
                    if (objects.size() != 1) {
                        throw new IllegalArgumentException("only one task description per task is allowed");
                    }
                    TaskDescriptionBuilder<PersistentTaskParams> builder = objects.get(0);
                    taskBuilder.setTaskName(builder.taskName);
                    taskBuilder.setParams(builder.params);
                    taskBuilder.setStatus(builder.status);
                }, TASK_DESCRIPTION_PARSER, new ParseField("task"));
        PERSISTENT_TASK_PARSER.declareObject(TaskBuilder::setAssignment, ASSIGNMENT_PARSER, new ParseField("assignment"));
        PERSISTENT_TASK_PARSER.declareLong(TaskBuilder::setAllocationIdOnLastStatusUpdate,
                new ParseField("allocation_id_on_last_status_update"));
    }

    /**
     * Private builder used in XContent parser to build task-specific portion (params and status)
     */
    private static class TaskDescriptionBuilder<Params extends PersistentTaskParams> {
        private final String taskName;
        private
Params params; private Status status; private TaskDescriptionBuilder(String taskName) { this.taskName = taskName; } private TaskDescriptionBuilder setParams(Params params) { this.params = params; return this; } private TaskDescriptionBuilder setStatus(Status status) { this.status = status; return this; } } public Collection<PersistentTask<?>> tasks() { return this.tasks.values(); } public Map<String, PersistentTask<?>> taskMap() { return this.tasks; } public PersistentTask<?> getTask(String id) { return this.tasks.get(id); } public Collection<PersistentTask<?>> findTasks(String taskName, Predicate<PersistentTask<?>> predicate) { return this.tasks().stream() .filter(p -> taskName.equals(p.getTaskName())) .filter(predicate) .collect(Collectors.toList()); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; PersistentTasksCustomMetaData that = (PersistentTasksCustomMetaData) o; return lastAllocationId == that.lastAllocationId && Objects.equals(tasks, that.tasks); } @Override public int hashCode() { return Objects.hash(tasks, lastAllocationId); } @Override public String toString() { return Strings.toString(this); } public long getNumberOfTasksOnNode(String nodeId, String taskName) { return tasks.values().stream().filter( task -> taskName.equals(task.taskName) && nodeId.equals(task.assignment.executorNode)).count(); } @Override public Version getMinimalSupportedVersion() { return Version.V_5_4_0; } @Override public EnumSet<MetaData.XContentContext> context() { return ALL_CONTEXTS; } public static PersistentTasksCustomMetaData fromXContent(XContentParser parser) { return PERSISTENT_TASKS_PARSER.apply(parser, null).build(); } @SuppressWarnings("unchecked") public static <Params extends PersistentTaskParams> PersistentTask<Params> getTaskWithId(ClusterState clusterState, String taskId) { PersistentTasksCustomMetaData tasks = clusterState.metaData().custom(PersistentTasksCustomMetaData.TYPE); if 
(tasks != null) {
    return (PersistentTask<Params>) tasks.getTask(taskId);
}
return null;
}

public static class Assignment {
    @Nullable
    private final String executorNode;
    private final String explanation;

    public Assignment(String executorNode, String explanation) {
        this.executorNode = executorNode;
        assert explanation != null;
        this.explanation = explanation;
    }

    @Nullable
    public String getExecutorNode() {
        return executorNode;
    }

    public String getExplanation() {
        return explanation;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        Assignment that = (Assignment) o;
        return Objects.equals(executorNode, that.executorNode) &&
                Objects.equals(explanation, that.explanation);
    }

    @Override
    public int hashCode() {
        return Objects.hash(executorNode, explanation);
    }

    // An assignment with a null executor node means the task is unassigned.
    public boolean isAssigned() {
        return executorNode != null;
    }

    @Override
    public String toString() {
        return "node: [" + executorNode + "], explanation: [" + explanation + "]";
    }
}

public static final Assignment INITIAL_ASSIGNMENT = new Assignment(null, "waiting for initial assignment");

/**
 * A record that represents a single running persistent task
 */
public static class PersistentTask<P extends PersistentTaskParams> implements Writeable, ToXContentObject {
    private final String id;
    private final long allocationId;
    private final String taskName;
    @Nullable
    private final P params;
    @Nullable
    private final Status status;
    private final Assignment assignment;
    @Nullable
    private final Long allocationIdOnLastStatusUpdate;

    public PersistentTask(String id, String taskName, P params, long allocationId, Assignment assignment) {
        this(id, allocationId, taskName, params, null, assignment, null);
    }

    // Copy constructor used when the task is reassigned to a new allocation.
    public PersistentTask(PersistentTask<P> task, long allocationId, Assignment assignment) {
        this(task.id, allocationId, task.taskName, task.params, task.status, assignment, task.allocationId);
    }

    // Copy constructor used when only the task status changes.
    public PersistentTask(PersistentTask<P> task, Status status) {
        this(task.id, task.allocationId, task.taskName, task.params, status, task.assignment, task.allocationId);
    }

    private PersistentTask(String id, long allocationId, String taskName, P params,
                           Status status, Assignment assignment, Long allocationIdOnLastStatusUpdate) {
        this.id = id;
        this.allocationId = allocationId;
        this.taskName = taskName;
        this.params = params;
        this.status = status;
        this.assignment = assignment;
        this.allocationIdOnLastStatusUpdate = allocationIdOnLastStatusUpdate;
        // Params and status are named writeables registered under the task name;
        // a mismatch would break serialization round-trips, so fail fast here.
        if (params != null) {
            if (params.getWriteableName().equals(taskName) == false) {
                throw new IllegalArgumentException("params have to have the same writeable name as task. params: " +
                        params.getWriteableName() + " task: " + taskName);
            }
        }
        if (status != null) {
            if (status.getWriteableName().equals(taskName) == false) {
                throw new IllegalArgumentException("status has to have the same writeable name as task. status: " +
                        status.getWriteableName() + " task: " + taskName);
            }
        }
    }

    // NOTE: the read order here must mirror writeTo exactly.
    @SuppressWarnings("unchecked")
    public PersistentTask(StreamInput in) throws IOException {
        id = in.readString();
        allocationId = in.readLong();
        taskName = in.readString();
        params = (P) in.readOptionalNamedWriteable(PersistentTaskParams.class);
        status = in.readOptionalNamedWriteable(Task.Status.class);
        assignment = new Assignment(in.readOptionalString(), in.readString());
        allocationIdOnLastStatusUpdate = in.readOptionalLong();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(id);
        out.writeLong(allocationId);
        out.writeString(taskName);
        out.writeOptionalNamedWriteable(params);
        out.writeOptionalNamedWriteable(status);
        out.writeOptionalString(assignment.executorNode);
        out.writeString(assignment.explanation);
        out.writeOptionalLong(allocationIdOnLastStatusUpdate);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        PersistentTask<?> that = (PersistentTask<?>) o;
        return Objects.equals(id, that.id) &&
                allocationId == that.allocationId &&
                Objects.equals(taskName, that.taskName) &&
                Objects.equals(params, that.params) &&
                Objects.equals(status, that.status) &&
                Objects.equals(assignment, that.assignment) &&
                Objects.equals(allocationIdOnLastStatusUpdate, that.allocationIdOnLastStatusUpdate);
    }

    @Override
    public int hashCode() {
        return Objects.hash(id, allocationId, taskName, params, status, assignment,
                allocationIdOnLastStatusUpdate);
    }

    @Override
    public String toString() {
        return Strings.toString(this);
    }

    public String getId() {
        return id;
    }

    public long getAllocationId() {
        return allocationId;
    }

    public String getTaskName() {
        return taskName;
    }

    @Nullable
    public P getParams() {
        return params;
    }

    @Nullable
    public String getExecutorNode() {
        return assignment.executorNode;
    }

    public Assignment getAssignment() {
        return assignment;
    }

    public boolean isAssigned() {
        return assignment.isAssigned();
    }

    @Nullable
    public Status getStatus() {
        return status;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params xParams) throws IOException {
        builder.startObject();
        {
            builder.field("id", id);
            builder.startObject("task");
            {
                builder.startObject(taskName);
                {
                    if (params != null) {
                        builder.field("params", params, xParams);
                    }
                    if (status != null) {
                        builder.field("status", status, xParams);
                    }
                }
                builder.endObject();
            }
            builder.endObject();
            if (API_CONTEXT.equals(xParams.param(MetaData.CONTEXT_MODE_PARAM, API_CONTEXT))) {
                // These are transient values that shouldn't be persisted to gateway cluster state or snapshot
                builder.field("allocation_id", allocationId);
                builder.startObject("assignment");
                {
                    builder.field("executor_node", assignment.executorNode);
                    builder.field("explanation", assignment.explanation);
                }
                builder.endObject();
                if (allocationIdOnLastStatusUpdate != null) {
                    builder.field("allocation_id_on_last_status_update", allocationIdOnLastStatusUpdate);
                }
            }
        }
        builder.endObject();
        return builder;
    }

    @Override
    public boolean isFragment() {
        return false;
    }
}

private static class
TaskBuilder<Params extends PersistentTaskParams> {
    private String id;
    private long allocationId;
    private String taskName;
    private Params params;
    private Status status;
    private Assignment assignment = INITIAL_ASSIGNMENT;
    private Long allocationIdOnLastStatusUpdate;

    public TaskBuilder<Params> setId(String id) {
        this.id = id;
        return this;
    }

    public TaskBuilder<Params> setAllocationId(long allocationId) {
        this.allocationId = allocationId;
        return this;
    }

    public TaskBuilder<Params> setTaskName(String taskName) {
        this.taskName = taskName;
        return this;
    }

    public TaskBuilder<Params> setParams(Params params) {
        this.params = params;
        return this;
    }

    public TaskBuilder<Params> setStatus(Status status) {
        this.status = status;
        return this;
    }

    public TaskBuilder<Params> setAssignment(Assignment assignment) {
        this.assignment = assignment;
        return this;
    }

    public TaskBuilder<Params> setAllocationIdOnLastStatusUpdate(Long allocationIdOnLastStatusUpdate) {
        this.allocationIdOnLastStatusUpdate = allocationIdOnLastStatusUpdate;
        return this;
    }

    public PersistentTask<Params> build() {
        return new PersistentTask<>(id, allocationId, taskName, params, status,
                assignment, allocationIdOnLastStatusUpdate);
    }
}

@Override
public String getWriteableName() {
    return TYPE;
}

// NOTE: the read order here must mirror writeTo below.
public PersistentTasksCustomMetaData(StreamInput in) throws IOException {
    lastAllocationId = in.readLong();
    tasks = in.readMap(StreamInput::readString, PersistentTask::new);
}

@Override
public void writeTo(StreamOutput out) throws IOException {
    out.writeLong(lastAllocationId);
    out.writeMap(tasks, StreamOutput::writeString, (stream, value) -> value.writeTo(stream));
}

public static NamedDiff<MetaData.Custom> readDiffFrom(StreamInput in) throws IOException {
    return readDiffFrom(MetaData.Custom.class, TYPE, in);
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
    builder.field("last_allocation_id", lastAllocationId);
    builder.startArray("tasks");
    {
        for (PersistentTask<?> entry : tasks.values()) {
            entry.toXContent(builder, params);
        }
    }
    builder.endArray();
    return builder;
}

public static Builder builder() {
    return new Builder();
}

public static Builder builder(PersistentTasksCustomMetaData tasks) {
    return new Builder(tasks);
}

public static class Builder {
    private final Map<String, PersistentTask<?>> tasks = new HashMap<>();
    private long lastAllocationId;
    // Set whenever a mutating operation actually changes the task list.
    private boolean changed;

    private Builder() {
    }

    private Builder(PersistentTasksCustomMetaData tasksInProgress) {
        if (tasksInProgress != null) {
            tasks.putAll(tasksInProgress.tasks);
            lastAllocationId = tasksInProgress.lastAllocationId;
        } else {
            lastAllocationId = 0;
        }
    }

    public long getLastAllocationId() {
        return lastAllocationId;
    }

    private Builder setLastAllocationId(long currentId) {
        this.lastAllocationId = currentId;
        return this;
    }

    private <Params extends PersistentTaskParams> Builder setTasks(List<TaskBuilder<Params>> tasks) {
        for (TaskBuilder builder : tasks) {
            PersistentTask<?> task = builder.build();
            this.tasks.put(task.getId(), task);
        }
        return this;
    }

    private long getNextAllocationId() {
        lastAllocationId++;
        return lastAllocationId;
    }

    /**
     * Adds a new task to the builder
     * <p>
     * After the task is added its id can be found by calling {@link #getLastAllocationId()} method.
     */
    public <Params extends PersistentTaskParams> Builder addTask(String taskId, String taskName, Params params,
                                                                 Assignment assignment) {
        changed = true;
        PersistentTask<?> previousTask = tasks.put(taskId, new PersistentTask<>(taskId, taskName, params,
                getNextAllocationId(), assignment));
        if (previousTask != null) {
            throw new ResourceAlreadyExistsException("Trying to override task with id {" + taskId + "}");
        }
        return this;
    }

    /**
     * Reassigns the task to another node
     */
    public Builder reassignTask(String taskId, Assignment assignment) {
        PersistentTask<?> taskInProgress = tasks.get(taskId);
        if (taskInProgress != null) {
            changed = true;
            tasks.put(taskId, new PersistentTask<>(taskInProgress, getNextAllocationId(), assignment));
        } else {
            throw new ResourceNotFoundException("cannot reassign task with id {" + taskId + "}, the task no longer exists");
        }
        return this;
    }

    /**
     * Updates the task status
     */
    public Builder updateTaskStatus(String taskId, Status status) {
        PersistentTask<?> taskInProgress = tasks.get(taskId);
        if (taskInProgress != null) {
            changed = true;
            tasks.put(taskId, new PersistentTask<>(taskInProgress, status));
        } else {
            throw new ResourceNotFoundException("cannot update task with id {" + taskId + "}, the task no longer exists");
        }
        return this;
    }

    /**
     * Removes the task
     */
    public Builder removeTask(String taskId) {
        if (tasks.remove(taskId) != null) {
            changed = true;
        } else {
            throw new ResourceNotFoundException("cannot remove task with id {" + taskId + "}, the task no longer exists");
        }
        return this;
    }

    /**
     * Checks if the task is currently present in the list
     */
    public boolean hasTask(String taskId) {
        return tasks.containsKey(taskId);
    }

    /**
     * Checks if the task is currently present in the list and has the right allocation id
     */
    public boolean hasTask(String taskId, long allocationId) {
        PersistentTask<?> taskInProgress = tasks.get(taskId);
        if (taskInProgress != null) {
            return taskInProgress.getAllocationId() == allocationId;
        }
        return false;
    }

    Set<String> getCurrentTaskIds() {
        return tasks.keySet();
    }

    /**
     * Returns true if the task list was changed since the builder was created
     */
    public boolean isChanged() {
        return changed;
    }

    public PersistentTasksCustomMetaData build() {
        return new PersistentTasksCustomMetaData(lastAllocationId, Collections.unmodifiableMap(tasks));
    }
}
}
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.weblayer.test; import android.net.Uri; import android.support.test.InstrumentationRegistry; import android.support.test.filters.SmallTest; import android.util.Pair; import android.webkit.ValueCallback; import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.runner.RunWith; import org.chromium.content_public.browser.test.util.CriteriaHelper; import org.chromium.content_public.browser.test.util.TestThreadUtils; import org.chromium.net.test.util.TestWebServer; import org.chromium.weblayer.Download; import org.chromium.weblayer.DownloadCallback; import org.chromium.weblayer.DownloadError; import org.chromium.weblayer.DownloadState; import org.chromium.weblayer.Profile; import org.chromium.weblayer.WebLayer; import org.chromium.weblayer.shell.InstrumentationActivity; import java.io.File; import java.util.ArrayList; import java.util.List; /** * Tests that the DownloadCallback method is invoked for downloads. 
 */
@RunWith(WebLayerJUnit4ClassRunner.class)
public class DownloadCallbackTest {
    @Rule
    public InstrumentationActivityTestRule mActivityTestRule = new InstrumentationActivityTestRule();

    // Whether Download#getFileNameToReportToUser is available; set in setUp from the
    // WebLayer major version running on the device.
    private static boolean sIsFileNameSupported;
    private InstrumentationActivity mActivity;
    private Callback mCallback;

    /**
     * Records every DownloadCallback event so tests can poll for the state they expect.
     * Fields are written on the UI thread and read from the instrumentation thread via
     * CriteriaHelper polling.
     */
    private static class Callback extends DownloadCallback {
        public String mUrl;
        public String mUserAgent;
        public String mContentDisposition;
        public String mMimetype;
        public String mLocation;
        public String mFileName;
        public @DownloadState int mState;
        public @DownloadError int mError;
        public long mContentLength;
        // When true, onInterceptDownload claims the download and the embedder handles it.
        public boolean mIntercept;
        public boolean mSeenStarted;
        public boolean mSeenCompleted;
        public boolean mSeenFailed;

        @Override
        public boolean onInterceptDownload(Uri uri, String userAgent, String contentDisposition,
                String mimetype, long contentLength) {
            mUrl = uri.toString();
            mUserAgent = userAgent;
            mContentDisposition = contentDisposition;
            mMimetype = mimetype;
            mContentLength = contentLength;
            return mIntercept;
        }

        @Override
        public void allowDownload(Uri uri, String requestMethod, Uri requestInitiator,
                ValueCallback<Boolean> callback) {
            // Always permit the download so the start/complete/fail callbacks fire.
            callback.onReceiveValue(true);
        }

        @Override
        public void onDownloadStarted(Download download) {
            mSeenStarted = true;
            // Avoid posting a system notification from the test run.
            download.disableNotification();
        }

        @Override
        public void onDownloadCompleted(Download download) {
            mSeenCompleted = true;
            mLocation = download.getLocation().toString();
            if (sIsFileNameSupported) {
                mFileName = download.getFileNameToReportToUser().toString();
            }
            mState = download.getState();
            mError = download.getError();
            mMimetype = download.getMimeType();
        }

        @Override
        public void onDownloadFailed(Download download) {
            mSeenFailed = true;
            mState = download.getState();
            mError = download.getError();
        }

        // Blocks the instrumentation thread until onInterceptDownload has run.
        public void waitForIntercept() {
            CriteriaHelper.pollInstrumentationThread(() -> Assert.assertNotNull(mUrl));
        }

        public void waitForStarted() {
            CriteriaHelper.pollInstrumentationThread(() -> mSeenStarted);
        }

        public void waitForCompleted() {
            CriteriaHelper.pollInstrumentationThread(() -> mSeenCompleted);
        }

        public void waitForFailed() {
            CriteriaHelper.pollInstrumentationThread(() -> mSeenFailed);
        }
    }

    @Before
    public void setUp() {
        mActivity = mActivityTestRule.launchShellWithUrl(null);
        Assert.assertNotNull(mActivity);

        // Don't fill up the default download directory on the device.
        String tempDownloadDirectory =
                InstrumentationRegistry.getInstrumentation().getTargetContext().getCacheDir()
                + "/weblayer/Downloads";
        mCallback = new Callback();
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            Profile profile = mActivity.getBrowser().getProfile();
            profile.setDownloadCallback(mCallback);
            profile.setDownloadDirectory(new File(tempDownloadDirectory));
            sIsFileNameSupported =
                    WebLayer.getSupportedMajorVersion(mActivity.getApplicationContext()) >= 86;
        });
    }

    /**
     * Verifies the DownloadCallback is informed of downloads resulting from navigations to pages
     * with Content-Disposition attachment.
     */
    @Test
    @SmallTest
    public void testInterceptDownloadByContentDisposition() throws Throwable {
        mCallback.mIntercept = true;
        final String data = "download data";
        final String contentDisposition = "attachment;filename=\"download.txt\"";
        final String mimetype = "text/plain";

        List<Pair<String, String>> downloadHeaders = new ArrayList<Pair<String, String>>();
        downloadHeaders.add(Pair.create("Content-Disposition", contentDisposition));
        downloadHeaders.add(Pair.create("Content-Type", mimetype));
        downloadHeaders.add(Pair.create("Content-Length", Integer.toString(data.length())));

        TestWebServer webServer = TestWebServer.start();
        try {
            final String pageUrl = webServer.setResponse("/download.txt", data, downloadHeaders);
            TestThreadUtils.runOnUiThreadBlocking(() -> {
                mActivity.getTab().getNavigationController().navigate(Uri.parse(pageUrl));
            });
            mCallback.waitForIntercept();

            Assert.assertEquals(pageUrl, mCallback.mUrl);
            Assert.assertEquals(contentDisposition, mCallback.mContentDisposition);
            Assert.assertEquals(mimetype, mCallback.mMimetype);
            Assert.assertEquals(data.length(), mCallback.mContentLength);
            // TODO(estade): verify mUserAgent.
        } finally {
            webServer.shutdown();
        }
    }

    /**
     * Verifies the DownloadCallback is informed of downloads resulting from the user clicking on a
     * download link.
     */
    @Test
    @SmallTest
    public void testInterceptDownloadByLinkAttribute() {
        mCallback.mIntercept = true;

        String pageUrl = mActivityTestRule.getTestDataURL("download.html");
        mActivityTestRule.navigateAndWait(pageUrl);

        // download.html's link covers the page; tapping the center triggers it.
        EventUtils.simulateTouchCenterOfView(mActivity.getWindow().getDecorView());

        mCallback.waitForIntercept();
        Assert.assertEquals(mActivityTestRule.getTestDataURL("lorem_ipsum.txt"), mCallback.mUrl);
    }

    @Test
    @SmallTest
    public void testBasic() {
        String url = mActivityTestRule.getTestDataURL("content-disposition.html");
        TestThreadUtils.runOnUiThreadBlocking(
                () -> { mActivity.getTab().getNavigationController().navigate(Uri.parse(url)); });

        mCallback.waitForStarted();
        mCallback.waitForCompleted();
        // The download must land in the temp directory configured in setUp.
        Assert.assertTrue(mCallback.mLocation.contains(
                "org.chromium.weblayer.shell/cache/weblayer/Downloads/"));
        if (sIsFileNameSupported) {
            Assert.assertTrue(mCallback.mFileName.contains("test"));
        }
        Assert.assertEquals(DownloadState.COMPLETE, mCallback.mState);
        Assert.assertEquals(DownloadError.NO_ERROR, mCallback.mError);
        Assert.assertEquals("text/html", mCallback.mMimetype);
    }
}
package im.actor.sdk.controllers.fragment; import android.app.ProgressDialog; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.support.v4.graphics.drawable.DrawableCompat; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; import im.actor.core.viewmodel.Command; import im.actor.core.viewmodel.CommandCallback; import im.actor.runtime.actors.Actor; import im.actor.runtime.actors.ActorCreator; import im.actor.runtime.actors.ActorRef; import im.actor.runtime.actors.ActorSystem; import im.actor.runtime.actors.Props; import im.actor.runtime.actors.messages.PoisonPill; import im.actor.runtime.function.Consumer; import im.actor.runtime.promise.Promise; import im.actor.sdk.ActorSDK; import im.actor.sdk.ActorStyle; import im.actor.sdk.R; import im.actor.sdk.util.ViewUtils; public class BaseFragment extends BinderCompatFragment { protected final ActorStyle style = ActorSDK.sharedActor().style; private ActorRef promiseActor = ActorSystem.system().actorOf(Props.create(new ActorCreator() { @Override public Actor create() { return new Actor(); } }), "actor/promise_actor_" + hashCode()); @Override public void onCreate(Bundle saveInstance) { super.onCreate(saveInstance); setHasOptionsMenu(true); } public void goneView(View view) { ViewUtils.goneView(view); } public void goneView(final View view, boolean isAnimated) { ViewUtils.goneView(view, isAnimated); } public void goneView(final View view, boolean isAnimated, boolean isSlow) { ViewUtils.goneView(view, isAnimated, isSlow); } public void hideView(View view) { ViewUtils.hideView(view); } public void hideView(final View view, boolean isAnimated) { ViewUtils.hideView(view, isAnimated); } public void hideView(final View view, boolean isAnimated, boolean isSlow) { ViewUtils.hideView(view, isAnimated, isSlow); } public void showView(View view) { ViewUtils.showView(view); } public void 
showView(final View view, boolean isAnimated) { ViewUtils.showView(view, isAnimated); } public void showView(final View view, boolean isAnimated, boolean isSlow) { ViewUtils.showView(view, isAnimated, isSlow); } public void wave(View[] layers, float scale, int duration, float offset) { ViewUtils.wave(layers, scale, duration, offset); } public void elevateView(View view) { ViewUtils.elevateView(view); } public void elevateView(View view, float scale) { ViewUtils.elevateView(view, scale); } public void elevateView(View view, boolean isAnamated, float scale) { ViewUtils.elevateView(view, isAnamated, scale); } public void elevateView(final View view, boolean isAnimated) { ViewUtils.elevateView(view, isAnimated); } public void demoteView(View view) { ViewUtils.demoteView(view); } public void demoteView(final View view, boolean isAnimated) { ViewUtils.demoteView(view, isAnimated); } public void onClick(View view, int id, final View.OnClickListener listener) { onClick(view.findViewById(id), listener); } public void onClick(View view, final View.OnClickListener listener) { view.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { listener.onClick(v); } }); } public <T> void executeSilent(Command<T> cmd, final CommandCallback<T> callback) { cmd.start(callback); } public <T> void execute(Command<T> cmd, int title, final CommandCallback<T> callback) { final ProgressDialog dialog = ProgressDialog.show(getContext(), "", getString(title), true, false); cmd.start(new CommandCallback<T>() { @Override public void onResult(T res) { dismissDialog(dialog); ; callback.onResult(res); } @Override public void onError(Exception e) { dismissDialog(dialog); ; callback.onError(e); } }); } public <T> void execute(Command<T> cmd) { execute(cmd, R.string.progress_common); } public <T> void execute(Command<T> cmd, int title) { final ProgressDialog dialog = ProgressDialog.show(getContext(), "", getString(title), true, false); cmd.start(new CommandCallback<T>() { 
@Override public void onResult(T res) { dismissDialog(dialog); ; } @Override public void onError(Exception e) { dismissDialog(dialog); ; } }); } public <T> void execute(Promise<T> promise) { execute(promise, R.string.progress_common); } public <T> void execute(Promise<T> promise, int title) { final ProgressDialog dialog = ProgressDialog.show(getContext(), "", getString(title), true, false); promise .then(new Consumer<T>() { @Override public void apply(T t) { dismissDialog(dialog); } }) .failure(new Consumer<Exception>() { @Override public void apply(Exception e) { dismissDialog(dialog); } }); } public View buildRecord(String titleText, String valueText, LayoutInflater inflater, ViewGroup container) { return buildRecord(titleText, valueText, 0, false, true, inflater, container); } public View buildRecord(String titleText, String valueText, boolean isLast, LayoutInflater inflater, ViewGroup container) { return buildRecord(titleText, valueText, 0, false, isLast, inflater, container); } public View buildRecord(String titleText, String valueText, int resourceId, boolean showIcon, boolean isLast, LayoutInflater inflater, ViewGroup container) { final View recordView = inflater.inflate(R.layout.contact_record, container, false); TextView value = (TextView) recordView.findViewById(R.id.value); TextView title = (TextView) recordView.findViewById(R.id.title); title.setText(titleText); title.setTextColor(style.getTextSecondaryColor()); value.setTextColor(style.getTextPrimaryColor()); value.setText(valueText); if (!isLast) { recordView.findViewById(R.id.divider).setVisibility(View.GONE); } if (resourceId != 0 && showIcon) { ImageView iconView = (ImageView) recordView.findViewById(R.id.recordIcon); Drawable drawable = DrawableCompat.wrap(getResources().getDrawable(resourceId)); DrawableCompat.setTint(drawable, style.getSettingsIconColor()); iconView.setImageDrawable(drawable); } container.addView(recordView); return recordView; } public View buildRecordBig(String valueText, int 
resourceId, boolean showIcon, boolean isLast, LayoutInflater inflater, ViewGroup container) { final View recordView = inflater.inflate(R.layout.contact_record_big, container, false); TextView value = (TextView) recordView.findViewById(R.id.value); value.setTextColor(style.getTextPrimaryColor()); value.setText(valueText); if (!isLast) { recordView.findViewById(R.id.divider).setVisibility(View.GONE); } if (resourceId != 0 && showIcon) { ImageView iconView = (ImageView) recordView.findViewById(R.id.recordIcon); Drawable drawable = DrawableCompat.wrap(getResources().getDrawable(resourceId)); DrawableCompat.setTint(drawable, style.getSettingsIconColor()); iconView.setImageDrawable(drawable); } container.addView(recordView); return recordView; } public View buildRecordAction(String valueText, int resourceId, boolean showIcon, boolean isLast, LayoutInflater inflater, ViewGroup container) { final View recordView = inflater.inflate(R.layout.contact_record_big, container, false); TextView value = (TextView) recordView.findViewById(R.id.value); value.setTextColor(style.getGroupActionAddTextColor()); value.setText(valueText); if (!isLast) { recordView.findViewById(R.id.divider).setVisibility(View.GONE); } if (resourceId != 0 && showIcon) { ImageView iconView = (ImageView) recordView.findViewById(R.id.recordIcon); Drawable drawable = DrawableCompat.wrap(getResources().getDrawable(resourceId)); DrawableCompat.setTint(drawable, style.getGroupActionAddIconColor()); iconView.setImageDrawable(drawable); } container.addView(recordView); return recordView; } public ActorRef getPromiseActor() { return promiseActor; } public void dismissDialog(ProgressDialog progressDialog) { try { progressDialog.dismiss(); } catch (Exception ex) { ex.printStackTrace(); } } @Override public void onDestroyView() { super.onDestroyView(); promiseActor.send(PoisonPill.INSTANCE); } }
/* * Copyright (C) 2014-2022 Philip Helger (www.helger.com) * philip[at]helger[dot]com * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.helger.photon.app.html; import java.util.Collection; import java.util.Map; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.annotation.concurrent.ThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.helger.commons.ValueEnforcer; import com.helger.commons.annotation.ReturnsMutableCopy; import com.helger.commons.collection.impl.CommonsArrayList; import com.helger.commons.collection.impl.CommonsLinkedHashMap; import com.helger.commons.collection.impl.ICommonsList; import com.helger.commons.collection.impl.ICommonsOrderedMap; import com.helger.commons.concurrent.SimpleLock; import com.helger.commons.io.resource.IReadableResource; import com.helger.html.meta.IMetaElement; import com.helger.html.meta.MetaElement; import com.helger.html.meta.MetaElementList; import com.helger.web.scope.IRequestWebScopeWithoutResponse; import com.helger.web.scope.mgr.WebScopeManager; import com.helger.xml.microdom.util.XMLMapHandler; /** * This class keeps track of all the meta elements that must be included * globally or for a single request. 
 *
 * @author Philip Helger
 */
@ThreadSafe
public final class PhotonMetaElements
{
  public static final String DEFAULT_FILENAME = "html/metatags.xml";

  // Request-scope attribute key under which the per-request MetaElementList is stored.
  private static final String REQUEST_ATTR_METAELEMENTS = PhotonMetaElements.class.getName ();
  private static final Logger LOGGER = LoggerFactory.getLogger (PhotonMetaElements.class);
  // Meta elements registered for every request. MetaElementList is assumed to handle its
  // own synchronization — TODO confirm, since it is mutated without s_aLock here.
  private static final MetaElementList s_aGlobal = new MetaElementList ();
  private static final SimpleLock s_aLock = new SimpleLock ();

  private PhotonMetaElements ()
  {}

  /**
   * Read meta elements from an XML name/value map resource and add them to the
   * target list. Missing resources are silently ignored; unparsable resources are
   * logged as errors (any entries read before the failure may still be added).
   *
   * @param aRes
   *        The resource to read. May not be <code>null</code>.
   * @param aTarget
   *        The list to add the parsed elements to. May not be <code>null</code>.
   */
  public static void _readMetaElements (@Nonnull final IReadableResource aRes, @Nonnull final MetaElementList aTarget)
  {
    ValueEnforcer.notNull (aRes, "Res");
    ValueEnforcer.notNull (aTarget, "Target");

    if (aRes.exists ())
    {
      final ICommonsOrderedMap <String, String> aMetaElements = new CommonsLinkedHashMap <> ();
      if (XMLMapHandler.readMap (aRes, aMetaElements).isFailure ())
        LOGGER.error ("Failed to read meta element file " + aRes.getPath ());

      for (final Map.Entry <String, String> aEntry : aMetaElements.entrySet ())
        aTarget.addMetaElement (MetaElement.createMeta (aEntry.getKey (), aEntry.getValue ()));
    }
  }

  /**
   * Read meta elements from the passed resource and register them globally.
   *
   * @param aRes
   *        The resource to read. May not be <code>null</code>.
   */
  public static void readMetaElementsForGlobal (@Nonnull final IReadableResource aRes)
  {
    _readMetaElements (aRes, s_aGlobal);
  }

  /**
   * Register a new meta element for global scope.
   *
   * @param aMetaElement
   *        The meta element to use. May not be <code>null</code>.
   */
  public static void registerMetaElementForGlobal (@Nonnull final IMetaElement aMetaElement)
  {
    s_aGlobal.addMetaElement (aMetaElement);
  }

  /**
   * Unregister an existing meta element for global scope.
   *
   * @param sMetaElementName
   *        The meta element name to be removed. May not be <code>null</code>.
   */
  public static void unregisterMetaElementForGlobal (@Nullable final String sMetaElementName)
  {
    s_aGlobal.removeMetaElement (sMetaElementName);
  }

  /**
   * Unregister all existing meta elements from global scope.
   */
  public static void unregisterAllMetaElementsFromGlobal ()
  {
    s_aGlobal.removeAllMetaElements ();
  }

  /**
   * @return A non-<code>null</code> set with all meta elements to be included
   *         globally.
   */
  @Nonnull
  @ReturnsMutableCopy
  public static ICommonsList <IMetaElement> getAllRegisteredMetaElementsForGlobal ()
  {
    return s_aGlobal.getAllMetaElements ();
  }

  public static void getAllRegisteredMetaElementsForGlobal (@Nonnull final Collection <? super IMetaElement> aTarget)
  {
    s_aGlobal.getAllMetaElements (aTarget);
  }

  /**
   * @return <code>true</code> if at least a single meta element has been
   *         registered globally.
   */
  public static boolean hasRegisteredMetaElementsForGlobal ()
  {
    return s_aGlobal.hasMetaElements ();
  }

  /**
   * Get the per-request meta element list from the current request scope.
   * Access is serialized through {@link #s_aLock} so concurrent callers cannot
   * both create a list for the same request.
   *
   * @param bCreateIfNotExisting
   *        <code>true</code> to lazily create and store the list if absent.
   * @return <code>null</code> only if absent and creation was not requested.
   */
  @Nullable
  private static MetaElementList _getPerRequestSet (final boolean bCreateIfNotExisting)
  {
    final IRequestWebScopeWithoutResponse aRequestScope = WebScopeManager.getRequestScope ();
    return s_aLock.lockedGet ( () -> {
      MetaElementList ret = aRequestScope.attrs ().getCastedValue (REQUEST_ATTR_METAELEMENTS);
      if (ret == null && bCreateIfNotExisting)
      {
        ret = new MetaElementList ();
        aRequestScope.attrs ().putIn (REQUEST_ATTR_METAELEMENTS, ret);
      }
      return ret;
    });
  }

  /**
   * Register a new meta element only for this request
   *
   * @param aMetaElement
   *        The meta element to use. May not be <code>null</code>.
   */
  public static void registerMetaElementForThisRequest (@Nonnull final IMetaElement aMetaElement)
  {
    _getPerRequestSet (true).addMetaElement (aMetaElement);
  }

  /**
   * Unregister an existing meta element only from this request
   *
   * @param sMetaElementName
   *        The name of the meta element to be removed. May not be
   *        <code>null</code>.
   */
  public static void unregisterMetaElementFromThisRequest (@Nullable final String sMetaElementName)
  {
    final MetaElementList aSet = _getPerRequestSet (false);
    if (aSet != null)
      aSet.removeMetaElement (sMetaElementName);
  }

  /**
   * Unregister all existing meta elements from this request
   */
  public static void unregisterAllMetaElementsFromThisRequest ()
  {
    final MetaElementList aSet = _getPerRequestSet (false);
    if (aSet != null)
      aSet.removeAllMetaElements ();
  }

  /**
   * @return A non-<code>null</code> set with all meta elements to be included
   *         in this request.
   */
  @Nonnull
  @ReturnsMutableCopy
  public static ICommonsList <IMetaElement> getAllRegisteredMetaElementsForThisRequest ()
  {
    final MetaElementList aSet = _getPerRequestSet (false);
    return aSet == null ? new CommonsArrayList <> () : aSet.getAllMetaElements ();
  }

  public static void getAllRegisteredMetaElementsForThisRequest (@Nonnull final Collection <? super IMetaElement> aTarget)
  {
    final MetaElementList aSet = _getPerRequestSet (false);
    if (aSet != null)
      aSet.getAllMetaElements (aTarget);
  }

  /**
   * @return <code>true</code> if at least a single meta element has been
   *         registered for this request only
   */
  public static boolean hasRegisteredMetaElementsForThisRequest ()
  {
    final MetaElementList aSet = _getPerRequestSet (false);
    return aSet != null && aSet.hasMetaElements ();
  }
}
package mil.nga.giat.geowave.adapter.vector.transaction; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Random; import java.util.Set; import mil.nga.giat.geowave.adapter.vector.transaction.TransactionNotification; import mil.nga.giat.geowave.adapter.vector.transaction.ZooKeeperTransactionsAllocater; import org.apache.curator.test.TestingServer; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.Watcher.Event.KeeperState; import org.apache.zookeeper.ZooKeeper; import org.apache.zookeeper.ZooKeeper.States; import org.junit.After; import org.junit.Before; import org.junit.Test; public class ZooKeeperTransactionAllocaterTest { TestingServer zkTestServer; ZooKeeperTransactionsAllocater allocater; final List<String> createdTXIds = new ArrayList<String>(); final List<Throwable> failures = new ArrayList<Throwable>(); final Set<String> activeTX = new HashSet<String>(); volatile boolean shutdown = false; private Random random = new Random(); private int maxSize = 9; @Before public void startZookeeper() throws Exception { zkTestServer = new TestingServer( 12181); allocater = new ZooKeeperTransactionsAllocater( zkTestServer.getConnectString(), "me", new TransactionNotification() { @Override public boolean transactionCreated( String clientID, String txID ) { synchronized (createdTXIds) { if (createdTXIds.size() == maxSize) return false; createdTXIds.add(txID); return true; } } }); } private int runTest( final boolean recovery ) throws InterruptedException { Thread[] thr = new Thread[10]; for (int i = 0; i < thr.length; i++) { thr[i] = new Thread( new TXRequester( recovery)); thr[i].start(); } for (int i = 0; i < thr.length; i++) { thr[i].join(); } for (Throwable error : failures) { error.printStackTrace(); } assertEquals( 0, failures.size()); assertEquals( 0, 
activeTX.size()); return thr.length; } @Test public void test() throws InterruptedException { int workDone = runTest(false); System.out.println("Total created transactionIDS " + createdTXIds.size()); assertTrue(createdTXIds.size() <= workDone); } @Test public void recoveryTest() throws InterruptedException { Thread thr = new Thread( new Runnable() { @Override public void run() { boolean ok = false; while (!ok) { ok = true; try { // wait for some activity before closing the // session to test recovery synchronized (activeTX) { activeTX.wait(); } // Per hint from ZooKeeper pages, simulate a // session timeout by attaching an instance // to the same session and then closing it. final Object Lock = new Long( 122); final ZooKeeper kp = new ZooKeeper( zkTestServer.getConnectString(), 5000, new Watcher() { @Override public void process( WatchedEvent event ) { if (event.getState() == KeeperState.SyncConnected) { synchronized (Lock) { Lock.notify(); } } } }, allocater.getConnection().getSessionId(), allocater.getConnection().getSessionPasswd()); // do not close until the connection is // established synchronized (Lock) { if (kp.getState() == States.CONNECTING) { synchronized (Lock) { Lock.wait(); } } kp.close(); } } catch (Exception e) { ok = false; e.printStackTrace(); } } } }); thr.start(); runTest(true); thr.join(); } private class TXRequester implements Runnable { final boolean recovery; private TXRequester( final boolean recovery ) { this.recovery = recovery; } int s = 0; @Override public void run() { while (s < 50 && !shutdown) { s++; try { Thread.sleep(100); } catch (InterruptedException e) {} try { String txID = allocater.getTransaction(); synchronized (activeTX) { // no guarantees with forced session close as tested in // the recovery test assert (recovery || !activeTX.contains(txID)); // throws // assertion // error activeTX.add(txID); activeTX.notifyAll(); } try { Thread.sleep(200 + (Math.abs(random.nextInt()) % 200)); } catch (InterruptedException e) {} 
synchronized (activeTX) { activeTX.remove(txID); } allocater.releaseTransaction(txID); } catch (Throwable e) { synchronized (failures) { failures.add(e); shutdown = true; } } } } } @Test public void testPreallocate() throws IOException { final List<String> precreatedTXIds = new ArrayList<String>(); ZooKeeperTransactionsAllocater preallocater = new ZooKeeperTransactionsAllocater( zkTestServer.getConnectString(), "fred", new TransactionNotification() { @Override public boolean transactionCreated( String clientID, String txID ) { synchronized (createdTXIds) { precreatedTXIds.add(txID); return true; } } }); preallocater.preallocateTransactionIDs( 10, "wilma"); assertEquals( 10, precreatedTXIds.size()); final String txId = preallocater.getTransaction(); assertTrue(precreatedTXIds.contains(txId)); preallocater.releaseTransaction(txId); } @After public void stopZookeeper() throws IOException, InterruptedException { allocater.close(); zkTestServer.stop(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.connect.runtime.distributed; import org.apache.kafka.clients.consumer.internals.AbstractCoordinator; import org.apache.kafka.clients.consumer.internals.ConsumerNetworkClient; import org.apache.kafka.clients.GroupRebalanceConfig; import org.apache.kafka.common.metrics.Measurable; import org.apache.kafka.common.metrics.MetricConfig; import org.apache.kafka.common.metrics.Metrics; import org.apache.kafka.common.requests.JoinGroupRequest; import org.apache.kafka.common.utils.LogContext; import org.apache.kafka.common.utils.Time; import org.apache.kafka.common.utils.Timer; import org.apache.kafka.connect.storage.ConfigBackingStore; import org.apache.kafka.connect.util.ConnectorTaskId; import org.slf4j.Logger; import java.io.Closeable; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import static org.apache.kafka.common.message.JoinGroupRequestData.JoinGroupRequestProtocolCollection; import static org.apache.kafka.common.message.JoinGroupResponseData.JoinGroupResponseMember; import static 
org.apache.kafka.connect.runtime.distributed.ConnectProtocolCompatibility.EAGER;

/**
 * This class manages the coordination process with the Kafka group coordinator on the broker for managing assignments
 * to workers.
 */
public class WorkerCoordinator extends AbstractCoordinator implements Closeable {
    // Currently doesn't support multiple task assignment strategies, so we just fill in a default value
    public static final String DEFAULT_SUBPROTOCOL = "default";

    private final Logger log;
    private final String restUrl;
    private final ConfigBackingStore configStorage;
    // The assignment from the most recent completed rebalance, null before the first join.
    private ExtendedAssignment assignmentSnapshot;
    private ClusterConfigState configSnapshot;
    private final WorkerRebalanceListener listener;
    private final ConnectProtocolCompatibility protocolCompatibility;
    private LeaderState leaderState;

    private boolean rejoinRequested;
    // The protocol actually negotiated by the group, which may differ from the configured
    // protocolCompatibility; volatile because it is read outside the rebalance path.
    private volatile ConnectProtocolCompatibility currentConnectProtocol;
    private final ConnectAssignor eagerAssignor;
    private final ConnectAssignor incrementalAssignor;

    /**
     * Initialize the coordination manager.
     */
    public WorkerCoordinator(GroupRebalanceConfig config,
                             LogContext logContext,
                             ConsumerNetworkClient client,
                             Metrics metrics,
                             String metricGrpPrefix,
                             Time time,
                             String restUrl,
                             ConfigBackingStore configStorage,
                             WorkerRebalanceListener listener,
                             ConnectProtocolCompatibility protocolCompatibility,
                             int maxDelay) {
        super(config, logContext, client, metrics, metricGrpPrefix, time);
        this.log = logContext.logger(WorkerCoordinator.class);
        this.restUrl = restUrl;
        this.configStorage = configStorage;
        this.assignmentSnapshot = null;
        // Constructed for its metric-registration side effect only.
        new WorkerCoordinatorMetrics(metrics, metricGrpPrefix);
        this.listener = listener;
        this.rejoinRequested = false;
        this.protocolCompatibility = protocolCompatibility;
        this.incrementalAssignor = new IncrementalCooperativeAssignor(logContext, time, maxDelay);
        this.eagerAssignor = new EagerAssignor(logContext);
        this.currentConnectProtocol = protocolCompatibility;
    }

    @Override
    public void requestRejoin() {
        rejoinRequested = true;
    }

    @Override
    public String protocolType() {
        return "connect";
    }

    // expose for tests
    @Override
    protected synchronized boolean ensureCoordinatorReady(final Timer timer) {
        return super.ensureCoordinatorReady(timer);
    }

    /**
     * Drive coordinator I/O (coordinator discovery, group join, heartbeats) until the
     * timeout (in milliseconds) expires.
     */
    public void poll(long timeout) {
        // poll for io until the timeout expires
        final long start = time.milliseconds();
        long now = start;
        long remaining;

        do {
            if (coordinatorUnknown()) {
                ensureCoordinatorReady(time.timer(Long.MAX_VALUE));
                now = time.milliseconds();
            }

            if (rejoinNeededOrPending()) {
                ensureActiveGroup();
                now = time.milliseconds();
            }

            pollHeartbeat(now);

            long elapsed = now - start;
            remaining = timeout - elapsed;

            // Note that because the network client is shared with the background heartbeat thread,
            // we do not want to block in poll longer than the time to the next heartbeat.
            long pollTimeout = Math.min(Math.max(0, remaining), timeToNextHeartbeat(now));
            client.poll(time.timer(pollTimeout));

            now = time.milliseconds();
            elapsed = now - start;
            remaining = timeout - elapsed;
        } while (remaining > 0);
    }

    /**
     * Build the JoinGroup protocol metadata advertising this worker's state (REST URL,
     * config offset, current assignment) for the configured compatibility mode.
     */
    @Override
    public JoinGroupRequestProtocolCollection metadata() {
        configSnapshot = configStorage.snapshot();
        ExtendedWorkerState workerState = new ExtendedWorkerState(restUrl, configSnapshot.offset(), assignmentSnapshot);
        switch (protocolCompatibility) {
            case EAGER:
                return ConnectProtocol.metadataRequest(workerState);
            case COMPATIBLE:
                return IncrementalCooperativeConnectProtocol.metadataRequest(workerState, false);
            case SESSIONED:
                return IncrementalCooperativeConnectProtocol.metadataRequest(workerState, true);
            default:
                throw new IllegalStateException("Unknown Connect protocol compatibility mode " + protocolCompatibility);
        }
    }

    @Override
    protected void onJoinComplete(int generation, String memberId, String protocol, ByteBuffer memberAssignment) {
        ExtendedAssignment newAssignment = IncrementalCooperativeConnectProtocol.deserializeAssignment(memberAssignment);
        log.debug("Deserialized new assignment: {}", newAssignment);
        currentConnectProtocol = ConnectProtocolCompatibility.fromProtocol(protocol);
        // At this point we always consider ourselves to be a member of the cluster, even if there was an assignment
        // error (the leader couldn't make the assignment) or we are behind the config and cannot yet work on our assigned
        // tasks. It's the responsibility of the code driving this process to decide how to react (e.g. trying to get
        // up to date, try to rejoin again, leaving the group and backing off, etc.).
        rejoinRequested = false;
        if (currentConnectProtocol != EAGER) {
            // Cooperative protocols: apply revocations, then merge the retained portion of the
            // previous assignment into the new one (the new assignment is incremental).
            if (!newAssignment.revokedConnectors().isEmpty() || !newAssignment.revokedTasks().isEmpty()) {
                listener.onRevoked(newAssignment.leader(), newAssignment.revokedConnectors(), newAssignment.revokedTasks());
            }

            if (assignmentSnapshot != null) {
                assignmentSnapshot.connectors().removeAll(newAssignment.revokedConnectors());
                assignmentSnapshot.tasks().removeAll(newAssignment.revokedTasks());
                log.debug("After revocations snapshot of assignment: {}", assignmentSnapshot);
                newAssignment.connectors().addAll(assignmentSnapshot.connectors());
                newAssignment.tasks().addAll(assignmentSnapshot.tasks());
            }
            log.debug("Augmented new assignment: {}", newAssignment);
        }
        assignmentSnapshot = newAssignment;
        listener.onAssigned(assignmentSnapshot, generation);
    }

    @Override
    protected Map<String, ByteBuffer> performAssignment(String leaderId, String protocol,
                                                        List<JoinGroupResponseMember> allMemberMetadata) {
        // Only the elected leader runs this; dispatch to the assignor matching the
        // group-negotiated protocol, not the locally configured one.
        return ConnectProtocolCompatibility.fromProtocol(protocol) == EAGER
               ? eagerAssignor.performAssignment(leaderId, protocol, allMemberMetadata, this)
               : incrementalAssignor.performAssignment(leaderId, protocol, allMemberMetadata, this);
    }

    @Override
    protected void onJoinPrepare(int generation, String memberId) {
        log.info("Rebalance started");
        leaderState(null);
        if (currentConnectProtocol == EAGER) {
            // Eager protocol revokes everything before rejoining.
            log.debug("Revoking previous assignment {}", assignmentSnapshot);
            if (assignmentSnapshot != null && !assignmentSnapshot.failed())
                listener.onRevoked(assignmentSnapshot.leader(), assignmentSnapshot.connectors(), assignmentSnapshot.tasks());
        } else {
            log.debug("Cooperative rebalance triggered. Keeping assignment {} until it's "
                    + "explicitly revoked.", assignmentSnapshot);
        }
    }

    @Override
    protected boolean rejoinNeededOrPending() {
        return super.rejoinNeededOrPending() || (assignmentSnapshot == null || assignmentSnapshot.failed()) || rejoinRequested;
    }

    @Override
    public String memberId() {
        Generation generation = generationIfStable();
        if (generation != null)
            return generation.memberId;
        return JoinGroupRequest.UNKNOWN_MEMBER_ID;
    }

    private boolean isLeader() {
        return assignmentSnapshot != null && memberId().equals(assignmentSnapshot.leader());
    }

    // Returns null when this worker is not the (stable) leader or a rebalance is pending.
    public String ownerUrl(String connector) {
        if (rejoinNeededOrPending() || !isLeader())
            return null;
        return leaderState().ownerUrl(connector);
    }

    // Returns null when this worker is not the (stable) leader or a rebalance is pending.
    public String ownerUrl(ConnectorTaskId task) {
        if (rejoinNeededOrPending() || !isLeader())
            return null;
        return leaderState().ownerUrl(task);
    }

    /**
     * Get an up-to-date snapshot of the cluster configuration.
     *
     * @return the state of the cluster configuration; the result is not locally cached
     */
    public ClusterConfigState configFreshSnapshot() {
        return configStorage.snapshot();
    }

    /**
     * Get a snapshot of the cluster configuration.
     *
     * @return the state of the cluster configuration
     */
    public ClusterConfigState configSnapshot() {
        return configSnapshot;
    }

    /**
     * Set the state of the cluster configuration to this worker coordinator.
     *
     * @param update the updated state of the cluster configuration
     */
    public void configSnapshot(ClusterConfigState update) {
        configSnapshot = update;
    }

    /**
     * Get the leader state stored in this worker coordinator.
     *
     * @return the leader state
     */
    private LeaderState leaderState() {
        return leaderState;
    }

    /**
     * Store the leader state to this worker coordinator.
     *
     * @param update the updated leader state
     */
    public void leaderState(LeaderState update) {
        leaderState = update;
    }

    /**
     * Get the version of the connect protocol that is currently active in the group of workers.
* * @return the current connect protocol version */ public short currentProtocolVersion() { return currentConnectProtocol.protocolVersion(); } private class WorkerCoordinatorMetrics { public final String metricGrpName; public WorkerCoordinatorMetrics(Metrics metrics, String metricGrpPrefix) { this.metricGrpName = metricGrpPrefix + "-coordinator-metrics"; Measurable numConnectors = new Measurable() { @Override public double measure(MetricConfig config, long now) { if (assignmentSnapshot == null) { return 0.0; } return assignmentSnapshot.connectors().size(); } }; Measurable numTasks = new Measurable() { @Override public double measure(MetricConfig config, long now) { if (assignmentSnapshot == null) { return 0.0; } return assignmentSnapshot.tasks().size(); } }; metrics.addMetric(metrics.metricName("assigned-connectors", this.metricGrpName, "The number of connector instances currently assigned to this consumer"), numConnectors); metrics.addMetric(metrics.metricName("assigned-tasks", this.metricGrpName, "The number of tasks currently assigned to this consumer"), numTasks); } } public static <K, V> Map<V, K> invertAssignment(Map<K, Collection<V>> assignment) { Map<V, K> inverted = new HashMap<>(); for (Map.Entry<K, Collection<V>> assignmentEntry : assignment.entrySet()) { K key = assignmentEntry.getKey(); for (V value : assignmentEntry.getValue()) inverted.put(value, key); } return inverted; } public static class LeaderState { private final Map<String, ExtendedWorkerState> allMembers; private final Map<String, String> connectorOwners; private final Map<ConnectorTaskId, String> taskOwners; public LeaderState(Map<String, ExtendedWorkerState> allMembers, Map<String, Collection<String>> connectorAssignment, Map<String, Collection<ConnectorTaskId>> taskAssignment) { this.allMembers = allMembers; this.connectorOwners = invertAssignment(connectorAssignment); this.taskOwners = invertAssignment(taskAssignment); } private String ownerUrl(ConnectorTaskId id) { String ownerId = 
taskOwners.get(id); if (ownerId == null) return null; return allMembers.get(ownerId).url(); } private String ownerUrl(String connector) { String ownerId = connectorOwners.get(connector); if (ownerId == null) return null; return allMembers.get(ownerId).url(); } } public static class ConnectorsAndTasks { public static final ConnectorsAndTasks EMPTY = new ConnectorsAndTasks(Collections.emptyList(), Collections.emptyList()); private final Collection<String> connectors; private final Collection<ConnectorTaskId> tasks; private ConnectorsAndTasks(Collection<String> connectors, Collection<ConnectorTaskId> tasks) { this.connectors = connectors; this.tasks = tasks; } public static class Builder { private Collection<String> withConnectors; private Collection<ConnectorTaskId> withTasks; public Builder() { } public ConnectorsAndTasks.Builder withCopies(Collection<String> connectors, Collection<ConnectorTaskId> tasks) { withConnectors = new ArrayList<>(connectors); withTasks = new ArrayList<>(tasks); return this; } public ConnectorsAndTasks.Builder with(Collection<String> connectors, Collection<ConnectorTaskId> tasks) { withConnectors = new ArrayList<>(connectors); withTasks = new ArrayList<>(tasks); return this; } public ConnectorsAndTasks build() { return new ConnectorsAndTasks( withConnectors != null ? withConnectors : new ArrayList<>(), withTasks != null ? 
withTasks : new ArrayList<>()); } } public Collection<String> connectors() { return connectors; } public Collection<ConnectorTaskId> tasks() { return tasks; } public int size() { return connectors.size() + tasks.size(); } public boolean isEmpty() { return connectors.isEmpty() && tasks.isEmpty(); } @Override public String toString() { return "{ connectorIds=" + connectors + ", taskIds=" + tasks + '}'; } } public static class WorkerLoad { private final String worker; private final Collection<String> connectors; private final Collection<ConnectorTaskId> tasks; private WorkerLoad( String worker, Collection<String> connectors, Collection<ConnectorTaskId> tasks ) { this.worker = worker; this.connectors = connectors; this.tasks = tasks; } public static class Builder { private String withWorker; private Collection<String> withConnectors; private Collection<ConnectorTaskId> withTasks; public Builder(String worker) { this.withWorker = Objects.requireNonNull(worker, "worker cannot be null"); } public WorkerLoad.Builder withCopies(Collection<String> connectors, Collection<ConnectorTaskId> tasks) { withConnectors = new ArrayList<>( Objects.requireNonNull(connectors, "connectors may be empty but not null")); withTasks = new ArrayList<>( Objects.requireNonNull(tasks, "tasks may be empty but not null")); return this; } public WorkerLoad.Builder with(Collection<String> connectors, Collection<ConnectorTaskId> tasks) { withConnectors = Objects.requireNonNull(connectors, "connectors may be empty but not null"); withTasks = Objects.requireNonNull(tasks, "tasks may be empty but not null"); return this; } public WorkerLoad build() { return new WorkerLoad( withWorker, withConnectors != null ? withConnectors : new ArrayList<>(), withTasks != null ? 
withTasks : new ArrayList<>()); } } public String worker() { return worker; } public Collection<String> connectors() { return connectors; } public Collection<ConnectorTaskId> tasks() { return tasks; } public int connectorsSize() { return connectors.size(); } public int tasksSize() { return tasks.size(); } public void assign(String connector) { connectors.add(connector); } public void assign(ConnectorTaskId task) { tasks.add(task); } public int size() { return connectors.size() + tasks.size(); } public boolean isEmpty() { return connectors.isEmpty() && tasks.isEmpty(); } public static Comparator<WorkerLoad> connectorComparator() { return (left, right) -> { int res = left.connectors.size() - right.connectors.size(); return res != 0 ? res : left.worker == null ? right.worker == null ? 0 : -1 : left.worker.compareTo(right.worker); }; } public static Comparator<WorkerLoad> taskComparator() { return (left, right) -> { int res = left.tasks.size() - right.tasks.size(); return res != 0 ? res : left.worker == null ? right.worker == null ? 0 : -1 : left.worker.compareTo(right.worker); }; } @Override public String toString() { return "{ worker=" + worker + ", connectorIds=" + connectors + ", taskIds=" + tasks + '}'; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (!(o instanceof WorkerLoad)) { return false; } WorkerLoad that = (WorkerLoad) o; return worker.equals(that.worker) && connectors.equals(that.connectors) && tasks.equals(that.tasks); } @Override public int hashCode() { return Objects.hash(worker, connectors, tasks); } } }
/*
 * Copyright 2015 The FireNio Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.firenio.codec.http11;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;

import com.firenio.Develop;
import com.firenio.buffer.ByteBuf;
import com.firenio.collection.AttributeKey;
import com.firenio.collection.ByteTree;
import com.firenio.collection.IntMap;
import com.firenio.collection.Stack;
import com.firenio.common.ByteUtil;
import com.firenio.common.Util;
import com.firenio.component.Channel;
import com.firenio.component.FastThreadLocal;
import com.firenio.component.Frame;
import com.firenio.component.NioEventLoop;
import com.firenio.component.ProtocolCodec;

/**
 * HTTP/1.1 codec: decodes requests (GET/POST only) into {@link HttpFrame}s and
 * encodes response frames into wire bytes. Supports a "lite" byte-level decode
 * path and a full StringBuilder-based path, optional frame caching per event
 * loop, and an optional URL cache.
 *
 * @author wangkai
 */
public class HttpCodec extends ProtocolCodec {

    protected static final byte[]                    CONTENT_LENGTH_MATCH   = ByteUtil.b("Content-Length:");
    // Decoder state machine values stored in HttpFrame between reads.
    protected static final int                       decode_state_body      = 2;
    protected static final int                       decode_state_complete  = 3;
    protected static final int                       decode_state_header    = 1;
    protected static final int                       decode_state_line_one  = 0;
    protected static final AttributeKey<Stack<Object>> FRAME_CACHE_KEY      = NioEventLoop.valueOfKey("http_frame_cache_key");
    protected static final byte                      N                      = '\n';
    protected static final IOException               OVER_LIMIT             = EXCEPTION("over writeIndex");
    protected static final IOException               ILLEGAL_METHOD         = EXCEPTION("illegal http method");
    protected static final byte                      R                      = '\r';
    protected static final byte                      SPACE                  = ' ';
    // First four request bytes interpreted as a little-endian int, used for fast method dispatch.
    protected static final int                       NUM_GET                = ByteUtil.getIntLE("GET ".getBytes(), 0);
    protected static final int                       NUM_POST               = ByteUtil.getIntLE("POST".getBytes(), 0);

    protected final int        blimit;                            // max allowed body length
    protected final byte[][]   cl_bytes = new byte[1024][];       // precomputed "...Content-Length: <n>" tails for n < 1024
    protected final int        hlimit;                            // max allowed header length
    protected final int        fcache;                            // frame cache size per event loop (0 = disabled)
    protected final boolean    lite;                              // use the byte-level decode path
    protected final ByteBuffer cl_buf;                            // shared prefix buffer for Content-Length >= 1024
    protected final ByteTree   cached_urls;                       // optional URL intern cache, may be null

    public HttpCodec() {
        this(0);
    }

    public HttpCodec(int frameCache) {
        this(null, frameCache);
    }

    public HttpCodec(String server) {
        this(server, 0);
    }

    public HttpCodec(String server, int frameCache) {
        this(server, frameCache, false);
    }

    public HttpCodec(String server, int frameCache, boolean lite) {
        this(server, frameCache, 1024 * 8, 1024 * 256, lite, null);
    }

    public HttpCodec(String server, int frameCache, boolean lite, ByteTree cachedUrls) {
        this(server, frameCache, 1024 * 8, 1024 * 256, lite, cachedUrls);
    }

    /**
     * Full constructor.
     *
     * @param server     optional Server header value baked into every response; null to omit
     * @param fcache     per-event-loop frame cache size, 0 disables caching
     * @param hlimit     maximum header section length in bytes
     * @param blimit     maximum body length in bytes
     * @param lite       choose the byte-level (lite) decoder over the full decoder
     * @param cachedUrls optional URL cache for the lite decoder
     */
    public HttpCodec(String server, int fcache, int hlimit, int blimit, boolean lite, ByteTree cachedUrls) {
        this.lite = lite;
        this.hlimit = hlimit;
        this.blimit = blimit;
        this.fcache = fcache;
        this.cached_urls = cachedUrls;
        ByteBuffer temp = ByteBuffer.allocate(128);
        if (server == null) {
            temp.put(ByteUtil.b("\r\nContent-Length: "));
        } else {
            temp.put(ByteUtil.b("\r\nServer: " + server + "\r\nContent-Length: "));
        }
        cl_buf = temp.duplicate();
        cl_buf.flip();
        int p = temp.position();
        // Precompute the full "(Server:...)Content-Length: <i>" byte sequences for i in [0, 1024).
        for (int i = 0; i < cl_bytes.length; i++) {
            temp.clear().position(p);
            temp.put(String.valueOf(i).getBytes());
            temp.flip();
            cl_bytes[i] = new byte[temp.limit()];
            temp.get(cl_bytes[i]);
        }
    }

    // Copies bytes [url_start, url_end) out of the buffer into a String (latin-1 style, one byte per char).
    private static String parse_url(ByteBuf src, int url_start, int url_end) {
        StringBuilder line = FastThreadLocal.get().getStringBuilder();
        for (int i = url_start; i < url_end; i++) {
            line.append((char) (src.getByteAbs(i) & 0xff));
        }
        return line.toString();
    }

    /**
     * Parses "k<kvSplitor>v<eSplitor>k<kvSplitor>v..." pairs out of line[start, end)
     * into the map, skipping leading spaces in keys and values. A trailing value
     * without a final eSplitor is also captured.
     */
    static void parse_kv(Map<String, String> map, CharSequence line, int start, int end, char kvSplitor, char eSplitor) {
        boolean      find_key = true;
        int          i        = start;
        int          ks       = start;
        int          vs       = 0;
        CharSequence key      = null;
        CharSequence value    = null;
        for (; i != end; ) {
            char c = line.charAt(i++);
            if (find_key) {
                if (c == kvSplitor) {
                    ks = Util.skip(line, ' ', ks);
                    key = line.subSequence(ks, i - 1);
                    find_key = false;
                    vs = i;
                }
            } else {
                if (c == eSplitor) {
                    vs = Util.skip(line, ' ', vs);
                    value = line.subSequence(vs, i - 1);
                    find_key = true;
                    ks = i;
                    map.put((String) key, (String) value);
                }
            }
        }
        if (!find_key && end > vs) {
            map.put((String) key, (String) line.subSequence(vs, end));
        }
    }

    // Splits request-target into URL and query-string params; `skip` is the method prefix length ("GET "/"POST ").
    protected static void parse_url(HttpFrame f, int skip, CharSequence line) {
        int index     = Util.indexOf(line, '?');
        int lastSpace = Util.lastIndexOf(line, ' ');
        if (index > -1) {
            parse_kv(f.getRequestParams(), line, index + 1, lastSpace, '=', '&');
            f.setRequestURL((String) line.subSequence(skip, index));
        } else {
            f.setRequestURL((String) line.subSequence(skip, lastSpace));
        }
    }

    /**
     * Reads one line (up to '\n', dropping the trailing '\r') into {@code line},
     * enforcing the header-length limit. Returns the absolute position after the
     * '\n', or -1 when no full line is buffered yet.
     *
     * @throws IOException OVER_LIMIT when the line would exceed the limit
     */
    private static int read_line(StringBuilder line, ByteBuf src, int abs_pos, int length, int limit) throws IOException {
        int maybeRead = limit - length;
        int s_limit   = src.absWriteIndex();
        int remaining = s_limit - abs_pos;
        if (remaining > maybeRead) {
            int i = read_line(line, src, abs_pos, abs_pos + maybeRead);
            if (i == -1) {
                throw OVER_LIMIT;
            }
            return i;
        } else {
            return read_line(line, src, abs_pos, s_limit);
        }
    }

    // Appends bytes to `line` until '\n'; removes the preceding '\r' via setLength. -1 when '\n' not found.
    private static int read_line(StringBuilder line, ByteBuf src, int abs_pos, int abs_limit) {
        for (int i = abs_pos; i < abs_limit; i++) {
            byte b = src.getByteAbs(i);
            if (b == N) {
                line.setLength(line.length() - 1);
                return i + 1;
            } else {
                line.append((char) (b & 0xff));
            }
        }
        return -1;
    }

    /**
     * Lite-path equivalent of read_line: locates the next '\n' without copying.
     * Returns the '\n' position or -1 when absent; throws OVER_LIMIT when the
     * header limit would be exceeded.
     */
    private static int read_line_range(ByteBuf src, int abs_pos, int length, int limit) throws IOException {
        int maybeRead = limit - length;
        int s_limit   = src.absWriteIndex();
        int remaining = s_limit - abs_pos;
        if (remaining > maybeRead) {
            int res_p = src.indexOf(N, abs_pos, maybeRead);
            if (res_p == -1) {
                throw OVER_LIMIT;
            }
            return res_p;
        } else {
            return src.indexOf(N, abs_pos, remaining);
        }
    }

    // True when the buffer bytes [ps, pe) start with `match`.
    private static boolean start_with(ByteBuf src, int ps, int pe, byte[] match) {
        if (pe - ps < match.length) {
            return false;
        }
        for (int i = 0; i < match.length; i++) {
            if (src.getByteAbs(ps + i) != match[i]) {
                return false;
            }
        }
        return true;
    }

    // Factory hook; subclasses may return a specialized frame.
    HttpFrame new_frame() {
        return new HttpFrame();
    }

    // Takes a frame from the event loop's cache when enabled, else allocates.
    private HttpFrame alloc_frame(NioEventLoop el) {
        if (fcache > 0) {
            Frame res = (Frame) el.getCache(FRAME_CACHE_KEY, fcache);
            if (res == null) {
                return new_frame();
            } else {
                return (HttpFrame) res.reset();
            }
        }
        return new_frame();
    }

    /**
     * Byte-level decoder: parses the request line and headers directly from the
     * buffer without building intermediate strings (except for the URL). Only the
     * Content-Length header is extracted, and only for non-GET requests.
     *
     * @return the resulting decode state
     */
    private int decode_lite(ByteBuf src, HttpFrame f) throws IOException {
        int decode_state = f.getDecodeState();
        int abs_pos      = src.absReadIndex();
        int h_len        = f.getHeaderLength();
        if (decode_state == decode_state_line_one) {
            int l_end = src.indexOf(N);
            if (l_end == -1) {
                // Request line not complete yet; wait for more bytes.
                return decode_state_line_one;
            } else {
                h_len += (l_end - abs_pos);
                decode_state = decode_state_header;
                int url_start = abs_pos;
                int num       = src.getIntLE(abs_pos);
                if (num == NUM_GET) {
                    f.setMethod(HttpMethod.GET);
                    url_start += 4;
                } else if (num == NUM_POST) {
                    f.setMethod(HttpMethod.POST);
                    url_start += 5;
                } else {
                    throw ILLEGAL_METHOD;
                }
                // 10 = length of " HTTP/1.1\r" before the '\n'.
                int url_end = l_end - 10;
                int url_len = url_end - url_start;
                int qmark   = src.indexOf((byte) '?', url_start, url_len);
                if (qmark == -1) {
                    String url;
                    if (cached_urls != null) {
                        url = cached_urls.getString(src, url_start, url_len);
                        if (url == null) {
                            url = parse_url(src, url_start, url_end);
                        }
                    } else {
                        url = parse_url(src, url_start, url_end);
                    }
                    f.setRequestURL(url);
                } else {
                    // Query string present: copy target into a char buffer and split params.
                    StringBuilder line = FastThreadLocal.get().getStringBuilder();
                    for (int i = url_start; i < url_end; i++) {
                        line.append((char) (src.getByteAbs(i) & 0xff));
                    }
                    int re_qmark = qmark - url_start;
                    parse_kv(f.getRequestParams(), line, re_qmark + 1, line.length(), '=', '&');
                    f.setRequestURL((String) line.subSequence(0, re_qmark));
                }
                abs_pos = l_end + 1;
            }
        }
        if (decode_state == decode_state_header) {
            for (; ; ) {
                int ps = abs_pos;
                int pe = read_line_range(src, ps, h_len, hlimit);
                if (pe == -1) {
                    // Incomplete header line: persist progress and wait for more bytes.
                    f.setHeaderLength(h_len);
                    src.absReadIndex(abs_pos);
                    break;
                }
                abs_pos = pe-- + 1;     // advance past '\n'; pe now points at the '\r'
                int size = pe - ps;
                h_len += size;
                if (size == 0) {
                    // Blank line: end of headers.
                    if (f.getContentLength() < 1) {
                        decode_state = decode_state_complete;
                    } else {
                        if (f.getContentLength() > blimit) {
                            throw OVER_LIMIT;
                        }
                        decode_state = decode_state_body;
                    }
                    src.absReadIndex(abs_pos);
                    break;
                } else {
                    if (!f.isGet()) {
                        if (start_with(src, ps, pe, CONTENT_LENGTH_MATCH)) {
                            int cp  = ps + CONTENT_LENGTH_MATCH.length;
                            int cps = ByteUtil.skip(src, cp, pe, SPACE);
                            if (cps == -1) {
                                throw OVER_LIMIT;
                            }
                            // Manual decimal parse of the header value.
                            int ct_len = 0;
                            for (int i = cps; i < pe; i++) {
                                ct_len = (src.getByteAbs(i) - '0') + ct_len * 10;
                            }
                            f.setContentLength(ct_len);
                        }
                    }
                }
            }
        }
        return decode_state;
    }

    /**
     * Full decoder: copies each line into a StringBuilder and stores every header
     * on the frame via setReadHeader. Slower than the lite path, but retains all
     * request headers.
     */
    private int decode_full(ByteBuf src, HttpFrame f) throws IOException {
        StringBuilder line         = FastThreadLocal.get().getStringBuilder();
        int           decode_state = f.getDecodeState();
        int           h_len        = f.getHeaderLength();
        int           abs_pos      = src.absReadIndex();
        if (decode_state == decode_state_line_one) {
            int l_end = read_line(line, src, abs_pos, 0, hlimit);
            if (l_end == -1) {
                return decode_state_line_one;
            } else {
                abs_pos = l_end;
                h_len += line.length();
                decode_state = decode_state_header;
                parse_line_one(f, line);
            }
        }
        if (decode_state == decode_state_header) {
            for (; ; ) {
                line.setLength(0);
                int pn = read_line(line, src, abs_pos, h_len, hlimit);
                if (pn == -1) {
                    // Incomplete header line: persist progress and wait for more bytes.
                    src.absReadIndex(abs_pos);
                    f.setHeaderLength(h_len);
                    break;
                }
                abs_pos = pn;
                h_len += line.length();
                if (line.length() == 0) {
                    // Blank line ends the header section.
                    src.absReadIndex(abs_pos);
                    decode_state = header_complete(f);
                    break;
                } else {
                    int p = Util.indexOf(line, ':');
                    if (p == -1) {
                        // Malformed header line without ':' is silently skipped.
                        continue;
                    }
                    int    rp    = Util.skip(line, ' ', p + 1);
                    String name  = line.substring(0, p);
                    String value = line.substring(rp);
                    f.setReadHeader(name, value);
                }
            }
        }
        return decode_state;
    }

    /**
     * Protocol entry point: resumes any partially-decoded frame stored on the
     * channel attachment, runs the configured decode path, then the body phase.
     * Returns the completed frame, or null when more bytes are needed.
     */
    @Override
    public Frame decode(Channel ch, ByteBuf src) throws Exception {
        boolean        remove = false;
        HttpAttachment att    = (HttpAttachment) ch.getAttachment();
        HttpFrame      f      = att.getUncompleteFrame();
        if (f == null) {
            f = alloc_frame(ch.getEventLoop());
        } else {
            remove = true;
        }
        int decode_state;
        if (lite) {
            decode_state = decode_lite(src, f);
        } else {
            decode_state = decode_full(src, f);
        }
        if (decode_state == decode_state_body) {
            decode_state = decode_remain_body(ch, src, f);
        }
        if (decode_state == decode_state_complete) {
            if (remove) {
                att.setUncompleteFrame(null);
            }
            return f;
        } else {
            // Park the frame on the attachment until more bytes arrive.
            f.setDecodeState(decode_state);
            att.setUncompleteFrame(f);
            return null;
        }
    }

    /**
     * Reads the request body once Content-Length bytes are buffered. Form bodies
     * are parsed into request params; other bodies are attached raw.
     */
    int decode_remain_body(Channel ch, ByteBuf src, HttpFrame f) {
        int contentLength = f.getContentLength();
        int remain        = src.readableBytes();
        if (remain < contentLength) {
            return decode_state_body;
        } else {
            byte[] content = new byte[contentLength];
            src.readBytes(content);
            if (f.isForm()) {
                String param = new String(content, ch.getCharset());
                parse_kv(f.getRequestParams(), param, 0, param.length(), '=', '&');
            } else {
                f.setContent(content);
            }
            return decode_state_complete;
        }
    }

    /**
     * Encodes a response frame. byte[] content is inlined into the returned
     * buffer; ByteBuf content is written to the channel directly (head buffer
     * then content) and null is returned.
     */
    @Override
    public ByteBuf encode(final Channel ch, Frame frame) {
        HttpFrame       f             = (HttpFrame) frame;
        FastThreadLocal l             = FastThreadLocal.get();
        HttpAttachment  att           = (HttpAttachment) ch.getAttachment();
        List<byte[]>    bytes_array   = (List<byte[]>) l.getList();
        Object          content       = f.getContent();
        ByteBuf         content_buf   = null;
        byte[]          content_array = null;
        byte[]          head_bytes    = f.getStatus().getLine();
        byte[]          conn_bytes    = f.getConnection().getLine();
        byte[]          type_bytes    = f.getContentType().getLine();
        byte[]          date_bytes    = f.getDate();
        boolean         is_array      = false;
        int             write_size    = 0;
        if (content instanceof ByteBuf) {
            content_buf = (ByteBuf) content;
            write_size = content_buf.readableBytes();
        } else if (content instanceof byte[]) {
            is_array = true;
            content_array = (byte[]) content;
            write_size = content_array.length;
        }
        byte[] cl_len_bytes;
        int    cl_len;
        if (write_size < 1024) {
            // Fast path: precomputed Content-Length tail.
            cl_len_bytes = cl_bytes[write_size];
            cl_len = cl_len_bytes.length;
        } else {
            // Slow path: render the number after the shared prefix.
            // NOTE(review): this mutates cl_buf's backing array in place — looks
            // single-threaded per codec instance; confirm the codec is not shared
            // across event loops concurrently.
            cl_len_bytes = cl_buf.array();
            int tmp_len = cl_buf.limit();
            int len_idx = Util.valueOf(write_size, cl_len_bytes);
            int num_len = cl_len_bytes.length - len_idx;
            System.arraycopy(cl_len_bytes, len_idx, cl_len_bytes, tmp_len, num_len);
            cl_len = tmp_len + num_len;
        }
        int hlen = head_bytes.length;
        int tlen = type_bytes == null ? 0 : type_bytes.length;
        int clen = conn_bytes == null ? 0 : conn_bytes.length;
        int dlen = date_bytes == null ? 0 : date_bytes.length;
        int len  = hlen + cl_len + dlen + 2 + clen + tlen;
        int h_size = 0;
        IntMap<byte[]> headers = f.getResponseHeaders();
        if (headers != null) {
            for (headers.scan(); headers.hasNext(); ) {
                byte[] k = HttpHeader.get(headers.key()).getBytes();
                byte[] v = headers.value();
                h_size++;
                bytes_array.add(k);
                bytes_array.add(v);
                len += 4;       // ": " plus "\r\n" per header
                len += k.length;
                len += v.length;
            }
        }
        len += 2;               // final "\r\n" ending the header section
        if (is_array) {
            len += write_size;
        }
        ByteBuf buf;
        if (Develop.BUF_DEBUG) {
            buf = ch.alloc().allocate(1);
        } else {
            buf = ch.alloc().allocate(len);
        }
        buf.writeBytes(head_bytes);
        buf.writeBytes(cl_len_bytes, 0, cl_len);
        if (conn_bytes != null) {
            buf.writeBytes(conn_bytes);
        }
        if (type_bytes != null) {
            buf.writeBytes(type_bytes);
        }
        if (date_bytes != null) {
            buf.writeBytes(date_bytes);
        }
        buf.writeByte(R);
        buf.writeByte(N);
        if (h_size > 0) {
            put_headers(buf, bytes_array, h_size);
        }
        buf.writeByte(R);
        buf.writeByte(N);
        if (is_array) {
            buf.writeBytes(content_array);
            return buf;
        } else {
            ch.write(buf);
            ch.write(content_buf);
            return null;
        }
    }

    /**
     * Encodes a response frame with byte[] content into a caller-supplied buffer.
     * The caller is responsible for sizing {@code buf}.
     */
    public void encode(final Channel ch, ByteBuf buf, Frame frame) {
        HttpFrame       f          = (HttpFrame) frame;
        FastThreadLocal l          = FastThreadLocal.get();
        HttpAttachment  att        = (HttpAttachment) ch.getAttachment();
        List<byte[]>    bytes_list = (List<byte[]>) l.getList();
        byte[]          content    = (byte[]) f.getContent();
        byte[]          head_bytes = f.getStatus().getLine();
        byte[]          conn_bytes = f.getConnection().getLine();
        byte[]          type_bytes = f.getContentType().getLine();
        byte[]          date_bytes = f.getDate();
        int             write_size = content.length;
        byte[] cl_len_bytes;
        int    cl_len;
        if (write_size < 1024) {
            cl_len_bytes = cl_bytes[write_size];
            cl_len = cl_len_bytes.length;
        } else {
            cl_len_bytes = cl_buf.array();
            int tmp_len = cl_buf.limit();
            int len_idx = Util.valueOf(write_size, cl_len_bytes);
            int num_len = cl_len_bytes.length - len_idx;
            System.arraycopy(cl_len_bytes, len_idx, cl_len_bytes, tmp_len, num_len);
            cl_len = tmp_len + num_len;
        }
        int h_size = 0;
        IntMap<byte[]> headers = f.getResponseHeaders();
        if (headers != null) {
            for (headers.scan(); headers.hasNext(); ) {
                byte[] k = HttpHeader.get(headers.key()).getBytes();
                byte[] v = headers.value();
                h_size++;
                bytes_list.add(k);
                bytes_list.add(v);
            }
        }
        buf.writeBytes(head_bytes);
        buf.writeBytes(cl_len_bytes, 0, cl_len);
        if (conn_bytes != null) {
            buf.writeBytes(conn_bytes);
        }
        if (type_bytes != null) {
            buf.writeBytes(type_bytes);
        }
        if (date_bytes != null) {
            buf.writeBytes(date_bytes);
        }
        buf.writeByte(R);
        buf.writeByte(N);
        if (h_size > 0) {
            put_headers(buf, bytes_list, h_size);
        }
        buf.writeByte(R);
        buf.writeByte(N);
        buf.writeBytes(content);
    }

    // Writes `header_size` "key: value\r\n" pairs from the flat key/value list.
    protected void put_headers(ByteBuf buf, List<byte[]> encode_bytes_array, int header_size) {
        int j = 0;
        for (int i = 0; i < header_size; i++) {
            buf.writeBytes(encode_bytes_array.get(j++));
            buf.writeByte((byte) ':');
            buf.writeByte(SPACE);
            buf.writeBytes(encode_bytes_array.get(j++));
            buf.writeByte(R);
            buf.writeByte(N);
        }
    }

    public int getBodyLimit() {
        return blimit;
    }

    public int getHeaderLimit() {
        return hlimit;
    }

    public int getHttpFrameStackSize() {
        return fcache;
    }

    @Override
    public String getProtocolId() {
        return "HTTP1.1";
    }

    // HTTP/1.1 has no fixed-length frame header, hence 0.
    @Override
    public int getHeaderLength() {
        return 0;
    }

    /**
     * Called when the header section is complete (full decode path): records
     * Content-Length and form flag, enforces the body limit, and picks the next
     * decode state.
     */
    int header_complete(HttpFrame f) throws IOException {
        int    contentLength = 0;
        String c_length      = f.getRequestHeader(HttpHeader.Content_Length);
        String c_type        = f.getRequestHeader(HttpHeader.Content_Type);
        f.setForm(isForm(c_type));
        if (!Util.isNullOrBlank(c_length)) {
            contentLength = Integer.parseInt(c_length);
            f.setContentLength(contentLength);
        }
        if (contentLength < 1) {
            return decode_state_complete;
        } else {
            if (contentLength > blimit) {
                throw OVER_LIMIT;
            }
            return decode_state_body;
        }
    }

    // Only multipart form bodies are detected; urlencoded forms are not handled here (see TODO).
    private boolean isForm(String c_type) {
        //TODO complete me
        if (c_type == null) {
            return false;
        }
        if (c_type.startsWith("multipart/form-data;")) {
            return true;
        }
        return false;
    }

    /**
     * Parses the request line (full decode path): dispatches on the first four
     * chars packed little-endian, then extracts URL and query params.
     */
    protected void parse_line_one(HttpFrame f, CharSequence line) throws IOException {
        int v = (line.charAt(0) << 0) | (line.charAt(1) << 8) | (line.charAt(2) << 16) | (line.charAt(3) << 24);
        if (v == NUM_GET) {
            f.setMethod(HttpMethod.GET);
            parse_url(f, 4, line);
        } else if (v == NUM_POST) {
            f.setMethod(HttpMethod.POST);
            parse_url(f, 5, line);
        } else {
            throw ILLEGAL_METHOD;
        }
    }

    // Returns the frame to the event loop's cache (when enabled).
    @Override
    public void release(NioEventLoop eventLoop, Frame frame) {
        eventLoop.release(FRAME_CACHE_KEY, frame);
    }

    @Override
    protected Object newAttachment() {
        return new HttpAttachment();
    }

}
/*L * Copyright Oracle Inc * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cadsr-cgmdr-nci-uk/LICENSE.txt for details. */ package org.exist.xquery; import org.exist.collections.Collection; import org.exist.collections.IndexInfo; import org.exist.dom.DocumentImpl; import org.exist.security.SecurityManager; import org.exist.security.xacml.AccessContext; import org.exist.source.StringSource; import org.exist.storage.BrokerPool; import org.exist.storage.DBBroker; import org.exist.storage.lock.Lock; import org.exist.storage.serializers.Serializer; import org.exist.storage.txn.TransactionManager; import org.exist.storage.txn.Txn; import org.exist.test.TestConstants; import org.exist.xmldb.XmldbURI; import org.exist.xquery.value.Sequence; import org.exist.util.Configuration; import org.exist.util.serializer.SAXSerializer; import org.exist.util.serializer.SerializerPool; import org.xml.sax.InputSource; import java.io.StringReader; import java.io.StringWriter; import java.util.Properties; import javax.xml.transform.OutputKeys; import junit.framework.TestCase; import junit.textui.TestRunner; /** Tests for recovery of database corruption after constructed node operations (in-memory nodes) * @author Adam Retter <adam.retter@devon.gov.uk> */ public class ConstructedNodesRecoveryTest extends TestCase { private final static String xquery = "declare variable $categories := \n" + " <categories>\n" + " <category uid=\"1\">Fruit</category>\n" + " <category uid=\"2\">Vegetable</category>\n" + " <category uid=\"3\">Meat</category>\n" + " <category uid=\"4\">Dairy</category>\n" + " </categories>\n" + ";\n\n" + "for $category in $categories/category return\n" + " element option {\n" + " attribute value {\n" + " $category/@uid\n" + " },\n" + " text { $category }\n" + " }"; private final static String expectedResults [] = { "Fruit", "Vegetable", "Meat", "Dairy" }; private final static String testDocument = "<fruit>" + "<apple colour=\"green\"/>" + 
"<pear colour=\"green\"/>" + "<orange colour=\"orange\"/>" + "<dragonfruit colour=\"pink\"/>" + "<grapefruit colour=\"yellow\"/>" + "</fruit>"; public static void main(String[] args) { TestRunner.run(ConstructedNodesRecoveryTest.class); } /** * Issues a query against constructed nodes and then corrupts the database (intentionally) */ public void testConstructedNodesCorrupt() { constructedNodeQuery(true); } /** * Recovers from corruption (intentional) and then issues a query against constructed nodes */ public void testConstructedNodesRecover() { constructedNodeQuery(false); } private void storeTestDocument(DBBroker broker, TransactionManager transact, String documentName) throws Exception { //create a transaction Txn transaction = transact.beginTransaction(); assertNotNull(transaction); System.out.println("Transaction started ..."); //get the test collection Collection root = broker.getOrCreateCollection(transaction, TestConstants.TEST_COLLECTION_URI); assertNotNull(root); broker.saveCollection(transaction, root); //store test document IndexInfo info = root.validateXMLResource(transaction, broker, XmldbURI.create(documentName), testDocument); assertNotNull(info); root.store(transaction, broker, info, new InputSource(new StringReader(testDocument)), false); //commit the transaction transact.commit(transaction); System.out.println("Transaction commited ..."); } private void createTempChildCollection(DBBroker broker, TransactionManager transact, String childCollectionName) throws Exception { //create a transaction Txn transaction = transact.beginTransaction(); assertNotNull(transaction); System.out.println("Transaction started ..."); //get the test collection Collection root = broker.getOrCreateCollection(transaction, XmldbURI.TEMP_COLLECTION_URI.append(childCollectionName)); assertNotNull(root); broker.saveCollection(transaction, root); //commit the transaction transact.commit(transaction); System.out.println("Transaction commited ..."); } private void 
testDocumentIsValid(DBBroker broker, TransactionManager transact, String documentName) throws Exception { //create a transaction Txn transaction = transact.beginTransaction(); assertNotNull(transaction); System.out.println("Transaction started ..."); //get the test collection Collection root = broker.getOrCreateCollection(transaction, TestConstants.TEST_COLLECTION_URI); assertNotNull(root); broker.saveCollection(transaction, root); //get the test document DocumentImpl doc = root.getDocumentWithLock(broker, XmldbURI.create(documentName), Lock.READ_LOCK); Serializer serializer = broker.getSerializer(); serializer.reset(); SAXSerializer sax = null; StringWriter writer = new StringWriter(); sax = (SAXSerializer) SerializerPool.getInstance().borrowObject(SAXSerializer.class); Properties outputProperties = new Properties(); outputProperties.setProperty(OutputKeys.INDENT, "no"); outputProperties.setProperty(OutputKeys.ENCODING, "UTF-8"); sax.setOutput(writer, outputProperties); serializer.setProperties(outputProperties); serializer.setSAXHandlers(sax, sax); serializer.toSAX(doc); SerializerPool.getInstance().returnObject(sax); assertEquals(testDocument, writer.toString()); transact.commit(transaction); } private void testTempChildCollectionExists(DBBroker broker, TransactionManager transact, String childCollectionName) throws Exception { //create a transaction Txn transaction = transact.beginTransaction(); assertNotNull(transaction); System.out.println("Transaction started ..."); //get the temp child collection Collection tempChildCollection = broker.getOrCreateCollection(transaction, XmldbURI.TEMP_COLLECTION_URI.append(childCollectionName)); assertNotNull(tempChildCollection); broker.saveCollection(transaction, tempChildCollection); transact.commit(transaction); } /** * Performs a query against constructed nodes, with the option of forcefully corrupting the database * * @param forceCorruption Should the database be forcefully corrupted */ private void 
constructedNodeQuery(boolean forceCorruption) {
    // Toggle intentional corruption-on-commit in the broker pool for the recovery half of the test.
    BrokerPool.FORCE_CORRUPTION = forceCorruption;

    BrokerPool pool = null;
    DBBroker broker = null;
    try {
        pool = startDB();
        assertNotNull(pool);
        broker = pool.get(SecurityManager.SYSTEM_USER);
        TransactionManager transact = pool.getTransactionManager();
        assertNotNull(transact);

        //only store the documents the first time (the recovery pass re-reads what the corrupt pass wrote)
        if (forceCorruption) {
            //store a first test document
            storeTestDocument(broker, transact, "testcr1.xml");

            //store a second test document
            storeTestDocument(broker, transact, "testcr2.xml");
        }

        //create some child collections in TEMP collection
        createTempChildCollection(broker, transact, "testchild1");
        createTempChildCollection(broker, transact, "testchild2");

        //execute an xquery against constructed nodes
        XQuery service = broker.getXQueryService();
        assertNotNull(service);

        CompiledXQuery compiled = service.compile(service.newContext(AccessContext.TEST), new StringSource(xquery));
        assertNotNull(compiled);

        Sequence result = service.execute(compiled, null);
        assertNotNull(result);

        assertEquals(expectedResults.length, result.getItemCount());
        for (int i = 0; i < result.getItemCount(); i++) {
            assertEquals(expectedResults[i], (String) result.itemAt(i).getStringValue());
        }

        //read the first test document
        testDocumentIsValid(broker, transact, "testcr1.xml");

        //read the second test document
        // BUG FIX: previously re-validated "testcr1.xml" a second time, so testcr2.xml was never checked.
        testDocumentIsValid(broker, transact, "testcr2.xml");

        //test the child collections exist
        testTempChildCollectionExists(broker, transact, "testchild1");
        testTempChildCollectionExists(broker, transact, "testchild2");

        //flush the journal so the next (recovery) run has a consistent log to replay
        transact.getJournal().flushToLog(true);
    } catch (Exception e) {
        // BUG FIX: fail() throws AssertionError, so anything after it is unreachable —
        // print the stack trace first, then fail.
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        if (pool != null)
            pool.release(broker);
    }
}

/**
 * Configures and starts an embedded broker pool (1-5 brokers); fails the test on any error.
 */
protected BrokerPool startDB() {
    try {
        Configuration config = new Configuration();
        BrokerPool.configure(1, 5, config);
        return BrokerPool.getInstance();
    } catch (Exception e) {
        fail(e.getMessage());
    }
    return null;
}

/** Stops all broker pools without killing running transactions (false = no forced shutdown). */
protected void tearDown() {
    BrokerPool.stopAll(false);
}
}
/**
 * $Id: mxVertexHandler.java,v 1.10 2010/01/29 09:07:01 gaudenz Exp $
 * Copyright (c) 2008, Gaudenz Alder
 */
package com.mxgraph.swing.handler;

import java.awt.Color;
import java.awt.Cursor;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.Stroke;
import java.awt.event.MouseEvent;

import javax.swing.JComponent;
import javax.swing.JPanel;

import com.mxgraph.model.mxGeometry;
import com.mxgraph.swing.mxGraphComponent;
import com.mxgraph.util.mxConstants;
import com.mxgraph.util.mxPoint;
import com.mxgraph.util.mxRectangle;
import com.mxgraph.view.mxCellState;
import com.mxgraph.view.mxGraph;

/**
 * Cell handler for vertices: paints the selection border, creates the eight
 * resize handles plus one label handle, previews drags, and applies the
 * resulting resize/label-move on mouse release.
 */
public class mxVertexHandler extends mxCellHandler
{

	/**
	 * Cursors for handle indices 0..7 (NW, N, NE, W, E, SW, S, SE) and
	 * index 8 (label handle -> move cursor). Index order must match
	 * {@link #createHandles()}.
	 */
	public static Cursor[] CURSORS = new Cursor[] {
			new Cursor(Cursor.NW_RESIZE_CURSOR),
			new Cursor(Cursor.N_RESIZE_CURSOR),
			new Cursor(Cursor.NE_RESIZE_CURSOR),
			new Cursor(Cursor.W_RESIZE_CURSOR),
			new Cursor(Cursor.E_RESIZE_CURSOR),
			new Cursor(Cursor.SW_RESIZE_CURSOR),
			new Cursor(Cursor.S_RESIZE_CURSOR),
			new Cursor(Cursor.SE_RESIZE_CURSOR),
			new Cursor(Cursor.MOVE_CURSOR) };

	/**
	 * Workaround for alt-key-state not correct in mouseReleased.
	 */
	protected transient boolean gridEnabledEvent = false;

	/**
	 * Workaround for shift-key-state not correct in mouseReleased.
	 */
	protected transient boolean constrainedEvent = false;

	/**
	 * 
	 * @param graphComponent
	 * @param state
	 */
	public mxVertexHandler(mxGraphComponent graphComponent, mxCellState state)
	{
		super(graphComponent, state);
	}

	/**
	 * Creates the handle rectangles: eight resize handles around the cell
	 * bounds when the cell is resizable, and always one label handle (last
	 * array entry) centered on the label bounds.
	 */
	protected Rectangle[] createHandles()
	{
		Rectangle[] h = null;

		if (graphComponent.getGraph().isCellResizable(getState().getCell()))
		{
			Rectangle bounds = getState().getRectangle();
			int half = mxConstants.HANDLE_SIZE / 2;

			int left = bounds.x - half;
			int top = bounds.y - half;

			int w2 = bounds.x + (bounds.width / 2) - half;
			int h2 = bounds.y + (bounds.height / 2) - half;

			int right = bounds.x + bounds.width - half;
			int bottom = bounds.y + bounds.height - half;

			h = new Rectangle[9];

			int s = mxConstants.HANDLE_SIZE;
			h[0] = new Rectangle(left, top, s, s);
			h[1] = new Rectangle(w2, top, s, s);
			h[2] = new Rectangle(right, top, s, s);
			h[3] = new Rectangle(left, h2, s, s);
			h[4] = new Rectangle(right, h2, s, s);
			h[5] = new Rectangle(left, bottom, s, s);
			h[6] = new Rectangle(w2, bottom, s, s);
			h[7] = new Rectangle(right, bottom, s, s);
		}
		else
		{
			// Not resizable: only the label handle is created.
			h = new Rectangle[1];
		}

		int s = mxConstants.LABEL_HANDLE_SIZE;
		mxRectangle bounds = state.getLabelBounds();
		h[h.length - 1] = new Rectangle(
				(int) (bounds.getX() + bounds.getWidth() / 2 - s),
				(int) (bounds.getY() + bounds.getHeight() / 2 - s), 2 * s,
				2 * s);

		return h;
	}

	/**
	 * Creates the (initially hidden) preview panel shown while dragging.
	 */
	protected JComponent createPreview()
	{
		JPanel preview = new JPanel();
		preview.setBorder(mxConstants.PREVIEW_BORDER);
		preview.setOpaque(false);
		preview.setVisible(false);

		return preview;
	}

	/**
	 * Updates the preview bounds while dragging a resize or label handle.
	 */
	public void mouseDragged(MouseEvent e)
	{
		if (!e.isConsumed() && first != null)
		{
			//System.out.println("mouse dragged in mxVertexHandler");
			// Key state is cached here because it is not reliable in mouseReleased.
			gridEnabledEvent = graphComponent.isGridEnabledEvent(e);
			constrainedEvent = graphComponent.isConstrainedEvent(e);

			double dx = e.getX() - first.x;
			double dy = e.getY() - first.y;

			if (isLabel(index))
			{
				mxPoint pt = new mxPoint(e.getPoint());

				if (gridEnabledEvent)
				{
					pt = graphComponent.snapScaledPoint(pt);
				}

				int idx = (int) Math.round(pt.getX() - first.x);
				int idy = (int) Math.round(pt.getY() - first.y);

				if (constrainedEvent)
				{
					// Constrain to the dominant axis.
					if (Math.abs(idx) > Math.abs(idy))
					{
						idy = 0;
					}
					else
					{
						idx = 0;
					}
				}

				Rectangle rect = state.getLabelBounds().getRectangle();
				rect.translate(idx, idy);
				preview.setBounds(rect);
			}
			else
			{
				mxGraph graph = graphComponent.getGraph();
				double scale = graph.getView().getScale();

				if (gridEnabledEvent)
				{
					// Snap in unscaled (model) coordinates, then rescale.
					dx = graph.snap(dx / scale) * scale;
					dy = graph.snap(dy / scale) * scale;
				}

				mxRectangle bounds = union(getState(), dx, dy, index);
				bounds.setWidth(bounds.getWidth() + 1);
				bounds.setHeight(bounds.getHeight() + 1);
				preview.setBounds(bounds.getRectangle());
			}

			if (!preview.isVisible() && graphComponent.isSignificant(dx, dy))
			{
				preview.setVisible(true);
			}

			e.consume();
		}
	}

	/**
	 * Applies the pending label move or resize if a preview drag is active.
	 */
	public void mouseReleased(MouseEvent e)
	{
		if (!e.isConsumed() && first != null)
		{
			if (preview != null && preview.isVisible())
			{
				if (isLabel(index))
				{
					moveLabel(e);
				}
				else
				{
					resizeCell(e);
				}
			}

			e.consume();
		}

		super.mouseReleased(e);
	}

	/**
	 * Moves the label by updating the geometry offset of the cell (a clone of
	 * the geometry is written back through the model so the change is undoable).
	 */
	protected void moveLabel(MouseEvent e)
	{
		mxGraph graph = graphComponent.getGraph();
		mxGeometry geometry = graph.getModel().getGeometry(state.getCell());

		if (geometry != null)
		{
			double scale = graph.getView().getScale();
			mxPoint pt = new mxPoint(e.getPoint());

			if (gridEnabledEvent)
			{
				pt = graphComponent.snapScaledPoint(pt);
			}

			double dx = (pt.getX() - first.x) / scale;
			double dy = (pt.getY() - first.y) / scale;

			if (constrainedEvent)
			{
				if (Math.abs(dx) > Math.abs(dy))
				{
					dy = 0;
				}
				else
				{
					dx = 0;
				}
			}

			mxPoint offset = geometry.getOffset();

			if (offset == null)
			{
				offset = new mxPoint();
			}

			dx += offset.getX();
			dy += offset.getY();

			geometry = (mxGeometry) geometry.clone();
			geometry.setOffset(new mxPoint(Math.round(dx), Math.round(dy)));
			graph.getModel().setGeometry(state.getCell(), geometry);
		}
	}

	/**
	 * Resizes the cell according to the dragged handle, snapping the new
	 * bounds to the grid (unscaled) when grid snapping is active.
	 * 
	 * @param e
	 */
	protected void resizeCell(MouseEvent e)
	{
		mxGraph graph = graphComponent.getGraph();
		double scale = graph.getView().getScale();
		Object cell = state.getCell();
		mxGeometry geometry = graph.getModel().getGeometry(cell);

		if (geometry != null)
		{
			double dx = (e.getX() - first.x) / scale;
			double dy = (e.getY() - first.y) / scale;

			if (isLabel(index))
			{
				geometry = (mxGeometry) geometry.clone();

				if (geometry.getOffset() != null)
				{
					dx += geometry.getOffset().getX();
					dy += geometry.getOffset().getY();
				}

				if (gridEnabledEvent)
				{
					dx = graph.snap(dx);
					dy = graph.snap(dy);
				}

				geometry.setOffset(new mxPoint(dx, dy));
				graph.getModel().setGeometry(cell, geometry);
			}
			else
			{
				mxRectangle bounds = union(geometry, dx, dy, index);
				Rectangle rect = bounds.getRectangle();

				// Snaps new bounds to grid (unscaled)
				if (gridEnabledEvent)
				{
					int x = (int) graph.snap(rect.x);
					int y = (int) graph.snap(rect.y);
					rect.width = (int) graph.snap(rect.width - x + rect.x);
					rect.height = (int) graph.snap(rect.height - y + rect.y);
					rect.x = x;
					rect.y = y;
				}

				graph.resizeCell(cell, new mxRectangle(rect));
			}
		}
	}

	/**
	 * Returns the cursor for the given handle index, or null for indices out
	 * of range.
	 */
	protected Cursor getCursor(MouseEvent e, int index)
	{
		// BUG FIX: was "index <= CURSORS.length", which allowed
		// index == CURSORS.length and threw ArrayIndexOutOfBoundsException.
		if (index >= 0 && index < CURSORS.length)
		{
			return CURSORS[index];
		}

		return null;
	}

	/**
	 * Computes the new cell bounds for dragging handle <code>index</code> by
	 * (dx, dy), flipping over the left/top edge when width/height would go
	 * negative.
	 * 
	 * @param bounds
	 * @param dx
	 * @param dy
	 * @param index
	 */
	protected mxRectangle union(mxRectangle bounds, double dx, double dy,
			int index)
	{
		double left = bounds.getX();
		double right = left + bounds.getWidth();
		double top = bounds.getY();
		double bottom = top + bounds.getHeight();

		if (index > 4 /* Bottom Row */)
		{
			bottom = bottom + dy;
		}
		else if (index < 3 /* Top Row */)
		{
			top = top + dy;
		}

		if (index == 0 || index == 3 || index == 5 /* Left */)
		{
			left += dx;
		}
		else if (index == 2 || index == 4 || index == 7 /* Right */)
		{
			right += dx;
		}

		double width = right - left;
		double height = bottom - top;

		// Flips over left side
		if (width < 0)
		{
			left += width;
			width = Math.abs(width);
		}

		// Flips over top side
		if (height < 0)
		{
			top += height;
			height = Math.abs(height);
		}

		return new mxRectangle(left, top, width, height);
	}

	/**
	 * 
	 */
	protected Color getSelectionColor()
	{
		return mxConstants.VERTEX_SELECTION_COLOR;
	}

	/**
	 * 
	 */
	protected Stroke getSelectionStroke()
	{
		return mxConstants.VERTEX_SELECTION_STROKE;
	}

	/**
	 * Paints the vertex selection rectangle, then delegates handle painting
	 * to the superclass. The original stroke is restored afterwards.
	 */
	public void paint(Graphics g)
	{
		Rectangle bounds = getState().getRectangle();

		Graphics2D g2 = (Graphics2D) g;

		Stroke stroke = g2.getStroke();
		g2.setStroke(getSelectionStroke());
		g.setColor(getSelectionColor());
		g.drawRect(bounds.x, bounds.y, bounds.width, bounds.height);
		g2.setStroke(stroke);

		super.paint(g);
	}

}
package morfologik.fsa.builders;

import static morfologik.fsa.FSAFlags.*;
import static org.junit.Assert.*;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;

import morfologik.fsa.FSA;
import morfologik.fsa.FSAFlags;

import org.junit.Test;

import static com.carrotsearch.randomizedtesting.RandomizedTest.*;

/**
 * Base class for FSA serializer tests: builds automata from sorted byte
 * sequences, serializes them with the serializer supplied by the concrete
 * subclass ({@link #createSerializer()}), deserializes, and verifies the
 * right language is preserved.
 */
public abstract class SerializerTestBase extends TestBase {
    /** Smallest possible automaton: a single one-byte sequence. */
    @Test
    public void testA() throws IOException {
        byte[][] input = new byte[][] { { 'a' }, };

        Arrays.sort(input, FSABuilder.LEXICAL_ORDERING);
        FSA s = FSABuilder.build(input);

        checkSerialization(input, s);
    }

    /** Input chosen so that suffix arcs are shared between states. */
    @Test
    public void testArcsSharing() throws IOException {
        byte[][] input = new byte[][] { { 'a', 'c', 'f' }, { 'a', 'd', 'g' }, { 'a', 'e', 'h' }, { 'b', 'd', 'g' },
            { 'b', 'e', 'h' }, };

        Arrays.sort(input, FSABuilder.LEXICAL_ORDERING);
        FSA s = FSABuilder.build(input);

        checkSerialization(input, s);
    }

    /** Simple mixed-length input covering prefixes of other sequences. */
    @Test
    public void testFSA5SerializerSimple() throws IOException {
        byte[][] input = new byte[][] { { 'a' }, { 'a', 'b', 'a' }, { 'a', 'c' }, { 'b' }, { 'b', 'a' }, { 'c' }, };

        Arrays.sort(input, FSABuilder.LEXICAL_ORDERING);
        FSA s = FSABuilder.build(input);

        checkSerialization(input, s);
    }

    /** Serialization must also work for automata that are not minimal. */
    @Test
    public void testNotMinimal() throws IOException {
        byte[][] input = new byte[][] { { 'a', 'b', 'a' }, { 'b' }, { 'b', 'a' } };

        Arrays.sort(input, FSABuilder.LEXICAL_ORDERING);
        FSA s = FSABuilder.build(input);

        checkSerialization(input, s);
    }

    /** Regression input for a historical FSA5 serializer bug. */
    @Test
    public void testFSA5Bug0() throws IOException {
        checkCorrect(new String[] { "3-D+A+JJ", "3-D+A+NN", "4-F+A+NN", "z+A+NN", });
    }

    /** Regression input for a historical FSA5 serializer bug. */
    @Test
    public void testFSA5Bug1() throws IOException {
        checkCorrect(new String[] { "+NP", "n+N", "n+NP", });
    }

    /**
     * Builds an automaton from the given strings (ISO8859-1 encoded) and
     * round-trips it through the serializer.
     */
    private void checkCorrect(String[] strings) throws IOException {
        byte[][] input = new byte[strings.length][];
        for (int i = 0; i < strings.length; i++) {
            input[i] = strings[i].getBytes("ISO8859-1");
        }

        Arrays.sort(input, FSABuilder.LEXICAL_ORDERING);
        FSA s = FSABuilder.build(input);

        checkSerialization(input, s);
    }

    /** Empty language must serialize and deserialize cleanly. */
    @Test
    public void testEmptyInput() throws IOException {
        byte[][] input = new byte[][] {};
        FSA s = FSABuilder.build(input);

        checkSerialization(input, s);
    }

    @Test
    public void test_abc() throws IOException {
        testBuiltIn(FSA.read(FSA5Test.class.getResourceAsStream("abc.fsa")));
    }

    @Test
    public void test_minimal() throws IOException {
        testBuiltIn(FSA.read(FSA5Test.class.getResourceAsStream("minimal.fsa")));
    }

    @Test
    public void test_minimal2() throws IOException {
        testBuiltIn(FSA.read(FSA5Test.class.getResourceAsStream("minimal2.fsa")));
    }

    @Test
    public void test_en_tst() throws IOException {
        testBuiltIn(FSA.read(FSA5Test.class.getResourceAsStream("en_tst.dict")));
    }

    /**
     * Extracts all sequences from a prebuilt automaton, rebuilds a DFSA from
     * them, verifies correctness of the rebuilt automaton and then its
     * serialization.
     */
    private void testBuiltIn(FSA fsa) throws IOException {
        final ArrayList<byte[]> sequences = new ArrayList<byte[]>();

        sequences.clear();
        for (ByteBuffer bb : fsa) {
            // Copy out of the iterator's shared buffer before storing.
            sequences.add(Arrays.copyOf(bb.array(), bb.remaining()));
        }

        Collections.sort(sequences, FSABuilder.LEXICAL_ORDERING);

        final byte[][] in = sequences.toArray(new byte[sequences.size()][]);
        FSA root = FSABuilder.build(in);

        // Check if the DFSA is correct first.
        FSATestUtils.checkCorrect(in, root);

        // Check serialization.
        checkSerialization(in, root);
    }

    /**
     * Round-trips the automaton through the subclass's serializer; when the
     * serializer supports the NUMBERS flag, the numbered variant is checked too.
     */
    private void checkSerialization(byte[][] input, FSA root) throws IOException {
        checkSerialization0(createSerializer(), input, root);
        if (createSerializer().getFlags().contains(FSAFlags.NUMBERS)) {
            checkSerialization0(createSerializer().withNumbers(), input, root);
        }
    }

    /** Serializes, deserializes and verifies the right language. */
    private void checkSerialization0(FSASerializer serializer, final byte[][] in, FSA root) throws IOException {
        final byte[] fsaData = serializer.serialize(root, new ByteArrayOutputStream()).toByteArray();

        FSA fsa = FSA.read(new ByteArrayInputStream(fsaData));
        checkCorrect(in, fsa);
    }

    /*
     * Check if the FSA is correct with respect to the given input.
     */
    protected void checkCorrect(byte[][] input, FSA fsa) {
        // (1) All input sequences are in the right language.
        HashSet<ByteBuffer> rl = new HashSet<ByteBuffer>();
        for (ByteBuffer bb : fsa) {
            byte[] array = bb.array();
            int length = bb.remaining();
            rl.add(ByteBuffer.wrap(Arrays.copyOf(array, length)));
        }

        // Deduplicate the input: the automaton stores each sequence once.
        HashSet<ByteBuffer> uniqueInput = new HashSet<ByteBuffer>();
        for (byte[] sequence : input) {
            uniqueInput.add(ByteBuffer.wrap(sequence));
        }

        for (ByteBuffer sequence : uniqueInput) {
            if (!rl.remove(sequence)) {
                fail("Not present in the right language: " + toString(sequence));
            }
        }

        // (2) No other sequence _other_ than the input is in the right
        // language.
        assertEquals(0, rl.size());
    }

    /**
     * Verifies node numbering (perfect hashing): every sequence in the
     * automaton must get a distinct, stable ordinal.
     */
    @Test
    public void testAutomatonWithNodeNumbers() throws IOException {
        // Skip silently when the serializer cannot emit NUMBERS.
        assumeTrue(createSerializer().getFlags().contains(FSAFlags.NUMBERS));

        byte[][] input = new byte[][] { { 'a' }, { 'a', 'b', 'a' }, { 'a', 'c' }, { 'b' }, { 'b', 'a' }, { 'c' }, };

        Arrays.sort(input, FSABuilder.LEXICAL_ORDERING);
        FSA s = FSABuilder.build(input);

        final byte[] fsaData = createSerializer().withNumbers()
            .serialize(s, new ByteArrayOutputStream())
            .toByteArray();

        FSA fsa = FSA.read(new ByteArrayInputStream(fsaData));

        // Ensure we have the NUMBERS flag set.
        assertTrue(fsa.getFlags().contains(NUMBERS));

        // Get all numbers from nodes.
        byte[] buffer = new byte[128];
        final ArrayList<String> result = new ArrayList<String>();
        FSA5Test.walkNode(buffer, 0, fsa, fsa.getRootNode(), 0, result);

        Collections.sort(result);
        assertEquals(
            Arrays.asList("0 a", "1 aba", "2 ac", "3 b", "4 ba", "5 c"), result);
    }

    /** The serializer variant under test; supplied by the concrete subclass. */
    protected abstract FSASerializer createSerializer();

    /*
     * Drain bytes from a byte buffer to a string.
     */
    public static String toString(ByteBuffer sequence) {
        byte [] bytes = new byte [sequence.remaining()];
        sequence.get(bytes);
        return Arrays.toString(bytes);
    }
}
/** * IntegerExp.java * --------------------------------- * Copyright (c) 2016 * RESOLVE Software Research Group * School of Computing * Clemson University * All rights reserved. * --------------------------------- * This file is subject to the terms and conditions defined in * file 'LICENSE.txt', which is part of this source code package. */ package edu.clemson.cs.r2jt.absyn; import edu.clemson.cs.r2jt.collections.List; import edu.clemson.cs.r2jt.data.Location; import edu.clemson.cs.r2jt.data.PosSymbol; import edu.clemson.cs.r2jt.typeandpopulate.MTType; public class IntegerExp extends Exp { // =========================================================== // Variables // =========================================================== /** The location member. */ private Location location; private PosSymbol qualifier; /** The value member. */ private int value; // =========================================================== // Constructors // =========================================================== public IntegerExp() {}; public IntegerExp(Location location, PosSymbol qualifier, int value) { this.location = location; this.qualifier = qualifier; this.value = value; } public Exp substituteChildren(java.util.Map<Exp, Exp> substitutions) { Exp retval = new IntegerExp(location, qualifier, value); retval.setMathType(getMathType()); retval.setMathTypeValue(getMathTypeValue()); return retval; } // =========================================================== // Accessor Methods // =========================================================== // ----------------------------------------------------------- // Get Methods // ----------------------------------------------------------- /** Returns the value of the location variable. */ public Location getLocation() { return location; } public PosSymbol getQualifier() { return qualifier; } /** Returns the value of the value variable. 
*/ public int getValue() { return value; } // ----------------------------------------------------------- // Set Methods // ----------------------------------------------------------- /** Sets the location variable to the specified value. */ public void setLocation(Location location) { this.location = location; } public void setQualifier(PosSymbol qualifier) { this.qualifier = qualifier; } /** Sets the value variable to the specified value. */ public void setValue(int value) { this.value = value; } // =========================================================== // Public Methods // =========================================================== /** Accepts a ResolveConceptualVisitor. */ public void accept(ResolveConceptualVisitor v) { v.visitIntegerExp(this); } /** Returns a formatted text string of this class. */ public String asString(int indent, int increment) { StringBuffer sb = new StringBuffer(); printSpace(indent, sb); sb.append("IntegerExp\n"); if (qualifier != null) { printSpace(indent + increment, sb); sb.append(qualifier + "\n"); } printSpace(indent + increment, sb); sb.append(value + "\n"); return sb.toString(); } /** Returns a formatted text string of this class. */ public String toString(int indent) { StringBuffer sb = new StringBuffer(); if (qualifier != null) { sb.append(qualifier + "."); } sb.append(value); return sb.toString(); } /** Returns true if the variable is found in any sub expression of this one. 
**/ public boolean containsVar(String varName, boolean IsOldExp) { return false; } public Object clone() { IntegerExp clone = new IntegerExp(); clone.setQualifier(this.qualifier); clone.setValue(this.value); clone.setLocation(this.getLocation()); clone.setMathType(getMathType()); clone.setMathTypeValue(getMathTypeValue()); return clone; } public List<Exp> getSubExpressions() { return new List<Exp>(); } public void setSubExpression(int index, Exp e) { } public boolean shallowCompare(Exp e2) { if (!(e2 instanceof IntegerExp)) { return false; } if (qualifier != null && ((IntegerExp) (e2)).getQualifier() != null) { if (!(qualifier.equals(((IntegerExp) e2).getQualifier().getName()))) { return false; } } if (value != ((IntegerExp) e2).getValue()) { return false; } return true; } public Exp replace(Exp old, Exp replace) { if (!(old instanceof IntegerExp)) { return null; } else if (((IntegerExp) old).getValue() == value) return replace; else return null; } public void prettyPrint() { if (qualifier != null) { System.out.print(qualifier.getName() + "."); } System.out.print(value); } public Exp copy() { Exp retval = new IntegerExp(null, qualifier, value); retval.setMathType(getMathType()); retval.setMathTypeValue(getMathTypeValue()); return retval; } public boolean equivalent(Exp e) { boolean retval = e instanceof IntegerExp; if (retval) { IntegerExp eAsIntegerExp = (IntegerExp) e; retval = (value == eAsIntegerExp.value); } return retval; } }
/*
 * Copyright 2013-2016 consulo.io
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package consulo.desktop.container.boot;

import com.intellij.openapi.util.SystemInfo;
import com.intellij.util.SystemProperties;
import com.sun.jna.platform.win32.Shell32Util;
import com.sun.jna.platform.win32.ShlObj;

import javax.annotation.Nonnull;
import java.io.File;

/**
 * Platform-specific default locations (documents, settings, logs, plugins)
 * for the Consulo desktop container. A singleton implementation is chosen at
 * class-load time based on the current OS; non-Mac/Windows systems fall back
 * to dot-directories under the user home.
 *
 * @author VISTALL
 * @since 07-Nov-16
 */
public abstract class DefaultPaths {
  /** Generic fallback (Linux and other OSes): dot-directories in user home. */
  private static class Fallback extends DefaultPaths {
    @Nonnull
    @Override
    public File getDocumentsDir() {
      String userHome = SystemProperties.getUserHome();
      // some OS-es can have documents dir inside user home, for example Ubuntu
      File file = new File(userHome, "Documents");
      if (file.exists()) {
        return new File(userHome + File.separatorChar + "Documents" + File.separatorChar + ourDefaultPrefix);
      }
      // No Documents dir: use e.g. "~/Consulo Project" instead.
      return new File(userHome + File.separatorChar + ourDefaultPrefix + " Project");
    }

    @Nonnull
    @Override
    public String getLocalSettingsDir() {
      String userHome = SystemProperties.getUserHome();
      return userHome + File.separatorChar + ".consulo_settings" + File.separatorChar + "system";
    }

    @Nonnull
    @Override
    public String getRoamingSettingsDir() {
      String userHome = SystemProperties.getUserHome();
      return userHome + File.separatorChar + ".consulo_settings" + File.separatorChar + "config";
    }
  }

  /**
   * Template base for Mac/Windows: subclasses provide the un-prefixed OS
   * directory, the "Consulo" prefix is appended here.
   */
  private static abstract class SimpleDefaultPaths extends DefaultPaths {
    @Nonnull
    protected abstract String getDocumentsDirNoPrefix();

    @Override
    @Nonnull
    public File getDocumentsDir() {
      return new File(getDocumentsDirNoPrefix(), ourDefaultPrefix);
    }

    @Nonnull
    protected abstract String getLocalSettingsDirNoPrefix();

    @Override
    @Nonnull
    public String getLocalSettingsDir() {
      return getLocalSettingsDirNoPrefix() + File.separatorChar + ourDefaultPrefix;
    }

    @Nonnull
    protected abstract String getRoamingSettingsDirNoPrefix();

    @Nonnull
    @Override
    public String getRoamingSettingsDir() {
      return getRoamingSettingsDirNoPrefix() + File.separatorChar + ourDefaultPrefix;
    }
  }

  /** macOS layout under ~/Documents, ~/Library/{Caches,Preferences,Logs,Application Support}. */
  private static class Mac extends SimpleDefaultPaths {
    @Nonnull
    @Override
    protected String getDocumentsDirNoPrefix() {
      return SystemProperties.getUserHome() + "/Documents";
    }

    @Nonnull
    @Override
    public File getExternalPlatformDirectory(@Nonnull File defaultPath) {
      return new File(SystemProperties.getUserHome(), "Library/Application Support/Consulo Platform");
    }

    @Nonnull
    @Override
    protected String getLocalSettingsDirNoPrefix() {
      return SystemProperties.getUserHome() + "/Library/Caches";
    }

    @Nonnull
    @Override
    protected String getRoamingSettingsDirNoPrefix() {
      return SystemProperties.getUserHome() + "/Library/Preferences";
    }

    @Nonnull
    @Override
    public String getRoamingPluginsDir() {
      return SystemProperties.getUserHome() + "/Library/Application Support/" + ourDefaultPrefix;
    }

    @Nonnull
    @Override
    public File getLocalLogsDir() {
      return new File(SystemProperties.getUserHome() + "/Library/Logs/" + ourDefaultPrefix);
    }
  }

  /** Windows layout via shell folder lookups (JNA Shell32). */
  private static class Windows extends SimpleDefaultPaths {
    @Nonnull
    @Override
    public File getExternalPlatformDirectory(@Nonnull File defaultPath) {
      return new File(Shell32Util.getFolderPath(ShlObj.CSIDL_APPDATA), "Consulo Platform");
    }

    @Nonnull
    @Override
    protected String getDocumentsDirNoPrefix() {
      // CSIDL_PERSONAL = the user's "Documents" folder.
      return Shell32Util.getFolderPath(ShlObj.CSIDL_PERSONAL);
    }

    @Nonnull
    @Override
    protected String getLocalSettingsDirNoPrefix() {
      // will return path like C:\Users\{user.name}\AppData\Local
      return Shell32Util.getFolderPath(ShlObj.CSIDL_LOCAL_APPDATA);
    }

    @Nonnull
    @Override
    protected String getRoamingSettingsDirNoPrefix() {
      // will return path like C:\Users\{user.name}\AppData\Roaming
      return Shell32Util.getFolderPath(ShlObj.CSIDL_APPDATA);
    }
  }

  // NOTE: ourInstance is initialized before ourDefaultPrefix is declared, but
  // this is safe — ourDefaultPrefix is a compile-time String constant (inlined
  // by javac), and the subclasses only read it from instance methods anyway.
  private static final DefaultPaths ourInstance = get();

  private static final String ourDefaultPrefix = "Consulo";

  /** Selects the implementation for the current OS (Mac, Windows, else Fallback). */
  private static DefaultPaths get() {
    if (SystemInfo.isMac) {
      return new Mac();
    }
    else if (SystemInfo.isWindows) {
      return new Windows();
    }
    return new Fallback();
  }

  @Nonnull
  public static DefaultPaths getInstance() {
    return ourInstance;
  }

  /**
   * @return default directory for new projects
   */
  @Nonnull
  public abstract File getDocumentsDir();

  /**
   * @return directory for caches, etc. Removing will not broke user settings
   */
  @Nonnull
  public abstract String getLocalSettingsDir();

  @Nonnull
  public File getLocalLogsDir() {
    return new File(getLocalSettingsDir(), "logs");
  }

  /**
   * @return directory for user settings
   */
  @Nonnull
  public abstract String getRoamingSettingsDir();

  @Nonnull
  public String getRoamingPluginsDir() {
    return getRoamingSettingsDir() + File.separatorChar + "plugins";
  }

  /**
   * @return external platform directory, on each updates directory will be install in it, or inside distribution directory if null
   */
  @Nonnull
  public File getExternalPlatformDirectory(@Nonnull File defaultPath) {
    return defaultPath;
  }
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.engine.test.bpmn.deployment; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import java.util.ArrayList; import java.util.List; import org.flowable.common.engine.impl.history.HistoryLevel; import org.flowable.engine.impl.test.HistoryTestHelper; import org.flowable.engine.impl.test.PluggableFlowableTestCase; import org.flowable.engine.runtime.ProcessInstance; import org.flowable.eventsubscription.api.EventSubscription; import org.junit.jupiter.api.Test; /** * A test specifically written to test how events (start/boundary) are handled when deploying a new version of a process definition. 
* * @author Joram Barrez */ public class MessageEventsAndNewVersionDeploymentsWithTenantIdTest extends PluggableFlowableTestCase { private static final String TEST_PROCESS_GLOBAL_BOUNDARY_MESSAGE = "org/flowable/engine/test/bpmn/deployment/MessageEventsAndNewVersionDeploymentsTest.testGlobalMessageBoundaryEvent.bpmn20.xml"; private static final String TEST_PROCESS_START_MESSAGE = "org/flowable/engine/test/bpmn/deployment/MessageEventsAndNewVersionDeploymentsTest.testStartMessageEvent.bpmn20.xml"; private static final String TEST_PROCESS_NO_EVENTS = "org/flowable/engine/test/bpmn/deployment/MessageEventsAndNewVersionDeploymentsTest.processWithoutEvents.bpmn20.xml"; private static final String TEST_PROCESS_BOTH_START_AND_BOUNDARY_MESSAGE = "org/flowable/engine/test/bpmn/deployment/MessageEventsAndNewVersionDeploymentsTest.testBothBoundaryAndStartMessage.bpmn20.xml"; private static final String TEST_PROCESS_BOTH_START_AND_BOUNDARY_MESSAGE_SAME_MESSAGE = "org/flowable/engine/test/bpmn/deployment/MessageEventsAndNewVersionDeploymentsTest.testBothBoundaryAndStartMessageSameMessage.bpmn20.xml"; private static final String TENANT_ID = "223344"; /* * BOUNDARY MESSAGE EVENT */ @Test public void testMessageBoundaryEvent() { String deploymentId1 = deployBoundaryMessageTestProcess(); runtimeService.startProcessInstanceByKeyAndTenantId("messageTest", TENANT_ID); assertThat(getAllEventSubscriptions()).hasSize(1); String deploymentId2 = deployBoundaryMessageTestProcess(); runtimeService.startProcessInstanceByKeyAndTenantId("messageTest", TENANT_ID); assertThat(getAllEventSubscriptions()).hasSize(2); assertReceiveMessage("myMessage", 2); List<org.flowable.task.api.Task> tasks = taskService.createTaskQuery().list(); assertThat(tasks).hasSize(2); for (org.flowable.task.api.Task task : tasks) { assertThat(task.getName()).isEqualTo("Task after message"); } cleanup(deploymentId1, deploymentId2); } /** * Verifying that the event subscriptions do get removed when removing a deployment. 
*/
    @Test
    public void testBoundaryEventSubscriptionDeletedOnDeploymentDelete() {
        // Two deployments, one running instance each: two boundary-message subscriptions.
        String deploymentId = deployBoundaryMessageTestProcess();
        runtimeService.startProcessInstanceByKeyAndTenantId("messageTest", TENANT_ID);
        assertThat(taskService.createTaskQuery().singleResult().getName()).isEqualTo("My Task");

        String deploymentId2 = deployBoundaryMessageTestProcess();
        runtimeService.startProcessInstanceByKeyAndTenantId("messageTest", TENANT_ID);
        assertThat(taskService.createTaskQuery().count()).isEqualTo(2);
        assertThat(getAllEventSubscriptions()).hasSize(2);

        // Cascading delete of the first deployment removes its instance and its subscription.
        repositoryService.deleteDeployment(deploymentId, true);
        assertThat(taskService.createTaskQuery().singleResult().getName()).isEqualTo("My Task");
        assertThat(getAllEventSubscriptions()).hasSize(1);

        repositoryService.deleteDeployment(deploymentId2, true);
        assertThat(getAllEventSubscriptions()).isEmpty();
    }

    /**
     * Verifying that the event subscriptions do get removed when removing a process instance.
     */
    @Test
    public void testBoundaryEventSubscriptionsDeletedOnProcessInstanceDelete() {
        String deploymentId1 = deployBoundaryMessageTestProcess();
        runtimeService.startProcessInstanceByKeyAndTenantId("messageTest", TENANT_ID);
        assertThat(taskService.createTaskQuery().singleResult().getName()).isEqualTo("My Task");

        String deploymentId2 = deployBoundaryMessageTestProcess();
        ProcessInstance processInstance2 = runtimeService.startProcessInstanceByKeyAndTenantId("messageTest", TENANT_ID);
        assertThat(taskService.createTaskQuery().count()).isEqualTo(2);
        assertThat(getAllEventSubscriptions()).hasSize(2);

        // Deleting PI of second deployment
        runtimeService.deleteProcessInstance(processInstance2.getId(), "testing");
        assertThat(taskService.createTaskQuery().singleResult().getName()).isEqualTo("My Task");
        assertThat(getAllEventSubscriptions()).hasSize(1);

        // Fire the remaining boundary message; its subscription must be consumed.
        runtimeService.messageEventReceived("myMessage", getExecutionIdsForMessageEventSubscription("myMessage").get(0));
        assertThat(getAllEventSubscriptions()).isEmpty();
        assertThat(taskService.createTaskQuery().singleResult().getName()).isEqualTo("Task after message");

        cleanup(deploymentId1, deploymentId2);
    }

    /*
     * START MESSAGE EVENT
     */

    @Test
    public void testStartMessageEvent() {
        // A message start event keeps exactly one subscription, owned by the latest version.
        String deploymentId1 = deployStartMessageTestProcess();
        assertThat(getAllEventSubscriptions()).hasSize(1);
        assertEventSubscriptionsCount(1);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isZero();
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(1);

        // A second version replaces the subscription but does not duplicate it.
        String deploymentId2 = deployStartMessageTestProcess();
        assertEventSubscriptionsCount(1);
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(2);
        assertEventSubscriptionsCount(1);

        cleanup(deploymentId1, deploymentId2);
    }

    @Test
    public void testMessageStartEventSubscriptionAfterDeploymentDelete() {
        // Deploy two version of process definition, delete latest and check if all is good
        String deploymentId1 = deployStartMessageTestProcess();
        List<EventSubscription> eventSubscriptions = getAllEventSubscriptions();
        assertThat(eventSubscriptions).hasSize(1);

        String deploymentId2 = deployStartMessageTestProcess();
        eventSubscriptions = getAllEventSubscriptions();
        assertEventSubscriptionsCount(1);

        repositoryService.deleteDeployment(deploymentId2, true);
        eventSubscriptions = getAllEventSubscriptions();
        assertThat(eventSubscriptions).hasSize(1);

        cleanup(deploymentId1);
        assertThat(getAllEventSubscriptions()).isEmpty();

        // Deploy two versions of process definition, delete the first
        deploymentId1 = deployStartMessageTestProcess();
        deploymentId2 = deployStartMessageTestProcess();
        assertThat(getAllEventSubscriptions()).hasSize(1);
        repositoryService.deleteDeployment(deploymentId1, true);
        eventSubscriptions = getAllEventSubscriptions();
        // The surviving subscription must point at the remaining (second) definition.
        assertThat(eventSubscriptions)
                .extracting(EventSubscription::getProcessDefinitionId)
                .containsExactly(repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId2).singleResult().getId());

        cleanup(deploymentId2);
        assertThat(getAllEventSubscriptions()).isEmpty();
    }

    /**
     * v1 -> has start message event v2 -> has no start message event v3 -> has start message event
     */
    @Test
    public void testDeployIntermediateVersionWithoutMessageStartEvent() {
        String deploymentId1 = deployStartMessageTestProcess();
        assertThat(getAllEventSubscriptions()).hasSize(1);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isZero();
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(1);
        assertEventSubscriptionsCount(1);

        // v2 has no start message event: the subscription disappears and starting by message fails.
        String deploymentId2 = deployProcessWithoutEvents();
        assertThat(getAllEventSubscriptions()).isEmpty();
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(1);
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID));
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(1);
        assertEventSubscriptionsCount(0);

        // v3 restores the start message event, and the subscription must point at v3.
        String deploymentId3 = deployStartMessageTestProcess();
        assertThat(getAllEventSubscriptions()).hasSize(1);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(1);
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(2);
        assertEventSubscriptionsCount(1);
        List<EventSubscription> eventSubscriptions = getAllEventSubscriptions();
        assertThat(eventSubscriptions)
                .extracting(EventSubscription::getProcessDefinitionId)
                .containsExactly(repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId3).singleResult().getId());

        cleanup(deploymentId1, deploymentId2, deploymentId3);
    }

    @Test
    public void testDeleteDeploymentWithStartMessageEvents1() {
        String deploymentId1;
        String deploymentId2;
        String deploymentId3;
        deploymentId1 = deployStartMessageTestProcess();
        deploymentId2 = deployProcessWithoutEvents();
        deploymentId3 = deployStartMessageTestProcess();
        repositoryService.deleteDeployment(deploymentId3, true);
        assertEventSubscriptionsCount(0); // the latest is now the one without a message start
        cleanup(deploymentId1, deploymentId2);
    }

    @Test
    public void testDeleteDeploymentWithStartMessageEvents2() {
        String deploymentId1 = deployStartMessageTestProcess();
        String deploymentId2 = deployProcessWithoutEvents();
        String deploymentId3 = deployStartMessageTestProcess();
        repositoryService.deleteDeployment(deploymentId2, true);
        assertEventSubscriptionsCount(1); // the latest is now the one with the message
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().singleResult().getProcessDefinitionId())
                .isEqualTo(repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId3).singleResult().getId());
        cleanup(deploymentId1, deploymentId3);
    }

    @Test
    public void testDeleteDeploymentWithStartMessageEvents3() {
        String deploymentId1 = deployStartMessageTestProcess();
        String deploymentId2 = deployProcessWithoutEvents();
        String deploymentId3 = deployStartMessageTestProcess();
        repositoryService.deleteDeployment(deploymentId1, true);
        assertEventSubscriptionsCount(1); // the latest is now the one with the message
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().singleResult().getProcessDefinitionId())
                .isEqualTo(repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId3).singleResult().getId());
        cleanup(deploymentId2, deploymentId3);
    }

    @Test
    public void testDeleteDeploymentWithStartMessageEvents4() {
        String deploymentId1 = deployStartMessageTestProcess();
        String deploymentId2 = deployProcessWithoutEvents();
        String deploymentId3 = deployStartMessageTestProcess();
        repositoryService.deleteDeployment(deploymentId2, true);
        repositoryService.deleteDeployment(deploymentId3, true);
        assertEventSubscriptionsCount(1); // the latest is now the one with the message start
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().singleResult().getProcessDefinitionId())
                .isEqualTo(repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId1).singleResult().getId());
        cleanup(deploymentId1);
    }

    @Test
    public void testDeleteDeploymentWithStartMessageEvents5() {
        String deploymentId1 = deployStartMessageTestProcess();
        String deploymentId2 = deployProcessWithoutEvents();
        assertEventSubscriptionsCount(0);
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID));
        assertThat(runtimeService.createExecutionQuery().count()).isZero();
        repositoryService.deleteDeployment(deploymentId2, true);
        assertEventSubscriptionsCount(1); // the first is now the one with the signal
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().singleResult().getProcessDefinitionId())
                .isEqualTo(repositoryService.createProcessDefinitionQuery().deploymentId(deploymentId1).singleResult().getId());
        cleanup(deploymentId1);
    }

    @Test
    public void testDeleteDeploymentWithStartMessageEvents6() {
        String deploymentId1 = deployStartMessageTestProcess();
        String deploymentId2 = deployProcessWithoutEvents();
        String deploymentId3 = deployStartMessageTestProcess();
        String deploymentId4 = deployProcessWithoutEvents();
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID));
        assertThat(runtimeService.createExecutionQuery().count()).isZero();
        repositoryService.deleteDeployment(deploymentId2, true);
        repositoryService.deleteDeployment(deploymentId3, true);
        // Latest surviving version (v4) has no start message: starting by message still fails.
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID));
        assertThat(runtimeService.createExecutionQuery().count()).isZero();
        repositoryService.deleteDeployment(deploymentId1, true);
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID));
        assertThat(runtimeService.createExecutionQuery().count()).isZero();
        cleanup(deploymentId4);
    }

    @Test
    public void testDeleteDeploymentWithStartMessageEvents7() {
        String deploymentId1 = deployStartMessageTestProcess();
        String deploymentId2 = deployProcessWithoutEvents();
        String deploymentId3 = deployStartMessageTestProcess();
        String deploymentId4 = deployProcessWithoutEvents();
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID));
        assertThat(runtimeService.createExecutionQuery().count()).isZero();
        repositoryService.deleteDeployment(deploymentId2, true);
        repositoryService.deleteDeployment(deploymentId3, true);
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID));
        assertThat(runtimeService.createExecutionQuery().count()).isZero();
        // Removing v4 makes v1 (with the start message) the latest again.
        repositoryService.deleteDeployment(deploymentId4, true);
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(1);
        cleanup(deploymentId1);
    }

    /*
     * BOTH BOUNDARY AND START MESSAGE
     */

    @Test
    public void testBothBoundaryAndStartEvent() {
        // Deploy process with both boundary and start event
        String deploymentId1 = deployProcessWithBothStartAndBoundaryMessage();
        assertEventSubscriptionsCount(1);
        assertThat(runtimeService.createExecutionQuery().count()).isZero();

        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(2);
        assertThat(getAllEventSubscriptions()).hasSize(3); // 1 for the start, 2 for the boundary

        // Deploy version with only a boundary signal
        String deploymentId2 = deployBoundaryMessageTestProcess();
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID));
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(2);
        assertEventSubscriptionsCount(2); // 2 boundary events remain

        // Deploy version with signal start
        String deploymentId3 = deployStartMessageTestProcess();
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(3);
        assertEventSubscriptionsCount(3);

        // Delete last version again, making the one with the boundary the latest
        repositoryService.deleteDeployment(deploymentId3, true);
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID));
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(2); // -1, cause process instance of deploymentId3 is gone too
        assertEventSubscriptionsCount(2); // The 2 boundary remains

        // Test the boundary signal
        assertReceiveMessage("myBoundaryMessage", 2);
        assertThat(taskService.createTaskQuery().taskName("Task after boundary message").list()).hasSize(2);

        // Delete second version
        repositoryService.deleteDeployment(deploymentId2, true);
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(3); // -1, cause process instance of deploymentId3 is gone too
        assertEventSubscriptionsCount(2); // 2 boundaries
        cleanup(deploymentId1);
    }

    @Test
    public void testBothBoundaryAndStartSameMessageId() {
        // Deploy process with both boundary and start event
        String deploymentId1 = deployProcessWithBothStartAndBoundarySameMessage();
        assertThat(getAllEventSubscriptions()).hasSize(1);
        assertEventSubscriptionsCount(1);
        assertThat(runtimeService.createExecutionQuery().count()).isZero();

        for (int i = 0; i < 9; i++) {
            // Every iteration will signal the boundary event of the previous iteration!
            runtimeService.startProcessInstanceByMessageAndTenantId("myMessage", TENANT_ID);
        }
        if (HistoryTestHelper.isHistoryLevelAtLeast(HistoryLevel.ACTIVITY, processEngineConfiguration)) {
            assertThat(historyService.createHistoricProcessInstanceQuery().count()).isEqualTo(9);
        }

        assertThat(getAllEventSubscriptions()).hasSize(10); // 1 for the start, 9 for boundary

        // Deploy version with only a start signal. The boundary events should still react though!
        String deploymentId2 = deployStartMessageTestProcess();
        runtimeService.startProcessInstanceByMessageAndTenantId("myStartMessage", TENANT_ID);
        assertThat(runtimeService.createProcessInstanceQuery().count()).isEqualTo(10);
        assertEventSubscriptionsCount(10); // Remains 10: 1 one was removed, but one added for the new message
        assertThatThrownBy(() -> runtimeService.startProcessInstanceByMessageAndTenantId("myMessage", TENANT_ID));

        cleanup(deploymentId1, deploymentId2);
    }

    /*
     * HELPERS
     */

    private String deployBoundaryMessageTestProcess() {
        return deploy(TEST_PROCESS_GLOBAL_BOUNDARY_MESSAGE);
    }

    private String deployStartMessageTestProcess() {
        return deploy(TEST_PROCESS_START_MESSAGE);
    }

    private String deployProcessWithoutEvents() {
        return deploy(TEST_PROCESS_NO_EVENTS);
    }

    private String deployProcessWithBothStartAndBoundaryMessage() {
        return deploy(TEST_PROCESS_BOTH_START_AND_BOUNDARY_MESSAGE);
    }

    private String deployProcessWithBothStartAndBoundarySameMessage() {
        return deploy(TEST_PROCESS_BOTH_START_AND_BOUNDARY_MESSAGE_SAME_MESSAGE);
    }

    // Deploys the given classpath resource under the test tenant and returns the deployment id.
    private String deploy(String path) {
        return repositoryService
                .createDeployment()
                .tenantId(TENANT_ID)
                .addClasspathResource(path)
                .deploy()
                .getId();
    }

    // Ignores the individual ids on purpose: the shared helper wipes all test deployments.
    private void cleanup(String... deploymentIds) {
        deleteDeployments();
    }

    private List<String> getExecutionIdsForMessageEventSubscription(final String messageName) {
        List<EventSubscription> subscriptions = runtimeService.createEventSubscriptionQuery()
                .eventType("message")
                .eventName(messageName)
                .tenantId(TENANT_ID)
                .orderByCreateDate()
                .desc()
                .list();
        List<String> executionIds = new ArrayList<>();
        for (EventSubscription subscription : subscriptions) {
            executionIds.add(subscription.getExecutionId());
        }
        return executionIds;
    }

    // Returns all tenant subscriptions, asserting each is a message subscription bound to a definition.
    private List<EventSubscription> getAllEventSubscriptions() {
        List<EventSubscription> subscriptions = runtimeService.createEventSubscriptionQuery()
                .tenantId(TENANT_ID)
                .orderByCreateDate()
                .desc()
                .list();
        for (EventSubscription subscription : subscriptions) {
            assertThat(subscription.getEventType()).isEqualTo("message");
            assertThat(subscription.getProcessDefinitionId()).isNotNull();
        }
        return subscriptions;
    }

    // Verifies the expected number of waiting executions, then delivers the message to each.
    private void assertReceiveMessage(String messageName, int executionIdsCount) {
        List<String> executionIds = getExecutionIdsForMessageEventSubscription(messageName);
        assertThat(executionIds).hasSize(executionIdsCount);
        for (String executionId : executionIds) {
            runtimeService.messageEventReceived(messageName, executionId);
        }
    }

    private void assertEventSubscriptionsCount(int count) {
        assertThat(getAllEventSubscriptions()).hasSize(count);
    }
}
/*
 * Copyright 2020 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.pluginit.jdbc;

import com.navercorp.pinpoint.bootstrap.context.DatabaseInfo;
import com.navercorp.pinpoint.bootstrap.plugin.jdbc.JdbcUrlParserV2;
import com.navercorp.pinpoint.bootstrap.plugin.test.Expectations;
import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifier;
import com.navercorp.pinpoint.bootstrap.plugin.test.PluginTestVerifierHolder;
import com.navercorp.pinpoint.pluginit.jdbc.template.CallableStatementCallback;
import com.navercorp.pinpoint.pluginit.jdbc.template.DriverManagerDataSource;
import com.navercorp.pinpoint.pluginit.jdbc.template.PreparedStatementSetter;
import com.navercorp.pinpoint.pluginit.jdbc.template.ResultSetExtractor;
import com.navercorp.pinpoint.pluginit.jdbc.template.SimpleJdbcTemplate;
import com.navercorp.pinpoint.pluginit.jdbc.template.TransactionCallback;
import com.navercorp.pinpoint.pluginit.jdbc.template.TransactionDataSource;

import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.LogManager;

import javax.sql.DataSource;
import java.lang.reflect.Method;
import java.sql.CallableStatement;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.List;

/**
 * Base class for JDBC plugin integration tests. Subclasses supply the concrete
 * driver via {@link #getJDBCDriverClass()}; {@link #setup} registers the driver
 * and records the connection metadata the trace verifications compare against.
 *
 * @author Woonduk Kang(emeroad)
 */
public abstract class DataBaseTestCase {

    protected final Logger logger = LogManager.getLogger(this.getClass());

    // Service-type names expected in the recorded trace (set in setup()).
    protected String DB_TYPE;
    protected String DB_EXECUTE_QUERY;
    protected String jdbcUrl;
    protected String databaseId;
    protected String databaseIdPassword;
    protected String databaseAddress;
    protected String databaseName;
    protected JDBCApi jdbcApi;
    private DataSource dataSource;

    /**
     * Initializes connection metadata from the driver properties and registers the
     * JDBC driver under test with {@link DriverManager}.
     *
     * @param dbType           expected service type for connection-level events
     * @param executeQuery     expected service type for query-execution events
     * @param driverProperties url/user/password of the database under test
     * @param jdbcUrlParser    parser used to extract host and database name from the url
     * @param jdbcApi          reflective accessors for the driver's instrumented methods
     * @throws RuntimeException if the driver cannot be instantiated or registered
     */
    public void setup(String dbType, String executeQuery, DriverProperties driverProperties, JdbcUrlParserV2 jdbcUrlParser, JDBCApi jdbcApi) {
        this.DB_TYPE = dbType;
        this.DB_EXECUTE_QUERY = executeQuery;
        this.jdbcUrl = driverProperties.getUrl();
        DatabaseInfo databaseInfo = jdbcUrlParser.parse(jdbcUrl);
        this.databaseAddress = databaseInfo.getHost().get(0);
        this.databaseName = databaseInfo.getDatabaseId();
        this.databaseId = driverProperties.getUser();
        this.databaseIdPassword = driverProperties.getPassword();
        this.dataSource = new DriverManagerDataSource(jdbcUrl, databaseId, databaseIdPassword);
        this.jdbcApi = jdbcApi;
        try {
            JDBCDriverClass jdbcDriverClass = getJDBCDriverClass();
            // Class#newInstance() is deprecated (it rethrows undeclared checked
            // exceptions); instantiate via the no-arg constructor instead.
            Driver driver = jdbcDriverClass.getDriver().getDeclaredConstructor().newInstance();
            DriverManager.registerDriver(driver);
        } catch (Exception e) {
            throw new RuntimeException("driver register error", e);
        }
    }

    /** Concrete driver class holder supplied by the subclass under test. */
    protected abstract JDBCDriverClass getJDBCDriverClass();

    @After
    public void deregisterDriver() {
        DriverManagerUtils.deregisterDriver();
    }

    /** Simple row holder for the {@code test} table used by {@link #testStatement()}. */
    public static class User {
        private final int id;
        private final String name;
        private final int age;

        public User(int id, String name, int age) {
            this.id = id;
            this.name = name;
            this.age = age;
        }

        public int getId() {
            return id;
        }

        public String getName() {
            return name;
        }

        public int getAge() {
            return age;
        }

        @Override
        public String toString() {
            return "User{" + "id=" + id + ", name='" + name + '\'' + ", age=" + age + '}';
        }
    }

    /**
     * Runs insert/select/delete inside one transaction and verifies the trace:
     * connect, setAutoCommit(false), prepare, execute, query, update, commit.
     */
    @Test
    public void testStatement() throws Exception {
        final String insertQuery = "INSERT INTO test (name, age) VALUES (?, ?)";
        final String selectQuery = "SELECT * FROM test";
        final String deleteQuery = "DELETE FROM test";

        TransactionDataSource transactionDataSource = new TransactionDataSource(this.dataSource);
        final SimpleJdbcTemplate template = new SimpleJdbcTemplate(transactionDataSource, SimpleJdbcTemplate.ConnectionInterceptor.EMPTY);
        transactionDataSource.doInTransaction(new TransactionCallback() {
            @Override
            public void doInTransaction() throws SQLException {
                template.execute(insertQuery, new PreparedStatementSetter() {
                    @Override
                    public void setValues(PreparedStatement ps) throws SQLException {
                        ps.setString(1, "maru");
                        ps.setInt(2, 5);
                    }
                });
                List<User> users = template.executeQuery(selectQuery, new ResultSetExtractor<List<User>>() {
                    @Override
                    public List<User> extractData(ResultSet rs) throws SQLException {
                        List<User> users = new ArrayList<>();
                        while (rs.next()) {
                            final int id = rs.getInt("id");
                            final String name = rs.getString("name");
                            final int age = rs.getInt("age");
                            users.add(new User(id, name, age));
                        }
                        return users;
                    }
                });
                logger.debug("users:{}", users);
                template.executeUpdate(deleteQuery);
            }
        });

        PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
        verifier.printCache();

        Method connect = jdbcApi.getDriver().getConnect();
        verifier.verifyTrace(Expectations.event(DB_TYPE, connect, null, databaseAddress, databaseName, Expectations.cachedArgs(jdbcUrl)));

        JDBCApi.ConnectionClass connectionClass = jdbcApi.getConnection();
        Method setAutoCommit = connectionClass.getSetAutoCommit();
        verifier.verifyTrace(Expectations.event(DB_TYPE, setAutoCommit, null, databaseAddress, databaseName, Expectations.args(false)));

        Method prepareStatement = connectionClass.getPrepareStatement();
        verifier.verifyTrace(Expectations.event(DB_TYPE, prepareStatement, null, databaseAddress, databaseName, Expectations.sql(insertQuery, null)));

        Method execute = jdbcApi.getPreparedStatement().getExecute();
        verifier.verifyTrace(Expectations.event(DB_EXECUTE_QUERY, execute, null, databaseAddress, databaseName, Expectations.sql(insertQuery, null, "maru, 5")));

        JDBCApi.StatementClass statementClass = jdbcApi.getStatement();
        Method executeQuery = statementClass.getExecuteQuery();
        verifier.verifyTrace(Expectations.event(DB_EXECUTE_QUERY, executeQuery, null, databaseAddress, databaseName, Expectations.sql(selectQuery, null)));

        Method executeUpdate = statementClass.getExecuteUpdate();
        verifier.verifyTrace(Expectations.event(DB_EXECUTE_QUERY, executeUpdate, null, databaseAddress, databaseName, Expectations.sql(deleteQuery, null)));

        Method commit = connectionClass.getCommit();
        verifier.verifyTrace(Expectations.event(DB_TYPE, commit, null, databaseAddress, databaseName));
    }

    /*
    CREATE PROCEDURE concatCharacters
        @a CHAR(1),
        @b CHAR(1),
        @c CHAR(2) OUTPUT
    AS
        SET @c = @a + @b;
    */
    @Test
    public void testStoredProcedure_with_IN_OUT_parameters() throws Exception {
        final String param1 = "a";
        final String param2 = "b";
        final String storedProcedureQuery = "{ call concatCharacters(?, ?, ?) }";

        final SimpleJdbcTemplate template = new SimpleJdbcTemplate(this.dataSource, SimpleJdbcTemplate.ConnectionInterceptor.EMPTY);
        String result = template.execute(storedProcedureQuery, new CallableStatementCallback<String>() {
            @Override
            public String doInCallableStatement(CallableStatement cs) throws SQLException {
                cs.setString(1, param1);
                cs.setString(2, param2);
                cs.registerOutParameter(3, Types.VARCHAR);
                cs.execute();
                return cs.getString(3);
            }
        });
        Assert.assertEquals(param1.concat(param2), result);

        PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
        verifier.printCache();
        verifier.verifyTraceCount(4);

        // Driver#connect(String, Properties)
        Method connect = jdbcApi.getDriver().getConnect();
        verifier.verifyTrace(Expectations.event(DB_TYPE, connect, null, databaseAddress, databaseName, Expectations.cachedArgs(jdbcUrl)));

        // ConnectionJDBC2#prepareCall(String)
        JDBCApi.ConnectionClass connectionClass = jdbcApi.getConnection();
        Method prepareCall = connectionClass.getPrepareCall();
        verifier.verifyTrace(Expectations.event(DB_TYPE, prepareCall, null, databaseAddress, databaseName, Expectations.sql(storedProcedureQuery, null)));

        // JtdsCallableStatement#registerOutParameter(int, int)
        Method registerOutParameter = jdbcApi.getCallableStatement().getRegisterOutParameter();
        verifier.verifyTrace(Expectations.event(DB_TYPE, registerOutParameter, null, databaseAddress, databaseName, Expectations.args(3, Types.VARCHAR)));

        // JtdsPreparedStatement#execute
        Method execute = jdbcApi.getPreparedStatement().getExecute();
        verifier.verifyTrace(Expectations.event(DB_EXECUTE_QUERY, execute, null, databaseAddress, databaseName, Expectations.sql(storedProcedureQuery, null, param1 + ", " + param2)));
    }

    /** Pair of ints swapped by the stored procedure below. */
    private static class Swap {
        int a;
        int b;

        public Swap(int a, int b) {
            this.a = a;
            this.b = b;
        }
    }

    /** Result holder: result-set rows plus the swapped OUT parameters. */
    public static class SwapResult {
        private List<Integer> results = new ArrayList<>();
        private Swap swap;
    }

    /*
    CREATE PROCEDURE swapAndGetSum
        @a INT OUTPUT,
        @b INT OUTPUT
    AS
        DECLARE @temp INT;
        SET @temp = @a;
        SET @a = @b;
        SET @b = @temp;
        SELECT @temp + @a;
    */
    @Test
    public void testStoredProcedure_with_INOUT_parameters() throws Exception {
        final int param1 = 1;
        final int param2 = 2;
        final String storedProcedureQuery = "{ call swapAndGetSum(?, ?) }";

        final SimpleJdbcTemplate template = new SimpleJdbcTemplate(this.dataSource, SimpleJdbcTemplate.ConnectionInterceptor.EMPTY);
        SwapResult result = template.execute(storedProcedureQuery, new CallableStatementCallback<SwapResult>() {
            @Override
            public SwapResult doInCallableStatement(CallableStatement cs) throws SQLException {
                cs.setInt(1, param1);
                cs.setInt(2, param2);
                cs.registerOutParameter(1, Types.INTEGER);
                cs.registerOutParameter(2, Types.INTEGER);

                SwapResult swapResult = new SwapResult();
                ResultSet rs = null;
                try {
                    rs = cs.executeQuery();
                    while (rs.next()) {
                        int sum = rs.getInt(1);
                        swapResult.results.add(sum);
                    }
                } finally {
                    JdbcUtils.closeResultSet(rs);
                }
                int output1 = cs.getInt(1);
                int output2 = cs.getInt(2);
                swapResult.swap = new Swap(output1, output2);
                return swapResult;
            }
        });
        Assert.assertEquals(param1 + param2, result.results.get(0).intValue());
        Assert.assertEquals(param2, result.swap.a);
        Assert.assertEquals(param1, result.swap.b);

        PluginTestVerifier verifier = PluginTestVerifierHolder.getInstance();
        verifier.printCache();
        verifier.verifyTraceCount(5);

        // Driver#connect(String, Properties)
        Method connect = jdbcApi.getDriver().getConnect();
        verifier.verifyTrace(Expectations.event(DB_TYPE, connect, null, databaseAddress, databaseName, Expectations.cachedArgs(jdbcUrl)));

        // ConnectionJDBC2#prepareCall(String)
        Method prepareCall = jdbcApi.getConnection().getPrepareCall();
        verifier.verifyTrace(Expectations.event(DB_TYPE, prepareCall, null, databaseAddress, databaseName, Expectations.sql(storedProcedureQuery, null)));

        // JtdsCallableStatement#registerOutParameter(int, int)
        Method registerOutParameter = jdbcApi.getCallableStatement().getRegisterOutParameter();
        // param 1
        verifier.verifyTrace(Expectations.event(DB_TYPE, registerOutParameter, null, databaseAddress, databaseName, Expectations.args(1, Types.INTEGER)));
        // param 2
        verifier.verifyTrace(Expectations.event(DB_TYPE, registerOutParameter, null, databaseAddress, databaseName, Expectations.args(2, Types.INTEGER)));

        // JtdsPreparedStatement#executeQuery
        Method executeQuery = jdbcApi.getPreparedStatement().getExecuteQuery();
        verifier.verifyTrace(Expectations.event(DB_EXECUTE_QUERY, executeQuery, null, databaseAddress, databaseName, Expectations.sql(storedProcedureQuery, null, param1 + ", " + param2)));
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.network.implementation; import com.azure.core.annotation.ExpectedResponses; import com.azure.core.annotation.Get; import com.azure.core.annotation.HeaderParam; import com.azure.core.annotation.Headers; import com.azure.core.annotation.Host; import com.azure.core.annotation.HostParam; import com.azure.core.annotation.PathParam; import com.azure.core.annotation.QueryParam; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceInterface; import com.azure.core.annotation.ServiceMethod; import com.azure.core.annotation.UnexpectedResponseExceptionType; import com.azure.core.http.rest.PagedFlux; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.PagedResponse; import com.azure.core.http.rest.PagedResponseBase; import com.azure.core.http.rest.Response; import com.azure.core.http.rest.RestProxy; import com.azure.core.management.exception.ManagementException; import com.azure.core.util.Context; import com.azure.core.util.FluxUtil; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.network.fluent.BgpServiceCommunitiesClient; import com.azure.resourcemanager.network.fluent.models.BgpServiceCommunityInner; import com.azure.resourcemanager.network.models.BgpServiceCommunityListResult; import reactor.core.publisher.Mono; /** An instance of this class provides access to all the operations defined in BgpServiceCommunitiesClient. */ public final class BgpServiceCommunitiesClientImpl implements BgpServiceCommunitiesClient { private final ClientLogger logger = new ClientLogger(BgpServiceCommunitiesClientImpl.class); /** The proxy service used to perform REST calls. */ private final BgpServiceCommunitiesService service; /** The service client containing this operation class. 
*/ private final NetworkManagementClientImpl client; /** * Initializes an instance of BgpServiceCommunitiesClientImpl. * * @param client the instance of the service client containing this operation class. */ BgpServiceCommunitiesClientImpl(NetworkManagementClientImpl client) { this.service = RestProxy .create(BgpServiceCommunitiesService.class, client.getHttpPipeline(), client.getSerializerAdapter()); this.client = client; } /** * The interface defining all the services for NetworkManagementClientBgpServiceCommunities to be used by the proxy * service to perform REST calls. */ @Host("{$host}") @ServiceInterface(name = "NetworkManagementCli") private interface BgpServiceCommunitiesService { @Headers({"Content-Type: application/json"}) @Get("/subscriptions/{subscriptionId}/providers/Microsoft.Network/bgpServiceCommunities") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<BgpServiceCommunityListResult>> list( @HostParam("$host") String endpoint, @QueryParam("api-version") String apiVersion, @PathParam("subscriptionId") String subscriptionId, @HeaderParam("Accept") String accept, Context context); @Headers({"Content-Type: application/json"}) @Get("{nextLink}") @ExpectedResponses({200}) @UnexpectedResponseExceptionType(ManagementException.class) Mono<Response<BgpServiceCommunityListResult>> listNext( @PathParam(value = "nextLink", encoded = true) String nextLink, @HostParam("$host") String endpoint, @HeaderParam("Accept") String accept, Context context); } /** * Gets all the available bgp service communities. * * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return all the available bgp service communities along with {@link PagedResponse} on successful completion of * {@link Mono}. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) private Mono<PagedResponse<BgpServiceCommunityInner>> listSinglePageAsync() { if (this.client.getEndpoint() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getEndpoint() is required and cannot be null.")); } if (this.client.getSubscriptionId() == null) { return Mono .error( new IllegalArgumentException( "Parameter this.client.getSubscriptionId() is required and cannot be null.")); } final String apiVersion = "2021-05-01"; final String accept = "application/json"; return FluxUtil .withContext( context -> service .list(this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), accept, context)) .<PagedResponse<BgpServiceCommunityInner>>map( res -> new PagedResponseBase<>( res.getRequest(), res.getStatusCode(), res.getHeaders(), res.getValue().value(), res.getValue().nextLink(), null)) .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly())); } /** * Gets all the available bgp service communities. * * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return all the available bgp service communities along with {@link PagedResponse} on successful completion of * {@link Mono}. 
*/
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<BgpServiceCommunityInner>> listSinglePageAsync(Context context) {
        // Guard clauses: endpoint and subscription id are both required to build the
        // request URL; surface the problem reactively instead of throwing synchronously.
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        if (this.client.getSubscriptionId() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getSubscriptionId() is required and cannot be null."));
        }
        final String apiVersion = "2021-05-01";
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        // Issue the REST call and repackage the service response as a PagedResponse
        // (last argument is the deserialized headers, which this operation does not have).
        return service
            .list(this.client.getEndpoint(), apiVersion, this.client.getSubscriptionId(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }

    /**
     * Gets all the available bgp service communities.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all the available bgp service communities as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedFlux<BgpServiceCommunityInner> listAsync() {
        // First page comes from listSinglePageAsync(); subsequent pages follow nextLink.
        return new PagedFlux<>(() -> listSinglePageAsync(), nextLink -> listNextSinglePageAsync(nextLink));
    }

    /**
     * Gets all the available bgp service communities.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all the available bgp service communities as paginated response with {@link PagedFlux}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    private PagedFlux<BgpServiceCommunityInner> listAsync(Context context) {
        // Same pagination as listAsync(), but the caller-supplied Context is threaded
        // through every page request.
        return new PagedFlux<>(
            () -> listSinglePageAsync(context), nextLink -> listNextSinglePageAsync(nextLink, context));
    }

    /**
     * Gets all the available bgp service communities.
     *
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all the available bgp service communities as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BgpServiceCommunityInner> list() {
        // Synchronous facade over the async paged stream.
        return new PagedIterable<>(listAsync());
    }

    /**
     * Gets all the available bgp service communities.
     *
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return all the available bgp service communities as paginated response with {@link PagedIterable}.
     */
    @ServiceMethod(returns = ReturnType.COLLECTION)
    public PagedIterable<BgpServiceCommunityInner> list(Context context) {
        return new PagedIterable<>(listAsync(context));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return response for the ListServiceCommunity API service call along with {@link PagedResponse} on successful
     *     completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<BgpServiceCommunityInner>> listNextSinglePageAsync(String nextLink) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        // No explicit Context overload here: capture the Reactor subscriber context and
        // merge in the client-level context instead.
        return FluxUtil
            .withContext(context -> service.listNext(nextLink, this.client.getEndpoint(), accept, context))
            .<PagedResponse<BgpServiceCommunityInner>>map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null))
            .contextWrite(context -> context.putAll(FluxUtil.toReactorContext(this.client.getContext()).readOnly()));
    }

    /**
     * Get the next page of items.
     *
     * @param nextLink The nextLink parameter.
     * @param context The context to associate with this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation.
     * @throws ManagementException thrown if the request is rejected by server.
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
     * @return response for the ListServiceCommunity API service call along with {@link PagedResponse} on successful
     *     completion of {@link Mono}.
     */
    @ServiceMethod(returns = ReturnType.SINGLE)
    private Mono<PagedResponse<BgpServiceCommunityInner>> listNextSinglePageAsync(String nextLink, Context context) {
        if (nextLink == null) {
            return Mono.error(new IllegalArgumentException("Parameter nextLink is required and cannot be null."));
        }
        if (this.client.getEndpoint() == null) {
            return Mono
                .error(
                    new IllegalArgumentException(
                        "Parameter this.client.getEndpoint() is required and cannot be null."));
        }
        final String accept = "application/json";
        context = this.client.mergeContext(context);
        return service
            .listNext(nextLink, this.client.getEndpoint(), accept, context)
            .map(
                res ->
                    new PagedResponseBase<>(
                        res.getRequest(),
                        res.getStatusCode(),
                        res.getHeaders(),
                        res.getValue().value(),
                        res.getValue().nextLink(),
                        null));
    }
}
package com.miguelbcr.ui.rx_paparazzo2.sample.activities;

import android.content.res.Configuration;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.Toolbar;
import androidx.core.content.ContextCompat;
import android.view.View;
import android.widget.Toast;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.miguelbcr.ui.rx_paparazzo2.RxPaparazzo;
import com.miguelbcr.ui.rx_paparazzo2.entities.FileData;
import com.miguelbcr.ui.rx_paparazzo2.entities.Response;
import com.miguelbcr.ui.rx_paparazzo2.entities.size.CustomMaxSize;
import com.miguelbcr.ui.rx_paparazzo2.entities.size.OriginalSize;
import com.miguelbcr.ui.rx_paparazzo2.entities.size.Size;
import com.miguelbcr.ui.rx_paparazzo2.entities.size.SmallSize;
import com.miguelbcr.ui.rx_paparazzo2.sample.R;
import com.miguelbcr.ui.rx_paparazzo2.sample.adapters.ImagesAdapter;
import com.yalantis.ucrop.UCrop;
import io.reactivex.Observable;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.schedulers.Schedulers;
import java.io.File;
import java.util.ArrayList;
import java.util.List;

/**
 * Sample screen exercising the RxPaparazzo API: taking photos with the camera,
 * picking single/multiple images or arbitrary files (optionally cropped via uCrop),
 * and rendering the resulting {@link FileData} items in a horizontal RecyclerView.
 * Picked files survive configuration changes via {@link #onSaveInstanceState}.
 */
public class SampleActivity extends AppCompatActivity implements Testable {
  // Bundle key under which the picked files are persisted across recreation.
  private static final String STATE_FILES = "FILES";
  // Upper bound passed to RxPaparazzo for each picked/captured file.
  public static final int ONE_MEGABYTE_IN_BYTES = 1000000;
  private RecyclerView recyclerView;
  // Files picked so far; serialized into the saved-instance Bundle.
  private ArrayList<FileData> fileDataList;
  // The resize policy used by the most recent pick, exposed for tests via getSize().
  private Size size;

  @Override protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.sample_layout);
    configureToolbar();
    fileDataList = new ArrayList<>();
    // Restore any files picked before a configuration change / process recreation.
    if (savedInstanceState != null) {
      if (savedInstanceState.containsKey(STATE_FILES)) {
        // NOTE(review): raw unchecked cast — assumes the Bundle holds a
        // List<FileData>; consider a checked copy. TODO confirm.
        List files = (List) savedInstanceState.getSerializable(STATE_FILES);
        fileDataList.addAll(files);
      }
    }
    initViews();
  }

  @Override public void onConfigurationChanged(Configuration newConfig) {
    // No manual view adjustments needed; declared so the activity is not
    // recreated on the config changes listed in the manifest (presumably —
    // verify against AndroidManifest.xml).
    super.onConfigurationChanged(newConfig);
  }

  @Override protected void onSaveInstanceState(Bundle outState) {
    super.onSaveInstanceState(outState);
    // Persist picked files so they can be restored in onCreate().
    outState.putSerializable(STATE_FILES, fileDataList);
  }

  /** Installs the toolbar as the action bar and sets the screen title. */
  private void configureToolbar() {
    Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
    setSupportActionBar(toolbar);
    getSupportActionBar().setTitle(R.string.app_name);
  }

  /** Wires up the image list and one FAB per supported picking mode. */
  private void initViews() {
    LinearLayoutManager layoutManager = new LinearLayoutManager(this);
    layoutManager.setOrientation(LinearLayoutManager.HORIZONTAL);

    recyclerView = (RecyclerView) findViewById(R.id.rv_images);
    recyclerView.setHasFixedSize(true);
    recyclerView.setLayoutManager(layoutManager);

    findViewById(R.id.fab_camera).setOnClickListener(v -> captureImage());
    findViewById(R.id.fab_camera_crop).setOnClickListener(v -> captureImageWithCrop());
    findViewById(R.id.fab_pickup_image).setOnClickListener(v -> pickupImage());
    findViewById(R.id.fab_pickup_images).setOnClickListener(v -> pickupImages());
    findViewById(R.id.fab_pickup_file).setOnClickListener(v -> pickupFile());
    findViewById(R.id.fab_pickup_files).setOnClickListener(v -> pickupFiles());
    findViewById(R.id.fab_pickup_multiple_types_file)
        .setOnClickListener(v -> pickupMultipleTypesFile());
    findViewById(R.id.fab_pickup_multiple_types_files)
        .setOnClickListener(v -> pickupMultipleTypesFiles());

    // Re-render anything restored from the saved instance state.
    loadImages();
  }

  /** Takes a single photo with the camera, downscaled to at most 512px. */
  private void captureImage() {
    CustomMaxSize size = new CustomMaxSize(512);
    Observable<Response<SampleActivity, FileData>> takeOnePhoto = pickSingle(null, size)
        .usingCamera();

    processSingle(takeOnePhoto);
  }

  /** Takes a single photo and opens the uCrop UI on it, keeping the original size. */
  private void captureImageWithCrop() {
    UCrop.Options options = new UCrop.Options();
    options.setToolbarColor(ContextCompat.getColor(SampleActivity.this, R.color.colorAccent));
    options.setToolbarTitle("Cropping single photo");

    OriginalSize size = new OriginalSize();

    Observable<Response<SampleActivity, FileData>> takePhotoAndCrop = pickSingle(options, size)
        .usingCamera();

    processSingle(takePhotoAndCrop);
  }

  /** Picks a single image from the gallery and crops it. */
  private void pickupImage() {
    UCrop.Options options = new UCrop.Options();
    options.setToolbarColor(ContextCompat.getColor(SampleActivity.this, R.color.colorPrimaryDark));
    options.setToolbarTitle("Cropping single image");

    Observable<Response<SampleActivity, FileData>> pickUsingGallery = pickSingle(options, new CustomMaxSize(500))
        .usingGallery();

    processSingle(pickUsingGallery);
  }

  /** Picks several images from the gallery, downscaled to SmallSize. */
  private void pickupImages() {
    Observable<Response<SampleActivity, List<FileData>>> pickMultiple = pickMultiple(new SmallSize())
        .usingGallery();

    processMultiple(pickMultiple);
  }

  /** Picks a single file via the file picker and crops it. */
  private void pickupFile() {
    UCrop.Options options = new UCrop.Options();
    options.setToolbarColor(ContextCompat.getColor(SampleActivity.this, R.color.colorPrimaryDark));
    options.setToolbarTitle("Cropping single file");

    Observable<Response<SampleActivity, FileData>> pickUsingGallery = pickSingle(options, new CustomMaxSize(500))
        .usingFiles();

    processSingle(pickUsingGallery);
  }

  /** Picks several files via the file picker. */
  private void pickupFiles() {
    Size size = new SmallSize();

    Observable<Response<SampleActivity, List<FileData>>> pickMultiple = pickMultiple(size)
        .usingFiles();

    processMultiple(pickMultiple);
  }

  /** Picks one file of several allowed mime types via the system document picker. */
  private void pickupMultipleTypesFile() {
    Observable<Response<SampleActivity, FileData>> pickUsingFiles = pickSingle(null, new SmallSize())
        .setMultipleMimeType("image/jpeg", "image/jpg", "image/png", "application/pdf")
        .useInternalStorage()
        .useDocumentPicker()
        .usingFiles();

    processSingle(pickUsingFiles);
  }

  /** Picks multiple files of several allowed mime types via the system document picker. */
  private void pickupMultipleTypesFiles() {
    Observable<Response<SampleActivity, List<FileData>>> pickMultiple = pickMultiple(new SmallSize())
        .setMultipleMimeType("image/jpeg", "image/jpg", "image/png", "application/pdf")
        .useInternalStorage()
        .useDocumentPicker()
        .usingFiles();

    processMultiple(pickMultiple);
  }

  /**
   * Subscribes to a single-selection pick: on success loads the picked file into the
   * list, on error shows a toast.
   * NOTE(review): the Disposable is discarded here (unlike processMultiple) — the
   * subscription cannot be cancelled; confirm this is intentional for the sample.
   */
  private void processSingle(Observable<Response<SampleActivity, FileData>> pickUsingGallery) {
    pickUsingGallery
        .subscribeOn(Schedulers.io())
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(response -> {
          // response.targetUI() is the (possibly recreated) activity instance, so UI
          // work is dispatched through it rather than through `this`.
          if (PickerUtil.checkResultCode(SampleActivity.this, response.resultCode())) {
            response.targetUI().loadImage(response.data());
          }
        }, throwable -> {
          throwable.printStackTrace();

          Toast.makeText(getApplicationContext(), "ERROR " + throwable.getMessage(), Toast.LENGTH_SHORT).show();
        });
  }

  /**
   * Builds a single-selection RxPaparazzo request with a size cap, media-scanner
   * registration and an optional crop step.
   *
   * @param options uCrop options; when null no cropping is configured
   * @param size the resize policy to apply to the picked file
   */
  private RxPaparazzo.SingleSelectionBuilder<SampleActivity> pickSingle(UCrop.Options options, Size size) {
    this.size = size;

    RxPaparazzo.SingleSelectionBuilder<SampleActivity> resized = RxPaparazzo.single(this)
        .setMaximumFileSizeInBytes(ONE_MEGABYTE_IN_BYTES)
        .size(size)
        .sendToMediaScanner();

    if (options != null) {
      // Builder is mutated in place; the returned reference is the same object.
      resized.crop(options);
    }

    return resized;
  }

  /**
   * Subscribes to a multi-selection pick: a one-element result is shown as a single
   * image, otherwise the whole list is rendered; errors surface as a toast.
   */
  private Disposable processMultiple(Observable<Response<SampleActivity, List<FileData>>> pickMultiple) {
    return pickMultiple
        .subscribeOn(Schedulers.io())
        .observeOn(AndroidSchedulers.mainThread())
        .subscribe(response -> {
          if (PickerUtil.checkResultCode(SampleActivity.this, response.resultCode())) {
            if (response.data().size() == 1) {
              response.targetUI().loadImage(response.data().get(0));
            } else {
              response.targetUI().loadImages(response.data());
            }
          }
        }, throwable -> {
          throwable.printStackTrace();

          Toast.makeText(getApplicationContext(), "ERROR " + throwable.getMessage(), Toast.LENGTH_SHORT).show();
        });
  }

  /** Builds a multi-selection RxPaparazzo request with size cap, crop and media scanning. */
  private RxPaparazzo.MultipleSelectionBuilder<SampleActivity> pickMultiple(Size size) {
    this.size = size;

    return RxPaparazzo.multiple(this)
        .setMaximumFileSizeInBytes(ONE_MEGABYTE_IN_BYTES)
        .crop()
        .sendToMediaScanner()
        .size(size);
  }

  /** Replaces the current list with a single picked file and re-renders. */
  private void loadImage(FileData fileData) {
    this.fileDataList = new ArrayList<>();
    this.fileDataList.add(fileData);

    loadImages();
  }

  /** Replaces the current list with the given picked files and re-renders. */
  private void loadImages(List<FileData> fileDataList) {
    this.fileDataList = new ArrayList<>(fileDataList);

    loadImages();
  }

  /** Renders the current file list; hidden/no-op when there is nothing to show. */
  private void loadImages() {
    if (fileDataList == null || fileDataList.isEmpty()) {
      return;
    }

    recyclerView.setVisibility(View.VISIBLE);
    recyclerView.setAdapter(new ImagesAdapter(fileDataList));
  }

  @Override public List<FileData> getFileDatas() {
    return fileDataList;
  }

  @Override public List<String> getFilePaths() {
    // Collect the absolute path of every picked file that has a backing File.
    List<String> filesPaths = new ArrayList<>();

    for (FileData fileData : fileDataList) {
      File file = fileData.getFile();
      if (file != null) {
        filesPaths.add(file.getAbsolutePath());
      }
    }

    return filesPaths;
  }

  @Override public Size getSize() {
    return size;
  }
}
/*================================================================================
Copyright (c) 2013 Steve Jin, All Rights Reserved.
Copyright (c) 2009 VMware, Inc. All Rights Reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

* Neither the name of VMware, Inc. nor the names of its contributors may be used
to endorse or promote products derived from this software without specific prior
written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL VMWARE, INC. OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
================================================================================*/

package com.vmware.vim25.ws;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.ProtocolException;
import java.net.URL;
import java.rmi.RemoteException;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;

import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;

/**
 * The Web Service Engine: posts hand-built SOAP envelopes to a vSphere endpoint
 * over plain {@link HttpURLConnection} and deserializes the XML responses.
 *
 * <p>Not thread-safe: the session cookie and SOAPAction are mutable instance state.
 *
 * @author Steve Jin (sjin@vmware.com)
 */
final public class WSClient
{
  private final static String SOAP_ACTION_HEADER = "SOAPAction";
  private final static String SOAP_ACTION_V40 = "urn:vim25/4.0";
  private final static String SOAP_ACTION_V41 = "urn:vim25/4.1";
  private final static String SOAP_ACTION_V50 = "urn:vim25/5.0";
  private final static String SOAP_ACTION_V51 = "urn:vim25/5.1";
  private final static String SOAP_ACTION_V55 = "urn:vim25/5.5";

  private URL baseUrl = null;
  // Session cookie captured from the first "Set-Cookie" response header and
  // replayed on every subsequent request.
  private String cookie = null;
  private String vimNameSpace = null;
  private String soapAction = null;
  private int connectTimeout = 10*1000; // milliseconds; <=0 disables the override
  private int readTimeout = 30*1000;    // milliseconds; <=0 disables the override
  XmlGen xmlGen = new XmlGenDom();

  /** Connects to {@code serverUrl}, ignoring SSL certificate validation. */
  public WSClient(String serverUrl) throws MalformedURLException
  {
    this(serverUrl, true);
  }

  /**
   * @param serverUrl the SDK endpoint, e.g. https://host/sdk (trailing '/' is stripped)
   * @param ignoreCert when true, installs an all-trusting SSL context and hostname
   *     verifier process-wide (JVM defaults) — only suitable for lab environments
   * @throws MalformedURLException if {@code serverUrl} is not a valid URL
   */
  public WSClient(String serverUrl, boolean ignoreCert) throws MalformedURLException
  {
    if (serverUrl.endsWith("/"))
    {
      serverUrl = serverUrl.substring(0, serverUrl.length() - 1);
    }
    this.baseUrl = new URL(serverUrl);
    if (ignoreCert)
    {
      try
      {
        trustAllHttpsCertificates();
        HttpsURLConnection.setDefaultHostnameVerifier(
          new HostnameVerifier()
          {
            public boolean verify(String urlHostName, SSLSession session)
            {
              return true;
            }
          });
      }
      catch (Exception ignored)
      {
        // Best-effort: if the all-trusting setup fails, fall back to the JVM's
        // default certificate validation rather than failing construction.
      }
    }
  }

  /**
   * Invokes a SOAP method and deserializes the response into the given type.
   *
   * @param methodName the vim25 operation name
   * @param paras the operation arguments
   * @param returnType the declared return type used to drive deserialization
   * @return the deserialized result object
   * @throws RemoteException wrapping any transport or parsing failure (cause preserved)
   */
  public Object invoke(String methodName, Argument[] paras, String returnType) throws RemoteException
  {
    String soapMsg = XmlGen.toXML(methodName, paras, this.vimNameSpace);

    InputStream is = null;
    try
    {
      is = post(soapMsg);
      return xmlGen.fromXML(returnType, is);
    }
    catch (Exception e1)
    {
      // Fix: preserve the original exception as the cause instead of dropping it.
      throw new RemoteException("VI SDK invoke exception:" + e1, e1);
    }
    finally
    {
      if (is != null)
      {
        try
        {
          is.close();
        }
        catch (IOException ignored)
        {
          // best-effort close; nothing useful to do here
        }
      }
    }
  }

  /**
   * Invokes a SOAP method and returns the raw response body as a string
   * (useful for debugging or pass-through processing).
   *
   * @throws RemoteException wrapping any transport failure (cause preserved)
   */
  public StringBuffer invokeAsString(String methodName, Argument[] paras) throws RemoteException
  {
    String soapMsg = XmlGen.toXML(methodName, paras, this.vimNameSpace);

    InputStream is = null;
    try
    {
      is = post(soapMsg);
      return readStream(is);
    }
    catch (Exception e)
    {
      throw new RemoteException("VI SDK invoke exception:" + e, e);
    }
    finally
    {
      // Fix: the response stream was previously leaked on both success and failure.
      if (is != null)
      {
        try
        {
          is.close();
        }
        catch (IOException ignored)
        {
        }
      }
    }
  }

  /**
   * POSTs the SOAP envelope and returns the response stream. SOAP faults arrive
   * with an HTTP error status, so the error stream is returned in that case and
   * left to the caller's deserializer to interpret. The caller must close the
   * returned stream.
   */
  public InputStream post(String soapMsg) throws IOException
  {
    HttpURLConnection postCon = (HttpURLConnection) baseUrl.openConnection();

    if (connectTimeout > 0)
      postCon.setConnectTimeout(connectTimeout);
    if (readTimeout > 0)
      postCon.setReadTimeout(readTimeout);

    try
    {
      postCon.setRequestMethod("POST");
    }
    catch (ProtocolException e)
    {
      // "POST" is always a legal method name, so this cannot realistically happen.
      e.printStackTrace();
    }
    postCon.setDoOutput(true);
    postCon.setDoInput(true);

    postCon.setRequestProperty(SOAP_ACTION_HEADER, soapAction);
    postCon.setRequestProperty("Content-Type", "text/xml; charset=utf-8");

    if (cookie != null)
    {
      postCon.setRequestProperty("Cookie", cookie);
    }

    OutputStream os = postCon.getOutputStream();
    // "UTF-8" is the canonical charset name (the historical alias "UTF8" resolves
    // to the same charset).
    OutputStreamWriter out = new OutputStreamWriter(os, "UTF-8");
    try
    {
      out.write(soapMsg);
    }
    finally
    {
      // Fix: close the writer even when write() throws.
      out.close();
    }

    InputStream is;
    try
    {
      is = postCon.getInputStream();
    }
    catch (IOException ioe)
    {
      // HTTP error status: the SOAP fault body is on the error stream.
      is = postCon.getErrorStream();
    }

    if (cookie == null)
    {
      // First successful exchange: capture the session cookie for reuse.
      cookie = postCon.getHeaderField("Set-Cookie");
    }
    return is;
  }

  public URL getBaseUrl()
  {
    return this.baseUrl;
  }

  public void setBaseUrl(URL baseUrl)
  {
    this.baseUrl = baseUrl;
  }

  public String getCookie()
  {
    return cookie;
  }

  public void setCookie(String cookie)
  {
    this.cookie = cookie;
  }

  public String getVimNameSpace()
  {
    return vimNameSpace;
  }

  public void setVimNameSpace(String vimNameSpace)
  {
    this.vimNameSpace = vimNameSpace;
  }

  public void setConnectTimeout(int timeoutMilliSec)
  {
    this.connectTimeout = timeoutMilliSec;
  }

  public int getConnectTimeout()
  {
    return this.connectTimeout;
  }

  public void setReadTimeout(int timeoutMilliSec)
  {
    this.readTimeout = timeoutMilliSec;
  }

  public int getReadTimeout()
  {
    return this.readTimeout;
  }

  /*===============================================
   * API versions:
   * "2.0.0"  VI 3.0
   * "2.5.0"  VI 3.5 (and u1)
   * "2.5u2"  VI 3.5u2 (and u3, u4)
   * "4.0"    vSphere 4.0 (and u1)
   * "4.1"    vSphere 4.1
   * "5.0"    vSphere 5.0
   * "5.1"    vSphere 5.1
   ===============================================*/
  /** Maps an API version string onto the SOAPAction header value; unknown versions default to the latest. */
  public void setSoapActionOnApiVersion(String apiVersion)
  {
    if ("4.0".equals(apiVersion))
    {
      soapAction = SOAP_ACTION_V40;
    }
    else if ("4.1".equals(apiVersion))
    {
      soapAction = SOAP_ACTION_V41;
    }
    else if ("5.0".equals(apiVersion))
    {
      soapAction = SOAP_ACTION_V50;
    }
    else if ("5.1".equals(apiVersion))
    {
      soapAction = SOAP_ACTION_V51;
    }
    else if ("5.5".equals(apiVersion))
    {
      soapAction = SOAP_ACTION_V55;
    }
    else
    { //always defaults to latest version
      soapAction = SOAP_ACTION_V55;
    }
  }

  /** Drains the stream into a StringBuffer, one line at a time (line separators are dropped). */
  private StringBuffer readStream(InputStream is) throws IOException
  {
    StringBuffer sb = new StringBuffer();
    BufferedReader in = new BufferedReader(new InputStreamReader(is));
    try
    {
      String lineStr;
      while ((lineStr = in.readLine()) != null)
      {
        sb.append(lineStr);
      }
    }
    finally
    {
      // Fix: the reader was previously leaked when readLine() threw.
      in.close();
    }
    return sb;
  }

  /**
   * Installs an all-trusting TrustManager as the JVM-wide default SSL socket
   * factory. SECURITY: disables certificate validation for the whole process;
   * only acceptable against trusted lab hosts.
   */
  private static void trustAllHttpsCertificates()
    throws NoSuchAlgorithmException, KeyManagementException
  {
    TrustManager[] trustAllCerts = new TrustManager[1];
    trustAllCerts[0] = new TrustAllManager();
    // "SSL" is kept for compatibility with old ESX hosts; modern JREs map it to TLS.
    SSLContext sc = SSLContext.getInstance("SSL");
    sc.init(null, trustAllCerts, null);
    HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
  }

  /** TrustManager that accepts every certificate chain without validation. */
  private static class TrustAllManager implements X509TrustManager
  {
    public X509Certificate[] getAcceptedIssuers()
    {
      // Fix: the X509TrustManager contract requires a non-null (possibly empty)
      // array; returning null can break callers that iterate the result.
      return new X509Certificate[0];
    }

    public void checkServerTrusted(X509Certificate[] certs, String authType)
      throws CertificateException
    {
    }

    public void checkClientTrusted(X509Certificate[] certs, String authType)
      throws CertificateException
    {
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package opennlp.tools.formats.ad;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import opennlp.tools.formats.ad.ADSentenceStream.Sentence;
import opennlp.tools.sentdetect.SentenceSample;
import opennlp.tools.sentdetect.lang.Factory;
import opennlp.tools.util.InputStreamFactory;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.PlainTextByLineStream;
import opennlp.tools.util.Span;

/**
 * Adapts an Arvores Deitadas (AD) corpus stream into {@link SentenceSample}s.
 * Consecutive sentences belonging to the same text are merged into one document;
 * paragraph and text boundaries are derived from the per-sentence metadata.
 *
 * <b>Note:</b> Do not use this class, internal use only!
 */
public class ADSentenceSampleStream implements ObjectStream<SentenceSample> {

  // Metadata pattern shared by all instances; captures the text id (group 1)
  // and the paragraph number after "p=" (group 2).
  // Fix: compiled once as a class constant instead of per instance.
  // todo: merge the different metadata patterns used across AD corpora
  private static final Pattern meta1 = Pattern
      .compile("^(?:[a-zA-Z\\-]*(\\d+)).*?p=(\\d+).*");

  private final ObjectStream<ADSentenceStream.Sentence> adSentenceStream;

  // Ids of the text/paragraph of the sentence most recently consumed; -1 = none yet.
  private int text = -1;
  private int para = -1;

  private boolean isSameText;
  private boolean isSamePara;
  // Lookahead sentence: read() always keeps the first sentence of the NEXT
  // document here between calls.
  private Sentence sent;
  private boolean isIncludeTitles = true;
  private boolean isTitle;

  // Sorted copy of the Portuguese end-of-sentence characters, for binarySearch.
  private final char[] ptEosCharacters;

  /**
   * Creates a new {@link SentenceSample} stream from a line stream, i.e.
   * {@link ObjectStream}&lt;{@link String}&gt;, that could be a
   * {@link PlainTextByLineStream} object.
   *
   * @param lineStream
   *          a stream of lines as {@link String}
   * @param includeHeadlines
   *          if true will output the sentences marked as news headlines
   */
  public ADSentenceSampleStream(ObjectStream<String> lineStream, boolean includeHeadlines) {
    this.adSentenceStream = new ADSentenceStream(lineStream);
    ptEosCharacters = Factory.ptEosCharacters;
    Arrays.sort(ptEosCharacters);
    this.isIncludeTitles = includeHeadlines;
  }

  /**
   * Creates a new {@link SentenceSample} stream from an {@link InputStreamFactory}.
   *
   * @param in
   *          input stream from the corpus
   * @param charsetName
   *          the charset to use while reading the corpus
   * @param includeHeadlines
   *          if true will output the sentences marked as news headlines
   * @throws IOException if the underlying stream cannot be created
   */
  public ADSentenceSampleStream(InputStreamFactory in, String charsetName,
      boolean includeHeadlines) throws IOException {
    try {
      this.adSentenceStream = new ADSentenceStream(new PlainTextByLineStream(
          in, charsetName));
    } catch (UnsupportedEncodingException e) {
      // UTF-8 is available on all JVMs, will never happen
      throw new IllegalStateException(e);
    }
    ptEosCharacters = Factory.ptEosCharacters;
    Arrays.sort(ptEosCharacters);
    this.isIncludeTitles = includeHeadlines;
  }

  /**
   * Creates a new {@link SentenceSample} stream from a {@link FileInputStream}.
   *
   * @param in
   *          input stream from the corpus
   * @param charsetName
   *          the charset to use while reading the corpus
   * @param includeHeadlines
   *          if true will output the sentences marked as news headlines
   * @deprecated use the {@link InputStreamFactory} based constructor instead
   */
  @Deprecated
  public ADSentenceSampleStream(FileInputStream in, String charsetName,
      boolean includeHeadlines) {
    try {
      this.adSentenceStream = new ADSentenceStream(new PlainTextByLineStream(
          in, charsetName));
    } catch (UnsupportedEncodingException e) {
      // UTF-8 is available on all JVMs, will never happen
      throw new IllegalStateException(e);
    }
    ptEosCharacters = Factory.ptEosCharacters;
    Arrays.sort(ptEosCharacters);
    this.isIncludeTitles = includeHeadlines;
  }

  // The Arvores Deitadas Corpus has information about texts and paragraphs.

  /**
   * Builds one {@link SentenceSample} per corpus text: accumulates all sentences
   * (optionally skipping headlines, and skipping sentences without terminal
   * punctuation) until the text id in the metadata changes.
   *
   * @return the next sample, or {@code null} when the stream is exhausted
   */
  public SentenceSample read() throws IOException {

    if (sent == null) {
      sent = this.adSentenceStream.read();
      updateMeta();
      if (sent == null) {
        return null;
      }
    }

    StringBuilder document = new StringBuilder();
    List<Span> sentences = new ArrayList<Span>();
    do {
      // inner loop: one paragraph; outer loop: one whole text
      do {
        // Fix: simplified the redundant condition
        // "!isTitle || (isTitle && isIncludeTitles)".
        if (!isTitle || isIncludeTitles) {
          if (hasPunctuation(sent.getText())) {
            int start = document.length();
            document.append(sent.getText());
            sentences.add(new Span(start, document.length()));
            document.append(" ");
          }
        }
        sent = this.adSentenceStream.read();
        updateMeta();
      } while (isSamePara);
    } while (isSameText);

    String doc;
    if (document.length() > 0) {
      // drop the trailing separator space appended after the last sentence
      doc = document.substring(0, document.length() - 1);
    } else {
      doc = document.toString();
    }

    return new SentenceSample(doc,
        sentences.toArray(new Span[sentences.size()]));
  }

  /** Returns true when the trimmed text ends with a Portuguese end-of-sentence character. */
  private boolean hasPunctuation(String text) {
    text = text.trim();
    if (text.length() > 0) {
      char lastChar = text.charAt(text.length() - 1);
      if (Arrays.binarySearch(ptEosCharacters, lastChar) >= 0) {
        return true;
      }
    }
    return false;
  }

  // there are some different types of metadata depending on the corpus.

  /**
   * Parses the metadata of the lookahead sentence and refreshes the
   * text/paragraph tracking flags; clears them at end of stream.
   *
   * @throws RuntimeException if the metadata does not match the expected pattern
   */
  private void updateMeta() {
    if (this.sent != null) {
      String meta = this.sent.getMetadata();
      Matcher m = meta1.matcher(meta);
      int currentText;
      int currentPara;
      if (m.matches()) {
        currentText = Integer.parseInt(m.group(1));
        currentPara = Integer.parseInt(m.group(2));
      } else {
        throw new RuntimeException("Invalid metadata: " + meta);
      }
      isSamePara = isSameText = false;
      if (currentText == text)
        isSameText = true;

      if (isSameText && currentPara == para)
        isSamePara = true;

      isTitle = meta.contains("title");

      text = currentText;
      para = currentPara;

    } else {
      this.isSamePara = this.isSameText = false;
    }
  }

  public void reset() throws IOException, UnsupportedOperationException {
    adSentenceStream.reset();
  }

  public void close() throws IOException {
    adSentenceStream.close();
  }
}
/*
 * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
 *
 * Distributable under LGPL license.
 * See terms of license at gnu.org.
 */
package org.jitsi.impl.neomedia.transform.csrc;

import org.jitsi.impl.neomedia.*;
import org.jitsi.impl.neomedia.transform.*;
import org.jitsi.service.neomedia.*;

/**
 * We use this engine to add the list of CSRC identifiers in RTP packets that
 * we send to conference participants during calls where we are the mixer.
 *
 * @author Emil Ivov
 */
public class CsrcTransformEngine
    implements TransformEngine,
               PacketTransformer
{
    /**
     * The <tt>MediaStreamImpl</tt> that this transform engine was created to
     * transform packets for.
     */
    private final MediaStreamImpl mediaStream;

    /**
     * The number currently assigned to CSRC audio level extensions or
     * <tt>-1</tt> if no such ID has been set and audio level extensions should
     * not be transmitted.
     */
    private byte csrcAudioLevelExtID = -1;

    /**
     * The buffer that we use to encode the csrc audio level extensions.
     */
    private byte[] extensionBuff = null;

    /**
     * Indicates the length that we are currently using in the
     * <tt>extensionBuff</tt> buffer.
     */
    private int extensionBuffLen = 0;

    /**
     * The dispatcher that is delivering audio levels to the media steam.
     * Created lazily on the first packet that carries levels.
     */
    private CsrcAudioLevelDispatcher csrcLevelDispatcher = null;

    /**
     * The direction that we are supposed to handle audio levels in.
     */
    private MediaDirection audioLevelDirection = MediaDirection.INACTIVE;

    /**
     * Creates an engine instance that will be adding CSRC lists to the
     * specified <tt>stream</tt>.
     *
     * @param stream that <tt>MediaStream</tt> whose RTP packets we are going
     * to be adding CSRC lists. to
     */
    public CsrcTransformEngine(MediaStreamImpl stream)
    {
        this.mediaStream = stream;
    }

    /**
     * Closes this <tt>PacketTransformer</tt> i.e. releases the resources
     * allocated by it and prepares it for garbage collection.
     */
    public void close()
    {
        // Stop the dispatcher thread if one was ever started.
        if (csrcLevelDispatcher != null)
            csrcLevelDispatcher.stop();
    }

    /**
     * Always returns <tt>null</tt> since this engine does not require any
     * RTCP transformations.
     *
     * @return <tt>null</tt> since this engine does not require any
     * RTCP transformations.
     */
    public PacketTransformer getRTCPTransformer()
    {
        return null;
    }

    /**
     * Returns a reference to this class since it is performing RTP
     * transformations in here.
     *
     * @return a reference to <tt>this</tt> instance of the
     * <tt>CsrcTransformEngine</tt>.
     */
    public PacketTransformer getRTPTransformer()
    {
        return this;
    }

    /**
     * Extracts the list of CSRC identifiers and passes it to the
     * <tt>MediaStream</tt> associated with this engine. Other than that the
     * method does not do any transformations since CSRC lists are part of
     * RFC 3550 and they shouldn't be disrupting the rest of the application.
     *
     * @param pkt the RTP <tt>RawPacket</tt> that we are to extract a CSRC list
     * from.
     *
     * @return the same <tt>RawPacket</tt> that was received as a parameter
     * since we don't need to worry about hiding the CSRC list from the rest
     * of the RTP stack.
     */
    public RawPacket reverseTransform(RawPacket pkt)
    {
        // Only act when an extension ID has been negotiated and we are allowed
        // to receive audio levels.
        if ((csrcAudioLevelExtID > 0)
                && audioLevelDirection.allowsReceiving())
        {
            //extract the audio levels and send them to the dispatcher.
            long[] levels = pkt.extractCsrcLevels(csrcAudioLevelExtID);

            if(levels != null)
            {
                // Lazily start the delivery thread so that level listeners do
                // not run on the packet-processing thread.
                // NOTE(review): lazy init is not synchronized — concurrent
                // reverseTransform() calls could start two threads; confirm
                // this method is only invoked from a single thread.
                if (csrcLevelDispatcher == null)
                {
                    csrcLevelDispatcher = new CsrcAudioLevelDispatcher();
                    new Thread(csrcLevelDispatcher).start();
                }

                csrcLevelDispatcher.addLevels(levels);
            }
        }
        return pkt;
    }

    /**
     * Extracts the list of CSRC identifiers representing participants currently
     * contributing to the media being sent by the <tt>MediaStream</tt>
     * associated with this engine and (unless the list is empty) encodes them
     * into the <tt>RawPacket</tt>.
     *
     * @param pkt the RTP <tt>RawPacket</tt> that we need to add a CSRC list to.
     *
     * @return the updated <tt>RawPacket</tt> instance containing the list of
     * CSRC identifiers.
     */
    public synchronized RawPacket transform(RawPacket pkt)
    {
        // if somebody has modified the packet and added an extension
        // don't process it. As ZRTP creates special RTP packets carrying no
        // RTP data and those packets are used only by ZRTP we don't use them.
        if(pkt.getExtensionBit())
            return pkt;

        long[] csrcList = mediaStream.getLocalContributingSourceIDs();

        if(csrcList == null || csrcList.length == 0)
        {
            //nothing to do.
            return pkt;
        }

        pkt.setCsrcList( csrcList);

        //attach audio levels if we are expected to do so.
        if ((this.csrcAudioLevelExtID > 0)
                && audioLevelDirection.allowsSending()
                && (mediaStream instanceof AudioMediaStreamImpl))
        {
            byte[] levelsExt = createLevelExtensionBuffer(csrcList);
            // extensionBuffLen was set by getExtensionBuff() inside
            // createLevelExtensionBuffer(); the reusable buffer may be longer.
            pkt.addExtension(levelsExt, extensionBuffLen);
        }

        return pkt;
    }

    /**
     * Sets the ID that this transformer should be using for audio level
     * extensions or disables audio level extensions if <tt>extID</tt> is
     * <tt>-1</tt>.
     *
     * @param extID ID that this transformer should be using for audio level
     * extensions or <tt>-1</tt> if audio level extensions should be disabled
     * @param dir the direction that we are expected to hand this extension in.
     *
     */
    public void setCsrcAudioLevelAudioLevelExtensionID(byte extID,
                                                       MediaDirection dir)
    {
        this.csrcAudioLevelExtID = extID;
        this.audioLevelDirection = dir;
    }

    /**
     * Creates a audio level extension buffer containing the level extension
     * header and the audio levels corresponding to (and in the same order as)
     * the <tt>CSRC</tt> IDs in the <tt>csrcList</tt>
     *
     * @param csrcList the list of CSRC IDs whose level we'd like the extension
     * to contain.
     * @return the extension buffer in the form that it should be added to the
     * RTP packet.
     */
    private byte[] createLevelExtensionBuffer(long[] csrcList)
    {
        int buffLen = 1 + //CSRC one byte extension hdr
                        csrcList.length;

        // pad the payload to a 32-bit boundary as RTP header extensions require
        int padLen = 4 - buffLen%4;

        if(padLen == 4)
            padLen = 0;

        buffLen += padLen;

        byte[] extensionBuff = getExtensionBuff(buffLen);

        // one-byte extension header: ID in the high nibble, (length - 1) in the low
        extensionBuff[0] = (byte)((csrcAudioLevelExtID << 4)
                            | (csrcList.length - 1));

        int csrcOffset = 1; // initial offset is equal to ext hdr size

        for(long csrc : csrcList)
        {
            byte level = (byte)((AudioMediaStreamImpl)mediaStream)
                .getLastMeasuredAudioLevel(csrc);
            extensionBuff[csrcOffset] = level;
            csrcOffset ++;
        }

        return extensionBuff;
    }

    /**
     * Returns a reusable byte array which is guaranteed to have the requested
     * <tt>ensureCapacity</tt> length and sets our internal length keeping
     * var.
     *
     * @param ensureCapacity the minimum length that we need the returned buffer
     * to have.
     * @return a reusable <tt>byte[]</tt> array guaranteed to have a length
     * equal to or greater than <tt>ensureCapacity</tt>.
     */
    private byte[] getExtensionBuff(int ensureCapacity)
    {
        // Grow-only buffer reuse: side effect is updating extensionBuffLen,
        // which transform() later reads when attaching the extension.
        if ((extensionBuff == null) || (extensionBuff.length < ensureCapacity))
            extensionBuff = new byte[ensureCapacity];

        extensionBuffLen = ensureCapacity;
        return extensionBuff;
    }

    /**
     * A simple thread that waits for new levels to be reported from incoming
     * RTP packets and then delivers them to the <tt>AudioMediaStream</tt>
     * associated with this engine. The reason we need to do this in a separate
     * thread is, of course, the time sensitive nature of incoming RTP packets.
     */
    private class CsrcAudioLevelDispatcher
        implements Runnable
    {
        /**
         * Indicates whether this thread is supposed to be running.
         * NOTE(review): not volatile and written both inside and outside
         * synchronized blocks; a stop() that races the thread's startup can be
         * overwritten by the unconditional "isRunning = true" in run() —
         * confirm whether this ordering can occur in practice.
         */
        private boolean isRunning = false;

        /**
         * The levels that we last received from the reverseTransform thread.
         * Acts as a single-slot mailbox: a newer report overwrites an
         * undelivered older one.
         */
        private long[] lastReportedLevels = null;

        /**
         * Waits for new levels to be reported via the <tt>addLevels()</tt>
         * method and then delivers them to the <tt>AudioMediaStream</tt> that
         * we are associated with.
         */
        public void run()
        {
            isRunning = true;

            // Audio levels are received in RTP audio streams only.
            if(!(mediaStream instanceof AudioMediaStreamImpl))
                return;

            AudioMediaStreamImpl audioStream
                = (AudioMediaStreamImpl)mediaStream;

            while(isRunning)
            {
                long[] audioLevels;

                synchronized(this)
                {
                    if(lastReportedLevels == null)
                    {
                        try
                        {
                            // Woken by addLevels() or stop(); spurious wakeups
                            // are handled by re-checking via "continue".
                            wait();
                        }
                        catch (InterruptedException ie) {}

                        continue;
                    }
                    else
                    {
                        // Take the pending levels and clear the mailbox.
                        audioLevels = lastReportedLevels;
                        lastReportedLevels = null;
                    }
                }

                // Deliver outside the lock so listeners cannot block producers.
                if(audioLevels != null)
                    audioStream.audioLevelsReceived(audioLevels);
            }
        }

        /**
         * A level matrix that we should deliver to our media stream and
         * its listeners in a separate thread.
         *
         * @param levels the levels that we'd like to queue for processing.
         */
        public void addLevels(long[] levels)
        {
            synchronized(this)
            {
                this.lastReportedLevels = levels;

                notifyAll();
            }
        }

        /**
         * Causes our run method to exit so that this thread would stop
         * handling levels.
         */
        public void stop()
        {
            synchronized(this)
            {
                this.lastReportedLevels = null;

                isRunning = false;
                notifyAll();
            }
        }
    }
}
/* * ============================================================================= * * Copyright (c) 2011-2014, The THYMELEAF team (http://www.thymeleaf.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ============================================================================= */ package uk.ac.abdn.fits.support.thymeleaf.springmail.web; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import javax.mail.MessagingException; import javax.servlet.ServletConfig; import javax.servlet.ServletContext; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpSession; import org.apache.commons.lang.RandomStringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.mock.web.MockHttpServletResponse; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.ExceptionHandler; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.ResponseBody; import 
org.springframework.web.context.ServletConfigAware;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.ViewResolver;
import org.springframework.ui.Model;

import uk.ac.abdn.fits.business.client.option.TOption;
import uk.ac.abdn.fits.business.restful.RESTFulRequest;
import uk.ac.abdn.fits.support.thymeleaf.springmail.service.EmailService;

/**
 * Spring MVC controller that renders travel-matching results with the
 * "matching" Thymeleaf view, saves the rendered HTML to disk, and emails a
 * link to it via {@link EmailService}. Also exposes endpoints for sending
 * mails with attachments or inline images.
 */
@Controller
public class MailController implements ServletConfigAware{

    @Autowired
    private EmailService emailService;

    @Autowired
    private ViewResolver viewResolver;

    // Maps a generated result-file name to its creation time.
    // NOTE(review): static mutable HashMap shared across requests without
    // synchronization, and entries are never removed here - verify another
    // component prunes it, otherwise it grows without bound.
    public static HashMap<String, Calendar> htmlCreationTime = new HashMap<String, Calendar>();

    // @RequestMapping("/")
    // public String root() {
    // return "redirect:/index.html";
    // }

    /* Home page. */
    // @RequestMapping("/index.html")
    // public String index() {
    // return "index.html";
    // }

    /* Sending confirmation page. */
    // @RequestMapping("/sent.html")
    // public String sent() {
    // return "sent.html";
    // }

    // Stored by the ServletConfigAware callback; not read elsewhere in this
    // class.
    private ServletConfig config;

    public void setServletConfig(ServletConfig servletConfig) {
        this.config = servletConfig;
    }

    /*
     * Send HTML mail (simple)
     */
    /**
     * Renders the session's travel options with the "matching" view into a
     * mock response, persists the HTML via {@link #saveAsHtml}, and emails
     * the resulting URL to the recipient taken from session attributes
     * ("fname"/"lname"/"email"), falling back to hard-coded defaults.
     *
     * @param session session holding options and recipient attributes
     * @param request current request, passed to the view renderer
     * @param model model populated with the session's option lists
     * @param locale locale used for the mail template
     * @return a RESTFulRequest acknowledging the send
     * @throws MessagingException declared, though rendering/sending errors
     *         are currently caught and only printed
     */
    @RequestMapping(value = "/sendMailSimple", method = RequestMethod.POST)
    public @ResponseBody RESTFulRequest sendSimpleMail(
        HttpSession session,
        HttpServletRequest request,
        // HttpServletResponse response,
        Model model,
        final Locale locale)
        throws MessagingException {

        String recipientName = "Cheng Zeng";
        String recipientEmail = "c.zeng@abdn.ac.uk";

        String fname = (String)session.getAttribute("fname");
        String lname = (String)session.getAttribute("lname");
        String email = (String)session.getAttribute("email");

        if(fname!= null && lname != null){
            recipientName = fname+" "+ lname;
            System.out.println("recipientName: "+recipientName);
        }

        if(email!= null && !email.equals("")){
            recipientEmail = email;
            System.out.println("recipientEmail: "+recipientEmail);
        }

        // NOTE(review): unchecked casts - assumes earlier request handling
        // stored List<TOption> under these keys; verify against the writers.
        List<TOption> not_relaxed = (List<TOption>)session.getAttribute("options");
        List<TOption> relaxed_options = (List<TOption>)session.getAttribute("relaxed_options");
        String date_of_travel = (String)session.getAttribute("date_of_travel");
        String origin_postcode = (String)session.getAttribute("origin_postcode");

        List<TOption> not_relaxed_rtn = (List<TOption>)session.getAttribute("options_rtn");
        List<TOption> relaxed_options_rtn = (List<TOption>)session.getAttribute("relaxed_options_rtn");
        String origin_postcode_rtn = (String)session.getAttribute("origin_postcode_rtn");

        String url = null;
        View resolvedView;
        try {
            model.addAttribute("date_of_travel", date_of_travel);
            model.addAttribute("origin_postcode",origin_postcode);
            model.addAttribute("options", not_relaxed);
            model.addAttribute("relaxed_options", relaxed_options);
            model.addAttribute("caption", "Transport options ranked using preferences" );

            model.addAttribute("origin_postcode_rtn",origin_postcode_rtn);
            model.addAttribute("options_rtn", not_relaxed_rtn);
            model.addAttribute("relaxed_options_rtn", relaxed_options_rtn);

            model.addAttribute("email_view", "email_view");

            // Render the view into an in-memory response so its HTML can be
            // captured as a string rather than written to the client.
            resolvedView = this.viewResolver.resolveViewName("matching", Locale.UK);
            MockHttpServletResponse mockResp = new MockHttpServletResponse();
            resolvedView.render(model.asMap(), request, mockResp);
            // System.out.println("rendered html : " + mockResp.getContentAsString());

            url = saveAsHtml(request, mockResp.getContentAsString());

            emailService.sendRichMail(recipientName, recipientEmail, locale, url);
        } catch (Exception e) {
            // NOTE(review): failures are swallowed and the success response
            // below is still returned - confirm this best-effort behavior.
            e.printStackTrace();
        }

        return new RESTFulRequest(1, "mail sent to "+ recipientEmail);
    }

    /**
     * Serves a previously generated result page by redirecting to its static
     * location, after checking the file exists on disk.
     *
     * @param filename generated file name (word characters + ".html" only,
     *        enforced by the mapping's regex)
     * @param model unused
     * @param locale unused
     * @return redirect to /files/&lt;filename&gt;, or "error" if missing
     */
    @RequestMapping(value = "/result/{filename:[\\w]+\\.html}", method = RequestMethod.GET)
    public String retrieveHtml(
            @PathVariable("filename") String filename,
            Model model,
            final Locale locale){

        String path = this.getClass().getClassLoader().getResource("").getPath();
        path = path+"../files/match_outputs";

        File file = new File(path+File.separator+filename);
        if(!file.exists()){
            System.out.println("request resource does not exist.");
            return "error";
        }

        System.out.println("retrieveHtml redirect: "+ "redirect: /files/"+filename);
        return "redirect:/files/"+filename;
    }

    /**
     * Writes the rendered HTML to ../files/match_outputs under the classpath
     * root with a generated file name, records the creation time, and builds
     * the public URL for the file.
     *
     * @param request used only to log the servlet-context real path
     * @param htmlContent the rendered page content
     * @return publicly reachable URL of the saved file
     */
    private String saveAsHtml(HttpServletRequest request, String htmlContent){

        // String path = request.getSession().getServletContext().getRealPath("");
        String path = this.getClass().getClassLoader().getResource("").getPath();
        path = path+"../files";

        File dir = new File(path);
        if(!dir.exists()){
            dir.mkdirs();
        }

        path = path+File.separator+"match_outputs";
        dir = new File(path);
        if(!dir.exists()){
            dir.mkdirs();
        }

        String fileName = getFileName();
        String url = null;

        FileWriter fw;
        BufferedWriter bw;
        Calendar c = Calendar.getInstance();
        try {
            // NOTE(review): writer is only closed on the success path - if
            // write() throws, the FileWriter leaks; consider try-with-resources.
            fw = new FileWriter(new File(path+File.separator+fileName));
            bw = new BufferedWriter(fw);
            bw.write(htmlContent);
            bw.flush();
            bw.close();
            htmlCreationTime.put(fileName, c);
        } catch (IOException e) {
            e.printStackTrace();
        }

        path = request.getSession().getServletContext().getRealPath("/WEB-INF/files/");
        System.out.println("session path: "+path);

        // NOTE(review): host/port are hard-coded; the computed session path
        // above is discarded - consider externalizing this base URL.
        // path = "http://localhost:8080/ke/result";
        path = "http://139.133.73.11:8080/ke/result";

        System.out.println("URL: "+path+"/"+fileName);
        return path+"/"+fileName;
    }

    /**
     * Generates a result-file name: timestamp (yyyyMMddHHmmss) + 4 random
     * letters + ".html". Randomness reduces, but does not eliminate,
     * collisions within the same second.
     *
     * @return the generated file name
     */
    public String getFileName(){
        String timeAsString = new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());
        String id = RandomStringUtils.randomAlphabetic(4);
        return timeAsString+id+".html";
    }

    /*
     * Send HTML mail with attachment.
     */
    @RequestMapping(value = "/sendMailWithAttachment", method = RequestMethod.POST)
    public String sendMailWithAttachment(
            @RequestParam("recipientName") final String recipientName,
            @RequestParam("recipientEmail") final String recipientEmail,
            @RequestParam("attachment") final MultipartFile attachment,
            final Locale locale)
            throws MessagingException, IOException {

        this.emailService.sendMailWithAttachment(
                recipientName, recipientEmail, attachment.getOriginalFilename(),
                attachment.getBytes(), attachment.getContentType(), locale);
        return "redirect:sent.html";
    }

    /*
     * Send HTML mail with inline image
     */
    @RequestMapping(value = "/sendMailWithInlineImage", method = RequestMethod.POST)
    public String sendMailWithInline(
            @RequestParam("recipientName") final String recipientName,
            @RequestParam("recipientEmail") final String recipientEmail,
            @RequestParam("image") final MultipartFile image,
            final Locale locale)
            throws MessagingException, IOException {

        this.emailService.sendMailWithInline(
                recipientName, recipientEmail, image.getName(),
                image.getBytes(), image.getContentType(), locale);
        return "redirect:sent.html";
    }

    /** Fallback handler: any uncaught exception maps to the error page. */
    @ExceptionHandler(Exception.class)
    public String error() {
        return "error.html";
    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.io.disk; import java.io.EOFException; import java.util.List; import org.apache.flink.core.memory.MemoryType; import org.junit.Assert; import org.apache.flink.core.memory.MemorySegment; import org.apache.flink.runtime.io.disk.iomanager.BlockChannelReader; import org.apache.flink.runtime.io.disk.iomanager.BlockChannelWriter; import org.apache.flink.runtime.io.disk.iomanager.FileIOChannel; import org.apache.flink.runtime.io.disk.iomanager.ChannelReaderInputView; import org.apache.flink.runtime.io.disk.iomanager.ChannelWriterOutputView; import org.apache.flink.runtime.io.disk.iomanager.IOManager; import org.apache.flink.runtime.io.disk.iomanager.IOManagerAsync; import org.apache.flink.runtime.jobgraph.tasks.AbstractInvokable; import org.apache.flink.runtime.memory.MemoryManager; import org.apache.flink.runtime.operators.testutils.DummyInvokable; import org.apache.flink.runtime.operators.testutils.TestData; import org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator.KeyMode; import org.apache.flink.runtime.operators.testutils.TestData.TupleGenerator.ValueMode; import org.apache.flink.api.common.typeutils.TypeSerializer; import 
org.apache.flink.api.java.tuple.Tuple2;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * Tests for {@link ChannelWriterOutputView} / {@link ChannelReaderInputView}:
 * records are serialized through a block channel writer, read back through a
 * block channel reader, and compared against a re-generated reference stream.
 */
public class ChannelViewsTest
{
	private static final long SEED = 649180756312423613L;

	private static final int KEY_MAX = Integer.MAX_VALUE;

	// String payload lengths for the "short" and "long" record variants.
	private static final int VALUE_SHORT_LENGTH = 114;

	private static final int VALUE_LONG_LENGTH = 112 * 1024;

	private static final int NUM_PAIRS_SHORT = 1000000;

	private static final int NUM_PAIRS_LONG = 3000;

	private static final int MEMORY_SIZE = 1024 * 1024;

	private static final int MEMORY_PAGE_SIZE = 64 * 1024;

	private static final int NUM_MEMORY_SEGMENTS = 3;

	private final AbstractInvokable parentTask = new DummyInvokable();

	private IOManager ioManager;

	private MemoryManager memoryManager;

	// --------------------------------------------------------------------------------------------

	@Before
	public void beforeTest() {
		this.memoryManager = new MemoryManager(MEMORY_SIZE, 1, MEMORY_PAGE_SIZE, MemoryType.HEAP, true);
		this.ioManager = new IOManagerAsync();
	}

	@After
	public void afterTest() {
		// Fails the test if a channel was left open or memory segments were
		// not returned to the memory manager by the test body.
		this.ioManager.shutdown();
		if (!this.ioManager.isProperlyShutDown()) {
			Assert.fail("I/O Manager was not properly shut down.");
		}

		if (memoryManager != null) {
			Assert.assertTrue("Memory leak: not all segments have been returned to the memory manager.",
				this.memoryManager.verifyEmpty());
			this.memoryManager.shutdown();
			this.memoryManager = null;
		}
	}

	// --------------------------------------------------------------------------------------------

	/** Write/read round trip with many short records. */
	@Test
	public void testWriteReadSmallRecords() throws Exception {
		final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

		// create the writer output view
		List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

		// write a number of pairs
		final Tuple2<Integer, String> rec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(rec);
			serializer.serialize(rec, outView);
		}
		this.memoryManager.release(outView.close());

		// create the reader input view
		memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
		// Same seed, so the generator reproduces the written sequence exactly.
		generator.reset();

		// read and re-generate all records and compare them
		final Tuple2<Integer, String> readRec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(rec);
			serializer.deserialize(readRec, inView);

			int k1 = rec.f0;
			String v1 = rec.f1;

			int k2 = readRec.f0;
			String v2 = readRec.f1;

			Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
		}

		this.memoryManager.release(inView.close());
		reader.deleteChannel();
	}

	/** Round trip with records whose values span multiple memory pages. */
	@Test
	public void testWriteAndReadLongRecords() throws Exception {
		final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_LONG_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

		// create the writer output view
		List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

		// write a number of pairs
		final Tuple2<Integer, String> rec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_LONG; i++) {
			generator.next(rec);
			serializer.serialize(rec, outView);
		}
		this.memoryManager.release(outView.close());

		// create the reader input view
		memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
		generator.reset();

		// read and re-generate all records and compare them
		final Tuple2<Integer, String> readRec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_LONG; i++) {
			generator.next(rec);
			serializer.deserialize(readRec, inView);
			final int k1 = rec.f0;
			final String v1 = rec.f1;
			final int k2 = readRec.f0;
			final String v2 = readRec.f1;
			Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
		}

		this.memoryManager.release(inView.close());
		reader.deleteChannel();
	}

	/** Reading one record past the end must raise EOFException. */
	@Test
	public void testReadTooMany() throws Exception {
		final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

		// create the writer output view
		List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

		// write a number of pairs
		final Tuple2<Integer, String> rec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(rec);
			serializer.serialize(rec, outView);
		}
		this.memoryManager.release(outView.close());

		// create the reader input view
		memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
		generator.reset();

		// read and re-generate all records and compare them
		try {
			final Tuple2<Integer, String> readRec = new Tuple2<>();
			// Deliberately iterate one past the written count.
			for (int i = 0; i < NUM_PAIRS_SHORT + 1; i++) {
				generator.next(rec);
				serializer.deserialize(readRec, inView);
				final int k1 = rec.f0;
				final String v1 = rec.f1;
				final int k2 = readRec.f0;
				final String v2 = readRec.f1;
				Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
			}
			Assert.fail("Expected an EOFException which did not occur.");
		}
		catch (EOFException eofex) {
			// expected
		}
		catch (Throwable t) {
			// unexpected
			Assert.fail("Unexpected Exception: " + t.getMessage());
		}

		this.memoryManager.release(inView.close());
		reader.deleteChannel();
	}

	/** Round trip where the reader is not told the block count up front. */
	@Test
	public void testReadWithoutKnownBlockCount() throws Exception {
		final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

		// create the writer output view
		List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

		// write a number of pairs
		final Tuple2<Integer, String> rec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(rec);
			serializer.serialize(rec, outView);
		}
		this.memoryManager.release(outView.close());

		// create the reader input view
		memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
		// Constructor overload without a block count: the reader must detect
		// the end of the stream itself.
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, true);
		generator.reset();

		// read and re-generate all records and compare them
		final Tuple2<Integer, String> readRec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(rec);
			serializer.deserialize(readRec, inView);

			int k1 = rec.f0;
			String v1 = rec.f1;

			int k2 = readRec.f0;
			String v2 = readRec.f1;

			Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
		}

		this.memoryManager.release(inView.close());
		reader.deleteChannel();
	}

	/** Round trip with only a single memory segment on each side. */
	@Test
	public void testWriteReadOneBufferOnly() throws Exception {
		final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

		// create the writer output view
		List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, 1);
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

		// write a number of pairs
		final Tuple2<Integer, String> rec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(rec);
			serializer.serialize(rec, outView);
		}
		this.memoryManager.release(outView.close());

		// create the reader input view
		memory = this.memoryManager.allocatePages(this.parentTask, 1);
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
		generator.reset();

		// read and re-generate all records and compare them
		final Tuple2<Integer, String> readRec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(rec);
			serializer.deserialize(readRec, inView);

			int k1 = rec.f0;
			String v1 = rec.f1;

			int k2 = readRec.f0;
			String v2 = readRec.f1;

			Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
		}

		this.memoryManager.release(inView.close());
		reader.deleteChannel();
	}

	/** Closing the reader after consuming only half of the records. */
	@Test
	public void testWriteReadNotAll() throws Exception {
		final TestData.TupleGenerator generator = new TestData.TupleGenerator(SEED, KEY_MAX, VALUE_SHORT_LENGTH, KeyMode.RANDOM, ValueMode.RANDOM_LENGTH);
		final FileIOChannel.ID channel = this.ioManager.createChannel();
		final TypeSerializer<Tuple2<Integer, String>> serializer = TestData.getIntStringTupleSerializer();

		// create the writer output view
		List<MemorySegment> memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelWriter<MemorySegment> writer = this.ioManager.createBlockChannelWriter(channel);
		final ChannelWriterOutputView outView = new ChannelWriterOutputView(writer, memory, MEMORY_PAGE_SIZE);

		// write a number of pairs
		final Tuple2<Integer, String> rec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT; i++) {
			generator.next(rec);
			serializer.serialize(rec, outView);
		}
		this.memoryManager.release(outView.close());

		// create the reader input view
		memory = this.memoryManager.allocatePages(this.parentTask, NUM_MEMORY_SEGMENTS);
		final BlockChannelReader<MemorySegment> reader = this.ioManager.createBlockChannelReader(channel);
		final ChannelReaderInputView inView = new ChannelReaderInputView(reader, memory, outView.getBlockCount(), true);
		generator.reset();

		// read and re-generate all records and compare them
		final Tuple2<Integer, String> readRec = new Tuple2<>();
		for (int i = 0; i < NUM_PAIRS_SHORT / 2; i++) {
			generator.next(rec);
			serializer.deserialize(readRec, inView);

			int k1 = rec.f0;
			String v1 = rec.f1;

			int k2 = readRec.f0;
			String v2 = readRec.f1;

			Assert.assertTrue("The re-generated and the read record do not match.", k1 == k2 && v1.equals(v2));
		}

		this.memoryManager.release(inView.close());
		reader.deleteChannel();
	}
}
package org.insightech.er.editor.controller.editpart.element.connection;

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.eclipse.draw2d.AbsoluteBendpoint;
import org.eclipse.draw2d.BendpointConnectionRouter;
import org.eclipse.draw2d.ConnectionAnchor;
import org.eclipse.draw2d.geometry.Point;
import org.eclipse.draw2d.geometry.Rectangle;
import org.eclipse.gef.EditPart;
import org.eclipse.gef.EditPolicy;
import org.eclipse.gef.editparts.AbstractConnectionEditPart;
import org.eclipse.gef.editpolicies.ConnectionEndpointEditPolicy;
import org.insightech.er.ERDiagramActivator;
import org.insightech.er.Resources;
import org.insightech.er.editor.controller.editpart.element.node.NodeElementEditPart;
import org.insightech.er.editor.controller.editpart.element.node.TableViewEditPart;
import org.insightech.er.editor.model.AbstractModel;
import org.insightech.er.editor.model.ERDiagram;
import org.insightech.er.editor.model.diagram_contents.element.connection.Bendpoint;
import org.insightech.er.editor.model.diagram_contents.element.connection.ConnectionElement;
import org.insightech.er.editor.model.diagram_contents.element.node.NodeElement;
import org.insightech.er.editor.model.diagram_contents.element.node.category.Category;
import org.insightech.er.editor.model.settings.CategorySetting;
import org.insightech.er.editor.view.figure.anchor.XYChopboxAnchor;
import org.insightech.er.editor.view.figure.connection.ERDiagramConnection;

/**
 * Base GEF edit part for ER-diagram connections (e.g. relations). Listens to
 * its model's property changes and keeps the connection figure's color,
 * visibility (category filtering), anchors and bendpoints in sync.
 */
public abstract class AbstractERDiagramConnectionEditPart extends AbstractConnectionEditPart implements PropertyChangeListener {

    private static Logger logger = Logger.getLogger(AbstractERDiagramConnectionEditPart.class.getName());

    // When true, every received property change is logged at INFO level.
    private static final boolean DEBUG = false;

    /**
     * {@inheritDoc}
     */
    @Override
    public void activate() {
        super.activate();

        // Start observing the model so doPropertyChange() gets invoked.
        final AbstractModel model = (AbstractModel) getModel();
        model.addPropertyChangeListener(this);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void deactivate() {
        // Unsubscribe before the part is torn down to avoid leaking this
        // listener on the model.
        final AbstractModel model = (AbstractModel) getModel();
        model.removePropertyChangeListener(this);

        super.deactivate();
    }

    /**
     * Creates the connection figure, honoring the diagram's bezier-curve
     * setting, and gives it a bendpoint-aware router.
     */
    protected ERDiagramConnection createERDiagramConnection() {
        final boolean bezier = getDiagram().getDiagramContents().getSettings().isUseBezierCurve();

        final ERDiagramConnection connection = new ERDiagramConnection(bezier);
        connection.setConnectionRouter(new BendpointConnectionRouter());

        return connection;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected void createEditPolicies() {
        installEditPolicy(EditPolicy.CONNECTION_ENDPOINTS_ROLE, new ConnectionEndpointEditPolicy());
        // this.installEditPolicy(EditPolicy.GRAPHICAL_NODE_ROLE,
        // new ConnectionGraphicalNodeEditPolicy());
    }

    @Override
    public final void propertyChange(final PropertyChangeEvent event) {
        try {
            if (DEBUG) {
                logger.log(Level.INFO, this.getClass().getName() + ":" + event.getPropertyName() + ":" + event.toString());
            }

            doPropertyChange(event);

        } catch (final Exception e) {
            // Surface listener failures to the user instead of letting them
            // silently kill the notification chain.
            ERDiagramActivator.showExceptionDialog(e);
        }
    }

    /**
     * Dispatches supported model events; subclasses may extend this for
     * additional property names.
     */
    protected void doPropertyChange(final PropertyChangeEvent event) {
        if (event.getPropertyName().equals("refreshBendpoint")) {
            refreshBendpoints();

        } else if (event.getPropertyName().equals("refreshVisuals")) {
            refreshVisuals();
        }
    }

    protected ERDiagram getDiagram() {
        return (ERDiagram) getRoot().getContents().getModel();
    }

    protected Category getCurrentCategory() {
        return getDiagram().getCurrentCategory();
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void refreshVisuals() {
        if (isActive()) {
            final ConnectionElement element = (ConnectionElement) getModel();

            ((ERDiagramConnection) figure).setColor(Resources.getColor(element.getColor()));

            fillterConnectionByCategory();
            decorateRelation();
            calculateAnchorLocation();
            refreshBendpoints();
        }
    }

    /** Refreshes this connection and both endpoint table views. */
    public void refreshVisualsWithColumn() {
        refreshVisuals();

        final TableViewEditPart sourceTableViewEditPart = (TableViewEditPart) getSource();

        if (sourceTableViewEditPart != null) {
            sourceTableViewEditPart.refreshVisuals();
        }

        final TableViewEditPart targetTableViewEditPart = (TableViewEditPart) getTarget();

        if (targetTableViewEditPart != null) {
            targetTableViewEditPart.refreshVisuals();
        }
    }

    // NOTE(review): method name has a typo ("fillter"); kept as-is since
    // subclasses outside this file may follow the same spelling.
    // Hides the figure unless both endpoints belong to the current category
    // (directly, or - when "show referred tables" is enabled - via an
    // element that refers to them). No category means always visible.
    private void fillterConnectionByCategory() {
        final EditPart sourceEditPart = getSource();
        final EditPart targetEditPart = getTarget();

        final ERDiagram diagram = getDiagram();
        if (diagram != null) {
            final Category category = getCurrentCategory();

            if (category != null) {
                figure.setVisible(false);

                final CategorySetting categorySettings = getDiagram().getDiagramContents().getSettings().getCategorySetting();

                if (sourceEditPart != null && targetEditPart != null) {
                    final NodeElement sourceModel = (NodeElement) sourceEditPart.getModel();
                    final NodeElement targetModel = (NodeElement) targetEditPart.getModel();

                    boolean containsSource = false;

                    if (category.contains(sourceModel)) {
                        containsSource = true;

                    } else if (categorySettings.isShowReferredTables()) {
                        for (final NodeElement referringElement : sourceModel.getReferringElementList()) {
                            if (category.contains(referringElement)) {
                                containsSource = true;
                                break;
                            }
                        }
                    }

                    if (containsSource) {
                        if (category.contains(targetModel)) {
                            figure.setVisible(true);

                        } else if (categorySettings.isShowReferredTables()) {
                            for (final NodeElement referringElement : targetModel.getReferringElementList()) {
                                if (category.contains(referringElement)) {
                                    figure.setVisible(true);
                                    break;
                                }
                            }
                        }
                    }
                }

            } else {
                figure.setVisible(true);
            }
        }
    }

    // Positions XYChopbox anchors from the model's percentage offsets
    // (sourceXp/sourceYp, targetXp/targetYp); -1 means "no fixed anchor",
    // in which case the corresponding anchor location is cleared (null).
    private void calculateAnchorLocation() {
        final ConnectionElement connection = (ConnectionElement) getModel();

        final NodeElementEditPart sourceEditPart = (NodeElementEditPart) getSource();

        Point sourcePoint = null;
        Point targetPoint = null;

        if (sourceEditPart != null && connection.getSourceXp() != -1 && connection.getSourceYp() != -1) {
            final Rectangle bounds = sourceEditPart.getFigure().getBounds();
            sourcePoint = new Point(bounds.x + (bounds.width * connection.getSourceXp() / 100), bounds.y + (bounds.height * connection.getSourceYp() / 100));
        }

        final NodeElementEditPart targetEditPart = (NodeElementEditPart) getTarget();

        if (targetEditPart != null && connection.getTargetXp() != -1 && connection.getTargetYp() != -1) {
            final Rectangle bounds = targetEditPart.getFigure().getBounds();
            targetPoint = new Point(bounds.x + (bounds.width * connection.getTargetXp() / 100), bounds.y + (bounds.height * connection.getTargetYp() / 100));
        }

        final ConnectionAnchor sourceAnchor = getConnectionFigure().getSourceAnchor();

        if (sourceAnchor instanceof XYChopboxAnchor) {
            ((XYChopboxAnchor) sourceAnchor).setLocation(sourcePoint);
        }

        final ConnectionAnchor targetAnchor = getConnectionFigure().getTargetAnchor();

        if (targetAnchor instanceof XYChopboxAnchor) {
            ((XYChopboxAnchor) targetAnchor).setLocation(targetPoint);
        }
    }

    /** Rebuilds the figure's routing constraint from the model's bendpoints. */
    protected void refreshBendpoints() {
        final ConnectionElement connection = (ConnectionElement) getModel();

        final List<org.eclipse.draw2d.Bendpoint> constraint = new ArrayList<org.eclipse.draw2d.Bendpoint>();

        for (final Bendpoint bendPoint : connection.getBendpoints()) {
            final List<org.eclipse.draw2d.Bendpoint> realPointList = getRealBendpoint(bendPoint);

            constraint.addAll(realPointList);
        }

        getConnectionFigure().setRoutingConstraint(constraint);
    }

    /**
     * Converts one model bendpoint into draw2d bendpoints; subclasses may
     * override to emit more than one point per model bendpoint.
     */
    protected List<org.eclipse.draw2d.Bendpoint> getRealBendpoint(final Bendpoint bendPoint) {
        final List<org.eclipse.draw2d.Bendpoint> constraint = new ArrayList<org.eclipse.draw2d.Bendpoint>();

        constraint.add(new AbsoluteBendpoint(bendPoint.getX(), bendPoint.getY()));

        return constraint;
    }

    /** Hook for subclasses to decorate the figure; no-op by default. */
    protected void decorateRelation() {}
}
/**
 * Copyright 2011-2017 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.dmdl.windgate.jdbc.driver;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.junit.rules.TestWatcher;
import org.junit.runner.Description;

/**
 * Keep a connection of H2 'in memory' Database.
 * <p>
 * Used as a JUnit rule: {@link #starting(Description)} opens the connection
 * and verifies the in-memory database is fresh, and
 * {@link #finished(Description)} closes it.
 */
public class H2Resource extends TestWatcher {

    private final String name;

    // Test class owning this rule; used to resolve SQL files on the classpath.
    private Class<?> context;

    // Connection held for the lifetime of a single test.
    private Connection connection;

    /**
     * Creates a new instance.
     * The target URL will be {@code "jdbc:h2:mem:<name>"}.
     * @param name simple name of database
     */
    public H2Resource(String name) {
        this.name = name;
    }

    @Override
    protected void starting(Description description) {
        org.h2.Driver.load();
        this.context = description.getTestClass();
        this.connection = open();
        boolean green = false;
        try {
            leakcheck();
            before();
            green = true;
        } catch (Exception e) {
            throw new AssertionError(e);
        } finally {
            if (green == false) {
                // Setup failed: release the connection we already opened.
                finished(description);
            }
        }
    }

    /**
     * Creates a marker table with a fixed name. If a previous test leaked the
     * same in-memory database, the table already exists and this fails,
     * turning the leak into a test error.
     */
    private void leakcheck() {
        try {
            execute0("CREATE TABLE H2_TEST_DUPCHECK (SID IDENTITY PRIMARY KEY)");
        } catch (SQLException e) {
            throw new AssertionError(e);
        }
    }

    /**
     * runs before executes each test.
     * @throws Exception if failed
     */
    protected void before() throws Exception {
        // hook for subclasses; intentionally empty
    }

    /**
     * Creates a new connection.
     * @return the created connection
     */
    public Connection open() {
        try {
            return DriverManager.getConnection(getJdbcUrl());
        } catch (SQLException e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Returns the target URL.
     * @return target URL
     */
    public String getJdbcUrl() {
        return "jdbc:h2:mem:" + name;
    }

    /**
     * Returns query result columns list.
     * @param sql target SQL
     * @return result rows list that contains columns array
     */
    public List<List<Object>> query(String sql) {
        try {
            return query0(sql);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Returns the columns of the single row produced by the query,
     * asserting that the query yields exactly one row.
     * @param sql target SQL
     * @return the column values of the only result row
     */
    public List<Object> single(String sql) {
        try {
            List<List<Object>> query = query0(sql);
            assertThat(sql, query.size(), is(1));
            return query.get(0);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Count rows in the table.
     * @param table target table
     * @return number of row in the table, or -1 if failed
     */
    public int count(String table) {
        try {
            List<List<Object>> r = query0(MessageFormat.format("SELECT COUNT(*) FROM {0}", table));
            if (r.size() != 1) {
                return -1;
            }
            return ((Number) r.get(0).get(0)).intValue();
        } catch (Exception e) {
            // best-effort by contract: report -1 rather than failing the test
            e.printStackTrace();
            return -1;
        }
    }

    // Runs the query and materializes every row as a List of column values.
    // The ResultSet is released when the Statement closes.
    private List<List<Object>> query0(String sql) throws SQLException {
        try (Statement s = connection.createStatement()) {
            ResultSet rs = s.executeQuery(sql);
            ResultSetMetaData meta = rs.getMetaData();
            int size = meta.getColumnCount();
            List<List<Object>> results = new ArrayList<>();
            while (rs.next()) {
                Object[] columns = new Object[size];
                for (int i = 0; i < size; i++) {
                    columns[i] = rs.getObject(i + 1);
                }
                results.add(Arrays.asList(columns));
            }
            return results;
        }
    }

    /**
     * Executes DML.
     * @param sql DML
     */
    public void execute(String sql) {
        try {
            execute0(sql);
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    // Executes the statement and commits, so the effect is visible to
    // connections opened later via open().
    private void execute0(String sql) throws SQLException {
        try (PreparedStatement ps = connection.prepareStatement(sql)) {
            ps.execute();
            connection.commit();
        }
    }

    /**
     * Executes DML in target file.
     * @param sqlFile resource file
     */
    public void executeFile(String sqlFile) {
        String content = load(sqlFile);
        execute(content);
    }

    // Loads a classpath resource (relative to the test class) as a UTF-8
    // string; fails the test if the resource is missing.
    private String load(String resource) {
        try (InputStream source = context.getResourceAsStream(resource)) {
            assertThat(resource, source, is(not(nullValue())));
            StringBuilder buf = new StringBuilder();
            try (Reader reader = new InputStreamReader(source, StandardCharsets.UTF_8)) {
                char[] cbuf = new char[1024];
                while (true) {
                    int read = reader.read(cbuf);
                    if (read < 0) {
                        break;
                    }
                    buf.append(cbuf, 0, read);
                }
            }
            return buf.toString();
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }

    @Override
    public void finished(Description description) {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                throw new AssertionError(e);
            }
        }
    }
}
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote;

import static com.google.devtools.build.lib.remote.util.RxFutures.toCompletable;
import static com.google.devtools.build.lib.remote.util.RxFutures.toListenableFuture;
import static com.google.devtools.build.lib.remote.util.RxFutures.toSingle;
import static com.google.devtools.build.lib.remote.util.Utils.grpcAwareErrorMessage;

import build.bazel.remote.execution.v2.Digest;
import build.bazel.remote.execution.v2.RequestMetadata;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.devtools.build.lib.buildeventstream.BuildEvent.LocalFile;
import com.google.devtools.build.lib.buildeventstream.BuildEventArtifactUploader;
import com.google.devtools.build.lib.buildeventstream.PathConverter;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.remote.common.RemoteActionExecutionContext;
import com.google.devtools.build.lib.remote.util.DigestUtil;
import com.google.devtools.build.lib.remote.util.TracingMetadataUtils;
import com.google.devtools.build.lib.vfs.Path;
import io.netty.util.AbstractReferenceCounted;
import io.netty.util.ReferenceCounted;
import io.reactivex.rxjava3.core.Flowable;
import io.reactivex.rxjava3.core.Scheduler;
import io.reactivex.rxjava3.core.Single;
import io.reactivex.rxjava3.schedulers.Schedulers;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CancellationException;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;

/**
 * A {@link BuildEventArtifactUploader} backed by {@link RemoteCache}.
 *
 * <p>Upload failures are reported as warnings rather than build failures; files that fail to
 * upload are skipped by the resulting {@link PathConverter}. The uploader is reference-counted
 * and releases its {@link RemoteCache} retain on final deallocation.
 */
class ByteStreamBuildEventArtifactUploader extends AbstractReferenceCounted
    implements BuildEventArtifactUploader {

  private final Executor executor;
  private final ExtendedEventHandler reporter;
  private final boolean verboseFailures;
  private final RemoteCache remoteCache;
  private final String buildRequestId;
  private final String commandId;
  private final String remoteServerInstanceName;

  // Guards against double-release of remoteCache in deallocate().
  private final AtomicBoolean shutdown = new AtomicBoolean();
  private final Scheduler scheduler;

  ByteStreamBuildEventArtifactUploader(
      Executor executor,
      ExtendedEventHandler reporter,
      boolean verboseFailures,
      RemoteCache remoteCache,
      String remoteServerInstanceName,
      String buildRequestId,
      String commandId) {
    this.executor = executor;
    this.reporter = reporter;
    this.verboseFailures = verboseFailures;
    this.remoteCache = remoteCache;
    this.buildRequestId = buildRequestId;
    this.commandId = commandId;
    this.remoteServerInstanceName = remoteServerInstanceName;
    this.scheduler = Schedulers.from(executor);
  }

  /** Returns {@code true} if Bazel knows that the file is stored on a remote system. */
  private static boolean isRemoteFile(Path file) {
    return file.getFileSystem() instanceof RemoteActionFileSystem
        && ((RemoteActionFileSystem) file.getFileSystem()).isRemote(file);
  }

  /** Immutable record of what is known about a single path during the upload pipeline. */
  private static final class PathMetadata {

    private final Path path;
    // Null for directories and for files whose metadata/upload failed.
    private final Digest digest;
    private final boolean directory;
    // True when the file is already stored remotely and need not be uploaded.
    private final boolean remote;

    PathMetadata(Path path, Digest digest, boolean directory, boolean remote) {
      this.path = path;
      this.digest = digest;
      this.directory = directory;
      this.remote = remote;
    }

    public Path getPath() {
      return path;
    }

    public Digest getDigest() {
      return digest;
    }

    public boolean isDirectory() {
      return directory;
    }

    public boolean isRemote() {
      return remote;
    }
  }

  /**
   * Collects metadata for {@code file}. Depending on the underlying filesystem used this method
   * might do I/O.
   */
  private static PathMetadata readPathMetadata(Path file) throws IOException {
    if (file.isDirectory()) {
      return new PathMetadata(file, /* digest= */ null, /* directory= */ true, /* remote= */ false);
    }
    DigestUtil digestUtil = new DigestUtil(file.getFileSystem().getDigestFunction());
    Digest digest = digestUtil.compute(file);
    return new PathMetadata(file, digest, /* digest= */ false, isRemoteFile(file));
  }

  /**
   * Folds a findMissingDigests answer back into the path list: files whose digest is missing
   * remotely are kept as-is (so they get uploaded later); files already present are re-tagged
   * with {@code remote=true} so the upload step skips them.
   */
  private static void processQueryResult(
      ImmutableSet<Digest> missingDigests,
      List<PathMetadata> filesToQuery,
      List<PathMetadata> knownRemotePaths) {
    for (PathMetadata file : filesToQuery) {
      if (missingDigests.contains(file.getDigest())) {
        knownRemotePaths.add(file);
      } else {
        PathMetadata remotePathMetadata =
            new PathMetadata(
                file.getPath(), file.getDigest(), file.isDirectory(), /* remote= */ true);
        knownRemotePaths.add(remotePathMetadata);
      }
    }
  }

  // A path is a candidate for upload only if it is a digested local regular file.
  private static boolean shouldUpload(PathMetadata path) {
    return path.getDigest() != null && !path.isRemote() && !path.isDirectory();
  }

  /**
   * Asks the remote cache which upload candidates are missing remotely. On query failure a
   * warning is reported and all candidates are conservatively assumed missing (i.e. they will
   * all be uploaded). Returns the full path list with remote-presence flags updated.
   */
  private Single<List<PathMetadata>> queryRemoteCache(
      RemoteCache remoteCache, RemoteActionExecutionContext context, List<PathMetadata> paths) {
    List<PathMetadata> knownRemotePaths = new ArrayList<>(paths.size());
    List<PathMetadata> filesToQuery = new ArrayList<>();
    Set<Digest> digestsToQuery = new HashSet<>();
    for (PathMetadata path : paths) {
      if (shouldUpload(path)) {
        filesToQuery.add(path);
        digestsToQuery.add(path.getDigest());
      } else {
        knownRemotePaths.add(path);
      }
    }

    if (digestsToQuery.isEmpty()) {
      return Single.just(knownRemotePaths);
    }
    return toSingle(() -> remoteCache.findMissingDigests(context, digestsToQuery), executor)
        .onErrorResumeNext(
            error -> {
              reporterUploadError(error);
              // Assuming all digests are missing if failed to query
              return Single.just(ImmutableSet.copyOf(digestsToQuery));
            })
        .map(
            missingDigests -> {
              processQueryResult(missingDigests, filesToQuery, knownRemotePaths);
              return knownRemotePaths;
            });
  }

  // Reports an upload error as a warning; cancellations are silently ignored.
  private void reporterUploadError(Throwable error) {
    if (error instanceof CancellationException) {
      return;
    }

    String errorMessage =
        "Uploading BEP referenced local files: " + grpcAwareErrorMessage(error, verboseFailures);

    reporter.handle(Event.warn(errorMessage));
  }

  /**
   * Uploads every path that {@link #shouldUpload} selects. A failed upload is downgraded to a
   * warning and the path is re-emitted with a null digest so the PathConverter skips it.
   */
  private Single<List<PathMetadata>> uploadLocalFiles(
      RemoteCache remoteCache, RemoteActionExecutionContext context, List<PathMetadata> paths) {
    return Flowable.fromIterable(paths)
        .flatMapSingle(
            path -> {
              if (!shouldUpload(path)) {
                return Single.just(path);
              }

              return toCompletable(
                      () -> remoteCache.uploadFile(context, path.getDigest(), path.getPath()),
                      executor)
                  .toSingleDefault(path)
                  .onErrorResumeNext(
                      error -> {
                        reporterUploadError(error);
                        return Single.just(
                            new PathMetadata(
                                path.getPath(),
                                /*digest=*/ null,
                                path.isDirectory(),
                                path.isRemote()));
                      });
            })
        .collect(Collectors.toList());
  }

  /**
   * Full upload pipeline: read metadata for each file, query the cache for what is missing,
   * upload the missing files, and produce a PathConverter over the results. The RemoteCache is
   * retained for the duration of the pipeline via Single.using.
   */
  private Single<PathConverter> upload(Set<Path> files) {
    if (files.isEmpty()) {
      return Single.just(PathConverter.NO_CONVERSION);
    }

    RequestMetadata metadata =
        TracingMetadataUtils.buildMetadata(buildRequestId, commandId, "bes-upload", null);
    RemoteActionExecutionContext context = RemoteActionExecutionContext.create(metadata);

    return Single.using(
        remoteCache::retain,
        remoteCache ->
            Flowable.fromIterable(files)
                .map(
                    file -> {
                      try {
                        return readPathMetadata(file);
                      } catch (IOException e) {
                        reporterUploadError(e);
                        return new PathMetadata(
                            file, /*digest=*/ null, /*directory=*/ false, /*remote=*/ false);
                      }
                    })
                .collect(Collectors.toList())
                .flatMap(paths -> queryRemoteCache(remoteCache, context, paths))
                .flatMap(paths -> uploadLocalFiles(remoteCache, context, paths))
                .map(paths -> new PathConverterImpl(remoteServerInstanceName, paths)),
        RemoteCache::release);
  }

  @Override
  public ListenableFuture<PathConverter> upload(Map<Path, LocalFile> files) {
    return toListenableFuture(upload(files.keySet()).subscribeOn(scheduler));
  }

  @Override
  public boolean mayBeSlow() {
    return true;
  }

  @Override
  protected void deallocate() {
    // getAndSet ensures the cache retain is released at most once.
    if (shutdown.getAndSet(true)) {
      return;
    }
    remoteCache.release();
  }

  @Override
  public ReferenceCounted touch(Object o) {
    return this;
  }

  /**
   * Maps uploaded paths to bytestream:// URIs. Paths that were deliberately skipped (no digest)
   * convert to null; referencing any other unknown path is a programming error.
   */
  private static class PathConverterImpl implements PathConverter {

    private final String remoteServerInstanceName;
    private final Map<Path, Digest> pathToDigest;
    private final Set<Path> skippedPaths;

    PathConverterImpl(String remoteServerInstanceName, List<PathMetadata> uploads) {
      Preconditions.checkNotNull(uploads);
      this.remoteServerInstanceName = remoteServerInstanceName;
      pathToDigest = new HashMap<>(uploads.size());
      ImmutableSet.Builder<Path> skippedPaths = ImmutableSet.builder();
      for (PathMetadata pair : uploads) {
        Path path = pair.getPath();
        Digest digest = pair.getDigest();
        if (digest != null) {
          pathToDigest.put(path, digest);
        } else {
          skippedPaths.add(path);
        }
      }
      this.skippedPaths = skippedPaths.build();
    }

    @Override
    public String apply(Path path) {
      Preconditions.checkNotNull(path);

      Digest digest = pathToDigest.get(path);
      if (digest == null) {
        if (skippedPaths.contains(path)) {
          return null;
        }
        // It's a programming error to reference a file that has not been uploaded.
        throw new IllegalStateException(
            String.format("Illegal file reference: '%s'", path.getPathString()));
      }

      return String.format(
          "bytestream://%s/blobs/%s/%d",
          remoteServerInstanceName, digest.getHash(), digest.getSizeBytes());
    }
  }
}
/*
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl1.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.coeus.propdev.impl.budget.modular;

import org.kuali.coeus.propdev.impl.core.ProposalDevelopmentDocument;
import org.kuali.coeus.sys.api.model.ScaleTwoDecimal;
import org.kuali.coeus.common.budget.framework.core.Budget;
import org.kuali.coeus.common.budget.framework.core.BudgetDocument;
import org.kuali.coeus.common.budget.framework.core.BudgetParentDocument;
import org.kuali.coeus.common.budget.framework.period.BudgetPeriod;
import org.kuali.coeus.common.budget.framework.version.BudgetDocumentVersion;
import org.kuali.coeus.common.budget.framework.version.BudgetVersionOverview;
import org.kuali.kra.infrastructure.Constants;
import org.kuali.kra.infrastructure.KeyConstants;
import org.kuali.rice.core.api.CoreApiServiceLocator;
import org.kuali.rice.core.api.config.property.ConfigurationService;
import org.kuali.rice.coreservice.framework.CoreFrameworkServiceLocator;
import org.kuali.rice.coreservice.framework.parameter.ParameterService;
import org.kuali.rice.kew.api.exception.WorkflowException;
import org.kuali.rice.krad.service.DocumentService;
import org.kuali.rice.krad.service.KRADServiceLocatorWeb;
import org.kuali.rice.krad.util.GlobalVariables;

import java.util.Collection;
import java.util.List;
import java.util.Set;

/**
 * This validates the Budget Modular's Total Direct Cost.
 *
 * <p>
 * The validation methods in this class may produce errors and/or warnings.
 * </p>
 *
 * <p>
 * The error handling behavior in this class is important to mention
 * since it is a little different than the rest of KC.
 *
 * This class adds error messages directly to the
 * {@link GlobalVariables#getMessageMap() GlobalVariables.getMessageMap()}.
 * Make sure to add to the error map's path before calling the validate method.
 *
 * Currently warnings are generated and placed in a {@code Set<String>}
 * supplied by the caller (see the {@code warningMessages} parameter of
 * {@link #validateTotalDirectCost(BudgetParentDocument, boolean, Set) validateTotalDirectCost()}).
 * This is different because warnings are not supported by the rice framework
 * and therefore behave differently from errors.
 * </p>
 */
public final class BudgetModularTotalDirectCostRule {

    private final DocumentService documentService;
    private final ConfigurationService configService;
    private final ParameterService paramService;
    // Parameter value identifying a "complete" budget status; resolved once at construction.
    private final String budgetStatusCompleteCode;
    // Pre-resolved warning text for an invalid modular total direct cost.
    private final String tdcWarning;

    public BudgetModularTotalDirectCostRule() {
        this(CoreApiServiceLocator.getKualiConfigurationService(),
            KRADServiceLocatorWeb.getDocumentService(),
            CoreFrameworkServiceLocator.getParameterService());
    }

    /**
     * Sets the services that this rule uses. This constructor is provided for easier unit testing.
     *
     * @param configService the config service
     * @param documentService the document service
     * @param paramService the parameter service
     * @throws NullPointerException if the configService, documentService, or paramService is null
     */
    BudgetModularTotalDirectCostRule(final ConfigurationService configService,
        final DocumentService documentService, final ParameterService paramService) {
        if (configService == null) {
            throw new NullPointerException("the configService is null");
        }

        if (documentService == null) {
            throw new NullPointerException("the documentService is null");
        }

        if (paramService == null) {
            throw new NullPointerException("the paramService is null");
        }

        this.documentService = documentService;
        this.configService = configService;
        this.paramService = paramService;
        this.budgetStatusCompleteCode = this.paramService.getParameterValueAsString(
            BudgetDocument.class, Constants.BUDGET_STATUS_COMPLETE_CODE);
        this.tdcWarning = this.configService.getPropertyValueAsString(
            KeyConstants.WARNING_BUDGET_VERSION_MODULAR_INVALID_TDC);
    }

    /**
     * Validates the total direct cost (tdc) for each budget version.
     *
     * <p>
     * This method will validate tdc for every <b>completed</b> budget version
     * that has a <b>modular</b> budget.
     * </p>
     *
     * <p>
     * The tdc rule that this method is checking is whether or not the tdc
     * contains a positive value. If none of the budget versions meeting
     * the aforementioned criteria is positive than an error is produced.
     * If at least one budget version meeting the aforementioned criteria
     * is positive and at least one is not positive than a warning is produced.
     * </p>
     *
     * @param parentDocument the document to check rule against
     * @param reportErrors whether to report errors
     * @param warningMessages container to place warning messages. Warning messages
     * are added to this set to be accessed by the caller.
     * @return true if every checked budget version passed
     *
     * @throws NullPointerException if the parentDocument or warningMessages are null.
     */
    public boolean validateTotalDirectCost(final BudgetParentDocument parentDocument,
        final boolean reportErrors, Set<String> warningMessages) {
        if (parentDocument == null) {
            throw new NullPointerException("the document is null");
        }

        if (warningMessages == null) {
            throw new NullPointerException("the warningMessages is null");
        }

        boolean passed = true;

        final List<BudgetDocumentVersion> budgetDocumentOverviews =
            parentDocument.getBudgetDocumentVersions();
        // Indexed loop (not for-each) because the index is needed to build the
        // per-version error key in checkTotalDirectCost.
        for (int i = 0; i < budgetDocumentOverviews.size(); i++) {
            final BudgetDocumentVersion budgetDocumentOverview = budgetDocumentOverviews.get(i);
            BudgetVersionOverview budgetOverview = budgetDocumentOverview.getBudgetVersionOverview();
            if (this.budgetStatusCompleteCode.equalsIgnoreCase(
                budgetOverview.getBudgetStatus())) {
                final BudgetDocument budgetDocument = this.getBudgetDocument(
                    budgetOverview.getDocumentNumber());
                updateDocumentBudget(budgetDocument, budgetOverview);
                passed &= this.checkTotalDirectCost(budgetDocument, i,
                    reportErrors, warningMessages);
            }
        }

        return passed;
    }

    /*
     * The budgetdocument.budget may not have complete data match with version,
     * so copy the authoritative flags from the version overview onto the budget
     * before checking the rule.
     */
    private void updateDocumentBudget(BudgetDocument budgetDocument, BudgetVersionOverview version) {
        Budget budget = budgetDocument.getBudget();
        budget.setFinalVersionFlag(version.isFinalVersionFlag());
        budget.setBudgetStatus(version.getBudgetStatus());
        budget.setModularBudgetFlag(version.getModularBudgetFlag());
    }

    /**
     * Checks the tdc on a {@link BudgetDocument BudgetDocument}
     * following the business rules described at
     * {@link #validateTotalDirectCost(BudgetParentDocument, boolean, Set) validateTotalDirectCost()}
     *
     * @param budgetDocument the current budget document
     * @param currentIndex the current index corresponding to the document.
     * @param reportErrors whether to report errors
     * @param warningMessages container to place warning messages.
     * @return true if no errors false if errors
     */
    private boolean checkTotalDirectCost(final BudgetDocument budgetDocument,
        final int currentIndex, final boolean reportErrors, Set<String> warningMessages) {
        assert budgetDocument != null : "the budget overview was null";
        assert currentIndex >= 0 : "the current index was not valid, index: " + currentIndex;
        assert warningMessages != null : "the warningMessages is null";

        // Only explicitly-FALSE skips the check; a null flag is treated as modular
        // and still validated — TODO confirm that null-flag handling is intended.
        if (Boolean.FALSE.equals(budgetDocument.getBudget().getModularBudgetFlag())) {
            return true;
        }

        final Collection<BudgetPeriod> budgetPeriods = budgetDocument.getBudget().getBudgetPeriods();
        if (budgetPeriods != null) {
            // -1 is a sentinel meaning "no (non-null) budget period seen yet"; it is
            // promoted to 0 on the first period so that an empty period collection
            // never triggers the positiveCount == 0 error below.
            int positiveCount = -1;
            for (final BudgetPeriod budgetPeriod : budgetPeriods) {
                if (budgetPeriod != null) {
                    final BudgetModular budgetModular = budgetPeriod.getBudgetModular();
                    positiveCount = (positiveCount != -1) ? positiveCount : 0;
                    if (budgetModular != null) {
                        final ScaleTwoDecimal tdc = budgetModular.getTotalDirectCost();
                        if (tdc.isPositive()) {
                            positiveCount++;
                        } else {
                            // non-positive tdc on a period: warn, but do not fail outright
                            warningMessages.add(this.tdcWarning);
                        }
                    } else {
                        // period with no modular data at all is treated like a non-positive tdc
                        warningMessages.add(this.tdcWarning);
                    }
                }
            }
            // Error only when periods exist and none of them had a positive tdc.
            if (positiveCount == 0 && reportErrors) {
                GlobalVariables.getMessageMap().putError("budgetVersionOverview["
                        + currentIndex + "].budgetStatus",
                    KeyConstants.ERROR_BUDGET_STATUS_COMPLETE_WHEN_NOT_MODULER);
                return false;
            }
        }

        return true;
    }

    /**
     * Retrieves a budget document from a document number through the
     * {@link DocumentService DocumentService}.
     *
     * @param docNumber the document number
     *
     * @return the budget document
     * @throws RuntimeException if a problem occurs getting the BudgetDocument
     * once an exception hierarchy has been created
     */
    private BudgetDocument getBudgetDocument(final String docNumber) {
        assert docNumber != null : "docNumber is null";
        assert docNumber.trim().length() > 0 : "docNumber whitespace or empty";

        try {
            return (BudgetDocument) this.documentService.getByDocumentHeaderId(docNumber);
        } catch (final WorkflowException e) {
            throw new RuntimeException("Error getting document by header id, document number ["
                + docNumber + "]", e);
        }
    }
}
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.authorization.fluent; import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedFlux; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.Response; import com.azure.core.util.Context; import com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphExtensionInner; import com.azure.resourcemanager.authorization.fluent.models.MicrosoftGraphTodoTaskInner; import com.azure.resourcemanager.authorization.fluent.models.UsersTodoListsExpand; import com.azure.resourcemanager.authorization.fluent.models.UsersTodoListsOrderby; import com.azure.resourcemanager.authorization.fluent.models.UsersTodoListsSelect; import java.util.List; import reactor.core.publisher.Mono; /** An instance of this class provides access to all the operations defined in UsersTodoListsClient. */ public interface UsersTodoListsClient { /** * Get extensions from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param top Show only the first n items. * @param skip Skip the first n items. * @param search Search items by search phrases. * @param filter Filter items by property values. * @param count Include count of items. * @param orderby Order items by property values. * @param select Select properties to be returned. * @param expand Expand related entities. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extensions from users. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<MicrosoftGraphExtensionInner> listExtensionsAsync( String userId, String todoTaskListId, Integer top, Integer skip, String search, String filter, Boolean count, List<UsersTodoListsOrderby> orderby, List<String> select, List<String> expand); /** * Get extensions from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extensions from users. */ @ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<MicrosoftGraphExtensionInner> listExtensionsAsync(String userId, String todoTaskListId); /** * Get extensions from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extensions from users. */ @ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<MicrosoftGraphExtensionInner> listExtensions(String userId, String todoTaskListId); /** * Get extensions from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param top Show only the first n items. * @param skip Skip the first n items. * @param search Search items by search phrases. * @param filter Filter items by property values. * @param count Include count of items. * @param orderby Order items by property values. * @param select Select properties to be returned. * @param expand Expand related entities. 
* @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extensions from users. */ @ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<MicrosoftGraphExtensionInner> listExtensions( String userId, String todoTaskListId, Integer top, Integer skip, String search, String filter, Boolean count, List<UsersTodoListsOrderby> orderby, List<String> select, List<String> expand, Context context); /** * Create new navigation property to extensions for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param body New navigation property. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extension. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<MicrosoftGraphExtensionInner>> createExtensionsWithResponseAsync( String userId, String todoTaskListId, MicrosoftGraphExtensionInner body); /** * Create new navigation property to extensions for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param body New navigation property. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extension. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) Mono<MicrosoftGraphExtensionInner> createExtensionsAsync( String userId, String todoTaskListId, MicrosoftGraphExtensionInner body); /** * Create new navigation property to extensions for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param body New navigation property. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extension. */ @ServiceMethod(returns = ReturnType.SINGLE) MicrosoftGraphExtensionInner createExtensions( String userId, String todoTaskListId, MicrosoftGraphExtensionInner body); /** * Create new navigation property to extensions for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param body New navigation property. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extension. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<MicrosoftGraphExtensionInner> createExtensionsWithResponse( String userId, String todoTaskListId, MicrosoftGraphExtensionInner body, Context context); /** * Get extensions from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param select Select properties to be returned. * @param expand Expand related entities. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extensions from users. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<MicrosoftGraphExtensionInner>> getExtensionsWithResponseAsync( String userId, String todoTaskListId, String extensionId, List<String> select, List<String> expand); /** * Get extensions from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param select Select properties to be returned. * @param expand Expand related entities. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extensions from users. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<MicrosoftGraphExtensionInner> getExtensionsAsync( String userId, String todoTaskListId, String extensionId, List<String> select, List<String> expand); /** * Get extensions from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extensions from users. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<MicrosoftGraphExtensionInner> getExtensionsAsync(String userId, String todoTaskListId, String extensionId); /** * Get extensions from users. 
* * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extensions from users. */ @ServiceMethod(returns = ReturnType.SINGLE) MicrosoftGraphExtensionInner getExtensions(String userId, String todoTaskListId, String extensionId); /** * Get extensions from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param select Select properties to be returned. * @param expand Expand related entities. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return extensions from users. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<MicrosoftGraphExtensionInner> getExtensionsWithResponse( String userId, String todoTaskListId, String extensionId, List<String> select, List<String> expand, Context context); /** * Update the navigation property extensions in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param body New navigation property values. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. 
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Void>> updateExtensionsWithResponseAsync( String userId, String todoTaskListId, String extensionId, MicrosoftGraphExtensionInner body); /** * Update the navigation property extensions in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param body New navigation property values. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> updateExtensionsAsync( String userId, String todoTaskListId, String extensionId, MicrosoftGraphExtensionInner body); /** * Update the navigation property extensions in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param body New navigation property values. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) void updateExtensions(String userId, String todoTaskListId, String extensionId, MicrosoftGraphExtensionInner body); /** * Update the navigation property extensions in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param body New navigation property values. 
* @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<Void> updateExtensionsWithResponse( String userId, String todoTaskListId, String extensionId, MicrosoftGraphExtensionInner body, Context context); /** * Delete navigation property extensions for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param ifMatch ETag. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Void>> deleteExtensionsWithResponseAsync( String userId, String todoTaskListId, String extensionId, String ifMatch); /** * Delete navigation property extensions for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param ifMatch ETag. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> deleteExtensionsAsync(String userId, String todoTaskListId, String extensionId, String ifMatch); /** * Delete navigation property extensions for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> deleteExtensionsAsync(String userId, String todoTaskListId, String extensionId); /** * Delete navigation property extensions for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) void deleteExtensions(String userId, String todoTaskListId, String extensionId); /** * Delete navigation property extensions for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param extensionId key: id of extension. * @param ifMatch ETag. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. 
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<Void> deleteExtensionsWithResponse( String userId, String todoTaskListId, String extensionId, String ifMatch, Context context); /** * Get tasks from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param top Show only the first n items. * @param skip Skip the first n items. * @param search Search items by search phrases. * @param filter Filter items by property values. * @param count Include count of items. * @param orderby Order items by property values. * @param select Select properties to be returned. * @param expand Expand related entities. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return tasks from users. */ @ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<MicrosoftGraphTodoTaskInner> listTasksAsync( String userId, String todoTaskListId, Integer top, Integer skip, String search, String filter, Boolean count, List<UsersTodoListsOrderby> orderby, List<UsersTodoListsSelect> select, List<UsersTodoListsExpand> expand); /** * Get tasks from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return tasks from users. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) PagedFlux<MicrosoftGraphTodoTaskInner> listTasksAsync(String userId, String todoTaskListId); /** * Get tasks from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return tasks from users. */ @ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<MicrosoftGraphTodoTaskInner> listTasks(String userId, String todoTaskListId); /** * Get tasks from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param top Show only the first n items. * @param skip Skip the first n items. * @param search Search items by search phrases. * @param filter Filter items by property values. * @param count Include count of items. * @param orderby Order items by property values. * @param select Select properties to be returned. * @param expand Expand related entities. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return tasks from users. 
*/ @ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<MicrosoftGraphTodoTaskInner> listTasks( String userId, String todoTaskListId, Integer top, Integer skip, String search, String filter, Boolean count, List<UsersTodoListsOrderby> orderby, List<UsersTodoListsSelect> select, List<UsersTodoListsExpand> expand, Context context); /** * Create new navigation property to tasks for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param body New navigation property. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return todoTask. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<MicrosoftGraphTodoTaskInner>> createTasksWithResponseAsync( String userId, String todoTaskListId, MicrosoftGraphTodoTaskInner body); /** * Create new navigation property to tasks for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param body New navigation property. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return todoTask. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<MicrosoftGraphTodoTaskInner> createTasksAsync( String userId, String todoTaskListId, MicrosoftGraphTodoTaskInner body); /** * Create new navigation property to tasks for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param body New navigation property. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return todoTask. */ @ServiceMethod(returns = ReturnType.SINGLE) MicrosoftGraphTodoTaskInner createTasks(String userId, String todoTaskListId, MicrosoftGraphTodoTaskInner body); /** * Create new navigation property to tasks for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param body New navigation property. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return todoTask. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<MicrosoftGraphTodoTaskInner> createTasksWithResponse( String userId, String todoTaskListId, MicrosoftGraphTodoTaskInner body, Context context); /** * Get tasks from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param select Select properties to be returned. * @param expand Expand related entities. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return tasks from users. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<MicrosoftGraphTodoTaskInner>> getTasksWithResponseAsync( String userId, String todoTaskListId, String todoTaskId, List<UsersTodoListsSelect> select, List<UsersTodoListsExpand> expand); /** * Get tasks from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param select Select properties to be returned. * @param expand Expand related entities. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return tasks from users. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<MicrosoftGraphTodoTaskInner> getTasksAsync( String userId, String todoTaskListId, String todoTaskId, List<UsersTodoListsSelect> select, List<UsersTodoListsExpand> expand); /** * Get tasks from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return tasks from users. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<MicrosoftGraphTodoTaskInner> getTasksAsync(String userId, String todoTaskListId, String todoTaskId); /** * Get tasks from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return tasks from users. */ @ServiceMethod(returns = ReturnType.SINGLE) MicrosoftGraphTodoTaskInner getTasks(String userId, String todoTaskListId, String todoTaskId); /** * Get tasks from users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param select Select properties to be returned. * @param expand Expand related entities. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return tasks from users. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<MicrosoftGraphTodoTaskInner> getTasksWithResponse( String userId, String todoTaskListId, String todoTaskId, List<UsersTodoListsSelect> select, List<UsersTodoListsExpand> expand, Context context); /** * Update the navigation property tasks in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param body New navigation property values. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. 
*/ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Void>> updateTasksWithResponseAsync( String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphTodoTaskInner body); /** * Update the navigation property tasks in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param body New navigation property values. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> updateTasksAsync( String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphTodoTaskInner body); /** * Update the navigation property tasks in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param body New navigation property values. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) void updateTasks(String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphTodoTaskInner body); /** * Update the navigation property tasks in users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param body New navigation property values. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<Void> updateTasksWithResponse( String userId, String todoTaskListId, String todoTaskId, MicrosoftGraphTodoTaskInner body, Context context); /** * Delete navigation property tasks for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param ifMatch ETag. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Response<Void>> deleteTasksWithResponseAsync( String userId, String todoTaskListId, String todoTaskId, String ifMatch); /** * Delete navigation property tasks for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param ifMatch ETag. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> deleteTasksAsync(String userId, String todoTaskListId, String todoTaskId, String ifMatch); /** * Delete navigation property tasks for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. 
* @param todoTaskId key: id of todoTask. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the completion. */ @ServiceMethod(returns = ReturnType.SINGLE) Mono<Void> deleteTasksAsync(String userId, String todoTaskListId, String todoTaskId); /** * Delete navigation property tasks for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ @ServiceMethod(returns = ReturnType.SINGLE) void deleteTasks(String userId, String todoTaskListId, String todoTaskId); /** * Delete navigation property tasks for users. * * @param userId key: id of user. * @param todoTaskListId key: id of todoTaskList. * @param todoTaskId key: id of todoTask. * @param ifMatch ETag. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.resourcemanager.authorization.fluent.models.OdataErrorMainException thrown if the request is * rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response. */ @ServiceMethod(returns = ReturnType.SINGLE) Response<Void> deleteTasksWithResponse( String userId, String todoTaskListId, String todoTaskId, String ifMatch, Context context); }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cassandra.db;

import java.nio.ByteBuffer;
import java.util.Set;

import org.junit.Test;

import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.db.index.PerColumnSecondaryIndex;
import org.apache.cassandra.db.index.PerRowSecondaryIndex;
import org.apache.cassandra.db.index.SecondaryIndexSearcher;
import org.apache.cassandra.utils.ByteBufferUtil;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Exercises secondary-index value-size validation: the test expects a
 * per-column index to reject column values larger than 64k while a per-row
 * index accepts them, and both to accept values under 64k.
 * (The {@code validate(Column)} implementation under test is inherited from
 * the index base classes — not visible in this file.)
 */
public class SecondaryIndexColumnSizeTest
{
    @Test
    public void test64kColumn()
    {
        // A byte buffer with capacity greater than 64k (65 KiB).
        ByteBuffer buffer = ByteBuffer.allocate(1024 * 65);
        buffer.clear();

        // Write slightly MORE than 64k of data: (64k / 4) + 1 four-byte ints.
        for (int i=0; i<1024*64/4 + 1; i++)
            buffer.putInt(0);

        // Flip so the buffer is readable; the Column wraps this same buffer.
        buffer.flip();
        Column column = new Column(ByteBufferUtil.bytes("test"), buffer, 0);

        MockRowIndex mockRowIndex = new MockRowIndex();
        MockColumnIndex mockColumnIndex = new MockColumnIndex();

        // Over-64k value: accepted by the per-row index, rejected by the
        // per-column index.
        assertTrue(mockRowIndex.validate(column));
        assertFalse(mockColumnIndex.validate(column));

        // Rewrite the SAME buffer with a value well under 64k (one int).
        // The Column still references this buffer, so after the final flip()
        // its value is only 4 bytes long.
        buffer.flip();
        buffer.clear();
        buffer.putInt(20);
        buffer.flip();

        // Under-64k value: both index kinds accept it.
        assertTrue(mockRowIndex.validate(column));
        assertTrue(mockColumnIndex.validate(column));
    }

    // Minimal per-row index stub: every hook is a no-op; only the inherited
    // validate(Column) behaviour matters for this test.
    private class MockRowIndex extends PerRowSecondaryIndex
    {
        @Override
        public void init()
        {
        }

        @Override
        public void validateOptions() throws ConfigurationException
        {
        }

        @Override
        public String getIndexName()
        {
            return null;
        }

        @Override
        protected SecondaryIndexSearcher createSecondaryIndexSearcher(Set<ByteBuffer> columns)
        {
            return null;
        }

        @Override
        public void forceBlockingFlush()
        {
        }

        @Override
        public long getLiveSize()
        {
            return 0;
        }

        @Override
        public ColumnFamilyStore getIndexCfs()
        {
            return null;
        }

        @Override
        public void removeIndex(ByteBuffer columnName)
        {
        }

        @Override
        public void invalidate()
        {
        }

        @Override
        public void truncateBlocking(long truncatedAt)
        {
        }

        public void index(ByteBuffer rowKey, ColumnFamily cf)
        {
        }

        public void index(ByteBuffer rowKey)
        {
        }

        public void delete(DecoratedKey key)
        {
        }

        @Override
        public void reload()
        {
        }
    }

    // Minimal per-column index stub, same no-op pattern as MockRowIndex.
    private class MockColumnIndex extends PerColumnSecondaryIndex
    {
        @Override
        public void init()
        {
        }

        @Override
        public void validateOptions() throws ConfigurationException
        {
        }

        @Override
        public String getIndexName()
        {
            return null;
        }

        @Override
        protected SecondaryIndexSearcher createSecondaryIndexSearcher(Set<ByteBuffer> columns)
        {
            return null;
        }

        @Override
        public void forceBlockingFlush()
        {
        }

        @Override
        public long getLiveSize()
        {
            return 0;
        }

        @Override
        public ColumnFamilyStore getIndexCfs()
        {
            return null;
        }

        @Override
        public void removeIndex(ByteBuffer columnName)
        {
        }

        @Override
        public void invalidate()
        {
        }

        @Override
        public void truncateBlocking(long truncatedAt)
        {
        }

        @Override
        public void delete(ByteBuffer rowKey, Column col)
        {
        }

        @Override
        public void insert(ByteBuffer rowKey, Column col)
        {
        }

        @Override
        public void update(ByteBuffer rowKey, Column col)
        {
        }

        @Override
        public void reload()
        {
        }
    }
}
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2015 ThePsionic
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.thepsionic.undercraft.init;

import com.thepsionic.undercraft.items.UndercraftHealItem;
import com.thepsionic.undercraft.items.UndercraftMainItem;
import com.thepsionic.undercraft.items.consumable.*;
import com.thepsionic.undercraft.ref.ItemRef;
import com.thepsionic.undercraft.ref.ModRef;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.block.model.ModelResourceLocation;
import net.minecraft.client.renderer.color.IItemColor;
import net.minecraft.client.renderer.color.ItemColors;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraftforge.fml.common.registry.GameRegistry;

import java.util.Random;

/**
 * Central registry for the mod's items.
 *
 * <p>Lifecycle (all methods must be called in this order during mod init):
 * {@link #init()} constructs the item instances, {@link #register()} adds them
 * to the {@link GameRegistry}, {@link #registerRenders()} binds their models,
 * and {@link #registerColors()} (client-side) attaches tint handlers.
 */
public class UndercraftItems {

    //Consumables
    public static Item bandage, monstercandy, spiderdonut, spidercider, butterscotchpie, nicecream, snowmanpiece,
            bisicle, unisicle, cinnamonbunny, astronautfood, crabapple, seatea, abandonedquiche, temmieflakes,
            dogsalad, instantnoodles, hotdog, hotcat, junkfood, hushpuppy, starfait, glamburger, legendaryhero,
            facesteak, popatochisps, badmemory, lastdream;

    //Other
    public static Item heart;

    /**
     * Instantiates every item. UndercraftHealItem args are
     * (name, isConsumable?, healAmount, ?, stackParam) — semantics of the last
     * two ints are defined by UndercraftHealItem (not visible here); values are
     * preserved exactly from the original table.
     */
    public static void init() {
        //Consumables
        bandage = new UndercraftHealItem(ItemRef.BANDAGE, true, 150, 2, 2);
        monstercandy = new UndercraftHealItem(ItemRef.MONSTERCANDY, true, 25, 2, 2);
        spiderdonut = new UndercraftHealItem(ItemRef.SPIDERDONUT, true, 30, 2, 2);
        spidercider = new UndercraftHealItem(ItemRef.SPIDERCIDER, true, 60, 2, 4);
        butterscotchpie = new UndercraftHealItem(ItemRef.BUTTERSCOTCHPIE, true, 180, 2, 100);
        nicecream = new NiceCream();
        snowmanpiece = new UndercraftHealItem(ItemRef.SNOWMANPIECE, true, 40, 2, 9);
        bisicle = new Bisicle();
        unisicle = new UndercraftHealItem(ItemRef.UNISICLE, true, 2, 2, 2);
        cinnamonbunny = new UndercraftHealItem(ItemRef.CINNAMONBUNNY, true, 8, 2, 4);
        astronautfood = new UndercraftHealItem(ItemRef.ASTRONAUTFOOD, true, 25, 1, 4);
        crabapple = new UndercraftHealItem(ItemRef.CRABAPPLE, true, 5, 2, 3);
        seatea = new SeaTea();
        abandonedquiche = new UndercraftHealItem(ItemRef.ABANDONEDQUICHE, true, 76, 2, 6);
        temmieflakes = new UndercraftHealItem(ItemRef.TEMMIEFLAKES, true, 2, 2, 1);
        dogsalad = new DogSalad();
        instantnoodles = new InstantNoodles();
        hotdog = new HotDog();
        hotcat = new HotCat();
        junkfood = new UndercraftHealItem(ItemRef.JUNKFOOD, true, 1, 2, 3);
        hushpuppy = new UndercraftHealItem(ItemRef.HUSHPUPPY, true, 150, 2, 13);
        starfait = new UndercraftHealItem(ItemRef.STARFAIT, true, 10, 2, 2);
        glamburger = new Glamburger();
        legendaryhero = new LegendaryHero();
        facesteak = new UndercraftHealItem(ItemRef.FACESTEAK, true, 14, 4, 12);
        popatochisps = new UndercraftHealItem(ItemRef.POPATOCHISPS, true, 35, 1, 2);
        badmemory = new BadMemory();
        lastdream = new UndercraftHealItem(ItemRef.LASTDREAM, true, 250, 1, 3);

        //Other
        heart = new UndercraftMainItem(ItemRef.HEARTTABICON, false, 999999, 4);
    }

    /** Registers every item created by {@link #init()} with the GameRegistry. */
    public static void register() {
        // Same items, same order as the original one-call-per-item version.
        Item[] allItems = {
                //Consumables
                bandage, monstercandy, spiderdonut, spidercider, butterscotchpie, nicecream, snowmanpiece,
                bisicle, unisicle, cinnamonbunny, astronautfood, crabapple, seatea, abandonedquiche,
                temmieflakes, dogsalad, instantnoodles, hotdog, hotcat, junkfood, hushpuppy, starfait,
                glamburger, legendaryhero, facesteak, popatochisps, badmemory, lastdream,
                //Other
                heart
        };
        for (Item item : allItems) {
            GameRegistry.register(item);
        }
    }

    /** Binds each item to its model (metadata 0, "inventory" variant). */
    public static void registerRenders() {
        //Consumables
        registerRender(bandage, ItemRef.BANDAGE);
        registerRender(monstercandy, ItemRef.MONSTERCANDY);
        registerRender(spiderdonut, ItemRef.SPIDERDONUT);
        registerRender(spidercider, ItemRef.SPIDERCIDER);
        registerRender(butterscotchpie, ItemRef.BUTTERSCOTCHPIE);
        registerRender(nicecream, ItemRef.NICECREAM);
        registerRender(snowmanpiece, ItemRef.SNOWMANPIECE);
        registerRender(bisicle, ItemRef.BISICLE);
        registerRender(unisicle, ItemRef.UNISICLE);
        registerRender(cinnamonbunny, ItemRef.CINNAMONBUNNY);
        registerRender(astronautfood, ItemRef.ASTRONAUTFOOD);
        registerRender(crabapple, ItemRef.CRABAPPLE);
        registerRender(seatea, ItemRef.SEATEA);
        registerRender(abandonedquiche, ItemRef.ABANDONEDQUICHE);
        registerRender(temmieflakes, ItemRef.TEMMIEFLAKES);
        registerRender(dogsalad, ItemRef.DOGSALAD);
        registerRender(instantnoodles, ItemRef.INSTANTNOODLES);
        registerRender(hotdog, ItemRef.HOTDOG);
        registerRender(hotcat, ItemRef.HOTCAT);
        registerRender(junkfood, ItemRef.JUNKFOOD);
        registerRender(hushpuppy, ItemRef.HUSHPUPPY);
        registerRender(starfait, ItemRef.STARFAIT);
        registerRender(glamburger, ItemRef.GLAMBURGER);
        registerRender(legendaryhero, ItemRef.LEGENDARYHERO);
        registerRender(facesteak, ItemRef.FACESTEAK);
        registerRender(popatochisps, ItemRef.POPATOCHISPS);
        registerRender(badmemory, ItemRef.BADMEMORY);
        registerRender(lastdream, ItemRef.LASTDREAM);

        //Other
        registerRender(heart, ItemRef.HEARTTABICON);
    }

    /** Placeholder kept for API compatibility; variant registration is not used yet. */
    public static void addVariants() {
    }

    /**
     * Attaches a tint handler to temmieflakes and monstercandy that assigns each
     * stack a random colour on first render and then persists it in the stack's
     * NBT, so the colour is stable per stack afterwards.
     */
    public static void registerColors() {
        ItemColors itemColors = Minecraft.getMinecraft().getItemColors();
        // One shared handler for both items (the original duplicated this
        // anonymous class verbatim); registerItemColorHandler accepts varargs.
        IItemColor persistentRandomColor = new IItemColor() {
            @Override
            public int getColorFromItemstack(ItemStack stack, int tintIndex) {
                // Lazily create the NBT compound; this mutates the rendered stack.
                if (stack.getTagCompound() == null)
                    stack.setTagCompound(new NBTTagCompound());
                // First render of this tint layer: roll and remember a colour.
                if (!stack.getTagCompound().hasKey("color" + tintIndex))
                    stack.getTagCompound().setInteger("color" + tintIndex, new Random().nextInt(0xFFFFFF));
                return stack.getTagCompound().getInteger("color" + tintIndex);
            }
        };
        itemColors.registerItemColorHandler(persistentRandomColor, temmieflakes, monstercandy);
    }

    /**
     * Registers an Item in the ItemModelMesher with the metadata 0 and the given model name
     *
     * @param item       The Item to register the render for
     * @param renderName The name of the model to register the Item for
     */
    private static void registerRender(Item item, String renderName) {
        registerRender(item, 0, renderName);
    }

    /**
     * Registers an Item in the ItemModelMesher with the given metadata and model name
     *
     * @param item       The Item to register the render for
     * @param meta       The metadata of the Item to register
     * @param renderName The name of the model to register the Item for
     */
    private static void registerRender(Item item, int meta, String renderName) {
        Minecraft.getMinecraft().getRenderItem().getItemModelMesher()
                .register(item, meta, new ModelResourceLocation(ModRef.MODID + ":" + renderName, "inventory"));
    }
}
/* -----------------------------------------------------------------------------
 * Rule$zone_adjustments.java
 * -----------------------------------------------------------------------------
 *
 * Producer : com.parse2.aparse.Parser 2.2
 * Produced : Thu Jan 05 18:57:59 CET 2017
 *
 * -----------------------------------------------------------------------------
 */

package org.murillo.abnf;

import java.util.ArrayList;

/**
 * Generated recursive-descent matcher for the ABNF rule {@code zone-adjustments}.
 *
 * Reading the terminals matched below, the rule parsed here is effectively:
 *
 *   zone-adjustments = %x7a "=" *( SP time SP ["-"] typed-time ) CRLF
 *
 * i.e. an SDP "z=" line. This file is machine-generated (aparse 2.2) --
 * do not hand-edit the parsing logic; regenerate from the grammar instead.
 * Comments below only annotate the generated structure.
 */
final public class Rule$zone_adjustments extends Rule {
    // Generated rules are built only via parse(); spelling is the matched text,
    // rules are the matched sub-rules in order.
    private Rule$zone_adjustments(String spelling, ArrayList<Rule> rules) {
        super(spelling, rules);
    }

    // Visitor dispatch hook for walking the parse tree.
    public Object accept(Visitor visitor) {
        return visitor.visit(this);
    }

    /**
     * Attempts to match {@code zone-adjustments} at the current context index.
     * On failure the context index is restored and {@code null} is returned.
     */
    public static Rule$zone_adjustments parse(ParserContext context) {
        context.push("zone-adjustments");

        boolean parsed = true;
        int s0 = context.index;           // backtrack point for the whole rule
        ArrayList<Rule> e0 = new ArrayList<Rule>();
        Rule rule;

        parsed = false;
        if (!parsed) {
            {
                ArrayList<Rule> e1 = new ArrayList<Rule>();
                int s1 = context.index;   // backtrack point for this alternative
                parsed = true;
                // exactly one %x7a (the literal 'z')
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Terminal$NumericValue.parse(context, "%x7a", "[\\x7a]", 1);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // exactly one "="
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Terminal$StringValue.parse(context, "=");
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                // zero or more repetitions of ( SP time SP ["-"] typed-time )
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; f1; i1++) {
                        int g1 = context.index;
                        parsed = false;
                        if (!parsed) {
                            {
                                ArrayList<Rule> e2 = new ArrayList<Rule>();
                                int s2 = context.index;
                                parsed = true;
                                // one SP
                                if (parsed) {
                                    boolean f2 = true;
                                    int c2 = 0;
                                    for (int i2 = 0; i2 < 1 && f2; i2++) {
                                        rule = Rule$SP.parse(context);
                                        if ((f2 = rule != null)) {
                                            e2.add(rule);
                                            c2++;
                                        }
                                    }
                                    parsed = c2 == 1;
                                }
                                // one time
                                if (parsed) {
                                    boolean f2 = true;
                                    int c2 = 0;
                                    for (int i2 = 0; i2 < 1 && f2; i2++) {
                                        rule = Rule$time.parse(context);
                                        if ((f2 = rule != null)) {
                                            e2.add(rule);
                                            c2++;
                                        }
                                    }
                                    parsed = c2 == 1;
                                }
                                // one SP
                                if (parsed) {
                                    boolean f2 = true;
                                    int c2 = 0;
                                    for (int i2 = 0; i2 < 1 && f2; i2++) {
                                        rule = Rule$SP.parse(context);
                                        if ((f2 = rule != null)) {
                                            e2.add(rule);
                                            c2++;
                                        }
                                    }
                                    parsed = c2 == 1;
                                }
                                // optional "-" (zero or one; note parsed is forced
                                // true afterwards, so absence never fails the group)
                                if (parsed) {
                                    boolean f2 = true;
                                    int c2 = 0;
                                    for (int i2 = 0; i2 < 1 && f2; i2++) {
                                        int g2 = context.index;
                                        parsed = false;
                                        if (!parsed) {
                                            {
                                                ArrayList<Rule> e3 = new ArrayList<Rule>();
                                                int s3 = context.index;
                                                parsed = true;
                                                if (parsed) {
                                                    boolean f3 = true;
                                                    int c3 = 0;
                                                    for (int i3 = 0; i3 < 1 && f3; i3++) {
                                                        rule = Terminal$StringValue.parse(context, "-");
                                                        if ((f3 = rule != null)) {
                                                            e3.add(rule);
                                                            c3++;
                                                        }
                                                    }
                                                    parsed = c3 == 1;
                                                }
                                                if (parsed)
                                                    e2.addAll(e3);
                                                else
                                                    context.index = s3;
                                            }
                                        }
                                        // only count an iteration that consumed input
                                        if (context.index > g2)
                                            c2++;
                                        f2 = c2 > i2;
                                    }
                                    parsed = true;
                                }
                                // one typed-time
                                if (parsed) {
                                    boolean f2 = true;
                                    int c2 = 0;
                                    for (int i2 = 0; i2 < 1 && f2; i2++) {
                                        rule = Rule$typed_time.parse(context);
                                        if ((f2 = rule != null)) {
                                            e2.add(rule);
                                            c2++;
                                        }
                                    }
                                    parsed = c2 == 1;
                                }
                                if (parsed)
                                    e1.addAll(e2);
                                else
                                    context.index = s2;  // roll back this repetition
                            }
                        }
                        // stop repeating once an iteration consumes nothing
                        if (context.index > g1)
                            c1++;
                        f1 = c1 > i1;
                    }
                    parsed = true;  // *(...) always succeeds, possibly with 0 matches
                }
                // terminating CRLF
                if (parsed) {
                    boolean f1 = true;
                    int c1 = 0;
                    for (int i1 = 0; i1 < 1 && f1; i1++) {
                        rule = Rule$CRLF.parse(context);
                        if ((f1 = rule != null)) {
                            e1.add(rule);
                            c1++;
                        }
                    }
                    parsed = c1 == 1;
                }
                if (parsed)
                    e0.addAll(e1);
                else
                    context.index = s1;
            }
        }

        rule = null;
        if (parsed)
            // spelling = exactly the text consumed between s0 and the final index
            rule = new Rule$zone_adjustments(context.text.substring(s0, context.index), e0);
        else
            context.index = s0;  // full backtrack on failure

        context.pop("zone-adjustments", parsed);

        return (Rule$zone_adjustments)rule;
    }
}
/* -----------------------------------------------------------------------------
 * eof
 * -----------------------------------------------------------------------------
 */
/*
 * Copyright (C) 2015 Giuseppe Cardone <ippatsuman@gmail.com>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.gcardone.junidecode;

/**
 * Character map for Unicode characters with codepoint U+93xx.
 * @author Giuseppe Cardone
 * @version 0.1
 */
class X93 {

    // Transliteration lookup table: map[i] is the ASCII transliteration of
    // codepoint U+9300 + i. Entries carry a trailing space separator, and
    // "[?] " marks characters with no known transliteration. Pure data --
    // do not reorder; the array index IS the low byte of the codepoint.
    public static final String[] map = new String[]{
        "Lun ", // 0x00
        "Kua ", // 0x01
        "Ling ", // 0x02
        "Bei ", // 0x03
        "Lu ", // 0x04
        "Li ", // 0x05
        "Qiang ", // 0x06
        "Pou ", // 0x07
        "Juan ", // 0x08
        "Min ", // 0x09
        "Zui ", // 0x0a
        "Peng ", // 0x0b
        "An ", // 0x0c
        "Pi ", // 0x0d
        "Xian ", // 0x0e
        "Ya ", // 0x0f
        "Zhui ", // 0x10
        "Lei ", // 0x11
        "A ", // 0x12
        "Kong ", // 0x13
        "Ta ", // 0x14
        "Kun ", // 0x15
        "Du ", // 0x16
        "Wei ", // 0x17
        "Chui ", // 0x18
        "Zi ", // 0x19
        "Zheng ", // 0x1a
        "Ben ", // 0x1b
        "Nie ", // 0x1c
        "Cong ", // 0x1d
        "Qun ", // 0x1e
        "Tan ", // 0x1f
        "Ding ", // 0x20
        "Qi ", // 0x21
        "Qian ", // 0x22
        "Zhuo ", // 0x23
        "Qi ", // 0x24
        "Yu ", // 0x25
        "Jin ", // 0x26
        "Guan ", // 0x27
        "Mao ", // 0x28
        "Chang ", // 0x29
        "Tian ", // 0x2a
        "Xi ", // 0x2b
        "Lian ", // 0x2c
        "Tao ", // 0x2d
        "Gu ", // 0x2e
        "Cuo ", // 0x2f
        "Shu ", // 0x30
        "Zhen ", // 0x31
        "Lu ", // 0x32
        "Meng ", // 0x33
        "Lu ", // 0x34
        "Hua ", // 0x35
        "Biao ", // 0x36
        "Ga ", // 0x37
        "Lai ", // 0x38
        "Ken ", // 0x39
        "Kazari ", // 0x3a
        "Bu ", // 0x3b
        "Nai ", // 0x3c
        "Wan ", // 0x3d
        "Zan ", // 0x3e
        "[?] ", // 0x3f
        "De ", // 0x40
        "Xian ", // 0x41
        "[?] ", // 0x42
        "Huo ", // 0x43
        "Liang ", // 0x44
        "[?] ", // 0x45
        "Men ", // 0x46
        "Kai ", // 0x47
        "Ying ", // 0x48
        "Di ", // 0x49
        "Lian ", // 0x4a
        "Guo ", // 0x4b
        "Xian ", // 0x4c
        "Du ", // 0x4d
        "Tu ", // 0x4e
        "Wei ", // 0x4f
        "Cong ", // 0x50
        "Fu ", // 0x51
        "Rou ", // 0x52
        "Ji ", // 0x53
        "E ", // 0x54
        "Rou ", // 0x55
        "Chen ", // 0x56
        "Ti ", // 0x57
        "Zha ", // 0x58
        "Hong ", // 0x59
        "Yang ", // 0x5a
        "Duan ", // 0x5b
        "Xia ", // 0x5c
        "Yu ", // 0x5d
        "Keng ", // 0x5e
        "Xing ", // 0x5f
        "Huang ", // 0x60
        "Wei ", // 0x61
        "Fu ", // 0x62
        "Zhao ", // 0x63
        "Cha ", // 0x64
        "Qie ", // 0x65
        "She ", // 0x66
        "Hong ", // 0x67
        "Kui ", // 0x68
        "Tian ", // 0x69
        "Mou ", // 0x6a
        "Qiao ", // 0x6b
        "Qiao ", // 0x6c
        "Hou ", // 0x6d
        "Tou ", // 0x6e
        "Cong ", // 0x6f
        "Huan ", // 0x70
        "Ye ", // 0x71
        "Min ", // 0x72
        "Jian ", // 0x73
        "Duan ", // 0x74
        "Jian ", // 0x75
        "Song ", // 0x76
        "Kui ", // 0x77
        "Hu ", // 0x78
        "Xuan ", // 0x79
        "Duo ", // 0x7a
        "Jie ", // 0x7b
        "Zhen ", // 0x7c
        "Bian ", // 0x7d
        "Zhong ", // 0x7e
        "Zi ", // 0x7f
        "Xiu ", // 0x80
        "Ye ", // 0x81
        "Mei ", // 0x82
        "Pai ", // 0x83
        "Ai ", // 0x84
        "Jie ", // 0x85
        "[?] ", // 0x86
        "Mei ", // 0x87
        "Chuo ", // 0x88
        "Ta ", // 0x89
        "Bang ", // 0x8a
        "Xia ", // 0x8b
        "Lian ", // 0x8c
        "Suo ", // 0x8d
        "Xi ", // 0x8e
        "Liu ", // 0x8f
        "Zu ", // 0x90
        "Ye ", // 0x91
        "Nou ", // 0x92
        "Weng ", // 0x93
        "Rong ", // 0x94
        "Tang ", // 0x95
        "Suo ", // 0x96
        "Qiang ", // 0x97
        "Ge ", // 0x98
        "Shuo ", // 0x99
        "Chui ", // 0x9a
        "Bo ", // 0x9b
        "Pan ", // 0x9c
        "Sa ", // 0x9d
        "Bi ", // 0x9e
        "Sang ", // 0x9f
        "Gang ", // 0xa0
        "Zi ", // 0xa1
        "Wu ", // 0xa2
        "Ying ", // 0xa3
        "Huang ", // 0xa4
        "Tiao ", // 0xa5
        "Liu ", // 0xa6
        "Kai ", // 0xa7
        "Sun ", // 0xa8
        "Sha ", // 0xa9
        "Sou ", // 0xaa
        "Wan ", // 0xab
        "Hao ", // 0xac
        "Zhen ", // 0xad
        "Zhen ", // 0xae
        "Luo ", // 0xaf
        "Yi ", // 0xb0
        "Yuan ", // 0xb1
        "Tang ", // 0xb2
        "Nie ", // 0xb3
        "Xi ", // 0xb4
        "Jia ", // 0xb5
        "Ge ", // 0xb6
        "Ma ", // 0xb7
        "Juan ", // 0xb8
        "Kasugai ", // 0xb9
        "Habaki ", // 0xba
        "Suo ", // 0xbb
        "[?] ", // 0xbc
        "[?] ", // 0xbd
        "[?] ", // 0xbe
        "Na ", // 0xbf
        "Lu ", // 0xc0
        "Suo ", // 0xc1
        "Ou ", // 0xc2
        "Zu ", // 0xc3
        "Tuan ", // 0xc4
        "Xiu ", // 0xc5
        "Guan ", // 0xc6
        "Xuan ", // 0xc7
        "Lian ", // 0xc8
        "Shou ", // 0xc9
        "Ao ", // 0xca
        "Man ", // 0xcb
        "Mo ", // 0xcc
        "Luo ", // 0xcd
        "Bi ", // 0xce
        "Wei ", // 0xcf
        "Liu ", // 0xd0
        "Di ", // 0xd1
        "Qiao ", // 0xd2
        "Cong ", // 0xd3
        "Yi ", // 0xd4
        "Lu ", // 0xd5
        "Ao ", // 0xd6
        "Keng ", // 0xd7
        "Qiang ", // 0xd8
        "Cui ", // 0xd9
        "Qi ", // 0xda
        "Chang ", // 0xdb
        "Tang ", // 0xdc
        "Man ", // 0xdd
        "Yong ", // 0xde
        "Chan ", // 0xdf
        "Feng ", // 0xe0
        "Jing ", // 0xe1
        "Biao ", // 0xe2
        "Shu ", // 0xe3
        "Lou ", // 0xe4
        "Xiu ", // 0xe5
        "Cong ", // 0xe6
        "Long ", // 0xe7
        "Zan ", // 0xe8
        "Jian ", // 0xe9
        "Cao ", // 0xea
        "Li ", // 0xeb
        "Xia ", // 0xec
        "Xi ", // 0xed
        "Kang ", // 0xee
        "[?] ", // 0xef
        "Beng ", // 0xf0
        "[?] ", // 0xf1
        "[?] ", // 0xf2
        "Zheng ", // 0xf3
        "Lu ", // 0xf4
        "Hua ", // 0xf5
        "Ji ", // 0xf6
        "Pu ", // 0xf7
        "Hui ", // 0xf8
        "Qiang ", // 0xf9
        "Po ", // 0xfa
        "Lin ", // 0xfb
        "Suo ", // 0xfc
        "Xiu ", // 0xfd
        "San ", // 0xfe
        "Cheng " // 0xff
    };
}
package com.liyu.itester.touch; import android.content.Context; import android.content.Intent; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.Rect; import android.util.AttributeSet; import android.util.DisplayMetrics; import android.util.TypedValue; import android.view.MotionEvent; import android.view.View; public class BorderTouchView extends View { private Paint mPaint; private Rect mNorth[]; private Rect mSouth[]; private Rect mWest[]; private Rect mEast[]; private OnTouchChangedListener mListener; Context mCon; private boolean mNorthFlags[]; private boolean mSouthFlags[]; private boolean mWestFlags[]; private boolean mEastFlags[]; private boolean mDistanceValid; private int mMaxDistance; private int mX = 0; private int mY = 0; private int mRectWidth; private int mRectHeight; private int mNorthHeight; private int mSouthHeight; private int mWestHeight; private int mEastHeight; boolean isdone = false; public BorderTouchView(Context context) { this(context, null); } public BorderTouchView(Context context, AttributeSet attrs) { super(context, attrs); mCon = context; DisplayMetrics dm = getResources().getDisplayMetrics(); mRectWidth = (int) TypedValue.applyDimension( TypedValue.COMPLEX_UNIT_DIP, 40, dm); mRectHeight = (int) dm.density * 40; // Maximum distance between points mMaxDistance = (int) dm.density * 21; mPaint = new Paint(Paint.ANTI_ALIAS_FLAG); mPaint.setStyle(Paint.Style.FILL); mPaint.setColor(Color.WHITE); } public void setOnTouchChangedListener(OnTouchChangedListener listener) { mListener = listener; } @Override public boolean onTouchEvent(MotionEvent event) { int x = (int) event.getX(); int y = (int) event.getY(); switch (event.getAction()) { case MotionEvent.ACTION_DOWN: mCon.sendBroadcast(new Intent("com.liyu.itester.hide")); touchDown(x, y); invalidate(); break; case MotionEvent.ACTION_MOVE: touchMove(x, y); invalidate(); break; case MotionEvent.ACTION_UP: mCon.sendBroadcast(new 
Intent("com.liyu.itester.show")); touchUp(); invalidate(); break; } return true; } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { int l, r, t, b; if(isdone) return; mNorth = new Rect[w / mRectHeight]; mNorthFlags = new boolean[mNorth.length]; mSouth = new Rect[mNorth.length]; mSouthFlags = new boolean[mSouth.length]; mWest = new Rect[(h - 2 * mRectWidth) / mRectHeight]; mWestFlags = new boolean[mWest.length]; mEast = new Rect[mWest.length]; mEastFlags = new boolean[mEast.length]; mNorthHeight = w / mNorth.length; for (int i = 0; i < mNorth.length; i++) { l = mNorthHeight * i; r = (i == mNorth.length - 1) ? w : l + mNorthHeight; t = 0; b = mRectWidth; mNorth[i] = new Rect(l + 1, t + 1, r - 1, b - 1); } mSouthHeight = w / mSouth.length; for (int i = 0; i < mSouth.length; i++) { l = mSouthHeight * i; r = (i == mSouth.length - 1) ? w : l + mSouthHeight; t = h - mRectWidth; b = h; mSouth[i] = new Rect(l + 1, t + 1, r - 1, b - 1); } mWestHeight = (h - 2 * mRectWidth) / mWest.length; for (int i = 0; i < mWest.length; i++) { l = 0; r = mRectWidth; t = mRectWidth + mWestHeight * i; b = (i == mWest.length - 1) ? h - mRectWidth : t + mWestHeight; mWest[i] = new Rect(l + 1, t + 1, r - 1, b - 1); } mEastHeight = (h - 2 * mRectWidth) / mEast.length; for (int i = 0; i < mEast.length; i++) { l = w - mRectWidth; r = w; t = mRectWidth + mEastHeight * i; b = (i == mEast.length - 1) ? h - mRectWidth : t + mEastHeight; mEast[i] = new Rect(l + 1, t + 1, r - 1, b - 1); } isdone = true; } @Override protected void onDraw(Canvas canvas) { canvas.drawColor(Color.BLACK); for (int i = 0; i < mNorth.length; i++) { mPaint.setColor(mNorthFlags[i] ? Color.GREEN : Color.WHITE); canvas.drawRect(mNorth[i], mPaint); } for (int i = 0; i < mSouth.length; i++) { mPaint.setColor(mSouthFlags[i] ? Color.GREEN : Color.WHITE); canvas.drawRect(mSouth[i], mPaint); } for (int i = 0; i < mWest.length; i++) { mPaint.setColor(mWestFlags[i] ? 
Color.GREEN : Color.WHITE); canvas.drawRect(mWest[i], mPaint); } for (int i = 0; i < mEast.length; i++) { mPaint.setColor(mEastFlags[i] ? Color.GREEN : Color.WHITE); canvas.drawRect(mEast[i], mPaint); } } private void touchDown(int x, int y) { // mDistanceValid = true; mX = x; mY = y; } private void touchMove(int x, int y) { int dx = Math.abs(x - mX); int dy = Math.abs(y - mY); mX = x; mY = y; setBorderFlag(x, y); // if (mDistanceValid) { // mDistanceValid = dx < mMaxDistance && dy < mMaxDistance; // } // // if (mDistanceValid) { // setBorderFlag(x, y); // } } private void touchUp() { if (mListener != null && checkBorders()) { mListener.onTouchFinish(this); } } private void setBorderFlag(int x, int y) { if (y < mRectWidth) { // North int i = x / mNorthHeight; if (i > -1 && i < mNorthFlags.length) { mNorthFlags[i] = true; } } else if (y > getHeight() - mRectWidth) { // South int i = x / mSouthHeight; if (i > -1 && i < mSouthFlags.length) { mSouthFlags[i] = true; } } else if (x < mRectWidth) { // West int i = (y - mRectWidth) / mWestHeight; if (i > -1 && i < mWestFlags.length) { mWestFlags[i] = true; } } else if (x > getWidth() - mRectWidth) { // East int i = (y - mRectWidth) / mEastHeight; if (i > -1 && i < mEastFlags.length) { mEastFlags[i] = true; } } } private boolean checkBorders() { for (int i = 0; i < mNorthFlags.length; i++) { if (!mNorthFlags[i]) return false; } for (int i = 0; i < mSouthFlags.length; i++) { if (!mSouthFlags[i]) return false; } for (int i = 0; i < mWestFlags.length; i++) { if (!mWestFlags[i]) return false; } for (int i = 0; i < mEastFlags.length; i++) { if (!mEastFlags[i]) return false; } return true; } }
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.stepfunctions.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Event detail payload describing an activity that was scheduled while an execution ran: the activity's ARN, its JSON
 * input, and the optional timeout / heartbeat limits that apply to the task.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/states-2016-11-23/ActivityScheduledEventDetails"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ActivityScheduledEventDetails implements Serializable, Cloneable, StructuredPojo {

    /** ARN identifying the scheduled activity. May be {@code null}. */
    private String resource;
    /** JSON input handed to the activity task. Treated as sensitive; redacted in {@link #toString()}. */
    private String input;
    /** Upper bound, in seconds, on how long the task may run. May be {@code null}. */
    private Long timeoutInSeconds;
    /** Upper bound, in seconds, between two task heartbeats. May be {@code null}. */
    private Long heartbeatInSeconds;

    /**
     * Sets the ARN of the scheduled activity.
     *
     * @param resource
     *        The Amazon Resource Name (ARN) of the scheduled activity.
     */
    public void setResource(String resource) {
        this.resource = resource;
    }

    /**
     * Returns the ARN of the scheduled activity.
     *
     * @return The Amazon Resource Name (ARN) of the scheduled activity.
     */
    public String getResource() {
        return this.resource;
    }

    /**
     * Fluent variant of {@link #setResource(String)}.
     *
     * @param resource
     *        The Amazon Resource Name (ARN) of the scheduled activity.
     * @return this object, for call chaining.
     */
    public ActivityScheduledEventDetails withResource(String resource) {
        this.resource = resource;
        return this;
    }

    /**
     * Sets the JSON data input of the activity task.
     *
     * @param input
     *        The JSON data input to the activity task.
     */
    public void setInput(String input) {
        this.input = input;
    }

    /**
     * Returns the JSON data input of the activity task.
     *
     * @return The JSON data input to the activity task.
     */
    public String getInput() {
        return this.input;
    }

    /**
     * Fluent variant of {@link #setInput(String)}.
     *
     * @param input
     *        The JSON data input to the activity task.
     * @return this object, for call chaining.
     */
    public ActivityScheduledEventDetails withInput(String input) {
        this.input = input;
        return this;
    }

    /**
     * Sets the maximum allowed duration of the activity task.
     *
     * @param timeoutInSeconds
     *        The maximum allowed duration of the activity task.
     */
    public void setTimeoutInSeconds(Long timeoutInSeconds) {
        this.timeoutInSeconds = timeoutInSeconds;
    }

    /**
     * Returns the maximum allowed duration of the activity task.
     *
     * @return The maximum allowed duration of the activity task.
     */
    public Long getTimeoutInSeconds() {
        return this.timeoutInSeconds;
    }

    /**
     * Fluent variant of {@link #setTimeoutInSeconds(Long)}.
     *
     * @param timeoutInSeconds
     *        The maximum allowed duration of the activity task.
     * @return this object, for call chaining.
     */
    public ActivityScheduledEventDetails withTimeoutInSeconds(Long timeoutInSeconds) {
        this.timeoutInSeconds = timeoutInSeconds;
        return this;
    }

    /**
     * Sets the maximum allowed duration between two heartbeats for the activity task.
     *
     * @param heartbeatInSeconds
     *        The maximum allowed duration between two heartbeats for the activity task.
     */
    public void setHeartbeatInSeconds(Long heartbeatInSeconds) {
        this.heartbeatInSeconds = heartbeatInSeconds;
    }

    /**
     * Returns the maximum allowed duration between two heartbeats for the activity task.
     *
     * @return The maximum allowed duration between two heartbeats for the activity task.
     */
    public Long getHeartbeatInSeconds() {
        return this.heartbeatInSeconds;
    }

    /**
     * Fluent variant of {@link #setHeartbeatInSeconds(Long)}.
     *
     * @param heartbeatInSeconds
     *        The maximum allowed duration between two heartbeats for the activity task.
     * @return this object, for call chaining.
     */
    public ActivityScheduledEventDetails withHeartbeatInSeconds(Long heartbeatInSeconds) {
        this.heartbeatInSeconds = heartbeatInSeconds;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("{");
        if (getResource() != null)
            text.append("Resource: ").append(getResource()).append(",");
        if (getInput() != null)
            text.append("Input: ").append("***Sensitive Data Redacted***").append(",");
        if (getTimeoutInSeconds() != null)
            text.append("TimeoutInSeconds: ").append(getTimeoutInSeconds()).append(",");
        if (getHeartbeatInSeconds() != null)
            text.append("HeartbeatInSeconds: ").append(getHeartbeatInSeconds());
        return text.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is false for null, so no separate null check is needed.
        if (!(obj instanceof ActivityScheduledEventDetails))
            return false;
        ActivityScheduledEventDetails that = (ActivityScheduledEventDetails) obj;
        // Objects.equals reproduces the original null-xor + equals pairing.
        return java.util.Objects.equals(that.getResource(), this.getResource())
                && java.util.Objects.equals(that.getInput(), this.getInput())
                && java.util.Objects.equals(that.getTimeoutInSeconds(), this.getTimeoutInSeconds())
                && java.util.Objects.equals(that.getHeartbeatInSeconds(), this.getHeartbeatInSeconds());
    }

    @Override
    public int hashCode() {
        // Objects.hash folds each element with prime 31 starting from 1 --
        // exactly the accumulation the hand-rolled version produced.
        return java.util.Objects.hash(getResource(), getInput(), getTimeoutInSeconds(), getHeartbeatInSeconds());
    }

    @Override
    public ActivityScheduledEventDetails clone() {
        try {
            return (ActivityScheduledEventDetails) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.stepfunctions.model.transform.ActivityScheduledEventDetailsMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.compute.model;

/**
 * Contains a list of Snapshot resources.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p>NOTE(review): generated model class -- the {@code @Key}-annotated field declarations drive the
 * JSON mapping, so field names must not be changed by hand; regenerate from the API instead.</p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class SnapshotList extends com.google.api.client.json.GenericJson {

  /**
   * [Output Only] Unique identifier for the resource; defined by the server.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String id;

  /**
   * A list of Snapshot resources.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<Snapshot> items;

  static {
    // hack to force ProGuard to consider Snapshot used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(Snapshot.class);
  }

  /**
   * Type of resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * [Output Only] This token allows you to get the next page of results for list requests. If the
   * number of results is larger than maxResults, use the nextPageToken as a value for the query
   * parameter pageToken in the next list request. Subsequent list requests will have their own
   * nextPageToken to continue paging through the results.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nextPageToken;

  /**
   * [Output Only] Server-defined URL for this resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLink;

  /**
   * [Output Only] Informational warning message.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Warning warning;

  /**
   * [Output Only] Unique identifier for the resource; defined by the server.
   * @return value or {@code null} for none
   */
  public java.lang.String getId() {
    return id;
  }

  /**
   * [Output Only] Unique identifier for the resource; defined by the server.
   * @param id id or {@code null} for none
   */
  public SnapshotList setId(java.lang.String id) {
    this.id = id;
    return this;
  }

  /**
   * A list of Snapshot resources.
   * @return value or {@code null} for none
   */
  public java.util.List<Snapshot> getItems() {
    return items;
  }

  /**
   * A list of Snapshot resources.
   * @param items items or {@code null} for none
   */
  public SnapshotList setItems(java.util.List<Snapshot> items) {
    this.items = items;
    return this;
  }

  /**
   * Type of resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * Type of resource.
   * @param kind kind or {@code null} for none
   */
  public SnapshotList setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * [Output Only] This token allows you to get the next page of results for list requests. If the
   * number of results is larger than maxResults, use the nextPageToken as a value for the query
   * parameter pageToken in the next list request. Subsequent list requests will have their own
   * nextPageToken to continue paging through the results.
   * @return value or {@code null} for none
   */
  public java.lang.String getNextPageToken() {
    return nextPageToken;
  }

  /**
   * [Output Only] This token allows you to get the next page of results for list requests. If the
   * number of results is larger than maxResults, use the nextPageToken as a value for the query
   * parameter pageToken in the next list request. Subsequent list requests will have their own
   * nextPageToken to continue paging through the results.
   * @param nextPageToken nextPageToken or {@code null} for none
   */
  public SnapshotList setNextPageToken(java.lang.String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }

  /**
   * [Output Only] Server-defined URL for this resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getSelfLink() {
    return selfLink;
  }

  /**
   * [Output Only] Server-defined URL for this resource.
   * @param selfLink selfLink or {@code null} for none
   */
  public SnapshotList setSelfLink(java.lang.String selfLink) {
    this.selfLink = selfLink;
    return this;
  }

  /**
   * [Output Only] Informational warning message.
   * @return value or {@code null} for none
   */
  public Warning getWarning() {
    return warning;
  }

  /**
   * [Output Only] Informational warning message.
   * @param warning warning or {@code null} for none
   */
  public SnapshotList setWarning(Warning warning) {
    this.warning = warning;
    return this;
  }

  @Override
  public SnapshotList set(String fieldName, Object value) {
    return (SnapshotList) super.set(fieldName, value);
  }

  @Override
  public SnapshotList clone() {
    return (SnapshotList) super.clone();
  }

  /**
   * [Output Only] Informational warning message.
   */
  public static final class Warning extends com.google.api.client.json.GenericJson {

    /**
     * [Output Only] A warning code, if applicable. For example, Compute Engine returns
     * NO_RESULTS_ON_PAGE if there are no results in the response.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String code;

    /**
     * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key":
     * "scope", "value": "zones/us-east1-d" }
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.util.List<Data> data;

    static {
      // hack to force ProGuard to consider Data used, since otherwise it would be stripped out
      // see https://github.com/google/google-api-java-client/issues/543
      com.google.api.client.util.Data.nullOf(Data.class);
    }

    /**
     * [Output Only] A human-readable description of the warning code.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.lang.String message;

    /**
     * [Output Only] A warning code, if applicable. For example, Compute Engine returns
     * NO_RESULTS_ON_PAGE if there are no results in the response.
     * @return value or {@code null} for none
     */
    public java.lang.String getCode() {
      return code;
    }

    /**
     * [Output Only] A warning code, if applicable. For example, Compute Engine returns
     * NO_RESULTS_ON_PAGE if there are no results in the response.
     * @param code code or {@code null} for none
     */
    public Warning setCode(java.lang.String code) {
      this.code = code;
      return this;
    }

    /**
     * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key":
     * "scope", "value": "zones/us-east1-d" }
     * @return value or {@code null} for none
     */
    public java.util.List<Data> getData() {
      return data;
    }

    /**
     * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key":
     * "scope", "value": "zones/us-east1-d" }
     * @param data data or {@code null} for none
     */
    public Warning setData(java.util.List<Data> data) {
      this.data = data;
      return this;
    }

    /**
     * [Output Only] A human-readable description of the warning code.
     * @return value or {@code null} for none
     */
    public java.lang.String getMessage() {
      return message;
    }

    /**
     * [Output Only] A human-readable description of the warning code.
     * @param message message or {@code null} for none
     */
    public Warning setMessage(java.lang.String message) {
      this.message = message;
      return this;
    }

    @Override
    public Warning set(String fieldName, Object value) {
      return (Warning) super.set(fieldName, value);
    }

    @Override
    public Warning clone() {
      return (Warning) super.clone();
    }

    /**
     * Model definition for SnapshotListWarningData.
     */
    public static final class Data extends com.google.api.client.json.GenericJson {

      /**
       * [Output Only] A key that provides more detail on the warning being returned. For example, for
       * warnings where there are no results in a list request for a particular zone, this key might be
       * scope and the key value might be the zone name. Other examples might be a key indicating a
       * deprecated resource and a suggested replacement, or a warning about invalid network settings
       * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP
       * forwarding).
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      private java.lang.String key;

      /**
       * [Output Only] A warning data value corresponding to the key.
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      private java.lang.String value;

      /**
       * [Output Only] A key that provides more detail on the warning being returned. For example, for
       * warnings where there are no results in a list request for a particular zone, this key might be
       * scope and the key value might be the zone name. Other examples might be a key indicating a
       * deprecated resource and a suggested replacement, or a warning about invalid network settings
       * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP
       * forwarding).
       * @return value or {@code null} for none
       */
      public java.lang.String getKey() {
        return key;
      }

      /**
       * [Output Only] A key that provides more detail on the warning being returned. For example, for
       * warnings where there are no results in a list request for a particular zone, this key might be
       * scope and the key value might be the zone name. Other examples might be a key indicating a
       * deprecated resource and a suggested replacement, or a warning about invalid network settings
       * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP
       * forwarding).
       * @param key key or {@code null} for none
       */
      public Data setKey(java.lang.String key) {
        this.key = key;
        return this;
      }

      /**
       * [Output Only] A warning data value corresponding to the key.
       * @return value or {@code null} for none
       */
      public java.lang.String getValue() {
        return value;
      }

      /**
       * [Output Only] A warning data value corresponding to the key.
       * @param value value or {@code null} for none
       */
      public Data setValue(java.lang.String value) {
        this.value = value;
        return this;
      }

      @Override
      public Data set(String fieldName, Object value) {
        return (Data) super.set(fieldName, value);
      }

      @Override
      public Data clone() {
        return (Data) super.clone();
      }
    }
  }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.compute.v2019_11_01;

import java.util.Collection;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.microsoft.rest.ExpandableStringEnum;

/**
 * Defines values for VirtualMachineSizeTypes.
 */
// NOTE(review): AutoRest-generated file — do not hand-edit beyond comments; changes
// will be lost on regeneration. Each constant wraps the exact wire string of an
// Azure VM size; unknown strings can still be represented because the type extends
// ExpandableStringEnum rather than being a closed Java enum.
public final class VirtualMachineSizeTypes extends ExpandableStringEnum<VirtualMachineSizeTypes> {
    // Basic tier A-series sizes.
    /** Static value Basic_A0 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes BASIC_A0 = fromString("Basic_A0");

    /** Static value Basic_A1 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes BASIC_A1 = fromString("Basic_A1");

    /** Static value Basic_A2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes BASIC_A2 = fromString("Basic_A2");

    /** Static value Basic_A3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes BASIC_A3 = fromString("Basic_A3");

    /** Static value Basic_A4 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes BASIC_A4 = fromString("Basic_A4");

    // Standard A-series sizes.
    /** Static value Standard_A0 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A0 = fromString("Standard_A0");

    /** Static value Standard_A1 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A1 = fromString("Standard_A1");

    /** Static value Standard_A2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A2 = fromString("Standard_A2");

    /** Static value Standard_A3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A3 = fromString("Standard_A3");

    /** Static value Standard_A4 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A4 = fromString("Standard_A4");

    /** Static value Standard_A5 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A5 = fromString("Standard_A5");

    /** Static value Standard_A6 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A6 = fromString("Standard_A6");

    /** Static value Standard_A7 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A7 = fromString("Standard_A7");

    /** Static value Standard_A8 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A8 = fromString("Standard_A8");

    /** Static value Standard_A9 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A9 = fromString("Standard_A9");

    /** Static value Standard_A10 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A10 = fromString("Standard_A10");

    /** Static value Standard_A11 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A11 = fromString("Standard_A11");

    // Av2-series sizes (note "m" variants carry more memory per the size name).
    /** Static value Standard_A1_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A1_V2 = fromString("Standard_A1_v2");

    /** Static value Standard_A2_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A2_V2 = fromString("Standard_A2_v2");

    /** Static value Standard_A4_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A4_V2 = fromString("Standard_A4_v2");

    /** Static value Standard_A8_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A8_V2 = fromString("Standard_A8_v2");

    /** Static value Standard_A2m_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A2M_V2 = fromString("Standard_A2m_v2");

    /** Static value Standard_A4m_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A4M_V2 = fromString("Standard_A4m_v2");

    /** Static value Standard_A8m_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_A8M_V2 = fromString("Standard_A8m_v2");

    // B-series sizes.
    /** Static value Standard_B1s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_B1S = fromString("Standard_B1s");

    /** Static value Standard_B1ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_B1MS = fromString("Standard_B1ms");

    /** Static value Standard_B2s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_B2S = fromString("Standard_B2s");

    /** Static value Standard_B2ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_B2MS = fromString("Standard_B2ms");

    /** Static value Standard_B4ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_B4MS = fromString("Standard_B4ms");

    /** Static value Standard_B8ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_B8MS = fromString("Standard_B8ms");

    // D-series (v1) sizes.
    /** Static value Standard_D1 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D1 = fromString("Standard_D1");

    /** Static value Standard_D2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D2 = fromString("Standard_D2");

    /** Static value Standard_D3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D3 = fromString("Standard_D3");

    /** Static value Standard_D4 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D4 = fromString("Standard_D4");

    /** Static value Standard_D11 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D11 = fromString("Standard_D11");

    /** Static value Standard_D12 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D12 = fromString("Standard_D12");

    /** Static value Standard_D13 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D13 = fromString("Standard_D13");

    /** Static value Standard_D14 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D14 = fromString("Standard_D14");

    // Dv2-series sizes (D1_v2..D5_v2; D11_v2..D15_v2 appear further below,
    // after the Dv3 groups — order preserved from the generator).
    /** Static value Standard_D1_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D1_V2 = fromString("Standard_D1_v2");

    /** Static value Standard_D2_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D2_V2 = fromString("Standard_D2_v2");

    /** Static value Standard_D3_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D3_V2 = fromString("Standard_D3_v2");

    /** Static value Standard_D4_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D4_V2 = fromString("Standard_D4_v2");

    /** Static value Standard_D5_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D5_V2 = fromString("Standard_D5_v2");

    // Dv3-series sizes.
    /** Static value Standard_D2_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D2_V3 = fromString("Standard_D2_v3");

    /** Static value Standard_D4_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D4_V3 = fromString("Standard_D4_v3");

    /** Static value Standard_D8_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D8_V3 = fromString("Standard_D8_v3");

    /** Static value Standard_D16_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D16_V3 = fromString("Standard_D16_v3");

    /** Static value Standard_D32_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D32_V3 = fromString("Standard_D32_v3");

    /** Static value Standard_D64_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D64_V3 = fromString("Standard_D64_v3");

    // Dsv3-series sizes ("s" = premium-storage-capable per the size name).
    /** Static value Standard_D2s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D2S_V3 = fromString("Standard_D2s_v3");

    /** Static value Standard_D4s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D4S_V3 = fromString("Standard_D4s_v3");

    /** Static value Standard_D8s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D8S_V3 = fromString("Standard_D8s_v3");

    /** Static value Standard_D16s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D16S_V3 = fromString("Standard_D16s_v3");

    /** Static value Standard_D32s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D32S_V3 = fromString("Standard_D32s_v3");

    /** Static value Standard_D64s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D64S_V3 = fromString("Standard_D64s_v3");

    // Remaining Dv2-series sizes.
    /** Static value Standard_D11_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D11_V2 = fromString("Standard_D11_v2");

    /** Static value Standard_D12_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D12_V2 = fromString("Standard_D12_v2");

    /** Static value Standard_D13_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D13_V2 = fromString("Standard_D13_v2");

    /** Static value Standard_D14_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D14_V2 = fromString("Standard_D14_v2");

    /** Static value Standard_D15_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_D15_V2 = fromString("Standard_D15_v2");

    // DS-series (v1) sizes.
    /** Static value Standard_DS1 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS1 = fromString("Standard_DS1");

    /** Static value Standard_DS2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS2 = fromString("Standard_DS2");

    /** Static value Standard_DS3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS3 = fromString("Standard_DS3");

    /** Static value Standard_DS4 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS4 = fromString("Standard_DS4");

    /** Static value Standard_DS11 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS11 = fromString("Standard_DS11");

    /** Static value Standard_DS12 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS12 = fromString("Standard_DS12");

    /** Static value Standard_DS13 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS13 = fromString("Standard_DS13");

    /** Static value Standard_DS14 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS14 = fromString("Standard_DS14");

    // DSv2-series sizes, including constrained-vCPU variants
    // (e.g. "Standard_DS13-4_v2" = DS13_v2 hardware limited to 4 vCPUs).
    /** Static value Standard_DS1_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS1_V2 = fromString("Standard_DS1_v2");

    /** Static value Standard_DS2_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS2_V2 = fromString("Standard_DS2_v2");

    /** Static value Standard_DS3_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS3_V2 = fromString("Standard_DS3_v2");

    /** Static value Standard_DS4_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS4_V2 = fromString("Standard_DS4_v2");

    /** Static value Standard_DS5_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS5_V2 = fromString("Standard_DS5_v2");

    /** Static value Standard_DS11_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS11_V2 = fromString("Standard_DS11_v2");

    /** Static value Standard_DS12_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS12_V2 = fromString("Standard_DS12_v2");

    /** Static value Standard_DS13_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS13_V2 = fromString("Standard_DS13_v2");

    /** Static value Standard_DS14_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS14_V2 = fromString("Standard_DS14_v2");

    /** Static value Standard_DS15_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS15_V2 = fromString("Standard_DS15_v2");

    /** Static value Standard_DS13-4_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS13_4_V2 = fromString("Standard_DS13-4_v2");

    /** Static value Standard_DS13-2_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS13_2_V2 = fromString("Standard_DS13-2_v2");

    /** Static value Standard_DS14-8_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS14_8_V2 = fromString("Standard_DS14-8_v2");

    /** Static value Standard_DS14-4_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_DS14_4_V2 = fromString("Standard_DS14-4_v2");

    // Ev3 / Esv3-series sizes, including constrained-vCPU variants.
    /** Static value Standard_E2_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E2_V3 = fromString("Standard_E2_v3");

    /** Static value Standard_E4_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E4_V3 = fromString("Standard_E4_v3");

    /** Static value Standard_E8_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E8_V3 = fromString("Standard_E8_v3");

    /** Static value Standard_E16_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E16_V3 = fromString("Standard_E16_v3");

    /** Static value Standard_E32_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E32_V3 = fromString("Standard_E32_v3");

    /** Static value Standard_E64_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E64_V3 = fromString("Standard_E64_v3");

    /** Static value Standard_E2s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E2S_V3 = fromString("Standard_E2s_v3");

    /** Static value Standard_E4s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E4S_V3 = fromString("Standard_E4s_v3");

    /** Static value Standard_E8s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E8S_V3 = fromString("Standard_E8s_v3");

    /** Static value Standard_E16s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E16S_V3 = fromString("Standard_E16s_v3");

    /** Static value Standard_E32s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E32S_V3 = fromString("Standard_E32s_v3");

    /** Static value Standard_E64s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E64S_V3 = fromString("Standard_E64s_v3");

    /** Static value Standard_E32-16_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E32_16_V3 = fromString("Standard_E32-16_v3");

    /** Static value Standard_E32-8s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E32_8S_V3 = fromString("Standard_E32-8s_v3");

    /** Static value Standard_E64-32s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E64_32S_V3 = fromString("Standard_E64-32s_v3");

    /** Static value Standard_E64-16s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_E64_16S_V3 = fromString("Standard_E64-16s_v3");

    // F-series, Fs-series and Fsv2-series sizes.
    /** Static value Standard_F1 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F1 = fromString("Standard_F1");

    /** Static value Standard_F2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F2 = fromString("Standard_F2");

    /** Static value Standard_F4 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F4 = fromString("Standard_F4");

    /** Static value Standard_F8 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F8 = fromString("Standard_F8");

    /** Static value Standard_F16 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F16 = fromString("Standard_F16");

    /** Static value Standard_F1s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F1S = fromString("Standard_F1s");

    /** Static value Standard_F2s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F2S = fromString("Standard_F2s");

    /** Static value Standard_F4s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F4S = fromString("Standard_F4s");

    /** Static value Standard_F8s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F8S = fromString("Standard_F8s");

    /** Static value Standard_F16s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F16S = fromString("Standard_F16s");

    /** Static value Standard_F2s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F2S_V2 = fromString("Standard_F2s_v2");

    /** Static value Standard_F4s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F4S_V2 = fromString("Standard_F4s_v2");

    /** Static value Standard_F8s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F8S_V2 = fromString("Standard_F8s_v2");

    /** Static value Standard_F16s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F16S_V2 = fromString("Standard_F16s_v2");

    /** Static value Standard_F32s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F32S_V2 = fromString("Standard_F32s_v2");

    /** Static value Standard_F64s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F64S_V2 = fromString("Standard_F64s_v2");

    /** Static value Standard_F72s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_F72S_V2 = fromString("Standard_F72s_v2");

    // G-series and GS-series sizes, including constrained-vCPU GS variants.
    /** Static value Standard_G1 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_G1 = fromString("Standard_G1");

    /** Static value Standard_G2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_G2 = fromString("Standard_G2");

    /** Static value Standard_G3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_G3 = fromString("Standard_G3");

    /** Static value Standard_G4 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_G4 = fromString("Standard_G4");

    /** Static value Standard_G5 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_G5 = fromString("Standard_G5");

    /** Static value Standard_GS1 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_GS1 = fromString("Standard_GS1");

    /** Static value Standard_GS2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_GS2 = fromString("Standard_GS2");

    /** Static value Standard_GS3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_GS3 = fromString("Standard_GS3");

    /** Static value Standard_GS4 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_GS4 = fromString("Standard_GS4");

    /** Static value Standard_GS5 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_GS5 = fromString("Standard_GS5");

    /** Static value Standard_GS4-8 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_GS4_8 = fromString("Standard_GS4-8");

    /** Static value Standard_GS4-4 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_GS4_4 = fromString("Standard_GS4-4");

    /** Static value Standard_GS5-16 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_GS5_16 = fromString("Standard_GS5-16");

    /** Static value Standard_GS5-8 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_GS5_8 = fromString("Standard_GS5-8");

    // H-series sizes.
    /** Static value Standard_H8 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_H8 = fromString("Standard_H8");

    /** Static value Standard_H16 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_H16 = fromString("Standard_H16");

    /** Static value Standard_H8m for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_H8M = fromString("Standard_H8m");

    /** Static value Standard_H16m for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_H16M = fromString("Standard_H16m");

    /** Static value Standard_H16r for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_H16R = fromString("Standard_H16r");

    /** Static value Standard_H16mr for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_H16MR = fromString("Standard_H16mr");

    // Ls-series sizes.
    /** Static value Standard_L4s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_L4S = fromString("Standard_L4s");

    /** Static value Standard_L8s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_L8S = fromString("Standard_L8s");

    /** Static value Standard_L16s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_L16S = fromString("Standard_L16s");

    /** Static value Standard_L32s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_L32S = fromString("Standard_L32s");

    // M-series sizes, including constrained-vCPU variants.
    /** Static value Standard_M64s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_M64S = fromString("Standard_M64s");

    /** Static value Standard_M64ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_M64MS = fromString("Standard_M64ms");

    /** Static value Standard_M128s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_M128S = fromString("Standard_M128s");

    /** Static value Standard_M128ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_M128MS = fromString("Standard_M128ms");

    /** Static value Standard_M64-32ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_M64_32MS = fromString("Standard_M64-32ms");

    /** Static value Standard_M64-16ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_M64_16MS = fromString("Standard_M64-16ms");

    /** Static value Standard_M128-64ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_M128_64MS = fromString("Standard_M128-64ms");

    /** Static value Standard_M128-32ms for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_M128_32MS = fromString("Standard_M128-32ms");

    // NC-series (v1/v2/v3) sizes.
    /** Static value Standard_NC6 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC6 = fromString("Standard_NC6");

    /** Static value Standard_NC12 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC12 = fromString("Standard_NC12");

    /** Static value Standard_NC24 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC24 = fromString("Standard_NC24");

    /** Static value Standard_NC24r for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC24R = fromString("Standard_NC24r");

    /** Static value Standard_NC6s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC6S_V2 = fromString("Standard_NC6s_v2");

    /** Static value Standard_NC12s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC12S_V2 = fromString("Standard_NC12s_v2");

    /** Static value Standard_NC24s_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC24S_V2 = fromString("Standard_NC24s_v2");

    /** Static value Standard_NC24rs_v2 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC24RS_V2 = fromString("Standard_NC24rs_v2");

    /** Static value Standard_NC6s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC6S_V3 = fromString("Standard_NC6s_v3");

    /** Static value Standard_NC12s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC12S_V3 = fromString("Standard_NC12s_v3");

    /** Static value Standard_NC24s_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC24S_V3 = fromString("Standard_NC24s_v3");

    /** Static value Standard_NC24rs_v3 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NC24RS_V3 = fromString("Standard_NC24rs_v3");

    // ND-series sizes.
    /** Static value Standard_ND6s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_ND6S = fromString("Standard_ND6s");

    /** Static value Standard_ND12s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_ND12S = fromString("Standard_ND12s");

    /** Static value Standard_ND24s for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_ND24S = fromString("Standard_ND24s");

    /** Static value Standard_ND24rs for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_ND24RS = fromString("Standard_ND24rs");

    // NV-series sizes.
    /** Static value Standard_NV6 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NV6 = fromString("Standard_NV6");

    /** Static value Standard_NV12 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NV12 = fromString("Standard_NV12");

    /** Static value Standard_NV24 for VirtualMachineSizeTypes. */
    public static final VirtualMachineSizeTypes STANDARD_NV24 = fromString("Standard_NV24");

    /**
     * Creates or finds a VirtualMachineSizeTypes from its string representation.
     * @param name a name to look for
     * @return the corresponding VirtualMachineSizeTypes
     */
    @JsonCreator
    public static VirtualMachineSizeTypes fromString(String name) {
        // Delegates to ExpandableStringEnum: returns the cached instance for a known
        // name or creates a new one, so service-side additions never break parsing.
        return fromString(name, VirtualMachineSizeTypes.class);
    }

    /**
     * @return known VirtualMachineSizeTypes values
     */
    public static Collection<VirtualMachineSizeTypes> values() {
        return values(VirtualMachineSizeTypes.class);
    }
}
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package Interfaz; import java.awt.Color; import javax.swing.BorderFactory; import javax.swing.JOptionPane; import javax.swing.border.LineBorder; import java.io.Serializable; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import javax.swing.DefaultCellEditor; import javax.swing.JComboBox; import javax.swing.JTable; import javax.swing.table.DefaultTableCellRenderer; import javax.swing.table.DefaultTableModel; import javax.swing.table.TableColumn; import optimizacion.Coordenada; import optimizacion.Variable; /** * * @author Sergio */ public class Interfaz extends javax.swing.JFrame { /** * Creates new form Interfaz */ static int contador=1; double num,ind,ind1; int con=0,contRest=0; double y; DefaultTableModel model; DefaultTableModel modelRest; ArrayList<Restriccion> rest=new ArrayList<Restriccion>(); ArrayList<Restriccion> despeje=new ArrayList<Restriccion>(); ArrayList<Coordenada> coor=new ArrayList<Coordenada>(); public Interfaz() { initComponents(); cargar(); cargarRest(); this.setLocationRelativeTo(null); // setUpSportColumn(tablaRest,tablaRest.getColumnModel().getColumn(3)); this.getContentPane().setBackground(Color.white); } public void cargarDatos(int cont){ model.insertRow(con, new Object[]{}); model.setValueAt("X"+cont, con, 0); model.setValueAt("variable "+cont, con, 1); con++; } public void cargar(){ String data[][]={}; String col[]={"Nombre","Descripcion"}; model=new DefaultTableModel(data,col); datos.setModel(model); } public void setUpSportColumn(JTable table, TableColumn sportColumn) { //Set up the editor for the sport cells. JComboBox comboBox = new JComboBox(); comboBox.addItem("<="); comboBox.addItem(">="); comboBox.addItem("="); sportColumn.setCellEditor(new DefaultCellEditor(comboBox)); //Set up tool tips for the sport cells. 
DefaultTableCellRenderer renderer = new DefaultTableCellRenderer(); renderer.setToolTipText("Click for combo box"); sportColumn.setCellRenderer(renderer); } public void cargarDatosRest(){ setUpSportColumn(tablaRest,tablaRest.getColumnModel().getColumn(modelRest.getColumnCount()-1)); modelRest.insertRow(contRest, new Object[]{}); for (int i = 0; i < modelRest.getColumnCount()-1; i++) { modelRest.setValueAt(0, contRest, i); } modelRest.setValueAt("<=", contRest, modelRest.getColumnCount()-1); // modelRest.setValueAt(0, contRest, 0); // modelRest.setValueAt(0, contRest, 1); // modelRest.setValueAt(0, contRest, 2); // modelRest.setValueAt("<=", contRest, 3); } public void cargarRest(){ String data[][]={}; String col[]={"X1","X2","B","OPC"}; modelRest=new DefaultTableModel(data,col); tablaRest.setModel(modelRest); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. 
*/ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jScrollBar2 = new javax.swing.JScrollBar(); grupo = new javax.swing.ButtonGroup(); panel = new javax.swing.JPanel(); jLabel1 = new javax.swing.JLabel(); jPanel1 = new javax.swing.JPanel(); jScrollPane1 = new javax.swing.JScrollPane(); jList1 = new javax.swing.JList(); jLabel2 = new javax.swing.JLabel(); jLabel9 = new javax.swing.JLabel(); Maximizar = new javax.swing.JRadioButton(); jRadioButton2 = new javax.swing.JRadioButton(); jSeparator3 = new javax.swing.JSeparator(); jSeparator5 = new javax.swing.JSeparator(); jPanel2 = new javax.swing.JPanel(); jScrollPane2 = new javax.swing.JScrollPane(); jTextArea1 = new javax.swing.JTextArea(); jLabel3 = new javax.swing.JLabel(); jLabel4 = new javax.swing.JLabel(); jLabel5 = new javax.swing.JLabel(); jLabel6 = new javax.swing.JLabel(); jTextField1 = new javax.swing.JTextField(); jSeparator4 = new javax.swing.JSeparator(); jPanel3 = new javax.swing.JPanel(); jLabel7 = new javax.swing.JLabel(); jSeparator1 = new javax.swing.JSeparator(); jScrollPane5 = new javax.swing.JScrollPane(); datos1 = new javax.swing.JTable(); jPanel4 = new javax.swing.JPanel(); jButton1 = new javax.swing.JButton(); jButton2 = new javax.swing.JButton(); jButton3 = new javax.swing.JButton(); jButton4 = new javax.swing.JButton(); jPanel5 = new javax.swing.JPanel(); jLabel8 = new javax.swing.JLabel(); botonDelete = new javax.swing.JButton(); botonAdd = new javax.swing.JButton(); jLabel11 = new javax.swing.JLabel(); jSeparator2 = new javax.swing.JSeparator(); jScrollPane4 = new javax.swing.JScrollPane(); tablaRest = new javax.swing.JTable(); jPanel7 = new javax.swing.JPanel(); jLabel16 = new javax.swing.JLabel(); botonAdd1 = new javax.swing.JButton(); botonDelete1 = new javax.swing.JButton(); jScrollPane3 = new javax.swing.JScrollPane(); datos = new javax.swing.JTable(); jSeparator6 = new 
javax.swing.JSeparator(); jMenuBar1 = new javax.swing.JMenuBar(); jMenu1 = new javax.swing.JMenu(); jMenu2 = new javax.swing.JMenu(); jMenuItem1 = new javax.swing.JMenuItem(); jMenuItem2 = new javax.swing.JMenuItem(); setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE); setBackground(new java.awt.Color(255, 255, 255)); setResizable(false); panel.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(0, 0, 0), 2, true)); jLabel1.setFont(new java.awt.Font("Gill Sans MT", 1, 24)); // NOI18N jLabel1.setText("Metodos de optimizacion programacion lineal"); javax.swing.GroupLayout panelLayout = new javax.swing.GroupLayout(panel); panel.setLayout(panelLayout); panelLayout.setHorizontalGroup( panelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, panelLayout.createSequentialGroup() .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jLabel1) .addGap(87, 87, 87)) ); panelLayout.setVerticalGroup( panelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(panelLayout.createSequentialGroup() .addContainerGap() .addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 39, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); jPanel1.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(0, 0, 0), 2, true)); jList1.setModel(new javax.swing.AbstractListModel() { String[] strings = { "Metodo Simplex", "Tecnica de la M", "Metodo Dual", "Metodo Grafico" }; public int getSize() { return strings.length; } public Object getElementAt(int i) { return strings[i]; } }); jScrollPane1.setViewportView(jList1); jLabel2.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel2.setText("Metodos"); jLabel9.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel9.setText("Tipo de objetivo"); Maximizar.setText("Maximizar"); Maximizar.addActionListener(new 
java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { MaximizarActionPerformed(evt); } }); jRadioButton2.setText("Minimizar"); javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1); jPanel1.setLayout(jPanel1Layout); jPanel1Layout.setHorizontalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(jSeparator3, javax.swing.GroupLayout.PREFERRED_SIZE, 101, javax.swing.GroupLayout.PREFERRED_SIZE) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(19, 19, 19) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 111, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(Maximizar) .addComponent(jRadioButton2) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(10, 10, 10) .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(jLabel9, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jSeparator5))))) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(29, 29, 29) .addComponent(jLabel2)))) .addContainerGap(23, Short.MAX_VALUE)) ); jPanel1Layout.setVerticalGroup( jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel1Layout.createSequentialGroup() .addGap(25, 25, 25) .addComponent(jLabel2) .addGap(7, 7, 7) .addComponent(jSeparator3, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(1, 1, 1) .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE, 97, javax.swing.GroupLayout.PREFERRED_SIZE) 
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(jLabel9) .addGap(9, 9, 9) .addComponent(jSeparator5, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(Maximizar) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(jRadioButton2) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); jPanel2.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(0, 0, 0), 2, true)); jTextArea1.setColumns(20); jTextArea1.setRows(5); jScrollPane2.setViewportView(jTextArea1); jLabel3.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel3.setText("Descripcion del problema"); jLabel4.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel4.setText("ID:"); jLabel5.setText("0"); jLabel6.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel6.setText("Nombre:"); javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); jPanel2.setLayout(jPanel2Layout); jPanel2Layout.setHorizontalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(33, 33, 33) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(10, 10, 10) .addComponent(jSeparator4, javax.swing.GroupLayout.PREFERRED_SIZE, 213, javax.swing.GroupLayout.PREFERRED_SIZE)) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addComponent(jLabel6) .addGap(29, 29, 29) .addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, 143, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addGroup(jPanel2Layout.createSequentialGroup() 
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING) .addComponent(jScrollPane2) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(34, 34, 34) .addComponent(jLabel3) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jLabel4) .addGap(32, 32, 32) .addComponent(jLabel5, javax.swing.GroupLayout.PREFERRED_SIZE, 52, javax.swing.GroupLayout.PREFERRED_SIZE))) .addGap(20, 20, 20))))) ); jPanel2Layout.setVerticalGroup( jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(12, 12, 12) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jTextField1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jLabel6)) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel2Layout.createSequentialGroup() .addGap(18, 18, 18) .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jLabel4) .addComponent(jLabel5))) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup() .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jLabel3))) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jSeparator4, javax.swing.GroupLayout.PREFERRED_SIZE, 2, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(13, 13, 13) .addComponent(jScrollPane2, javax.swing.GroupLayout.PREFERRED_SIZE, 129, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(43, Short.MAX_VALUE)) ); jPanel3.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(0, 0, 0), 2, true)); jLabel7.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel7.setText("Funcion Objetivo"); datos1.setFont(new 
java.awt.Font("Tahoma", 1, 11)); // NOI18N datos1.setModel(new javax.swing.table.DefaultTableModel( new Object [][] { {null, null, null} }, new String [] { "", "X1", "X2" } )); jScrollPane5.setViewportView(datos1); javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3); jPanel3.setLayout(jPanel3Layout); jPanel3Layout.setHorizontalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addGap(31, 31, 31) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addComponent(jLabel7) .addGap(0, 170, Short.MAX_VALUE)) .addComponent(jSeparator1)) .addContainerGap()) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addComponent(jScrollPane5, javax.swing.GroupLayout.PREFERRED_SIZE, 294, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) ); jPanel3Layout.setVerticalGroup( jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addContainerGap() .addComponent(jLabel7) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jSeparator1, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(98, Short.MAX_VALUE)) .addGroup(jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel3Layout.createSequentialGroup() .addGap(39, 39, 39) .addComponent(jScrollPane5, javax.swing.GroupLayout.PREFERRED_SIZE, 44, javax.swing.GroupLayout.PREFERRED_SIZE) .addContainerGap(57, Short.MAX_VALUE))) ); jPanel4.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(0, 0, 0), 2, true)); jButton1.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jButton1.setText("<-"); 
jButton2.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jButton2.setText("->"); jButton3.setText("Solucionar"); jButton3.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { jButton3ActionPerformed(evt); } }); jButton4.setText("Graficar"); jButton4.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { jButton4ActionPerformed(evt); } }); javax.swing.GroupLayout jPanel4Layout = new javax.swing.GroupLayout(jPanel4); jPanel4.setLayout(jPanel4Layout); jPanel4Layout.setHorizontalGroup( jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel4Layout.createSequentialGroup() .addGap(42, 42, 42) .addComponent(jButton3) .addGap(26, 26, 26) .addComponent(jButton4) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jButton1) .addGap(31, 31, 31) .addComponent(jButton2) .addGap(47, 47, 47)) ); jPanel4Layout.setVerticalGroup( jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel4Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(jButton1) .addComponent(jButton2) .addComponent(jButton3) .addComponent(jButton4)) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); jPanel5.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(0, 0, 0), 2, true)); jLabel8.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel8.setText("Restricciones"); botonDelete.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N botonDelete.setText("-"); botonDelete.addMouseListener(new java.awt.event.MouseAdapter() { public void mouseEntered(java.awt.event.MouseEvent evt) { botonDeleteMouseEntered(evt); } public void 
mouseExited(java.awt.event.MouseEvent evt) { botonDeleteMouseExited(evt); } }); botonAdd.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N botonAdd.setText("+"); botonAdd.addMouseListener(new java.awt.event.MouseAdapter() { public void mouseEntered(java.awt.event.MouseEvent evt) { botonAddMouseEntered(evt); } public void mouseExited(java.awt.event.MouseEvent evt) { botonAddMouseExited(evt); } }); botonAdd.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { botonAddActionPerformed(evt); } }); jLabel11.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N tablaRest.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N tablaRest.setModel(new javax.swing.table.DefaultTableModel( new Object [][] { }, new String [] { } )); jScrollPane4.setViewportView(tablaRest); javax.swing.GroupLayout jPanel5Layout = new javax.swing.GroupLayout(jPanel5); jPanel5.setLayout(jPanel5Layout); jPanel5Layout.setHorizontalGroup( jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel5Layout.createSequentialGroup() .addGap(23, 23, 23) .addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel5Layout.createSequentialGroup() .addComponent(jLabel8) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(botonAdd, javax.swing.GroupLayout.PREFERRED_SIZE, 47, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(botonDelete, javax.swing.GroupLayout.PREFERRED_SIZE, 47, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(30, 30, 30)) .addGroup(jPanel5Layout.createSequentialGroup() .addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel5Layout.createSequentialGroup() .addGap(90, 90, 90) .addComponent(jLabel11)) .addComponent(jSeparator2, 
javax.swing.GroupLayout.PREFERRED_SIZE, 147, javax.swing.GroupLayout.PREFERRED_SIZE)) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)))) .addGroup(jPanel5Layout.createSequentialGroup() .addContainerGap() .addComponent(jScrollPane4, javax.swing.GroupLayout.PREFERRED_SIZE, 375, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(0, 0, Short.MAX_VALUE)) ); jPanel5Layout.setVerticalGroup( jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel5Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel8) .addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(botonDelete) .addComponent(botonAdd))) .addGap(1, 1, 1) .addComponent(jSeparator2, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(7, 7, 7) .addComponent(jLabel11) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jScrollPane4, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE) .addContainerGap()) ); jPanel7.setBorder(new javax.swing.border.LineBorder(new java.awt.Color(0, 0, 0), 2, true)); jLabel16.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N jLabel16.setText("Variables"); botonAdd1.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N botonAdd1.setText("+"); botonAdd1.addMouseListener(new java.awt.event.MouseAdapter() { public void mouseEntered(java.awt.event.MouseEvent evt) { botonAdd1MouseEntered(evt); } public void mouseExited(java.awt.event.MouseEvent evt) { botonAdd1MouseExited(evt); } }); botonAdd1.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { botonAdd1ActionPerformed(evt); } }); botonDelete1.setFont(new java.awt.Font("Tahoma", 1, 12)); // NOI18N botonDelete1.setText("-"); botonDelete1.addMouseListener(new java.awt.event.MouseAdapter() { 
public void mouseEntered(java.awt.event.MouseEvent evt) { botonDelete1MouseEntered(evt); } public void mouseExited(java.awt.event.MouseEvent evt) { botonDelete1MouseExited(evt); } }); botonDelete1.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { botonDelete1ActionPerformed(evt); } }); datos.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N datos.setModel(new javax.swing.table.DefaultTableModel( new Object [][] { {null, null}, {null, null} }, new String [] { "Nombre", "Descripcion" } )); jScrollPane3.setViewportView(datos); javax.swing.GroupLayout jPanel7Layout = new javax.swing.GroupLayout(jPanel7); jPanel7.setLayout(jPanel7Layout); jPanel7Layout.setHorizontalGroup( jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel7Layout.createSequentialGroup() .addGap(36, 36, 36) .addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jLabel16) .addComponent(jScrollPane3, javax.swing.GroupLayout.PREFERRED_SIZE, 294, javax.swing.GroupLayout.PREFERRED_SIZE) .addComponent(jSeparator6, javax.swing.GroupLayout.PREFERRED_SIZE, 130, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(botonAdd1, javax.swing.GroupLayout.PREFERRED_SIZE, 47, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(18, 18, 18) .addComponent(botonDelete1, javax.swing.GroupLayout.PREFERRED_SIZE, 47, javax.swing.GroupLayout.PREFERRED_SIZE) .addGap(37, 37, 37)) ); jPanel7Layout.setVerticalGroup( jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel7Layout.createSequentialGroup() .addContainerGap() .addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(jPanel7Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) .addComponent(botonAdd1) 
.addComponent(botonDelete1)) .addGroup(jPanel7Layout.createSequentialGroup() .addComponent(jLabel16) .addGap(1, 1, 1) .addComponent(jSeparator6, javax.swing.GroupLayout.PREFERRED_SIZE, 10, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jScrollPane3, javax.swing.GroupLayout.PREFERRED_SIZE, 62, javax.swing.GroupLayout.PREFERRED_SIZE))) .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) ); jMenu1.setText("Archivo"); jMenu2.setText("Guardar como"); jMenu1.add(jMenu2); jMenuItem1.setText("Abrir"); jMenu1.add(jMenuItem1); jMenuItem2.setText("Descargar"); jMenuItem2.addActionListener(new java.awt.event.ActionListener() { public void actionPerformed(java.awt.event.ActionEvent evt) { jMenuItem2ActionPerformed(evt); } }); jMenu1.add(jMenuItem2); jMenuBar1.add(jMenu1); setJMenuBar(jMenuBar1); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane()); getContentPane().setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addGap(32, 32, 32) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(jPanel7, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(panel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addGroup(layout.createSequentialGroup() .addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addGroup(layout.createSequentialGroup() 
.addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jPanel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) .addComponent(jPanel4, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))) .addContainerGap(27, Short.MAX_VALUE)) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addComponent(panel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addComponent(jPanel7, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) .addComponent(jPanel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) .addComponent(jPanel3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) .addComponent(jPanel4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, 
javax.swing.GroupLayout.PREFERRED_SIZE)
                .addContainerGap(13, Short.MAX_VALUE))
        );

        pack();
    }// </editor-fold>//GEN-END:initComponents

    // "Graficar" button: builds one Restriccion per row of the constraints table.
    // Every cell except the last column is parsed as a numeric coefficient; the
    // last column ("OPC") holds the relational operator, which is encoded as a
    // numeric code: 1 = "<=", 2 = ">=", 3 = "=".  The parsed constraints are
    // appended to the `rest` field, then despejarY()/combinaciones() run the
    // graphical method and each candidate corner point is shown in a dialog.
    // NOTE(review): rows are appended to `rest` on every click — presumably the
    // list should be cleared first; confirm against where `rest` is declared.
    private void jButton4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton4ActionPerformed
        for (int i = 0; i < tablaRest.getRowCount(); i++) {
            ArrayList<Variable> var=new ArrayList<Variable>();
            for (int j = 0; j < tablaRest.getColumnCount(); j++) {
                if(j== tablaRest.getColumnCount()-1){
                    // Last column: translate the operator text into its numeric code.
                    if(modelRest.getValueAt(i, j).toString().equalsIgnoreCase("<=")){
                        // JOptionPane.showMessageDialog(null,modelRest.getValueAt(i, j).toString());
                        var.add(new Variable(modelRest.getColumnName(j),1));
                    }else if(modelRest.getValueAt(i, j).toString().equalsIgnoreCase(">=")){
                        var.add(new Variable(modelRest.getColumnName(j),2));
                    }else if(modelRest.getValueAt(i, j).toString().equalsIgnoreCase("=")){
                        var.add(new Variable(modelRest.getColumnName(j),3));
                    }
                }else{
                    // Coefficient column: parse the cell text as a double.
                    // JOptionPane.showMessageDialog(null,modelRest.getValueAt(i, j).toString());
                    var.add(new Variable(modelRest.getColumnName(j),Double.parseDouble(modelRest.getValueAt(i, j).toString())));
                }
            }
            rest.add(new Restriccion(var));
        }
        despejarY();
        combinaciones();
        // Show every candidate corner point found by the graphical method.
        for (int i = 0; i < coor.size(); i++) {
            JOptionPane.showMessageDialog(null, coor.get(i).getX()+ ","+coor.get(i).getY());
        }
    }//GEN-LAST:event_jButton4ActionPerformed

    private void MaximizarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_MaximizarActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_MaximizarActionPerformed

    // Empty hover handlers wired by the NetBeans GUI builder; intentionally no-ops.
    private void botonAddMouseEntered(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_botonAddMouseEntered
    }//GEN-LAST:event_botonAddMouseEntered

    private void botonDeleteMouseEntered(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_botonDeleteMouseEntered
    }//GEN-LAST:event_botonDeleteMouseEntered

    private void botonAddMouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_botonAddMouseExited
    }//GEN-LAST:event_botonAddMouseExited

    private void botonDeleteMouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_botonDeleteMouseExited
    }//GEN-LAST:event_botonDeleteMouseExited

    private void jMenuItem2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jMenuItem2ActionPerformed
        // TODO add your handling code here:
    }//GEN-LAST:event_jMenuItem2ActionPerformed

    private void botonAdd1MouseEntered(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_botonAdd1MouseEntered
        // TODO add your handling code here:
    }//GEN-LAST:event_botonAdd1MouseEntered

    private void botonAdd1MouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_botonAdd1MouseExited
        // TODO add your handling code here:
    }//GEN-LAST:event_botonAdd1MouseExited

    private void botonDelete1MouseEntered(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_botonDelete1MouseEntered
        // TODO add your handling code here:
    }//GEN-LAST:event_botonDelete1MouseEntered

    private void botonDelete1MouseExited(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_botonDelete1MouseExited
        // TODO add your handling code here:
    }//GEN-LAST:event_botonDelete1MouseExited

    // "+" button under the constraints table: appends a new constraint row and
    // bumps the row counter (cargarDatosRest/contRest declared elsewhere in the class).
    private void botonAddActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_botonAddActionPerformed
        cargarDatosRest() ;
        contRest++;
    }//GEN-LAST:event_botonAdd1ActionPerformed handled below — see continuation
    private void botonAdd1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_botonAdd1ActionPerformed
        // "+" button of the variables table: registers a new decision variable and,
        // once there are more than two rows, inserts a matching coefficient column
        // into the constraints model, then shifts the column headers so that "B"
        // (right-hand side) and "OPC" (operator) stay as the last two columns.
        cargarDatos(contador);
        contador++;
        int pos1=0,pos2=0;
        // JOptionPane.showMessageDialog(null, model.getRowCount());
        try{
            if( model.getRowCount()>2){
                modelRest.addColumn(model.getValueAt(model.getRowCount()-1, 0));
                ArrayList<String> arreglo=new ArrayList<String>();
                for (int i = 0; i <modelRest.getColumnCount(); i++) {
                    // Record where the "b" and "opc" header columns currently sit.
                    if(modelRest.getColumnName(i).equalsIgnoreCase("b")){
                        pos1=i;
                    }else if(modelRest.getColumnName(i).equalsIgnoreCase("opc")){
                        pos2=i;
                    }
                    arreglo.add(modelRest.getColumnName(i));
                }
                // Number of columns that were added after "OPC" and must be shifted left.
                int dis=modelRest.getColumnCount()-pos2-1;
                // JOptionPane.showMessageDialog(null, dis);
                for (int i = 2; i <= dis+1; i++) {
arreglo.set((modelRest.getColumnCount()-1)-i,modelRest.getColumnName(modelRest.getColumnCount()-(i-1))); } arreglo.set(pos1+dis, "B"); arreglo.set(pos2+dis, "OPC"); modelRest.setColumnIdentifiers(arreglo.toArray()); } }catch(Exception e){ } }//GEN-LAST:event_botonAdd1ActionPerformed private void jButton3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton3ActionPerformed JOptionPane.showMessageDialog(null, modelRest.getValueAt(0, 1)); }//GEN-LAST:event_jButton3ActionPerformed private void botonDelete1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_botonDelete1ActionPerformed model.removeRow(model.getRowCount()-1); con--; contador--; }//GEN-LAST:event_botonDelete1ActionPerformed /** * @param args the command line arguments */ public void despejarY(){ double opc; for (int i = 0; i <rest.size(); i++) { opc=rest.get(i).variable.get(3).getValor(); ArrayList<Variable> var=new ArrayList<Variable>(); if(rest.get(i).variable.get(0).getValor()!=0){ if(rest.get(i).variable.get(1).getValor()!=0 ){ num= rest.get(i).variable.get(1).getValor()*-1; }else{ num=0; } }else{ num=1; } if(rest.get(i).variable.get(0).getValor()!=0){ y=1; ind=num/rest.get(i).variable.get(0).getValor(); ind1=rest.get(i).variable.get(2).getValor()/rest.get(i).variable.get(0).getValor(); }else{ y=0; ind=num; ind1=rest.get(i).variable.get(2).getValor()/rest.get(i).variable.get(1).getValor(); } var.add(new Variable("X2",ind)); var.add(new Variable("B",ind1)); var.add(new Variable("OPC",opc)); var.add(new Variable("X1",y)); despeje.add(new Restriccion(var)); // encontrarInterseccion(despeje.get(0),despeje.get(1)); } //JOptionPane.showMessageDialog(null, encontrarInterseccionejey(despeje.get(0)).getX()); // JOptionPane.showMessageDialog(null, encontrarInterseccion(despeje.get(0),despeje.get(1)).getY()); } public Coordenada encontrarInterseccion(Restriccion a,Restriccion b){ double x,y; Coordenada nueva; if(a.variable.get(3).getValor()!=0 && 
b.variable.get(3).getValor()!=0){ x=(b.variable.get(1).getValor() - a.variable.get(1).getValor())/(a.variable.get(0).getValor()-b.variable.get(0).getValor()); y=a.variable.get(0).getValor()*x + a.variable.get(1).getValor(); nueva=new Coordenada(x,y); return nueva; }else if(a.variable.get(3).getValor()!=0 && b.variable.get(3).getValor()==0){ x=b.variable.get(1).getValor(); y=a.variable.get(0).getValor()*x + a.variable.get(1).getValor(); nueva=new Coordenada(x,y); return nueva; }else if(a.variable.get(3).getValor()==0 && b.variable.get(3).getValor()!=0){ x=a.variable.get(1).getValor(); y=b.variable.get(0).getValor()*x + b.variable.get(1).getValor(); nueva=new Coordenada(x,y); return nueva; } x=0; y=0; nueva=new Coordenada(x,y); return nueva; } public Coordenada encontrarInterseccionejex(Restriccion a){ double x,y; Coordenada nueva; if(a.variable.get(3).getValor()!=0){ x=0; y=a.variable.get(1).getValor(); nueva=new Coordenada(x,y); }else{ x=0; y=0; nueva=new Coordenada(x,y); } return nueva; } public boolean tieneSolucion(ArrayList<Coordenada> a){ boolean tiene=false; if(a.size()>=3){ tiene=true; } return tiene; } public Coordenada encontrarInterseccionejey(Restriccion a){ double x,y; Coordenada nueva; if(a.variable.get(3).getValor()!=0){ x=-1*(a.variable.get(1).getValor()/a.variable.get(0).getValor()); y=0; nueva=new Coordenada(x,y); }else{ y=0; x=a.variable.get(1).getValor(); nueva=new Coordenada(x,y); } return nueva; } public void combinaciones(){ int j=0; for (int i = 0; i < despeje.size(); i++) { j=i; while(j<despeje.size()-1){ j++; if(seleccionarPuntosValidos(encontrarInterseccion(despeje.get(i),despeje.get(j)),despeje)){ coor.add(encontrarInterseccion(despeje.get(i),despeje.get(j))); } System.out.println("i:"+i); System.out.println("j:"+j); } j=0; } for (int i = 0; i < despeje.size(); i++) { if(seleccionarPuntosValidos(encontrarInterseccionejex(despeje.get(i)),despeje)){ coor.add(encontrarInterseccionejex(despeje.get(i))); } 
if(seleccionarPuntosValidos(encontrarInterseccionejey(despeje.get(i)),despeje)){ coor.add(encontrarInterseccionejey(despeje.get(i))); } } if(tieneSolucion(coor)){ System.out.println("Tiene solucion"); } else{ System.out.println("No tiene solucion"); } } public boolean seleccionarPuntosValidos(Coordenada validar,ArrayList<Restriccion> rest){ boolean entra=false; for (int i = 0; i < rest.size(); i++) { if(rest.get(i).variable.get(2).getValor()==1){ if(validar.getY()<=(rest.get(i).variable.get(0).getValor()*validar.getX()+rest.get(i).variable.get(1).getValor()) && validar.getY()>=0 && validar.getX()>=0){ entra=true; }else{ break; } }else if(rest.get(i).variable.get(2).getValor()==2){ if(validar.getY()>=(rest.get(i).variable.get(0).getValor()*validar.getX()+rest.get(i).variable.get(1).getValor())&& validar.getY()>=0 && validar.getX()>=0){ entra=true; }else{ break; } }else if(rest.get(i).variable.get(2).getValor()==3){ if(validar.getY()==(rest.get(i).variable.get(0).getValor()*validar.getX()+rest.get(i).variable.get(1).getValor())&& validar.getY()>=0 && validar.getX()>=0){ entra=true; }else{ break; } } } return entra; } public static void main(String args[]) { /* Set the Nimbus look and feel */ //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) "> /* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel. 
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        // Prefer the Nimbus look and feel when installed; any failure falls back
        // to the platform default and is only logged.
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException ex) {
            java.util.logging.Logger.getLogger(Interfaz.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (InstantiationException ex) {
            java.util.logging.Logger.getLogger(Interfaz.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (IllegalAccessException ex) {
            java.util.logging.Logger.getLogger(Interfaz.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        } catch (javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(Interfaz.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        // Swing components must be created/shown on the Event Dispatch Thread.
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new Interfaz().setVisible(true);
            }
        });
    }

    // GUI-builder managed component fields; edited only through the NetBeans form editor.
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JRadioButton Maximizar;
    private javax.swing.JButton botonAdd;
    private javax.swing.JButton botonAdd1;
    private javax.swing.JButton botonDelete;
    private javax.swing.JButton botonDelete1;
    private javax.swing.JTable datos;
    private javax.swing.JTable datos1;
    private javax.swing.ButtonGroup grupo;
    private javax.swing.JButton jButton1;
    private javax.swing.JButton jButton2;
    private javax.swing.JButton jButton3;
    private javax.swing.JButton jButton4;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel11;
    private javax.swing.JLabel jLabel16;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JLabel jLabel3;
    private javax.swing.JLabel jLabel4;
    private javax.swing.JLabel jLabel5;
    private javax.swing.JLabel jLabel6;
    private javax.swing.JLabel jLabel7;
    private javax.swing.JLabel jLabel8;
    private javax.swing.JLabel jLabel9;
    private javax.swing.JList jList1;
    private javax.swing.JMenu jMenu1;
    private javax.swing.JMenu jMenu2;
    private javax.swing.JMenuBar jMenuBar1;
    private javax.swing.JMenuItem jMenuItem1;
    private javax.swing.JMenuItem jMenuItem2;
    private javax.swing.JPanel jPanel1;
    private javax.swing.JPanel jPanel2;
    private javax.swing.JPanel jPanel3;
    private javax.swing.JPanel jPanel4;
    private javax.swing.JPanel jPanel5;
    private javax.swing.JPanel jPanel7;
    private javax.swing.JRadioButton jRadioButton2;
    private javax.swing.JScrollBar jScrollBar2;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JScrollPane jScrollPane2;
    private javax.swing.JScrollPane jScrollPane3;
    private javax.swing.JScrollPane jScrollPane4;
    private javax.swing.JScrollPane jScrollPane5;
    private javax.swing.JSeparator jSeparator1;
    private javax.swing.JSeparator jSeparator2;
    private javax.swing.JSeparator jSeparator3;
    private javax.swing.JSeparator jSeparator4;
    private javax.swing.JSeparator jSeparator5;
    private javax.swing.JSeparator jSeparator6;
    private javax.swing.JTextArea jTextArea1;
    private javax.swing.JTextField jTextField1;
    private javax.swing.JPanel panel;
    private javax.swing.JTable tablaRest;
    // End of variables declaration//GEN-END:variables
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.flume.test.agent; import org.apache.commons.io.FileUtils; import org.apache.flume.test.util.StagedInstall; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.alias.CredentialShell; import org.apache.hadoop.util.ToolRunner; import org.junit.After; import org.junit.Before; import org.junit.ClassRule; import org.junit.Test; import org.junit.contrib.java.lang.system.EnvironmentVariables; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Objects; import java.util.Properties; import java.util.Scanner; import java.util.Set; import java.util.concurrent.TimeUnit; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; public class TestConfig { private static final Logger LOGGER = LoggerFactory.getLogger(TestConfig.class); @ClassRule public static final EnvironmentVariables environmentVariables = new EnvironmentVariables(); private Properties agentProps; private Map<String, String> 
agentEnv; private Map<String, String> agentOptions; private File sinkOutputDir1; private File sinkOutputDir2; private File sinkOutputDir3; private File hadoopCredStore; @Before public void setup() throws Exception { File agentDir = StagedInstall.getInstance().getStageDir(); LOGGER.debug("Using agent stage dir: {}", agentDir); File testDir = new File(agentDir, TestConfig.class.getName()); if (testDir.exists()) { FileUtils.deleteDirectory(testDir); } assertTrue(testDir.mkdirs()); agentProps = new Properties(); agentEnv = new HashMap<>(); agentOptions = new HashMap<>(); agentOptions.put("-C", getAdditionalClassPath()); // Create the rest of the properties file agentProps.put("agent.sources.seq-01.type", "seq"); agentProps.put("agent.sources.seq-01.totalEvents", "100"); agentProps.put("agent.sources.seq-01.channels", "mem-01 mem-02 mem-03"); agentProps.put("agent.channels.mem-01.type", "MEMORY"); agentProps.put("agent.channels.mem-01.capacity", String.valueOf(100000)); agentProps.put("agent.channels.mem-02.type", "MEMORY"); agentProps.put("agent.channels.mem-02.capacity", String.valueOf(100000)); agentProps.put("agent.channels.mem-03.type", "MEMORY"); agentProps.put("agent.channels.mem-04.capacity", String.valueOf(100000)); sinkOutputDir1 = new File(testDir, "out1"); assertTrue("Unable to create sink output dir: " + sinkOutputDir1.getPath(), sinkOutputDir1.mkdir()); sinkOutputDir2 = new File(testDir, "out2"); assertTrue("Unable to create sink output dir: " + sinkOutputDir2.getPath(), sinkOutputDir2.mkdir()); sinkOutputDir3 = new File(testDir, "out3"); assertTrue("Unable to create sink output dir: " + sinkOutputDir3.getPath(), sinkOutputDir3.mkdir()); environmentVariables.set("HADOOP_CREDSTORE_PASSWORD", "envSecret"); agentEnv.put("dirname_env", sinkOutputDir1.getAbsolutePath()); agentEnv.put("HADOOP_CREDSTORE_PASSWORD", "envSecret"); hadoopCredStore = new File(testDir, "credstore.jceks"); String providerPath = "jceks://file/" + hadoopCredStore.getAbsolutePath(); 
ToolRunner.run( new Configuration(), new CredentialShell(), ("create dirname_hadoop -value " + sinkOutputDir3.getAbsolutePath() + " -provider " + providerPath).split(" ")); agentProps.put("agent.sinks.roll-01.channel", "mem-01"); agentProps.put("agent.sinks.roll-01.type", "FILE_ROLL"); agentProps.put("agent.sinks.roll-01.sink.directory", "${filter-01[\"dirname_env\"]}"); agentProps.put("agent.sinks.roll-01.sink.rollInterval", "0"); agentProps.put("agent.sinks.roll-02.channel", "mem-02"); agentProps.put("agent.sinks.roll-02.type", "FILE_ROLL"); agentProps.put("agent.sinks.roll-02.sink.directory", sinkOutputDir2.getParentFile().getAbsolutePath() + "/${filter-02['out2']}"); agentProps.put("agent.sinks.roll-02.sink.rollInterval", "0"); agentProps.put("agent.sinks.roll-03.channel", "mem-03"); agentProps.put("agent.sinks.roll-03.type", "FILE_ROLL"); agentProps.put("agent.sinks.roll-03.sink.directory", "${filter-03[dirname_hadoop]}"); agentProps.put("agent.sinks.roll-03.sink.rollInterval", "0"); agentProps.put("agent.configfilters.filter-01.type", "env"); agentProps.put("agent.configfilters.filter-02.type", "external"); agentProps.put("agent.configfilters.filter-02.command", "echo"); agentProps.put("agent.configfilters.filter-03.type", "hadoop"); agentProps.put("agent.configfilters.filter-03.credential.provider.path", providerPath); agentProps.put("agent.sources", "seq-01"); agentProps.put("agent.channels", "mem-01 mem-02 mem-03"); agentProps.put("agent.sinks", "roll-01 roll-02 roll-03"); agentProps.put("agent.configfilters", "filter-01 filter-02 filter-03"); } private String getAdditionalClassPath() throws Exception { URL resource = this.getClass().getClassLoader().getResource("classpath.txt"); Path path = Paths.get(Objects.requireNonNull(resource).getPath()); return Files.readAllLines(path).stream().findFirst().orElse(""); } @After public void teardown() throws Exception { StagedInstall.getInstance().stopAgent(); } private void validateSeenEvents(File outDir, int 
outFiles, int events) throws IOException { File[] sinkOutputDirChildren = outDir.listFiles(); assertEquals("Unexpected number of files in output dir", outFiles, sinkOutputDirChildren.length); Set<String> seenEvents = new HashSet<>(); for (File outFile : sinkOutputDirChildren) { Scanner scanner = new Scanner(outFile); while (scanner.hasNext()) { seenEvents.add(scanner.nextLine()); } } for (int event = 0; event < events; event++) { assertTrue( "Missing event: {" + event + "}", seenEvents.contains(String.valueOf(event)) ); } } @Test public void testConfigReplacement() throws Exception { LOGGER.debug("testConfigReplacement() started."); StagedInstall.getInstance().startAgent("agent", agentProps, agentEnv, agentOptions); TimeUnit.SECONDS.sleep(10); // Wait for sources and sink to process files // Ensure we received all events. validateSeenEvents(sinkOutputDir1, 1, 100); validateSeenEvents(sinkOutputDir2, 1, 100); validateSeenEvents(sinkOutputDir3, 1, 100); LOGGER.debug("Processed all the events!"); LOGGER.debug("testConfigReplacement() ended."); } @Test public void testConfigReload() throws Exception { LOGGER.debug("testConfigReplacement() started."); agentProps.put("agent.channels.mem-01.transactionCapacity", "10"); agentProps.put("agent.sinks.roll-01.sink.batchSize", "20"); StagedInstall.getInstance().startAgent("agent", agentProps, agentEnv, agentOptions); TimeUnit.SECONDS.sleep(10); // Wait for sources and sink to process files // This directory is empty due to misconfiguration validateSeenEvents(sinkOutputDir1, 0, 0); // These are well configured validateSeenEvents(sinkOutputDir2, 1, 100); validateSeenEvents(sinkOutputDir3, 1, 100); LOGGER.debug("Processed all the events!"); //repair the config agentProps.put("agent.channels.mem-01.transactionCapacity", "20"); StagedInstall.getInstance().reconfigure(agentProps); TimeUnit.SECONDS.sleep(40); // Wait for sources and sink to process files // Ensure we received all events. 
validateSeenEvents(sinkOutputDir1, 1, 100); LOGGER.debug("testConfigReplacement() ended."); } }
package org.opencb.opencga.catalog.managers; import org.opencb.datastore.core.ObjectMap; import org.opencb.datastore.core.QueryOptions; import org.opencb.datastore.core.QueryResult; import org.opencb.opencga.catalog.CatalogManager; import org.opencb.opencga.catalog.db.api.CatalogDBAdaptor; import org.opencb.opencga.catalog.exceptions.CatalogException; import org.opencb.opencga.catalog.utils.ParamUtils; import org.opencb.opencga.catalog.authentication.AuthenticationManager; import org.opencb.opencga.catalog.authorization.AuthorizationManager; import org.opencb.opencga.catalog.io.CatalogIOManagerFactory; import org.opencb.opencga.catalog.managers.api.IUserManager; import org.opencb.opencga.catalog.models.Session; import org.opencb.opencga.catalog.models.User; import org.opencb.opencga.catalog.exceptions.CatalogDBException; import org.opencb.opencga.catalog.exceptions.CatalogIOException; import org.opencb.opencga.core.common.MailUtils; import org.opencb.opencga.core.common.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.security.NoSuchAlgorithmException; import java.util.Arrays; import java.util.Properties; import java.util.regex.Pattern; /** * @author Jacobo Coll &lt;jacobo167@gmail.com&gt; */ public class UserManager extends AbstractManager implements IUserManager { protected final String creationUserPolicy; protected static Logger logger = LoggerFactory.getLogger(UserManager.class); protected static final String EMAIL_PATTERN = "^[_A-Za-z0-9-\\+]+(\\.[_A-Za-z0-9-]+)*@" + "[A-Za-z0-9-]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$"; protected static final Pattern emailPattern = Pattern.compile(EMAIL_PATTERN); public UserManager(AuthorizationManager authorizationManager, AuthenticationManager authenticationManager, CatalogDBAdaptor catalogDBAdaptor, CatalogIOManagerFactory ioManagerFactory, Properties catalogProperties) { super(authorizationManager, authenticationManager, catalogDBAdaptor, ioManagerFactory, 
catalogProperties); creationUserPolicy = catalogProperties.getProperty(CatalogManager.CATALOG_MANAGER_POLICY_CREATION_USER, "always"); } @Override public String getUserId(String sessionId) { return userDBAdaptor.getUserIdBySessionId(sessionId); } @Override public void changePassword(String userId, String oldPassword, String newPassword) throws CatalogException { ParamUtils.checkParameter(userId, "userId"); // checkParameter(sessionId, "sessionId"); ParamUtils.checkParameter(oldPassword, "oldPassword"); ParamUtils.checkParameter(newPassword, "newPassword"); // checkSessionId(userId, sessionId); //Only the user can change his own password userDBAdaptor.updateUserLastActivity(userId); authenticationManager.changePassword(userId, oldPassword, newPassword); } @Override public QueryResult<User> create(QueryOptions params, String sessionId) throws CatalogException { return create( params.getString("id"), params.getString("name"), params.getString("email"), params.getString("password"), params.getString("organization"), params,sessionId ); } @Override public QueryResult<User> create(String id, String name, String email, String password, String organization, QueryOptions options, String sessionId) throws CatalogException { ParamUtils.checkParameter(id, "id"); ParamUtils.checkParameter(password, "password"); ParamUtils.checkParameter(name, "name"); checkEmail(email); organization = organization != null ? organization : ""; User user = new User(id, name, email, password, organization, User.Role.USER, ""); switch (creationUserPolicy) { case "onlyAdmin": { String userId = getUserId(sessionId); if (!userId.isEmpty() && authorizationManager.getUserRole(userId).equals(User.Role.ADMIN)) { user.getAttributes().put("creatorUserId", userId); } else { throw new CatalogException("CreateUser Fail. 
Required Admin role"); } break; } case "anyLoggedUser": { ParamUtils.checkParameter(sessionId, "sessionId"); String userId = getUserId(sessionId); if (userId.isEmpty()) { throw new CatalogException("CreateUser Fail. Required existing account"); } user.getAttributes().put("creatorUserId", userId); break; } case "always": default: break; } try { catalogIOManagerFactory.getDefault().createUser(user.getId()); return userDBAdaptor.insertUser(user, options); } catch (CatalogIOException | CatalogDBException e) { if (!userDBAdaptor.userExists(user.getId())) { logger.error("ERROR! DELETING USER! " + user.getId()); catalogIOManagerFactory.getDefault().deleteUser(user.getId()); } throw e; } } @Override public QueryResult<User> read(String userId, QueryOptions options, String sessionId) throws CatalogException { return read(userId, null, options, sessionId); } @Override public QueryResult<User> read(String userId, String lastActivity, QueryOptions options, String sessionId) throws CatalogException { ParamUtils.checkParameter(userId, "userId"); ParamUtils.checkParameter(sessionId, "sessionId"); checkSessionId(userId, sessionId); options = ParamUtils.defaultObject(options, QueryOptions::new); if (!options.containsKey("include") && !options.containsKey("exclude")) { options.put("exclude", Arrays.asList("password", "sessions")); } // if(options.containsKey("exclude")) { // options.getListAs("exclude", String.class).add("sessions"); // } //FIXME: Should other users get access to other user information? (If so, then filter projects) //FIXME: Should setPassword(null)?? 
QueryResult<User> user = userDBAdaptor.getUser(userId, options, lastActivity); return user; } @Override public QueryResult<User> readAll(QueryOptions query, QueryOptions options, String sessionId) throws CatalogException { return null; } /** * Modify some params from the user profile: * name * email * organization * attributes * configs * * @throws CatalogException */ @Override public QueryResult<User> update(String userId, ObjectMap parameters, QueryOptions options, String sessionId) throws CatalogException { ParamUtils.checkParameter(userId, "userId"); ParamUtils.checkParameter(sessionId, "sessionId"); ParamUtils.checkObj(parameters, "parameters"); checkSessionId(userId, sessionId); for (String s : parameters.keySet()) { if (!s.matches("name|email|organization|attributes|configs")) { throw new CatalogDBException("Parameter '" + s + "' can't be changed"); } } if (parameters.containsKey("email")) { checkEmail(parameters.getString("email")); } userDBAdaptor.updateUserLastActivity(userId); return userDBAdaptor.modifyUser(userId, parameters); } @Override public QueryResult<User> delete(String userId, QueryOptions options, String sessionId) throws CatalogException { QueryResult<User> user = read(userId, options, sessionId); ParamUtils.checkParameter(userId, "userId"); ParamUtils.checkParameter(sessionId, "sessionId"); String userIdBySessionId = userDBAdaptor.getUserIdBySessionId(sessionId); if (userIdBySessionId.equals(userId) || authorizationManager.getUserRole(userIdBySessionId).equals(User.Role.ADMIN)) { try { catalogIOManagerFactory.getDefault().deleteUser(userId); } catch (CatalogIOException e) { e.printStackTrace(); } userDBAdaptor.deleteUser(userId); } user.setId("deleteUser"); return user; } @Override public QueryResult resetPassword(String userId, String email) throws CatalogException { ParamUtils.checkParameter(userId, "userId"); ParamUtils.checkParameter(email, "email"); userDBAdaptor.updateUserLastActivity(userId); String newPassword = 
StringUtils.randomString(6); String newCryptPass; try { newCryptPass = StringUtils.sha1(newPassword); } catch (NoSuchAlgorithmException e) { throw new CatalogDBException("could not encode password"); } QueryResult qr = userDBAdaptor.resetPassword(userId, email, newCryptPass); String mailUser = catalogProperties.getProperty(CatalogManager.CATALOG_MAIL_USER); String mailPassword = catalogProperties.getProperty(CatalogManager.CATALOG_MAIL_PASSWORD); String mailHost = catalogProperties.getProperty(CatalogManager.CATALOG_MAIL_HOST); String mailPort = catalogProperties.getProperty(CatalogManager.CATALOG_MAIL_PORT); MailUtils.sendResetPasswordMail(email, newPassword, mailUser, mailPassword, mailHost, mailPort); return qr; } @Override public QueryResult<ObjectMap> loginAsAnonymous(String sessionIp) throws CatalogException, IOException { ParamUtils.checkParameter(sessionIp, "sessionIp"); Session session = new Session(sessionIp); String userId = "anonymous_" + session.getId(); // TODO sessionID should be created here catalogIOManagerFactory.getDefault().createAnonymousUser(userId); try { return userDBAdaptor.loginAsAnonymous(session); } catch (CatalogDBException e) { catalogIOManagerFactory.getDefault().deleteUser(userId); throw e; } } @Override public QueryResult<ObjectMap> login(String userId, String password, String sessionIp) throws CatalogException, IOException { ParamUtils.checkParameter(userId, "userId"); ParamUtils.checkParameter(password, "password"); ParamUtils.checkParameter(sessionIp, "sessionIp"); Session session = new Session(sessionIp); return userDBAdaptor.login(userId, password, session); } @Override public QueryResult logout(String userId, String sessionId) throws CatalogException { ParamUtils.checkParameter(userId, "userId"); ParamUtils.checkParameter(sessionId, "sessionId"); checkSessionId(userId, sessionId); switch (authorizationManager.getUserRole(userId)) { default: return userDBAdaptor.logout(userId, sessionId); case ANONYMOUS: return 
logoutAnonymous(sessionId); } } @Override public QueryResult logoutAnonymous(String sessionId) throws CatalogException { ParamUtils.checkParameter(sessionId, "sessionId"); String userId = getUserId(sessionId); ParamUtils.checkParameter(userId, "userId"); checkSessionId(userId, sessionId); logger.info("logout anonymous user. userId: " + userId + " sesionId: " + sessionId); catalogIOManagerFactory.getDefault().deleteAnonymousUser(userId); return userDBAdaptor.logoutAnonymous(sessionId); } private void checkSessionId(String userId, String sessionId) throws CatalogException { String userIdBySessionId = userDBAdaptor.getUserIdBySessionId(sessionId); if (!userIdBySessionId.equals(userId)) { throw new CatalogException("Invalid sessionId for user: " + userId); } } static void checkEmail(String email) throws CatalogException { if (email == null || !emailPattern.matcher(email).matches()) { throw new CatalogException("email not valid"); } } }
/* * Copyright 2012 LinkedIn, Inc * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jobtype; import static azkaban.security.commons.SecurityUtils.MAPREDUCE_JOB_CREDENTIALS_BINARY; import static azkaban.utils.StringUtils.DOUBLE_QUOTE; import static azkaban.utils.StringUtils.SINGLE_QUOTE; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVEAUXJARS; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTORECONNECTURLKEY; import static org.apache.hadoop.security.UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION; import java.io.File; import java.io.IOException; import java.security.PrivilegedExceptionAction; import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.Properties; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.cli.CliDriver; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.cli.OptionsProcessor; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.security.UserGroupInformation; import org.apache.log4j.Logger; import azkaban.jobtype.hiveutils.HiveQueryExecutionException; import azkaban.utils.Props; public class HadoopSecureHiveWrapper { private static final String DOUBLE_QUOTE_STRING = Character .toString(DOUBLE_QUOTE); private static final String SINGLE_QUOTE_STRING = Character .toString(SINGLE_QUOTE); private static final 
Logger logger = Logger.getRootLogger(); private static CliSessionState ss; private static String hiveScript; public static void main(final String[] args) throws Exception { Properties jobProps = HadoopSecureWrapperUtils.loadAzkabanProps(); HadoopConfigurationInjector.injectResources(new Props(null, jobProps)); hiveScript = jobProps.getProperty("hive.script"); if (HadoopSecureWrapperUtils.shouldProxy(jobProps)) { String tokenFile = System.getenv(HADOOP_TOKEN_FILE_LOCATION); UserGroupInformation proxyUser = HadoopSecureWrapperUtils.setupProxyUser(jobProps, tokenFile, logger); proxyUser.doAs(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { runHive(args); return null; } }); } else { runHive(args); } } public static void runHive(String[] args) throws Exception { final HiveConf hiveConf = new HiveConf(SessionState.class); populateHiveConf(hiveConf, args); if (System.getenv(HADOOP_TOKEN_FILE_LOCATION) != null) { System.out.println("Setting hadoop tokens ... 
"); hiveConf.set(MAPREDUCE_JOB_CREDENTIALS_BINARY, System.getenv(HADOOP_TOKEN_FILE_LOCATION)); System.setProperty(MAPREDUCE_JOB_CREDENTIALS_BINARY, System.getenv(HADOOP_TOKEN_FILE_LOCATION)); } logger.info("HiveConf = " + hiveConf); logger.info("According to the conf, we're talking to the Hive hosted at: " + HiveConf.getVar(hiveConf, METASTORECONNECTURLKEY)); String orig = HiveConf.getVar(hiveConf, HIVEAUXJARS); String expanded = expandHiveAuxJarsPath(orig); if (orig == null || orig.equals(expanded)) { logger.info("Hive aux jars variable not expanded"); } else { logger.info("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]"); HiveConf.setVar(hiveConf, HIVEAUXJARS, expanded); } OptionsProcessor op = new OptionsProcessor(); if (!op.process_stage1(new String[] {})) { throw new IllegalArgumentException("Can't process empty args?!?"); } // hadoop-20 and above - we need to augment classpath using hiveconf // components // see also: code in ExecDriver.java ClassLoader loader = hiveConf.getClassLoader(); String auxJars = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVEAUXJARS); logger.info("Got auxJars = " + auxJars); if (StringUtils.isNotBlank(auxJars)) { loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ",")); } hiveConf.setClassLoader(loader); Thread.currentThread().setContextClassLoader(loader); // See https://issues.apache.org/jira/browse/HIVE-1411 hiveConf.set("datanucleus.plugin.pluginRegistryBundleCheck", "LOG"); // to force hive to use the jobclient to submit the job, never using // HADOOPBIN (to do localmode) hiveConf.setBoolean("hive.exec.mode.local.auto", false); ss = new CliSessionState(hiveConf); SessionState.start(ss); logger.info("SessionState = " + ss); ss.out = System.out; ss.err = System.err; ss.in = System.in; if (!op.process_stage2(ss)) { throw new IllegalArgumentException( "Can't process arguments from session state"); } logger.info("Executing query: " + hiveScript); CliDriver cli = new CliDriver(); Map<String, 
String> hiveVarMap = getHiveVarMap(args); logger.info("hiveVarMap: " + hiveVarMap); if (!hiveVarMap.isEmpty()) { cli.setHiveVariables(getHiveVarMap(args)); } int returnCode = cli.processFile(hiveScript); if (returnCode != 0) { logger.warn("Got exception " + returnCode + " from line: " + hiveScript); throw new HiveQueryExecutionException(returnCode, hiveScript); } } /** * Normally hive.aux.jars.path is expanded from just being a path to the full * list of files in the directory by the hive shell script. Since we normally * won't be running from the script, it's up to us to do that work here. We * use a heuristic that if there is no occurrence of ".jar" in the original, * it needs expansion. Otherwise it's already been done for us. * * Also, surround the files with uri niceities. */ static String expandHiveAuxJarsPath(String original) throws IOException { if (original == null || original.contains(".jar")) return original; File[] files = new File(original).listFiles(); if (files == null || files.length == 0) { logger .info("No files in to expand in aux jar path. 
Returning original parameter"); return original; } return filesToURIString(files); } static String filesToURIString(File[] files) throws IOException { StringBuffer sb = new StringBuffer(); for (int i = 0; i < files.length; i++) { sb.append("file:///").append(files[i].getCanonicalPath()); if (i != files.length - 1) sb.append(","); } return sb.toString(); } /** * Extract hiveconf from command line arguments and populate them into * HiveConf * * An example: -hiveconf 'zipcode=10', -hiveconf hive.root.logger=INFO,console * * @param hiveConf * @param args */ private static void populateHiveConf(HiveConf hiveConf, String[] args) { if (args == null) { return; } int index = 0; for (; index < args.length; index++) { if ("-hiveconf".equals(args[index])) { String hiveConfParam = stripSingleDoubleQuote(args[++index]); String[] tokens = hiveConfParam.split("="); if (tokens.length == 2) { String name = tokens[0]; String value = tokens[1]; logger.info("Setting: " + name + "=" + value + " to hiveConf"); hiveConf.set(name, value); } else { logger.warn("Invalid hiveconf: " + hiveConfParam); } } } } private static Map<String, String> getHiveVarMap(String[] args) { if (args == null) { return Collections.emptyMap(); } Map<String, String> hiveVarMap = new HashMap<String, String>(); int index = 0; for (; index < args.length; index++) { if ("-hivevar".equals(args[index])) { String hiveVarParam = stripSingleDoubleQuote(args[++index]); String[] tokens = hiveVarParam.split("="); if (tokens.length == 2) { String name = tokens[0]; String value = tokens[1]; logger.info("Setting hivevar: " + name + "=" + value); hiveVarMap.put(name, value); } else { logger.warn("Invalid hivevar: " + hiveVarParam); } } } return hiveVarMap; } /** * Strip single quote or double quote at either end of the string * * @param input * @return string with w/o leading or trailing single or double quote */ private static String stripSingleDoubleQuote(String input) { if (StringUtils.isEmpty(input)) { return input; } if 
(input.startsWith(SINGLE_QUOTE_STRING) || input.startsWith(DOUBLE_QUOTE_STRING)) { input = input.substring(1); } if (input.endsWith(SINGLE_QUOTE_STRING) || input.endsWith(DOUBLE_QUOTE_STRING)) { input = input.substring(0, input.length() - 1); } return input; } }
/* Generic definitions */ /* Assertions (useful to generate conditional code) */ /* Current type and class (and size, if applicable) */ /* Value methods */ /* Interfaces (keys) */ /* Interfaces (values) */ /* Abstract implementations (keys) */ /* Abstract implementations (values) */ /* Static containers (keys) */ /* Static containers (values) */ /* Implementations */ /* Synchronized wrappers */ /* Unmodifiable wrappers */ /* Other wrappers */ /* Methods (keys) */ /* Methods (values) */ /* Methods (keys/values) */ /* Methods that have special names depending on keys (but the special names depend on values) */ /* Equality */ /* Object/Reference-only definitions (keys) */ /* Object/Reference-only definitions (values) */ /* * Copyright (C) 2002-2013 Sebastiano Vigna * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package it.unimi.dsi.fastutil.objects; import it.unimi.dsi.fastutil.Hash; import it.unimi.dsi.fastutil.HashCommon; import it.unimi.dsi.fastutil.booleans.BooleanArrays; import static it.unimi.dsi.fastutil.HashCommon.arraySize; import static it.unimi.dsi.fastutil.HashCommon.maxFill; import java.util.Collection; import java.util.NoSuchElementException; /** A type-specific hash set with with a fast, small-footprint implementation. * * <P>Instances of this class use a hash table to represent a set. The table is * enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made * smaller (even on a {@link #clear()}). 
A family of {@linkplain #trim() trimming methods} lets you control the size of the table; this is particularly useful
 * if you reuse instances of this class.
 *
 * <p><strong>Warning:</strong> The implementation of this class has significantly
 * changed in <code>fastutil</code> 6.1.0. Please read the
 * comments about this issue in the section &ldquo;Faster Hash Tables&rdquo; of the <a href="../../../../../overview-summary.html">overview</a>.
 *
 * <p>Membership is decided by reference equality (<code>==</code>) and
 * {@link System#identityHashCode(Object)}, never by <code>equals()</code>/<code>hashCode()</code>.
 * The table is a power-of-two sized, linear-probing open-addressing array; deletions are
 * handled by shifting entries back ({@link #shiftKeys(int)}) rather than with tombstones.
 *
 * @see Hash
 * @see HashCommon
 */
public class ReferenceOpenHashSet <K> extends AbstractReferenceSet <K> implements java.io.Serializable, Cloneable, Hash {

    private static final long serialVersionUID = 0L;

    /** Enables internal consistency checks after each mutation (development aid; off in release). */
    private static final boolean ASSERTS = false;

    /** The array of keys. */
    protected transient K key[];

    /** The array telling whether a position is used. */
    protected transient boolean used[];

    /** The acceptable load factor. */
    protected final float f;

    /** The current table size (always a power of two). */
    protected transient int n;

    /** Threshold after which we rehash. It must be the table size times {@link #f}. */
    protected transient int maxFill;

    /** The mask for wrapping a position counter (always {@code n - 1}). */
    protected transient int mask;

    /** Number of entries in the set. */
    protected int size;

    /** Creates a new hash set.
     *
     * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
     *
     * @param expected the expected number of elements in the hash set.
     * @param f the load factor.
     */
    @SuppressWarnings("unchecked")
    public ReferenceOpenHashSet( final int expected, final float f ) {
        if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
        if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" );
        this.f = f;
        n = arraySize( expected, f );
        mask = n - 1;
        maxFill = maxFill( n, f );
        key = (K[]) new Object[ n ];
        used = new boolean[ n ];
    }

    /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
     *
     * @param expected the expected number of elements in the hash set.
     */
    public ReferenceOpenHashSet( final int expected ) {
        this( expected, DEFAULT_LOAD_FACTOR );
    }

    /** Creates a new hash set with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} elements
     * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor. */
    public ReferenceOpenHashSet() {
        this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR );
    }

    /** Creates a new hash set copying a given collection.
     *
     * @param c a {@link Collection} to be copied into the new hash set.
     * @param f the load factor.
     */
    public ReferenceOpenHashSet( final Collection<? extends K> c, final float f ) {
        this( c.size(), f );
        addAll( c );
    }

    /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
     * copying a given collection.
     *
     * @param c a {@link Collection} to be copied into the new hash set.
     */
    public ReferenceOpenHashSet( final Collection<? extends K> c ) {
        this( c, DEFAULT_LOAD_FACTOR );
    }

    /** Creates a new hash set copying a given type-specific collection.
     *
     * @param c a type-specific collection to be copied into the new hash set.
     * @param f the load factor.
     */
    public ReferenceOpenHashSet( final ReferenceCollection <? extends K> c, final float f ) {
        this( c.size(), f );
        addAll( c );
    }

    /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
     * copying a given type-specific collection.
     *
     * @param c a type-specific collection to be copied into the new hash set.
     */
    public ReferenceOpenHashSet( final ReferenceCollection <? extends K> c ) {
        this( c, DEFAULT_LOAD_FACTOR );
    }

    /** Creates a new hash set using elements provided by a type-specific iterator.
     *
     * @param i a type-specific iterator whose elements will fill the set.
     * @param f the load factor.
     */
    public ReferenceOpenHashSet( final ObjectIterator <K> i, final float f ) {
        this( DEFAULT_INITIAL_SIZE, f );
        while( i.hasNext() ) add( i.next() );
    }

    /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator.
     *
     * @param i a type-specific iterator whose elements will fill the set.
     */
    public ReferenceOpenHashSet( final ObjectIterator <K> i ) {
        this( i, DEFAULT_LOAD_FACTOR );
    }

    /** Creates a new hash set and fills it with the elements of a given array.
     *
     * @param a an array whose elements will be used to fill the set.
     * @param offset the first element to use.
     * @param length the number of elements to use.
     * @param f the load factor.
     */
    public ReferenceOpenHashSet( final K[] a, final int offset, final int length, final float f ) {
        this( length < 0 ? 0 : length, f );
        ObjectArrays.ensureOffsetLength( a, offset, length );
        for( int i = 0; i < length; i++ ) add( a[ offset + i ] );
    }

    /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array.
     *
     * @param a an array whose elements will be used to fill the set.
     * @param offset the first element to use.
     * @param length the number of elements to use.
     */
    public ReferenceOpenHashSet( final K[] a, final int offset, final int length ) {
        this( a, offset, length, DEFAULT_LOAD_FACTOR );
    }

    /** Creates a new hash set copying the elements of an array.
     *
     * @param a an array to be copied into the new hash set.
     * @param f the load factor.
     */
    public ReferenceOpenHashSet( final K[] a, final float f ) {
        this( a, 0, a.length, f );
    }

    /** Creates a new hash set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
     * copying the elements of an array.
     *
     * @param a an array to be copied into the new hash set.
     */
    public ReferenceOpenHashSet( final K[] a ) {
        this( a, DEFAULT_LOAD_FACTOR );
    }

    /*
     * The following methods implement some basic building blocks used by
     * all accessors. They are (and should be maintained) identical to those used in HashMap.drv.
     */

    public boolean add( final K k ) {
        // The starting point: null keys hash to a fixed slot (0x87fcd5c), all other keys by
        // MurmurHash3-mixed identity hash code (reference semantics, not equals()).
        int pos = ( (k) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(k) ) ) & mask;
        // There's always an unused entry (load factor < 1), so linear probing terminates.
        while( used[ pos ] ) {
            if ( ( (key[ pos ]) == (k) ) ) return false;
            pos = ( pos + 1 ) & mask;
        }
        used[ pos ] = true;
        key[ pos ] = k;
        // Grow (doubling via arraySize) once the fill threshold is reached.
        if ( ++size >= maxFill ) rehash( arraySize( size + 1, f ) );
        if ( ASSERTS ) checkTable();
        return true;
    }

    /** Shifts left entries with the specified hash code, starting at the specified position,
     * and empties the resulting free entry.
     *
     * @param pos a starting position.
     * @return the position cleared by the shifting process.
     */
    protected final int shiftKeys( int pos ) {
        // Shift entries with the same hash.
        int last, slot;
        for(;;) {
            pos = ( ( last = pos ) + 1 ) & mask;
            while( used[ pos ] ) {
                slot = ( (key[ pos ]) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(key[ pos ]) ) ) & mask;
                // Wrap-around-aware test: stop when the entry at pos cannot be moved into the
                // hole at last without breaking its own probe sequence.
                if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
                pos = ( pos + 1 ) & mask;
            }
            if ( ! used[ pos ] ) break;
            key[ last ] = key[ pos ];
        }
        used[ last ] = false;
        key[ last ] = null; // allow GC of the removed reference
        return last;
    }

    @SuppressWarnings("unchecked")
    public boolean remove( final Object k ) {
        // The starting point (same hashing scheme as add()).
        int pos = ( (k) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            if ( ( (key[ pos ]) == (k) ) ) {
                size--;
                // Close the hole by back-shifting the probe chain instead of using tombstones.
                shiftKeys( pos );
                if ( ASSERTS ) checkTable();
                return true;
            }
            pos = ( pos + 1 ) & mask;
        }
        return false;
    }

    @SuppressWarnings("unchecked")
    public boolean contains( final Object k ) {
        // The starting point (same hashing scheme as add()).
        int pos = ( (k) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(k) ) ) & mask;
        // There's always an unused entry.
        while( used[ pos ] ) {
            if ( ( (key[ pos ]) == (k) ) ) return true;
            pos = ( pos + 1 ) & mask;
        }
        return false;
    }

    /* Removes all elements from this set.
     *
     * <P>To increase object reuse, this method does not change the table size.
     * If you want to reduce the table size, you must use {@link #trim()}.
     */
    public void clear() {
        if ( size == 0 ) return;
        size = 0;
        BooleanArrays.fill( used, false );
        // Null out keys so removed elements can be garbage collected.
        ObjectArrays.fill( key, null );
    }

    public int size() {
        return size;
    }

    public boolean isEmpty() {
        return size == 0;
    }

    /** A no-op for backward compatibility.
     *
     * @param growthFactor unused.
     * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
     */
    @Deprecated
    public void growthFactor( int growthFactor ) {}

    /** Gets the legacy growth factor constant. Kept only for backward compatibility: the value
     * returned is the historical default (16), while the actual growth strategy is a fixed
     * doubling of the table size.
     *
     * @return the legacy growth factor constant (16).
     * @see #growthFactor(int)
     * @deprecated Since <code>fastutil</code> 6.1.0, hash tables are doubled when they are too full.
     */
    @Deprecated
    public int growthFactor() {
        return 16;
    }

    /** An iterator over a hash set. Scans the table backwards from the top; entries displaced
     * past the scan position by concurrent {@link SetIterator#remove()} calls are parked in
     * {@link #wrapped} and enumerated after the scan. */
    private class SetIterator extends AbstractObjectIterator <K> {
        /** The index of the next entry to be returned, if positive or zero. If negative, the next entry to be returned, if any, is that of index -pos -2 from the {@link #wrapped} list. */
        int pos = ReferenceOpenHashSet.this.n;
        /** The index of the last entry that has been returned (more precisely, the value of {@link #pos}). It is -1 if either we did not return an entry yet, or the last returned entry has been removed. */
        int last = -1;
        /** A downward counter measuring how many entries must still be returned. */
        int c = size;
        /** A lazily allocated list containing elements that have wrapped around the table because of removals; such elements would not be enumerated (other elements would be usually enumerated twice in their place). */
        ReferenceArrayList <K> wrapped;

        {
            // Position pos on the topmost used slot; the scan proceeds downward from there.
            final boolean used[] = ReferenceOpenHashSet.this.used;
            if ( c != 0 ) while( ! used[ --pos ] );
        }

        public boolean hasNext() {
            return c != 0;
        }

        public K next() {
            if ( ! hasNext() ) throw new NoSuchElementException();
            c--;
            // We are just enumerating elements from the wrapped list.
            if ( pos < 0 ) return wrapped.get( - ( last = --pos ) - 2 );
            final K retVal = key[ last = pos ];
            if ( c != 0 ) {
                // Advance the backward scan to the next used slot.
                final boolean used[] = ReferenceOpenHashSet.this.used;
                while ( pos-- != 0 && !used[ pos ] );
                // When here pos < 0 there are no more elements to be enumerated by scanning, but wrapped might be nonempty.
            }
            return retVal;
        }

        /** Shifts left entries with the specified hash code, starting at the specified position,
         * and empties the resulting free entry. If any entry wraps around the table, instantiates
         * lazily {@link #wrapped} and stores the entry.
         *
         * @param pos a starting position.
         * @return the position cleared by the shifting process.
         */
        final int shiftKeys( int pos ) {
            // Shift entries with the same hash.
            int last, slot;
            for(;;) {
                pos = ( ( last = pos ) + 1 ) & mask;
                while( used[ pos ] ) {
                    slot = ( (key[ pos ]) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(key[ pos ]) ) ) & mask;
                    if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
                    pos = ( pos + 1 ) & mask;
                }
                if ( ! used[ pos ] ) break;
                if ( pos < last ) {
                    // Wrapped entry: it moves past the iterator's scan position, so remember it
                    // in wrapped or it would never be enumerated.
                    if ( wrapped == null ) wrapped = new ReferenceArrayList <K>();
                    wrapped.add( key[ pos ] );
                }
                key[ last ] = key[ pos ];
            }
            used[ last ] = false;
            key[ last ] = null;
            return last;
        }

        @SuppressWarnings("unchecked")
        public void remove() {
            if ( last == -1 ) throw new IllegalStateException();
            if ( pos < -1 ) {
                // We're removing wrapped entries: delegate to the set and blank the list slot.
                ReferenceOpenHashSet.this.remove( wrapped.set( - pos - 2, null ) );
                last = -1;
                return;
            }
            size--;
            // If the back-shift refilled the slot the iterator currently points at, that element
            // has not been returned yet: step the iterator back over it.
            if ( shiftKeys( last ) == pos && c > 0 ) {
                c++;
                next();
            }
            last = -1; // You can no longer remove this entry.
            if ( ASSERTS ) checkTable();
        }
    }

    public ObjectIterator <K> iterator() {
        return new SetIterator();
    }

    /** A no-op for backward compatibility. The kind of tables implemented by
     * this class never need rehashing.
     *
     * <P>If you need to reduce the table size to fit exactly
     * this set, use {@link #trim()}.
     *
     * @return true.
     * @see #trim()
     * @deprecated A no-op.
     */
    @Deprecated
    public boolean rehash() {
        return true;
    }

    /** Rehashes this set, making the table as small as possible.
     *
     * <P>This method rehashes the table to the smallest size satisfying the
     * load factor. It can be used when the set will not be changed anymore, so
     * to optimize access speed and size.
     *
     * <P>If the table size is already the minimum possible, this method
     * does nothing.
     *
     * @return true if there was enough memory to trim the set.
     * @see #trim(int)
     */
    public boolean trim() {
        final int l = arraySize( size, f );
        if ( l >= n ) return true;
        try {
            rehash( l );
        }
        catch(OutOfMemoryError cantDoIt) {
            return false;
        }
        return true;
    }

    /** Rehashes this set if the table is too large.
     *
     * <P>Let <var>N</var> be the smallest table size that can hold
     * <code>max(n,{@link #size()})</code> entries, still satisfying the load factor. If the current
     * table size is smaller than or equal to <var>N</var>, this method does
     * nothing. Otherwise, it rehashes this set in a table of size
     * <var>N</var>.
     *
     * <P>This method is useful when reusing sets. {@linkplain #clear() Clearing a
     * set} leaves the table size untouched. If you are reusing a set
     * many times, you can call this method with a typical
     * size to avoid keeping around a very large table just
     * because of a few large transient sets.
     *
     * @param n the threshold for the trimming.
     * @return true if there was enough memory to trim the set.
     * @see #trim()
     */
    public boolean trim( final int n ) {
        final int l = HashCommon.nextPowerOfTwo( (int)Math.ceil( n / f ) );
        if ( this.n <= l ) return true;
        try {
            rehash( l );
        }
        catch( OutOfMemoryError cantDoIt ) {
            return false;
        }
        return true;
    }

    /** Resizes the set.
     *
     * <P>This method implements the basic rehashing strategy, and may be
     * overriden by subclasses implementing different rehashing strategies (e.g.,
     * disk-based rehashing). However, you should not override this method
     * unless you understand the internal workings of this class.
     *
     * @param newN the new size
     */
    @SuppressWarnings("unchecked")
    protected void rehash( final int newN ) {
        int i = 0, pos;
        final boolean used[] = this.used;
        K k;
        final K key[] = this.key;
        final int newMask = newN - 1;
        final K newKey[] = (K[]) new Object[ newN ];
        final boolean newUsed[] = new boolean[ newN ];
        // Reinsert every live entry into the new table with fresh linear probing.
        for( int j = size; j-- != 0; ) {
            while( ! used[ i ] ) i++;
            k = key[ i ];
            pos = ( (k) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(k) ) ) & newMask;
            while ( newUsed[ pos ] ) pos = ( pos + 1 ) & newMask;
            newUsed[ pos ] = true;
            newKey[ pos ] = k;
            i++;
        }
        n = newN;
        mask = newMask;
        maxFill = maxFill( n, f );
        this.key = newKey;
        this.used = newUsed;
    }

    /** Returns a deep copy of this set.
     *
     * <P>This method performs a deep copy of this hash set; the data stored in the
     * set, however, is not cloned. Note that this makes a difference only for object keys.
     *
     * @return a deep copy of this set.
     */
    @SuppressWarnings("unchecked")
    public ReferenceOpenHashSet <K> clone() {
        ReferenceOpenHashSet <K> c;
        try {
            c = (ReferenceOpenHashSet <K>)super.clone();
        }
        catch(CloneNotSupportedException cantHappen) {
            throw new InternalError();
        }
        c.key = key.clone();
        c.used = used.clone();
        return c;
    }

    /** Returns a hash code for this set.
     *
     * This method overrides the generic method provided by the superclass.
     * Since <code>equals()</code> is not overriden, it is important
     * that the value returned by this method is the same value as
     * the one returned by the overriden method.
     *
     * @return a hash code for this set.
     */
    public int hashCode() {
        int h = 0, i = 0, j = size;
        while( j-- != 0 ) {
            while( ! used[ i ] ) i++;
            // NOTE(review): a self-reference is skipped here, mirroring the generated
            // object-based variant; presumably to keep hashCode stable for self-containing
            // sets — confirm against the fastutil driver before relying on it.
            if ( this != key[ i ] ) h += ( (key[ i ]) == null ? 0 : System.identityHashCode(key[ i ]) );
            i++;
        }
        return h;
    }

    // Serializes the load factor and size via defaultWriteObject, then the raw elements;
    // the table itself is transient and rebuilt on read.
    private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
        final ObjectIterator <K> i = iterator();
        s.defaultWriteObject();
        for( int j = size; j-- != 0; ) s.writeObject( i.next() );
    }

    // Rebuilds the probe table from the serialized elements. NOTE(review): identity hash codes
    // are JVM-specific, so positions necessarily differ from the writing JVM — this is why the
    // table is reconstructed rather than serialized.
    @SuppressWarnings("unchecked")
    private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
        s.defaultReadObject();
        n = arraySize( size, f );
        maxFill = maxFill( n, f );
        mask = n - 1;
        final K key[] = this.key = (K[]) new Object[ n ];
        final boolean used[] = this.used = new boolean[ n ];
        K k;
        for( int i = size, pos = 0; i-- != 0; ) {
            k = (K) s.readObject();
            pos = ( (k) == null ? 0x87fcd5c : it.unimi.dsi.fastutil.HashCommon.murmurHash3( System.identityHashCode(k) ) ) & mask;
            while ( used[ pos ] ) pos = ( pos + 1 ) & mask;
            used[ pos ] = true;
            key[ pos ] = k;
        }
        if ( ASSERTS ) checkTable();
    }

    // Placeholder for the generated consistency check; intentionally empty in this build.
    private void checkTable() {}
}
package com.radiadesign.catalina.session; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Set; import org.apache.catalina.Context; import org.apache.catalina.Lifecycle; import org.apache.catalina.LifecycleException; import org.apache.catalina.LifecycleListener; import org.apache.catalina.LifecycleState; import org.apache.catalina.Loader; import org.apache.catalina.Session; import org.apache.catalina.Valve; import org.apache.catalina.session.ManagerBase; import org.apache.catalina.util.LifecycleSupport; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; import redis.clients.jedis.Protocol; public class RedisSessionManager extends ManagerBase implements Lifecycle { protected byte[] NULL_SESSION = "null".getBytes(); private final Log log = LogFactory.getLog(RedisSessionManager.class); protected String host = "localhost"; protected int port = 6379; protected int database = 0; protected String password = null; protected int timeout = Protocol.DEFAULT_TIMEOUT; protected JedisPool connectionPool; protected RedisSessionHandlerValve handlerValve; protected ThreadLocal<RedisSession> currentSession = new ThreadLocal<RedisSession>(); protected ThreadLocal<String> currentSessionId = new ThreadLocal<String>(); protected ThreadLocal<Boolean> currentSessionIsPersisted = new ThreadLocal<Boolean>(); protected Serializer serializer; protected static String name = "RedisSessionManager"; protected String serializationStrategyClass = "com.radiadesign.catalina.session.JavaSerializer"; /** * The lifecycle event support for this component. 
*/ protected LifecycleSupport lifecycle = new LifecycleSupport(this); public String getHost() { return host; } public void setHost(String host) { this.host = host; } public int getPort() { return port; } public void setPort(int port) { this.port = port; } public int getDatabase() { return database; } public void setDatabase(int database) { this.database = database; } public int getTimeout() { return timeout; } public void setTimeout(int timeout) { this.timeout = timeout; } public String getPassword() { return password; } public void setPassword(String password) { this.password = password; } public void setSerializationStrategyClass(String strategy) { this.serializationStrategyClass = strategy; } public int getRejectedSessions() { // Essentially do nothing. return 0; } public void setRejectedSessions(int i) { // Do nothing. } protected Jedis acquireConnection() { Jedis jedis = connectionPool.getResource(); if (getDatabase() != 0) { jedis.select(getDatabase()); } return jedis; } protected void returnConnection(Jedis jedis, Boolean error) { if (error) { connectionPool.returnBrokenResource(jedis); } else { connectionPool.returnResource(jedis); } } protected void returnConnection(Jedis jedis) { returnConnection(jedis, false); } public void load() throws ClassNotFoundException, IOException { } public void unload() throws IOException { } /** * Add a lifecycle event listener to this component. * * @param listener * The listener to add */ public void addLifecycleListener(LifecycleListener listener) { lifecycle.addLifecycleListener(listener); } /** * Get the lifecycle listeners associated with this lifecycle. If this * Lifecycle has no listeners registered, a zero-length array is returned. */ public LifecycleListener[] findLifecycleListeners() { return lifecycle.findLifecycleListeners(); } /** * Remove a lifecycle event listener from this component. 
* * @param listener * The listener to remove */ public void removeLifecycleListener(LifecycleListener listener) { lifecycle.removeLifecycleListener(listener); } /** * Start this component and implement the requirements of * {@link org.apache.catalina.util.LifecycleBase#startInternal()}. * * @exception LifecycleException * if this component detects a fatal error that prevents this * component from being used */ @Override protected synchronized void startInternal() throws LifecycleException { super.startInternal(); setState(LifecycleState.STARTING); Boolean attachedToValve = false; for (Valve valve : getContext().getPipeline().getValves()) { if (valve instanceof RedisSessionHandlerValve) { this.handlerValve = (RedisSessionHandlerValve) valve; this.handlerValve.setRedisSessionManager(this); log.info("Attached to RedisSessionHandlerValve"); attachedToValve = true; break; } } if (!attachedToValve) { String error = "Unable to attach to session handling valve; sessions cannot be saved after the request without the valve starting properly."; log.fatal(error); throw new LifecycleException(error); } try { initializeSerializer(); } catch (ClassNotFoundException e) { log.fatal("Unable to load serializer", e); throw new LifecycleException(e); } catch (InstantiationException e) { log.fatal("Unable to load serializer", e); throw new LifecycleException(e); } catch (IllegalAccessException e) { log.fatal("Unable to load serializer", e); throw new LifecycleException(e); } log.info("Will expire sessions after " + getMaxInactiveInterval() + " seconds"); initializeDatabaseConnection(); setDistributable(true); } /** * Stop this component and implement the requirements of * {@link org.apache.catalina.util.LifecycleBase#stopInternal()}. 
* * @exception LifecycleException * if this component detects a fatal error that prevents this * component from being used */ @Override protected synchronized void stopInternal() throws LifecycleException { if (log.isDebugEnabled()) { log.debug("Stopping"); } setState(LifecycleState.STOPPING); try { connectionPool.destroy(); } catch (Exception e) { // Do nothing. } // Require a new random number generator if we are restarted super.stopInternal(); } @Override public Session createSession(String sessionId) { RedisSession session = (RedisSession) createEmptySession(); // Initialize the properties of the new session and return it session.setNew(true); session.setValid(true); session.setCreationTime(System.currentTimeMillis()); session.setMaxInactiveInterval(getMaxInactiveInterval()); String jvmRoute = getJvmRoute(); Boolean error = true; Jedis jedis = null; try { jedis = acquireConnection(); // Ensure generation of a unique session identifier. do { if (null == sessionId) { sessionId = generateSessionId(); } if (jvmRoute != null) { sessionId += '.' + jvmRoute; } } while (jedis.setnx(sessionId.getBytes(), NULL_SESSION) == 1L); // 1 // = // key // set; // 0 // = // key // already // existed /* * Even though the key is set in Redis, we are not going to flag the * current thread as having had the session persisted since the * session isn't actually serialized to Redis yet. This ensures that * the save(session) at the end of the request will serialize the * session into Redis with 'set' instead of 'setnx'. 
*/ error = false; session.setId(sessionId); session.tellNew(); currentSession.set(session); currentSessionId.set(sessionId); currentSessionIsPersisted.set(false); } finally { if (jedis != null) { returnConnection(jedis, error); } } return session; } @Override public Session createEmptySession() { return new RedisSession(this); } @Override public void add(Session session) { try { save(session); } catch (IOException ex) { log.warn("Unable to add to session manager store: " + ex.getMessage()); throw new RuntimeException( "Unable to add to session manager store.", ex); } } @Override public Session findSession(String id) throws IOException { RedisSession session; if (id == null) { session = null; currentSessionIsPersisted.set(false); } else if (id.equals(currentSessionId.get())) { session = currentSession.get(); } else { session = loadSessionFromRedis(id); if (session != null) { currentSessionIsPersisted.set(true); } } currentSession.set(session); currentSessionId.set(id); return session; } public void clear() { Jedis jedis = null; Boolean error = true; try { jedis = acquireConnection(); jedis.flushDB(); error = false; } finally { if (jedis != null) { returnConnection(jedis, error); } } } public int getSize() throws IOException { Jedis jedis = null; Boolean error = true; try { jedis = acquireConnection(); int size = jedis.dbSize().intValue(); error = false; return size; } finally { if (jedis != null) { returnConnection(jedis, error); } } } public String[] keys() throws IOException { Jedis jedis = null; Boolean error = true; try { jedis = acquireConnection(); Set<String> keySet = jedis.keys("*"); error = false; return keySet.toArray(new String[keySet.size()]); } finally { if (jedis != null) { returnConnection(jedis, error); } } } public RedisSession loadSessionFromRedis(String id) throws IOException { RedisSession session; Jedis jedis = null; Boolean error = true; try { log.trace("Attempting to load session " + id + " from Redis"); jedis = acquireConnection(); byte[] 
data = jedis.get(id.getBytes()); error = false; if (data == null) { log.trace("Session " + id + " not found in Redis"); session = null; } else if (Arrays.equals(NULL_SESSION, data)) { throw new IllegalStateException( "Race condition encountered: attempted to load session[" + id + "] which has been created but not yet serialized."); } else { log.trace("Deserializing session " + id + " from Redis"); session = (RedisSession) createEmptySession(); serializer.deserializeInto(data, session); session.setId(id); session.setNew(false); session.setMaxInactiveInterval(getMaxInactiveInterval() * 1000); session.access(); session.setValid(true); session.resetDirtyTracking(); if (log.isTraceEnabled()) { log.trace("Session Contents [" + id + "]:"); for (Object name : Collections.list(session .getAttributeNames())) { log.trace(" " + name); } } } return session; } catch (IOException e) { log.fatal(e.getMessage()); throw e; } catch (ClassNotFoundException ex) { log.fatal("Unable to deserialize into session", ex); throw new IOException("Unable to deserialize into session", ex); } finally { if (jedis != null) { returnConnection(jedis, error); } } } public void save(Session session) throws IOException { Jedis jedis = null; Boolean error = true; try { log.trace("Saving session " + session + " into Redis"); RedisSession redisSession = (RedisSession) session; if (log.isTraceEnabled()) { log.trace("Session Contents [" + redisSession.getId() + "]:"); for (Object name : Collections.list(redisSession .getAttributeNames())) { log.trace(" " + name); } } Boolean sessionIsDirty = redisSession.isDirty(); redisSession.resetDirtyTracking(); byte[] binaryId = redisSession.getId().getBytes(); jedis = acquireConnection(); if (sessionIsDirty || currentSessionIsPersisted.get() != true) { jedis.set(binaryId, serializer.serializeFrom(redisSession)); } currentSessionIsPersisted.set(true); log.trace("Setting expire timeout on session [" + redisSession.getId() + "] to " + getMaxInactiveInterval()); 
jedis.expire(binaryId, getMaxInactiveInterval()); error = false; } catch (IOException e) { log.error(e.getMessage()); throw e; } finally { if (jedis != null) { returnConnection(jedis, error); } } } public void remove(Session session) { Jedis jedis = null; Boolean error = true; log.trace("Removing session ID : " + session.getId()); try { jedis = acquireConnection(); jedis.del(session.getId()); error = false; } finally { if (jedis != null) { returnConnection(jedis, error); } } } public void afterRequest() { RedisSession redisSession = currentSession.get(); if (redisSession != null) { currentSession.remove(); currentSessionId.remove(); currentSessionIsPersisted.remove(); log.trace("Session removed from ThreadLocal :" + redisSession.getIdInternal()); } } @Override public void processExpires() { // We are going to use Redis's ability to expire keys for session // expiration. // Do nothing. } private void initializeDatabaseConnection() throws LifecycleException { try { // TODO: Allow configuration of pool (such as size...) connectionPool = new JedisPool(new JedisPoolConfig(), getHost(), getPort(), getTimeout(), getPassword()); } catch (Exception e) { e.printStackTrace(); throw new LifecycleException("Error Connecting to Redis", e); } } private void initializeSerializer() throws ClassNotFoundException, IllegalAccessException, InstantiationException { log.info("Attempting to use serializer :" + serializationStrategyClass); serializer = (Serializer) Class.forName(serializationStrategyClass) .newInstance(); Loader loader = null; Context context = this.getContext(); if (context != null) { loader = context.getLoader(); } ClassLoader classLoader = null; if (loader != null) { classLoader = loader.getClassLoader(); } serializer.setClassLoader(classLoader); } }
/* * Copyright Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terracotta.nomad.client; import org.terracotta.nomad.client.change.NomadChange; import org.terracotta.nomad.client.results.AllResultsReceiver; import org.terracotta.nomad.client.results.CommitResultsReceiver; import org.terracotta.nomad.client.results.DiscoverResultsReceiver; import org.terracotta.nomad.client.results.PrepareResultsReceiver; import org.terracotta.nomad.client.results.RollbackResultsReceiver; import org.terracotta.nomad.client.results.TakeoverResultsReceiver; import org.terracotta.nomad.messages.CommitMessage; import org.terracotta.nomad.messages.DiscoverResponse; import org.terracotta.nomad.messages.PrepareMessage; import org.terracotta.nomad.messages.RejectionReason; import org.terracotta.nomad.messages.RollbackMessage; import org.terracotta.nomad.messages.TakeoverMessage; import org.terracotta.nomad.server.NomadException; import java.net.InetSocketAddress; import java.time.Clock; import java.time.Instant; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Consumer; import static java.util.stream.Collectors.toList; public class NomadMessageSender<T> implements AllResultsReceiver<T> { private final List<NomadEndpoint<T>> servers; private final Clock 
clock; private final String host; private final String user; private final Map<InetSocketAddress, Long> mutativeMessageCounts = new ConcurrentHashMap<>(); private final AtomicLong maxVersionNumber = new AtomicLong(); private final List<NomadEndpoint<T>> preparedServers = new CopyOnWriteArrayList<>(); protected volatile UUID changeUuid; public NomadMessageSender(List<NomadEndpoint<T>> servers, String host, String user, Clock clock) { this.host = host; this.user = user; this.servers = servers; this.clock = clock; } public void sendDiscovers(DiscoverResultsReceiver<T> results) { results.startDiscovery(servers.stream().map(NomadEndpoint::getAddress).collect(toList())); for (NomadEndpoint<T> server : servers) { runSync( server::discover, discovery -> results.discovered(server.getAddress(), discovery), unwrap(e -> results.discoverFail(server.getAddress(), e)) ); } results.endDiscovery(); } public void sendSecondDiscovers(DiscoverResultsReceiver<T> results) { results.startSecondDiscovery(); for (NomadEndpoint<T> server : servers) { long mutativeMessageCount = mutativeMessageCounts.get(server.getAddress()); runSync( server::discover, discovery -> { long secondMutativeMessageCount = discovery.getMutativeMessageCount(); if (secondMutativeMessageCount == mutativeMessageCount) { results.discoverRepeated(server.getAddress()); } else { String lastMutationHost = discovery.getLastMutationHost(); String lastMutationUser = discovery.getLastMutationUser(); results.discoverOtherClient(server.getAddress(), lastMutationHost, lastMutationUser); } }, unwrap(e -> results.discoverFail(server.getAddress(), e)) ); } // The endSecondDiscovery() call is made outside this method } public void sendPrepares(PrepareResultsReceiver results, UUID changeUuid, NomadChange change) { results.startPrepare(changeUuid); long newVersionNumber = maxVersionNumber.get() + 1; Instant now = clock.instant(); for (NomadEndpoint<T> server : servers) { long mutativeMessageCount = 
mutativeMessageCounts.get(server.getAddress()); runSync( () -> server.prepare( new PrepareMessage( mutativeMessageCount, host, user, now, changeUuid, newVersionNumber, change ) ), response -> { if (response.isAccepted()) { results.prepared(server.getAddress()); } else { RejectionReason rejectionReason = response.getRejectionReason(); switch (rejectionReason) { case UNACCEPTABLE: String rejectionMessage = response.getRejectionMessage(); results.prepareChangeUnacceptable(server.getAddress(), rejectionMessage); break; case DEAD: String lastMutationHost = response.getLastMutationHost(); String lastMutationUser = response.getLastMutationUser(); results.prepareOtherClient(server.getAddress(), lastMutationHost, lastMutationUser); break; case BAD: throw new AssertionError("A server rejected a message as bad: " + server.getAddress()); default: throw new AssertionError("Unexpected RejectionReason: " + rejectionReason); } } }, unwrap(e -> results.prepareFail(server.getAddress(), e)) ); } results.endPrepare(); } public void sendCommits(CommitResultsReceiver results) { results.startCommit(); Instant now = clock.instant(); for (NomadEndpoint<T> server : preparedServers) { long mutativeMessageCount = mutativeMessageCounts.get(server.getAddress()); runSync( () -> { return server.commit( new CommitMessage( mutativeMessageCount + 1, host, user, now, changeUuid ) ); }, response -> { if (response.isAccepted()) { results.committed(server.getAddress()); } else { RejectionReason rejectionReason = response.getRejectionReason(); switch (rejectionReason) { case UNACCEPTABLE: throw new AssertionError("Commit should not return UNACCEPTABLE"); case DEAD: String lastMutationHost = response.getLastMutationHost(); String lastMutationUser = response.getLastMutationUser(); results.commitOtherClient(server.getAddress(), lastMutationHost, lastMutationUser); break; case BAD: throw new AssertionError("A server rejected a message as bad: " + server.getAddress()); default: throw new 
AssertionError("Unexpected RejectionReason: " + rejectionReason); } } }, unwrap(e -> results.commitFail(server.getAddress(), e)) ); } results.endCommit(); } public void sendRollbacks(RollbackResultsReceiver results) { results.startRollback(); Instant now = clock.instant(); for (NomadEndpoint<T> server : preparedServers) { long mutativeMessageCount = mutativeMessageCounts.get(server.getAddress()); runSync( () -> server.rollback( new RollbackMessage( mutativeMessageCount + 1, host, user, now, changeUuid ) ), response -> { if (response.isAccepted()) { results.rolledBack(server.getAddress()); } else { RejectionReason rejectionReason = response.getRejectionReason(); switch (rejectionReason) { case UNACCEPTABLE: throw new AssertionError("Rollback should not return UNACCEPTABLE"); case DEAD: String lastMutationHost = response.getLastMutationHost(); String lastMutationUser = response.getLastMutationUser(); results.rollbackOtherClient(server.getAddress(), lastMutationHost, lastMutationUser); break; case BAD: throw new AssertionError("A server rejected a message as bad: " + server.getAddress()); default: throw new AssertionError("Unexpected RejectionReason: " + rejectionReason); } } }, unwrap(e -> results.rollbackFail(server.getAddress(), e)) ); } results.endRollback(); } public void sendTakeovers(TakeoverResultsReceiver results) { results.startTakeover(); Instant now = clock.instant(); for (NomadEndpoint<T> server : servers) { long mutativeMessageCount = mutativeMessageCounts.get(server.getAddress()); runSync( () -> server.takeover( new TakeoverMessage( mutativeMessageCount, host, user, now ) ), response -> { if (response.isAccepted()) { results.takeover(server.getAddress()); } else { RejectionReason rejectionReason = response.getRejectionReason(); switch (rejectionReason) { case UNACCEPTABLE: throw new AssertionError("Takeover should not return UNACCEPTABLE"); case DEAD: String lastMutationHost = response.getLastMutationHost(); String lastMutationUser = 
response.getLastMutationUser(); results.takeoverOtherClient(server.getAddress(), lastMutationHost, lastMutationUser); break; case BAD: throw new AssertionError("A server rejected a message as bad: " + server.getAddress()); default: throw new AssertionError("Unexpected RejectionReason: " + rejectionReason); } } }, unwrap(e -> results.takeoverFail(server.getAddress(), e)) ); } results.endTakeover(); } @Override public void discovered(InetSocketAddress server, DiscoverResponse<T> discovery) { long expectedMutativeMessageCount = discovery.getMutativeMessageCount(); long highestVersionNumber = discovery.getHighestVersion(); mutativeMessageCounts.put(server, expectedMutativeMessageCount); maxVersionNumber.accumulateAndGet(highestVersionNumber, Long::max); } @SuppressWarnings("OptionalGetWithoutIsPresent") public final void registerPreparedServer(InetSocketAddress address) { preparedServers.add(servers.stream().filter(s -> s.getAddress().equals(address)).findAny().get()); } private <T> void runSync(Callable<T> callable, Consumer<T> onSuccess, Consumer<Throwable> onError) { try { T result = callable.call(); if (result == null) { throw new AssertionError("Response expected. Bug or wrong mocking ?"); } onSuccess.accept(result); } catch (Exception e) { onError.accept(e); } } private static Consumer<Throwable> unwrap(Consumer<Throwable> c) { return t -> c.accept(t instanceof NomadException && t.getCause() != null && t.getCause() != t ? t.getCause() : t); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.internal.cache.ha;

import static org.apache.geode.distributed.ConfigurationProperties.DURABLE_CLIENT_ID;
import static org.apache.geode.distributed.ConfigurationProperties.DURABLE_CLIENT_TIMEOUT;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.LOG_FILE;
import static org.apache.geode.distributed.ConfigurationProperties.LOG_LEVEL;
import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_ARCHIVE_FILE;
import static org.apache.geode.distributed.ConfigurationProperties.STATISTIC_SAMPLING_ENABLED;
import static org.junit.Assert.assertEquals;

import java.util.Collection;
import java.util.Iterator;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import org.awaitility.Awaitility;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionFactory;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.cache.server.CacheServer;
import org.apache.geode.cache.util.CacheListenerAdapter;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.OSProcess;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.tier.sockets.CacheClientNotifier;
import org.apache.geode.internal.cache.tier.sockets.CacheClientProxy;
import org.apache.geode.internal.i18n.LocalizedStrings;
import org.apache.geode.test.dunit.DistributedTestUtils;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.IgnoredException;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.Wait;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.ClientSubscriptionTest;
import org.apache.geode.test.junit.categories.DistributedTest;

/**
 * Distributed regression test for GEODE bug 48571: after a durable client
 * disconnects and reconnects, the client proxy's queue-size statistic must
 * agree with the actual queue size once the queue has been fully dispatched.
 */
@Category({DistributedTest.class, ClientSubscriptionTest.class})
public class Bug48571DUnitTest extends JUnit4DistributedTestCase {

  private static VM server = null;
  private VM client = null;
  private static GemFireCacheImpl cache = null;

  private static final String region = Bug48571DUnitTest.class.getSimpleName() + "_region";

  // Event counters updated by the client-side cache listener.
  private static int numOfCreates = 0;
  private static int numOfUpdates = 0;
  private static int numOfInvalidates = 0;
  // Flipped when the sentinel "last_key" entry arrives on the client.
  private static boolean lastKeyReceived = false;

  @Override
  public final void postSetUp() throws Exception {
    final Host dunitHost = Host.getHost(0);
    server = dunitHost.getVM(0);
    client = dunitHost.getVM(1);
  }

  @Override
  public final void preTearDown() throws Exception {
    // Reset static state locally and in both remote VMs.
    reset();
    server.invoke(Bug48571DUnitTest::reset);
    client.invoke(Bug48571DUnitTest::reset);
  }

  private static void reset() {
    lastKeyReceived = false;
    numOfCreates = 0;
    numOfUpdates = 0;
    numOfInvalidates = 0;
    if (cache != null && !cache.isClosed()) {
      cache.close();
      cache.getDistributedSystem().disconnect();
    }
  }

  @Test
  public void testStatsMatchWithSize() throws Exception {
    IgnoredException.addIgnoredException("Unexpected IOException||Connection reset");

    // start a server
    int port = server.invoke(Bug48571DUnitTest::createServerCache);

    // create durable client, with durable RI
    client.invoke(() -> Bug48571DUnitTest.createClientCache(client.getHost(), port));

    // do puts on server from three different threads, pause after 500 puts each.
    server.invoke(Bug48571DUnitTest::doPuts);

    // close durable client
    client.invoke(Bug48571DUnitTest::closeClientCache);
    server.invoke("verifyProxyHasBeenPaused", Bug48571DUnitTest::verifyProxyHasBeenPaused);

    // resume puts on server, add another 100.
    server.invoke(Bug48571DUnitTest::resumePuts);

    // start durable client
    client.invoke(() -> Bug48571DUnitTest.createClientCache(client.getHost(), port));

    // wait for full queue dispatch
    client.invoke(Bug48571DUnitTest::waitForLastKey);

    // verify the stats
    server.invoke(Bug48571DUnitTest::verifyStats);
  }

  /** Waits up to one minute for at least one client proxy to report paused. */
  private static void verifyProxyHasBeenPaused() {
    Awaitility.await().atMost(60, TimeUnit.SECONDS).until(() -> {
      CacheClientNotifier notifier = CacheClientNotifier.getInstance();
      boolean anyPaused = false;
      for (CacheClientProxy proxy : notifier.getClientProxies()) {
        System.out.println("proxy status " + proxy.getState());
        if (proxy.isPaused()) {
          anyPaused = true;
          break;
        }
      }
      assertEquals("Proxy has not been paused in 1 minute", true, anyPaused);
    });
  }

  /** Creates the server-side cache, region, and cache server; returns its port. */
  private static int createServerCache() throws Exception {
    Properties props = new Properties();
    props.setProperty(LOCATORS, "localhost[" + DistributedTestUtils.getDUnitLocatorPort() + "]");
    props.setProperty(LOG_FILE, "server_" + OSProcess.getId() + ".log");
    props.setProperty(LOG_LEVEL, "info");
    props.setProperty(STATISTIC_ARCHIVE_FILE, "server_" + OSProcess.getId() + ".gfs");
    props.setProperty(STATISTIC_SAMPLING_ENABLED, "true");

    CacheFactory cf = new CacheFactory(props);
    // Cycle the distributed system so the cache comes up with exactly these props.
    DistributedSystem ds = new Bug48571DUnitTest().getSystem(props);
    ds.disconnect();
    cache = (GemFireCacheImpl) cf.create();

    RegionFactory<String, String> regionFactory = cache.createRegionFactory(RegionShortcut.REPLICATE);
    regionFactory.setConcurrencyChecksEnabled(false);
    regionFactory.create(region);

    CacheServer server1 = cache.addCacheServer();
    server1.setPort(0);
    server1.start();
    return server1.getPort();
  }

  /** Closes the client cache keeping the durable subscription alive. */
  private static void closeClientCache() {
    cache.close(true);
  }

  /** Creates a durable client cache with subscription and a counting listener. */
  private static void createClientCache(Host host, Integer port) {
    Properties props = new Properties();
    props.setProperty(MCAST_PORT, "0");
    props.setProperty(LOCATORS, "");
    props.setProperty(DURABLE_CLIENT_ID, "durable-48571");
    props.setProperty(DURABLE_CLIENT_TIMEOUT, "300000");
    props.setProperty(LOG_FILE, "client_" + OSProcess.getId() + ".log");
    props.setProperty(LOG_LEVEL, "info");
    props.setProperty(STATISTIC_ARCHIVE_FILE, "client_" + OSProcess.getId() + ".gfs");
    props.setProperty(STATISTIC_SAMPLING_ENABLED, "true");

    ClientCacheFactory ccf = new ClientCacheFactory(props);
    ccf.setPoolSubscriptionEnabled(true);
    ccf.setPoolSubscriptionAckInterval(50);
    ccf.setPoolSubscriptionRedundancy(0);
    ccf.addPoolServer(host.getHostName(), port);

    // Cycle the distributed system so the cache comes up with exactly these props.
    DistributedSystem ds = new Bug48571DUnitTest().getSystem(props);
    ds.disconnect();
    cache = (GemFireCacheImpl) ccf.create();

    ClientRegionFactory<String, String> crf =
        cache.createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY);
    crf.setConcurrencyChecksEnabled(false);
    crf.addCacheListener(new CacheListenerAdapter<String, String>() {
      @Override
      public void afterInvalidate(EntryEvent<String, String> event) {
        cache.getLoggerI18n()
            .fine("Invalidate Event: " + event.getKey() + ", " + event.getNewValue());
        numOfInvalidates++;
      }

      @Override
      public void afterCreate(EntryEvent<String, String> event) {
        if (event.getKey().equals("last_key")) {
          lastKeyReceived = true;
        }
        cache.getLoggerI18n().fine("Create Event: " + event.getKey() + ", " + event.getNewValue());
        numOfCreates++;
      }

      @Override
      public void afterUpdate(EntryEvent<String, String> event) {
        cache.getLoggerI18n().fine("Update Event: " + event.getKey() + ", " + event.getNewValue());
        numOfUpdates++;
      }
    });

    Region<String, String> r = crf.create(region);
    r.registerInterest("ALL_KEYS", true);
    cache.readyForEvents();
  }

  /** Starts a thread that puts 500 entries keyed by the given prefix. */
  private static Thread putterThread(final Region<String, String> r, final String keyPrefix) {
    Thread t = new Thread(() -> {
      for (int i = 0; i < 500; i++) {
        r.put(keyPrefix + i, "VALUE_" + i);
      }
    });
    t.start();
    return t;
  }

  /** Performs 1500 puts from three concurrent threads and waits for them. */
  private static void doPuts() throws Exception {
    final Region<String, String> r = cache.getRegion(region);
    Thread t1 = putterThread(r, "T1_KEY_");
    Thread t2 = putterThread(r, "T2_KEY_");
    Thread t3 = putterThread(r, "T3_KEY_");
    t1.join();
    t2.join();
    t3.join();
  }

  /** Adds another 100 entries plus the sentinel last_key entry. */
  private static void resumePuts() {
    Region<String, String> r = cache.getRegion(region);
    for (int i = 0; i < 100; i++) {
      r.put("NEWKEY_" + i, "NEWVALUE_" + i);
    }
    r.put("last_key", "last_value");
  }

  /** Blocks until the sentinel last_key create event has been received. */
  private static void waitForLastKey() {
    WaitCriterion wc = new WaitCriterion() {
      @Override
      public boolean done() {
        return lastKeyReceived;
      }

      @Override
      public String description() {
        return "Did not receive last key.";
      }
    };
    Wait.waitForCriterion(wc, 60 * 1000, 500, true);
  }

  /** Asserts that the proxy's queue-size statistic matches the real queue size. */
  private static void verifyStats() {
    Awaitility.await().atMost(60, TimeUnit.SECONDS).until(() -> {
      CacheClientNotifier ccn = CacheClientNotifier.getInstance();
      CacheClientProxy ccp = ccn.getClientProxies().iterator().next();
      cache.getLoggerI18n().info(LocalizedStrings.DEBUG, "getQueueSize() " + ccp.getQueueSize());
      cache.getLoggerI18n().info(LocalizedStrings.DEBUG,
          "getQueueSizeStat() " + ccp.getQueueSizeStat());
      cache.getLoggerI18n().info(LocalizedStrings.DEBUG,
          "getEventsEnqued() " + ccp.getHARegionQueue().getStatistics().getEventsEnqued());
      cache.getLoggerI18n().info(LocalizedStrings.DEBUG,
          "getEventsDispatched() " + ccp.getHARegionQueue().getStatistics().getEventsDispatched());
      cache.getLoggerI18n().info(LocalizedStrings.DEBUG,
          "getEventsRemoved() " + ccp.getHARegionQueue().getStatistics().getEventsRemoved());
      cache.getLoggerI18n().info(LocalizedStrings.DEBUG,
          "getNumVoidRemovals() " + ccp.getHARegionQueue().getStatistics().getNumVoidRemovals());
      assertEquals("The queue size did not match the stat value", ccp.getQueueSize(),
          ccp.getQueueSizeStat());
    });
  }
}
/**
 * Copyright 2010 Newcastle University
 *
 * http://research.ncl.ac.uk/smart/
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.test.controller;

import com.test.util.Oauth2Utils;
import org.apache.oltu.oauth2.client.OAuthClient;
import org.apache.oltu.oauth2.client.URLConnectionClient;
import org.apache.oltu.oauth2.client.request.OAuthClientRequest;
import org.apache.oltu.oauth2.client.response.OAuthAccessTokenResponse;
import org.apache.oltu.oauth2.common.OAuth;
import org.apache.oltu.oauth2.common.exception.OAuthProblemException;
import org.apache.oltu.oauth2.common.exception.OAuthSystemException;
import org.apache.oltu.oauth2.common.message.types.GrantType;
import org.apache.oltu.oauth2.jwt.JWT;
import org.apache.oltu.oauth2.jwt.io.JWTClaimsSetWriter;
import org.apache.oltu.oauth2.jwt.io.JWTHeaderWriter;
import org.apache.oltu.oauth2.jwt.io.JWTWriter;
import org.apache.oltu.openidconnect.client.response.OpenIdConnectResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.servlet.view.RedirectView;

import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.net.URL;
import java.security.SecureRandom;
import java.security.cert.CertificateException;

/**
 * Exchanges an OpenID Connect authorization code for an access token and
 * id_token at the configured token endpoint, stores the access token in the
 * HTTP session, and redirects to the resource page.
 */
@Controller
public class TokenController {

    // static so the static initializer's error handling can log too
    private static final Logger logger = LoggerFactory.getLogger(TokenController.class);

    private @Value("#{openidConnect['clientId']}") String clientId;
    private @Value("#{openidConnect['clientSecret']}") String clientSecret;
    private @Value("#{openidConnect['authorizationEndpoint']}") String authorizationEndpoint;
    private @Value("#{openidConnect['tokenEndpoint']}") String tokenEndpoint;
    private @Value("#{openidConnect['scope']}") String scope;
    private @Value("#{openidConnect['redirectUri']}") String redirectUri;

    private final JWTWriter jwtWriter = new JWTWriter();

    static {
        // SECURITY NOTE: the two hooks below disable hostname verification for
        // "localhost" and trust ALL server certificates. This is acceptable only
        // for local testing and must never ship to production.
        HttpsURLConnection.setDefaultHostnameVerifier(
                new javax.net.ssl.HostnameVerifier() {
                    public boolean verify(String hostname, javax.net.ssl.SSLSession sslSession) {
                        return "localhost".equals(hostname);
                    }
                });
        trustAllHttpsCertificates();
    }

    /**
     * Installs an SSLContext whose trust manager accepts every certificate.
     * For localhost testing only — see the SECURITY NOTE in the static block.
     */
    private static void trustAllHttpsCertificates() {
        TrustManager[] trustAllCerts = new TrustManager[]{new X509TrustManager() {
            @Override
            public void checkClientTrusted(java.security.cert.X509Certificate[] x509Certificates, String s)
                    throws CertificateException {
                // accept all client certificates (testing only)
            }

            @Override
            public void checkServerTrusted(java.security.cert.X509Certificate[] x509Certificates, String s)
                    throws CertificateException {
                // accept all server certificates (testing only)
            }

            @Override
            public java.security.cert.X509Certificate[] getAcceptedIssuers() {
                return null;
            }
        }};
        try {
            SSLContext sc = SSLContext.getInstance("TLS");
            sc.init(null, trustAllCerts, new SecureRandom());
            HttpsURLConnection.setDefaultSSLSocketFactory(sc.getSocketFactory());
        } catch (Exception e) {
            // Previously swallowed silently; log so a broken SSL setup is visible.
            logger.error("Failed to install the trust-all SSLContext", e);
        }
    }

    /**
     * Handles the redirect back from the authorization server: exchanges the
     * {@code code} request parameter for tokens, logs the token details, saves
     * the access token in the session, and redirects to {@code get_resource}.
     * On an OAuth protocol error the index view is shown with the error details.
     *
     * @param req the callback request carrying the authorization code
     * @return redirect to the resource page, or the index view on OAuth failure
     * @throws OAuthSystemException on OAuth client infrastructure failure
     * @throws IOException          on I/O failure while validating the id_token
     */
    @RequestMapping("/get_token")
    public ModelAndView authorize(HttpServletRequest req) throws OAuthSystemException, IOException {
        ModelMap map = new ModelMap();
        try {
            logger.info("==============================get_token=============================================");
            String code = req.getParameter("code");
            logger.info("code=" + code);

            OAuthClientRequest request = OAuthClientRequest
                    .tokenLocation(tokenEndpoint)
                    .setClientId(clientId)
                    .setClientSecret(clientSecret)
                    .setRedirectURI(redirectUri).setScope(scope)
                    .setCode(code)
                    .setGrantType(GrantType.AUTHORIZATION_CODE)
                    .buildBodyMessage();

            OAuthClient client = new OAuthClient(new URLConnectionClient());
            OpenIdConnectResponse openIdConnectResponse =
                    client.accessToken(request, OpenIdConnectResponse.class);

            logger.info("getAccessToken" + openIdConnectResponse.getAccessToken());
            logger.info("getExpiresIn" + openIdConnectResponse.getExpiresIn());
            logger.info("getRefreshToken" + Oauth2Utils.isIssued(openIdConnectResponse.getRefreshToken()));

            req.getSession().setAttribute(OAuth.OAUTH_ACCESS_TOKEN, openIdConnectResponse.getAccessToken());

            JWT idToken = openIdConnectResponse.getIdToken();
            logger.info("idToken" + idToken.getRawString());
            logger.info("getHeader " + idToken.getHeader());
            logger.info("getHeader " + new JWTHeaderWriter().write(idToken.getHeader()));
            logger.info("getClaimsSet " + idToken.getClaimsSet());
            logger.info("getClaimsSet " + new JWTClaimsSetWriter().write(idToken.getClaimsSet()));

            // Validate the id_token against the issuer host and our client id.
            URL url = new URL(tokenEndpoint);
            logger.info("getIdTokenValid " + openIdConnectResponse.checkId(url.getHost(), clientId));

            return new ModelAndView(new RedirectView("get_resource"));
        } catch (OAuthProblemException e) {
            logger.error(e.getMessage());
            StringBuilder sb = new StringBuilder();
            sb.append("</br>");
            sb.append("Error code: ").append(e.getError()).append("</br>");
            sb.append("Error description: ").append(e.getDescription()).append("</br>");
            sb.append("Error uri: ").append(e.getUri()).append("</br>");
            sb.append("State: ").append(e.getState()).append("</br>");
            map.put("msg", sb.toString());
            return new ModelAndView("index", map);
        }
    }
}
/**
 * Copyright 2005-2015 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.krad.uif.util;

import java.lang.reflect.InvocationTargetException;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.kuali.rice.core.api.uif.RemotableQuickFinder.Builder;
import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.rice.krad.bo.DataObjectRelationship;
import org.kuali.rice.krad.datadictionary.RelationshipDefinition;
import org.kuali.rice.krad.document.Document;
import org.kuali.rice.krad.service.LegacyDataAdapter;
import org.kuali.rice.krad.util.ForeignKeyFieldsPopulationState;

/**
 * Mock implementation of {@link LegacyDataAdapter} for supporting UIF unit tests.
 *
 * <p>Persistence-style operations are no-ops that echo their argument back,
 * lookups return {@code null}, collection/map queries return empty immutable
 * collections, and boolean capability checks return {@code false}.</p>
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
public class MockLegacyDataAdapter implements LegacyDataAdapter {

    // ---- save / delete: no-ops that echo the argument ----

    @Override
    public <T> T save(T dataObject) {
        return dataObject;
    }

    @Override
    public <T> T linkAndSave(T dataObject) {
        return dataObject;
    }

    @Override
    public <T> T saveDocument(T document) {
        return document;
    }

    @Override
    public void delete(Object dataObject) {
        // no-op
    }

    @Override
    public void deleteMatching(Class<?> clazz, Map<String, ?> fieldValues) {
        // no-op
    }

    // ---- finders: nothing is ever found ----

    @Override
    public <T> T findBySinglePrimaryKey(Class<T> clazz, Object primaryKey) {
        return null;
    }

    @Override
    public <T> T findByPrimaryKey(Class<T> clazz, Map<String, ?> primaryKeys) {
        return null;
    }

    @Override
    public <T> T retrieve(T dataObject) {
        return dataObject;
    }

    @Override
    public <T> Collection<T> findAll(Class<T> clazz) {
        return Collections.emptyList();
    }

    @Override
    public <T> Collection<T> findMatching(Class<T> clazz, Map<String, ?> fieldValues) {
        return Collections.emptyList();
    }

    @Override
    public <T> Collection<T> findMatchingOrderBy(Class<T> clazz, Map<String, ?> fieldValues,
            String sortField, boolean sortAscending) {
        return Collections.emptyList();
    }

    @Override
    public <T> Collection<T> findCollectionBySearchHelper(Class<T> clazz, Map<String, String> formProps,
            boolean unbounded, boolean allPrimaryKeyValuesPresentAndNotWildcard, Integer searchResultsLimit) {
        return Collections.emptyList();
    }

    @Override
    public <T> Collection<T> findCollectionBySearchHelper(Class<T> clazz, Map<String, String> formProps,
            List<String> wildcardAsLiteralPropertyNames, boolean unbounded,
            boolean allPrimaryKeyValuesPresentAndNotWildcard, Integer searchResultsLimit) {
        return Collections.emptyList();
    }

    @Override
    public <T> T findObjectBySearch(Class<T> clazz, Map<String, String> formProps) {
        return null;
    }

    @Override
    public <T extends Document> T findByDocumentHeaderId(Class<T> documentClass, String id) {
        return null;
    }

    @Override
    public <T extends Document> List<T> findByDocumentHeaderIds(Class<T> documentClass, List<String> ids) {
        return Collections.emptyList();
    }

    // ---- primary-key helpers ----

    @Override
    public Map<String, ?> getPrimaryKeyFieldValues(Object dataObject) {
        return Collections.emptyMap();
    }

    @Override
    public Map<String, ?> getPrimaryKeyFieldValuesDOMDS(Object dataObject) {
        // NOTE: unlike getPrimaryKeyFieldValues, this variant returns null.
        return null;
    }

    @Override
    public boolean allPrimaryKeyValuesPresentAndNotWildcard(Class<?> boClass, Map<String, String> formProps) {
        return false;
    }

    @Override
    public List<String> listPrimaryKeyFieldNames(Class<?> clazz) {
        return Collections.emptyList();
    }

    @Override
    public boolean hasPrimaryKeyFieldValues(Object persistableObject) {
        return false;
    }

    @Override
    public boolean equalsByPrimaryKeys(Object do1, Object do2) {
        return false;
    }

    // ---- reference / relationship metadata ----

    @Override
    public void retrieveNonKeyFields(Object persistableObject) {
        // no-op
    }

    @Override
    public void retrieveReferenceObject(Object persistableObject, String referenceObjectName) {
        // no-op
    }

    @Override
    public void refreshAllNonUpdatingReferences(Object persistableObject) {
        // no-op
    }

    @Override
    public void refreshReferenceObject(Object businessObject, String referenceObjectName) {
        // no-op
    }

    @Override
    public ForeignKeyFieldsPopulationState getForeignKeyFieldsPopulationState(Object bo, String referenceName) {
        return null;
    }

    @Override
    public Map<String, String> getForeignKeysForReference(Class<?> clazz, String attributeName) {
        return Collections.emptyMap();
    }

    @Override
    public boolean hasReference(Class<?> boClass, String referenceName) {
        return false;
    }

    @Override
    public boolean hasCollection(Class<?> boClass, String collectionName) {
        return false;
    }

    @Override
    public boolean isReferenceUpdatable(Class<?> boClass, String referenceName) {
        return false;
    }

    @Override
    public Map<String, Class> listReferenceObjectFields(Class<?> boClass) {
        return null;
    }

    @Override
    public boolean isCollectionUpdatable(Class<?> boClass, String collectionName) {
        return false;
    }

    @Override
    public Map<String, Class> listCollectionObjectTypes(Class<?> boClass) {
        return null;
    }

    @Override
    public BusinessObject getReferenceIfExists(Object bo, String referenceName) {
        return null;
    }

    @Override
    public boolean allForeignKeyValuesPopulatedForReference(Object bo, String referenceName) {
        return false;
    }

    @Override
    public RelationshipDefinition getDictionaryRelationship(Class<?> c, String attributeName) {
        return null;
    }

    @Override
    public DataObjectRelationship getDataObjectRelationship(Object dataObject, Class<?> dataObjectClass,
            String attributeName, String attributePrefix, boolean keysOnly, boolean supportsLookup,
            boolean supportsInquiry) {
        return null;
    }

    @Override
    public Class<?> determineCollectionObjectType(Class<?> containingClass, String collectionPropertyName) {
        return null;
    }

    // ---- extension attributes ----

    @Override
    public boolean isExtensionAttribute(Class<?> boClass, String attributePropertyName, Class<?> propertyType) {
        return false;
    }

    @Override
    public Class<?> getExtensionAttributeClass(Class<?> boClass, String attributePropertyName) {
        return null;
    }

    @Override
    public Object getExtension(Class<?> businessObjectClass)
            throws InstantiationException, IllegalAccessException {
        return null;
    }

    // ---- proxies / object utilities ----

    @Override
    public Object resolveProxy(Object o) {
        return o;
    }

    @Override
    public boolean isProxied(Object object) {
        return false;
    }

    @Override
    public void materializeAllSubObjects(Object object) {
        // no-op
    }

    @Override
    public Class<?> materializeClassForProxiedObject(Object object) {
        return null;
    }

    @Override
    public Class<?> getPropertyType(Object object, String propertyName) {
        return null;
    }

    @Override
    public boolean isLockable(Object object) {
        return false;
    }

    @Override
    public void verifyVersionNumber(Object dataObject) {
        // no-op
    }

    @Override
    public boolean isPersistable(Class<?> dataObjectClass) {
        return false;
    }

    @Override
    public void setObjectPropertyDeep(Object bo, String propertyName, Class<?> type, Object propertyValue)
            throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
        // no-op
    }

    @Override
    public void setObjectProperty(Object bo, String propertyName, Class propertyType, Object propertyValue)
            throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
        // no-op
    }

    @Override
    public Object getNestedValue(Object bo, String fieldName) {
        return null;
    }

    @Override
    public Object createNewObjectFromClass(Class clazz) {
        Object instance = null;
        try {
            instance = clazz.newInstance();
        } catch (InstantiationException e) {
            // intentionally ignored: mock returns null on failure
        } catch (IllegalAccessException e) {
            // intentionally ignored: mock returns null on failure
        }
        return instance;
    }

    @Override
    public boolean isNull(Object object) {
        return false;
    }

    // ---- lookup / inquiry support ----

    @Override
    public Builder createQuickFinder(Class<?> containingClass, String attributeName) {
        return null;
    }

    @Override
    public String getTitleAttribute(Class<?> dataObjectClass) {
        return null;
    }

    @Override
    public boolean areNotesSupported(Class<?> dataObjectClass) {
        return false;
    }

    @Override
    public String getDataObjectIdentifierString(Object dataObject) {
        return null;
    }

    @Override
    public Class<?> getInquiryObjectClassIfNotTitle(Object dataObject, String propertyName) {
        return null;
    }

    @Override
    public Map<String, String> getInquiryParameters(Object dataObject, List<String> keys, String propertyName) {
        return Collections.emptyMap();
    }

    @Override
    public boolean hasLocalLookup(Class<?> dataObjectClass) {
        return false;
    }

    @Override
    public boolean hasLocalInquiry(Class<?> dataObjectClass) {
        return false;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver.wal;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.ChunkCreator;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.MemStoreLAB;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALFactory;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameter;
import org.junit.runners.Parameterized.Parameters;

/**
 * Tests for WAL write durability
 */
@RunWith(Parameterized.class)
@Category({ RegionServerTests.class, MediumTests.class })
public class TestDurability {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestDurability.class);

  private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
  private static FileSystem FS;
  private static MiniDFSCluster CLUSTER;
  private static Configuration CONF;
  private static Path DIR;

  private static byte[] FAMILY = Bytes.toBytes("family");
  private static byte[] ROW = Bytes.toBytes("row");
  private static byte[] COL = Bytes.toBytes("col");

  @Parameter
  public String walProvider;

  @Rule
  public TestName name = new TestName();

  @Parameters(name = "{index}: provider={0}")
  public static Iterable<Object[]> data() {
    return Arrays.asList(new Object[] { "defaultProvider" }, new Object[] { "asyncfs" });
  }

  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    CONF = TEST_UTIL.getConfiguration();
    TEST_UTIL.startMiniDFSCluster(1);
    CLUSTER = TEST_UTIL.getDFSCluster();
    FS = CLUSTER.getFileSystem();
    DIR = TEST_UTIL.getDataTestDirOnTestFS("TestDurability");
    CommonFSUtils.setRootDir(CONF, DIR);
  }

  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }

  @Before
  public void setUp() {
    CONF.set(WALFactory.WAL_PROVIDER, walProvider);
  }

  @After
  public void tearDown() throws IOException {
    FS.delete(DIR, true);
  }

  /**
   * Exercises every {@link Durability} mode through a sync-default region and an
   * ASYNC_WAL ("deferred") region sharing the same WAL, asserting after each batch
   * of mutations exactly how many entries have been made durable.
   */
  @Test
  public void testDurability() throws Exception {
    WALFactory wals = new WALFactory(CONF,
      ServerName.valueOf("TestDurability", 16010, EnvironmentEdgeManager.currentTime())
        .toString());
    HRegion region = createHRegion(wals, Durability.USE_DEFAULT);
    WAL wal = region.getWAL();
    HRegion deferredRegion = createHRegion(region.getTableDescriptor(), region.getRegionInfo(),
      "deferredRegion", wal, Durability.ASYNC_WAL);

    region.put(newPut(null));
    verifyWALCount(wals, wal, 1);

    // a put through the deferred table does not write to the wal immediately,
    // but maybe has been successfully sync-ed by the underlying AsyncWriter +
    // AsyncFlusher thread
    deferredRegion.put(newPut(null));
    // but will after we sync the wal
    wal.sync();
    verifyWALCount(wals, wal, 2);

    // a put through a deferred table will be sync with the put sync'ed put
    deferredRegion.put(newPut(null));
    wal.sync();
    verifyWALCount(wals, wal, 3);
    region.put(newPut(null));
    verifyWALCount(wals, wal, 4);

    // a put through a deferred table will be sync with the put sync'ed put
    deferredRegion.put(newPut(Durability.USE_DEFAULT));
    wal.sync();
    verifyWALCount(wals, wal, 5);
    region.put(newPut(Durability.USE_DEFAULT));
    verifyWALCount(wals, wal, 6);

    // SKIP_WAL never writes to the wal
    region.put(newPut(Durability.SKIP_WAL));
    deferredRegion.put(newPut(Durability.SKIP_WAL));
    verifyWALCount(wals, wal, 6);
    wal.sync();
    verifyWALCount(wals, wal, 6);

    // Async overrides sync table default
    region.put(newPut(Durability.ASYNC_WAL));
    deferredRegion.put(newPut(Durability.ASYNC_WAL));
    wal.sync();
    verifyWALCount(wals, wal, 8);

    // sync overrides async table default
    region.put(newPut(Durability.SYNC_WAL));
    deferredRegion.put(newPut(Durability.SYNC_WAL));
    verifyWALCount(wals, wal, 10);

    // fsync behaves like sync
    region.put(newPut(Durability.FSYNC_WAL));
    deferredRegion.put(newPut(Durability.FSYNC_WAL));
    verifyWALCount(wals, wal, 12);
  }

  /**
   * Verifies that each Increment produces exactly one WAL entry regardless of how
   * many columns it touches, and that increment arithmetic is correct.
   */
  @Test
  public void testIncrement() throws Exception {
    byte[] row1 = Bytes.toBytes("row1");
    byte[] col1 = Bytes.toBytes("col1");
    byte[] col2 = Bytes.toBytes("col2");
    byte[] col3 = Bytes.toBytes("col3");

    // Setting up region
    WALFactory wals = new WALFactory(CONF,
      ServerName.valueOf("TestIncrement", 16010, EnvironmentEdgeManager.currentTime())
        .toString());
    HRegion region = createHRegion(wals, Durability.USE_DEFAULT);
    WAL wal = region.getWAL();

    // col1: amount = 0, 1 write back to WAL
    Increment inc1 = new Increment(row1);
    inc1.addColumn(FAMILY, col1, 0);
    Result res = region.increment(inc1);
    assertEquals(1, res.size());
    assertEquals(0, Bytes.toLong(res.getValue(FAMILY, col1)));
    verifyWALCount(wals, wal, 1);

    // col1: amount = 1, 1 write back to WAL
    inc1 = new Increment(row1);
    inc1.addColumn(FAMILY, col1, 1);
    res = region.increment(inc1);
    assertEquals(1, res.size());
    assertEquals(1, Bytes.toLong(res.getValue(FAMILY, col1)));
    verifyWALCount(wals, wal, 2);

    // col1: amount = 0, 1 write back to WAL
    inc1 = new Increment(row1);
    inc1.addColumn(FAMILY, col1, 0);
    res = region.increment(inc1);
    assertEquals(1, res.size());
    assertEquals(1, Bytes.toLong(res.getValue(FAMILY, col1)));
    verifyWALCount(wals, wal, 3);

    // col1: amount = 0, col2: amount = 0, col3: amount = 0
    // 1 write back to WAL
    inc1 = new Increment(row1);
    inc1.addColumn(FAMILY, col1, 0);
    inc1.addColumn(FAMILY, col2, 0);
    inc1.addColumn(FAMILY, col3, 0);
    res = region.increment(inc1);
    assertEquals(3, res.size());
    assertEquals(1, Bytes.toLong(res.getValue(FAMILY, col1)));
    assertEquals(0, Bytes.toLong(res.getValue(FAMILY, col2)));
    assertEquals(0, Bytes.toLong(res.getValue(FAMILY, col3)));
    verifyWALCount(wals, wal, 4);

    // col1: amount = 5, col2: amount = 4, col3: amount = 3
    // 1 write back to WAL
    inc1 = new Increment(row1);
    inc1.addColumn(FAMILY, col1, 5);
    inc1.addColumn(FAMILY, col2, 4);
    inc1.addColumn(FAMILY, col3, 3);
    res = region.increment(inc1);
    assertEquals(3, res.size());
    assertEquals(6, Bytes.toLong(res.getValue(FAMILY, col1)));
    assertEquals(4, Bytes.toLong(res.getValue(FAMILY, col2)));
    assertEquals(3, Bytes.toLong(res.getValue(FAMILY, col3)));
    verifyWALCount(wals, wal, 5);
  }

  /**
   * Test when returnResults set to false in increment it should not return the result instead it
   * returns null.
   */
  @Test
  public void testIncrementWithReturnResultsSetToFalse() throws Exception {
    byte[] row1 = Bytes.toBytes("row1");
    byte[] col1 = Bytes.toBytes("col1");

    // Setting up region
    WALFactory wals = new WALFactory(CONF,
      ServerName.valueOf("testIncrementWithReturnResultsSetToFalse", 16010,
        EnvironmentEdgeManager.currentTime()).toString());
    HRegion region = createHRegion(wals, Durability.USE_DEFAULT);

    Increment inc1 = new Increment(row1);
    inc1.setReturnResults(false);
    inc1.addColumn(FAMILY, col1, 1);
    Result res = region.increment(inc1);
    assertTrue(res.isEmpty());
  }

  /**
   * Builds a single-cell Put for the shared test row/family/column.
   * @param durability durability to request on the Put, or null to leave the
   *          client default in place
   */
  private Put newPut(Durability durability) {
    Put p = new Put(ROW);
    p.addColumn(FAMILY, COL, COL);
    if (durability != null) {
      p.setDurability(durability);
    }
    return p;
  }

  /**
   * Reads the WAL's current file end-to-end and asserts it contains exactly
   * {@code expected} entries.
   */
  private void verifyWALCount(WALFactory wals, WAL log, int expected) throws Exception {
    Path walPath = AbstractFSWALProvider.getCurrentFileName(log);
    WAL.Reader reader = wals.createReader(FS, walPath);
    int count = 0;
    WAL.Entry entry = new WAL.Entry();
    while (reader.next(entry) != null) {
      count++;
    }
    reader.close();
    assertEquals(expected, count);
  }

  // lifted from TestAtomicOperation
  /**
   * Creates a fresh region named after the running test.
   * @param durability table-level durability applied to the descriptor; previously
   *          this parameter was silently ignored, which made the "deferred"
   *          (ASYNC_WAL) region behave like a synchronous one
   */
  private HRegion createHRegion(WALFactory wals, Durability durability) throws IOException {
    TableName tableName = TableName.valueOf(name.getMethodName().replaceAll("[^A-Za-z0-9-_]", "_"));
    // FIX: apply the requested durability to the table descriptor instead of
    // dropping the parameter on the floor.
    TableDescriptor htd = TableDescriptorBuilder.newBuilder(tableName)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY))
      .setDurability(durability).build();
    RegionInfo info = RegionInfoBuilder.newBuilder(tableName).build();
    Path path = new Path(DIR, tableName.getNameAsString());
    if (FS.exists(path)) {
      if (!FS.delete(path, true)) {
        throw new IOException("Failed delete of " + path);
      }
    }
    ChunkCreator.initialize(MemStoreLAB.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null,
      MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT);
    return HRegion.createHRegion(info, path, CONF, htd, wals.getWAL(info));
  }

  /**
   * Creates a region from an existing descriptor, rebuilt with the requested
   * table-level durability (the descriptor itself is immutable).
   */
  private HRegion createHRegion(TableDescriptor td, RegionInfo info, String dir, WAL wal,
    Durability durability) throws IOException {
    Path path = new Path(DIR, dir);
    if (FS.exists(path)) {
      if (!FS.delete(path, true)) {
        throw new IOException("Failed delete of " + path);
      }
    }
    ChunkCreator.initialize(MemStoreLAB.CHUNK_SIZE_DEFAULT, false, 0, 0, 0, null,
      MemStoreLAB.INDEX_CHUNK_SIZE_PERCENTAGE_DEFAULT);
    // FIX: the durability parameter was previously unused; without this the
    // "deferredRegion" in testDurability never actually deferred its WAL writes.
    td = TableDescriptorBuilder.newBuilder(td).setDurability(durability).build();
    return HRegion.createHRegion(info, path, CONF, td, wal);
  }
}
package org.apereo.cas.support.saml.web.idp.profile.builders.enc;

import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.configuration.model.support.saml.idp.SamlIdPResponseProperties;
import org.apereo.cas.support.saml.SamlException;
import org.apereo.cas.support.saml.SamlIdPUtils;
import org.apereo.cas.support.saml.SamlUtils;
import org.apereo.cas.support.saml.idp.metadata.locator.SamlIdPMetadataCredentialResolver;
import org.apereo.cas.support.saml.idp.metadata.locator.SamlIdPMetadataLocator;
import org.apereo.cas.support.saml.idp.metadata.locator.SamlIdPSamlRegisteredServiceCriterion;
import org.apereo.cas.support.saml.services.SamlRegisteredService;
import org.apereo.cas.support.saml.services.idp.metadata.SamlRegisteredServiceServiceProviderMetadataFacade;
import org.apereo.cas.util.DigestUtils;
import org.apereo.cas.util.LoggingUtils;
import org.apereo.cas.util.RegexUtils;
import org.apereo.cas.util.crypto.CertUtils;
import org.apereo.cas.util.crypto.PrivateKeyFactoryBean;

import com.google.common.collect.Sets;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import net.shibboleth.utilities.java.support.resolver.CriteriaSet;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.opensaml.core.criterion.EntityIdCriterion;
import org.opensaml.messaging.context.MessageContext;
import org.opensaml.saml.common.SAMLObject;
import org.opensaml.saml.common.binding.impl.SAMLOutboundDestinationHandler;
import org.opensaml.saml.common.binding.security.impl.EndpointURLSchemeSecurityHandler;
import org.opensaml.saml.common.binding.security.impl.SAMLOutboundProtocolMessageSigningHandler;
import org.opensaml.saml.criterion.EntityRoleCriterion;
import org.opensaml.saml.criterion.RoleDescriptorCriterion;
import org.opensaml.saml.metadata.criteria.entity.impl.EvaluableEntityRoleEntityDescriptorCriterion;
import org.opensaml.saml.metadata.resolver.MetadataResolver;
import org.opensaml.saml.saml2.core.RequestAbstractType;
import org.opensaml.saml.saml2.metadata.IDPSSODescriptor;
import org.opensaml.saml.saml2.metadata.RoleDescriptor;
import org.opensaml.saml.security.impl.SAMLMetadataSignatureSigningParametersResolver;
import org.opensaml.security.credential.AbstractCredential;
import org.opensaml.security.credential.BasicCredential;
import org.opensaml.security.credential.Credential;
import org.opensaml.security.credential.MutableCredential;
import org.opensaml.security.credential.UsageType;
import org.opensaml.security.criteria.UsageCriterion;
import org.opensaml.security.x509.BasicX509Credential;
import org.opensaml.xmlsec.SignatureSigningConfiguration;
import org.opensaml.xmlsec.SignatureSigningParameters;
import org.opensaml.xmlsec.config.impl.DefaultSecurityConfigurationBootstrap;
import org.opensaml.xmlsec.context.SecurityParametersContext;
import org.opensaml.xmlsec.criterion.SignatureSigningConfigurationCriterion;
import org.opensaml.xmlsec.impl.BasicAlgorithmPolicyConfiguration;
import org.opensaml.xmlsec.impl.BasicSignatureSigningConfiguration;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.security.PrivateKey;
import java.util.ArrayList;
import java.util.Objects;
import java.util.Optional;
import java.util.regex.Pattern;

/**
 * This is {@link DefaultSamlIdPObjectSigner}.
 *
 * Signs outbound SAML objects for the IdP: it wires the message context with
 * signing parameters resolved from the IdP metadata and the registered
 * service's overrides, then runs the OpenSAML outbound handler chain.
 *
 * @author Misagh Moayyed
 * @since 5.0.0
 */
@Slf4j
@RequiredArgsConstructor
@Getter
public class DefaultSamlIdPObjectSigner implements SamlIdPObjectSigner {
    private final MetadataResolver samlIdPMetadataResolver;

    private final CasConfigurationProperties casProperties;

    private final SamlIdPMetadataLocator samlIdPMetadataLocator;

    /**
     * Checks an optional per-service signing-credential fingerprint filter.
     * Returns true when the service defines no fingerprint (no filtering), or
     * when the SHA-1 digest of the credential's public key matches the
     * service's fingerprint regex (case-insensitive, partial match via find()).
     */
    private static boolean doesCredentialFingerprintMatch(final AbstractCredential credential,
                                                          final SamlRegisteredService samlRegisteredService) {
        val fingerprint = samlRegisteredService.getSigningCredentialFingerprint();
        if (StringUtils.isNotBlank(fingerprint)) {
            val digest = DigestUtils.digest("SHA-1", Objects.requireNonNull(credential.getPublicKey()).getEncoded());
            val pattern = RegexUtils.createPattern(fingerprint, Pattern.CASE_INSENSITIVE);
            LOGGER.debug("Matching credential fingerprint [{}] against filter [{}] for service [{}]",
                digest, fingerprint, samlRegisteredService.getName());
            return pattern.matcher(digest).find();
        }
        return true;
    }

    /**
     * Copies entity id, usage type and credential contexts from the original
     * metadata-resolved credential onto the newly built credential so the new
     * credential is a faithful replacement carrying the IdP's private key.
     */
    private static AbstractCredential finalizeSigningCredential(final MutableCredential credential, final Credential original) {
        credential.setEntityId(original.getEntityId());
        credential.setUsageType(original.getUsageType());
        Objects.requireNonNull(original.getCredentialContextSet())
            .forEach(ctx -> Objects.requireNonNull(credential.getCredentialContextSet()).add(ctx));
        return (AbstractCredential) credential;
    }

    @Override
    public <T extends SAMLObject> T encode(final T samlObject,
                                           final SamlRegisteredService service,
                                           final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
                                           final HttpServletResponse response,
                                           final HttpServletRequest request,
                                           final String binding,
                                           final RequestAbstractType authnRequest,
                                           final MessageContext messageContext) throws Exception {
        LOGGER.trace("Attempting to encode [{}] for [{}]", samlObject.getClass().getName(), adaptor.getEntityId());
        // Order matters: context/endpoint setup must precede the signing handler.
        prepareOutboundContext(samlObject, adaptor, messageContext, binding, authnRequest);
        prepareSecurityParametersContext(adaptor, messageContext, service);
        prepareEndpointURLSchemeSecurityHandler(messageContext);
        prepareSamlOutboundDestinationHandler(messageContext);
        prepareSamlOutboundProtocolMessageSigningHandler(messageContext);
        return samlObject;
    }

    /**
     * Prepare saml outbound protocol message signing handler.
     * Signs the message held by the context; error responses are signed only
     * when the IdP response properties enable it.
     *
     * @param <T>             the type parameter
     * @param outboundContext the outbound context
     * @throws Exception the exception
     */
    protected <T extends SAMLObject> void prepareSamlOutboundProtocolMessageSigningHandler(final MessageContext outboundContext) throws Exception {
        LOGGER.trace("Attempting to sign the outbound SAML message...");
        val handler = new SAMLOutboundProtocolMessageSigningHandler();
        handler.setSignErrorResponses(casProperties.getAuthn().getSamlIdp().getResponse().isSignError());
        handler.invoke(outboundContext);
        LOGGER.debug("Signed SAML message successfully");
    }

    /**
     * Prepare saml outbound destination handler.
     * Stamps the Destination attribute on the outbound message.
     *
     * @param <T>             the type parameter
     * @param outboundContext the outbound context
     * @throws Exception the exception
     */
    protected <T extends SAMLObject> void prepareSamlOutboundDestinationHandler(final MessageContext outboundContext) throws Exception {
        val handlerDest = new SAMLOutboundDestinationHandler();
        handlerDest.initialize();
        handlerDest.invoke(outboundContext);
    }

    /**
     * Prepare endpoint url scheme security handler.
     * Validates the URL scheme of the peer endpoint before sending.
     *
     * @param <T>             the type parameter
     * @param outboundContext the outbound context
     * @throws Exception the exception
     */
    protected <T extends SAMLObject> void prepareEndpointURLSchemeSecurityHandler(final MessageContext outboundContext) throws Exception {
        val handlerEnd = new EndpointURLSchemeSecurityHandler();
        handlerEnd.initialize();
        handlerEnd.invoke(outboundContext);
    }

    /**
     * Prepare security parameters context.
     * Resolves signature signing parameters from the SP role descriptor and the
     * registered service, and installs them on the message context's
     * {@link SecurityParametersContext} (created on demand).
     *
     * @param <T>             the type parameter
     * @param adaptor         the adaptor
     * @param outboundContext the outbound context
     * @param service         the service
     */
    protected <T extends SAMLObject> void prepareSecurityParametersContext(
        final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
        final MessageContext outboundContext,
        final SamlRegisteredService service) {
        val secParametersContext = outboundContext.getSubcontext(SecurityParametersContext.class, true);
        val roleDesc = adaptor.getSsoDescriptor();
        val signingParameters = buildSignatureSigningParameters(roleDesc, service);
        // NOTE(review): signingParameters may be null (see buildSignatureSigningParameters);
        // signing would then fail downstream.
        Objects.requireNonNull(secParametersContext).setSignatureSigningParameters(signingParameters);
    }

    /**
     * Prepare outbound context.
     * Places the SAML object on the context as the outbound message and wires
     * the peer entity endpoint for the requested binding.
     *
     * @param <T>             the type parameter
     * @param samlObject      the saml object
     * @param adaptor         the adaptor
     * @param outboundContext the outbound context
     * @param binding         the binding
     * @param authnRequest    the authn request
     * @throws SamlException the saml exception
     */
    protected <T extends SAMLObject> void prepareOutboundContext(
        final T samlObject,
        final SamlRegisteredServiceServiceProviderMetadataFacade adaptor,
        final MessageContext outboundContext,
        final String binding,
        final RequestAbstractType authnRequest) throws SamlException {
        LOGGER.trace("Outbound saml object to use is [{}]", samlObject.getClass().getName());
        outboundContext.setMessage(samlObject);
        SamlIdPUtils.preparePeerEntitySamlEndpointContext(Pair.of(authnRequest, outboundContext), outboundContext, adaptor, binding);
    }

    /**
     * Build signature signing parameters signature signing parameters.
     *
     * @param descriptor the descriptor
     * @param service    the service
     * @return the signature signing parameters, or null when resolution fails
     *         (a warning is logged and signing will subsequently fail)
     */
    @SneakyThrows
    protected SignatureSigningParameters buildSignatureSigningParameters(final RoleDescriptor descriptor,
                                                                         final SamlRegisteredService service) {
        val criteria = new CriteriaSet();
        val signatureSigningConfiguration = getSignatureSigningConfiguration(service);
        criteria.add(new SignatureSigningConfigurationCriterion(signatureSigningConfiguration));
        criteria.add(new RoleDescriptorCriterion(descriptor));
        val resolver = new SAMLMetadataSignatureSigningParametersResolver();
        LOGGER.trace("Resolving signature signing parameters for [{}]", descriptor.getElementQName().getLocalPart());
        val params = resolver.resolveSingle(criteria);
        if (params != null) {
            LOGGER.trace("Created signature signing parameters."
                    + "\nSignature algorithm: [{}]"
                    + "\nSignature canonicalization algorithm: [{}]"
                    + "\nSignature reference digest methods: [{}]"
                    + "\nSignature reference canonicalization algorithm: [{}]",
                params.getSignatureAlgorithm(),
                params.getSignatureCanonicalizationAlgorithm(),
                params.getSignatureReferenceDigestMethod(),
                params.getSignatureReferenceCanonicalizationAlgorithm());
        } else {
            LOGGER.warn("Unable to resolve SignatureSigningParameters, response signing will fail."
                + " Make sure domain names in IDP metadata URLs and certificates match CAS domain name");
        }
        return params;
    }

    /**
     * Gets signature signing configuration.
     * The resolved used is {@link SamlIdPMetadataCredentialResolver} that
     * allows the entire criteria set to be passed to the role descriptor resolver.
     * This behavior allows the passing of {@link SamlIdPSamlRegisteredServiceCriterion}
     * so signing configuration, etc can be fetched for a specific service as an override,
     * if on is in fact defined for the service.
     *
     * @param service the service
     * @return the signature signing configuration
     * @throws Exception the exception
     */
    protected SignatureSigningConfiguration getSignatureSigningConfiguration(final SamlRegisteredService service) throws Exception {
        val config = configureSignatureSigningSecurityConfiguration(service);
        val samlIdp = casProperties.getAuthn().getSamlIdp();
        val privateKey = getSigningPrivateKey(service);

        val mdCredentialResolver = new SamlIdPMetadataCredentialResolver();
        val roleDescriptorResolver = SamlIdPUtils.getRoleDescriptorResolver(
            samlIdPMetadataResolver,
            samlIdp.getMetadata().getCore().isRequireValidMetadata());
        mdCredentialResolver.setRoleDescriptorResolver(roleDescriptorResolver);
        mdCredentialResolver.setKeyInfoCredentialResolver(
            DefaultSecurityConfigurationBootstrap.buildBasicInlineKeyInfoCredentialResolver());
        mdCredentialResolver.initialize();

        // Criteria for credential resolution: signing usage + config + entity/role.
        val criteriaSet = new CriteriaSet();
        criteriaSet.add(new SignatureSigningConfigurationCriterion(config));
        criteriaSet.add(new UsageCriterion(UsageType.SIGNING));

        // Resolve the IdP's own entity id (possibly service-specific) from metadata.
        val entityIdCriteriaSet = new CriteriaSet(
            new EvaluableEntityRoleEntityDescriptorCriterion(IDPSSODescriptor.DEFAULT_ELEMENT_NAME),
            new SamlIdPSamlRegisteredServiceCriterion(service));
        LOGGER.trace("Resolving entity id from SAML2 IdP metadata for signature signing configuration is [{}]", service.getName());
        val entityId = Objects.requireNonNull(samlIdPMetadataResolver.resolveSingle(entityIdCriteriaSet)).getEntityID();
        LOGGER.trace("Resolved entity id from SAML2 IdP metadata is [{}]", entityId);
        criteriaSet.add(new EntityIdCriterion(entityId));
        criteriaSet.add(new EntityRoleCriterion(IDPSSODescriptor.DEFAULT_ELEMENT_NAME));
        criteriaSet.add(new SamlIdPSamlRegisteredServiceCriterion(service));
        LOGGER.trace("Resolved signing credentials based on criteria [{}]", criteriaSet);

        val credentials = Sets.newLinkedHashSet(mdCredentialResolver.resolve(criteriaSet));
        LOGGER.trace("Resolved [{}] signing credentials", credentials.size());

        // Pair each resolved credential with the signing private key, then filter
        // by the optional per-service fingerprint.
        val finalCredentials = new ArrayList<Credential>();
        credentials.stream()
            .map(c -> getResolvedSigningCredential(c, privateKey, service))
            .filter(Objects::nonNull)
            .filter(c -> doesCredentialFingerprintMatch(c, service))
            .forEach(finalCredentials::add);

        if (finalCredentials.isEmpty()) {
            LOGGER.error("Unable to locate any signing credentials for service [{}]", service.getName());
            throw new IllegalArgumentException("Unable to locate signing credentials");
        }

        config.setSigningCredentials(finalCredentials);
        LOGGER.trace("Signature signing credentials configured with [{}] credentials", finalCredentials.size());
        return config;
    }

    /**
     * Gets signing private key.
     * Loads the key material via the metadata locator; the key algorithm comes
     * from the service override when present, else from the global IdP settings.
     *
     * @param registeredService the registered service
     * @return the signing private key
     * @throws Exception the exception
     */
    protected PrivateKey getSigningPrivateKey(final SamlRegisteredService registeredService) throws Exception {
        val samlIdp = casProperties.getAuthn().getSamlIdp();
        val signingKey = samlIdPMetadataLocator.resolveSigningKey(Optional.of(registeredService));
        val privateKeyFactoryBean = new PrivateKeyFactoryBean();
        privateKeyFactoryBean.setLocation(signingKey);
        if (StringUtils.isBlank(registeredService.getSigningKeyAlgorithm())) {
            privateKeyFactoryBean.setAlgorithm(samlIdp.getAlgs().getPrivateKeyAlgName());
        } else {
            privateKeyFactoryBean.setAlgorithm(registeredService.getSigningKeyAlgorithm());
        }
        // Non-singleton so each call re-reads the key resource.
        privateKeyFactoryBean.setSingleton(false);
        LOGGER.debug("Locating signature signing key for [{}] using algorithm [{}]",
            registeredService.getMetadataLocation(), privateKeyFactoryBean.getAlgorithm());
        return privateKeyFactoryBean.getObject();
    }

    /**
     * Builds the signature signing configuration, starting from the OpenSAML
     * defaults and layering on global IdP overrides and then per-service
     * overrides (service settings win when non-empty) for reference digest
     * methods, signature algorithms, blocked/allowed algorithm lists,
     * canonicalization algorithm and include/exclude precedence.
     */
    private BasicSignatureSigningConfiguration configureSignatureSigningSecurityConfiguration(final SamlRegisteredService service) {
        val config = DefaultSecurityConfigurationBootstrap.buildDefaultSignatureSigningConfiguration();
        LOGGER.trace("Default signature signing blocked algorithms: [{}]", config.getExcludedAlgorithms());
        LOGGER.trace("Default signature signing signature algorithms: [{}]", config.getSignatureAlgorithms());
        LOGGER.trace("Default signature signing signature canonicalization algorithm: [{}]", config.getSignatureCanonicalizationAlgorithm());
        LOGGER.trace("Default signature signing allowed algorithms: [{}]", config.getIncludedAlgorithms());
        LOGGER.trace("Default signature signing reference digest methods: [{}]", config.getSignatureReferenceDigestMethods());

        val samlIdp = casProperties.getAuthn().getSamlIdp();
        val globalAlgorithms = samlIdp.getAlgs();

        val overrideSignatureReferenceDigestMethods = service.getSigningSignatureReferenceDigestMethods().isEmpty()
            ? globalAlgorithms.getOverrideSignatureReferenceDigestMethods()
            : service.getSigningSignatureReferenceDigestMethods();
        if (overrideSignatureReferenceDigestMethods != null && !overrideSignatureReferenceDigestMethods.isEmpty()) {
            config.setSignatureReferenceDigestMethods(overrideSignatureReferenceDigestMethods);
        }

        val overrideSignatureAlgorithms = service.getSigningSignatureAlgorithms().isEmpty()
            ? globalAlgorithms.getOverrideSignatureAlgorithms()
            : service.getSigningSignatureAlgorithms();
        if (overrideSignatureAlgorithms != null && !overrideSignatureAlgorithms.isEmpty()) {
            config.setSignatureAlgorithms(overrideSignatureAlgorithms);
        }

        val overrideBlockedSignatureAlgorithms = service.getSigningSignatureBlackListedAlgorithms().isEmpty()
            ? globalAlgorithms.getOverrideBlockedSignatureSigningAlgorithms()
            : service.getSigningSignatureBlackListedAlgorithms();
        if (overrideBlockedSignatureAlgorithms != null && !overrideBlockedSignatureAlgorithms.isEmpty()) {
            config.setExcludedAlgorithms(overrideBlockedSignatureAlgorithms);
        }

        val overrideAllowedAlgorithms = service.getSigningSignatureWhiteListedAlgorithms().isEmpty()
            ? globalAlgorithms.getOverrideAllowedSignatureSigningAlgorithms()
            : service.getSigningSignatureWhiteListedAlgorithms();
        if (overrideAllowedAlgorithms != null && !overrideAllowedAlgorithms.isEmpty()) {
            config.setIncludedAlgorithms(overrideAllowedAlgorithms);
        }

        if (StringUtils.isNotBlank(service.getSigningSignatureCanonicalizationAlgorithm())) {
            config.setSignatureCanonicalizationAlgorithm(service.getSigningSignatureCanonicalizationAlgorithm());
        } else if (StringUtils.isNotBlank(globalAlgorithms.getOverrideSignatureCanonicalizationAlgorithm())) {
            config.setSignatureCanonicalizationAlgorithm(globalAlgorithms.getOverrideSignatureCanonicalizationAlgorithm());
        }
        LOGGER.trace("Finalized signature signing blocked algorithms: [{}]", config.getExcludedAlgorithms());
        LOGGER.trace("Finalized signature signing signature algorithms: [{}]", config.getSignatureAlgorithms());
        LOGGER.trace("Finalized signature signing signature canonicalization algorithm: [{}]", config.getSignatureCanonicalizationAlgorithm());
        LOGGER.trace("Finalized signature signing allowed algorithms: [{}]", config.getIncludedAlgorithms());
        LOGGER.trace("Finalized signature signing reference digest methods: [{}]", config.getSignatureReferenceDigestMethods());

        if (StringUtils.isNotBlank(service.getWhiteListBlackListPrecedence())) {
            val precedence = BasicAlgorithmPolicyConfiguration.Precedence.valueOf(service.getWhiteListBlackListPrecedence().trim().toUpperCase());
            config.setIncludeExcludePrecedence(precedence);
        }
        return config;
    }

    /**
     * Converts a metadata-resolved credential into a signing credential carrying
     * the IdP's private key, honoring the requested credential type (BASIC uses
     * the raw public key; X509 reuses the credential's certificate or falls back
     * to the locator's signing certificate). Returns null on any failure, which
     * the caller filters out.
     */
    private AbstractCredential getResolvedSigningCredential(final Credential credential, final PrivateKey privateKey,
                                                            final SamlRegisteredService service) {
        try {
            val samlIdp = casProperties.getAuthn().getSamlIdp();
            val credType = SamlIdPResponseProperties.SignatureCredentialTypes.valueOf(
                StringUtils.defaultIfBlank(service.getSigningCredentialType(),
                    samlIdp.getResponse().getCredentialType().name()).toUpperCase());
            LOGGER.trace("Requested credential type [{}] is found for service [{}]", credType, service.getName());
            switch (credType) {
                case BASIC:
                    LOGGER.debug("Building credential signing key [{}] based on requested credential type", credType);
                    if (credential.getPublicKey() == null) {
                        throw new IllegalArgumentException("Unable to identify the public key from the signing credential");
                    }
                    return finalizeSigningCredential(new BasicCredential(credential.getPublicKey(), privateKey), credential);
                case X509:
                default:
                    if (credential instanceof BasicX509Credential) {
                        val certificate = BasicX509Credential.class.cast(credential).getEntityCertificate();
                        LOGGER.debug("Locating signature signing certificate from credential [{}]", CertUtils.toString(certificate));
                        return finalizeSigningCredential(new BasicX509Credential(certificate, privateKey), credential);
                    }
                    val signingCert = samlIdPMetadataLocator.resolveSigningCertificate(Optional.of(service));
                    LOGGER.debug("Locating signature signing certificate file from [{}]", signingCert);
                    val certificate = SamlUtils.readCertificate(signingCert);
                    return finalizeSigningCredential(new BasicX509Credential(certificate, privateKey), credential);
            }
        } catch (final Exception e) {
            LoggingUtils.error(LOGGER, e);
        }
        return null;
    }
}
/*--- formatted by Jindent 2.1, (www.c-lab.de/~jindent) ---*/ /** * *************************************************************** * The LEAP libraries, when combined with certain JADE platform components, * provide a run-time environment for enabling FIPA agents to execute on * lightweight devices running Java. LEAP and JADE teams have jointly * designed the API for ease of integration and hence to take advantage * of these dual developments and extensions so that users only see * one development platform and a * single homogeneous set of APIs. Enabling deployment to a wide range of * devices whilst still having access to the full development * environment and functionalities that JADE provides. * Copyright (C) 2001 Telecom Italia LAB S.p.A. * Copyright (C) 2001 Motorola. * * GNU Lesser General Public License * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation, * version 2.1 of the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. 
 * ************************************************************** */
package jade.imtp.leap.JICP;

//#MIDP_EXCLUDE_FILE

import jade.core.Profile;
import jade.imtp.leap.*;
import jade.security.JADESecurityException;
import jade.util.Logger;
import jade.util.leap.Properties;

import java.io.EOFException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.net.*;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;

/**
 * JICP server thread: accepts incoming TCP connections on a ServerSocket and
 * spawns one ConnectionHandler thread per accepted connection. Packets with a
 * recipient-ID are routed to the corresponding JICPMediator; packets without a
 * recipient-ID are handed to the local ICP.Listener (the command dispatcher).
 *
 * @author Giovanni Caire - TILAB
 * @author Ronnie Taib - Motorola
 * @author Nicolas Lhuillier - Motorola
 * @author Steffen Rusitschka - Siemens
 */
public class JICPServer extends Thread
//#J2ME_EXCLUDE_BEGIN
    implements PDPContextManager.Listener, JICPMediatorManager
//#J2ME_EXCLUDE_END
{
    // Per-request lifecycle states used by ConnectionHandler.run() to decide
    // how to report an exception (see the catch block there).
    private static final int INIT = 0;
    private static final int REQUEST_READ = 1;
    private static final int REQUEST_SERVED = 2;
    private static final int RESPONSE_SENT = 3;

    // Configuration property names (read from the Profile)
    public static final String ACCEPT_LOCAL_HOST_ONLY = "jade_imtp_leap_JICP_JICPServer_acceptlocalhostonly";
    public static final String UNCHECK_LOCAL_HOST = "jade_imtp_leap_JICP_JICPServer_unchecklocalhost";

    // Server lifecycle states
    private static final int LISTENING = 0;
    private static final int TERMINATING = 1;

    private int state = LISTENING;

    // Local host address/name this server exposes to peers
    private String host;
    // Listening socket; set to null once the server has terminated
    private ServerSocket server;
    // Handles packets that carry no recipient-ID (the local CommandDispatcher)
    private ICP.Listener cmdListener;
    // Counter used to build default back-end mediator IDs
    private int mediatorCnt = 1;
    // mediator-ID --> JICPMediator
    private Hashtable mediators = new Hashtable();

    //#J2ME_EXCLUDE_BEGIN
    public static final String ACCEPT_MEDIATORS = "jade_imtp_leap_JICP_JICPServer_acceptmediators";
    private boolean acceptMediators = true;

    private Properties leapProps = new Properties();
    private PDPContextManager myPDPContextManager;
    //#J2ME_EXCLUDE_END

    // When the number of live handlers reaches this limit, connections are
    // closed right after serving one command instead of being kept alive.
    private int maxHandlers;
    // Currently active ConnectionHandler threads
    private Vector connectionHandlers;
    private ConnectionFactory connFactory;
    private Logger myLogger;

    /**
     * Creates and configures the JICP server (does not start listening until
     * {@link #run()} is entered).
     *
     * @param p the Profile holding host/port and behaviour options
     * @param myPeer the owning JICPPeer, used to resolve the local address and create the ServerSocket
     * @param l listener that serves commands addressed to no specific mediator
     * @param f factory producing Connection wrappers around accepted sockets
     * @param max maximum number of concurrently kept-alive connection handlers
     * @throws ICPException if an explicitly specified local host is not a valid local address,
     *         or the server socket cannot be created
     */
    public JICPServer(Profile p, JICPPeer myPeer, ICP.Listener l, ConnectionFactory f, int max) throws ICPException {
        connectionHandlers = new Vector();
        cmdListener = l;
        connFactory = f;
        maxHandlers = max;
        myLogger = Logger.getMyLogger(getClass().getName());

        // Property keys are optionally prefixed with "<peer-id>-"; idLength
        // marks the prefix boundary so the buffer can be reused per key.
        StringBuffer sb = null;
        int idLength;
        String peerID = myPeer.getID();
        if (peerID != null) {
            sb = new StringBuffer(peerID);
            sb.append('-');
            idLength = sb.length();
        } else {
            sb = new StringBuffer();
            idLength = 0;
        }

        // Local host
        sb.append(JICPProtocol.LOCAL_HOST_KEY);
        host = p.getParameter(sb.toString(), null);
        boolean acceptLocalHostOnly = false;
        if (host == null || host.equals(Profile.LOCALHOST_CONSTANT)) {
            // Local host not specified --> Get it automatically
            sb.setLength(idLength);
            sb.append(JICPProtocol.REMOTE_URL_KEY);
            String remoteURL = p.getParameter(sb.toString(), null);
            if (remoteURL != null) {
                // Retrieve the local host address by means of the GET_ADDRESS JICP functionality
                host = myPeer.getAddress(remoteURL);
            } else {
                // Retrieve the host address/name from the underlying operating system
                host = Profile.getDefaultNetworkName(p.getBooleanProperty(Profile.PRIVILEDGE_LOGICAL_NAME, false));
            }
        } else {
            // Unless the UNCKECK_LOCAL_HOST property is set, if a local-host is explicitly specified check
            // that it is a valid local address
            if (!p.getBooleanProperty(UNCHECK_LOCAL_HOST, false) && !Profile.isLocalHost(host)) {
                throw new ICPException("Error: Not possible to launch JADE on a remote host ("+host+"). Check the -host and -local-host options.");
            }
            // Then if the ACCEPT_LOCAL_HOST_ONLY property is specified,
            // we will accept connections only on the specified local network address
            acceptLocalHostOnly = p.getBooleanProperty(ACCEPT_LOCAL_HOST_ONLY, false);
        }

        // Local port: a peripheral container can change it if busy...
        int port = JICPProtocol.DEFAULT_PORT;
        boolean changePortIfBusy = !p.getBooleanProperty(Profile.MAIN, true) || p.getBooleanProperty(LEAPIMTPManager.CHANGE_PORT_IF_BUSY, false);
        sb.setLength(idLength);
        sb.append(JICPProtocol.LOCAL_PORT_KEY);
        String strPort = p.getParameter(sb.toString(), null);
        try {
            port = Integer.parseInt(strPort);
        } catch (Exception e) {
            // Try to use the Peer-ID as the port number
            try {
                port = Integer.parseInt(peerID);
            } catch (Exception e1) {
                // Keep default
            }
        }

        //#J2ME_EXCLUDE_BEGIN
        // Get the accept-mediators option
        acceptMediators = p.getBooleanProperty(ACCEPT_MEDIATORS, true);
        if (acceptMediators) {
            // Read the LEAP configuration properties.
            // NOTE(review): LEAP_PROPERTY_FILE, LEAP_PROPERTY_FILE_DEFAULT and
            // PDP_CONTEXT_MANAGER_CLASS are constants declared elsewhere in this file/class.
            sb.setLength(idLength);
            sb.append(LEAP_PROPERTY_FILE);
            String fileName = p.getParameter(sb.toString(), LEAP_PROPERTY_FILE_DEFAULT);
            try {
                leapProps.load(fileName);
            } catch (Exception e) {
                myLogger.log(Logger.FINE, "Can't read LEAP property file " + fileName + ". " + e);
                // Ignore: no back end properties specified
            }

            // Initialize the PDPContextManager if specified
            String pdpContextManagerClass = leapProps.getProperty(PDP_CONTEXT_MANAGER_CLASS);
            if (pdpContextManagerClass != null) {
                try {
                    myLogger.log(Logger.INFO, "Loading PDPContextManager of class " + pdpContextManagerClass);
                    myPDPContextManager = (PDPContextManager) Class.forName(pdpContextManagerClass).newInstance();
                    myPDPContextManager.init(leapProps);
                    myPDPContextManager.registerListener(this);
                } catch (Throwable t) {
                    // Any failure loading/initializing the manager disables PDP-context handling
                    t.printStackTrace();
                    myPDPContextManager = null;
                }
            }
        }
        //#J2ME_EXCLUDE_END

        // Create the ServerSocket. When acceptLocalHostOnly is set, bind to the
        // configured local address only; otherwise bind to all interfaces.
        server = myPeer.getServerSocket((acceptLocalHostOnly ? host : null), port, changePortIfBusy);
        setDaemon(true);
        setName("JICPServer-" + getLocalPort());
    }

    /** @return the TCP port the server socket is actually bound to */
    public int getLocalPort() {
        return server.getLocalPort();
    }

    /** @return the local host address/name this server exposes */
    public String getLocalHost() {
        // If a local-host was not specified, we accept connection on all local network addresses,
        // but we expose the local host address we "prefer".
        return host;
    }

    /**
     * Shut down this JICP server: flips the state to TERMINATING, closes the
     * server socket to unblock accept(), and waits for the listening thread to exit.
     */
    public synchronized void shutdown() {
        if(myLogger.isLoggable(Logger.FINE)) myLogger.log(Logger.FINE,"Shutting down JICPServer...");

        state = TERMINATING;
        try {
            // Force the listening thread (this) to exit from the accept()
            // Calling this.interrupt(); should be the right way, but it seems
            // not to work...so do that by closing the server socket.
            server.close();

            // Wait for the listening thread to complete
            this.join();
        } catch (IOException ioe) {
            ioe.printStackTrace();
        } catch (InterruptedException ie) {
            ie.printStackTrace();
        }
    }

    /**
     * JICPServer thread entry point. Accept incoming connections
     * and for each of them start a ConnectionHandler that handles it.
     * On termination, closes the server socket, all live connection handlers
     * and (J2SE only) kills every registered mediator.
     */
    public void run() {
        while (state != TERMINATING) {
            try {
                // Accept connection
                Socket s = server.accept();
                InetAddress addr = s.getInetAddress();
                int port = s.getPort();
                if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"Incoming connection from "+addr+":"+port);
                Connection c = connFactory.createConnection(s);
                ConnectionHandler ch = new ConnectionHandler(c, addr, port);
                if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"Create new ConnectionHandler ("+ch+")");
                connectionHandlers.addElement(ch);
                ch.start();    // start a handler and go back to listening
            } catch (InterruptedIOException e) {
                // These can be generated by socket timeout (just ignore
                // the exception) or by a call to the shutdown()
                // method (the state has been set to TERMINATING and the
                // server will exit).
            } catch (Exception e) {
                if (state == LISTENING) {
                    if(myLogger.isLoggable(Logger.WARNING)) myLogger.log(Logger.WARNING,"Problems accepting a new connection");
                    e.printStackTrace();
                    // Stop listening
                    state = TERMINATING;
                }
            }
        } // END of while(listen)

        if(myLogger.isLoggable(Logger.FINE)) myLogger.log(Logger.FINE,"JICPServer terminated");

        // release socket
        try {
            server.close();
        } catch (IOException io) {
            if(myLogger.isLoggable(Logger.WARNING)) myLogger.log(Logger.WARNING,"I/O error closing the server socket");
            io.printStackTrace();
        }
        server = null;

        // Close all connection handler
        synchronized (connectionHandlers) {
            ConnectionHandler ch;
            Enumeration en = connectionHandlers.elements();
            while(en.hasMoreElements()) {
                ch = (ConnectionHandler) en.nextElement();
                ch.close();
            }
        }

        //#J2ME_EXCLUDE_BEGIN
        // Close all mediators
        Enumeration e = mediators.elements();
        while (e.hasMoreElements()) {
            JICPMediator m = (JICPMediator) e.nextElement();
            m.kill();
        }
        mediators.clear();
        //#J2ME_EXCLUDE_END
    }

    /**
     * Called by the JICPPeer ticker at each tick; forwards the tick to every
     * registered mediator (J2SE only).
     */
    public void tick(long currentTime) {
        //#J2ME_EXCLUDE_BEGIN
        synchronized (mediators) {
            Enumeration e = mediators.elements();
            while (e.hasMoreElements()) {
                JICPMediator m = (JICPMediator) e.nextElement();
                m.tick(currentTime);
            }
        }
        //#J2ME_EXCLUDE_END
    }

    /** Inner class ConnectionHandler.
Handle a connection accepted by this JICPServer: reads JICP packets in a loop,
       dispatches them (mediator, local listener, or protocol-level request) and
       writes back the reply. */
    class ConnectionHandler extends Thread {
        private Connection c;
        private InetAddress addr;      // remote peer address (for logging and PDP-context lookup)
        private int port;              // remote peer port
        private boolean loop = false;  // keep-alive: keep serving requests on this connection
        private int status = INIT;     // request lifecycle state (INIT/REQUEST_READ/REQUEST_SERVED/RESPONSE_SENT)
        private boolean closeConnection = true; // false when the connection was handed over to a mediator

        /**
         * Constructor declaration
         * @param c the accepted connection to serve
         * @param addr remote peer address
         * @param port remote peer port
         */
        public ConnectionHandler(Connection c, InetAddress addr, int port) {
            this.c = c;
            this.addr = addr;
            this.port = port;
        }

        /**
         * close connection handler. Behaviour depends on where the handler
         * currently is in the request/response cycle.
         */
        public void close() {
            if (status != RESPONSE_SENT) {
                // We are serving a request --> Prepare to close connection handler
                loop = false;
                closeConnection = true;
                if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"Predispose to close connection handler ("+this+")");
            } else {
                // We are waiting for the next request --> Close connection to force connection handler termination
                try {
                    if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"Close connection socket to force exit from connection handler ("+this+")");
                    c.close();
                } catch (IOException e) {
                    if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"Exception closing connection with "+addr+":"+port);
                }
            }
        }

        /**
         * Thread entry point: serve packets from this connection until the
         * keep-alive loop ends or an exception occurs, then clean up in finally.
         */
        public void run() {
            if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"CommandHandler started");
            byte type = (byte) 0;
            try {
                do {
                    // Read the incoming JICPPacket
                    JICPPacket pkt = c.readPacket();
                    JICPPacket reply = null;
                    status = REQUEST_READ;
                    type = pkt.getType();
                    switch (type) {
                    case JICPProtocol.COMMAND_TYPE:
                    case JICPProtocol.RESPONSE_TYPE:
                        // Get the right recipient and let it process the command.
                        String recipientID = pkt.getRecipientID();
                        if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"Recipient: "+recipientID);
                        if (recipientID != null) {
                            //#J2ME_EXCLUDE_BEGIN
                            // The recipient is one of the mediators
                            JICPMediator m = (JICPMediator) mediators.get(recipientID);
                            if (m != null) {
                                if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"Passing incoming packet to mediator "+recipientID);
                                reply = m.handleJICPPacket(pkt, addr, port);
                            } else {
                                // If the packet is a response we don't need to reply
                                if (type == JICPProtocol.COMMAND_TYPE) {
                                    reply = new JICPPacket("Unknown recipient "+recipientID, null);
                                }
                            }
                            //#J2ME_EXCLUDE_END
                        } else {
                            // The recipient is my ICP.Listener (the local CommandDispatcher)
                            loop = true;
                            if (type == JICPProtocol.COMMAND_TYPE) {
                                if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"Passing incoming COMMAND to local listener");
                                byte[] rsp = cmdListener.handleCommand(pkt.getData());
                                byte dataInfo = JICPProtocol.DEFAULT_INFO;
                                if (connectionHandlers.size() >= maxHandlers) {
                                    // Too many connections open --> close the connection as soon as the command has been served
                                    dataInfo |= JICPProtocol.TERMINATED_INFO;
                                    loop = false;
                                }
                                reply = new JICPPacket(JICPProtocol.RESPONSE_TYPE, dataInfo, rsp);
                            }
                            if ((pkt.getInfo() & JICPProtocol.TERMINATED_INFO) != 0) {
                                // Peer announced termination --> do not keep the connection alive
                                loop = false;
                            }
                        }
                        break;

                    //#J2ME_EXCLUDE_BEGIN
                    case JICPProtocol.GET_ADDRESS_TYPE:
                        // Respond sending back the caller address
                        if(myLogger.isLoggable(Logger.INFO)) myLogger.log(Logger.INFO,"Received a GET_ADDRESS request from "+addr+":"+port);
                        String addressStr = addr.getHostAddress();
                        if (pkt.getData() != null ) {
                            // Non-empty payload means the caller also wants its port
                            addressStr += ":"+port;
                        }
                        reply = new JICPPacket(JICPProtocol.RESPONSE_TYPE, JICPProtocol.DEFAULT_INFO, addressStr.getBytes());
                        break;

                    case JICPProtocol.CREATE_MEDIATOR_TYPE:
                        if (acceptMediators) {
                            if(myLogger.isLoggable(Logger.INFO)) myLogger.log(Logger.INFO,"Received a CREATE_MEDIATOR request from "+ addr + ":" + port);
                            // Starts a new Mediator and sends back its ID
                            String s = new String(pkt.getData());
                            Properties p = FrontEndStub.parseCreateMediatorRequest(s);

                            // If the platform-name is specified refuse the request: JICPServer does not accept this kind of mediator creation request
                            String pn = p.getProperty(Profile.PLATFORM_ID);
                            if (pn != null) {
                                myLogger.log(Logger.WARNING, "CREATE_MEDIATOR request with specified platform-name: "+pn);
                                reply = new JICPPacket(JICPProtocol.NOT_AUTHORIZED_ERROR, new JADESecurityException("Platform-name specified"));
                                break;
                            }

                            // If there is a PDPContextManager add the PDP context properties
                            if (myPDPContextManager != null) {
                                try{
                                    //FIXME: Get username and password specified among the CREATE_MEDIATOR properties
                                    Properties pdpContextInfo = myPDPContextManager.getPDPContextInfo(addr, p.getProperty(Profile.OWNER));
                                    myLogger.log(Logger.FINE, "PDPContext properties = "+pdpContextInfo);
                                    mergeProperties(p, pdpContextInfo);
                                }catch(JADESecurityException jse){
                                    if(myLogger.isLoggable(Logger.WARNING)) myLogger.log(Logger.WARNING,"CREATE_MEDIATOR request from non authorized address: "+addr);
                                    reply = new JICPPacket(JICPProtocol.NOT_AUTHORIZED_ERROR, jse);
                                    break;
                                }
                            }

                            // Get mediator ID from the passed properties (if present)
                            String id = p.getProperty(JICPProtocol.MEDIATOR_ID_KEY);
                            String msisdn = p.getProperty(PDPContextManager.MSISDN);
                            if(id != null) {
                                if (msisdn != null && !msisdn.equals(id)) {
                                    // Security attack: Someone is pretending to be someone other
                                    if(myLogger.isLoggable(Logger.WARNING)) myLogger.log(Logger.WARNING,"CREATE_MEDIATOR request with mediator-id != MSISDN. Address is: "+addr);
                                    reply = new JICPPacket(JICPProtocol.NOT_AUTHORIZED_ERROR, new JADESecurityException("Inconsistent mediator-id and msisdn"));
                                    break;
                                }
                                // An existing front-end whose back-end was lost. The BackEnd must resynch
                                p.setProperty(jade.core.BackEndContainer.RESYNCH, "true");
                            } else {
                                // Use the MSISDN (if present)
                                id = msisdn;
                                if (id == null) {
                                    // Construct a default id using the string representation of the server's TCP endpoint
                                    id = "BE-"+getLocalHost() + ':' + getLocalPort() + '-' + String.valueOf(mediatorCnt++);
                                }
                            }

                            // If last connection from the same device aborted, the old
                            // BackEnd may still exist as a zombie. In case ids are assigned
                            // using the MSISDN the new name is equals to the old one.
                            if (id.equals(msisdn)) {
                                JICPMediator old = (JICPMediator) mediators.get(id);
                                if (old != null) {
                                    // This is a zombie mediator --> kill it
                                    myLogger.log(Logger.INFO, "Replacing old mediator "+id);
                                    old.kill();
                                    // Be sure the zombie container has been removed
                                    waitABit(1000);
                                }
                            }

                            // Start the mediator; when it takes ownership of the connection,
                            // this handler must not close it in the finally block below.
                            JICPMediator m = startMediator(id, p);
                            closeConnection = !m.handleIncomingConnection(c, pkt, addr, port);
                            mediators.put(m.getID(), m);

                            // Create an ad-hoc reply including the assigned mediator-id and the IP address
                            p.setProperty(JICPProtocol.MEDIATOR_ID_KEY, m.getID());
                            p.setProperty(JICPProtocol.LOCAL_HOST_KEY, addr.getHostAddress());
                            String replyMsg = FrontEndStub.encodeCreateMediatorResponse(p);
                            reply = new JICPPacket(JICPProtocol.RESPONSE_TYPE, JICPProtocol.DEFAULT_INFO, replyMsg.getBytes());
                            reply.setSessionID((byte) 31); // Dummy session ID != from valid ones
                        } else {
                            myLogger.log(Logger.WARNING,"CREATE_MEDIATOR request received with accept-mediator option set to false. Address is: "+addr);
                            reply = new JICPPacket(JICPProtocol.NOT_AUTHORIZED_ERROR, null);
                        }
                        break;

                    case JICPProtocol.CONNECT_MEDIATOR_TYPE:
                        if (acceptMediators) {
                            // A mediated container is (re)connecting to its mediator
                            recipientID = pkt.getRecipientID();
                            // FIXME: If there is a PDPContextManager check that the recipientID is the MSISDN
                            if(myLogger.isLoggable(Logger.INFO)) myLogger.log(Logger.INFO,"Received a CONNECT_MEDIATOR request from "+addr+":"+port+". Mediator ID is "+recipientID);
                            JICPMediator m = (JICPMediator) mediators.get(recipientID);
                            if (m != null) {
                                // Don't close the connection, but pass it to the proper
                                // mediator.
                                closeConnection = !m.handleIncomingConnection(c, pkt, addr, port);
                                reply = new JICPPacket(JICPProtocol.RESPONSE_TYPE, JICPProtocol.DEFAULT_INFO, addr.getHostAddress().getBytes());
                            } else {
                                if(myLogger.isLoggable(Logger.INFO)) myLogger.log(Logger.INFO,"Mediator "+recipientID+" not found");
                                reply = new JICPPacket(JICPProtocol.NOT_FOUND_ERROR, null);
                            }
                        } else {
                            myLogger.log(Logger.WARNING,"CONNECT_MEDIATOR request received with accept-mediator option set to false. Address is: "+addr);
                            reply = new JICPPacket(JICPProtocol.NOT_AUTHORIZED_ERROR, null);
                        }
                        break;
                    //#J2ME_EXCLUDE_END

                    default:
                        // Send back an error response
                        if(myLogger.isLoggable(Logger.WARNING)) myLogger.log(Logger.WARNING,"Uncorrect JICP data type: "+pkt.getType());
                        reply = new JICPPacket("Uncorrect JICP data type: "+pkt.getType(), null);
                    }
                    status = REQUEST_SERVED;

                    // Send the actual response data
                    if (reply != null) {
                        //reply.writeTo(out);
                        c.writePacket(reply);
                    }
                    status = RESPONSE_SENT;
                } while (loop);
            } catch (Exception e) {
                // Report the failure differently depending on where in the
                // request cycle it happened.
                switch (status) {
                case INIT:{
                    if(myLogger.isLoggable(Logger.SEVERE)) myLogger.log(Logger.SEVERE,"Communication error reading incoming packet from "+addr+":"+port);
                    e.printStackTrace();
                }
                break;
                case REQUEST_READ:
                    if(myLogger.isLoggable(Logger.SEVERE)) myLogger.log(Logger.SEVERE,"Error handling incoming packet");
                    e.printStackTrace();
                    // If the incoming packet was a command, try
                    // to send back a generic error response
                    if (type == JICPProtocol.COMMAND_TYPE && c != null) {
                        try {
                            c.writePacket(new JICPPacket("Unexpected error", e));
                        } catch (IOException ioe) {
                            // Just print a warning
                            if(myLogger.isLoggable(Logger.WARNING)) myLogger.log(Logger.WARNING,"Can't send back error indication "+ioe);
                        }
                    }
                    break;
                case REQUEST_SERVED:
                    if(myLogger.isLoggable(Logger.SEVERE)) myLogger.log(Logger.SEVERE,"Communication error writing return packet to "+addr+":"+port+" ["+e.toString()+"]");
                    break;
                case RESPONSE_SENT:
                    // This is a re-used connection waiting for the next incoming packet
                    if (e instanceof EOFException) {
                        if(myLogger.isLoggable(Logger.FINE)) myLogger.log(Logger.FINE,"Client "+addr+":"+port+" has closed the connection.");
                    } else {
                        if(myLogger.isLoggable(Logger.FINE)) myLogger.log(Logger.FINE,"Unexpected client "+addr+":"+port+" termination. "+e.toString());
                    }
                }
            } finally {
                try {
                    // closeConnection is false only when a mediator took
                    // ownership of the connection above.
                    if (closeConnection) {
                        // Close connection
                        if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"Closing connection with "+addr+":"+port);
                        c.close();
                    }
                } catch (IOException io) {
                    if(myLogger.isLoggable(Logger.INFO)) myLogger.log(Logger.INFO,"I/O error while closing the connection");
                    io.printStackTrace();
                }
                connectionHandlers.remove(this);
                if(myLogger.isLoggable(Logger.FINEST)) myLogger.log(Logger.FINEST,"ConnectionHandler closed ("+this+")");
            }
        }
    } // END of inner class ConnectionHandler

    //#J2ME_EXCLUDE_BEGIN
    /**
     * Called by a Mediator to notify that it is no longer active
     */
    public void deregisterMediator(String id) {
        myLogger.log(Logger.FINE, "Deregistering mediator "+id);
        mediators.remove(id);
    }

    /** Called by the PDPContextManager (if any) */
    public void handlePDPContextClosed(String id) {
        // FIXME: to be implemented
    }

    /** Copies every property of p2 into p1 (p2 wins on duplicate keys). */
    private void mergeProperties(Properties p1, Properties p2) {
        Enumeration e = p2.propertyNames();
        while (e.hasMoreElements()) {
            String key = (String) e.nextElement();
            p1.setProperty(key, p2.getProperty(key));
        }
    }

    /**
     * Instantiates and initializes the mediator class named by the
     * MEDIATOR_CLASS_KEY property.
     * @throws ICPException if no mediator class is specified
     * @throws Exception if instantiation or initialization fails
     */
    private JICPMediator startMediator(String id, Properties p) throws Exception {
        String className = p.getProperty(JICPProtocol.MEDIATOR_CLASS_KEY);
        if (className != null) {
            JICPMediator m = (JICPMediator) Class.forName(className).newInstance();
            mergeProperties(p, leapProps);
            myLogger.log(Logger.FINE, "Initializing mediator "+id+" with properties "+p);
            m.init(this, id, p);
            return m;
        } else {
            throw new ICPException("No JICPMediator class specified.");
        }
    }

    /** Sleeps for t ms, silently ignoring interruption. */
    private void waitABit(long t) {
        try {
            Thread.sleep(t);
        } catch (InterruptedException ie) {
        }
    }
    //#J2ME_EXCLUDE_END
}
/** * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl2.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.rice.krad.uif.field; import org.apache.commons.lang.StringUtils; import org.kuali.rice.krad.datadictionary.parse.BeanTag; import org.kuali.rice.krad.datadictionary.parse.BeanTagAttribute; import org.kuali.rice.krad.datadictionary.validator.ValidationTrace; import org.kuali.rice.krad.datadictionary.validator.Validator; import org.kuali.rice.krad.uif.UifConstants; import org.kuali.rice.krad.uif.element.Link; import org.kuali.rice.krad.uif.util.LifecycleElement; import org.kuali.rice.krad.uif.widget.LightBox; /** * Field that encloses a link element. 
* * @author Kuali Rice Team (rice.collab@kuali.org) */ @BeanTag(name = "linkField", parent = "Uif-LinkField") public class LinkField extends FieldBase { private static final long serialVersionUID = -1908504471910271148L; private Link link; private String sortAs; public LinkField() { super(); } /** * The following initialization is performed: * * <ul> * <li>Set the linkLabel if blank to the Field label</li> * </ul> * * {@inheritDoc} */ @Override public void performInitialization(Object model) { super.performInitialization(model); if (StringUtils.isBlank(getLinkText())) { setLinkText(this.getLabel()); } } /** * PerformFinalize override - calls super, corrects the field's Label for attribute to point to this field's * content * * @param model the model * @param parent the parent component */ @Override public void performFinalize(Object model, LifecycleElement parent) { super.performFinalize(model, parent); //determine what id to use for the for attribute of the label, if present if (this.getFieldLabel() != null && this.getLink() != null && StringUtils.isNotBlank(this.getLink().getId())) { this.getFieldLabel().setLabelForComponentId(this.getLink().getId()); } } /** * Returns the <code>Link</code> field. * * @return The Link field */ @BeanTagAttribute(type= BeanTagAttribute.AttributeType.DIRECTORBYTYPE) public Link getLink() { return link; } /** * Setter for the <code>Link</code> component. * * @param link */ public void setLink(Link link) { this.link = link; } /** * Returns the label of the <code>Link</code> field that will be used to render the label of the link. * * @return The link label */ @BeanTagAttribute public String getLinkText() { return link.getLinkText(); } /** * Setter for the link label. Sets the value on the <code>Link</code> field. * * @param linkLabel */ public void setLinkText(String linkLabel) { link.setLinkText(linkLabel); } /** * Returns the target of the <code>Link</code> field that will be used to specify where to open the href. 
* * @return The target */ @BeanTagAttribute public String getTarget() { return link.getTarget(); } /** * Setter for the link target. Sets the value on the <code>Link</code> field. * * @param target */ public void setTarget(String target) { link.setTarget(target); } /** * Returns the href text of the <code>Link</code> field. * * @return The href text */ @BeanTagAttribute public String getHref() { return link.getHref(); } /** * Setter for the hrefText. Sets the value on the <code>Link</code> field. * * @param hrefText */ public void setHref(String hrefText) { link.setHref(hrefText); } /** * Returns the <code>LightBox</code> used to open the link in * * @return The <code>LightBox</code> */ @BeanTagAttribute(type= BeanTagAttribute.AttributeType.DIRECTORBYTYPE) public LightBox getLightBox() { if (link != null) { return link.getLightBox(); } return null; } /** * Setter for the lightBox * * @param lightBox */ public void setLightBox(LightBox lightBox) { if (link != null) { link.setLightBox(lightBox); } } @BeanTagAttribute(name = "sortAs") public String getSortAs() { return sortAs; } public void setSortAs(String sortAs) { if (!(sortAs.equals(UifConstants.TableToolsValues.DATE) || sortAs.equals(UifConstants.TableToolsValues.NUMERIC) || sortAs.equals(UifConstants.TableToolsValues.STRING))) { throw new IllegalArgumentException("invalid sortAs value of " + sortAs + ", allowed: " + UifConstants.TableToolsValues.DATE + "|" + UifConstants.TableToolsValues.NUMERIC + "|" + UifConstants.TableToolsValues.STRING); } this.sortAs = sortAs; } /** * {@inheritDoc} */ @Override public void completeValidation(ValidationTrace tracer) { tracer.addBean(this); // Checks that the link is set if (getLink() == null) { if (Validator.checkExpressions(this, "link")) { String currentValues[] = {"link = " + getLink()}; tracer.createError("Link should be set", currentValues); } } // Checks that the label is set if (getLabel() == null) { if (Validator.checkExpressions(this, "label")) { String 
currentValues[] = {"label =" + getLabel(), "link =" + getLink()}; tracer.createWarning("Label is null, link should be used instead", currentValues); } } super.completeValidation(tracer.getCopy()); } }
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.flowable.rest.service.api.runtime; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.util.Map; import org.apache.http.HttpStatus; import org.apache.http.client.methods.CloseableHttpResponse; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPut; import org.apache.http.entity.StringEntity; import org.flowable.engine.runtime.Execution; import org.flowable.engine.test.Deployment; import org.flowable.rest.service.BaseSpringRestTestCase; import org.flowable.rest.service.api.RestUrls; import org.junit.Test; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; /** * Test for all REST-operations related to a single execution resource. * * @author Frederik Heremans */ public class ExecutionResourceTest extends BaseSpringRestTestCase { /** * Test getting a single execution. 
 */
    @Test
    @Deployment(resources = { "org/flowable/rest/service/api/runtime/ExecutionResourceTest.process-with-subprocess.bpmn20.xml" })
    public void testGetExecution() throws Exception {
        // Start a process with a sub-process so we get a parent, a sub-process and a child execution
        Execution processInstanceExecution = runtimeService.startProcessInstanceByKey("processOne");
        Execution subProcessExecution = runtimeService.createExecutionQuery().activityId("subProcess").singleResult();
        assertNotNull(subProcessExecution);
        Execution childExecution = runtimeService.createExecutionQuery().activityId("processTask").singleResult();
        assertNotNull(childExecution);

        CloseableHttpResponse response = executeRequest(new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, processInstanceExecution.getId())), HttpStatus.SC_OK);

        // Check resulting parent execution: no activity, no parent, not suspended
        JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
        closeResponse(response);
        assertNotNull(responseNode);
        assertEquals(processInstanceExecution.getId(), responseNode.get("id").textValue());
        assertTrue(responseNode.get("activityId").isNull());
        assertFalse(responseNode.get("suspended").booleanValue());
        assertTrue(responseNode.get("parentUrl").isNull());
        // NOTE(review): duplicated "suspended" assertion below — possibly meant to check another field
        assertFalse(responseNode.get("suspended").booleanValue());
        assertTrue(responseNode.get("url").asText().endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, processInstanceExecution.getId())));
        assertTrue(responseNode.get("processInstanceUrl").asText().endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE, processInstanceExecution.getId())));

        // Check resulting child execution: in "processTask", parented by the sub-process execution
        response = executeRequest(new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, childExecution.getId())), HttpStatus.SC_OK);
        responseNode = objectMapper.readTree(response.getEntity().getContent());
        closeResponse(response);
        assertNotNull(responseNode);
        assertEquals(childExecution.getId(), responseNode.get("id").textValue());
        assertEquals("processTask", responseNode.get("activityId").textValue());
        assertFalse(responseNode.get("suspended").booleanValue());
        // NOTE(review): duplicated "suspended" assertion below — possibly meant to check another field
        assertFalse(responseNode.get("suspended").booleanValue());
        assertTrue(responseNode.get("url").asText().endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, childExecution.getId())));
        assertTrue(responseNode.get("parentUrl").asText().endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, subProcessExecution.getId())));
        assertTrue(responseNode.get("processInstanceUrl").asText().endsWith(RestUrls.createRelativeResourceUrl(RestUrls.URL_PROCESS_INSTANCE, processInstanceExecution.getId())));
    }

    /**
     * Test getting an unexisting execution: the resource must answer 404.
     */
    @Test
    public void testGetUnexistingExecution() throws Exception {
        CloseableHttpResponse response = executeRequest(new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, "unexisting")), HttpStatus.SC_NOT_FOUND);
        closeResponse(response);
    }

    /**
     * Test signalling a single execution, without signal name.
*/ @Test @Deployment(resources = { "org/flowable/rest/service/api/runtime/ExecutionResourceTest.process-with-signal.bpmn20.xml" }) public void testSignalExecution() throws Exception { runtimeService.startProcessInstanceByKey("processOne"); Execution signalExecution = runtimeService.createExecutionQuery().activityId("waitState").singleResult(); assertNotNull(signalExecution); assertEquals("waitState", signalExecution.getActivityId()); ObjectNode requestNode = objectMapper.createObjectNode(); requestNode.put("action", "signal"); // Signalling one causes process to move on to second signal and // execution is not finished yet HttpPut httpPut = new HttpPut(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, signalExecution.getId())); httpPut.setEntity(new StringEntity(requestNode.toString())); CloseableHttpResponse response = executeRequest(httpPut, HttpStatus.SC_OK); JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent()); closeResponse(response); assertEquals("anotherWaitState", responseNode.get("activityId").textValue()); assertEquals("anotherWaitState", runtimeService.createExecutionQuery().executionId(signalExecution.getId()).singleResult().getActivityId()); // Signalling again causes process to end response = executeRequest(httpPut, HttpStatus.SC_NO_CONTENT); closeResponse(response); // Check if process is actually ended assertNull(runtimeService.createExecutionQuery().executionId(signalExecution.getId()).singleResult()); } /** * Test signalling a single execution, without signal name. 
 */
    @Test
    @Deployment(resources = { "org/flowable/rest/service/api/runtime/ExecutionResourceTest.process-with-signal-event.bpmn20.xml" })
    public void testSignalEventExecution() throws Exception {
        Execution signalExecution = runtimeService.startProcessInstanceByKey("processOne");
        assertNotNull(signalExecution);

        ObjectNode requestNode = objectMapper.createObjectNode();
        requestNode.put("action", "signalEventReceived");
        // First use an unknown signal name to provoke a server-side error
        requestNode.put("signalName", "unexisting");

        Execution waitingExecution = runtimeService.createExecutionQuery().activityId("waitState").singleResult();
        assertNotNull(waitingExecution);

        // An unknown signal name is expected to yield a 500 response
        HttpPut httpPut = new HttpPut(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, waitingExecution.getId()));
        httpPut.setEntity(new StringEntity(requestNode.toString()));
        CloseableHttpResponse response = executeRequest(httpPut, HttpStatus.SC_INTERNAL_SERVER_ERROR);
        closeResponse(response);

        // Now use the signal name the process actually catches
        requestNode.put("signalName", "alert");

        // Sending signal event causes the execution to end (scope-execution for
        // the catching event)
        httpPut.setEntity(new StringEntity(requestNode.toString()));
        response = executeRequest(httpPut, HttpStatus.SC_OK);
        closeResponse(response);

        // Check if process is moved on to the other wait-state
        waitingExecution = runtimeService.createExecutionQuery().activityId("anotherWaitState").singleResult();
        assertNotNull(waitingExecution);
    }

    /**
     * Test signalling a single execution, with signal event.
     */
    @Test
    @Deployment(resources = { "org/flowable/rest/service/api/runtime/ExecutionResourceTest.process-with-signal-event.bpmn20.xml" })
    public void testSignalEventExecutionWithvariables() throws Exception {
        Execution signalExecution = runtimeService.startProcessInstanceByKey("processOne");
        assertNotNull(signalExecution);

        // Build a signal payload carrying one variable to be set on signal receipt.
        ArrayNode variables = objectMapper.createArrayNode();
        ObjectNode requestNode = objectMapper.createObjectNode();
        requestNode.put("action", "signalEventReceived");
        requestNode.put("signalName", "alert");
        requestNode.set("variables", variables);

        ObjectNode varNode = objectMapper.createObjectNode();
        variables.add(varNode);
        varNode.put("name", "myVar");
        varNode.put("value", "Variable set when signal event is received");

        Execution waitingExecution = runtimeService.createExecutionQuery().activityId("waitState").singleResult();
        assertNotNull(waitingExecution);

        // Sending signal event causes the execution to end (scope-execution for
        // the catching event)
        HttpPut httpPut = new HttpPut(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, waitingExecution.getId()));
        httpPut.setEntity(new StringEntity(requestNode.toString()));
        CloseableHttpResponse response = executeRequest(httpPut, HttpStatus.SC_OK);
        closeResponse(response);

        // Check if process is moved on to the other wait-state
        waitingExecution = runtimeService.createExecutionQuery().activityId("anotherWaitState").singleResult();
        assertNotNull(waitingExecution);

        // The variable sent with the signal must now be visible on the execution.
        Map<String, Object> vars = runtimeService.getVariables(waitingExecution.getId());
        assertEquals(1, vars.size());
        assertEquals("Variable set when signal event is received", vars.get("myVar"));
    }

    /**
     * Test triggering a message event on a single execution.
     */
    @Test
    @Deployment(resources = { "org/flowable/rest/service/api/runtime/ExecutionResourceTest.process-with-message-event.bpmn20.xml" })
    public void testMessageEventExecution() throws Exception {
        Execution execution = runtimeService.startProcessInstanceByKey("processOne");
        assertNotNull(execution);

        // First attempt uses a message name no execution is subscribed to.
        ObjectNode requestNode = objectMapper.createObjectNode();
        requestNode.put("action", "messageEventReceived");
        requestNode.put("messageName", "unexisting");

        Execution waitingExecution = runtimeService.createExecutionQuery().activityId("waitState").singleResult();
        assertNotNull(waitingExecution);

        // An unknown message name is rejected by the engine (500 from the REST layer).
        HttpPut httpPut = new HttpPut(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, waitingExecution.getId()));
        httpPut.setEntity(new StringEntity(requestNode.toString()));
        CloseableHttpResponse response = executeRequest(httpPut, HttpStatus.SC_INTERNAL_SERVER_ERROR);
        closeResponse(response);

        // Retry with the message name the process actually catches.
        requestNode.put("messageName", "paymentMessage");

        // Sending signal event causes the execution to end (scope-execution for
        // the catching event)
        httpPut.setEntity(new StringEntity(requestNode.toString()));
        response = executeRequest(httpPut, HttpStatus.SC_OK);
        closeResponse(response);

        // Check if process is moved on to the other wait-state
        waitingExecution = runtimeService.createExecutionQuery().activityId("anotherWaitState").singleResult();
        assertNotNull(waitingExecution);
    }

    /**
     * Test messaging a single execution with variables.
     */
    @Test
    @Deployment(resources = { "org/flowable/rest/service/api/runtime/ExecutionResourceTest.process-with-message-event.bpmn20.xml" })
    public void testMessageEventExecutionWithvariables() throws Exception {
        Execution signalExecution = runtimeService.startProcessInstanceByKey("processOne");
        assertNotNull(signalExecution);

        // Build a message payload carrying one variable to be set on message receipt.
        ArrayNode variables = objectMapper.createArrayNode();
        ObjectNode requestNode = objectMapper.createObjectNode();
        requestNode.put("action", "messageEventReceived");
        requestNode.put("messageName", "paymentMessage");
        requestNode.set("variables", variables);

        ObjectNode varNode = objectMapper.createObjectNode();
        variables.add(varNode);
        varNode.put("name", "myVar");
        varNode.put("value", "Variable set when signal event is received");

        Execution waitingExecution = runtimeService.createExecutionQuery().activityId("waitState").singleResult();
        assertNotNull(waitingExecution);

        // Sending signal event causes the execution to end (scope-execution for
        // the catching event)
        HttpPut httpPut = new HttpPut(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, waitingExecution.getId()));
        httpPut.setEntity(new StringEntity(requestNode.toString()));
        CloseableHttpResponse response = executeRequest(httpPut, HttpStatus.SC_OK);
        closeResponse(response);

        // Check if process is moved on to the other wait-state
        waitingExecution = runtimeService.createExecutionQuery().activityId("anotherWaitState").singleResult();
        assertNotNull(waitingExecution);

        // The variable sent with the message must now be visible on the execution.
        Map<String, Object> vars = runtimeService.getVariables(waitingExecution.getId());
        assertEquals(1, vars.size());
        assertEquals("Variable set when signal event is received", vars.get("myVar"));
    }

    /**
     * Test executing an illegal action on an execution.
*/ @Test @Deployment(resources = { "org/flowable/rest/service/api/runtime/ExecutionResourceTest.process-with-subprocess.bpmn20.xml" }) public void testIllegalExecutionAction() throws Exception { Execution execution = runtimeService.startProcessInstanceByKey("processOne"); assertNotNull(execution); ObjectNode requestNode = objectMapper.createObjectNode(); requestNode.put("action", "badaction"); HttpPut httpPut = new HttpPut(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_EXECUTION, execution.getId())); httpPut.setEntity(new StringEntity(requestNode.toString())); CloseableHttpResponse response = executeRequest(httpPut, HttpStatus.SC_BAD_REQUEST); closeResponse(response); } }
/* * Licensed to the University of California, Berkeley under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package tachyon.master; import java.io.File; import java.io.IOException; import java.net.InetSocketAddress; import tachyon.Constants; import tachyon.UnderFileSystem; import tachyon.client.TachyonFS; import tachyon.conf.CommonConf; import tachyon.conf.MasterConf; import tachyon.conf.UserConf; import tachyon.conf.WorkerConf; import tachyon.thrift.NetAddress; import tachyon.util.CommonUtils; import tachyon.util.NetworkUtils; import tachyon.worker.TachyonWorker; /** * Local Tachyon cluster for unit tests. 
*/ public final class LocalTachyonCluster { public static void main(String[] args) throws Exception { LocalTachyonCluster cluster = new LocalTachyonCluster(100); cluster.start(); CommonUtils.sleepMs(null, Constants.SECOND_MS); cluster.stop(); CommonUtils.sleepMs(null, Constants.SECOND_MS); cluster = new LocalTachyonCluster(100); cluster.start(); CommonUtils.sleepMs(null, Constants.SECOND_MS); cluster.stop(); CommonUtils.sleepMs(null, Constants.SECOND_MS); } private TachyonWorker mWorker = null; private final long mWorkerMemCapacityBytes; private String mTachyonHome; private String mWorkerDataFolder; private Thread mWorkerThread = null; private String mLocalhostName = null; private LocalTachyonMaster mMaster; public LocalTachyonCluster(long workerMemCapacityBytes) { mWorkerMemCapacityBytes = workerMemCapacityBytes; } public TachyonFS getClient() throws IOException { return mMaster.getClient(); } public String getEditLogPath() { return mMaster.getEditLogPath(); } public String getImagePath() { return mMaster.getImagePath(); } public InetSocketAddress getMasterAddress() { return new InetSocketAddress(mLocalhostName, getMasterPort()); } public String getMasterHostname() { return mLocalhostName; } public MasterInfo getMasterInfo() { return mMaster.getMasterInfo(); } public String getMasterUri() { return mMaster.getUri(); } public int getMasterPort() { return mMaster.getMetaPort(); } public String getTachyonHome() { return mTachyonHome; } public String getTempFolderInUnderFs() { return CommonConf.get().UNDERFS_ADDRESS; } public TachyonWorker getWorker() { return mWorker; } public NetAddress getWorkerAddress() { return new NetAddress(mLocalhostName, getWorkerPort(), getWorkerDataPort()); } public String getWorkerDataFolder() { return mWorkerDataFolder; } public int getWorkerPort() { return mWorker.getMetaPort(); } public int getWorkerDataPort() { return mWorker.getDataPort(); } private void deleteDir(String path) throws IOException { UnderFileSystem ufs = 
UnderFileSystem.get(path); if (ufs.exists(path) && !ufs.delete(path, true)) { throw new IOException("Folder " + path + " already exists but can not be deleted."); } } private void mkdir(String path) throws IOException { UnderFileSystem ufs = UnderFileSystem.get(path); if (ufs.exists(path)) { ufs.delete(path, true); } if (!ufs.mkdirs(path, true)) { throw new IOException("Failed to make folder: " + path); } } public void start() throws IOException { int maxLevel = 1; mTachyonHome = File.createTempFile("Tachyon", "U" + System.currentTimeMillis()).getAbsolutePath(); mWorkerDataFolder = "/datastore"; // re-build the dir to set permission to 777 deleteDir(mTachyonHome); mkdir(mTachyonHome); mLocalhostName = NetworkUtils.getLocalHostName(); System.setProperty("tachyon.test.mode", "true"); System.setProperty("tachyon.home", mTachyonHome); System.setProperty("tachyon.master.port", 0 + ""); System.setProperty("tachyon.master.web.port", 0 + ""); System.setProperty("tachyon.worker.port", 0 + ""); System.setProperty("tachyon.worker.data.port", 0 + ""); System.setProperty("tachyon.worker.data.folder", mWorkerDataFolder); if (System.getProperty("tachyon.worker.hierarchystore.level.max") == null) { System.setProperty("tachyon.worker.hierarchystore.level.max", 1 + ""); } else { maxLevel = Integer.valueOf(System.getProperty("tachyon.worker.hierarchystore.level.max")); } System.setProperty("tachyon.worker.hierarchystore.level0.alias", "MEM"); System.setProperty("tachyon.worker.hierarchystore.level0.dirs.path", mTachyonHome + "/ramdisk"); System.setProperty("tachyon.worker.hierarchystore.level0.dirs.quota", mWorkerMemCapacityBytes + ""); for (int level = 1; level < maxLevel; level ++) { String path = System.getProperty("tachyon.worker.hierarchystore.level" + level + ".dirs.path"); if (path == null) { throw new IOException("Paths for StorageDirs are not set! 
Level:" + level); } String[] dirPaths = path.split(","); String newPath = ""; for (int i = 0; i < dirPaths.length; i ++) { newPath += mTachyonHome + dirPaths[i] + ","; } System.setProperty("tachyon.worker.hierarchystore.level" + level + ".dirs.path", newPath.substring(0, newPath.length() - 1)); } System.setProperty("tachyon.worker.to.master.heartbeat.interval.ms", 15 + ""); System.setProperty("tachyon.user.remote.read.buffer.size.byte", 64 + ""); // Lower the number of threads that the cluster will spin off. // default thread overhead is too much. System.setProperty("tachyon.master.selector.threads", Integer.toString(1)); System.setProperty("tachyon.master.server.threads", Integer.toString(2)); System.setProperty("tachyon.worker.selector.threads", Integer.toString(1)); System.setProperty("tachyon.worker.server.threads", Integer.toString(2)); System.setProperty("tachyon.worker.network.netty.worker.threads", Integer.toString(2)); System.setProperty("tachyon.master.web.threads", Integer.toString(1)); CommonConf.clear(); MasterConf.clear(); WorkerConf.clear(); UserConf.clear(); mMaster = LocalTachyonMaster.create(mTachyonHome); mMaster.start(); mkdir(CommonConf.get().UNDERFS_DATA_FOLDER); mkdir(CommonConf.get().UNDERFS_WORKERS_FOLDER); CommonUtils.sleepMs(null, 10); System.setProperty("tachyon.master.port", getMasterPort() + ""); System.setProperty("tachyon.master.web.port", (getMasterPort() + 1) + ""); mWorker = TachyonWorker.createWorker(new InetSocketAddress(mLocalhostName, getMasterPort()), new InetSocketAddress(mLocalhostName, 0), 0, 1, 1, 1); Runnable runWorker = new Runnable() { @Override public void run() { try { mWorker.start(); } catch (Exception e) { throw new RuntimeException(e + " \n Start Worker Error \n" + e.getMessage(), e); } } }; mWorkerThread = new Thread(runWorker); mWorkerThread.start(); System.setProperty("tachyon.worker.port", getWorkerPort() + ""); System.setProperty("tachyon.worker.data.port", getWorkerDataPort() + ""); } /** * Stop both of the 
tachyon and underfs service threads. * * @throws Exception */ public void stop() throws Exception { stopTFS(); stopUFS(); } /** * Stop the tachyon filesystem's service thread only * * @throws Exception */ public void stopTFS() throws Exception { mMaster.stop(); mWorker.stop(); System.clearProperty("tachyon.home"); System.clearProperty("tachyon.master.hostname"); System.clearProperty("tachyon.master.port"); System.clearProperty("tachyon.master.web.port"); System.clearProperty("tachyon.worker.port"); System.clearProperty("tachyon.worker.data.port"); System.clearProperty("tachyon.worker.data.folder"); System.clearProperty("tachyon.worker.memory.size"); System.clearProperty("tachyon.user.remote.read.buffer.size.byte"); System.clearProperty("tachyon.worker.to.master.heartbeat.interval.ms"); System.clearProperty("tachyon.master.selector.threads"); System.clearProperty("tachyon.master.server.threads"); System.clearProperty("tachyon.worker.selector.threads"); System.clearProperty("tachyon.worker.server.threads"); System.clearProperty("tachyon.worker.hierarchystore.level.max"); System.clearProperty("tachyon.worker.network.netty.worker.threads"); System.clearProperty("tachyon.master.web.threads"); } /** * Cleanup the underfs cluster test folder only * * @throws Exception */ public void stopUFS() throws Exception { mMaster.cleanupUnderfs(); } public void stopWorker() throws Exception { mMaster.clearClients(); mWorker.stop(); } }
/* * The MIT License * * Copyright 2018 The OpenNARS authors. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package org.opennars.language; import com.google.common.collect.Iterators; import org.opennars.entity.TermLink; import org.opennars.inference.TemporalRules; import org.opennars.io.Symbols; import org.opennars.io.Symbols.NativeOperator; import org.opennars.main.Debug; import org.opennars.storage.Memory; import java.nio.CharBuffer; import java.util.*; import static org.opennars.io.Symbols.NativeOperator.COMPOUND_TERM_CLOSER; import static org.opennars.io.Symbols.NativeOperator.COMPOUND_TERM_OPENER; /** * Compound term as defined in the NARS-theory * * @author Pei Wang * @author Patrick Hammer */ public abstract class CompoundTerm extends Term implements Iterable<Term> { /** * list of (direct) term * */ // TODO make final again public final Term[] term; /** * syntactic complexity of the compound, the sum of those of its term plus 1 */ // TODO make final again public short complexity; /** Whether contains a variable */ private boolean hasVariables, hasVarQueries, hasVarIndeps, hasVarDeps, hasIntervals; int containedTemporalRelations = -1; int hash; private boolean normalized; /** * method to get the operator of the compound */ @Override public abstract NativeOperator operator(); /** * clone method * * @return A clone of the compound term */ @Override public abstract CompoundTerm clone(); /** subclasses should be sure to call init() in their constructors; * it is not done here to allow subclass constructors to set data before calling init() */ public CompoundTerm(final Term[] components) { super(); this.term = components; } public static class ConvRectangle { public String index_variable = null; public int[] term_indices = null; //size X, size Y, pos X, pos Y, min size X, min size Y public ConvRectangle(){} //the latter two for being able to assing a relative index for size too } public static ConvRectangle UpdateConvRectangle(final Term[] term) { String index_last_var = null; int minX = Integer.MAX_VALUE, minY = Integer.MAX_VALUE, maxX = 0, maxY = 0, minsX = 
Integer.MAX_VALUE, minsY = Integer.MAX_VALUE; boolean hasTermIndices = false; boolean calculateTermIndices = true; for (final Term t : term) { if(t != null && t.term_indices != null) { if(!calculateTermIndices || (t.index_variable != null && index_last_var != null && (!t.index_variable.equals(index_last_var)))) { calculateTermIndices = false; hasTermIndices = false; continue; //different "channels", don't calculate term indices } hasTermIndices = true; final int size_X = t.term_indices[0]; if(size_X < minsX) minsX = size_X; final int size_Y = t.term_indices[1]; if(size_Y < minsY) minsY = size_Y; final int pos_X = t.term_indices[2]; final int pos_Y = t.term_indices[3]; if(pos_X < minX) minX = pos_X; if(pos_Y < minY) minY = pos_Y; if(pos_X+size_X > maxX) maxX = pos_X+size_X; if(pos_Y+size_Y > maxY) maxY = pos_Y+size_Y; index_last_var = t.index_variable; } } final ConvRectangle rect = new ConvRectangle();// = new ConvRectangle(); if(hasTermIndices) { rect.term_indices = new int[6]; rect.term_indices[0] = maxX-minX; rect.term_indices[1] = maxY-minY; rect.term_indices[2] = minX; rect.term_indices[3] = minY; rect.term_indices[4] = minsX; rect.term_indices[5] = minsY; rect.index_variable = index_last_var; } return rect; } /** call this after changing Term[] contents */ protected void init(final Term[] term) { this.complexity = 1; this.hasVariables = this.hasVarDeps = this.hasVarIndeps = this.hasVarQueries = false; if(this.term_indices == null) { final ConvRectangle rect = UpdateConvRectangle(term); this.index_variable = rect.index_variable; this.term_indices = rect.term_indices; } for (final Term t : term) { this.complexity += t.getComplexity(); hasVariables |= t.hasVar(); hasVarDeps |= t.hasVarDep(); hasVarIndeps |= t.hasVarIndep(); hasVarQueries |= t.hasVarQuery(); hasIntervals |= t.hasInterval(); } invalidateName(); if (!hasVar()) setNormalized(true); } public void invalidateName() { this.setName(null); //invalidate name so it will be (re-)created lazily for (final 
Term t : term) { if (t.hasVar()) if (t instanceof CompoundTerm) ((CompoundTerm)t).invalidateName(); } setNormalized(false); } /** Must be Term return type because the type of Term may change with different arguments */ abstract public Term clone(final Term[] replaced); @Override public CompoundTerm cloneDeep() { final Term c = clone(cloneTermsDeep()); if (c == null) return null; if (Debug.DETAILED && c.getClass()!=getClass()) //debug relevant, while it is natural due to interval //simplification to reduce to other term type, //other cases should not appear System.out.println("cloneDeep resulted in different class: " + c + " from " + this); if (isNormalized()) ((CompoundTerm)c).setNormalized(true); if(!(c instanceof CompoundTerm)) { return null; } return (CompoundTerm)c; } public static void transformIndependentVariableToDependent(final CompoundTerm T) { //a special instance of transformVariableTermsDeep in 1.7 final Term[] term=T.term; for (int i = 0; i < term.length; i++) { final Term t = term[i]; if (t.hasVar()) { if (t instanceof CompoundTerm) { transformIndependentVariableToDependent((CompoundTerm) t); } else if (t instanceof Variable && ((Variable)t).isIndependentVariable()) { /* it's a variable */ term[i] = new Variable(""+Symbols.VAR_DEPENDENT+t.name().subSequence(1, t.name().length())); // vars.get(t.toString()); assert term[i] != null; } } } } static final Interval conceptival = new Interval(1); private static void ReplaceIntervals(final CompoundTerm comp) { if (!comp.hasIntervals) { return; } comp.invalidateName(); for(int i=0; i<comp.term.length; i++) { final Term t = comp.term[i]; if(t instanceof Interval) { assert conceptival != null; comp.term[i] = conceptival; comp.invalidateName(); } else if(t instanceof CompoundTerm) { ReplaceIntervals((CompoundTerm) t); } } } public static Term replaceIntervals(Term T) { if(T instanceof CompoundTerm) { T=T.cloneDeep(); //we will operate on a copy if(T == null) { return null; //not a valid concept term } 
ReplaceIntervals((CompoundTerm) T); } return T; } private static void ExtractIntervals(final Memory mem, final List<Long> ivals, final CompoundTerm comp) { for(int i=0; i<comp.term.length; i++) { final Term t = comp.term[i]; if(t instanceof Interval) { ivals.add(((Interval) t).time); } else if(t instanceof CompoundTerm) { ExtractIntervals(mem, ivals, (CompoundTerm) t); } } } public static List<Long> extractIntervals(final Memory mem, final Term T) { final List<Long> ret = new ArrayList<>(); if(T instanceof CompoundTerm) { ExtractIntervals(mem, ret, (CompoundTerm) T); } return ret; } public static class UnableToCloneException extends RuntimeException { public UnableToCloneException(final String message) { super(message); } @Override public synchronized Throwable fillInStackTrace() { if (Debug.DETAILED) { return super.fillInStackTrace(); } else { //avoid recording stack trace for efficiency reasons return this; } } } public CompoundTerm cloneDeepVariables() { final Term c = clone( cloneVariableTermsDeep() ); if (c == null) return null; if (Debug.DETAILED && c.getClass()!=getClass()) System.out.println("cloneDeepVariables resulted in different class: " + c + " from " + this); final CompoundTerm cc = (CompoundTerm)c; cc.setNormalized(isNormalized()); return cc; } @Override public int containedTemporalRelations() { if (containedTemporalRelations == -1) { containedTemporalRelations = 0; if ((this instanceof Equivalence) || (this instanceof Implication)) { final int temporalOrder = this.getTemporalOrder(); switch (temporalOrder) { case TemporalRules.ORDER_FORWARD: case TemporalRules.ORDER_CONCURRENT: case TemporalRules.ORDER_BACKWARD: containedTemporalRelations = 1; } } for (final Term t : term) containedTemporalRelations += t.containedTemporalRelations(); } return this.containedTemporalRelations; } /** * build a component list from terms * @return the component list */ public static Term[] termArray(final Term... 
t) { return t; } public static List<Term> termList(final Term... t) { return Arrays.asList((Term[])t); } /* ----- utilities for oldName ----- */ /** * default method to make the oldName of the current term from existing * fields. needs overridden in certain subclasses * * @return the oldName of the term */ protected CharSequence makeName() { return makeCompoundName(operator(), term); } @Override public CharSequence name() { if (this.nameInternal() == null) { this.setName(makeName()); } return this.nameInternal(); } /** * default method to make the oldName of a compound term from given fields * * @param op the term operator * @param arg the list of term * @return the oldName of the term */ protected static CharSequence makeCompoundName(final NativeOperator op, final Term... arg) { int size = 1 + 1; final String opString = op.toString(); size += opString.length(); for (final Term t : arg) size += 1 + t.name().length(); final CharBuffer n = CharBuffer.allocate(size) .append(COMPOUND_TERM_OPENER.ch).append(opString); for (final Term t : arg) { n.append(Symbols.ARGUMENT_SEPARATOR).append(t.name()); } n.append(COMPOUND_TERM_CLOSER.ch); return n.compact().toString(); } /* ----- utilities for other fields ----- */ /** * report the term's syntactic complexity * * @return the complexity value */ @Override public short getComplexity() { return complexity; } /** * Check if the order of the term matters * <p> * commutative CompoundTerms: Sets, Intersections Commutative Statements: * Similarity, Equivalence (except the one with a temporal order) * Commutative CompoundStatements: Disjunction, Conjunction (except the one * with a temporal order) * * @return The default value is false */ public boolean isCommutative() { return false; } /* ----- extend Collection methods to component list ----- */ /** * get the number of term * * @return the size of the component list */ final public int size() { return term.length; } /** Gives a set of all contained term, recursively */ public 
Set<Term> getContainedTerms() { final Set<Term> s = new LinkedHashSet(getComplexity()); for (final Term t : term) { s.add(t); if (t instanceof CompoundTerm) s.addAll( ((CompoundTerm)t).getContainedTerms() ); } return s; } /** * Clone the component list * * @return The cloned component list */ public Term[] cloneTerms(final Term... additional) { return cloneTermsAppend(term, additional); } /** * Cloned array of Terms, except for one or more Terms. * @param toRemove * @return the cloned array with the missing terms removed, OR null if no terms were actually removed when requireModification=true */ public Term[] cloneTermsExcept(final boolean requireModification, final Term[] toRemove) { //TODO if deep, this wastes created clones that are then removed. correct this inefficiency? final List<Term> l = asTermList(); boolean removed = false; for (final Term t : toRemove) { if (l.remove(t)) removed = true; } if ((!removed) && (requireModification)) return null; return l.toArray(new Term[0]); } /** * Deep clone an array list of terms * * @param original The original component list * @return an identical and separate copy of the list */ public static Term[] cloneTermsAppend(final Term[] original, final Term[] additional) { if (original == null) { return null; } final int L = original.length + additional.length; if (L == 0) return original; //TODO apply preventUnnecessaryDeepCopy to more cases final Term[] arr = new Term[L]; int i; int j = 0; Term[] srcArray = original; for (i = 0; i < L; i++) { if (i == original.length) { srcArray = additional; j = 0; } arr[i] = srcArray[j++]; } return arr; } public List<Term> asTermList() { final List l = new ArrayList(term.length); addTermsTo(l); return l; } /** forced deep clone of terms */ public Term[] cloneTermsDeep() { final Term[] l = new Term[term.length]; for (int i = 0; i < l.length; i++) { l[i] = term[i].cloneDeep(); if(l[i] == null) { return null; } } return l; } public Term[] cloneVariableTermsDeep() { final Term[] l = new 
Term[term.length]; for (int i = 0; i < l.length; i++) { Term t = term[i]; if (t.hasVar()) { if (t instanceof CompoundTerm) { t = ((CompoundTerm)t).cloneDeepVariables(); } else /* it's a variable */ t = t.clone(); } l[i] = t; } return l; } /** forced deep clone of terms */ public List<Term> cloneTermsListDeep() { final List<Term> l = new ArrayList(term.length); for (final Term t : term) l.add(t.clone()); return l; } static void shuffle(final Term[] ar,final Random randomNumber) { if (ar.length < 2) { return; } for (int i = ar.length - 1; i > 0; i--) { final int index = randomNumber.nextInt(i + 1); // Simple swap final Term a = ar[index]; ar[index] = ar[i]; ar[i] = a; } } /** * Check whether the compound contains a certain component * Also matches variables, ex: (&amp;&amp;,&lt;a --&gt; b&gt;,&lt;b --&gt; c&gt;) also contains &lt;a --&gt; #1&gt; * * @param t The component to be checked * @return Whether the component is in the compound */ /* * extra comment because it is a Implementation detail - question: * * Check whether the compound contains a certain component * Also matches variables, ex: (&amp;&amp;,&lt;a --&gt; b&gt;,&lt;b --&gt; c&gt;) also contains &lt;a --&gt; #1&gt; * ^^^ is this right? 
if so then try containsVariablesAsWildcard */ @Override public boolean containsTerm(final Term t) { return Terms.contains(term, t); //return Terms.containsVariablesAsWildcard(term, t); } /** * Recursively check if a compound contains a term * * @param target The term to be searched * @return Whether the target is in the current term */ @Override public boolean containsTermRecursively(final Term target) { if (super.containsTermRecursively(target)) return true; for (final Term term : term) { if (term.containsTermRecursively(target)) { return true; } } return false; } /** * Recursively count how often the terms are contained * * @param map The count map that will be created to count how often each term occurs * @return The counts of the terms */ @Override public Map<Term, Integer> countTermRecursively(Map<Term,Integer> map) { if(map == null) { map = new LinkedHashMap<Term, Integer>(); } map.put(this, map.getOrDefault(this, 0) + 1); for (final Term term : term) { term.countTermRecursively(map); } return map; } /** * Add all the components of term t into components recursively * * @param t The term * @param components The components * @return */ public static Set<Term> addComponentsRecursively(Term t, Set<Term> components) { if(components == null) { components = new LinkedHashSet<Term>(); } components.add(t); if(t instanceof CompoundTerm) { CompoundTerm cTerm = (CompoundTerm) t; for(Term component : cTerm) { addComponentsRecursively(component, components); } } return components; } /** * Check whether the compound contains all term of another term, or that term as a whole * * @param t The other term * @return Whether the term are all in the compound */ public boolean containsAllTermsOf(final Term t) { if (getClass() == t.getClass()) { //(t instanceof CompoundTerm) { return Terms.containsAll(term, ((CompoundTerm) t).term ); } else { return Terms.contains(term, t); } } /** * Try to replace a component in a compound at a given index by another one * * @param index The 
location of replacement * @param t The new component * @param memory Reference to the memory * @return The new compound */ public Term setComponent(final int index, final Term t, final Memory memory) { final List<Term> list = asTermList();//Deep(); list.remove(index); if (t != null) { if (getClass() != t.getClass()) { list.add(index, t); } else { //final List<Term> list2 = ((CompoundTerm) t).cloneTermsList(); final Term[] tt = ((CompoundTerm)t).term; for (int i = 0; i < tt.length; i++) { list.add(index + i, tt[i]); } } } if(this.isCommutative()) { Term[] ret = list.toArray(new Term[0]); return Terms.term(this, ret); } return Terms.term(this, list); } /* ----- variable-related utilities ----- */ /** * Whether this compound term contains any variable term * * @return Whether the name contains a variable */ @Override public boolean hasVar() { return hasVariables; } @Override public boolean hasVarDep() { return hasVarDeps; } @Override public boolean hasVarIndep() { return hasVarIndeps; } @Override public boolean hasVarQuery() { return hasVarQueries; } @Override public boolean hasInterval() { return hasIntervals; } /** * Recursively apply a substitute to the current CompoundTerm * May return null if the term can not be created * @param subs */ public Term applySubstitute(final Map<Term, Term> subs) { if ((subs == null) || (subs.isEmpty())) { return this;//.clone(); } final Term[] tt = new Term[term.length]; boolean modified = false; for (int i = 0; i < tt.length; i++) { final Term t1 = tt[i] = term[i]; if (subs.containsKey(t1)) { Term t2 = subs.get(t1); while (subs.containsKey(t2)) { t2 = subs.get(t2); } //prevents infinite recursion if (!t2.containsTerm(t1)) { tt[i] = t2; //t2.clone(); modified = true; } } else if (t1 instanceof CompoundTerm) { final Term ss = ((CompoundTerm) t1).applySubstitute(subs); if (ss!=null) { tt[i] = ss; if (!tt[i].equals(term[i])) modified = true; } } } if (!modified) return this; if (this.isCommutative()) { Arrays.sort(tt); } return 
this.clone(tt); } /** returns result of applySubstitute, if and only if it's a CompoundTerm. * otherwise it is null */ public CompoundTerm applySubstituteToCompound(final Map<Term, Term> substitute) { final Term t = applySubstitute(substitute); if (t instanceof CompoundTerm) return ((CompoundTerm)t); return null; } /* ----- link CompoundTerm and its term ----- */ /** * Build TermLink templates to constant term and subcomponents * <p> * The compound type determines the link type; the component type determines * whether to build the link. * * @return A list of TermLink templates */ public List<TermLink> prepareComponentLinks() { //complexity seems like an upper bound for the resulting number of componentLinks. //so use it as an initial size for the array list final List<TermLink> componentLinks = new ArrayList<>( getComplexity() ); return Terms.prepareComponentLinks(componentLinks, this); } final public void addTermsTo(final Collection<Term> c) { Collections.addAll(c, term); } @Override public int hashCode() { return name().hashCode(); } @Override public int compareTo(final AbstractTerm that) { if (that==this) { return 0; } return super.compareTo(that); } @Override public boolean equals(final Object that) { if (that==this) return true; if (!(that instanceof Term)) return false; return name().equals(((Term)that).name()); } public void setNormalized(final boolean b) { this.normalized = b; } public boolean isNormalized() { return normalized; } public Term[] cloneTermsReplacing(final Term from, final Term to) { final Term[] y = new Term[term.length]; int i = 0; for (Term x : term) { if (x.equals(from)) x = to; y[i++] = x; } return y; } @Override public Iterator<Term> iterator() { return Iterators.forArray(term); } }
/* Copyright (c) 2012 Emitrom LLC. All rights reserved. For licensing questions, please contact us at licensing@emitrom.com Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.emitrom.lienzo.shared.core.types; /** * Color implements the {@link IColor} interface (just like {@link ColorName}) * so it can be used in all the Lienzo methods that required colors. * <p> * Internally, it stores Red, Green and Blue (RGB) values as integers between 0 and 255. * It stores an Alpha value (a.k.a. opacity) as a double between 0 and 1, where 0 is invisible and 1 is fully visible. * <p> * This class provides additional utility methods, e.g.: * <ul> * <li>{@link #fromHSL(double, double, double) fromHSL} - convert colors from the HSL model to the RGB model * <li>{@link #fromColorString(String) fromColorString} - converts any CSS3 compliant color string to an RGB Color * </ul> * * @see IColor * @see ColorName * @see <a href="http://www.w3.org/TR/css3-color/">CSS Color Module Level 3</a> */ public class Color implements IColor { private int m_r; private int m_g; private int m_b; private static int s_r = 0; private static int s_g = 0; private static int s_b = 0; private double m_a = 1.0; /** * Constructs a Color from RGB values. * The RGB values are normalized to [0,255]. * The alpha value (A) is set to 1. 
* * @param r int between 0 and 255 * @param g int between 0 and 255 * @param b int between 0 and 255 */ public Color(int r, int g, int b) { setR(r); setG(g); setB(b); } /** * Constructs a Color from RGB values and alpha (transparency). * The RGB values are normalized to [0,255]. * Alpha is normalized to [0,1] * * @param r int between 0 and 255 * @param g int between 0 and 255 * @param b int between 0 and 255 * @param a double between 0 and 1 */ public Color(int r, int g, int b, double a) { setR(r); setG(g); setB(b); setA(a); } public Color brightness(double brightness) { int r = (int) Math.max(Math.min((getR() + (brightness * 255) + 0.5), 255), 0); int g = (int) Math.max(Math.min((getG() + (brightness * 255) + 0.5), 255), 0); int b = (int) Math.max(Math.min((getB() + (brightness * 255) + 0.5), 255), 0); return new Color(r, g, b, getA()); } public Color percent(double percent) { if (percent < -1) { return new Color(0, 0, 0, getA()); } if (percent > 1) { return new Color(255, 255, 255, getA()); } if (percent < 0) { percent = 1 + percent; int r = Math.max(0, Math.min(255, (int) ((getR() * percent) + 0.5))); int g = Math.max(0, Math.min(255, (int) ((getG() * percent) + 0.5))); int b = Math.max(0, Math.min(255, (int) ((getB() * percent) + 0.5))); return new Color(r, g, b, getA()); } else if (percent > 0) { int r = Math.max(0, Math.min(255, (int) (((255 - getR()) * percent) + getR() + 0.5))); int g = Math.max(0, Math.min(255, (int) (((255 - getG()) * percent) + getG() + 0.5))); int b = Math.max(0, Math.min(255, (int) (((255 - getB()) * percent) + getB() + 0.5))); return new Color(r, g, b, getA()); } return new Color(getR(), getG(), getB(), getA()); } /** * Generates a unique RGB color key, e.g. "rgb(12,34,255)". * This is used internally. 
* * @return String */ public static final String getHEXColorKey() { s_r++; if (s_r == 256) { s_r = 0; s_g++; if (s_g == 256) { s_g = 0; s_b++; if (s_b == 256) { s_b = 0; return getHEXColorKey(); } } } return rgbToBrowserHexColor(s_r, s_g, s_b); } /** * Converts RGB integer values to a browser-compliance rgb format. * * @param r int between 0 and 255 * @param g int between 0 and 255 * @param b int between 0 and 255 * @return String e.g. "rgb(12,34,255)" */ public static final String toBrowserRGB(int r, int g, int b) { return "rgb(" + r + "," + g + "," + b + ")"; } /** * Converts HSL (hue, saturation, lightness) to RGB Color. * HSL values are not normalized yet. * * @param h in [0,360] degrees * @param s in [0,100] percent * @param l in [0,100] percent * * @return Color with RGB values */ public static final Color fromHSL(double h, double s, double l) { h = (((h % 360) + 360) % 360); if (s < 0) s = 0; else if (s > 100) s = 1; else s /= 100; if (l < 0) l = 0; else if (l > 100) l = 1; else l /= 100; return fromNormalizedHSL(h, s, l); } /** * Converts HSL (hue, saturation, lightness) to RGB. * HSL values should already be normalized to [0,1] * * @param h in [0,1] * @param s in [0,1] * @param l in [0,1] * * @return Color with RGB values */ public static final Color fromNormalizedHSL(double h, double s, double l) { // see http://www.w3.org/TR/css3-color/ // // HOW TO RETURN hsl.to.rgb(h, s, l): // SELECT: // l<=0.5: PUT l*(s+1) IN m2 // ELSE: PUT l+s-l*s IN m2 // PUT l*2-m2 IN m1 // PUT hue.to.rgb(m1, m2, h+1/3) IN r // PUT hue.to.rgb(m1, m2, h ) IN g // PUT hue.to.rgb(m1, m2, h-1/3) IN b // RETURN (r, g, b) double m2 = (l <= 0.5) ? (l * (s + 1)) : (l + s - l * s); double m1 = l * 2 - m2; return new Color(fixRGB((int) Math.round(hueToRGB(m1, m2, h + 1 / 3))), fixRGB((int) Math.round(hueToRGB(m1, m2, h))), fixRGB((int) Math.round(hueToRGB(m1, m2, h - 1 / 3)))); } /** * Parses a CSS color string and returns a Color object. 
* * @param cssColorString Any valid color string for use in HTML 5 canvas * (as defined by "CSS Color Module Level 3") * except for "inherit" and "currentcolor". * * @return null if cssColorString could not be parsed * * @see <a href="http://www.w3.org/TR/css3-color/">CSS Color Module Level 3</a> */ public static Color fromColorString(String cssColorString) { String str = cssColorString.toLowerCase(); if (str.equals("transparent")) { return new Color(0, 0, 0, 0); } try { if (str.startsWith("#")) { String r, g, b; if (str.length() == 7) { r = str.substring(1, 3); g = str.substring(3, 5); b = str.substring(5, 7); } else if (str.length() == 4) { r = str.substring(1, 2); g = str.substring(2, 3); b = str.substring(3, 4); r = r + r; g = g + g; b = b + b; } else return null; // error - invalid length return new Color(fixRGB(Integer.valueOf(r, 16)), fixRGB(Integer.valueOf(g, 16)), fixRGB(Integer.valueOf(b, 16))); } else { ColorName colName = ColorName.lookup(str); if (colName != null) return colName.getColor(); } // Remove whitespace if (str.contains(" ")) str = str.replaceAll(" ", ""); if (str.startsWith("rgb(") && str.endsWith(")")) { String[] rgb = str.substring(4, str.length() - 1).split(","); if (rgb.length != 3) return null; int r = intOrPct(rgb[0], 255); int g = intOrPct(rgb[1], 255); int b = intOrPct(rgb[2], 255); return new Color(r, g, b); } else if (str.startsWith("rgba(") && str.endsWith(")")) { String[] rgba = str.substring(5, str.length() - 1).split(","); if (rgba.length != 4) return null; int r = intOrPct(rgba[0], 255); int g = intOrPct(rgba[1], 255); int b = intOrPct(rgba[2], 255); double a = doubleOrPct(rgba[3], 1); return new Color(r, g, b, a); } else if (str.startsWith("hsl(") && str.endsWith(")")) { String[] hsl = str.substring(4, str.length() - 1).split(","); if (hsl.length != 3) return null; double h = hueOrPct(hsl[0]); double s = percentage(hsl[1], 1); double l = percentage(hsl[2], 1); return fromNormalizedHSL(h, s, l); } else if 
(str.startsWith("hsla(") && str.endsWith(")")) { String[] hsla = str.substring(5, str.length() - 1).split(","); if (hsla.length != 4) return null; double h = hueOrPct(hsla[0]); double s = percentage(hsla[1], 1); double l = percentage(hsla[2], 1); double a = doubleOrPct(hsla[3], 1); Color col = fromNormalizedHSL(h, s, l); col.setA(a); return col; } else return null; // unknown format } catch (NumberFormatException e) { return null; } } private static final int intOrPct(String s, int max) { if (s.endsWith("%")) { s = s.substring(0, s.length() - 1); double val = Double.parseDouble(s); if (val < 0) return 0; if (val >= 100) return max; return (int) Math.round(val * max / 100.0); } else { int val = Integer.parseInt(s); if (val < 0) return 0; if (val > max) return max; return val; } } private static final double percentage(String s, double max) { if (s.endsWith("%")) { s = s.substring(0, s.length() - 1); double val = Double.parseDouble(s); if (val < 0) return 0; if (val >= 100) return max; return val * max / 100.0; } throw new IllegalArgumentException("invalid percentage [" + s + "]"); } private static final double doubleOrPct(String s, double max) { if (s.endsWith("%")) { return percentage(s, max); } else { double val = Double.parseDouble(s); if (val < 0) return 0; if (val > max) return max; return val; } } private static final double hueOrPct(String s) { if (s.endsWith("%")) { return percentage(s, 1); } else { double h = Double.parseDouble(s); h = (((h % 360) + 360) % 360); return h / 360; } } /** * Generates a random hex color, e.g. "#1234EF" * * @return String */ public static final String getRandomHexColor() { int r = fixRGB((int) Math.round(Math.random() * 255)); int g = fixRGB((int) Math.round(Math.random() * 255)); int b = fixRGB((int) Math.round(Math.random() * 255)); return rgbToBrowserHexColor(r, g, b); } /** * Convertss RGB to hex browser-compliance color, e.g. 
"#1234EF" * * @param r int between 0 and 255 * @param g int between 0 and 255 * @param b int between 0 and 255 * @return String */ public static final String rgbToBrowserHexColor(int r, int g, int b) { return "#" + toBrowserHexValue(r) + toBrowserHexValue(g) + toBrowserHexValue(b); } /** * Converts Hex string to RGB. Assumes * @param hex String of length 7, e.g. "#1234EF" * @return {@link Color} */ public static final Color hex2RGB(String hex) { // TODO this assumes hex is 6 long - what about strings of length 3? return new Color(Integer.valueOf(hex.substring(1, 3), 16), Integer.valueOf(hex.substring(3, 5), 16), Integer.valueOf(hex.substring(5, 7), 16)); } /** * Returns the Red component of the RGB color. * * @return int between 0 and 255 */ @Override public int getR() { return m_r; } /** * Sets the Red component of the RGB color. * The value is normalized to [0,255]. * * @param r int between 0 and 255 * @return this Color */ public Color setR(int r) { m_r = fixRGB(r); return this; } /** * Returns the Green component of the RGB color. * * @return int between 0 and 255 */ @Override public int getG() { return m_g; } /** * Sets the Green component of the RGB color. * The value is normalized to [0,255]. * * @param g int between 0 and 255 * @return this Color */ public Color setG(int g) { m_g = fixRGB(g); return this; } /** * Returns the Blue component of the RGB color. * * @return int between 0 and 255 */ @Override public int getB() { return m_b; } /** * Sets the Blue component of the RGB color. * The value is normalized to [0,255]. * * @param b int between 0 and 255 * @return this Color */ public Color setB(int b) { m_b = fixRGB(b); return this; } /** * Returns the Alpha component (transparency) of the RGB color, between 0 and 1. * * @return double between 0 and 1 */ @Override public double getA() { return m_a; } /** * Sets the alpha channel. * The value is normalized to [0,1]. 
* * @param a between 0 and 1 * @return this Color */ public Color setA(double a) { m_a = fixAlpha(a); return this; } /** * Returns an RGB color string, e.g. "rgb(255,255,255)" * @return String */ public String getRGB() { return "rgb(" + m_r + "," + m_g + "," + m_b + ")"; } /** * Returns RGBA color string, e.g. "rgba(255,255,255,0.5) * @return String */ public String getRGBA() { return "rgba(" + m_r + "," + m_g + "," + m_b + "," + m_a + ")"; } /** * Returns a CCS compliant color string that can be set as a color on * an HTML5 canvas, e.g. "rgb(255,255,255)" if alpha is 1, or * "rgba(255,255,255,0.2)" otherwise. * * @return String e.g. "rgb(255,255,255)", "rgba(255,255,255,0.2)" */ @Override public String getColorString() { if (m_a == 1) return getRGB(); else return getRGBA(); } /** * Converts the number to a two-digit hex string, * e.g. 0 becomes "00" and 255 becomes "FF". * * @param number int between 0 and 255 * @return String */ private static final String toBrowserHexValue(int number) { String chex = Integer.toHexString(number & 0xFF).toUpperCase(); if (chex.length() < 2) { return "0" + chex; } return chex; } private static int fixRGB(int c) { if (c < 0) { return 0; } if (c > 255) { return 255; } return c; } private static double fixAlpha(double a) { if (a < 0) { return 0; } if (a > 1.0) { return 1.0; } return a; } /** * Used by {@link #fromNormalizedHSL(double, double, double)} * * @param m1 * @param m2 * @param h * @return */ private static double hueToRGB(double m1, double m2, double h) { // see http://www.w3.org/TR/css3-color/ // // HOW TO RETURN hue.to.rgb(m1, m2, h): // IF h<0: PUT h+1 IN h // IF h>1: PUT h-1 IN h // IF h*6<1: RETURN m1+(m2-m1)*h*6 // IF h*2<1: RETURN m2 // IF h*3<2: RETURN m1+(m2-m1)*(2/3-h)*6 // RETURN m1 if (h < 0) h++; if (h > 1) h--; if (h * 6 < 1) return m1 + (m2 - m1) * h * 6; if (h * 2 < 1) return m2; if (h * 3 < 2) return m1 + (m2 - m1) * (2 / 3 - h) * 6; return m1; } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ambari.server.security.encryption;

import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.configuration.Configuration;
import org.apache.ambari.server.security.credential.Credential;
import org.apache.ambari.server.security.credential.GenericKeyCredential;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Resolves password aliases of the form <code>${alias=name}</code> against a
 * master-key-protected, file-based credential store, and provides a small CLI
 * (see {@link #main(String[])}) for storing (PUT) and retrieving (GET) aliases.
 */
public class CredentialProvider {
  public static final Pattern PASSWORD_ALIAS_PATTERN = Pattern.compile("\\$\\{alias=[\\w\\.]+\\}");

  // Alphabet for generated passwords; visually-ambiguous characters
  // (i, l, o, I, L, O, 0, 1) are deliberately excluded.
  protected char[] chars = {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm',
    'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'A', 'B', 'C',
    'D', 'E', 'F', 'G', 'H', 'J', 'K', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U',
    'V', 'W', 'X', 'Y', 'Z', '2', '3', '4', '5', '6', '7', '8', '9'};

  private CredentialStore keystoreService;

  static final Logger LOG = LoggerFactory.getLogger(CredentialProvider.class);

  /**
   * Creates a provider backed by a {@link FileBasedCredentialStore}.
   *
   * @param masterKey              the master key material; if non-null it is used directly
   * @param masterKeyLocation      file holding the persisted master key; required when
   *                               {@code masterKey} is null and {@code isMasterKeyPersisted} is true
   * @param isMasterKeyPersisted   whether a persisted master key file should be read
   * @param masterKeyStoreLocation location of the credential store file
   * @throws AmbariException if the master key cannot be initialized
   * @throws IllegalArgumentException if a required key location is missing
   */
  public CredentialProvider(String masterKey, File masterKeyLocation,
                            boolean isMasterKeyPersisted, File masterKeyStoreLocation) throws AmbariException {
    MasterKeyService masterKeyService;
    if (masterKey != null) {
      masterKeyService = new MasterKeyServiceImpl(masterKey);
    } else {
      if (isMasterKeyPersisted) {
        if (masterKeyLocation == null) {
          throw new IllegalArgumentException("The master key file location must be specified if the master key is persisted");
        }
        masterKeyService = new MasterKeyServiceImpl(masterKeyLocation);
      } else {
        // fall back to the environment-based master key lookup
        masterKeyService = new MasterKeyServiceImpl();
      }
    }
    if (!masterKeyService.isMasterKeyInitialized()) {
      throw new AmbariException("Master key initialization failed.");
    }
    this.keystoreService = new FileBasedCredentialStore(masterKeyStoreLocation);
    this.keystoreService.setMasterKeyService(masterKeyService);
  }

  /**
   * Looks up the password stored under the given alias. The alias may be
   * either a bare name or the <code>${alias=name}</code> wrapped form.
   *
   * @param alias the alias to resolve
   * @return the stored password, or null if the credential is absent or not key-based
   * @throws AmbariException on credential store errors
   */
  public char[] getPasswordForAlias(String alias) throws AmbariException {
    Credential credential = (isAliasString(alias))
        ? keystoreService.getCredential(getAliasFromString(alias))
        : keystoreService.getCredential(alias);

    return (credential instanceof GenericKeyCredential)
        ? ((GenericKeyCredential) credential).getKey()
        : null;
  }

  /**
   * Generates a random 16-character password and stores it under the alias.
   *
   * @param alias the alias to create
   * @throws AmbariException on credential store errors
   */
  public void generateAliasWithPassword(String alias) throws AmbariException {
    String passwordString = generatePassword(16);
    addAliasToCredentialStore(alias, passwordString);
  }

  /**
   * Stores the given password under the given alias.
   *
   * @param alias          non-empty alias name
   * @param passwordString non-empty password
   * @throws AmbariException on credential store errors
   * @throws IllegalArgumentException if alias or password is null/empty
   */
  public void addAliasToCredentialStore(String alias, String passwordString)
    throws AmbariException {
    if (alias == null || alias.isEmpty()) {
      throw new IllegalArgumentException("Alias cannot be null or empty.");
    }
    if (passwordString == null || passwordString.isEmpty()) {
      throw new IllegalArgumentException("Empty or null password not allowed.");
    }
    keystoreService.addCredential(alias, new GenericKeyCredential(passwordString.toCharArray()));
  }

  /**
   * Generates a random password of the requested length from {@link #chars}.
   * BUGFIX: uses {@link SecureRandom} - the previous {@code java.util.Random}
   * is predictable and unsuitable for generating credentials.
   */
  private String generatePassword(int length) {
    StringBuilder sb = new StringBuilder(length);
    Random r = new SecureRandom();
    for (int i = 0; i < length; i++) {
      sb.append(chars[r.nextInt(chars.length)]);
    }
    return sb.toString();
  }

  /**
   * @return true if the string matches the <code>${alias=name}</code> pattern
   */
  public static boolean isAliasString(String aliasStr) {
    if (aliasStr == null || aliasStr.isEmpty()) {
      return false;
    }
    Matcher matcher = PASSWORD_ALIAS_PATTERN.matcher(aliasStr);
    return matcher.matches();
  }

  // Extracts "name" from "${alias=name}" - assumes the pattern already matched.
  private String getAliasFromString(String strPasswd) {
    return strPasswd.substring(strPasswd.indexOf("=") + 1, strPasswd.length() - 1);
  }

  protected CredentialStore getKeystoreService() {
    return keystoreService;
  }

  /**
   * Credential Store entry point
   * args[0] => Action (GET/PUT)
   * args[1] => Alias
   * args[2] => Payload (FilePath for GET/Password for PUT)
   * args[3] => Master Key (Empty)
   *
   * @param args
   */
  public static void main(String args[]) {
    if (args != null && args.length > 0) {
      String action = args[0];
      String alias = null;
      String masterKey = null;
      CredentialProvider credentialProvider = null;
      Configuration configuration = new Configuration();
      if (args.length > 1 && !args[1].isEmpty()) {
        alias = args[1];
      } else {
        LOG.error("No valid arguments provided.");
        System.exit(1);
      }
      // None - To avoid incorrectly assuming redirection as argument
      if (args.length > 3 && !args[3].isEmpty() && !args[3].equalsIgnoreCase("None")) {
        masterKey = args[3];
        LOG.debug("Master key provided as an argument.");
      }
      try {
        credentialProvider = new CredentialProvider(masterKey,
          configuration.getMasterKeyLocation(),
          configuration.isMasterKeyPersisted(),
          configuration.getMasterKeyStoreLocation());
      } catch (Exception ex) {
        ex.printStackTrace();
        System.exit(1);
      }
      LOG.info("action => " + action + ", alias => " + alias);
      if (action.equalsIgnoreCase("PUT")) {
        String password = null;
        if (args.length > 2 && !args[2].isEmpty()) {
          password = args[2];
        }
        if (alias != null && !alias.isEmpty() && password != null && !password.isEmpty()) {
          try {
            credentialProvider.addAliasToCredentialStore(alias, password);
          } catch (AmbariException e) {
            e.printStackTrace();
          }
        } else {
          LOG.error("Alias and password are required arguments.");
          System.exit(1);
        }
      } else if (action.equalsIgnoreCase("GET")) {
        String writeFilePath = null;
        if (args.length > 2 && !args[2].isEmpty()) {
          writeFilePath = args[2];
        }
        if (alias != null && !alias.isEmpty() && writeFilePath != null && !writeFilePath.isEmpty()) {
          String passwd = "";
          try {
            char[] retPasswd = credentialProvider.getPasswordForAlias(alias);
            if (retPasswd != null) {
              passwd = new String(retPasswd);
            }
          } catch (AmbariException e) {
            LOG.error("Error retrieving password for alias.");
            e.printStackTrace();
          }
          // IMPROVEMENT: try-with-resources replaces the previous manual
          // finally block, which silently swallowed close() failures.
          // NOTE(review): getBytes() keeps the original platform-default
          // charset; callers may depend on it - confirm before changing.
          try (FileOutputStream fo = new FileOutputStream(writeFilePath)) {
            fo.write(passwd.getBytes());
          } catch (FileNotFoundException fe) {
            fe.printStackTrace();
          } catch (IOException e) {
            e.printStackTrace();
          }
        } else {
          LOG.error("Alias and file path are required arguments.");
        }
      } else if (action.equalsIgnoreCase("RESET")) {
        // RESET is accepted but intentionally not implemented.
      }
    } else {
      LOG.error("No arguments provided to " + "CredentialProvider");
      System.exit(1);
    }
    System.exit(0);
  }
}
package io.dropwizard.metrics;

import com.codahale.metrics.MetricAttribute;
import com.codahale.metrics.MetricFilter;
import com.codahale.metrics.ScheduledReporter;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.dropwizard.util.Duration;
import io.dropwizard.validation.MinDuration;

import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import javax.validation.valueextraction.Unwrapping;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
 * A base {@link ReporterFactory} for configuring metric reporters.
 * <p/>
 * Configures options common to all {@link ScheduledReporter}s.
 * <p/>
 * <b>Configuration Parameters:</b>
 * <table>
 *     <tr>
 *         <td>Name</td>
 *         <td>Default</td>
 *         <td>Description</td>
 *     </tr>
 *     <tr>
 *         <td>durationUnit</td>
 *         <td>milliseconds</td>
 *         <td>The unit to report durations as. Overrides per-metric duration units.</td>
 *     </tr>
 *     <tr>
 *         <td>rateUnit</td>
 *         <td>seconds</td>
 *         <td>The unit to report rates as. Overrides per-metric rate units.</td>
 *     </tr>
 *     <tr>
 *         <td>excludes</td>
 *         <td>No excluded metrics.</td>
 *         <td>Metrics to exclude from reports, by name. When defined, matching metrics will not be
 *         reported. See {@link #getFilter()}.</td>
 *     </tr>
 *     <tr>
 *         <td>includes</td>
 *         <td>All metrics included.</td>
 *         <td>Metrics to include in reports, by name. When defined, only these metrics will be
 *         reported. See {@link #getFilter()}. Exclusion rules (excludes) take precedence,
 *         so if a name matches both <i>excludes</i> and <i>includes</i>, it is excluded.</td>
 *     </tr>
 *     <tr>
 *         <td>excludesAttributes</td>
 *         <td>No excluded attributes.</td>
 *         <td>Metric attributes to exclude from reports, by name (e.g `p98`, `m15_rate`, `stddev`).
 *         When defined, matching metrics attributes will not be reported. See {@link MetricAttribute}</td>
 *     </tr>
 *     <tr>
 *         <td>includesAttributes</td>
 *         <td>All metrics attributes.</td>
 *         <td>Metrics attributes to include in reports, by name (e.g `p98`, `m15_rate`, `stddev`).
 *         When defined, only these attributes will be reported. See {@link MetricAttribute}.
 *         Exclusion rules (excludes) take precedence, so if an attribute matches both <i>includesAttributes</i>
 *         and <i>excludesAttributes</i>, it is excluded.</td>
 *     </tr>
 *     <tr>
 *         <td>useRegexFilters</td>
 *         <td>false</td>
 *         <td>Indicates whether the values of the 'includes' and 'excludes' fields should be
 *         treated as regular expressions or not.</td>
 *     </tr>
 *     <tr>
 *         <td>frequency</td>
 *         <td>none</td>
 *         <td>The frequency to report metrics. Overrides the {@link
 *         MetricsFactory#getFrequency() default}.</td>
 *     </tr>
 * </table>
 */
public abstract class BaseReporterFactory implements ReporterFactory {
    private static final DefaultStringMatchingStrategy DEFAULT_STRING_MATCHING_STRATEGY =
            new DefaultStringMatchingStrategy();

    private static final RegexStringMatchingStrategy REGEX_STRING_MATCHING_STRATEGY =
            new RegexStringMatchingStrategy();

    private static final SubstringMatchingStrategy SUBSTRING_MATCHING_STRATEGY =
            new SubstringMatchingStrategy();

    @NotNull
    private TimeUnit durationUnit = TimeUnit.MILLISECONDS;

    @NotNull
    private TimeUnit rateUnit = TimeUnit.SECONDS;

    @NotNull
    private Set<String> excludes = Collections.emptySet();

    @NotNull
    private Set<String> includes = Collections.emptySet();

    @Valid
    @MinDuration(value = 0, payload = Unwrapping.Unwrap.class)
    private Optional<Duration> frequency = Optional.empty();

    private boolean useRegexFilters = false;

    private boolean useSubstringMatching = false;

    private EnumSet<MetricAttribute> excludesAttributes = EnumSet.noneOf(MetricAttribute.class);

    private EnumSet<MetricAttribute> includesAttributes = EnumSet.allOf(MetricAttribute.class);

    // CONSISTENCY FIX: this was the only accessor in the class missing the
    // @JsonProperty annotation (its setter and every other getter have it).
    @JsonProperty
    public TimeUnit getDurationUnit() {
        return durationUnit;
    }

    @JsonProperty
    public void setDurationUnit(TimeUnit durationUnit) {
        this.durationUnit = durationUnit;
    }

    @JsonProperty
    public TimeUnit getRateUnit() {
        return rateUnit;
    }

    @JsonProperty
    public void setRateUnit(final TimeUnit rateUnit) {
        this.rateUnit = rateUnit;
    }

    @JsonProperty
    public Set<String> getIncludes() {
        return includes;
    }

    @JsonProperty
    public void setIncludes(Set<String> includes) {
        // defensive copy - callers keep no handle on the stored set
        this.includes = new HashSet<>(includes);
    }

    @JsonProperty
    public Set<String> getExcludes() {
        return excludes;
    }

    @JsonProperty
    public void setExcludes(Set<String> excludes) {
        this.excludes = new HashSet<>(excludes);
    }

    @Override
    @JsonProperty
    public Optional<Duration> getFrequency() {
        return frequency;
    }

    @JsonProperty
    public void setFrequency(Optional<Duration> frequency) {
        this.frequency = frequency;
    }

    @JsonProperty
    public boolean getUseRegexFilters() {
        return useRegexFilters;
    }

    @JsonProperty
    public void setUseRegexFilters(boolean useRegexFilters) {
        this.useRegexFilters = useRegexFilters;
    }

    @JsonProperty
    public boolean getUseSubstringMatching() {
        return useSubstringMatching;
    }

    @JsonProperty
    public void setUseSubstringMatching(boolean useSubstringMatching) {
        this.useSubstringMatching = useSubstringMatching;
    }

    @JsonProperty
    public EnumSet<MetricAttribute> getExcludesAttributes() {
        return excludesAttributes;
    }

    @JsonProperty
    public void setExcludesAttributes(EnumSet<MetricAttribute> excludesAttributes) {
        this.excludesAttributes = EnumSet.copyOf(excludesAttributes);
    }

    @JsonProperty
    public EnumSet<MetricAttribute> getIncludesAttributes() {
        return includesAttributes;
    }

    @JsonProperty
    public void setIncludesAttributes(EnumSet<MetricAttribute> includesAttributes) {
        this.includesAttributes = EnumSet.copyOf(includesAttributes);
    }

    /**
     * Gets a {@link MetricFilter} that specifically includes and excludes configured metrics.
     * <p/>
     * Filtering works in 4 ways:
     * <dl>
     *     <dt><i>unfiltered</i></dt>
     *     <dd>All metrics are reported</dd>
     *     <dt><i>excludes</i>-only</dt>
     *     <dd>All metrics are reported, except those whose name is listed in <i>excludes</i>.</dd>
     *     <dt><i>includes</i>-only</dt>
     *     <dd>Only metrics whose name is listed in <i>includes</i> are reported.</dd>
     *     <dt>mixed (both <i>includes</i> and <i>excludes</i></dt>
     *     <dd>Only metrics whose name is listed in <i>includes</i> and
     *     <em>not</em> listed in <i>excludes</i> are reported;
     *     <i>excludes</i> takes precedence over <i>includes</i>.</dd>
     * </dl>
     *
     * @return the filter for selecting metrics based on the configured excludes/includes.
     * @see #getIncludes()
     * @see #getExcludes()
     */
    @JsonIgnore
    public MetricFilter getFilter() {
        final StringMatchingStrategy stringMatchingStrategy = getUseRegexFilters()
                ? REGEX_STRING_MATCHING_STRATEGY
                : (getUseSubstringMatching() ? SUBSTRING_MATCHING_STRATEGY : DEFAULT_STRING_MATCHING_STRATEGY);

        // Include the metric if its name is not excluded and its name is included
        // Where, by default, with no includes setting, all names are included.
        return (name, metric) -> !stringMatchingStrategy.containsMatch(getExcludes(), name)
                && (getIncludes().isEmpty() || stringMatchingStrategy.containsMatch(getIncludes(), name));
    }

    /**
     * Computes the attributes that should NOT be reported: everything outside
     * the includes set, plus everything explicitly excluded (excludes win).
     *
     * @return the set of disabled metric attributes
     */
    protected Set<MetricAttribute> getDisabledAttributes() {
        final EnumSet<MetricAttribute> metricAttributes = EnumSet.complementOf(getIncludesAttributes());
        metricAttributes.addAll(getExcludesAttributes());
        return metricAttributes;
    }
}
/*L
 * Copyright Ekagra Software Technologies Ltd.
 * Copyright SAIC, SAIC-Frederick
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/cacore-sdk/LICENSE.txt for details.
 */

package test.gov.nih.nci.cacoresdk.domain.inheritance.parentwithassociation.sametable;

import gov.nih.nci.cacoresdk.domain.inheritance.parentwithassociation.sametable.Wheel;

import javax.ws.rs.core.Response;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.InputStream;
import java.io.FileWriter;
import java.io.File;
import java.util.List;

import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.entity.FileEntity;
import org.apache.cxf.jaxrs.client.WebClient;
import org.apache.cxf.common.util.Base64Utility;

import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Response.Status;
import javax.ws.rs.core.UriInfo;

import java.lang.reflect.Modifier;
import java.util.Collection;
import java.util.Iterator;

import test.gov.nih.nci.cacoresdk.SDKRESTfulTestBase;
import gov.nih.nci.system.applicationservice.ApplicationException;

/**
 * Exercises the generated RESTful resource for the {@link Wheel} domain object:
 * GET by id, search, association traversal (luggage), DELETE, POST and PUT.
 *
 * <p>Each query-style test validates the XML error document on 404/406 responses,
 * fails on any other non-200 status, and otherwise persists the returned XML
 * payload to a local file (later replayed by the POST/PUT tests).
 */
public class WheelResourceTest extends SDKRESTfulTestBase {

    /** Fully-qualified domain class name used for application-service queries. */
    private static final String WHEEL_CLASS_NAME =
            "gov.nih.nci.cacoresdk.domain.inheritance.parentwithassociation.sametable.Wheel";

    /** File the GET payload is persisted to; also the request body for POST/PUT. */
    private static final String GET_RESULT_FILE = "Wheel" + "XML.xml";

    /** File the search-style payloads are persisted to. */
    private static final String SEARCH_RESULT_FILE = "Wheel_Search" + "XML.xml";

    public static String getTestCaseName() {
        return "Wheel RESTful Resource Test Case";
    }

    /**
     * Returns the id of the first persisted {@link Wheel}, or {@code null} when
     * the database holds none (in which case the caller should skip its test).
     *
     * @throws Exception if the application-service query fails
     */
    private String findFirstWheelId() throws Exception {
        Wheel searchObject = new Wheel();
        Collection results = getApplicationService().search(WHEEL_CLASS_NAME, searchObject);
        if (results == null || results.size() == 0) {
            return null;
        }
        Wheel obj = (Wheel) ((List) results).get(0);
        Integer idVal = obj.getId();
        return new Integer(idVal).toString();
    }

    /** Creates a CXF client sending and accepting {@code application/xml}. */
    private WebClient createXmlClient(String url) {
        WebClient client = WebClient.create(url);
        client.type("application/xml").accept("application/xml");
        return client;
    }

    /**
     * Validates the XML error document for 404/406 responses and fails on any
     * other non-200 status.
     *
     * @return {@code true} when the response was a 404/406 whose entity stream
     *         has been consumed by the XML parser (nothing left to persist)
     */
    private boolean validateErrorDocument(Response response) throws Exception {
        int status = response.getStatus();
        if (status == Status.NOT_ACCEPTABLE.getStatusCode()
                || status == Status.NOT_FOUND.getStatusCode()) {
            InputStream is = (InputStream) response.getEntity();
            org.jdom.input.SAXBuilder builder = new org.jdom.input.SAXBuilder(false);
            org.jdom.Document jDoc = builder.build(is);
            // The SDK wraps errors in a <response> document; anything else is a failure.
            assertEquals(jDoc.getRootElement().getName(), "response");
            return true;
        }
        if (status != 200) {
            throw new RuntimeException("Failed : HTTP error code : " + status);
        }
        return false;
    }

    /**
     * Streams the response entity to {@code fileName}, echoing each line to
     * stdout. Both the reader and the writer are closed even on failure
     * (the originals leaked them).
     */
    private void writeEntityToFile(Response response, String fileName) throws IOException {
        File myFile = new File(fileName);
        System.out.println("writing data to file " + myFile.getAbsolutePath());
        FileWriter myWriter = new FileWriter(myFile);
        BufferedReader br = new BufferedReader(
                new InputStreamReader((InputStream) response.getEntity()));
        try {
            String output;
            System.out.println("Output from Server .... \n");
            while ((output = br.readLine()) != null) {
                myWriter.write(output);
                System.out.println(output);
            }
            myWriter.flush();
        } finally {
            myWriter.close();
            br.close();
        }
    }

    /**
     * Validates the status and, for successful responses, persists the payload.
     * Returns early after a 404/406 since the entity stream is already consumed
     * by the error-document check (the originals re-read the spent stream).
     */
    private void validateOrPersist(Response response, String fileName) throws Exception {
        if (validateErrorDocument(response)) {
            return;
        }
        writeEntityToFile(response, fileName);
    }

    /** Wraps any failure in the SDK's standard XML-bodied 500 response. */
    private WebApplicationException toInternalError(Exception e) {
        ResponseBuilder builder = Response.status(Status.INTERNAL_SERVER_ERROR);
        builder.type("application/xml");
        StringBuffer buffer = new StringBuffer();
        buffer.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
        buffer.append("<response>");
        buffer.append("<type>ERROR</type>");
        buffer.append("<code>INTERNAL_ERROR_4</code>");
        buffer.append("<message>Failed to Query due to: " + e.getMessage() + "</message>");
        buffer.append("</response>");
        builder.entity(buffer.toString());
        return new WebApplicationException(builder.build());
    }

    /** GETs {@code url} and persists the payload, wrapping failures as 500s. */
    private void fetchAndPersistOrWrap(String url, String fileName) {
        try {
            Response response = createXmlClient(url).get();
            validateOrPersist(response, fileName);
        } catch (Exception e) {
            e.printStackTrace();
            throw toInternalError(e);
        }
    }

    /**
     * Ensures the GET payload file exists, generating it via {@link #testGet()}
     * if necessary; returns {@code null} when it still cannot be produced.
     */
    private File ensurePayloadFile() throws Exception {
        File myFile = new File(GET_RESULT_FILE);
        if (!myFile.exists()) {
            testGet(); // produces the XML payload as a side effect
            myFile = new File(GET_RESULT_FILE);
            if (!myFile.exists()) {
                return null;
            }
        }
        return myFile;
    }

    /** Prints the HttpClient response body, closing the reader even on failure. */
    private void printEntity(HttpResponse response) throws IOException {
        if (response.getEntity() == null) {
            return; // originals NPE'd here when the server sent no body
        }
        BufferedReader br = new BufferedReader(
                new InputStreamReader(response.getEntity().getContent()));
        try {
            String output;
            System.out.println("Output from Server .... \n");
            while ((output = br.readLine()) != null) {
                System.out.println(output);
            }
        } finally {
            br.close();
        }
    }

    /**
     * Fetches an existing Wheel by id and persists the XML payload.
     *
     * @throws Exception on query or HTTP failure
     */
    public void testGet() throws Exception {
        try {
            String id = findFirstWheelId();
            if (id == null) {
                return; // nothing persisted, nothing to fetch
            }
            Response response = createXmlClient(baseURL + "/rest/Wheel/" + id).get();
            validateOrPersist(response, GET_RESULT_FILE);
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    /**
     * Searches all Wheels and persists the result payload.
     *
     * @throws Exception on HTTP failure. The original swallowed the exception,
     *         silently passing the test on error; it is now rethrown.
     */
    public void testSearch() throws Exception {
        try {
            Response response = createXmlClient(baseURL + "/rest/Wheel/search;id=*").get();
            validateOrPersist(response, SEARCH_RESULT_FILE);
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    //***************************************************
    /** Traverses the luggage association for every Wheel via the search URL. */
    public void testWheel1() {
        fetchAndPersistOrWrap(baseURL + "/rest/Wheel/search;id=*/luggage", SEARCH_RESULT_FILE);
    }

    /** Traverses the luggage association for one specific Wheel id. */
    public void testgetLuggage() {
        String id;
        try {
            id = findFirstWheelId();
        } catch (Exception e) {
            e.printStackTrace();
            throw toInternalError(e);
        }
        if (id == null) {
            return;
        }
        fetchAndPersistOrWrap(baseURL + "/rest/Wheel/" + id + "/luggage", SEARCH_RESULT_FILE);
    }
    //********************************************************End

    /**
     * Deletes an existing Wheel by id; validates the error document on 404/406
     * and fails on any other non-200 status. No payload is persisted.
     */
    public void testDelete() throws Exception {
        try {
            String id = findFirstWheelId();
            if (id == null) {
                return;
            }
            WebClient client = WebClient.create(baseURL + "/rest/Wheel/" + id);
            Response response = client.delete();
            validateErrorDocument(response);
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        }
    }

    /** POSTs the previously persisted XML payload back to the resource. */
    public void testPost() throws Exception {
        DefaultHttpClient httpClient = new DefaultHttpClient();
        try {
            File myFile = ensurePayloadFile();
            if (myFile == null) {
                return;
            }
            HttpPost postRequest = new HttpPost(baseURL + "/rest/Wheel");
            FileEntity input = new FileEntity(myFile);
            input.setContentType("application/xml");
            System.out.println("input: " + myFile);
            postRequest.setEntity(input);
            HttpResponse response = httpClient.execute(postRequest);
            printEntity(response);
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        } finally {
            // Always release the connection pool (originals skipped this on failure).
            httpClient.getConnectionManager().shutdown();
        }
    }

    /** PUTs the previously persisted XML payload back to the resource. */
    public void testPut() throws Exception {
        DefaultHttpClient httpClient = new DefaultHttpClient();
        try {
            File myFile = ensurePayloadFile();
            if (myFile == null) {
                return;
            }
            HttpPut putRequest = new HttpPut(baseURL + "/rest/Wheel");
            FileEntity input = new FileEntity(myFile);
            input.setContentType("application/xml");
            putRequest.setEntity(input);
            HttpResponse response = httpClient.execute(putRequest);
            printEntity(response);
        } catch (Exception e) {
            e.printStackTrace();
            throw e;
        } finally {
            httpClient.getConnectionManager().shutdown();
        }
    }
}
/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.offline;

import static com.google.android.exoplayer2.offline.DownloadRequest.TYPE_PROGRESSIVE;
import static com.google.common.truth.Truth.assertThat;

import android.net.Uri;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.database.ExoDatabaseProvider;
import com.google.android.exoplayer2.testutil.TestUtil;
import com.google.android.exoplayer2.util.Util;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

/** Unit tests for {@link ActionFileUpgradeUtil}. */
@RunWith(AndroidJUnit4.class)
public class ActionFileUpgradeUtilTest {

  /** Fixed "current time" passed to merge operations. */
  private static final long NOW_MS = 1234;

  private File tempFile;
  private ExoDatabaseProvider databaseProvider;
  private DefaultDownloadIndex downloadIndex;

  @Before
  public void setUp() throws Exception {
    tempFile = Util.createTempFile(ApplicationProvider.getApplicationContext(), "ExoPlayerTest");
    databaseProvider = new ExoDatabaseProvider(ApplicationProvider.getApplicationContext());
    downloadIndex = new DefaultDownloadIndex(databaseProvider);
  }

  @After
  public void tearDown() {
    databaseProvider.close();
    tempFile.delete();
  }

  @Test
  public void upgradeAndDelete_createsDownloads() throws IOException {
    // Materialize the legacy action file from the bundled test asset.
    byte[] legacyActionData =
        TestUtil.getByteArray(
            ApplicationProvider.getApplicationContext(),
            "offline/action_file_for_download_index_upgrade.exi");
    try (FileOutputStream output = new FileOutputStream(tempFile)) {
      output.write(legacyActionData);
    }
    DownloadRequest expectedRequest1 =
        createRequest(
            "key123",
            /* type= */ "test",
            "https://www.test.com/download1",
            /* customCacheKey= */ "key123",
            new byte[] {1, 2, 3, 4},
            new StreamKey(/* periodIndex= */ 3, /* groupIndex= */ 4, /* trackIndex= */ 5));
    DownloadRequest expectedRequest2 =
        createRequest(
            "key234",
            /* type= */ "test",
            "https://www.test.com/download2",
            /* customCacheKey= */ "key234",
            new byte[] {5, 4, 3, 2, 1},
            new StreamKey(/* periodIndex= */ 0, /* groupIndex= */ 1, /* trackIndex= */ 2));

    ActionFileUpgradeUtil.upgradeAndDelete(
        tempFile,
        /* downloadIdProvider= */ null,
        downloadIndex,
        /* deleteOnFailure= */ true,
        /* addNewDownloadsAsCompleted= */ false);

    // Both legacy actions should surface as queued downloads in the index.
    assertDownloadIndexContainsRequest(expectedRequest1, Download.STATE_QUEUED);
    assertDownloadIndexContainsRequest(expectedRequest2, Download.STATE_QUEUED);
  }

  @Test
  public void mergeRequest_nonExistingDownload_createsNewDownload() throws IOException {
    DownloadRequest request =
        createRequest(
            "id",
            TYPE_PROGRESSIVE,
            "https://www.test.com/download",
            /* customCacheKey= */ "key123",
            new byte[] {1, 2, 3, 4},
            new StreamKey(/* periodIndex= */ 0, /* groupIndex= */ 1, /* trackIndex= */ 2),
            new StreamKey(/* periodIndex= */ 3, /* groupIndex= */ 4, /* trackIndex= */ 5));

    ActionFileUpgradeUtil.mergeRequest(
        request, downloadIndex, /* addNewDownloadAsCompleted= */ false, NOW_MS);

    assertDownloadIndexContainsRequest(request, Download.STATE_QUEUED);
  }

  @Test
  public void mergeRequest_existingDownload_createsMergedDownload() throws IOException {
    StreamKey firstKey =
        new StreamKey(/* periodIndex= */ 3, /* groupIndex= */ 4, /* trackIndex= */ 5);
    StreamKey secondKey =
        new StreamKey(/* periodIndex= */ 0, /* groupIndex= */ 1, /* trackIndex= */ 2);
    DownloadRequest originalRequest =
        createRequest(
            "id",
            TYPE_PROGRESSIVE,
            "https://www.test.com/download1",
            /* customCacheKey= */ "key123",
            new byte[] {1, 2, 3, 4},
            firstKey);
    DownloadRequest updateRequest =
        createRequest(
            "id",
            TYPE_PROGRESSIVE,
            "https://www.test.com/download2",
            /* customCacheKey= */ "key123",
            new byte[] {5, 4, 3, 2, 1},
            secondKey);

    ActionFileUpgradeUtil.mergeRequest(
        originalRequest, downloadIndex, /* addNewDownloadAsCompleted= */ false, NOW_MS);
    ActionFileUpgradeUtil.mergeRequest(
        updateRequest, downloadIndex, /* addNewDownloadAsCompleted= */ false, NOW_MS);

    // The second merge should overwrite scalar fields but union the stream keys.
    Download mergedDownload = downloadIndex.getDownload(updateRequest.id);
    assertThat(mergedDownload).isNotNull();
    assertThat(mergedDownload.request.type).isEqualTo(updateRequest.type);
    assertThat(mergedDownload.request.customCacheKey).isEqualTo(updateRequest.customCacheKey);
    assertThat(mergedDownload.request.data).isEqualTo(updateRequest.data);
    assertThat(mergedDownload.request.uri).isEqualTo(updateRequest.uri);
    assertThat(mergedDownload.request.streamKeys).containsExactly(firstKey, secondKey);
    assertThat(mergedDownload.state).isEqualTo(Download.STATE_QUEUED);
  }

  @Test
  public void mergeRequest_addNewDownloadAsCompleted() throws IOException {
    StreamKey firstKey =
        new StreamKey(/* periodIndex= */ 3, /* groupIndex= */ 4, /* trackIndex= */ 5);
    StreamKey secondKey =
        new StreamKey(/* periodIndex= */ 0, /* groupIndex= */ 1, /* trackIndex= */ 2);
    DownloadRequest existingRequest =
        createRequest(
            "id1",
            TYPE_PROGRESSIVE,
            "https://www.test.com/download1",
            /* customCacheKey= */ "key123",
            new byte[] {1, 2, 3, 4},
            firstKey);
    DownloadRequest newRequest =
        createRequest(
            "id2",
            TYPE_PROGRESSIVE,
            "https://www.test.com/download2",
            /* customCacheKey= */ "key123",
            new byte[] {5, 4, 3, 2, 1},
            secondKey);
    ActionFileUpgradeUtil.mergeRequest(
        existingRequest, downloadIndex, /* addNewDownloadAsCompleted= */ false, NOW_MS);

    // Merging an existing download keeps it queued.
    ActionFileUpgradeUtil.mergeRequest(
        existingRequest, downloadIndex, /* addNewDownloadAsCompleted= */ true, NOW_MS);
    assertThat(downloadIndex.getDownload(existingRequest.id).state)
        .isEqualTo(Download.STATE_QUEUED);

    // A genuinely new download is merged as completed.
    ActionFileUpgradeUtil.mergeRequest(
        newRequest, downloadIndex, /* addNewDownloadAsCompleted= */ true, NOW_MS);
    assertThat(downloadIndex.getDownload(newRequest.id).state)
        .isEqualTo(Download.STATE_COMPLETED);
  }

  /** Asserts the index holds {@code request} verbatim, in the given {@code state}. */
  private void assertDownloadIndexContainsRequest(DownloadRequest request, int state)
      throws IOException {
    Download download = downloadIndex.getDownload(request.id);
    assertThat(download.request).isEqualTo(request);
    assertThat(download.state).isEqualTo(state);
  }

  /** Builds a {@link DownloadRequest} from plain values plus a varargs key list. */
  private static DownloadRequest createRequest(
      String id, String type, String uri, String customCacheKey, byte[] data, StreamKey... keys) {
    return new DownloadRequest(id, type, Uri.parse(uri), Arrays.asList(keys), customCacheKey, data);
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.checkpoint;

import org.apache.flink.api.common.JobID;
import org.apache.flink.runtime.OperatorIDPair;
import org.apache.flink.runtime.checkpoint.metadata.CheckpointMetadata;
import org.apache.flink.runtime.executiongraph.ExecutionAttemptID;
import org.apache.flink.runtime.executiongraph.ExecutionVertex;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.operators.coordination.OperatorInfo;
import org.apache.flink.runtime.state.CheckpointMetadataOutputStream;
import org.apache.flink.runtime.state.CheckpointStorageLocation;
import org.apache.flink.runtime.state.CompletedCheckpointStorageLocation;
import org.apache.flink.runtime.state.StateUtil;
import org.apache.flink.runtime.state.memory.ByteStreamStateHandle;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.Preconditions;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nullable;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledFuture;

import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
import static org.apache.flink.util.Preconditions.checkState;

/**
 * A pending checkpoint is a checkpoint that has been started, but has not been acknowledged by all
 * tasks that need to acknowledge it. Once all tasks have acknowledged it, it becomes a {@link
 * CompletedCheckpoint}.
 *
 * <p>Note that the pending checkpoint, as well as the successful checkpoint keep the state handles
 * always as serialized values, never as actual values.
 *
 * <p>All mutable state is guarded by {@link #lock}; callbacks into stats trackers copy the
 * {@code statsCallback} reference onto the stack to avoid races with concurrent modification.
 */
public class PendingCheckpoint implements Checkpoint {

    /** Result of the {@link PendingCheckpoint#acknowledgeTask} method. */
    public enum TaskAcknowledgeResult {
        SUCCESS, // successful acknowledge of the task
        DUPLICATE, // acknowledge message is a duplicate
        UNKNOWN, // unknown task acknowledged
        DISCARDED // pending checkpoint has been discarded
    }

    // ------------------------------------------------------------------------

    /** The PendingCheckpoint logs to the same logger as the CheckpointCoordinator. */
    private static final Logger LOG = LoggerFactory.getLogger(CheckpointCoordinator.class);

    /** Guards all mutable acknowledge/dispose state below. */
    private final Object lock = new Object();

    private final JobID jobId;

    private final long checkpointId;

    private final long checkpointTimestamp;

    /** States collected so far, keyed by (generated) operator id. */
    private final Map<OperatorID, OperatorState> operatorStates;

    /** Tasks that still have to acknowledge; entries are removed as acks arrive. */
    private final Map<ExecutionAttemptID, ExecutionVertex> notYetAcknowledgedTasks;

    private final Set<OperatorID> notYetAcknowledgedOperatorCoordinators;

    private final List<MasterState> masterStates;

    private final Set<String> notYetAcknowledgedMasterStates;

    /** Set of acknowledged tasks. */
    private final Set<ExecutionAttemptID> acknowledgedTasks;

    /** The checkpoint properties. */
    private final CheckpointProperties props;

    /** Target storage location to persist the checkpoint metadata to. */
    private final CheckpointStorageLocation targetLocation;

    /** The promise to fulfill once the checkpoint has been completed. */
    private final CompletableFuture<CompletedCheckpoint> onCompletionPromise;

    private int numAcknowledgedTasks;

    private boolean disposed;

    private boolean discarded;

    /** Optional stats tracker callback. */
    @Nullable private PendingCheckpointStats statsCallback;

    /** Handle of the scheduled timeout canceller; volatile as it is set outside the lock path. */
    private volatile ScheduledFuture<?> cancellerHandle;

    private CheckpointException failureCause;

    // --------------------------------------------------------------------------------------------

    public PendingCheckpoint(
            JobID jobId,
            long checkpointId,
            long checkpointTimestamp,
            Map<ExecutionAttemptID, ExecutionVertex> verticesToConfirm,
            Collection<OperatorID> operatorCoordinatorsToConfirm,
            Collection<String> masterStateIdentifiers,
            CheckpointProperties props,
            CheckpointStorageLocation targetLocation,
            CompletableFuture<CompletedCheckpoint> onCompletionPromise) {
        checkArgument(
                verticesToConfirm.size() > 0,
                "Checkpoint needs at least one vertex that commits the checkpoint");

        this.jobId = checkNotNull(jobId);
        this.checkpointId = checkpointId;
        this.checkpointTimestamp = checkpointTimestamp;
        this.notYetAcknowledgedTasks = checkNotNull(verticesToConfirm);
        this.props = checkNotNull(props);
        this.targetLocation = checkNotNull(targetLocation);

        this.operatorStates = new HashMap<>();
        this.masterStates = new ArrayList<>(masterStateIdentifiers.size());
        this.notYetAcknowledgedMasterStates =
                masterStateIdentifiers.isEmpty()
                        ? Collections.emptySet()
                        : new HashSet<>(masterStateIdentifiers);
        this.notYetAcknowledgedOperatorCoordinators =
                operatorCoordinatorsToConfirm.isEmpty()
                        ? Collections.emptySet()
                        : new HashSet<>(operatorCoordinatorsToConfirm);
        this.acknowledgedTasks = new HashSet<>(verticesToConfirm.size());
        this.onCompletionPromise = checkNotNull(onCompletionPromise);
    }

    // --------------------------------------------------------------------------------------------

    // ------------------------------------------------------------------------
    //  Properties
    // ------------------------------------------------------------------------

    public JobID getJobId() {
        return jobId;
    }

    /** @deprecated use {@link #getCheckpointID()} */
    @Deprecated
    public long getCheckpointId() {
        return getCheckpointID();
    }

    @Override
    public long getCheckpointID() {
        return checkpointId;
    }

    public CheckpointStorageLocation getCheckpointStorageLocation() {
        return targetLocation;
    }

    public long getCheckpointTimestamp() {
        return checkpointTimestamp;
    }

    public int getNumberOfNonAcknowledgedTasks() {
        return notYetAcknowledgedTasks.size();
    }

    public int getNumberOfNonAcknowledgedOperatorCoordinators() {
        return notYetAcknowledgedOperatorCoordinators.size();
    }

    public int getNumberOfAcknowledgedTasks() {
        return numAcknowledgedTasks;
    }

    public Map<OperatorID, OperatorState> getOperatorStates() {
        return operatorStates;
    }

    public List<MasterState> getMasterStates() {
        return masterStates;
    }

    /** True when all tasks, coordinators and master states have acknowledged. */
    public boolean isFullyAcknowledged() {
        return areTasksFullyAcknowledged()
                && areCoordinatorsFullyAcknowledged()
                && areMasterStatesFullyAcknowledged();
    }

    boolean areMasterStatesFullyAcknowledged() {
        return notYetAcknowledgedMasterStates.isEmpty() && !disposed;
    }

    boolean areCoordinatorsFullyAcknowledged() {
        return notYetAcknowledgedOperatorCoordinators.isEmpty() && !disposed;
    }

    boolean areTasksFullyAcknowledged() {
        return notYetAcknowledgedTasks.isEmpty() && !disposed;
    }

    public boolean isAcknowledgedBy(ExecutionAttemptID executionAttemptId) {
        return !notYetAcknowledgedTasks.containsKey(executionAttemptId);
    }

    public boolean isDisposed() {
        return disposed;
    }

    /**
     * Checks whether this checkpoint can be subsumed or whether it should always continue,
     * regardless of newer checkpoints in progress.
     *
     * @return True if the checkpoint can be subsumed, false otherwise.
     */
    public boolean canBeSubsumed() {
        // Savepoints must never be subsumed by newer checkpoints.
        return !props.isSavepoint();
    }

    CheckpointProperties getProps() {
        return props;
    }

    /**
     * Sets the callback for tracking this pending checkpoint.
     *
     * @param trackerCallback Callback for collecting subtask stats.
     */
    void setStatsCallback(@Nullable PendingCheckpointStats trackerCallback) {
        this.statsCallback = trackerCallback;
    }

    /**
     * Sets the handle for the canceller to this pending checkpoint. This method fails with an
     * exception if a handle has already been set.
     *
     * @return true, if the handle was set, false, if the checkpoint is already disposed;
     */
    public boolean setCancellerHandle(ScheduledFuture<?> cancellerHandle) {
        synchronized (lock) {
            if (this.cancellerHandle == null) {
                if (!disposed) {
                    this.cancellerHandle = cancellerHandle;
                    return true;
                } else {
                    return false;
                }
            } else {
                throw new IllegalStateException("A canceller handle was already set");
            }
        }
    }

    public CheckpointException getFailureCause() {
        return failureCause;
    }

    // ------------------------------------------------------------------------
    //  Progress and Completion
    // ------------------------------------------------------------------------

    /**
     * Returns the completion future.
     *
     * @return A future to the completed checkpoint
     */
    public CompletableFuture<CompletedCheckpoint> getCompletionFuture() {
        return onCompletionPromise;
    }

    /**
     * Persists the checkpoint metadata and turns this pending checkpoint into a {@link
     * CompletedCheckpoint}, completing {@link #getCompletionFuture()}. Must only be called once
     * the checkpoint is fully acknowledged; on failure the promise is completed exceptionally.
     */
    public CompletedCheckpoint finalizeCheckpoint(
            CheckpointsCleaner checkpointsCleaner, Runnable postCleanup, Executor executor)
            throws IOException {

        synchronized (lock) {
            checkState(!isDisposed(), "checkpoint is discarded");
            checkState(
                    isFullyAcknowledged(),
                    "Pending checkpoint has not been fully acknowledged yet");

            // make sure we fulfill the promise with an exception if something fails
            try {
                // write out the metadata
                final CheckpointMetadata savepoint =
                        new CheckpointMetadata(checkpointId, operatorStates.values(), masterStates);
                final CompletedCheckpointStorageLocation finalizedLocation;

                try (CheckpointMetadataOutputStream out =
                        targetLocation.createMetadataOutputStream()) {
                    Checkpoints.storeCheckpointMetadata(savepoint, out);
                    finalizedLocation = out.closeAndFinalizeCheckpoint();
                }

                CompletedCheckpoint completed =
                        new CompletedCheckpoint(
                                jobId,
                                checkpointId,
                                checkpointTimestamp,
                                System.currentTimeMillis(),
                                operatorStates,
                                masterStates,
                                props,
                                finalizedLocation);

                onCompletionPromise.complete(completed);

                // to prevent null-pointers from concurrent modification, copy reference onto stack
                PendingCheckpointStats statsCallback = this.statsCallback;
                if (statsCallback != null) {
                    // Finalize the statsCallback and give the completed checkpoint a
                    // callback for discards.
                    CompletedCheckpointStats.DiscardCallback discardCallback =
                            statsCallback.reportCompletedCheckpoint(
                                    finalizedLocation.getExternalPointer());
                    completed.setDiscardCallback(discardCallback);
                }

                // mark this pending checkpoint as disposed, but do NOT drop the state
                dispose(false, checkpointsCleaner, postCleanup, executor);

                return completed;
            } catch (Throwable t) {
                onCompletionPromise.completeExceptionally(t);
                ExceptionUtils.rethrowIOException(t);
                return null; // silence the compiler
            }
        }
    }

    /**
     * Acknowledges the task with the given execution attempt id and the given subtask state.
     *
     * @param executionAttemptId of the acknowledged task
     * @param operatorSubtaskStates of the acknowledged task
     * @param metrics Checkpoint metrics for the stats
     * @return TaskAcknowledgeResult of the operation
     */
    public TaskAcknowledgeResult acknowledgeTask(
            ExecutionAttemptID executionAttemptId,
            TaskStateSnapshot operatorSubtaskStates,
            CheckpointMetrics metrics) {

        synchronized (lock) {
            if (disposed) {
                return TaskAcknowledgeResult.DISCARDED;
            }

            final ExecutionVertex vertex = notYetAcknowledgedTasks.remove(executionAttemptId);

            if (vertex == null) {
                // Distinguish a late duplicate ack from an ack we never expected.
                if (acknowledgedTasks.contains(executionAttemptId)) {
                    return TaskAcknowledgeResult.DUPLICATE;
                } else {
                    return TaskAcknowledgeResult.UNKNOWN;
                }
            } else {
                acknowledgedTasks.add(executionAttemptId);
            }

            List<OperatorIDPair> operatorIDs = vertex.getJobVertex().getOperatorIDs();
            int subtaskIndex = vertex.getParallelSubtaskIndex();
            long ackTimestamp = System.currentTimeMillis();

            long stateSize = 0L;

            if (operatorSubtaskStates != null) {
                for (OperatorIDPair operatorID : operatorIDs) {

                    OperatorSubtaskState operatorSubtaskState =
                            operatorSubtaskStates.getSubtaskStateByOperatorID(
                                    operatorID.getGeneratedOperatorID());

                    // if no real operatorSubtaskState was reported, we insert an empty state
                    if (operatorSubtaskState == null) {
                        operatorSubtaskState = OperatorSubtaskState.builder().build();
                    }

                    OperatorState operatorState =
                            operatorStates.get(operatorID.getGeneratedOperatorID());

                    if (operatorState == null) {
                        operatorState =
                                new OperatorState(
                                        operatorID.getGeneratedOperatorID(),
                                        vertex.getTotalNumberOfParallelSubtasks(),
                                        vertex.getMaxParallelism());
                        operatorStates.put(operatorID.getGeneratedOperatorID(), operatorState);
                    }

                    operatorState.putState(subtaskIndex, operatorSubtaskState);
                    stateSize += operatorSubtaskState.getStateSize();
                }
            }

            ++numAcknowledgedTasks;

            // publish the checkpoint statistics
            // to prevent null-pointers from concurrent modification, copy reference onto stack
            final PendingCheckpointStats statsCallback = this.statsCallback;
            if (statsCallback != null) {
                // Do this in millis because the web frontend works with them
                long alignmentDurationMillis = metrics.getAlignmentDurationNanos() / 1_000_000;
                long checkpointStartDelayMillis =
                        metrics.getCheckpointStartDelayNanos() / 1_000_000;

                SubtaskStateStats subtaskStateStats =
                        new SubtaskStateStats(
                                subtaskIndex,
                                ackTimestamp,
                                stateSize,
                                metrics.getSyncDurationMillis(),
                                metrics.getAsyncDurationMillis(),
                                metrics.getBytesProcessedDuringAlignment(),
                                metrics.getBytesPersistedDuringAlignment(),
                                alignmentDurationMillis,
                                checkpointStartDelayMillis,
                                metrics.getUnalignedCheckpoint());

                statsCallback.reportSubtaskStats(vertex.getJobvertexId(), subtaskStateStats);
            }

            return TaskAcknowledgeResult.SUCCESS;
        }
    }

    /**
     * Acknowledges a state snapshot from an operator coordinator. A {@code null} state handle
     * means the coordinator had no state to report for this checkpoint.
     */
    public TaskAcknowledgeResult acknowledgeCoordinatorState(
            OperatorInfo coordinatorInfo, @Nullable ByteStreamStateHandle stateHandle) {

        synchronized (lock) {
            if (disposed) {
                return TaskAcknowledgeResult.DISCARDED;
            }

            final OperatorID operatorId = coordinatorInfo.operatorId();
            OperatorState operatorState = operatorStates.get(operatorId);

            // sanity check for better error reporting
            if (!notYetAcknowledgedOperatorCoordinators.remove(operatorId)) {
                return operatorState != null && operatorState.getCoordinatorState() != null
                        ? TaskAcknowledgeResult.DUPLICATE
                        : TaskAcknowledgeResult.UNKNOWN;
            }

            if (stateHandle != null) {
                if (operatorState == null) {
                    operatorState =
                            new OperatorState(
                                    operatorId,
                                    coordinatorInfo.currentParallelism(),
                                    coordinatorInfo.maxParallelism());
                    operatorStates.put(operatorId, operatorState);
                }
                operatorState.setCoordinatorState(stateHandle);
            }

            return TaskAcknowledgeResult.SUCCESS;
        }
    }

    /**
     * Acknowledges a master state (state generated on the checkpoint coordinator) to the pending
     * checkpoint.
     *
     * @param identifier The identifier of the master state
     * @param state The state to acknowledge
     */
    public void acknowledgeMasterState(String identifier, @Nullable MasterState state) {

        synchronized (lock) {
            if (!disposed) {
                if (notYetAcknowledgedMasterStates.remove(identifier) && state != null) {
                    masterStates.add(state);
                }
            }
        }
    }

    // ------------------------------------------------------------------------
    //  Cancellation
    // ------------------------------------------------------------------------

    /** Aborts a checkpoint with reason and cause. */
    public void abort(
            CheckpointFailureReason reason,
            @Nullable Throwable cause,
            CheckpointsCleaner checkpointsCleaner,
            Runnable postCleanup,
            Executor executor) {
        try {
            failureCause = new CheckpointException(reason, cause);
            onCompletionPromise.completeExceptionally(failureCause);
            reportFailedCheckpoint(failureCause);
            assertAbortSubsumedForced(reason);
        } finally {
            // State is released here (unlike in finalizeCheckpoint, which keeps it).
            dispose(true, checkpointsCleaner, postCleanup, executor);
        }
    }

    private void assertAbortSubsumedForced(CheckpointFailureReason reason) {
        if (props.isSavepoint() && reason == CheckpointFailureReason.CHECKPOINT_SUBSUMED) {
            throw new IllegalStateException(
                    "Bug: savepoints must never be subsumed, "
                            + "the abort reason is : "
                            + reason.message());
        }
    }

    /**
     * Marks this checkpoint disposed, clears the ack bookkeeping, cancels the timeout canceller
     * and hands the checkpoint to the cleaner (which drops state iff {@code releaseState}).
     */
    private void dispose(
            boolean releaseState,
            CheckpointsCleaner checkpointsCleaner,
            Runnable postCleanup,
            Executor executor) {

        synchronized (lock) {
            try {
                // NOTE(review): -1 appears to act as a "disposed" sentinel for readers of
                // getNumberOfAcknowledgedTasks() — confirm before relying on it.
                numAcknowledgedTasks = -1;
                checkpointsCleaner.cleanCheckpoint(this, releaseState, postCleanup, executor);
            } finally {
                disposed = true;
                notYetAcknowledgedTasks.clear();
                acknowledgedTasks.clear();
                cancelCanceller();
            }
        }
    }

    /**
     * Discard state. Must be called after {@link #dispose(boolean, CheckpointsCleaner, Runnable,
     * Executor) dispose}.
     */
    @Override
    public void discard() {
        synchronized (lock) {
            if (discarded) {
                Preconditions.checkState(
                        disposed, "Checkpoint should be disposed before being discarded");
                return;
            } else {
                discarded = true;
            }
        }
        // discard the private states.
        // unregistered shared states are still considered private at this point.
        try {
            StateUtil.bestEffortDiscardAllStateObjects(operatorStates.values());
            targetLocation.disposeOnFailure();
        } catch (Throwable t) {
            LOG.warn(
                    "Could not properly dispose the private states in the pending checkpoint {} of job {}.",
                    checkpointId,
                    jobId,
                    t);
        } finally {
            operatorStates.clear();
        }
    }

    /** Best-effort cancellation of the scheduled checkpoint-timeout task. */
    private void cancelCanceller() {
        try {
            final ScheduledFuture<?> canceller = this.cancellerHandle;
            if (canceller != null) {
                canceller.cancel(false);
            }
        } catch (Exception e) {
            // this code should not throw exceptions
            LOG.warn("Error while cancelling checkpoint timeout task", e);
        }
    }

    /**
     * Reports a failed checkpoint with the given optional cause.
     *
     * @param cause The failure cause or <code>null</code>.
     */
    private void reportFailedCheckpoint(Exception cause) {
        // to prevent null-pointers from concurrent modification, copy reference onto stack
        final PendingCheckpointStats statsCallback = this.statsCallback;
        if (statsCallback != null) {
            long failureTimestamp = System.currentTimeMillis();
            statsCallback.reportFailedCheckpoint(failureTimestamp, cause);
        }
    }

    // ------------------------------------------------------------------------
    //  Utilities
    // ------------------------------------------------------------------------

    @Override
    public String toString() {
        return String.format(
                "Pending Checkpoint %d @ %d - confirmed=%d, pending=%d",
                checkpointId,
                checkpointTimestamp,
                getNumberOfAcknowledgedTasks(),
                getNumberOfNonAcknowledgedTasks());
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.network.v2017_10_01;

import com.microsoft.azure.arm.model.HasInner;
import com.microsoft.azure.management.network.v2017_10_01.implementation.SecurityRuleInner;
import com.microsoft.azure.arm.model.Indexable;
import com.microsoft.azure.arm.model.Refreshable;
import com.microsoft.azure.arm.model.Updatable;
import com.microsoft.azure.arm.model.Appliable;
import com.microsoft.azure.arm.model.Creatable;
import com.microsoft.azure.arm.resources.models.HasManager;
import com.microsoft.azure.management.network.v2017_10_01.implementation.NetworkManager;
import java.util.List;
import com.microsoft.azure.management.network.v2017_10_01.implementation.ApplicationSecurityGroupInner;

/**
 * Type representing NetworkSecurityGroupSecurityRule.
 * NOTE: this file is generated by AutoRest; behavioral changes belong in the code generator,
 * not here.
 */
public interface NetworkSecurityGroupSecurityRule extends HasInner<SecurityRuleInner>, Indexable, Refreshable<NetworkSecurityGroupSecurityRule>, Updatable<NetworkSecurityGroupSecurityRule.Update>, HasManager<NetworkManager> {
    /**
     * @return the access value.
     */
    SecurityRuleAccess access();

    /**
     * @return the description value.
     */
    String description();

    /**
     * @return the destinationAddressPrefix value.
     */
    String destinationAddressPrefix();

    /**
     * @return the destinationAddressPrefixes value.
     */
    List<String> destinationAddressPrefixes();

    /**
     * @return the destinationApplicationSecurityGroups value.
     */
    List<ApplicationSecurityGroup> destinationApplicationSecurityGroups();

    /**
     * @return the destinationPortRange value.
     */
    String destinationPortRange();

    /**
     * @return the destinationPortRanges value.
     */
    List<String> destinationPortRanges();

    /**
     * @return the direction value.
     */
    SecurityRuleDirection direction();

    /**
     * @return the etag value.
     */
    String etag();

    /**
     * @return the id value.
     */
    String id();

    /**
     * @return the name value.
     */
    String name();

    /**
     * @return the priority value.
     */
    Integer priority();

    /**
     * @return the protocol value.
     */
    SecurityRuleProtocol protocol();

    /**
     * @return the provisioningState value.
     */
    String provisioningState();

    /**
     * @return the sourceAddressPrefix value.
     */
    String sourceAddressPrefix();

    /**
     * @return the sourceAddressPrefixes value.
     */
    List<String> sourceAddressPrefixes();

    /**
     * @return the sourceApplicationSecurityGroups value.
     */
    List<ApplicationSecurityGroup> sourceApplicationSecurityGroups();

    /**
     * @return the sourcePortRange value.
     */
    String sourcePortRange();

    /**
     * @return the sourcePortRanges value.
     */
    List<String> sourcePortRanges();

    /**
     * The entirety of the NetworkSecurityGroupSecurityRule definition.
     */
    interface Definition extends DefinitionStages.Blank, DefinitionStages.WithNetworkSecurityGroup, DefinitionStages.WithAccess, DefinitionStages.WithDirection, DefinitionStages.WithProtocol, DefinitionStages.WithCreate {
    }

    /**
     * Grouping of NetworkSecurityGroupSecurityRule definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a NetworkSecurityGroupSecurityRule definition.
         */
        interface Blank extends WithNetworkSecurityGroup {
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify NetworkSecurityGroup.
         */
        interface WithNetworkSecurityGroup {
           /**
            * Specifies resourceGroupName, networkSecurityGroupName.
            * @param resourceGroupName The name of the resource group
            * @param networkSecurityGroupName The name of the network security group
            * @return the next definition stage
            */
            WithAccess withExistingNetworkSecurityGroup(String resourceGroupName, String networkSecurityGroupName);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify Access.
         */
        interface WithAccess {
           /**
            * Specifies access.
            * @param access The network traffic is allowed or denied. Possible values are: 'Allow' and 'Deny'. Possible values include: 'Allow', 'Deny'
            * @return the next definition stage
            */
            WithDirection withAccess(SecurityRuleAccess access);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify Direction.
         */
        interface WithDirection {
           /**
            * Specifies direction.
            * @param direction The direction of the rule. The direction specifies if rule will be evaluated on incoming or outgoing traffic. Possible values are: 'Inbound' and 'Outbound'. Possible values include: 'Inbound', 'Outbound'
            * @return the next definition stage
            */
            WithProtocol withDirection(SecurityRuleDirection direction);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify Protocol.
         */
        interface WithProtocol {
           /**
            * Specifies protocol.
            * @param protocol Network protocol this rule applies to. Possible values are 'Tcp', 'Udp', and '*'. Possible values include: 'Tcp', 'Udp', '*'
            * @return the next definition stage
            */
            WithCreate withProtocol(SecurityRuleProtocol protocol);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify Description.
         */
        interface WithDescription {
            /**
             * Specifies description.
             * @param description A description for this rule. Restricted to 140 chars
             * @return the next definition stage
             */
            WithCreate withDescription(String description);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify DestinationAddressPrefix.
         */
        interface WithDestinationAddressPrefix {
            /**
             * Specifies destinationAddressPrefix.
             * @param destinationAddressPrefix The destination address prefix. CIDR or destination IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used
             * @return the next definition stage
             */
            WithCreate withDestinationAddressPrefix(String destinationAddressPrefix);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify DestinationAddressPrefixes.
         */
        interface WithDestinationAddressPrefixes {
            /**
             * Specifies destinationAddressPrefixes.
             * @param destinationAddressPrefixes The destination address prefixes. CIDR or destination IP ranges
             * @return the next definition stage
             */
            WithCreate withDestinationAddressPrefixes(List<String> destinationAddressPrefixes);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify DestinationApplicationSecurityGroups.
         */
        interface WithDestinationApplicationSecurityGroups {
            /**
             * Specifies destinationApplicationSecurityGroups.
             * @param destinationApplicationSecurityGroups The application security group specified as destination
             * @return the next definition stage
             */
            WithCreate withDestinationApplicationSecurityGroups(List<ApplicationSecurityGroupInner> destinationApplicationSecurityGroups);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify DestinationPortRange.
         */
        interface WithDestinationPortRange {
            /**
             * Specifies destinationPortRange.
             * @param destinationPortRange The destination port or range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports
             * @return the next definition stage
             */
            WithCreate withDestinationPortRange(String destinationPortRange);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify DestinationPortRanges.
         */
        interface WithDestinationPortRanges {
            /**
             * Specifies destinationPortRanges.
             * @param destinationPortRanges The destination port ranges
             * @return the next definition stage
             */
            WithCreate withDestinationPortRanges(List<String> destinationPortRanges);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify Etag.
         */
        interface WithEtag {
            /**
             * Specifies etag.
             * @param etag A unique read-only string that changes whenever the resource is updated
             * @return the next definition stage
             */
            WithCreate withEtag(String etag);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify Id.
         */
        interface WithId {
            /**
             * Specifies id.
             * @param id Resource ID
             * @return the next definition stage
             */
            WithCreate withId(String id);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify Name.
         */
        interface WithName {
            /**
             * Specifies name.
             * @param name The name of the resource that is unique within a resource group. This name can be used to access the resource
             * @return the next definition stage
             */
            WithCreate withName(String name);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify Priority.
         */
        interface WithPriority {
            /**
             * Specifies priority.
             * @param priority The priority of the rule. The value can be between 100 and 4096. The priority number must be unique for each rule in the collection. The lower the priority number, the higher the priority of the rule
             * @return the next definition stage
             */
            WithCreate withPriority(Integer priority);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'
             * @return the next definition stage
             */
            WithCreate withProvisioningState(String provisioningState);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify SourceAddressPrefix.
         */
        interface WithSourceAddressPrefix {
            /**
             * Specifies sourceAddressPrefix.
             * @param sourceAddressPrefix The CIDR or source IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used. If this is an ingress rule, specifies where network traffic originates from
             * @return the next definition stage
             */
            WithCreate withSourceAddressPrefix(String sourceAddressPrefix);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify SourceAddressPrefixes.
         */
        interface WithSourceAddressPrefixes {
            /**
             * Specifies sourceAddressPrefixes.
             * @param sourceAddressPrefixes The CIDR or source IP ranges
             * @return the next definition stage
             */
            WithCreate withSourceAddressPrefixes(List<String> sourceAddressPrefixes);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify SourceApplicationSecurityGroups.
         */
        interface WithSourceApplicationSecurityGroups {
            /**
             * Specifies sourceApplicationSecurityGroups.
             * @param sourceApplicationSecurityGroups The application security group specified as source
             * @return the next definition stage
             */
            WithCreate withSourceApplicationSecurityGroups(List<ApplicationSecurityGroupInner> sourceApplicationSecurityGroups);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify SourcePortRange.
         */
        interface WithSourcePortRange {
            /**
             * Specifies sourcePortRange.
             * @param sourcePortRange The source port or range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports
             * @return the next definition stage
             */
            WithCreate withSourcePortRange(String sourcePortRange);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule definition allowing to specify SourcePortRanges.
         */
        interface WithSourcePortRanges {
            /**
             * Specifies sourcePortRanges.
             * @param sourcePortRanges The source port ranges
             * @return the next definition stage
             */
            WithCreate withSourcePortRanges(List<String> sourcePortRanges);
        }

        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends Creatable<NetworkSecurityGroupSecurityRule>, DefinitionStages.WithDescription, DefinitionStages.WithDestinationAddressPrefix, DefinitionStages.WithDestinationAddressPrefixes, DefinitionStages.WithDestinationApplicationSecurityGroups, DefinitionStages.WithDestinationPortRange, DefinitionStages.WithDestinationPortRanges, DefinitionStages.WithEtag, DefinitionStages.WithId, DefinitionStages.WithName, DefinitionStages.WithPriority, DefinitionStages.WithProvisioningState, DefinitionStages.WithSourceAddressPrefix, DefinitionStages.WithSourceAddressPrefixes, DefinitionStages.WithSourceApplicationSecurityGroups, DefinitionStages.WithSourcePortRange, DefinitionStages.WithSourcePortRanges {
        }
    }
    /**
     * The template for a NetworkSecurityGroupSecurityRule update operation, containing all the settings that can be modified.
     */
    interface Update extends Appliable<NetworkSecurityGroupSecurityRule>, UpdateStages.WithDescription, UpdateStages.WithDestinationAddressPrefix, UpdateStages.WithDestinationAddressPrefixes, UpdateStages.WithDestinationApplicationSecurityGroups, UpdateStages.WithDestinationPortRange, UpdateStages.WithDestinationPortRanges, UpdateStages.WithEtag, UpdateStages.WithId, UpdateStages.WithName, UpdateStages.WithPriority, UpdateStages.WithProvisioningState, UpdateStages.WithSourceAddressPrefix, UpdateStages.WithSourceAddressPrefixes, UpdateStages.WithSourceApplicationSecurityGroups, UpdateStages.WithSourcePortRange, UpdateStages.WithSourcePortRanges {
    }

    /**
     * Grouping of NetworkSecurityGroupSecurityRule update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify Description.
         */
        interface WithDescription {
            /**
             * Specifies description.
             * @param description A description for this rule. Restricted to 140 chars
             * @return the next update stage
             */
            Update withDescription(String description);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify DestinationAddressPrefix.
         */
        interface WithDestinationAddressPrefix {
            /**
             * Specifies destinationAddressPrefix.
             * @param destinationAddressPrefix The destination address prefix. CIDR or destination IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used
             * @return the next update stage
             */
            Update withDestinationAddressPrefix(String destinationAddressPrefix);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify DestinationAddressPrefixes.
         */
        interface WithDestinationAddressPrefixes {
            /**
             * Specifies destinationAddressPrefixes.
             * @param destinationAddressPrefixes The destination address prefixes. CIDR or destination IP ranges
             * @return the next update stage
             */
            Update withDestinationAddressPrefixes(List<String> destinationAddressPrefixes);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify DestinationApplicationSecurityGroups.
         */
        interface WithDestinationApplicationSecurityGroups {
            /**
             * Specifies destinationApplicationSecurityGroups.
             * @param destinationApplicationSecurityGroups The application security group specified as destination
             * @return the next update stage
             */
            Update withDestinationApplicationSecurityGroups(List<ApplicationSecurityGroupInner> destinationApplicationSecurityGroups);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify DestinationPortRange.
         */
        interface WithDestinationPortRange {
            /**
             * Specifies destinationPortRange.
             * @param destinationPortRange The destination port or range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports
             * @return the next update stage
             */
            Update withDestinationPortRange(String destinationPortRange);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify DestinationPortRanges.
         */
        interface WithDestinationPortRanges {
            /**
             * Specifies destinationPortRanges.
             * @param destinationPortRanges The destination port ranges
             * @return the next update stage
             */
            Update withDestinationPortRanges(List<String> destinationPortRanges);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify Etag.
         */
        interface WithEtag {
            /**
             * Specifies etag.
             * @param etag A unique read-only string that changes whenever the resource is updated
             * @return the next update stage
             */
            Update withEtag(String etag);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify Id.
         */
        interface WithId {
            /**
             * Specifies id.
             * @param id Resource ID
             * @return the next update stage
             */
            Update withId(String id);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify Name.
         */
        interface WithName {
            /**
             * Specifies name.
             * @param name The name of the resource that is unique within a resource group. This name can be used to access the resource
             * @return the next update stage
             */
            Update withName(String name);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify Priority.
         */
        interface WithPriority {
            /**
             * Specifies priority.
             * @param priority The priority of the rule. The value can be between 100 and 4096. The priority number must be unique for each rule in the collection. The lower the priority number, the higher the priority of the rule
             * @return the next update stage
             */
            Update withPriority(Integer priority);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify ProvisioningState.
         */
        interface WithProvisioningState {
            /**
             * Specifies provisioningState.
             * @param provisioningState The provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'
             * @return the next update stage
             */
            Update withProvisioningState(String provisioningState);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify SourceAddressPrefix.
         */
        interface WithSourceAddressPrefix {
            /**
             * Specifies sourceAddressPrefix.
             * @param sourceAddressPrefix The CIDR or source IP range. Asterisk '*' can also be used to match all source IPs. Default tags such as 'VirtualNetwork', 'AzureLoadBalancer' and 'Internet' can also be used. If this is an ingress rule, specifies where network traffic originates from
             * @return the next update stage
             */
            Update withSourceAddressPrefix(String sourceAddressPrefix);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify SourceAddressPrefixes.
         */
        interface WithSourceAddressPrefixes {
            /**
             * Specifies sourceAddressPrefixes.
             * @param sourceAddressPrefixes The CIDR or source IP ranges
             * @return the next update stage
             */
            Update withSourceAddressPrefixes(List<String> sourceAddressPrefixes);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify SourceApplicationSecurityGroups.
         */
        interface WithSourceApplicationSecurityGroups {
            /**
             * Specifies sourceApplicationSecurityGroups.
             * @param sourceApplicationSecurityGroups The application security group specified as source
             * @return the next update stage
             */
            Update withSourceApplicationSecurityGroups(List<ApplicationSecurityGroupInner> sourceApplicationSecurityGroups);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify SourcePortRange.
         */
        interface WithSourcePortRange {
            /**
             * Specifies sourcePortRange.
             * @param sourcePortRange The source port or range. Integer or range between 0 and 65535. Asterisk '*' can also be used to match all ports
             * @return the next update stage
             */
            Update withSourcePortRange(String sourcePortRange);
        }

        /**
         * The stage of the networksecuritygroupsecurityrule update allowing to specify SourcePortRanges.
         */
        interface WithSourcePortRanges {
            /**
             * Specifies sourcePortRanges.
             * @param sourcePortRanges The source port ranges
             * @return the next update stage
             */
            Update withSourcePortRanges(List<String> sourcePortRanges);
        }
    }
}
/*
       Licensed to the Apache Software Foundation (ASF) under one
       or more contributor license agreements.  See the NOTICE file
       distributed with this work for additional information
       regarding copyright ownership.  The ASF licenses this file
       to you under the Apache License, Version 2.0 (the
       "License"); you may not use this file except in compliance
       with the License.  You may obtain a copy of the License at

         http://www.apache.org/licenses/LICENSE-2.0

       Unless required by applicable law or agreed to in writing,
       software distributed under the License is distributed on an
       "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
       KIND, either express or implied.  See the License for the
       specific language governing permissions and limitations
       under the License.
*/
package org.apache.cordova;

import org.apache.cordova.api.CordovaInterface;
import org.apache.cordova.api.LOG;
import org.json.JSONArray;
import org.json.JSONException;

import android.annotation.TargetApi;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.view.ViewGroup.LayoutParams;
import android.webkit.ConsoleMessage;
import android.webkit.JsPromptResult;
import android.webkit.JsResult;
import android.webkit.ValueCallback;
import android.webkit.WebChromeClient;
import android.webkit.WebStorage;
import android.webkit.WebView;
import android.webkit.GeolocationPermissions.Callback;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.RelativeLayout;

/**
 * This class is the WebChromeClient that implements callbacks for our web view.
 * It maps JS dialogs (alert/confirm/prompt) onto native Android dialogs, and it
 * abuses window.prompt() as the JS-to-native bridge transport (see onJsPrompt).
 */
public class CordovaChromeClient extends WebChromeClient {

    // Request code used when launching the file-chooser activity.
    public static final int FILECHOOSER_RESULTCODE = 5173;
    private static final String LOG_TAG = "CordovaChromeClient";
    private String TAG = "CordovaLog";
    // Database quota granted when a page exceeds its quota (100 MB).
    private long MAX_QUOTA = 100 * 1024 * 1024;
    private CordovaInterface cordova;
    private CordovaWebView appView;

    // the video progress view
    private View mVideoProgressView;

    // File Chooser
    public ValueCallback<Uri> mUploadMessage;

    /**
     * Constructor.
     *
     * @param cordova
     */
    public CordovaChromeClient(CordovaInterface cordova) {
        this.cordova = cordova;
    }

    /**
     * Constructor.
     *
     * @param ctx
     * @param app
     */
    public CordovaChromeClient(CordovaInterface ctx, CordovaWebView app) {
        this.cordova = ctx;
        this.appView = app;
    }

    /**
     * Sets the web view this client serves (used by the single-arg constructor path).
     *
     * @param view
     */
    public void setWebView(CordovaWebView view) {
        this.appView = view;
    }

    /**
     * Tell the client to display a javascript alert dialog.
     *
     * @param view
     * @param url
     * @param message
     * @param result
     */
    @Override
    public boolean onJsAlert(WebView view, String url, String message, final JsResult result) {
        AlertDialog.Builder dlg = new AlertDialog.Builder(this.cordova.getActivity());
        dlg.setMessage(message);
        dlg.setTitle("Alert");
        //Don't let alerts break the back button
        dlg.setCancelable(true);
        dlg.setPositiveButton(android.R.string.ok,
                new AlertDialog.OnClickListener() {
                    public void onClick(DialogInterface dialog, int which) {
                        result.confirm();
                    }
                });
        dlg.setOnCancelListener(
                new DialogInterface.OnCancelListener() {
                    public void onCancel(DialogInterface dialog) {
                        result.cancel();
                    }
                });
        dlg.setOnKeyListener(new DialogInterface.OnKeyListener() {
            //DO NOTHING
            public boolean onKey(DialogInterface dialog, int keyCode, KeyEvent event) {
                // BACK confirms the alert so the page's JS is unblocked before the dialog closes.
                if (keyCode == KeyEvent.KEYCODE_BACK)
                {
                    result.confirm();
                    return false;
                }
                else
                    return true;
            }
        });
        dlg.create();
        dlg.show();
        return true;
    }

    /**
     * Tell the client to display a confirm dialog to the user.
     *
     * @param view
     * @param url
     * @param message
     * @param result
     */
    @Override
    public boolean onJsConfirm(WebView view, String url, String message, final JsResult result) {
        AlertDialog.Builder dlg = new AlertDialog.Builder(this.cordova.getActivity());
        dlg.setMessage(message);
        dlg.setTitle("Confirm");
        dlg.setCancelable(true);
        dlg.setPositiveButton(android.R.string.ok,
                new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int which) {
                        result.confirm();
                    }
                });
        dlg.setNegativeButton(android.R.string.cancel,
                new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int which) {
                        result.cancel();
                    }
                });
        dlg.setOnCancelListener(
                new DialogInterface.OnCancelListener() {
                    public void onCancel(DialogInterface dialog) {
                        result.cancel();
                    }
                });
        dlg.setOnKeyListener(new DialogInterface.OnKeyListener() {
            //DO NOTHING
            public boolean onKey(DialogInterface dialog, int keyCode, KeyEvent event) {
                // BACK cancels the confirm (unlike alert, where it confirms).
                if (keyCode == KeyEvent.KEYCODE_BACK)
                {
                    result.cancel();
                    return false;
                }
                else
                    return true;
            }
        });
        dlg.create();
        dlg.show();
        return true;
    }

    /**
     * Tell the client to display a prompt dialog to the user.
     * If the client returns true, WebView will assume that the client will
     * handle the prompt dialog and call the appropriate JsPromptResult method.
     *
     * Since we are hacking prompts for our own purposes, we should not be using them for
     * this purpose, perhaps we should hack console.log to do this instead!
     *
     * @param view
     * @param url
     * @param message
     * @param defaultValue
     * @param result
     */
    @Override
    public boolean onJsPrompt(WebView view, String url, String message, String defaultValue, JsPromptResult result) {
        // Security check to make sure any requests are coming from the page initially
        // loaded in webview and not another loaded in an iframe.
        boolean reqOk = false;
        if (url.startsWith("file://") || Config.isUrlWhiteListed(url)) {
            reqOk = true;
        }

        // Calling PluginManager.exec() to call a native service using
        // prompt(this.stringify(args), "gap:"+this.stringify([service, action, callbackId, true]));
        // The "gap:" sentinel in defaultValue marks a bridge call; the JSON payload follows it.
        if (reqOk && defaultValue != null && defaultValue.length() > 3 && defaultValue.substring(0, 4).equals("gap:")) {
            JSONArray array;
            try {
                array = new JSONArray(defaultValue.substring(4));
                String service = array.getString(0);
                String action = array.getString(1);
                String callbackId = array.getString(2);
                String r = this.appView.exposedJsApi.exec(service, action, callbackId, message);
                // Synchronous plugin result (if any) is returned to JS as the prompt's value.
                result.confirm(r == null ? "" : r);
            } catch (JSONException e) {
                e.printStackTrace();
                return false;
            }
        }

        // Sets the native->JS bridge mode.
        else if (reqOk && defaultValue != null && defaultValue.equals("gap_bridge_mode:")) {
            this.appView.exposedJsApi.setNativeToJsBridgeMode(Integer.parseInt(message));
            result.confirm("");
        }

        // Polling for JavaScript messages
        else if (reqOk && defaultValue != null && defaultValue.equals("gap_poll:")) {
            // "1" in message requests the pending messages; see ExposedJsApi for semantics.
            String r = this.appView.exposedJsApi.retrieveJsMessages("1".equals(message));
            result.confirm(r == null ? "" : r);
        }

        // Do NO-OP so older code doesn't display dialog
        // NOTE: deliberately not gated on reqOk — legacy pages may send gap_init: from any origin.
        else if (defaultValue != null && defaultValue.equals("gap_init:")) {
            result.confirm("OK");
        }

        // Show dialog
        else {
            final JsPromptResult res = result;
            AlertDialog.Builder dlg = new AlertDialog.Builder(this.cordova.getActivity());
            dlg.setMessage(message);
            final EditText input = new EditText(this.cordova.getActivity());
            if (defaultValue != null) {
                input.setText(defaultValue);
            }
            dlg.setView(input);
            dlg.setCancelable(false);
            dlg.setPositiveButton(android.R.string.ok,
                    new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int which) {
                            String usertext = input.getText().toString();
                            res.confirm(usertext);
                        }
                    });
            dlg.setNegativeButton(android.R.string.cancel,
                    new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int which) {
                            res.cancel();
                        }
                    });
            dlg.create();
            dlg.show();
        }
        return true;
    }

    /**
     * Handle database quota exceeded notification.
     * Always grants up to MAX_QUOTA regardless of the estimated size.
     */
    @Override
    public void onExceededDatabaseQuota(String url, String databaseIdentifier, long currentQuota, long estimatedSize,
            long totalUsedQuota, WebStorage.QuotaUpdater quotaUpdater)
    {
        LOG.d(TAG, "onExceededDatabaseQuota estimatedSize: %d  currentQuota: %d  totalUsedQuota: %d", estimatedSize, currentQuota, totalUsedQuota);
        quotaUpdater.updateQuota(MAX_QUOTA);
    }

    // console.log in api level 7: http://developer.android.com/guide/developing/debug-tasks.html
    // Expect this to not compile in a future Android release!
@SuppressWarnings("deprecation") @Override public void onConsoleMessage(String message, int lineNumber, String sourceID) { //This is only for Android 2.1 if(android.os.Build.VERSION.SDK_INT == android.os.Build.VERSION_CODES.ECLAIR_MR1) { LOG.d(TAG, "%s: Line %d : %s", sourceID, lineNumber, message); super.onConsoleMessage(message, lineNumber, sourceID); } } @TargetApi(8) @Override public boolean onConsoleMessage(ConsoleMessage consoleMessage) { if (consoleMessage.message() != null) LOG.d(TAG, "%s: Line %d : %s" , consoleMessage.sourceId() , consoleMessage.lineNumber(), consoleMessage.message()); return super.onConsoleMessage(consoleMessage); } @Override /** * Instructs the client to show a prompt to ask the user to set the Geolocation permission state for the specified origin. * * @param origin * @param callback */ public void onGeolocationPermissionsShowPrompt(String origin, Callback callback) { super.onGeolocationPermissionsShowPrompt(origin, callback); callback.invoke(origin, true, false); } // API level 7 is required for this, see if we could lower this using something else @Override public void onShowCustomView(View view, WebChromeClient.CustomViewCallback callback) { this.appView.showCustomView(view, callback); } @Override public void onHideCustomView() { this.appView.hideCustomView(); } @Override /** * Ask the host application for a custom progress view to show while * a <video> is loading. * @return View The progress view. */ public View getVideoLoadingProgressView() { if (mVideoProgressView == null) { // Create a new Loading view programmatically. 
// create the linear layout LinearLayout layout = new LinearLayout(this.appView.getContext()); layout.setOrientation(LinearLayout.VERTICAL); RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); layoutParams.addRule(RelativeLayout.CENTER_IN_PARENT); layout.setLayoutParams(layoutParams); // the proress bar ProgressBar bar = new ProgressBar(this.appView.getContext()); LinearLayout.LayoutParams barLayoutParams = new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); barLayoutParams.gravity = Gravity.CENTER; bar.setLayoutParams(barLayoutParams); layout.addView(bar); mVideoProgressView = layout; } return mVideoProgressView; } public void openFileChooser(ValueCallback<Uri> uploadMsg) { this.openFileChooser(uploadMsg, "*/*"); } public void openFileChooser( ValueCallback<Uri> uploadMsg, String acceptType ) { this.openFileChooser(uploadMsg, acceptType, null); } public void openFileChooser(ValueCallback<Uri> uploadMsg, String acceptType, String capture) { mUploadMessage = uploadMsg; Intent i = new Intent(Intent.ACTION_GET_CONTENT); i.addCategory(Intent.CATEGORY_OPENABLE); i.setType("*/*"); this.cordova.getActivity().startActivityForResult(Intent.createChooser(i, "File Browser"), FILECHOOSER_RESULTCODE); } public ValueCallback<Uri> getValueCallback() { return this.mUploadMessage; } }
/*
 * ValidationBean.java created on 2010-04-09
 *
 * Created by Brushing Bits Labs
 * http://www.brushingbits.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.brushingbits.jnap.validation;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.exception.ExceptionUtils;

import com.opensymphony.xwork2.ValidationAware;
import com.thoughtworks.xstream.annotations.XStreamAlias;

/**
 * Aggregates validation results — global errors, informational messages and
 * per-field errors — plus optional exception details, for serialization as
 * a {@code <validation>} element via XStream.
 *
 * @author Daniel Rochetti
 * @since 1.0
 */
@XStreamAlias("validation")
public class ValidationBean {

	private String exceptionMsg;
	private String exceptionType;
	private String exceptionStack;
	private List<String> errors;
	private List<String> messages;
	private List<FieldError> fieldErrors;

	/**
	 * Default constructor. All properties will be initialized to empty collections.
	 */
	public ValidationBean() {
		this.errors = new ArrayList<String>();
		this.messages = new ArrayList<String>();
		this.fieldErrors = new ArrayList<FieldError>();
	}

	/**
	 * Constructor that will make easier to create the {@code ValidationBean}
	 * with {@link ValidationAware} data. Any of the parameters may be
	 * {@code null}; a {@code null} collection is treated as empty.
	 *
	 * @param errors global (non-field) error messages.
	 * @param messages informational messages.
	 * @param fieldErrors map of field name to the error messages for that field.
	 */
	public ValidationBean(Collection<String> errors, Collection<String> messages,
			Map<String, List<String>> fieldErrors) {
		this();
		// FIX: the original null-checked only fieldErrors and threw NPE when
		// errors or messages was null; all three are now handled consistently.
		if (errors != null) {
			this.errors.addAll(errors);
		}
		if (messages != null) {
			this.messages.addAll(messages);
		}
		if (fieldErrors != null) {
			// Iterate entries directly instead of keySet() + get() per key.
			for (Map.Entry<String, List<String>> entry : fieldErrors.entrySet()) {
				this.fieldErrors.add(new FieldError(entry.getKey(), entry.getValue()));
			}
		}
	}

	/**
	 * Creates a bean carrying only exception details (see {@link #setException}).
	 *
	 * @param ex the exception to record.
	 */
	public ValidationBean(Exception ex) {
		this();
		this.setException(ex);
	}

	/**
	 * <code>Accessor</code> ("getter") method for property <code>errors</code>.
	 */
	public List<String> getErrors() {
		return errors;
	}

	/**
	 * <code>Mutator</code> ("setter") method for property <code>errors</code>.
	 */
	public void setErrors(List<String> actionErrors) {
		this.errors = actionErrors;
	}

	/**
	 * <code>Accessor</code> ("getter") method for property <code>messages</code>.
	 */
	public List<String> getMessages() {
		return messages;
	}

	/**
	 * <code>Mutator</code> ("setter") method for property <code>messages</code>.
	 */
	public void setMessages(List<String> messages) {
		this.messages = messages;
	}

	/**
	 * <code>Accessor</code> ("getter") method for property <code>fieldErrors</code>.
	 */
	public List<FieldError> getFieldErrors() {
		return fieldErrors;
	}

	/**
	 * <code>Mutator</code> ("setter") method for property <code>fieldErrors</code>.
	 */
	public void setFieldErrors(List<FieldError> fieldErrors) {
		this.fieldErrors = fieldErrors;
	}

	/** Appends a single global error message. */
	public void addError(String errorMsg) {
		this.errors.add(errorMsg);
	}

	/**
	 * @return the first global error, or {@code null} when there are none.
	 */
	public String getError() {
		return getErrors().isEmpty() ? null : getErrors().get(0);
	}

	/**
	 * Records the throwable's message, concrete class name and full stack
	 * trace (including nested causes, via commons-lang ExceptionUtils).
	 */
	public void setException(Throwable t) {
		this.exceptionMsg = t.getMessage();
		this.exceptionType = t.getClass().getName();
		this.exceptionStack = ExceptionUtils.getFullStackTrace(t);
	}

	/**
	 * <code>Accessor</code> ("getter") method for property <code>exceptionMsg</code>.
	 */
	public String getExceptionMsg() {
		return exceptionMsg;
	}

	/**
	 * <code>Accessor</code> ("getter") method for property <code>exceptionType</code>.
	 */
	public String getExceptionType() {
		return exceptionType;
	}

	/**
	 * <code>Accessor</code> ("getter") method for property <code>exceptionStack</code>.
	 */
	public String getExceptionStack() {
		return exceptionStack;
	}

	/**
	 * One field's validation failure: the field name plus its messages.
	 * Serialized as a {@code <fieldError>} element.
	 */
	@XStreamAlias("fieldError")
	public static class FieldError {

		private String fieldName;
		private List<String> messages;

		public FieldError(String fieldName, List<String> messages) {
			this.fieldName = fieldName;
			this.messages = messages;
		}

		public FieldError(String fieldName, String message) {
			this.fieldName = fieldName;
			this.messages = new ArrayList<String>();
			this.messages.add(message);
		}

		/**
		 * <code>Accessor</code> ("getter") method for property <code>fieldName</code>.
		 */
		public String getFieldName() {
			return fieldName;
		}

		/**
		 * <code>Mutator</code> ("setter") method for property <code>fieldName</code>.
		 */
		public void setFieldName(String fieldName) {
			this.fieldName = fieldName;
		}

		/**
		 * <code>Accessor</code> ("getter") method for property <code>messages</code>.
		 */
		public List<String> getMessages() {
			return messages;
		}

		/**
		 * <code>Mutator</code> ("setter") method for property <code>messages</code>.
		 */
		public void setMessages(List<String> messages) {
			this.messages = messages;
		}
	}

}
/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.http;

import io.netty.util.AsciiString;

/**
 * Standard HTTP header names.
 * <p>
 * These are all defined as lowercase to support HTTP/2 requirements while also not
 * violating HTTP/1.x requirements. New header names should always be lowercase.
 */
public final class HttpHeaderNames {
    /** {@code "accept"} */
    public static final AsciiString ACCEPT = AsciiString.cached("accept");
    /** {@code "accept-charset"} */
    public static final AsciiString ACCEPT_CHARSET = AsciiString.cached("accept-charset");
    /** {@code "accept-encoding"} */
    public static final AsciiString ACCEPT_ENCODING = AsciiString.cached("accept-encoding");
    /** {@code "accept-language"} */
    public static final AsciiString ACCEPT_LANGUAGE = AsciiString.cached("accept-language");
    /** {@code "accept-ranges"} */
    public static final AsciiString ACCEPT_RANGES = AsciiString.cached("accept-ranges");
    /** {@code "accept-patch"} */
    public static final AsciiString ACCEPT_PATCH = AsciiString.cached("accept-patch");
    /** {@code "access-control-allow-credentials"} */
    public static final AsciiString ACCESS_CONTROL_ALLOW_CREDENTIALS =
            AsciiString.cached("access-control-allow-credentials");
    /** {@code "access-control-allow-headers"} */
    public static final AsciiString ACCESS_CONTROL_ALLOW_HEADERS =
            AsciiString.cached("access-control-allow-headers");
    /** {@code "access-control-allow-methods"} */
    public static final AsciiString ACCESS_CONTROL_ALLOW_METHODS =
            AsciiString.cached("access-control-allow-methods");
    /** {@code "access-control-allow-origin"} */
    public static final AsciiString ACCESS_CONTROL_ALLOW_ORIGIN =
            AsciiString.cached("access-control-allow-origin");
    /** {@code "access-control-expose-headers"} */
    public static final AsciiString ACCESS_CONTROL_EXPOSE_HEADERS =
            AsciiString.cached("access-control-expose-headers");
    /** {@code "access-control-max-age"} */
    public static final AsciiString ACCESS_CONTROL_MAX_AGE = AsciiString.cached("access-control-max-age");
    /** {@code "access-control-request-headers"} */
    public static final AsciiString ACCESS_CONTROL_REQUEST_HEADERS =
            AsciiString.cached("access-control-request-headers");
    /** {@code "access-control-request-method"} */
    public static final AsciiString ACCESS_CONTROL_REQUEST_METHOD =
            AsciiString.cached("access-control-request-method");
    /** {@code "age"} */
    public static final AsciiString AGE = AsciiString.cached("age");
    /** {@code "allow"} */
    public static final AsciiString ALLOW = AsciiString.cached("allow");
    /** {@code "authorization"} */
    public static final AsciiString AUTHORIZATION = AsciiString.cached("authorization");
    /** {@code "cache-control"} */
    public static final AsciiString CACHE_CONTROL = AsciiString.cached("cache-control");
    /** {@code "connection"} */
    public static final AsciiString CONNECTION = AsciiString.cached("connection");
    /** {@code "content-base"} */
    public static final AsciiString CONTENT_BASE = AsciiString.cached("content-base");
    /** {@code "content-encoding"} */
    public static final AsciiString CONTENT_ENCODING = AsciiString.cached("content-encoding");
    /** {@code "content-language"} */
    public static final AsciiString CONTENT_LANGUAGE = AsciiString.cached("content-language");
    /** {@code "content-length"} */
    public static final AsciiString CONTENT_LENGTH = AsciiString.cached("content-length");
    /** {@code "content-location"} */
    public static final AsciiString CONTENT_LOCATION = AsciiString.cached("content-location");
    /** {@code "content-transfer-encoding"} */
    public static final AsciiString CONTENT_TRANSFER_ENCODING = AsciiString.cached("content-transfer-encoding");
    /** {@code "content-disposition"} */
    public static final AsciiString CONTENT_DISPOSITION = AsciiString.cached("content-disposition");
    /** {@code "content-md5"} */
    public static final AsciiString CONTENT_MD5 = AsciiString.cached("content-md5");
    /** {@code "content-range"} */
    public static final AsciiString CONTENT_RANGE = AsciiString.cached("content-range");
    /** {@code "content-security-policy"} */
    public static final AsciiString CONTENT_SECURITY_POLICY = AsciiString.cached("content-security-policy");
    /** {@code "content-type"} */
    public static final AsciiString CONTENT_TYPE = AsciiString.cached("content-type");
    /** {@code "cookie"} */
    public static final AsciiString COOKIE = AsciiString.cached("cookie");
    /** {@code "date"} */
    public static final AsciiString DATE = AsciiString.cached("date");
    /** {@code "dnt"} */
    public static final AsciiString DNT = AsciiString.cached("dnt");
    /** {@code "etag"} */
    public static final AsciiString ETAG = AsciiString.cached("etag");
    /** {@code "expect"} */
    public static final AsciiString EXPECT = AsciiString.cached("expect");
    /** {@code "expires"} */
    public static final AsciiString EXPIRES = AsciiString.cached("expires");
    /** {@code "from"} */
    public static final AsciiString FROM = AsciiString.cached("from");
    /** {@code "host"} */
    public static final AsciiString HOST = AsciiString.cached("host");
    /** {@code "if-match"} */
    public static final AsciiString IF_MATCH = AsciiString.cached("if-match");
    /** {@code "if-modified-since"} */
    public static final AsciiString IF_MODIFIED_SINCE = AsciiString.cached("if-modified-since");
    /** {@code "if-none-match"} */
    public static final AsciiString IF_NONE_MATCH = AsciiString.cached("if-none-match");
    /** {@code "if-range"} */
    public static final AsciiString IF_RANGE = AsciiString.cached("if-range");
    /** {@code "if-unmodified-since"} */
    public static final AsciiString IF_UNMODIFIED_SINCE = AsciiString.cached("if-unmodified-since");
    /**
     * @deprecated use {@link #CONNECTION}
     *
     * {@code "keep-alive"}
     */
    @Deprecated
    public static final AsciiString KEEP_ALIVE = AsciiString.cached("keep-alive");
    /** {@code "last-modified"} */
    public static final AsciiString LAST_MODIFIED = AsciiString.cached("last-modified");
    /** {@code "location"} */
    public static final AsciiString LOCATION = AsciiString.cached("location");
    /** {@code "max-forwards"} */
    public static final AsciiString MAX_FORWARDS = AsciiString.cached("max-forwards");
    /** {@code "origin"} */
    public static final AsciiString ORIGIN = AsciiString.cached("origin");
    /** {@code "pragma"} */
    public static final AsciiString PRAGMA = AsciiString.cached("pragma");
    /** {@code "proxy-authenticate"} */
    public static final AsciiString PROXY_AUTHENTICATE = AsciiString.cached("proxy-authenticate");
    /** {@code "proxy-authorization"} */
    public static final AsciiString PROXY_AUTHORIZATION = AsciiString.cached("proxy-authorization");
    /**
     * @deprecated use {@link #CONNECTION}
     *
     * {@code "proxy-connection"}
     */
    @Deprecated
    public static final AsciiString PROXY_CONNECTION = AsciiString.cached("proxy-connection");
    /** {@code "range"} */
    public static final AsciiString RANGE = AsciiString.cached("range");
    /** {@code "referer"} */
    public static final AsciiString REFERER = AsciiString.cached("referer");
    /** {@code "retry-after"} */
    public static final AsciiString RETRY_AFTER = AsciiString.cached("retry-after");
    /** {@code "sec-websocket-key1"} */
    public static final AsciiString SEC_WEBSOCKET_KEY1 = AsciiString.cached("sec-websocket-key1");
    /** {@code "sec-websocket-key2"} */
    public static final AsciiString SEC_WEBSOCKET_KEY2 = AsciiString.cached("sec-websocket-key2");
    /** {@code "sec-websocket-location"} */
    public static final AsciiString SEC_WEBSOCKET_LOCATION = AsciiString.cached("sec-websocket-location");
    /** {@code "sec-websocket-origin"} */
    public static final AsciiString SEC_WEBSOCKET_ORIGIN = AsciiString.cached("sec-websocket-origin");
    /** {@code "sec-websocket-protocol"} */
    public static final AsciiString SEC_WEBSOCKET_PROTOCOL = AsciiString.cached("sec-websocket-protocol");
    /** {@code "sec-websocket-version"} */
    public static final AsciiString SEC_WEBSOCKET_VERSION = AsciiString.cached("sec-websocket-version");
    /** {@code "sec-websocket-key"} */
    public static final AsciiString SEC_WEBSOCKET_KEY = AsciiString.cached("sec-websocket-key");
    /** {@code "sec-websocket-accept"} */
    public static final AsciiString SEC_WEBSOCKET_ACCEPT = AsciiString.cached("sec-websocket-accept");
    /** {@code "sec-websocket-extensions"} */
    // FIX(doc): the javadoc previously said "sec-websocket-protocol", which
    // contradicts the cached value on the next line.
    public static final AsciiString SEC_WEBSOCKET_EXTENSIONS = AsciiString.cached("sec-websocket-extensions");
    /** {@code "server"} */
    public static final AsciiString SERVER = AsciiString.cached("server");
    /** {@code "set-cookie"} */
    public static final AsciiString SET_COOKIE = AsciiString.cached("set-cookie");
    /** {@code "set-cookie2"} */
    public static final AsciiString SET_COOKIE2 = AsciiString.cached("set-cookie2");
    /** {@code "te"} */
    public static final AsciiString TE = AsciiString.cached("te");
    /** {@code "trailer"} */
    public static final AsciiString TRAILER = AsciiString.cached("trailer");
    /** {@code "transfer-encoding"} */
    public static final AsciiString TRANSFER_ENCODING = AsciiString.cached("transfer-encoding");
    /** {@code "upgrade"} */
    public static final AsciiString UPGRADE = AsciiString.cached("upgrade");
    /** {@code "upgrade-insecure-requests"} */
    public static final AsciiString UPGRADE_INSECURE_REQUESTS = AsciiString.cached("upgrade-insecure-requests");
    /** {@code "user-agent"} */
    public static final AsciiString USER_AGENT = AsciiString.cached("user-agent");
    /** {@code "vary"} */
    public static final AsciiString VARY = AsciiString.cached("vary");
    /** {@code "via"} */
    public static final AsciiString VIA = AsciiString.cached("via");
    /** {@code "warning"} */
    public static final AsciiString WARNING = AsciiString.cached("warning");
    /** {@code "websocket-location"} */
    public static final AsciiString WEBSOCKET_LOCATION = AsciiString.cached("websocket-location");
    /** {@code "websocket-origin"} */
    public static final AsciiString WEBSOCKET_ORIGIN = AsciiString.cached("websocket-origin");
    /** {@code "websocket-protocol"} */
    public static final AsciiString WEBSOCKET_PROTOCOL = AsciiString.cached("websocket-protocol");
    /** {@code "www-authenticate"} */
    public static final AsciiString WWW_AUTHENTICATE = AsciiString.cached("www-authenticate");
    /** {@code "x-frame-options"} */
    public static final AsciiString X_FRAME_OPTIONS = AsciiString.cached("x-frame-options");
    /** {@code "x-requested-with"} */
    public static final AsciiString X_REQUESTED_WITH = AsciiString.cached("x-requested-with");

    // Constants holder: never instantiated.
    private HttpHeaderNames() { }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.runners.dataflow.worker.counters;

import com.google.api.services.dataflow.model.CounterMetadata;
import com.google.api.services.dataflow.model.CounterStructuredName;
import com.google.api.services.dataflow.model.CounterStructuredNameAndMetadata;
import com.google.api.services.dataflow.model.CounterUpdate;
import com.google.api.services.dataflow.model.DistributionUpdate;
import com.google.api.services.dataflow.model.FloatingPointMean;
import com.google.api.services.dataflow.model.Histogram;
import com.google.api.services.dataflow.model.IntegerMean;
import com.google.api.services.dataflow.model.NameAndKind;
import com.google.api.services.dataflow.model.SplitInt64;
import org.apache.beam.runners.dataflow.worker.counters.Counter.CounterUpdateExtractor;
import org.apache.beam.runners.dataflow.worker.counters.CounterFactory.CounterMean;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;

/** Factory methods for extracting {@link CounterUpdate} updates from counters. */
public class DataflowCounterUpdateExtractor implements CounterUpdateExtractor<CounterUpdate> {

  // Stateless, so a single shared instance suffices.
  public static final DataflowCounterUpdateExtractor INSTANCE = new DataflowCounterUpdateExtractor();

  /** Should not be instantiated */
  private DataflowCounterUpdateExtractor() {}

  // Builds the common CounterUpdate skeleton: counter name (structured or
  // unstructured), aggregation kind, and the cumulative flag. Note the
  // inversion: the service field is "cumulative", callers pass "delta".
  private CounterUpdate initUpdate(CounterName name, boolean delta, String kind) {
    CounterUpdate counterUpdate = new CounterUpdate();
    if (name.isStructured()) {
      counterUpdate.setStructuredNameAndMetadata(getStructuredName(name, kind));
    } else {
      counterUpdate.setNameAndKind(getUnstructuredName(name, kind));
    }
    counterUpdate.setCumulative(!delta);
    return counterUpdate;
  }

  // Converts a structured CounterName into the service representation,
  // copying over whichever context fields are set.
  private CounterStructuredNameAndMetadata getStructuredName(CounterName name, String kind) {
    CounterMetadata metadata = new CounterMetadata();
    metadata.setKind(kind);

    CounterStructuredName structuredName = new CounterStructuredName();
    structuredName.setName(name.name());
    if (name.usesContextOriginalName()) {
      structuredName.setOriginalStepName(name.contextOriginalName());
    } else if (name.usesContextSystemName()) {
      structuredName.setComponentStepName(name.contextSystemName());
    }
    if (name.originalRequestingStepName() != null) {
      structuredName.setOriginalRequestingStepName(name.originalRequestingStepName());
    }
    // Index 0 is the default and is omitted; only positive indices are sent.
    if (name.inputIndex() != null && name.inputIndex() > 0) {
      structuredName.setInputIndex(name.inputIndex());
    }
    CounterStructuredNameAndMetadata nameAndMetadata = new CounterStructuredNameAndMetadata();
    nameAndMetadata.setMetadata(metadata);
    nameAndMetadata.setName(structuredName);
    return nameAndMetadata;
  }

  // Flat (non-structured) counter names carry just a name and a kind.
  private NameAndKind getUnstructuredName(CounterName name, String kind) {
    NameAndKind nameAndKind = new NameAndKind();
    nameAndKind.setName(name.name());
    nameAndKind.setKind(kind);
    return nameAndKind;
  }

  private CounterUpdate longUpdate(CounterName name, boolean delta, String kind, long value) {
    return initUpdate(name, delta, kind).setInteger(longToSplitInt(value));
  }

  private CounterUpdate doubleUpdate(CounterName name, boolean delta, String kind, Double value) {
    return initUpdate(name, delta, kind).setFloatingPoint(value);
  }

  private CounterUpdate boolUpdate(CounterName name, boolean delta, String kind, Boolean value) {
    return initUpdate(name, delta, kind).setBoolean(value);
  }

  @Override
  public CounterUpdate longSum(CounterName name, boolean delta, Long value) {
    return longUpdate(name, delta, "SUM", value);
  }

  @Override
  public CounterUpdate longMin(CounterName name, boolean delta, Long value) {
    return longUpdate(name, delta, "MIN", value);
  }

  @Override
  public CounterUpdate longMax(CounterName name, boolean delta, Long value) {
    return longUpdate(name, delta, "MAX", value);
  }

  @Override
  public CounterUpdate longMean(CounterName name, boolean delta, CounterMean<Long> value) {
    // An empty mean has no meaningful sum/count; report nothing.
    if (value.getCount() <= 0) {
      return null;
    }
    return initUpdate(name, delta, "MEAN")
        .setIntegerMean(
            new IntegerMean()
                .setSum(longToSplitInt(value.getAggregate()))
                .setCount(longToSplitInt(value.getCount())));
  }

  @Override
  public CounterUpdate intSum(CounterName name, boolean delta, Integer value) {
    return longUpdate(name, delta, "SUM", value);
  }

  @Override
  public CounterUpdate intMin(CounterName name, boolean delta, Integer value) {
    return longUpdate(name, delta, "MIN", value);
  }

  @Override
  public CounterUpdate intMax(CounterName name, boolean delta, Integer value) {
    return longUpdate(name, delta, "MAX", value);
  }

  @Override
  public CounterUpdate intMean(CounterName name, boolean delta, CounterMean<Integer> value) {
    // An empty mean has no meaningful sum/count; report nothing.
    if (value.getCount() <= 0) {
      return null;
    }
    return initUpdate(name, delta, "MEAN")
        .setIntegerMean(
            new IntegerMean()
                .setSum(longToSplitInt(value.getAggregate()))
                .setCount(longToSplitInt(value.getCount())));
  }

  @Override
  public CounterUpdate doubleSum(CounterName name, boolean delta, Double value) {
    return doubleUpdate(name, delta, "SUM", value);
  }

  @Override
  public CounterUpdate doubleMin(CounterName name, boolean delta, Double value) {
    return doubleUpdate(name, delta, "MIN", value);
  }

  @Override
  public CounterUpdate doubleMax(CounterName name, boolean delta, Double value) {
    return doubleUpdate(name, delta, "MAX", value);
  }

  @Override
  public CounterUpdate doubleMean(CounterName name, boolean delta, CounterMean<Double> value) {
    // An empty mean has no meaningful sum/count; report nothing.
    if (value.getCount() <= 0) {
      return null;
    }
    return initUpdate(name, delta, "MEAN")
        .setFloatingPointMean(
            new FloatingPointMean()
                .setSum(value.getAggregate())
                .setCount(longToSplitInt(value.getCount())));
  }

  @Override
  public CounterUpdate boolOr(CounterName name, boolean delta, Boolean value) {
    return boolUpdate(name, delta, "OR", value);
  }

  @Override
  public CounterUpdate boolAnd(CounterName name, boolean delta, Boolean value) {
    return boolUpdate(name, delta, "AND", value);
  }

  @Override
  public CounterUpdate distribution(
      CounterName name, boolean delta, CounterFactory.CounterDistribution value) {
    DistributionUpdate distributionUpdate =
        new DistributionUpdate()
            .setMin(longToSplitInt(value.getMin()))
            .setMax(longToSplitInt(value.getMax()))
            .setCount(longToSplitInt(value.getCount()))
            .setSum(longToSplitInt(value.getSum()))
            .setSumOfSquares(value.getSumOfSquares())
            .setHistogram(
                new Histogram()
                    .setFirstBucketOffset(value.getFirstBucketOffset())
                    .setBucketCounts(ImmutableList.copyOf(value.getBuckets())));
    return initUpdate(name, delta, "DISTRIBUTION").setDistribution(distributionUpdate);
  }

  /** Takes a long and returns a {@link SplitInt64}. */
  public static SplitInt64 longToSplitInt(long num) {
    SplitInt64 result = new SplitInt64();
    // Low word is kept unsigned via the mask; high word keeps the sign.
    result.setLowBits(num & 0xffffffffL);
    result.setHighBits((int) (num >> 32));
    return result;
  }

  /**
   * Takes a {@link SplitInt64} and returns a long. A SplitInt64 is composed of a uint32_t low_bits
   * and an int32_t high_bits.
   */
  public static long splitIntToLong(SplitInt64 splitInt) {
    return ((long) splitInt.getHighBits() << 32) | splitInt.getLowBits();
  }
}
package android.support.v4.media.session;

import android.app.PendingIntent;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.RemoteException;
import android.os.ResultReceiver;
import android.support.v4.media.MediaMetadataCompat;
import android.util.Log;
import android.view.KeyEvent;
import java.util.List;

/**
 * Pre-Lollipop MediaControllerCompat implementation that talks to the media
 * session through the IMediaSession AIDL binder.
 *
 * NOTE(review): this class is decompiler output (see the trailing JD-Core
 * banner); the access$NNN calls are synthetic accessors from the original
 * source. Every binder call follows the same pattern: delegate to mBinder
 * and, on RemoteException (session process died), log and return a benign
 * default.
 */
class MediaControllerCompat$MediaControllerImplBase
  implements MediaControllerCompat.MediaControllerImpl
{
  private IMediaSession mBinder;
  private MediaSessionCompat.Token mToken;
  private MediaControllerCompat.TransportControls mTransportControls;

  public MediaControllerCompat$MediaControllerImplBase(MediaSessionCompat.Token paramToken)
  {
    this.mToken = paramToken;
    this.mBinder = IMediaSession.Stub.asInterface((IBinder)paramToken.getToken());
  }

  /** Adjusts the session volume; silently dropped if the session is dead. */
  public void adjustVolume(int paramInt1, int paramInt2)
  {
    try
    {
      this.mBinder.adjustVolume(paramInt1, paramInt2, null);
      return;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in adjustVolume. " + localRemoteException);
    }
  }

  /**
   * Forwards a media button event to the session.
   *
   * @throws IllegalArgumentException if the event is null.
   */
  public boolean dispatchMediaButtonEvent(KeyEvent paramKeyEvent)
  {
    if (paramKeyEvent == null)
      throw new IllegalArgumentException("event may not be null.");
    try
    {
      this.mBinder.sendMediaButton(paramKeyEvent);
      // NOTE(review): the result of sendMediaButton is discarded and false is
      // always returned — decompilation artifact; confirm against upstream.
      return false;
    }
    catch (RemoteException localRemoteException)
    {
      // FIX: the decompiled source spun forever here
      // (`while (true) Log.e(...)`), flooding the log and never returning.
      // Log once and fall through to the failure return.
      Log.e("MediaControllerCompat", "Dead object in dispatchMediaButtonEvent. " + localRemoteException);
    }
    return false;
  }

  public Bundle getExtras()
  {
    try
    {
      Bundle localBundle = this.mBinder.getExtras();
      return localBundle;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getExtras. " + localRemoteException);
    }
    return null;
  }

  public long getFlags()
  {
    try
    {
      long l = this.mBinder.getFlags();
      return l;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getFlags. " + localRemoteException);
    }
    return 0L;
  }

  /** No framework MediaController exists pre-Lollipop. */
  public Object getMediaController()
  {
    return null;
  }

  public MediaMetadataCompat getMetadata()
  {
    try
    {
      MediaMetadataCompat localMediaMetadataCompat = this.mBinder.getMetadata();
      return localMediaMetadataCompat;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getMetadata. " + localRemoteException);
    }
    return null;
  }

  public String getPackageName()
  {
    try
    {
      String str = this.mBinder.getPackageName();
      return str;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getPackageName. " + localRemoteException);
    }
    return null;
  }

  /** Translates the session's ParcelableVolumeInfo into a PlaybackInfo. */
  public MediaControllerCompat.PlaybackInfo getPlaybackInfo()
  {
    try
    {
      ParcelableVolumeInfo localParcelableVolumeInfo = this.mBinder.getVolumeAttributes();
      MediaControllerCompat.PlaybackInfo localPlaybackInfo =
          new MediaControllerCompat.PlaybackInfo(localParcelableVolumeInfo.volumeType,
              localParcelableVolumeInfo.audioStream, localParcelableVolumeInfo.controlType,
              localParcelableVolumeInfo.maxVolume, localParcelableVolumeInfo.currentVolume);
      return localPlaybackInfo;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getPlaybackInfo. " + localRemoteException);
    }
    return null;
  }

  public PlaybackStateCompat getPlaybackState()
  {
    try
    {
      PlaybackStateCompat localPlaybackStateCompat = this.mBinder.getPlaybackState();
      return localPlaybackStateCompat;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getPlaybackState. " + localRemoteException);
    }
    return null;
  }

  public List<MediaSessionCompat.QueueItem> getQueue()
  {
    try
    {
      List localList = this.mBinder.getQueue();
      return localList;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getQueue. " + localRemoteException);
    }
    return null;
  }

  public CharSequence getQueueTitle()
  {
    try
    {
      CharSequence localCharSequence = this.mBinder.getQueueTitle();
      return localCharSequence;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getQueueTitle. " + localRemoteException);
    }
    return null;
  }

  public int getRatingType()
  {
    try
    {
      int i = this.mBinder.getRatingType();
      return i;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getRatingType. " + localRemoteException);
    }
    return 0;
  }

  public PendingIntent getSessionActivity()
  {
    try
    {
      PendingIntent localPendingIntent = this.mBinder.getLaunchPendingIntent();
      return localPendingIntent;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in getSessionActivity. " + localRemoteException);
    }
    return null;
  }

  /** Lazily creates and caches the transport-controls facade. */
  public MediaControllerCompat.TransportControls getTransportControls()
  {
    if (this.mTransportControls == null)
      this.mTransportControls = new MediaControllerCompat.TransportControlsBase(this.mBinder);
    return this.mTransportControls;
  }

  /**
   * Registers a callback and links it to the binder's death so it can be
   * notified when the session process dies; if the binder is already dead,
   * the callback's onSessionDestroyed fires immediately.
   */
  public void registerCallback(MediaControllerCompat.Callback paramCallback, Handler paramHandler)
  {
    if (paramCallback == null)
      throw new IllegalArgumentException("callback may not be null.");
    try
    {
      this.mBinder.asBinder().linkToDeath(paramCallback, 0);
      this.mBinder.registerCallbackListener((IMediaControllerCallback)MediaControllerCompat.Callback.access$400(paramCallback));
      MediaControllerCompat.Callback.access$500(paramCallback, paramHandler);
      MediaControllerCompat.Callback.access$302(paramCallback, true);
      return;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in registerCallback. " + localRemoteException);
      paramCallback.onSessionDestroyed();
    }
  }

  public void sendCommand(String paramString, Bundle paramBundle, ResultReceiver paramResultReceiver)
  {
    try
    {
      this.mBinder.sendCommand(paramString, paramBundle,
          new MediaSessionCompat.ResultReceiverWrapper(paramResultReceiver));
      return;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in sendCommand. " + localRemoteException);
    }
  }

  public void setVolumeTo(int paramInt1, int paramInt2)
  {
    try
    {
      this.mBinder.setVolumeTo(paramInt1, paramInt2, null);
      return;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in setVolumeTo. " + localRemoteException);
    }
  }

  /** Reverses registerCallback: unregisters and unlinks the death recipient. */
  public void unregisterCallback(MediaControllerCompat.Callback paramCallback)
  {
    if (paramCallback == null)
      throw new IllegalArgumentException("callback may not be null.");
    try
    {
      this.mBinder.unregisterCallbackListener((IMediaControllerCallback)MediaControllerCompat.Callback.access$400(paramCallback));
      this.mBinder.asBinder().unlinkToDeath(paramCallback, 0);
      MediaControllerCompat.Callback.access$302(paramCallback, false);
      return;
    }
    catch (RemoteException localRemoteException)
    {
      Log.e("MediaControllerCompat", "Dead object in unregisterCallback. " + localRemoteException);
    }
  }
}

/* Location:           E:\Progs\Dev\Android\Decompile\apktool\zssq\zssq-dex2jar.jar
 * Qualified Name:     android.support.v4.media.session.MediaControllerCompat.MediaControllerImplBase
 * JD-Core Version:    0.6.0
 */
// Copyright (C) 2012 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.extensions.registration;

import com.google.inject.Binder;
import com.google.inject.Key;
import com.google.inject.Provider;
import com.google.inject.ProvisionException;
import com.google.inject.Scopes;
import com.google.inject.TypeLiteral;
import com.google.inject.binder.LinkedBindingBuilder;
import com.google.inject.util.Providers;
import com.google.inject.util.Types;

import java.util.concurrent.atomic.AtomicReference;

/**
 * A single item that can be modified as plugins reload.
 * <p>
 * DynamicItems are always mapped as singletons in Guice. Items store a Provider
 * internally, and resolve the provider to an instance on demand. This enables
 * registrations to decide between singleton and non-singleton members. If
 * multiple plugins try to provide the same Provider, an exception is thrown.
 * <p>
 * Thread-safety: the current registration is held in an {@link AtomicReference}
 * and swapped with compare-and-set loops, so {@link #get()} and the various
 * {@code set(...)} methods may be called concurrently.
 */
public class DynamicItem<T> {
  /**
   * Pair of provider implementation and plugin providing it.
   * Immutable; a new instance is created for every registration.
   */
  static class NamedProvider<T> {
    final Provider<T> impl;
    final String pluginName;

    NamedProvider(Provider<T> provider, String pluginName) {
      this.impl = provider;
      this.pluginName = pluginName;
    }
  }

  /**
   * Declare a singleton {@code DynamicItem<T>} with a binder.
   * <p>
   * Items must be defined in a Guice module before they can be bound:
   * <pre>
   *   DynamicItem.itemOf(binder(), Interface.class);
   *   DynamicItem.bind(binder(), Interface.class).to(Impl.class);
   * </pre>
   *
   * @param binder a new binder created in the module.
   * @param member type of entry to store.
   */
  public static <T> void itemOf(Binder binder, Class<T> member) {
    itemOf(binder, TypeLiteral.get(member));
  }

  /**
   * Declare a singleton {@code DynamicItem<T>} with a binder.
   * <p>
   * Items must be defined in a Guice module before they can be bound:
   * <pre>
   * {@code
   * DynamicSet.itemOf(binder(), new TypeLiteral<Thing<Foo>>() {});
   * }
   * </pre>
   *
   * @param binder a new binder created in the module.
   * @param member type of entry to store.
   */
  public static <T> void itemOf(Binder binder, TypeLiteral<T> member) {
    // Key is for DynamicItem<T>, parameterized with the member type; the raw
    // Key.get(...) result must be narrowed, hence the suppressed warning.
    @SuppressWarnings("unchecked")
    Key<DynamicItem<T>> key = (Key<DynamicItem<T>>) Key.get(
        Types.newParameterizedType(DynamicItem.class, member.getType()));
    binder.bind(key)
      .toProvider(new DynamicItemProvider<>(member, key))
      .in(Scopes.SINGLETON);
  }

  /**
   * Bind one implementation as the item using a unique annotation.
   *
   * @param binder a new binder created in the module.
   * @param type type of entry to store.
   * @return a binder to continue configuring the new item.
   */
  public static <T> LinkedBindingBuilder<T> bind(Binder binder, Class<T> type) {
    return bind(binder, TypeLiteral.get(type));
  }

  /**
   * Bind one implementation as the item.
   *
   * @param binder a new binder created in the module.
   * @param type type of entry to store.
   * @return a binder to continue configuring the new item.
   */
  public static <T> LinkedBindingBuilder<T> bind(Binder binder, TypeLiteral<T> type) {
    return binder.bind(type);
  }

  // Guice key this item was declared under; used in conflict error messages.
  private final Key<DynamicItem<T>> key;
  // Current registration, or null when nothing has been bound yet.
  private final AtomicReference<NamedProvider<T>> ref;

  DynamicItem(Key<DynamicItem<T>> key, Provider<T> provider, String pluginName) {
    NamedProvider<T> in = null;
    if (provider != null) {
      in = new NamedProvider<>(provider, pluginName);
    }
    this.key = key;
    this.ref = new AtomicReference<>(in);
  }

  /**
   * Get the configured item, or null.
   *
   * @return the configured item instance; null if no implementation has been
   *         bound to the item. This is common if no plugin registered an
   *         implementation for the type.
   */
  public T get() {
    NamedProvider<T> item = ref.get();
    // Resolve the provider on every call, so non-singleton registrations can
    // return a fresh instance each time.
    return item != null ? item.impl.get() : null;
  }

  /**
   * Set the element to provide.
   *
   * @param item the item to use. Must not be null.
   * @param pluginName the name of the plugin providing the item.
   * @return handle to remove the item at a later point in time.
   */
  public RegistrationHandle set(T item, String pluginName) {
    return set(Providers.of(item), pluginName);
  }

  /**
   * Set the element to provide.
   *
   * @param impl the item to add to the collection. Must not be null.
   * @param pluginName name of the source providing the implementation.
   * @return handle to remove the item at a later point in time.
   */
  public RegistrationHandle set(Provider<T> impl, String pluginName) {
    final NamedProvider<T> item = new NamedProvider<>(impl, pluginName);
    NamedProvider<T> old = null;
    // CAS loop: keep retrying until the reference flips from the last observed
    // value to the new item.  Only the core-provided default (pluginName
    // "gerrit") may be displaced; any other existing provider is a conflict.
    while (!ref.compareAndSet(old, item)) {
      old = ref.get();
      if (old != null && !"gerrit".equals(old.pluginName)) {
        throw new ProvisionException(String.format(
            "%s already provided by %s, ignoring plugin %s",
            key.getTypeLiteral(), old.pluginName, pluginName));
      }
    }

    // Capture the displaced registration so remove() can restore it.
    final NamedProvider<T> defaultItem = old;
    return new RegistrationHandle() {
      @Override
      public void remove() {
        // Restore the previous value only if our item is still the current one.
        ref.compareAndSet(item, defaultItem);
      }
    };
  }

  /**
   * Set the element that may be hot-replaceable in the future.
   *
   * @param key unique description from the item's Guice binding. This can be
   *        later obtained from the registration handle to facilitate matching
   *        with the new equivalent instance during a hot reload.
   * @param impl the item to set as our value right now. Must not be null.
   * @param pluginName the name of the plugin providing the item.
   * @return a handle that can remove this item later, or hot-swap the item.
   */
  public ReloadableRegistrationHandle<T> set(Key<T> key, Provider<T> impl,
      String pluginName) {
    final NamedProvider<T> item = new NamedProvider<>(impl, pluginName);
    NamedProvider<T> old = null;
    while (!ref.compareAndSet(old, item)) {
      old = ref.get();
      if (old != null
          && !"gerrit".equals(old.pluginName)
          && !pluginName.equals(old.pluginName)) {
        // We allow to replace:
        // 1. Gerrit core items, e.g. websession cache
        //    can be replaced by plugin implementation
        // 2. Reload of current plugin
        throw new ProvisionException(String.format(
            "%s already provided by %s, ignoring plugin %s",
            this.key.getTypeLiteral(), old.pluginName, pluginName));
      }
    }
    return new ReloadableHandle(key, item, old);
  }

  /**
   * Handle for a registration made via {@link #set(Key, Provider, String)};
   * supports hot-swapping the provider during a plugin reload.
   */
  private class ReloadableHandle implements ReloadableRegistrationHandle<T> {
    // Guice binding key of the registered implementation.
    private final Key<T> handleKey;
    // Registration created by this handle.
    private final NamedProvider<T> item;
    // Registration that was displaced and should be restored on remove().
    private final NamedProvider<T> defaultItem;

    ReloadableHandle(Key<T> handleKey, NamedProvider<T> item,
        NamedProvider<T> defaultItem) {
      this.handleKey = handleKey;
      this.item = item;
      this.defaultItem = defaultItem;
    }

    @Override
    public Key<T> getKey() {
      return handleKey;
    }

    @Override
    public void remove() {
      // Only restore the default if our registration is still current.
      ref.compareAndSet(item, defaultItem);
    }

    @Override
    public ReloadableHandle replace(Key<T> newKey, Provider<T> newItem) {
      NamedProvider<T> n = new NamedProvider<>(newItem, item.pluginName);
      // Swap succeeds only if our item is still current; otherwise another
      // registration won the race and null signals the caller to give up.
      if (ref.compareAndSet(item, n)) {
        return new ReloadableHandle(newKey, n, defaultItem);
      }
      return null;
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.gearpump.streaming.kafka.util;

import kafka.api.OffsetRequest;
import kafka.common.TopicAndPartition;
import kafka.consumer.ConsumerConfig;
import org.apache.gearpump.streaming.kafka.lib.source.DefaultKafkaMessageDecoder;
import org.apache.gearpump.streaming.kafka.lib.util.KafkaClient;
import org.apache.gearpump.streaming.kafka.lib.source.grouper.DefaultPartitionGrouper;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;

import java.io.Serializable;
import java.util.Properties;

/**
 * kafka specific configs
 * <p>
 * Wraps a {@link ConfigDef}-validated property set shared by the Gearpump
 * Kafka source, producer and checkpoint store.  The getXxxConfig() accessors
 * derive role-specific {@link Properties} by stripping keys that do not apply
 * to that role.  Serializable so it can be shipped with a Gearpump task.
 */
public class KafkaConfig extends AbstractConfig implements Serializable {

  // Schema of all recognized keys: types, defaults, ranges and doc strings.
  private static final ConfigDef CONFIG;

  public static final String ZOOKEEPER_CONNECT_CONFIG = "zookeeper.connect";
  private static final String ZOOKEEPER_CONNECT_DOC = "Zookeeper connect string for Kafka topics management.";

  public static final String BOOTSTRAP_SERVERS_CONFIG = "bootstrap.servers";
  // NOTE(review): this literal intentionally carries over a raw line break
  // from the original source; preserved verbatim.
  public static final String BOOTSTRAP_SERVERS_DOC = "A list of host/port pairs to use for "
      + "establishing the initial connection to the Kafka cluster. 
" + "The client will make use of all servers irrespective of which servers are specified "
      + "here for bootstrapping&mdash;this list only impacts the initial hosts used to discover "
      + "the full set of servers. This list should be in the form "
      + "<code>host1:port1,host2:port2,...</code>. Since these servers are just used for the "
      + "initial connection to discover the full cluster membership (which may change dynamically),"
      + " this list need not contain the full set of servers (you may want more than one, though, "
      + "in case a server is down).";

  public static final String CLIENT_ID_CONFIG = "client.id";
  public static final String CLIENT_ID_DOC = "An id string to pass to the server when making "
      + "requests. The purpose of this is to be able to track the source of requests beyond just "
      + "ip/port by allowing a logical application name to be included in server-side request "
      + "logging.";

  public static final String GROUP_ID_CONFIG = "group.id";
  public static final String GROUP_ID_DOC = "A string that uniquely identifies a set of consumers within the same consumer group";

  public static final String ENABLE_AUTO_COMMIT_CONFIG = "auto.commit.enable";
  public static final String ENABLE_AUTO_COMMIT_DOC = "If true the consumer's offset will be periodically committed in the background.";

  /** KafkaSource specific configs */
  public static final String CONSUMER_START_OFFSET_CONFIG = "consumer.start.offset";
  private static final String CONSUMER_START_OFFSET_DOC = "Kafka offset to start consume from. "
      + "This will be overwritten when checkpoint recover takes effect.";

  public static final String FETCH_THRESHOLD_CONFIG = "fetch.threshold";
  // NOTE(review): literal below also contains a raw line break; preserved verbatim.
  private static final String FETCH_THRESHOLD_DOC = "Kafka messages are fetched asynchronously "
      + "and put onto a internal queue. When the number of messages in the queue hit the threshold,"
      + "the fetch thread stops fetching, and goes to sleep. 
It starts fetching again when the" + "number falls below the threshold";

  public static final String FETCH_SLEEP_MS_CONFIG = "fetch.sleep.ms";
  private static final String FETCH_SLEEP_MS_DOC = "The amount of time to sleep when hitting fetch.threshold.";

  public static final String MESSAGE_DECODER_CLASS_CONFIG = "message.decoder.class";
  private static final String MESSAGE_DECODER_CLASS_DOC = "Message decoder class that implements the <code>MessageDecoder</code> interface.";

  public static final String PARTITION_GROUPER_CLASS_CONFIG = "partition.grouper";
  private static final String PARTITION_GROUPER_CLASS_DOC = "Partition grouper class that implements the <code>KafkaGrouper</code> interface.";

  public static final String REPLICATION_FACTOR_CONFIG = "replication.factor";
  public static final String REPLICATION_FACTOR_DOC = "The replication factor for checkpoint store topic.";

  public static final String CHECKPOINT_STORE_NAME_PREFIX_CONFIG = "checkpoint.store.name.prefix";
  public static final String CHECKPOINT_STORE_NAME_PREFIX_DOC = "Name prefix for checkpoint "
      + "store whose name will be of the form, namePrefix-sourceTopic-partitionId";

  static {
    CONFIG = new ConfigDef()
        .define(BOOTSTRAP_SERVERS_CONFIG, // required with no default value
            ConfigDef.Type.LIST,
            ConfigDef.Importance.HIGH,
            BOOTSTRAP_SERVERS_DOC)
        .define(CLIENT_ID_CONFIG,
            ConfigDef.Type.STRING,
            "",
            ConfigDef.Importance.HIGH,
            CLIENT_ID_DOC)
        .define(GROUP_ID_CONFIG,
            ConfigDef.Type.STRING,
            "",
            ConfigDef.Importance.HIGH,
            GROUP_ID_DOC)
        .define(ZOOKEEPER_CONNECT_CONFIG,
            ConfigDef.Type.STRING,
            "",
            ConfigDef.Importance.HIGH,
            ZOOKEEPER_CONNECT_DOC)
        .define(REPLICATION_FACTOR_CONFIG,
            ConfigDef.Type.INT,
            1,
            ConfigDef.Range.atLeast(1),
            ConfigDef.Importance.MEDIUM,
            REPLICATION_FACTOR_DOC)
        .define(MESSAGE_DECODER_CLASS_CONFIG,
            ConfigDef.Type.CLASS,
            DefaultKafkaMessageDecoder.class.getName(),
            ConfigDef.Importance.MEDIUM,
            MESSAGE_DECODER_CLASS_DOC)
        .define(PARTITION_GROUPER_CLASS_CONFIG,
            ConfigDef.Type.CLASS,
            DefaultPartitionGrouper.class.getName(),
            ConfigDef.Importance.MEDIUM,
            PARTITION_GROUPER_CLASS_DOC)
        .define(FETCH_THRESHOLD_CONFIG,
            ConfigDef.Type.INT,
            10000,
            ConfigDef.Range.atLeast(0),
            ConfigDef.Importance.LOW,
            FETCH_THRESHOLD_DOC)
        .define(FETCH_SLEEP_MS_CONFIG,
            ConfigDef.Type.LONG,
            100,
            ConfigDef.Range.atLeast(0),
            ConfigDef.Importance.LOW,
            FETCH_SLEEP_MS_DOC)
        // Default is OffsetRequest.EarliestTime(); the range bound of -2
        // presumably admits Kafka's sentinel offsets (earliest/latest) --
        // TODO confirm against the Kafka OffsetRequest constants.
        .define(CONSUMER_START_OFFSET_CONFIG,
            ConfigDef.Type.LONG,
            OffsetRequest.EarliestTime(),
            ConfigDef.Range.atLeast(-2),
            ConfigDef.Importance.MEDIUM,
            CONSUMER_START_OFFSET_DOC)
        .define(ENABLE_AUTO_COMMIT_CONFIG,
            ConfigDef.Type.BOOLEAN,
            false,
            ConfigDef.Importance.MEDIUM,
            ENABLE_AUTO_COMMIT_DOC)
        .define(CHECKPOINT_STORE_NAME_PREFIX_CONFIG,
            ConfigDef.Type.STRING,
            "",
            ConfigDef.Importance.HIGH,
            CHECKPOINT_STORE_NAME_PREFIX_DOC);
  }

  // Validates props against CONFIG; unknown keys are retained in originals().
  public KafkaConfig(Properties props) {
    super(CONFIG, props);
  }

  // Per-partition suffix ("topic-partition") appended to the checkpoint store
  // name prefix.
  public static String getCheckpointStoreNameSuffix(TopicAndPartition tp) {
    return tp.topic() + "-" + tp.partition();
  }

  // Copy of the original properties with source- and consumer-only keys
  // stripped, suitable for constructing a Kafka producer.
  public Properties getProducerConfig() {
    Properties props = new Properties();
    props.putAll(this.originals());
    // remove source properties
    removeSourceSpecificConfigs(props);
    // remove consumer properties
    removeConsumerSpecificConfigs(props);
    return props;
  }

  // Full checkpoint-store topic name: "<prefix>-<suffix>".
  public String getKafkaStoreTopic(String suffix) {
    return getString(CHECKPOINT_STORE_NAME_PREFIX_CONFIG) + "-" + suffix;
  }

  public KafkaClient.KafkaClientFactory getKafkaClientFactory() {
    return KafkaClient.factory();
  }

  // Copy of the original properties with source- and producer-only keys
  // stripped, wrapped in a kafka.consumer.ConsumerConfig.
  public ConsumerConfig getConsumerConfig() {
    Properties props = new Properties();
    props.putAll(this.originals());
    // remove source properties
    removeSourceSpecificConfigs(props);
    // remove producer properties
    removeProducerSpecificConfigs(props);
    // set consumer default property values
    if (!props.containsKey(GROUP_ID_CONFIG)) {
      // Fall back to the ConfigDef default ("") when the user set no group id.
      props.put(GROUP_ID_CONFIG, getString(GROUP_ID_CONFIG));
    }
    return new ConsumerConfig(props);
  }

  // Keys consumed only by the Gearpump KafkaSource itself; meaningless to the
  // Kafka client libraries and therefore stripped before hand-off.
  private void removeSourceSpecificConfigs(Properties props) {
    props.remove(FETCH_SLEEP_MS_CONFIG);
    props.remove(FETCH_THRESHOLD_CONFIG);
    props.remove(PARTITION_GROUPER_CLASS_CONFIG);
    props.remove(MESSAGE_DECODER_CLASS_CONFIG);
    props.remove(REPLICATION_FACTOR_CONFIG);
    props.remove(CHECKPOINT_STORE_NAME_PREFIX_CONFIG);
  }

  // Keys meaningful only to the consumer; stripped from producer configs.
  private void removeConsumerSpecificConfigs(Properties props) {
    props.remove(ZOOKEEPER_CONNECT_CONFIG);
    props.remove(GROUP_ID_CONFIG);
  }

  // Keys meaningful only to the producer; stripped from consumer configs.
  private void removeProducerSpecificConfigs(Properties props) {
    props.remove(BOOTSTRAP_SERVERS_CONFIG);
  }

  /** Serializable factory so tasks can build a KafkaConfig after deployment. */
  public static class KafkaConfigFactory implements Serializable {
    public KafkaConfig getKafkaConfig(Properties props) {
      return new KafkaConfig(props);
    }
  }
}
/* * Copyright (c) 2011-2016, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.abst.feature.associate; import boofcv.struct.feature.AssociatedIndex; import boofcv.struct.feature.MatchScoreType; import org.ddogleg.struct.FastQueue; import org.junit.Test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * Standard tests for implementations of AssociateDescription * * @author Peter Abeles */ public abstract class StandardAssociateDescriptionChecks<Desc> { FastQueue<Desc> listSrc; FastQueue<Desc> listDst; protected StandardAssociateDescriptionChecks( Class<Desc> descType ) { listSrc = new FastQueue<>(descType, false); listDst = new FastQueue<>(descType, false); } public void allTests() { checkScoreType(); basicTests(); checkDefaultThreshold(); checkSetThreshold(); uniqueSource(); uniqueDestination(); } /** * Match error must be less than the specified euclidean error */ public abstract AssociateDescription<Desc> createAlg(); protected void init() { listSrc.reset(); listDst.reset(); } @Test public void checkScoreType() { AssociateDescription<Desc> alg = createAlg(); assertTrue("Test are designed for norm error",MatchScoreType.NORM_ERROR == alg.getScoreType()); } /** * Basic tests where there should be unique association in both direction */ @Test public void basicTests() { // test the cases where the number of matches is more than and less 
than the maximum performBasicTest(20); performBasicTest(40); } private void performBasicTest(int numFeatures ) { init(); AssociateDescription<Desc> alg = createAlg(); alg.setThreshold(0.01); for( int i = 0; i < numFeatures; i++ ) { listSrc.add(c(i+1) ); listDst.add(c(i + 1 + 0.001)); } alg.setSource(listSrc); alg.setDestination(listDst); alg.associate(); FastQueue<AssociatedIndex> matches = alg.getMatches(); // Every features should be associated assertEquals(numFeatures,matches.size()); // see if everything is assigned as expected for( int i = 0; i < matches.size(); i++ ) { int numMatches = 0; for( int j = 0; j < matches.size(); j++ ) { AssociatedIndex a = matches.get(j); if( i == a.src ) { assertEquals(a.src,a.dst); assertTrue(a.fitScore != 0 ); numMatches++; } } assertEquals(1,numMatches); } // in this example there should be perfect unambiguous associations assertEquals(0,alg.getUnassociatedSource().size); assertEquals(0,alg.getUnassociatedDestination().size); } /** * The default threshold should allow for all matches to work */ @Test public void checkDefaultThreshold() { init(); listSrc.add( c(1) ); listDst.add( c(100) ); AssociateDescription<Desc> alg = createAlg(); alg.setSource(listSrc); alg.setDestination(listDst); alg.associate(); assertEquals(1,alg.getMatches().size); } /** * Checks to see if changing the threshold increases or reduces the number of associations */ @Test public void checkSetThreshold() { init(); listSrc.add( c(1) ); listDst.add( c(1+0.1) ); AssociateDescription<Desc> alg = createAlg(); alg.setSource(listSrc); alg.setDestination(listDst); // no matches should be found since the error is too large alg.setThreshold(0.01); alg.associate(); assertEquals(0,alg.getMatches().size); // Test edge case for threshold. 
If it is exactly the distance away then should be included alg.setThreshold(1.1-1); alg.associate(); assertEquals(1,alg.getMatches().size); // Threshold is greater than the assoc error alg.setThreshold(0.2); alg.associate(); assertEquals(1,alg.getMatches().size); // Test no threshold case alg.setThreshold(Double.MAX_VALUE); alg.associate(); assertEquals(1,alg.getMatches().size); } @Test public void checkUnassociatedLists() { init(); AssociateDescription<Desc> alg = createAlg(); listSrc.add( c(1) ); listSrc.add( c(2) ); listSrc.add( c(3) ); listDst.add( c(1+0.1) ); listDst.add( c(2+0.05) ); listDst.add( c(3+0.05) ); listDst.add( c(20) ); // can't be paired with anything // set threshold so that one pair won't be considered alg.setThreshold(0.07); alg.setSource(listSrc); alg.setDestination(listDst); alg.associate(); assertEquals(2,alg.getMatches().size); assertEquals(1,alg.getUnassociatedSource().size); assertEquals(2,alg.getUnassociatedDestination().size); } @Test public void uniqueSource() { init(); listSrc.add(c(1)); listDst.add( c(1) ); listDst.add( c(1.001) ); AssociateDescription<Desc> alg = createAlg(); alg.setSource(listSrc); alg.setDestination(listDst); alg.associate(); if( alg.uniqueSource() ) { assertEquals(1,numMatchesSrc(0,alg.getMatches())); } else { // both dst will match up the first src assertEquals(2,numMatchesSrc(0,alg.getMatches())); } } @Test public void uniqueDestination() { init(); listSrc.add( c(1) ); listSrc.add( c(1.001) ); listDst.add( c(1) ); AssociateDescription<Desc> alg = createAlg(); alg.setSource(listSrc); alg.setDestination(listDst); alg.associate(); if( alg.uniqueDestination() ) { assertEquals(1,numMatchesDst(0, alg.getMatches())); } else { // both src will match up the first dst assertEquals(2,numMatchesDst(0, alg.getMatches())); } } private int numMatchesSrc( int index , FastQueue<AssociatedIndex> list ) { int ret = 0; for( AssociatedIndex l : list.toList() ) { if( l.src == index ) ret++; } return ret; } private int numMatchesDst( 
int index , FastQueue<AssociatedIndex> list ) { int ret = 0; for( AssociatedIndex l : list.toList() ) { if( l.dst == index ) ret++; } return ret; } /** * Creates a description with the specified value */ protected abstract Desc c( double value ); }
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/websecurityscanner/v1/finding_addon.proto package com.google.cloud.websecurityscanner.v1; /** * * * <pre> * Information regarding any resource causing the vulnerability such * as JavaScript sources, image, audio files, etc. * </pre> * * Protobuf type {@code google.cloud.websecurityscanner.v1.ViolatingResource} */ public final class ViolatingResource extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.websecurityscanner.v1.ViolatingResource) ViolatingResourceOrBuilder { private static final long serialVersionUID = 0L; // Use ViolatingResource.newBuilder() to construct. 
private ViolatingResource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ViolatingResource() { contentType_ = ""; resourceUrl_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ViolatingResource(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ViolatingResource( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { java.lang.String s = input.readStringRequireUtf8(); contentType_ = s; break; } case 18: { java.lang.String s = input.readStringRequireUtf8(); resourceUrl_ = s; break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.websecurityscanner.v1.FindingAddonProto .internal_static_google_cloud_websecurityscanner_v1_ViolatingResource_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.websecurityscanner.v1.FindingAddonProto 
.internal_static_google_cloud_websecurityscanner_v1_ViolatingResource_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.websecurityscanner.v1.ViolatingResource.class, com.google.cloud.websecurityscanner.v1.ViolatingResource.Builder.class); } public static final int CONTENT_TYPE_FIELD_NUMBER = 1; private volatile java.lang.Object contentType_; /** * * * <pre> * The MIME type of this resource. * </pre> * * <code>string content_type = 1;</code> * * @return The contentType. */ @java.lang.Override public java.lang.String getContentType() { java.lang.Object ref = contentType_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); contentType_ = s; return s; } } /** * * * <pre> * The MIME type of this resource. * </pre> * * <code>string content_type = 1;</code> * * @return The bytes for contentType. */ @java.lang.Override public com.google.protobuf.ByteString getContentTypeBytes() { java.lang.Object ref = contentType_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); contentType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESOURCE_URL_FIELD_NUMBER = 2; private volatile java.lang.Object resourceUrl_; /** * * * <pre> * URL of this violating resource. * </pre> * * <code>string resource_url = 2;</code> * * @return The resourceUrl. */ @java.lang.Override public java.lang.String getResourceUrl() { java.lang.Object ref = resourceUrl_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceUrl_ = s; return s; } } /** * * * <pre> * URL of this violating resource. 
* </pre> * * <code>string resource_url = 2;</code> * * @return The bytes for resourceUrl. */ @java.lang.Override public com.google.protobuf.ByteString getResourceUrlBytes() { java.lang.Object ref = resourceUrl_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourceUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(contentType_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, contentType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceUrl_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, resourceUrl_); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(contentType_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, contentType_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resourceUrl_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, resourceUrl_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.websecurityscanner.v1.ViolatingResource)) { return super.equals(obj); } com.google.cloud.websecurityscanner.v1.ViolatingResource other = 
(com.google.cloud.websecurityscanner.v1.ViolatingResource) obj; if (!getContentType().equals(other.getContentType())) return false; if (!getResourceUrl().equals(other.getResourceUrl())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + CONTENT_TYPE_FIELD_NUMBER; hash = (53 * hash) + getContentType().hashCode(); hash = (37 * hash) + RESOURCE_URL_FIELD_NUMBER; hash = (53 * hash) + getResourceUrl().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static 
Builder newBuilder( com.google.cloud.websecurityscanner.v1.ViolatingResource prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Information regarding any resource causing the vulnerability such * as JavaScript sources, image, audio files, etc. * </pre> * * Protobuf type {@code google.cloud.websecurityscanner.v1.ViolatingResource} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.websecurityscanner.v1.ViolatingResource) com.google.cloud.websecurityscanner.v1.ViolatingResourceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.websecurityscanner.v1.FindingAddonProto .internal_static_google_cloud_websecurityscanner_v1_ViolatingResource_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.websecurityscanner.v1.FindingAddonProto .internal_static_google_cloud_websecurityscanner_v1_ViolatingResource_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.websecurityscanner.v1.ViolatingResource.class, com.google.cloud.websecurityscanner.v1.ViolatingResource.Builder.class); } // Construct using com.google.cloud.websecurityscanner.v1.ViolatingResource.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if 
(com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); contentType_ = ""; resourceUrl_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.websecurityscanner.v1.FindingAddonProto .internal_static_google_cloud_websecurityscanner_v1_ViolatingResource_descriptor; } @java.lang.Override public com.google.cloud.websecurityscanner.v1.ViolatingResource getDefaultInstanceForType() { return com.google.cloud.websecurityscanner.v1.ViolatingResource.getDefaultInstance(); } @java.lang.Override public com.google.cloud.websecurityscanner.v1.ViolatingResource build() { com.google.cloud.websecurityscanner.v1.ViolatingResource result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.websecurityscanner.v1.ViolatingResource buildPartial() { com.google.cloud.websecurityscanner.v1.ViolatingResource result = new com.google.cloud.websecurityscanner.v1.ViolatingResource(this); result.contentType_ = contentType_; result.resourceUrl_ = resourceUrl_; onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.websecurityscanner.v1.ViolatingResource) { return mergeFrom((com.google.cloud.websecurityscanner.v1.ViolatingResource) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.websecurityscanner.v1.ViolatingResource other) { if (other == com.google.cloud.websecurityscanner.v1.ViolatingResource.getDefaultInstance()) return this; if (!other.getContentType().isEmpty()) { contentType_ = other.contentType_; onChanged(); } if (!other.getResourceUrl().isEmpty()) { resourceUrl_ = other.resourceUrl_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.websecurityscanner.v1.ViolatingResource parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.websecurityscanner.v1.ViolatingResource) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private java.lang.Object contentType_ = ""; /** * * * <pre> * The MIME type of this resource. * </pre> * * <code>string content_type = 1;</code> * * @return The contentType. 
*/ public java.lang.String getContentType() { java.lang.Object ref = contentType_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); contentType_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The MIME type of this resource. * </pre> * * <code>string content_type = 1;</code> * * @return The bytes for contentType. */ public com.google.protobuf.ByteString getContentTypeBytes() { java.lang.Object ref = contentType_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); contentType_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The MIME type of this resource. * </pre> * * <code>string content_type = 1;</code> * * @param value The contentType to set. * @return This builder for chaining. */ public Builder setContentType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } contentType_ = value; onChanged(); return this; } /** * * * <pre> * The MIME type of this resource. * </pre> * * <code>string content_type = 1;</code> * * @return This builder for chaining. */ public Builder clearContentType() { contentType_ = getDefaultInstance().getContentType(); onChanged(); return this; } /** * * * <pre> * The MIME type of this resource. * </pre> * * <code>string content_type = 1;</code> * * @param value The bytes for contentType to set. * @return This builder for chaining. */ public Builder setContentTypeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); contentType_ = value; onChanged(); return this; } private java.lang.Object resourceUrl_ = ""; /** * * * <pre> * URL of this violating resource. * </pre> * * <code>string resource_url = 2;</code> * * @return The resourceUrl. 
*/ public java.lang.String getResourceUrl() { java.lang.Object ref = resourceUrl_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resourceUrl_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * URL of this violating resource. * </pre> * * <code>string resource_url = 2;</code> * * @return The bytes for resourceUrl. */ public com.google.protobuf.ByteString getResourceUrlBytes() { java.lang.Object ref = resourceUrl_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resourceUrl_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * URL of this violating resource. * </pre> * * <code>string resource_url = 2;</code> * * @param value The resourceUrl to set. * @return This builder for chaining. */ public Builder setResourceUrl(java.lang.String value) { if (value == null) { throw new NullPointerException(); } resourceUrl_ = value; onChanged(); return this; } /** * * * <pre> * URL of this violating resource. * </pre> * * <code>string resource_url = 2;</code> * * @return This builder for chaining. */ public Builder clearResourceUrl() { resourceUrl_ = getDefaultInstance().getResourceUrl(); onChanged(); return this; } /** * * * <pre> * URL of this violating resource. * </pre> * * <code>string resource_url = 2;</code> * * @param value The bytes for resourceUrl to set. * @return This builder for chaining. 
*/ public Builder setResourceUrlBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resourceUrl_ = value; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.websecurityscanner.v1.ViolatingResource) } // @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1.ViolatingResource) private static final com.google.cloud.websecurityscanner.v1.ViolatingResource DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.websecurityscanner.v1.ViolatingResource(); } public static com.google.cloud.websecurityscanner.v1.ViolatingResource getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ViolatingResource> PARSER = new com.google.protobuf.AbstractParser<ViolatingResource>() { @java.lang.Override public ViolatingResource parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ViolatingResource(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ViolatingResource> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ViolatingResource> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.websecurityscanner.v1.ViolatingResource getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Copyright (c) 2007, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package java.net;

import java.io.IOException;
import java.io.FileDescriptor;

/*
 * This class defines the plain SocketImpl that is used for all
 * Windows version lower than Vista. It adds support for IPv6 on
 * these platforms where available.
 *
 * For backward compatibility Windows platforms that do not have IPv6
 * support also use this implementation, and fd1 gets set to null
 * during socket creation.
 *
 * @author Chris Hegarty
 */

class TwoStacksPlainSocketImpl extends AbstractPlainSocketImpl
{
    /* second fd, used for ipv6 on windows only.
     * fd1 is used for listeners and for client sockets at initialization
     * until the socket is connected. Up to this point fd always refers
     * to the ipv4 socket and fd1 to the ipv6 socket. After the socket
     * becomes connected, fd always refers to the connected socket
     * (either v4 or v6) and fd1 is closed.
     *
     * For ServerSockets, fd always refers to the v4 listener and
     * fd1 the v6 listener.
     */
    private FileDescriptor fd1;

    /*
     * Needed for ipv6 on windows because we need to know
     * if the socket is bound to ::0 or 0.0.0.0, when a caller
     * asks for it. Otherwise we don't know which socket to ask.
     */
    private InetAddress anyLocalBoundAddr = null;

    /* to prevent starvation when listening on two sockets, this is
     * is used to hold the id of the last socket we accepted on.
     */
    private int lastfd = -1;

    // Registers this impl's native methods / protocol state exactly once
    // per class load, before any instance can be created.
    static {
        initProto();
    }

    public TwoStacksPlainSocketImpl() {}

    // Wraps an already-created file descriptor (fd inherited from
    // AbstractPlainSocketImpl); fd1 stays null until create() is called.
    public TwoStacksPlainSocketImpl(FileDescriptor fd) {
        this.fd = fd;
    }

    /**
     * Creates a socket with a boolean that specifies whether this
     * is a stream socket (true) or an unconnected UDP socket (false).
     *
     * Allocates the second (IPv6) descriptor before delegating to the
     * superclass, so both stacks are available from the start.
     */
    protected synchronized void create(boolean stream) throws IOException {
        fd1 = new FileDescriptor();
        super.create(stream);
    }

    /**
     * Binds the socket to the specified address of the specified local port.
     * Remembers the address when it is a wildcard (::0 / 0.0.0.0) so that
     * getOption(SO_BINDADDR) can answer without picking one of the two fds.
     * @param address the address
     * @param port the port
     */
    protected synchronized void bind(InetAddress address, int lport)
        throws IOException
    {
        super.bind(address, lport);
        if (address.isAnyLocalAddress()) {
            anyLocalBoundAddr = address;
        }
    }

    // SO_BINDADDR is answered locally when both fds are still open (socket is
    // unbound or bound to the wildcard); otherwise the native layer is asked.
    // All other options are delegated to the superclass.
    public Object getOption(int opt) throws SocketException {
        if (isClosedOrPending()) {
            throw new SocketException("Socket Closed");
        }
        if (opt == SO_BINDADDR) {
            if (fd != null && fd1 != null ) {
                /* must be unbound or else bound to anyLocal */
                return anyLocalBoundAddr;
            }
            InetAddressContainer in = new InetAddressContainer();
            socketGetOption(opt, in);
            return in.addr;
        } else
            return super.getOption(opt);
    }

    /**
     * Closes the socket.
     *
     * Closes immediately when no thread is using the fd (fdUseCount == 0);
     * otherwise marks the close as pending and decrements fdUseCount so the
     * last thread releasing the fd performs the actual close (deferred close).
     * All state transitions happen under fdLock.
     */
    protected void close() throws IOException {
        synchronized(fdLock) {
            if (fd != null || fd1 != null) {
                if (fdUseCount == 0) {
                    if (closePending) {
                        return;
                    }
                    closePending = true;
                    socketClose();
                    fd = null;
                    fd1 = null;
                    return;
                } else {
                    /*
                     * If a thread has acquired the fd and a close
                     * isn't pending then use a deferred close.
                     * Also decrement fdUseCount to signal the last
                     * thread that releases the fd to close it.
                     */
                    if (!closePending) {
                        closePending = true;
                        fdUseCount--;
                        socketClose();
                    }
                }
            }
        }
    }

    // Closes any live descriptors and resets this impl to its pristine state.
    // NOTE(review): unlike close(), this does not take fdLock or honor the
    // deferred-close protocol — presumably callers guarantee exclusivity;
    // confirm against AbstractPlainSocketImpl.reset() usage.
    void reset() throws IOException {
        if (fd != null || fd1 != null) {
            socketClose();
        }
        fd = null;
        fd1 = null;
        super.reset();
    }

    /*
     * Return true if already closed or close is pending
     */
    public boolean isClosedOrPending() {
        /*
         * Lock on fdLock to ensure that we wait if a
         * close is in progress.
         */
        synchronized (fdLock) {
            if (closePending || (fd == null && fd1 == null)) {
                return true;
            } else {
                return false;
            }
        }
    }

    /* Native methods */

    static native void initProto();

    native void socketCreate(boolean isServer) throws IOException;

    native void socketConnect(InetAddress address, int port, int timeout)
        throws IOException;

    native void socketBind(InetAddress address, int port)
        throws IOException;

    native void socketListen(int count) throws IOException;

    native void socketAccept(SocketImpl s) throws IOException;

    native int socketAvailable() throws IOException;

    native void socketClose0(boolean useDeferredClose) throws IOException;

    native void socketShutdown(int howto) throws IOException;

    native void socketSetOption(int cmd, boolean on, Object value)
        throws SocketException;

    native int socketGetOption(int opt, Object iaContainerObj) throws SocketException;

    // Variant of socketGetOption that targets an explicit descriptor (fd or
    // fd1) rather than letting the native layer choose.
    native int socketGetOption1(int opt, Object iaContainerObj, FileDescriptor fd)
        throws SocketException;

    native void socketSendUrgentData(int data) throws IOException;
}
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

/**
 * GroupItemType.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis2 version: 1.5.1  Built on : Oct 19, 2009 (10:59:34 EDT)
 *
 * NOTE(review): generated ADB bean — do not hand-edit logic; regenerate from
 * the WSDL instead. Comments below document the generated behavior only.
 */

package com.amazon.ec2;

/**
 * GroupItemType bean class.
 *
 * ADB (Axis2 Data Binding) bean for the EC2 {@code GroupItemType} schema type:
 * a single required {@code groupId} string element in the
 * {@code http://ec2.amazonaws.com/doc/2010-11-15/} namespace.
 */
public class GroupItemType
    implements org.apache.axis2.databinding.ADBBean {
    /* This type was generated from the piece of schema that had
       name = GroupItemType
       Namespace URI = http://ec2.amazonaws.com/doc/2010-11-15/
       Namespace Prefix = ns1
    */

    // Returns "ns1" for the EC2 namespace (matching the schema prefix above),
    // otherwise a fresh unique prefix from the Axis2 BeanUtil.
    private static java.lang.String generatePrefix(java.lang.String namespace) {
        if (namespace.equals("http://ec2.amazonaws.com/doc/2010-11-15/")) {
            return "ns1";
        }
        return org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
    }

    /**
     * field for GroupId
     */
    // Required element: serialize()/getPullParser() throw ADBException if null.
    protected java.lang.String localGroupId;

    /**
     * Auto generated getter method
     * @return java.lang.String
     */
    public java.lang.String getGroupId() {
        return localGroupId;
    }

    /**
     * Auto generated setter method
     * @param param GroupId
     */
    public void setGroupId(java.lang.String param) {
        this.localGroupId = param;
    }

    /**
     * isReaderMTOMAware
     * @return true if the reader supports MTOM
     */
    public static boolean isReaderMTOMAware(javax.xml.stream.XMLStreamReader reader) {
        boolean isReaderMTOMAware = false;
        try {
            isReaderMTOMAware = java.lang.Boolean.TRUE.equals(reader.getProperty(org.apache.axiom.om.OMConstants.IS_DATA_HANDLERS_AWARE));
        } catch (java.lang.IllegalArgumentException e) {
            // Reader does not know the property at all -> treat as not MTOM-aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized AXIOM element; serialization is
     * deferred until the OMElement is actually consumed.
     *
     * @param parentQName qualified name to use for the wrapping element
     * @param factory AXIOM factory used to build the element
     * @return org.apache.axiom.om.OMElement
     */
    public org.apache.axiom.om.OMElement getOMElement(
            final javax.xml.namespace.QName parentQName,
            final org.apache.axiom.om.OMFactory factory)
            throws org.apache.axis2.databinding.ADBException {
        org.apache.axiom.om.OMDataSource dataSource =
            new org.apache.axis2.databinding.ADBDataSource(this, parentQName) {
                public void serialize(org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
                        throws javax.xml.stream.XMLStreamException {
                    GroupItemType.this.serialize(parentQName, factory, xmlWriter);
                }
            };
        return new org.apache.axiom.om.impl.llom.OMSourcedElementImpl(
                parentQName, factory, dataSource);
    }

    // Convenience overload: serialize without an xsi:type attribute.
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {
        serialize(parentQName, factory, xmlWriter, false);
    }

    /**
     * Writes this bean as XML: an outer element named by {@code parentQName}
     * containing a single {@code groupId} child. When {@code serializeType} is
     * true an {@code xsi:type="...:GroupItemType"} attribute is also written.
     *
     * @throws org.apache.axis2.databinding.ADBException if groupId is null
     */
    public void serialize(final javax.xml.namespace.QName parentQName,
                          final org.apache.axiom.om.OMFactory factory,
                          org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws javax.xml.stream.XMLStreamException, org.apache.axis2.databinding.ADBException {

        java.lang.String prefix = null;
        java.lang.String namespace = null;

        prefix = parentQName.getPrefix();
        namespace = parentQName.getNamespaceURI();

        // Start the outer element, reusing an already-bound prefix when the
        // writer has one, otherwise declaring a freshly generated prefix.
        if ((namespace != null) && (namespace.trim().length() > 0)) {
            java.lang.String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null) {
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            } else {
                if (prefix == null) {
                    prefix = generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        } else {
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }

        if (serializeType) {
            java.lang.String namespacePrefix = registerPrefix(xmlWriter, "http://ec2.amazonaws.com/doc/2010-11-15/");
            if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0)) {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        namespacePrefix + ":GroupItemType",
                        xmlWriter);
            } else {
                writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type",
                        "GroupItemType",
                        xmlWriter);
            }
        }

        // Write the single <groupId> child element.
        // NOTE(review): generated-code quirk — when the prefix is unbound the
        // start element is written inside the prefix==null branch; preserved
        // exactly as generated.
        namespace = "http://ec2.amazonaws.com/doc/2010-11-15/";
        if (! namespace.equals("")) {
            prefix = xmlWriter.getPrefix(namespace);
            if (prefix == null) {
                prefix = generatePrefix(namespace);
                xmlWriter.writeStartElement(prefix, "groupId", namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            } else {
                xmlWriter.writeStartElement(namespace, "groupId");
            }
        } else {
            xmlWriter.writeStartElement("groupId");
        }

        if (localGroupId == null) {
            // write the nil attribute
            throw new org.apache.axis2.databinding.ADBException("groupId cannot be null!!");
        } else {
            xmlWriter.writeCharacters(localGroupId);
        }

        xmlWriter.writeEndElement();

        xmlWriter.writeEndElement();
    }

    /**
     * Util method to write an attribute with the ns prefix
     */
    private void writeAttribute(java.lang.String prefix, java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (xmlWriter.getPrefix(namespace) == null) {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    private void writeAttribute(java.lang.String namespace, java.lang.String attName,
                                java.lang.String attValue, javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attValue);
        }
    }

    /**
     * Util method to write an attribute without the ns prefix
     */
    // Writes a QName-valued attribute as "prefix:localPart" (or bare
    // localPart when the bound prefix is empty).
    private void writeQNameAttribute(java.lang.String namespace, java.lang.String attName,
                                     javax.xml.namespace.QName qname, javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        java.lang.String attributeNamespace = qname.getNamespaceURI();
        java.lang.String attributePrefix = xmlWriter.getPrefix(attributeNamespace);
        if (attributePrefix == null) {
            attributePrefix = registerPrefix(xmlWriter, attributeNamespace);
        }
        java.lang.String attributeValue;
        if (attributePrefix.trim().length() > 0) {
            attributeValue = attributePrefix + ":" + qname.getLocalPart();
        } else {
            attributeValue = qname.getLocalPart();
        }

        if (namespace.equals("")) {
            xmlWriter.writeAttribute(attName, attributeValue);
        } else {
            registerPrefix(xmlWriter, namespace);
            xmlWriter.writeAttribute(namespace, attName, attributeValue);
        }
    }

    /**
     * method to handle Qnames
     */
    // Writes a QName as character data, declaring its namespace prefix first
    // when it is not already bound on the writer.
    private void writeQName(javax.xml.namespace.QName qname,
                            javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String namespaceURI = qname.getNamespaceURI();
        if (namespaceURI != null) {
            java.lang.String prefix = xmlWriter.getPrefix(namespaceURI);
            if (prefix == null) {
                prefix = generatePrefix(namespaceURI);
                xmlWriter.writeNamespace(prefix, namespaceURI);
                xmlWriter.setPrefix(prefix, namespaceURI);
            }

            if (prefix.trim().length() > 0) {
                xmlWriter.writeCharacters(prefix + ":" + org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            } else {
                // i.e this is the default namespace
                xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
            }
        } else {
            xmlWriter.writeCharacters(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qname));
        }
    }

    // Writes an array of QNames as space-separated character data; namespace
    // declarations are emitted up front because none may follow the text.
    private void writeQNames(javax.xml.namespace.QName[] qnames,
                             javax.xml.stream.XMLStreamWriter xmlWriter)
            throws javax.xml.stream.XMLStreamException {

        if (qnames != null) {
            // we have to store this data until last moment since it is not possible to write any
            // namespace data after writing the charactor data
            java.lang.StringBuffer stringToWrite = new java.lang.StringBuffer();
            java.lang.String namespaceURI = null;
            java.lang.String prefix = null;

            for (int i = 0; i < qnames.length; i++) {
                if (i > 0) {
                    stringToWrite.append(" ");
                }
                namespaceURI = qnames[i].getNamespaceURI();
                if (namespaceURI != null) {
                    prefix = xmlWriter.getPrefix(namespaceURI);
                    if ((prefix == null) || (prefix.length() == 0)) {
                        prefix = generatePrefix(namespaceURI);
                        xmlWriter.writeNamespace(prefix, namespaceURI);
                        xmlWriter.setPrefix(prefix, namespaceURI);
                    }

                    if (prefix.trim().length() > 0) {
                        stringToWrite.append(prefix).append(":").append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    } else {
                        stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                    }
                } else {
                    stringToWrite.append(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(qnames[i]));
                }
            }
            xmlWriter.writeCharacters(stringToWrite.toString());
        }
    }

    /**
     * Register a namespace prefix
     */
    // Binds a (possibly freshly generated) prefix for the namespace on the
    // writer, retrying with unique prefixes until an unused one is found.
    private java.lang.String registerPrefix(javax.xml.stream.XMLStreamWriter xmlWriter, java.lang.String namespace)
            throws javax.xml.stream.XMLStreamException {
        java.lang.String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null) {
            prefix = generatePrefix(namespace);
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null) {
                prefix = org.apache.axis2.databinding.utils.BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * databinding method to get an XML representation of this object
     *
     * Builds a pull-parser view over the bean's single groupId element.
     * @throws org.apache.axis2.databinding.ADBException if groupId is null
     */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName)
            throws org.apache.axis2.databinding.ADBException {

        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/",
                "groupId"));

        if (localGroupId != null) {
            elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localGroupId));
        } else {
            throw new org.apache.axis2.databinding.ADBException("groupId cannot be null!!");
        }

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that keeps the parse method
     */
    public static class Factory {

        /**
         * static method to create the object
         * Precondition:  If this object is an element, the current or next start element starts this object and any intervening reader events are ignorable
         *                If this object is not an element, it is a complex type and the reader is at the event just after the outer start element
         * Postcondition: If this object is an element, the reader is positioned at its end element
         *                If this object is a complex type, the reader is positioned at the end element of its outer element
         */
        public static GroupItemType parse(javax.xml.stream.XMLStreamReader reader) throws java.lang.Exception {
            GroupItemType object = new GroupItemType();

            // NOTE(review): the locals below are unused generated-code
            // leftovers; kept as generated.
            int event;
            java.lang.String nillableValue = null;
            java.lang.String prefix = "";
            java.lang.String namespaceuri = "";
            try {

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                // An xsi:type naming a different type dispatches to the
                // ExtensionMapper so schema subtypes parse correctly.
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null) {
                    java.lang.String fullTypeName =
                            reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type");
                    if (fullTypeName != null) {
                        java.lang.String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1) {
                            nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix == null ? "" : nsPrefix;

                        java.lang.String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);

                        if (!"GroupItemType".equals(type)) {
                            //find namespace for the prefix
                            java.lang.String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (GroupItemType) com.amazon.ec2.ExtensionMapper.getTypeObject(
                                    nsUri, type, reader);
                        }
                    }
                }

                // Note all attributes that were handled. Used to differ normal attributes
                // from anyAttributes.
                java.util.Vector handledAttributes = new java.util.Vector();

                reader.next();

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                if (reader.isStartElement() && new javax.xml.namespace.QName("http://ec2.amazonaws.com/doc/2010-11-15/", "groupId").equals(reader.getName())) {

                    java.lang.String content = reader.getElementText();

                    object.setGroupId(
                            org.apache.axis2.databinding.utils.ConverterUtil.convertToString(content));

                    reader.next();

                }  // End of if for expected property start element
                else {
                    // A start element we are not expecting indicates an invalid parameter was passed
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());
                }

                while (!reader.isStartElement() && !reader.isEndElement())
                    reader.next();

                if (reader.isStartElement())
                    // A start element we are not expecting indicates a trailing invalid property
                    throw new org.apache.axis2.databinding.ADBException("Unexpected subelement " + reader.getLocalName());

            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }

            return object;
        }

    }//end of factory class

}
/** * (c) 2014 Cisco and/or its affiliates. All rights reserved. * * This software is released under the Eclipse Public License. The details can be found in the file LICENSE. * Any dependent libraries supplied by third parties are provided under their own open source licenses as * described in their own LICENSE files, generally named .LICENSE.txt. The libraries supplied by Cisco as * part of the Composite Information Server/Cisco Data Virtualization Server, particularly csadmin-XXXX.jar, * csarchive-XXXX.jar, csbase-XXXX.jar, csclient-XXXX.jar, cscommon-XXXX.jar, csext-XXXX.jar, csjdbc-XXXX.jar, * csserverutil-XXXX.jar, csserver-XXXX.jar, cswebapi-XXXX.jar, and customproc-XXXX.jar (where -XXXX is an * optional version number) are provided as a convenience, but are covered under the licensing for the * Composite Information Server/Cisco Data Virtualization Server. They cannot be used in any way except * through a valid license for that product. * * This software is released AS-IS!. Support for this software is not covered by standard maintenance agreements with Cisco. * Any support for this software by Cisco would be covered by paid consulting agreements, and would be billable work. 
* */
package com.cisco.dvbu.cmdline.vcs.spi;

import java.io.BufferedReader;
import java.io.Closeable;
import java.io.File;
import java.io.InputStreamReader;
import java.io.IOException;
import java.util.Map;

import com.cisco.dvbu.cmdline.vcs.spi.git.GITLifecycleListener;
import com.cisco.dvbu.ps.common.util.CommonUtils;
import com.compositesw.common.vcs.primitives.IOPrimitives;
import com.compositesw.common.vcs.primitives.ProcessPrimitives;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Base implementation shared by the concrete VCS lifecycle listeners.
 *
 * <p>Builds and runs an external VCS command line (svn, git, ...) via
 * {@link ProcessBuilder}, appending user-supplied options
 * ({@code VCS_OPTIONS}) and environment variables ({@code VCS_ENV}),
 * then forwards the process output and converts error output into a
 * {@link VCSException} (error extraction is delegated to subclasses via
 * {@link #getErrorMessages(Process)}).
 */
public abstract class AbstractLifecycleListener implements LifecycleListener {

    protected static Log logger = LogFactory.getLog(AbstractLifecycleListener.class);

    // VCS configuration supplied by the launcher as JVM system properties.
    // NOTE(review): System.getProperty may return null for any of these if the
    // property is not set; setCommannd() below tokenizes VCS_OPTIONS without a
    // null check — confirm the launcher always defines it.
    protected static final String VCS_EXEC = System.getProperty("VCS_EXEC");       // VCS executable path/name
    protected static final String VCS_OPTIONS = System.getProperty("VCS_OPTIONS"); // extra options appended to every command
    protected static final String VCS_ENV = System.getProperty("VCS_ENV");         // "name=value|name=value" environment pairs

    // Prefix stamped onto every log message emitted by this class.
    protected static final String prefix = "AbstractLifecycleListener::";
    protected static final String LS = System.getProperty("line.separator");

    // NOTE(review): this field is created empty and never receives the command;
    // execute() builds a *local* ProcessBuilder that shadows it, so
    // handleErrors() masks/reports this field's (empty) command list rather
    // than the command that actually ran — verify whether that is intended.
    protected final ProcessBuilder processBuilder;

    protected AbstractLifecycleListener() {
        processBuilder = new ProcessBuilder();
    }

    /**
     * @param commandTemplate Assumed not to be <tt>null</tt>.
     * @param commandConfiguration May be <tt>null</tt>.
     * @return An instance representing the specified command with the specified configuration parameters.
     * <p>
     * May not be <tt>null</tt>.
     */
    protected static String[] getConfiguredCommand(String[] commandTemplate, Map<Integer, String> commandConfiguration) {
        String[] result = getCommandFromTemplate(commandTemplate);
        if (commandConfiguration != null) {
            // Overlay each configured argument at its template position
            // (map key = argument index, value = replacement text).
            for (Map.Entry<Integer, String> entry: commandConfiguration.entrySet()) {
                result[entry.getKey()] = entry.getValue();
            }
        }
        return result;
    }

//    private static String printCommand(String[] command) {
//        StringBuilder sb = new StringBuilder();
//        for (int i = 0; i < command.length-1; i++) sb.append(command[i]).append(" ");
//        sb.append(command[command.length-1]);
//        return sb.toString();
//    }

    /** Returns a defensive copy of the template so callers never mutate the shared template array. */
    private static String[] getCommandFromTemplate(String[] template) {
        String[] command = new String[template.length];
        System.arraycopy(template, 0, command, 0, template.length);
        return command;
    }

    /**
     * Extracts any error messages produced by the finished process.
     * Subclasses implement the VCS-specific parsing of stderr.
     *
     * @return the error text, or <tt>null</tt>/empty when the command succeeded.
     */
    protected abstract String getErrorMessages(Process process) throws VCSException;

    /**
     * Builds and runs the given command, forwarding its output and raising a
     * {@link VCSException} if the process reports errors.
     *
     * @param contextFolder The folder to execute the specified command in.
     * <p>
     * May not be <tt>null</tt>.
     * @param command the base command line (VCS_OPTIONS are appended before execution)
     * @param verbose when <tt>true</tt>, process stdout is echoed to System.out
     */
    protected void execute(File contextFolder, String[] command, boolean verbose) throws VCSException {
        if (contextFolder == null) throw new IllegalArgumentException("Context folder must be specified.");

        // NOTE(review): guard checks isDebugEnabled() but logs at INFO level —
        // presumably intentional (separator only shown in debug runs); confirm.
        if (logger.isDebugEnabled()) {
            logger.info(prefix+"-------------------------------------------------");
        }

        // Set the full command including any VCS_OPTIONS
        String[] newCommand = setCommannd(command);

        /*
         * mtinius: 2014-02-28 resolve issue with long file paths.
         *
         * A new process builder is required because the full command must be created on initialization.
         * This resolves the "file too long" error that occurs in windows when executing a command line that exceeds 260 characters.
         * Previously, the 2 commands were executed below which has the affect of doing a cd <long path> and then executing the VCS command.
         * That was the issue with windows.  Windows uses a different api when executing a cd to a path and it fails.
         *      processBuilder.directory(contextFolder);
         *      processBuilder.command(newCommand);
         *
         * By creating a process that includes the entire path as a single command, windows uses an API call that is not bound by the
         * 260 character limit and thus this command succeeds with very long paths.
         *
         * To illustrate the point...
         * This will succeed: return new ProcessBuilder("svn", "add", new File(DIRECTORY, FILE).getAbsolutePath()).start();
         *      This is basically doing a svn add path\file in one step.
         *      This will work with paths longer than 260 characters
         *
         * This will fail: return new ProcessBuilder("svn", "add", FILE).directory(new File(DIRECTORY)).start();
         *      Fails with CreateProcess error=267, The directory name is invalid
         *      This is basically doing a cd <long path> and svn add file in two steps.
         *      It will fail on cd <long path>
         *
         * cgoodric: 2014-09-08: The Git executable requires that its working directory is in the within the workspace that is
         *      being worked in regardless of the path to the file(s) being worked on. NOTE: the directory()
         *      method call MUST occur when the ProcessBuilder object is created. If it occurs later, the call
         *      is ignored for some reason. :/
         */
        // Git needs directory() set at construction time (see note above);
        // every other VCS gets the long-path-safe, no-directory form.
        ProcessBuilder processBuilder = (! GITLifecycleListener.isGitExecutable (VCS_EXEC)) ?
            new ProcessBuilder (newCommand) :
            new ProcessBuilder (newCommand).directory (contextFolder);

        // Set any environment variables that were specified by the user
        setEnvironment(processBuilder);

        if (logger.isDebugEnabled()) {
            logger.info(prefix+"-------------------------------------------------");
        }

        Process process = execute(processBuilder);
        try {
            handleOutput(process, verbose);
            handleErrors(process);
        }
        finally {
            // Always release the process streams, even when an error was raised.
            ProcessPrimitives.closeStreams(process, verbose);
        }
    }

    /** Starts the process, wrapping any launch failure in a {@link VCSException}. */
    private Process execute(ProcessBuilder processBuilder) throws VCSException {
        Process result = null;
        try {
            result = processBuilder.start();
        }
        catch(IOException e) {
            throw new VCSException(e);
        }
        return result;
    }

    /** Streams process stdout to System.out when verbose, otherwise discards it. */
    private void handleOutput(Process process, boolean verbose) throws VCSException {
        try {
            IOPrimitives.redirect(process.getInputStream(), verbose?System.out:null);
        }
        catch(IOException e) {
            throw new VCSException(e);
        }
    }

    /**
     * Raises a {@link VCSException} if the subclass extracted any error text
     * from the process. The command is masked so passwords are not leaked
     * into the exception message.
     */
    private void handleErrors(Process process) throws VCSException {
        String errorMessages = getErrorMessages(process);
        if (errorMessages != null && errorMessages.length() > 0) {
            // Mask the original command for any passwords
            // NOTE(review): this reads the *field* processBuilder, which never
            // had its command set (execute() uses a shadowing local) — the
            // masked command here is likely always empty; verify.
            String command = CommonUtils.maskCommand(processBuilder.command().toString());
            throw new VCSException(command + ": " + errorMessages);
        }
    }

    /**
     * Set the command for the process to execute.
     *
     * <p>Returns a new array consisting of {@code command} followed by the
     * whitespace-separated tokens of {@code VCS_OPTIONS}.
     * NOTE(review): method name is a typo ("setCommannd"); it is private so a
     * rename would be safe but is left untouched here. Throws NPE if
     * VCS_OPTIONS is not defined as a system property.
     *
     * @param command[] - a list of command line arguments
     */
    private String[] setCommannd(String[] command) {
        // Count the VCS_OPTIONS in order to initialize the string array
        java.util.StringTokenizer st = new java.util.StringTokenizer(VCS_OPTIONS," ");
        int vcsOptionCount = 0;
        while(st.hasMoreTokens()){
            vcsOptionCount++;
            st.nextToken();
        }

        // Initialize a new command to hold existing command + VCS_OPTIONS
        String[] newCommand = new String[vcsOptionCount+command.length];

        // Setup the newCommand and extract the existing command to a string
        // (cmd is rebuilt only so it can be logged, masked, below).
        String cmd = "";
        for (int i=0; i < command.length; i++) {
            newCommand[i] = command[i];
            cmd = cmd + command[i].toString() + " ";
        }

        // Add on the VCS_OPTIONS to the end of the newCommand
        int tokenCount = command.length;
        st = new java.util.StringTokenizer(VCS_OPTIONS," ");
        while(st.hasMoreTokens()){
            String token = st.nextToken().toString();
            newCommand[tokenCount] = token;
            cmd = cmd + token + " ";
            tokenCount++;
        }

        // Print out the command just prior to execution
        if (logger.isDebugEnabled()) {
            logger.debug("DEBUG::"+prefix+"Command: " + CommonUtils.maskCommand(cmd));
        }
        return newCommand;
    }

    /**
     * Set the environment variables for the process
     *
     * <p>Parses VCS_ENV as pipe-separated "name=value" pairs and copies each
     * pair into the builder's environment map.
     *
     * @param processBuilder The process context
     */
    private void setEnvironment(ProcessBuilder processBuilder) {
        // Setup the environment variables
        Map<String, String> env = processBuilder.environment();
        java.util.List<String> envList = new java.util.ArrayList<String>();

        // Retrieve the environment variables separated by a pipe
        envList = CommonUtils.getArgumentsList(envList, true, VCS_ENV, "|");

        // 2014-09-03 (cgoodric): make sure envList isn't empty
        if (envList != null) {
            // Loop through the list of VCS_ENV variables
            for (int i=0; i < envList.size(); i++) {
                String envVar = envList.get(i).toString();

                // Retrieve the name=value pair
                java.util.StringTokenizer st = new java.util.StringTokenizer(envVar,"=");
                if (st.hasMoreTokens()) {
                    // Retrieve the variable name token
                    String property = st.nextToken();
                    String propertyVal = "";
                    try {
                        // Retrieve the variable value token
                        // (a bare "name" with no "=value" deliberately falls
                        // back to the empty string via the swallowed exception)
                        propertyVal = st.nextToken();
                    } catch (Exception e) {}

                    // Put the environment variable (name=value) pair back to the environment
                    env.put(property, propertyVal);
                    if (logger.isDebugEnabled()) {
                        logger.info(prefix+"Env Var: "+CommonUtils.maskCommand(envVar));
                    }
                }
            }
        }
    }
}
/******************************************************************************* * * Pentaho Data Integration * * Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.job.entries.checkdbconnection; import static org.pentaho.di.job.entry.validator.AndValidator.putValidators; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notBlankValidator; import java.util.List; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Result; import org.pentaho.di.core.database.Database; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.i18n.BaseMessages; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.resource.ResourceEntry; import 
org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.w3c.dom.Node;

/**
 * This check db connections
 *
 * <p>Job entry that opens each configured database connection to verify it is
 * reachable, optionally holding the connection open for a configured wait
 * time, and fails the job result if any connection cannot be established.
 *
 * @author Samatar
 * @since 10-12-2007
 *
 */
public class JobEntryCheckDbConnections extends JobEntryBase implements Cloneable, JobEntryInterface {
  private static Class<?> PKG = JobEntryCheckDbConnections.class; // for i18n purposes, needed by Translator2!!   $NON-NLS-1$

  // One entry per connection to check; parallel to waitfors/waittimes below.
  public DatabaseMeta connections[];

  // Localized descriptions of the wait-time units, indexed by UNIT_TIME_*.
  public static final String[] unitTimeDesc = new String[] {
      BaseMessages.getString(PKG, "JobEntryCheckDbConnections.UnitTimeMilliSecond.Label"),
      BaseMessages.getString(PKG, "JobEntryCheckDbConnections.UnitTimeSecond.Label"),
      BaseMessages.getString(PKG, "JobEntryCheckDbConnections.UnitTimeMinute.Label"),
      BaseMessages.getString(PKG, "JobEntryCheckDbConnections.UnitTimeHour.Label"), };
  // Persisted (XML/repository) codes for the wait-time units, indexed by UNIT_TIME_*.
  public static final String[] unitTimeCode = new String[] { "millisecond", "second", "minute", "hour" };

  public static final int UNIT_TIME_MILLI_SECOND=0;
  public static final int UNIT_TIME_SECOND=1;
  public static final int UNIT_TIME_MINUTE=2;
  public static final int UNIT_TIME_HOUR=3;

  // Per-connection wait duration (variable-substituted) and its unit (UNIT_TIME_*).
  public String[] waitfors;
  public int[] waittimes;

  public JobEntryCheckDbConnections(String n) {
    super(n, "");
    connections = null;
    waitfors=null;
    waittimes=null;
    setID(-1L);
  }

  public JobEntryCheckDbConnections() {
    this("");
  }

  public Object clone() {
    // NOTE(review): shallow copy — the connections/waitfors/waittimes arrays
    // are shared with the original; confirm that matches other job entries.
    JobEntryCheckDbConnections je = (JobEntryCheckDbConnections) super.clone();
    return je;
  }

  /** Maps a UNIT_TIME_* index to its persisted code, defaulting to "millisecond" when out of range. */
  private static String getWaitTimeCode(int i) {
    if (i < 0 || i >= unitTimeCode.length) return unitTimeCode[0];
    return unitTimeCode[i];
  }

  /** Maps a UNIT_TIME_* index to its localized description, defaulting to the first entry when out of range. */
  public static String getWaitTimeDesc(int i) {
    if (i < 0 || i >= unitTimeDesc.length) return unitTimeDesc[0];
    return unitTimeDesc[i];
  }

  /** Resolves a localized description (or, failing that, a code) back to its UNIT_TIME_* index. */
  public static int getWaitTimeByDesc(String tt) {
    if (tt == null) return 0;
    for (int i = 0; i < unitTimeDesc.length; i++) {
      if (unitTimeDesc[i].equalsIgnoreCase(tt)) return i;
    }
    // If this fails, try to match using the code.
    return getWaitTimeByCode(tt);
  }

  /** Resolves a persisted code to its UNIT_TIME_* index, defaulting to 0. */
  private static int getWaitTimeByCode(String tt) {
    if (tt == null) return 0;
    for (int i = 0; i < unitTimeCode.length; i++) {
      if (unitTimeCode[i].equalsIgnoreCase(tt)) return i;
    }
    return 0;
  }

  /** Serializes this entry (connection name, waitfor, waittime per connection) to job XML. */
  public String getXML() {
    StringBuffer retval = new StringBuffer();
    retval.append(super.getXML());
    retval.append("      <connections>").append(Const.CR); //$NON-NLS-1$
    if (connections != null) {
      for (int i = 0; i < connections.length; i++) {
        retval.append("        <connection>").append(Const.CR); //$NON-NLS-1$
        retval.append("          ").append(XMLHandler.addTagValue("name", connections[i]==null?null:connections[i].getName()));
        retval.append("          ").append(XMLHandler.addTagValue("waitfor",waitfors[i]));
        retval.append("          ").append(XMLHandler.addTagValue("waittime",getWaitTimeCode(waittimes[i])));
        retval.append("        </connection>").append(Const.CR); //$NON-NLS-1$
      }
    }
    retval.append("      </connections>").append(Const.CR); //$NON-NLS-1$
    return retval.toString();
  }

  // NOTE(review): exact duplicate of getWaitTimeByCode above — one of the two
  // could be removed (kept here to preserve the class byte-for-byte).
  private static int getWaitByCode(String tt) {
    if (tt == null) return 0;
    for (int i = 0; i < unitTimeCode.length; i++) {
      if (unitTimeCode[i].equalsIgnoreCase(tt)) return i;
    }
    return 0;
  }

  /** Restores this entry from job XML (inverse of {@link #getXML()}). */
  public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException {
    try {
      super.loadXML(entrynode, databases, slaveServers);
      Node fields = XMLHandler.getSubNode(entrynode, "connections"); //$NON-NLS-1$

      // How many hosts?
      int nrFields = XMLHandler.countNodes(fields, "connection"); //$NON-NLS-1$
      connections = new DatabaseMeta[nrFields];
      waitfors = new String[nrFields];
      waittimes = new int[nrFields];

      // Read them all...
      for (int i = 0; i < nrFields; i++) {
        Node fnode = XMLHandler.getSubNodeByNr(fields, "connection", i); //$NON-NLS-1$
        String dbname = XMLHandler.getTagValue(fnode, "name"); //$NON-NLS-1$
        // May be null if the named connection no longer exists in the job.
        connections[i] = DatabaseMeta.findDatabase(databases, dbname);
        waitfors[i] = XMLHandler.getTagValue(fnode, "waitfor"); //$NON-NLS-1$
        waittimes[i] = getWaitByCode(Const.NVL(XMLHandler.getTagValue(fnode, "waittime"), ""));
      }
    } catch(KettleXMLException xe) {
      throw new KettleXMLException(BaseMessages.getString(PKG, "JobEntryCheckDbConnections.ERROR_0001_Cannot_Load_Job_Entry_From_Xml_Node", xe.getMessage()));
    }
  }

  /** Restores this entry from the repository (counterpart of {@link #saveRep(Repository, ObjectId)}). */
  public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    try {
      // How many connections?
      int argnr = rep.countNrJobEntryAttributes(id_jobentry, "id_database"); //$NON-NLS-1$
      connections = new DatabaseMeta[argnr];
      waitfors = new String[argnr];
      waittimes = new int[argnr];

      // Read them all...
      for (int a = 0; a < argnr; a++) {
        connections[a] = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", a, "id_database", databases);
        waitfors[a] = rep.getJobEntryAttributeString(id_jobentry, a, "waitfor");
        waittimes[a] = getWaitByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, a, "waittime"), ""));
      }
    } catch(KettleException dbe) {
      throw new KettleException(BaseMessages.getString(PKG, "JobEntryCheckDbConnections.ERROR_0002_Cannot_Load_Job_From_Repository",""+id_jobentry, dbe.getMessage()));
    }
  }

  /** Persists this entry's per-connection attributes to the repository. */
  public void saveRep(Repository rep, ObjectId id_job) throws KettleException {
    try {
      // save the arguments...
      if (connections != null) {
        for (int i = 0; i < connections.length; i++) {
          rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), i, "connection", "id_database", connections[i]);
          rep.saveJobEntryAttribute(id_job, getObjectId(), i, "waittime", getWaitTimeCode(waittimes[i]));
          rep.saveJobEntryAttribute(id_job, getObjectId(), i, "waitfor", waitfors[i]);
        }
      }
    } catch(KettleDatabaseException dbe) {
      throw new KettleException(BaseMessages.getString(PKG, "JobEntryCheckDbConnections.ERROR_0003_Cannot_Save_Job_Entry",""+id_job, dbe.getMessage()));
    }
  }

  /**
   * Connects to every configured database in turn; counts successes/failures
   * and marks the result as failed when any connection errors out. An optional
   * per-connection wait keeps the connection open for the configured duration.
   */
  public Result execute(Result previousResult, int nr) {
    Result result = previousResult;
    result.setResult( true );
    int nrerrors=0;
    int nrsuccess=0;
    if (connections != null) {
      for (int i = 0; i < connections.length && !parentJob.isStopped(); i++) {
        Database db = new Database(this, connections[i]);
        db.shareVariablesWith(this);
        try {
          db.connect();
          if(isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobEntryCheckDbConnections.Connected", connections[i].getDatabaseName(),connections[i].getName()));
          // Wait duration, after variable substitution; 0 or unparsable means "no wait".
          int iMaximumTimeout=Const.toInt(environmentSubstitute(waitfors[i]),0);
          if(iMaximumTimeout>0) {
            // Multiple converts currentTimeMillis() into the selected unit.
            // NOTE(review): there is no case for UNIT_TIME_MILLI_SECOND, so a
            // "millisecond" setting falls through to the default (seconds) —
            // confirm whether that is intended.
            int Multiple=1;
            String waitTimeMessage=unitTimeDesc[0];
            switch(waittimes[i]) {
              case JobEntryCheckDbConnections.UNIT_TIME_SECOND:
                Multiple=1000;  // Second
                waitTimeMessage=unitTimeDesc[1];
                break;
              case JobEntryCheckDbConnections.UNIT_TIME_MINUTE:
                Multiple = 60000; // Minute
                waitTimeMessage=unitTimeDesc[2];
                break;
              case JobEntryCheckDbConnections.UNIT_TIME_HOUR:
                Multiple = 3600000; // Hour
                waitTimeMessage=unitTimeDesc[3];
                break;
              default:
                Multiple=1000;  // Second
                waitTimeMessage=unitTimeDesc[1];
                break;
            }
            if(isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobEntryCheckDbConnections.Wait", ""+iMaximumTimeout,waitTimeMessage));
            // starttime (in seconds ,Minutes or Hours)
            long timeStart = System.currentTimeMillis() / Multiple;
            boolean continueLoop = true;
            // Poll (100 ms sleeps) until the wait elapses or the job is stopped.
            while (continueLoop && !parentJob.isStopped()) {
              // Update Time value
              long now = System.currentTimeMillis() / Multiple;
              // Let's check the limit time
              if ((now >= (timeStart + iMaximumTimeout))) {
                // We have reached the time limit
                if (isDetailed()) logDetailed( BaseMessages.getString(PKG, "JobEntryCheckDbConnections.WaitTimeIsElapsed.Label", connections[i].getDatabaseName(),connections[i].getName())); //$NON-NLS-1$
                continueLoop = false;
              } else {
                try {Thread.sleep(100);} catch (Exception e) {}
              }
            }
          }
          nrsuccess++;
          if(isDetailed()) logDetailed(BaseMessages.getString(PKG, "JobEntryCheckDbConnections.ConnectionOK", connections[i].getDatabaseName(),connections[i].getName()));
        } catch (KettleDatabaseException e) {
          nrerrors++;
          logError( BaseMessages.getString(PKG, "JobEntryCheckDbConnections.Exception", connections[i].getDatabaseName(), connections[i].getName(),e.toString())); //$NON-NLS-1$
        } finally {
          // Best-effort disconnect; failures here are deliberately ignored.
          if(db!=null) try{ db.disconnect(); db=null; }catch(Exception e){};
        }
      }
    }
    if(nrerrors>0) {
      result.setNrErrors(nrerrors);
      result.setResult(false);
    }
    if(isDetailed()){
      logDetailed( "=======================================");
      logDetailed( BaseMessages.getString(PKG, "JobEntryCheckDbConnections.Log.Info.ConnectionsInError","" + nrerrors));
      logDetailed( BaseMessages.getString(PKG, "JobEntryCheckDbConnections.Log.Info.ConnectionsInSuccess","" + nrsuccess));
      logDetailed( "=======================================");
    }
    return result;
  }

  public boolean evaluates() {
    return true;
  }

  public DatabaseMeta[] getUsedDatabaseConnections() {
    return connections;
  }

  /** Declares each connection's host and database name as resource dependencies of the job. */
  public List<ResourceReference> getResourceDependencies(JobMeta jobMeta) {
    List<ResourceReference> references = super.getResourceDependencies(jobMeta);
    if (connections != null) {
      for(int i=0; i<connections.length; i++) {
        DatabaseMeta connection = connections[i];
        ResourceReference reference = new ResourceReference(this);
        reference.getEntries().add( new ResourceEntry(connection.getHostname(), ResourceType.SERVER));
        reference.getEntries().add( new ResourceEntry(connection.getDatabaseName(), ResourceType.DATABASENAME));
        references.add(reference);
      }
    }
    return references;
  }

  @Override
  public void check(List<CheckResultInterface> remarks, JobMeta jobMeta) {
    // NOTE(review): this entry has no "tablename"/"columnname" fields — these
    // validations look copy/pasted from another job entry and likely never
    // validate anything meaningful here; confirm against the dialog fields.
    andValidator().validate(this, "tablename", remarks, putValidators(notBlankValidator())); //$NON-NLS-1$
    andValidator().validate(this, "columnname", remarks, putValidators(notBlankValidator())); //$NON-NLS-1$
  }
}
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
package com.google.protobuf;

import com.google.protobuf.GeneratedMessageLite.ExtensionDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Lite-runtime implementation of {@link ExtensionSchema}: parses and
 * serializes extension fields of {@link GeneratedMessageLite.ExtendableMessage}
 * instances, storing values in the message's {@link FieldSet}.
 *
 * <p>Unknown enum values encountered while parsing are routed into the
 * message's unknown fields via the supplied {@link UnknownFieldSchema}.
 */
@SuppressWarnings("unchecked")
final class ExtensionSchemaLite extends ExtensionSchema<ExtensionDescriptor> {
  @Override
  boolean hasExtensions(MessageLite prototype) {
    return prototype instanceof GeneratedMessageLite.ExtendableMessage;
  }

  @Override
  FieldSet<ExtensionDescriptor> getExtensions(Object message) {
    return ((GeneratedMessageLite.ExtendableMessage<?, ?>) message).extensions;
  }

  @Override
  void setExtensions(Object message, FieldSet<ExtensionDescriptor> extensions) {
    ((GeneratedMessageLite.ExtendableMessage<?, ?>) message).extensions = extensions;
  }

  @Override
  FieldSet<ExtensionDescriptor> getMutableExtensions(Object message) {
    return ((GeneratedMessageLite.ExtendableMessage<?, ?>) message).ensureExtensionsAreMutable();
  }

  @Override
  void makeImmutable(Object message) {
    getExtensions(message).makeImmutable();
  }

  /**
   * Parses one extension field from the reader into {@code extensions}.
   *
   * <p>Packed repeated fields are read as whole lists; scalar/unpacked values
   * are read one at a time. Enum values not found in the descriptor's enum
   * type are stored into {@code unknownFields} instead (possibly replacing it
   * with a new builder), which is why the (possibly updated) unknown-fields
   * container is returned.
   */
  @Override
  <UT, UB> UB parseExtension(
      Reader reader,
      Object extensionObject,
      ExtensionRegistryLite extensionRegistry,
      FieldSet<ExtensionDescriptor> extensions,
      UB unknownFields,
      UnknownFieldSchema<UT, UB> unknownFieldSchema)
      throws IOException {
    GeneratedMessageLite.GeneratedExtension<?, ?> extension =
        (GeneratedMessageLite.GeneratedExtension<?, ?>) extensionObject;
    int fieldNumber = extension.getNumber();

    if (extension.descriptor.isRepeated() && extension.descriptor.isPacked()) {
      // Packed repeated field: read the entire list in one call, then
      // overwrite the field with the new list.
      Object value = null;
      switch (extension.getLiteType()) {
        case DOUBLE:
          {
            List<Double> list = new ArrayList<Double>();
            reader.readDoubleList(list);
            value = list;
            break;
          }
        case FLOAT:
          {
            List<Float> list = new ArrayList<Float>();
            reader.readFloatList(list);
            value = list;
            break;
          }
        case INT64:
          {
            List<Long> list = new ArrayList<Long>();
            reader.readInt64List(list);
            value = list;
            break;
          }
        case UINT64:
          {
            List<Long> list = new ArrayList<Long>();
            reader.readUInt64List(list);
            value = list;
            break;
          }
        case INT32:
          {
            List<Integer> list = new ArrayList<Integer>();
            reader.readInt32List(list);
            value = list;
            break;
          }
        case FIXED64:
          {
            List<Long> list = new ArrayList<Long>();
            reader.readFixed64List(list);
            value = list;
            break;
          }
        case FIXED32:
          {
            List<Integer> list = new ArrayList<Integer>();
            reader.readFixed32List(list);
            value = list;
            break;
          }
        case BOOL:
          {
            List<Boolean> list = new ArrayList<Boolean>();
            reader.readBoolList(list);
            value = list;
            break;
          }
        case UINT32:
          {
            List<Integer> list = new ArrayList<Integer>();
            reader.readUInt32List(list);
            value = list;
            break;
          }
        case SFIXED32:
          {
            List<Integer> list = new ArrayList<Integer>();
            reader.readSFixed32List(list);
            value = list;
            break;
          }
        case SFIXED64:
          {
            List<Long> list = new ArrayList<Long>();
            reader.readSFixed64List(list);
            value = list;
            break;
          }
        case SINT32:
          {
            List<Integer> list = new ArrayList<Integer>();
            reader.readSInt32List(list);
            value = list;
            break;
          }
        case SINT64:
          {
            List<Long> list = new ArrayList<Long>();
            reader.readSInt64List(list);
            value = list;
            break;
          }
        case ENUM:
          {
            List<Integer> list = new ArrayList<Integer>();
            reader.readEnumList(list);
            // Unknown enum numbers are filtered out of the list and moved
            // into unknownFields.
            unknownFields =
                SchemaUtil.filterUnknownEnumList(
                    fieldNumber,
                    list,
                    extension.descriptor.getEnumType(),
                    unknownFields,
                    unknownFieldSchema);
            value = list;
            break;
          }
        default:
          // Length-delimited types (string/bytes/message/group) cannot be packed.
          throw new IllegalStateException(
              "Type cannot be packed: " + extension.descriptor.getLiteType());
      }
      extensions.setField(extension.descriptor, value);
    } else {
      Object value = null;
      // Enum is a special case because unknown enum values will be put into UnknownFieldSetLite.
      if (extension.getLiteType() == WireFormat.FieldType.ENUM) {
        int number = reader.readInt32();
        Object enumValue = extension.descriptor.getEnumType().findValueByNumber(number);
        if (enumValue == null) {
          return SchemaUtil.storeUnknownEnum(
              fieldNumber, number, unknownFields, unknownFieldSchema);
        }
        // Note, we store the integer value instead of the actual enum object in FieldSet.
        // This is also different from full-runtime where we store EnumValueDescriptor.
        value = number;
      } else {
        switch (extension.getLiteType()) {
          case DOUBLE:
            value = reader.readDouble();
            break;
          case FLOAT:
            value = reader.readFloat();
            break;
          case INT64:
            value = reader.readInt64();
            break;
          case UINT64:
            value = reader.readUInt64();
            break;
          case INT32:
            value = reader.readInt32();
            break;
          case FIXED64:
            value = reader.readFixed64();
            break;
          case FIXED32:
            value = reader.readFixed32();
            break;
          case BOOL:
            value = reader.readBool();
            break;
          case BYTES:
            value = reader.readBytes();
            break;
          case UINT32:
            value = reader.readUInt32();
            break;
          case SFIXED32:
            value = reader.readSFixed32();
            break;
          case SFIXED64:
            value = reader.readSFixed64();
            break;
          case SINT32:
            value = reader.readSInt32();
            break;
          case SINT64:
            value = reader.readSInt64();
            break;
          case STRING:
            value = reader.readString();
            break;
          case GROUP:
            value =
                reader.readGroup(
                    extension.getMessageDefaultInstance().getClass(), extensionRegistry);
            break;
          case MESSAGE:
            value =
                reader.readMessage(
                    extension.getMessageDefaultInstance().getClass(), extensionRegistry);
            break;
          case ENUM:
            // Handled in the branch above.
            throw new IllegalStateException("Shouldn't reach here.");
        }
      }
      if (extension.isRepeated()) {
        extensions.addRepeatedField(extension.descriptor, value);
      } else {
        switch (extension.getLiteType()) {
          case MESSAGE:
          case GROUP:
            // A singular sub-message seen again on the wire is merged with
            // the previously parsed value, per proto merge semantics.
            Object oldValue = extensions.getField(extension.descriptor);
            if (oldValue != null) {
              value = Internal.mergeMessage(oldValue, value);
            }
            break;
          default:
            break;
        }
        extensions.setField(extension.descriptor, value);
      }
    }
    return unknownFields;
  }

  @Override
  int extensionNumber(Map.Entry<?, ?> extension) {
    GeneratedMessageLite.ExtensionDescriptor descriptor =
        (GeneratedMessageLite.ExtensionDescriptor) extension.getKey();
    return descriptor.getNumber();
  }

  /**
   * Writes one extension (a FieldSet entry) to the writer, dispatching on
   * repeated vs. singular and on the field's lite type.
   */
  @Override
  void serializeExtension(Writer writer, Map.Entry<?, ?> extension) throws IOException {
    GeneratedMessageLite.ExtensionDescriptor descriptor =
        (GeneratedMessageLite.ExtensionDescriptor) extension.getKey();
    if (descriptor.isRepeated()) {
      switch (descriptor.getLiteType()) {
        case DOUBLE:
          SchemaUtil.writeDoubleList(
              descriptor.getNumber(),
              (List<Double>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case FLOAT:
          SchemaUtil.writeFloatList(
              descriptor.getNumber(),
              (List<Float>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case INT64:
          SchemaUtil.writeInt64List(
              descriptor.getNumber(),
              (List<Long>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case UINT64:
          SchemaUtil.writeUInt64List(
              descriptor.getNumber(),
              (List<Long>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case INT32:
          SchemaUtil.writeInt32List(
              descriptor.getNumber(),
              (List<Integer>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case FIXED64:
          SchemaUtil.writeFixed64List(
              descriptor.getNumber(),
              (List<Long>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case FIXED32:
          SchemaUtil.writeFixed32List(
              descriptor.getNumber(),
              (List<Integer>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case BOOL:
          SchemaUtil.writeBoolList(
              descriptor.getNumber(),
              (List<Boolean>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case BYTES:
          SchemaUtil.writeBytesList(
              descriptor.getNumber(), (List<ByteString>) extension.getValue(), writer);
          break;
        case UINT32:
          SchemaUtil.writeUInt32List(
              descriptor.getNumber(),
              (List<Integer>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case SFIXED32:
          SchemaUtil.writeSFixed32List(
              descriptor.getNumber(),
              (List<Integer>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case SFIXED64:
          SchemaUtil.writeSFixed64List(
              descriptor.getNumber(),
              (List<Long>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case SINT32:
          SchemaUtil.writeSInt32List(
              descriptor.getNumber(),
              (List<Integer>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case SINT64:
          SchemaUtil.writeSInt64List(
              descriptor.getNumber(),
              (List<Long>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case ENUM:
          // Enums are stored as their integer numbers (see parseExtension).
          SchemaUtil.writeInt32List(
              descriptor.getNumber(),
              (List<Integer>) extension.getValue(),
              writer,
              descriptor.isPacked());
          break;
        case STRING:
          SchemaUtil.writeStringList(
              descriptor.getNumber(), (List<String>) extension.getValue(), writer);
          break;
        case GROUP:
          {
            List<?> data = (List<?>) extension.getValue();
            if (data != null && !data.isEmpty()) {
              // Schema is looked up from the first element's class; all
              // elements of a repeated field share one message type.
              SchemaUtil.writeGroupList(
                  descriptor.getNumber(),
                  (List<?>) extension.getValue(),
                  writer,
                  Protobuf.getInstance().schemaFor(data.get(0).getClass()));
            }
          }
          break;
        case MESSAGE:
          {
            List<?> data = (List<?>) extension.getValue();
            if (data != null && !data.isEmpty()) {
              SchemaUtil.writeMessageList(
                  descriptor.getNumber(),
                  (List<?>) extension.getValue(),
                  writer,
                  Protobuf.getInstance().schemaFor(data.get(0).getClass()));
            }
          }
          break;
      }
    } else {
      switch (descriptor.getLiteType()) {
        case DOUBLE:
          writer.writeDouble(descriptor.getNumber(), (Double) extension.getValue());
          break;
        case FLOAT:
          writer.writeFloat(descriptor.getNumber(), (Float) extension.getValue());
          break;
        case INT64:
          writer.writeInt64(descriptor.getNumber(), (Long) extension.getValue());
          break;
        case UINT64:
          writer.writeUInt64(descriptor.getNumber(), (Long) extension.getValue());
          break;
        case INT32:
          writer.writeInt32(descriptor.getNumber(), (Integer) extension.getValue());
          break;
        case FIXED64:
          writer.writeFixed64(descriptor.getNumber(), (Long) extension.getValue());
          break;
        case FIXED32:
          writer.writeFixed32(descriptor.getNumber(), (Integer) extension.getValue());
          break;
        case BOOL:
          writer.writeBool(descriptor.getNumber(), (Boolean) extension.getValue());
          break;
        case BYTES:
          writer.writeBytes(descriptor.getNumber(), (ByteString) extension.getValue());
          break;
        case UINT32:
          writer.writeUInt32(descriptor.getNumber(), (Integer) extension.getValue());
          break;
        case SFIXED32:
          writer.writeSFixed32(descriptor.getNumber(), (Integer) extension.getValue());
          break;
        case SFIXED64:
          writer.writeSFixed64(descriptor.getNumber(), (Long) extension.getValue());
          break;
        case SINT32:
          writer.writeSInt32(descriptor.getNumber(), (Integer) extension.getValue());
          break;
        case SINT64:
          writer.writeSInt64(descriptor.getNumber(), (Long) extension.getValue());
          break;
        case ENUM:
          writer.writeInt32(descriptor.getNumber(), (Integer) extension.getValue());
          break;
        case STRING:
          writer.writeString(descriptor.getNumber(), (String) extension.getValue());
          break;
        case GROUP:
          writer.writeGroup(
              descriptor.getNumber(),
              extension.getValue(),
              Protobuf.getInstance().schemaFor(extension.getValue().getClass()));
          break;
        case MESSAGE:
          writer.writeMessage(
              descriptor.getNumber(),
              extension.getValue(),
              Protobuf.getInstance().schemaFor(extension.getValue().getClass()));
          break;
      }
    }
  }

  @Override
  Object findExtensionByNumber(
      ExtensionRegistryLite extensionRegistry, MessageLite defaultInstance, int number) {
    return extensionRegistry.findLiteExtensionByNumber(defaultInstance, number);
  }

  /** Parses a standard length-prefixed MessageSet item payload directly into the field set. */
  @Override
  void parseLengthPrefixedMessageSetItem(
      Reader reader,
      Object extensionObject,
      ExtensionRegistryLite extensionRegistry,
      FieldSet<ExtensionDescriptor> extensions)
      throws IOException {
    GeneratedMessageLite.GeneratedExtension<?, ?> extension =
        (GeneratedMessageLite.GeneratedExtension<?, ?>) extensionObject;
    Object value =
        reader.readMessage(extension.getMessageDefaultInstance().getClass(), extensionRegistry);
    extensions.setField(extension.descriptor, value);
  }

  /**
   * Parses a MessageSet item whose payload arrived as raw bytes (e.g. the
   * message bytes preceded the type-id in the stream), by re-reading the
   * bytes through a fresh BinaryReader.
   */
  @Override
  void parseMessageSetItem(
      ByteString data,
      Object extensionObject,
      ExtensionRegistryLite extensionRegistry,
      FieldSet<ExtensionDescriptor> extensions)
      throws IOException {
    GeneratedMessageLite.GeneratedExtension<?, ?> extension =
        (GeneratedMessageLite.GeneratedExtension<?, ?>) extensionObject;
    Object value = extension.getMessageDefaultInstance().newBuilderForType().buildPartial();

    Reader reader = BinaryReader.newInstance(ByteBuffer.wrap(data.toByteArray()), true);
    Protobuf.getInstance().mergeFrom(value, reader, extensionRegistry);
    extensions.setField(extension.descriptor, value);

    // The payload must be fully consumed; a leftover field means a bad end tag.
    if (reader.getFieldNumber() != Reader.READ_DONE) {
      throw InvalidProtocolBufferException.invalidEndTag();
    }
  }
}
/*
 * Exp.java
 * ---------------------------------
 * Copyright (c) 2021
 * RESOLVE Software Research Group
 * School of Computing
 * Clemson University
 * All rights reserved.
 * ---------------------------------
 * This file is subject to the terms and conditions defined in
 * file 'LICENSE.txt', which is part of this source code package.
 */
package edu.clemson.rsrg.absyn.expressions;

import edu.clemson.rsrg.absyn.ResolveConceptualElement;
import edu.clemson.rsrg.absyn.expressions.mathexpr.VarExp;
import edu.clemson.rsrg.parsing.data.Location;
import edu.clemson.rsrg.parsing.data.LocationDetailModel;
import edu.clemson.rsrg.parsing.data.PosSymbol;
import edu.clemson.rsrg.statushandling.exception.MiscErrorException;
import edu.clemson.rsrg.typeandpopulate.mathtypes.MTType;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * <p>
 * This is the abstract base class for all the expression objects that the compiler builds using the ANTLR4 AST nodes.
 * </p>
 *
 * @version 2.0
 */
public abstract class Exp extends ResolveConceptualElement {

    // ===========================================================
    // Member Fields
    // ===========================================================

    /**
     * <p>
     * An object that contains additional information on where this expression came from. This should be added by the
     * {@code VCGenerator} when applying the various different {@code proof rules}.
     * </p>
     *
     * <p>
     * Note: We were trying to hard to make things immutable. There are things that simply can't be immutable until we
     * are done creating objects. This is probably the best compromise given the design decision of the {@code Exp}
     * hierarchy.
     * </p>
     */
    private LocationDetailModel myLocationDetailModel = null;

    /**
     * <p>
     * The object's mathematical type.
     * </p>
     */
    protected MTType myMathType = null;

    /**
     * <p>
     * The object's mathematical type value.
     * </p>
     */
    protected MTType myMathTypeValue = null;

    // ===========================================================
    // Constructors
    // ===========================================================

    /**
     * <p>
     * An helper constructor that allow us to store the location of any objects created from a class that inherits from
     * {@code Exp}.
     * </p>
     *
     * @param l
     *            A {@link Location} representation object.
     */
    protected Exp(Location l) {
        super(l);
    }

    // ===========================================================
    // Public Methods
    // ===========================================================

    /**
     * <p>
     * This method overrides the default clone method implementation for all the classes that extend from {@link Exp}.
     * </p>
     *
     * @return A deep copy of the object.
     */
    @Override
    public Exp clone() {
        // Delegate the structural copy to the subclass, then carry over the
        // type information and (cloned) location detail model.
        Exp result = this.copy();
        result.setMathType(myMathType);
        result.setMathTypeValue(myMathTypeValue);

        // Copy any location detail models
        if (myLocationDetailModel != null) {
            result.setLocationDetailModel(myLocationDetailModel.clone());
        }

        return result;
    }

    /**
     * <p>
     * Compares to see if the expression matches this object.
     * </p>
     *
     * @param exp
     *            A {@link Exp} to compare.
     *
     * @return A {@link VarExp} containing {@code true} if it is exactly the same, otherwise just return a deep copy our
     *         ourselves.
     */
    public Exp compareWithAssumptions(Exp exp) {
        Exp retExp;
        if (this.equivalent(exp)) {
            retExp = VarExp.getTrueVarExp(myLoc, myMathType.getTypeGraph());
        } else {
            retExp = this.clone();
        }

        return retExp;
    }

    /**
     * <p>
     * This method must be implemented by all inherited classes to attempt to find the provided expression in our
     * sub-expressions.
     * </p>
     *
     * @param exp
     *            The expression we wish to locate.
     *
     * @return {@code true} if there is an instance of {@code exp} within this object's sub-expressions. {@code false}
     *         otherwise.
     */
    public abstract boolean containsExp(Exp exp);

    /**
     * <p>
     * This method attempts to find an expression with the given name in our sub-expressions. This method is only
     * invoked by a mathematical expression, but since we could have either mathematical or programming expressions, the
     * default behavior is to return {@code false}.
     * </p>
     *
     * <p>
     * Any inherited mathematical expressions must override this method to make this method work.
     * </p>
     *
     * @param varName
     *            Expression name.
     * @param IsOldExp
     *            Flag to indicate if the given name is of the form {@code #[varName]}
     *
     * @return False.
     */
    public boolean containsVar(String varName, boolean IsOldExp) {
        return false;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;

        Exp exp = (Exp) o;

        // YS: Note that this check should be in here for completeness,
        // but so many things have been built without checking for
        // location equality. At some point someone should add this
        // back in and make sure everything still works as intended!
        /*
         * if (myLoc != null ? !myLoc.equals(exp.myLoc) : exp.myLoc != null) return false;
         */

        if (myLocationDetailModel != null ? !myLocationDetailModel.equals(exp.myLocationDetailModel)
                : exp.myLocationDetailModel != null)
            return false;
        if (myMathType != null ? !myMathType.equals(exp.myMathType) : exp.myMathType != null)
            return false;
        return myMathTypeValue != null ? myMathTypeValue.equals(exp.myMathTypeValue) : exp.myMathTypeValue == null;
    }

    /**
     * <p>
     * Shallow compare is too weak for many things, and equals() is too strict. This method returns {@code true}
     * <strong>iff</strong> this expression and the provided expression, {@code e}, are equivalent with respect to
     * structure and all function and variable names.
     * </p>
     *
     * @param e
     *            The expression to compare this one to.
     *
     * @return True <strong>iff</strong> this expression and the provided expression are equivalent with respect to
     *         structure and all function and variable names.
     */
    public boolean equivalent(Exp e) {
        // Subclasses that participate in equivalence checking must override this
        // method; reaching this default implementation is always an error.
        // (A leftover debug System.out.println(e.toString()) was removed here —
        // it polluted stdout and threw NullPointerException on a null argument
        // before the intended UnsupportedOperationException could be raised.)
        throw new UnsupportedOperationException(
                "Equivalence for classes of type " + this.getClass() + " is not currently supported.");
    }

    /**
     * <p>
     * Helper method to deal with {@link Exp}s that need to be compared but might be {@code null}. Returns {@code true}
     * <strong>iff</strong> {@code e1} and {@code e2} are both {@code null} or both are not {@code null} and equivalent.
     * </p>
     *
     * @param e1
     *            The first {@link Exp}.
     * @param e2
     *            The second {@link Exp}.
     *
     * @return {@code true} <strong>iff</strong> both {@link Exp}s are {@code null}; or both are not {@code null} and
     *         are equivalent.
     */
    public static boolean equivalent(Exp e1, Exp e2) {
        return !((e1 == null ^ e2 == null)) && ((e1 == null && e2 == null) || e1.equivalent(e2));
    }

    /**
     * <p>
     * This method gets the location details associated with this object.
     * </p>
     *
     * @return A {@link LocationDetailModel} object.
     */
    public final LocationDetailModel getLocationDetailModel() {
        return myLocationDetailModel;
    }

    /**
     * <p>
     * This method gets the mathematical type associated with this object.
     * </p>
     *
     * @return The {@link MTType} type object.
     */
    public final MTType getMathType() {
        return myMathType;
    }

    /**
     * <p>
     * This method gets the mathematical type value associated with this object.
     * </p>
     *
     * @return The {@link MTType} type object.
     */
    public final MTType getMathTypeValue() {
        return myMathTypeValue;
    }

    /**
     * <p>
     * This method must be implemented by all inherited classes to return the list of sub-expressions.
     * </p>
     *
     * @return A list containing {@link Exp} type objects.
     */
    public abstract List<Exp> getSubExpressions();

    /**
     * {@inheritDoc}
     */
    @Override
    public int hashCode() {
        int result = myLocationDetailModel != null ? myLocationDetailModel.hashCode() : 0;
        result = 31 * result + (myMathType != null ? myMathType.hashCode() : 0);
        result = 31 * result + (myMathTypeValue != null ? myMathTypeValue.hashCode() : 0);
        return result;
    }

    /**
     * <p>
     * Helper method to deal with {@link PosSymbol}s that need to be compared but might be {@code null}. Returns
     * {@code true} <strong>iff</strong> {@code s1} and {@code s2} are both {@code null} or both are not {@code null}
     * and have names that are equivalent strings (see {@link Exp#stringEquivalent(String, String)})
     * </p>
     *
     * @param s1
     *            The first {@link PosSymbol}.
     * @param s2
     *            The second {@link PosSymbol}.
     *
     * @return {@code true} <strong>iff</strong> both {@link PosSymbol}s are {@code null}; or both are not {@code null}
     *         and have names that are equivalent strings (see {@link Exp#stringEquivalent(String, String)}).
     */
    public static boolean posSymbolEquivalent(PosSymbol s1, PosSymbol s2) {
        // The first line makes sure that either both s1 and s2 are null or
        // neither is. If not, we short circuit with "false".
        // The second line short circuits and returns "true" if both are null.
        // The third line performs the string comparison.
        return !((s1 == null) ^ (s2 == null))
                && ((s1 == null && s2 == null) || (stringEquivalent(s1.getName(), s2.getName())));
    }

    /**
     * <p>
     * This method sets the location details associated with this object.
     * </p>
     *
     * @param locationDetailModel
     *            A {@link LocationDetailModel} object.
     */
    public final void setLocationDetailModel(LocationDetailModel locationDetailModel) {
        myLocationDetailModel = locationDetailModel;
    }

    /**
     * <p>
     * This method sets the mathematical type associated with this object.
     * </p>
     *
     * @param mathType
     *            The {@link MTType} type object.
     */
    public void setMathType(MTType mathType) {
        myMathType = mathType;
    }

    /**
     * <p>
     * This method sets the mathematical type value associated with this object.
     * </p>
     *
     * @param mathTypeValue
     *            The {@link MTType} type object.
     */
    public void setMathTypeValue(MTType mathTypeValue) {
        myMathTypeValue = mathTypeValue;
    }

    /**
     * <p>
     * Helper method to deal with strings that need to be compared but might be {@code null}. Returns {@code true}
     * <strong>iff</strong> {@code s1} and {@code s2} are both {@code null} or both are not {@code null} and represent
     * the same string (case sensitive).
     * </p>
     *
     * @param s1
     *            The first string.
     * @param s2
     *            The second string.
     *
     * @return {@code true} <strong>iff</strong> both string are {@code null}; or both are not {@code null} and
     *         represent the same string.
     */
    public static boolean stringEquivalent(String s1, String s2) {
        // The first line makes sure that either both s1 and s2 are null or
        // neither is. If not, we short circuit with "false".
        // The second line short circuits and returns "true" if both are null.
        // The third line performs the string comparison.
        return !((s1 == null) ^ (s2 == null)) && ((s1 == null && s2 == null) || (s1.equals(s2)));
    }

    /**
     * <p>
     * Returns a DEEP COPY of this expression, with all instances of {@link Exp}s that occur as keys in
     * {@code substitutions} replaced with their corresponding values.
     * </p>
     *
     * <p>
     * In general, a key {@link Exp} "occurs" in this {@link Exp} if either this {@link Exp} or some sub-expression is
     * {@link Exp#equivalent(Exp)}. However, if the key is a {@link VarExp} function names are additionally matched,
     * even though they would not ordinarily match via {@link Exp#equivalent(Exp)}, so function names can be substituted
     * without affecting their arguments.
     * </p>
     *
     * @param substitutions
     *            A mapping from {@link Exp}s that should be substituted out to the {@link Exp} that should replace
     *            them.
     *
     * @return A new {@link Exp} that is a deep copy of the original with the provided substitutions made.
     */
    public final Exp substitute(Map<Exp, Exp> substitutions) {
        Exp retval;

        boolean match = false;
        Map.Entry<Exp, Exp> curEntry = null;
        if (substitutions.size() > 0) {
            // Scan the map for a key equivalent to this expression; stop at the
            // first match (map iteration order decides ties between keys).
            Set<Map.Entry<Exp, Exp>> entries = substitutions.entrySet();
            Iterator<Map.Entry<Exp, Exp>> entryIter = entries.iterator();
            while (entryIter.hasNext() && !match) {
                curEntry = entryIter.next();
                match = curEntry.getKey().equivalent(this);
            }

            if (match) {
                // NOTE(review): the replacement is returned as-is (not cloned), so
                // the same map value may end up shared by multiple parents.
                retval = curEntry.getValue();
            } else {
                // No key matched this node; recurse into the children and
                // re-attach this node's type/location information to the copy.
                retval = substituteChildren(substitutions);
                retval.setMathType(myMathType);
                retval.setMathTypeValue(myMathTypeValue);

                // Copy the location detail model if it is not null
                if (myLocationDetailModel != null) {
                    retval.setLocationDetailModel(myLocationDetailModel.clone());
                }
            }
        } else {
            retval = this.clone();
        }

        return retval;
    }

    // ===========================================================
    // Protected Methods
    // ===========================================================

    /**
     * <p>
     * Implemented by concrete subclasses of {@link Exp} to manufacture a copy of themselves.
     * </p>
     *
     * @return A new {@link Exp} that is a deep copy of the original.
     */
    protected Exp copy() {
        throw new MiscErrorException("Shouldn't be calling copy() from type " + this.getClass(),
                new CloneNotSupportedException());
    }

    /**
     * <p>
     * A static helper method that calls substitute method using {@code e}.
     * </p>
     *
     * @param e
     *            The original {@link Exp}.
     * @param substitutions
     *            A mapping from {@link Exp}s that should be substituted out to the {@link Exp} that should replace
     *            them.
     *
     * @return A new {@link Exp} that is a deep copy of the original with the provided substitutions made.
     */
    protected static Exp substitute(Exp e, Map<Exp, Exp> substitutions) {
        Exp retval;

        if (e == null) {
            retval = null;
        } else {
            retval = e.substitute(substitutions);
        }

        return retval;
    }

    /**
     * <p>
     * Implemented by concrete subclasses of {@link Exp} to manufacture a copy of themselves where all sub-expressions
     * have been appropriately substituted. The concrete subclass may assume that {@code this} does not match any key in
     * {@code substitutions} and thus need only concern itself with performing substitutions in its children.
     * </p>
     *
     * @param substitutions
     *            A mapping from {@link Exp}s that should be substituted out to the {@link Exp} that should replace
     *            them.
     *
     * @return A new {@link Exp} that is a deep copy of the original with the provided substitutions made.
     */
    protected abstract Exp substituteChildren(Map<Exp, Exp> substitutions);

}
/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.NavigableSet;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.regionserver.Store.ScanInfo;
import org.apache.hadoop.hbase.regionserver.metrics.RegionMetricsStorage;
import org.apache.hadoop.hbase.regionserver.metrics.SchemaMetrics;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

/**
 * Scanner scans both the memstore and the HStore. Coalesce KeyValue stream
 * into List<KeyValue> for a single row.
 *
 * Thread-safety: all public state transitions go through synchronized
 * methods; the "closing" flag therefore does not need to be volatile.
 */
public class StoreScanner extends NonLazyKeyValueScanner
    implements KeyValueScanner, InternalScanner, ChangedReadersObserver {
  static final Log LOG = LogFactory.getLog(StoreScanner.class);
  // The store being scanned; null only under test (see testing constructors).
  private Store store;
  // Decides, per KeyValue, whether to include/skip/seek (column, version, TTL...).
  private ScanQueryMatcher matcher;
  // Merged view over all per-source scanners; null means "closed or needs reseek".
  private KeyValueHeap heap;
  private boolean cacheBlocks;
  private String metricNamePrefix;
  // Used to indicate that the scanner has closed (see HBASE-1107)
  // Doesnt need to be volatile because it's always accessed via synchronized methods
  private boolean closing = false;
  private final boolean isGet;
  private final boolean explicitColumnQuery;
  private final boolean useRowColBloom;
  private final Scan scan;
  private final NavigableSet<byte[]> columns;
  // Cells with a timestamp older than this are expired by TTL.
  private final long oldestUnexpiredTS;
  private final int minVersions;

  /** We don't ever expect to change this, the constant is just for clarity. */
  static final boolean LAZY_SEEK_ENABLED_BY_DEFAULT = true;

  /** Used during unit testing to ensure that lazy seek does save seek ops */
  private static boolean lazySeekEnabledGlobally =
      LAZY_SEEK_ENABLED_BY_DEFAULT;

  // if heap == null and lastTop != null, you need to reseek given the key below
  private KeyValue lastTop = null;

  /** An internal constructor. */
  private StoreScanner(Store store, boolean cacheBlocks, Scan scan,
      final NavigableSet<byte[]> columns, long ttl, int minVersions) {
    this.store = store;
    this.cacheBlocks = cacheBlocks;
    isGet = scan.isGetScan();
    int numCol = columns == null ? 0 : columns.size();
    explicitColumnQuery = numCol > 0;
    this.scan = scan;
    this.columns = columns;
    oldestUnexpiredTS = EnvironmentEdgeManager.currentTimeMillis() - ttl;
    this.minVersions = minVersions;

    // We look up row-column Bloom filters for multi-column queries as part of
    // the seek operation. However, we also look the row-column Bloom filter
    // for multi-row (non-"get") scans because this is not done in
    // StoreFile.passesBloomFilter(Scan, SortedSet<byte[]>).
    useRowColBloom = numCol > 1 || (!isGet && numCol == 1);
  }

  /**
   * Opens a scanner across memstore, snapshot, and all StoreFiles. Assumes we
   * are not in a compaction.
   *
   * @param store who we scan
   * @param scan the spec
   * @param columns which columns we are scanning
   * @throws IOException
   */
  public StoreScanner(Store store, ScanInfo scanInfo, Scan scan,
      final NavigableSet<byte[]> columns) throws IOException {
    this(store, scan.getCacheBlocks(), scan, columns, scanInfo.getTtl(),
        scanInfo.getMinVersions());
    initializeMetricNames();
    // Raw scans must see every cell (including deletes), so restricting
    // columns would contradict the request.
    if (columns != null && scan.isRaw()) {
      throw new DoNotRetryIOException(
          "Cannot specify any column for a raw scan");
    }
    matcher = new ScanQueryMatcher(scan, scanInfo, columns,
        ScanType.USER_SCAN, Long.MAX_VALUE, HConstants.LATEST_TIMESTAMP,
        oldestUnexpiredTS);

    // Pass columns to try to filter out unnecessary StoreFiles.
    List<KeyValueScanner> scanners = getScannersNoCompaction();

    // Seek all scanners to the start of the Row (or if the exact matching row
    // key does not exist, then to the start of the next matching Row).
    // Always check bloom filter to optimize the top row seek for delete
    // family marker.
    if (explicitColumnQuery && lazySeekEnabledGlobally) {
      for (KeyValueScanner scanner : scanners) {
        scanner.requestSeek(matcher.getStartKey(), false, true);
      }
    } else {
      for (KeyValueScanner scanner : scanners) {
        scanner.seek(matcher.getStartKey());
      }
    }

    // Combine all seeked scanners with a heap
    heap = new KeyValueHeap(scanners, store.comparator);

    // Register so updateReaders() is invoked when the store's file set changes.
    this.store.addChangedReaderObserver(this);
  }

  /**
   * Used for major compactions.<p>
   *
   * Opens a scanner across specified StoreFiles.
   * @param store who we scan
   * @param scan the spec
   * @param scanners ancillary scanners
   * @param smallestReadPoint the readPoint that we should use for tracking
   *          versions
   */
  public StoreScanner(Store store, ScanInfo scanInfo, Scan scan,
      List<? extends KeyValueScanner> scanners, ScanType scanType,
      long smallestReadPoint, long earliestPutTs) throws IOException {
    this(store, false, scan, null, scanInfo.getTtl(),
        scanInfo.getMinVersions());
    initializeMetricNames();
    matcher = new ScanQueryMatcher(scan, scanInfo, null, scanType,
        smallestReadPoint, earliestPutTs, oldestUnexpiredTS);

    // Filter the list of scanners using Bloom filters, time range, TTL, etc.
    scanners = selectScannersFrom(scanners);

    // Seek all scanners to the initial key
    for(KeyValueScanner scanner : scanners) {
      scanner.seek(matcher.getStartKey());
    }

    // Combine all seeked scanners with a heap
    heap = new KeyValueHeap(scanners, store.comparator);
  }

  /** Constructor for testing. */
  StoreScanner(final Scan scan, Store.ScanInfo scanInfo,
      ScanType scanType, final NavigableSet<byte[]> columns,
      final List<KeyValueScanner> scanners) throws IOException {
    this(scan, scanInfo, scanType, columns, scanners,
        HConstants.LATEST_TIMESTAMP);
  }

  // Constructor for testing.
  StoreScanner(final Scan scan, Store.ScanInfo scanInfo,
      ScanType scanType, final NavigableSet<byte[]> columns,
      final List<KeyValueScanner> scanners, long earliestPutTs)
          throws IOException {
    // Note: store is null here, so later code must (and does) null-check it.
    this(null, scan.getCacheBlocks(), scan, columns, scanInfo.getTtl(),
        scanInfo.getMinVersions());
    this.initializeMetricNames();
    this.matcher = new ScanQueryMatcher(scan, scanInfo, columns, scanType,
        Long.MAX_VALUE, earliestPutTs, oldestUnexpiredTS);

    // Seek all scanners to the initial key
    for (KeyValueScanner scanner : scanners) {
      scanner.seek(matcher.getStartKey());
    }
    heap = new KeyValueHeap(scanners, scanInfo.getComparator());
  }

  /**
   * Method used internally to initialize metric names throughout the
   * constructors.
   *
   * To be called after the store variable has been initialized!
   */
  private void initializeMetricNames() {
    String tableName = SchemaMetrics.UNKNOWN;
    String family = SchemaMetrics.UNKNOWN;
    if (store != null) {
      tableName = store.getTableName();
      family = Bytes.toString(store.getFamily().getName());
    }
    this.metricNamePrefix =
        SchemaMetrics.generateSchemaMetricsPrefix(tableName, family);
  }

  /**
   * Get a filtered list of scanners. Assumes we are not in a compaction.
   * @return list of scanners to seek
   */
  private List<KeyValueScanner> getScannersNoCompaction() throws IOException {
    final boolean isCompaction = false;
    return selectScannersFrom(store.getScanners(cacheBlocks, isGet,
        isCompaction, matcher));
  }

  /**
   * Filters the given list of scanners using Bloom filter, time range, and
   * TTL.
   */
  private List<KeyValueScanner> selectScannersFrom(
      final List<? extends KeyValueScanner> allScanners) {
    boolean memOnly;
    boolean filesOnly;
    // InternalScan can restrict the scan to only the memstore or only files.
    if (scan instanceof InternalScan) {
      InternalScan iscan = (InternalScan)scan;
      memOnly = iscan.isCheckOnlyMemStore();
      filesOnly = iscan.isCheckOnlyStoreFiles();
    } else {
      memOnly = false;
      filesOnly = false;
    }

    List<KeyValueScanner> scanners =
        new ArrayList<KeyValueScanner>(allScanners.size());

    // We can only exclude store files based on TTL if minVersions is set to 0.
    // Otherwise, we might have to return KVs that have technically expired.
    long expiredTimestampCutoff = minVersions == 0 ? oldestUnexpiredTS :
        Long.MIN_VALUE;

    // include only those scan files which pass all filters
    for (KeyValueScanner kvs : allScanners) {
      boolean isFile = kvs.isFileScanner();
      if ((!isFile && filesOnly) || (isFile && memOnly)) {
        continue;
      }

      if (kvs.shouldUseScanner(scan, columns, expiredTimestampCutoff)) {
        scanners.add(kvs);
      }
    }
    return scanners;
  }

  /**
   * Peeks at the current top of the heap, or at the remembered top key when
   * the heap has been torn down by updateReaders() (heap == null).
   */
  @Override
  public synchronized KeyValue peek() {
    if (this.heap == null) {
      return this.lastTop;
    }
    return this.heap.peek();
  }

  @Override
  public KeyValue next() {
    // throw runtime exception perhaps?
    throw new RuntimeException("Never call StoreScanner.next()");
  }

  @Override
  public synchronized void close() {
    if (this.closing) return;
    this.closing = true;
    // under test, we dont have a this.store
    if (this.store != null)
      this.store.deleteChangedReaderObserver(this);
    if (this.heap != null)
      this.heap.close();
    this.heap = null; // CLOSED!
    this.lastTop = null; // If both are null, we are closed.
  }

  @Override
  public synchronized boolean seek(KeyValue key) throws IOException {
    // Lazily rebuild the scanner stack if it was dropped (e.g. after a flush).
    if (this.heap == null) {

      List<KeyValueScanner> scanners = getScannersNoCompaction();

      heap = new KeyValueHeap(scanners, store.comparator);
    }

    return this.heap.seek(key);
  }

  /**
   * Get the next row of values from this Store.
   * @param outResult
   * @param limit
   * @return true if there are more rows, false if scanner is done
   */
  @Override
  public synchronized boolean next(List<KeyValue> outResult, int limit) throws IOException {
    return next(outResult, limit, null);
  }

  /**
   * Get the next row of values from this Store.
   * @param outResult
   * @param limit
   * @return true if there are more rows, false if scanner is done
   */
  @Override
  public synchronized boolean next(List<KeyValue> outResult, int limit,
      String metric) throws IOException {

    // Re-seek the scanner stack if readers changed since the last call; a
    // changed top key means the caller should retry from the heap.
    if (checkReseek()) {
      return true;
    }

    // if the heap was left null, then the scanners had previously run out anyways, close and
    // return.
    if (this.heap == null) {
      close();
      return false;
    }

    KeyValue peeked = this.heap.peek();
    if (peeked == null) {
      close();
      return false;
    }

    // only call setRow if the row changes; avoids confusing the query matcher
    // if scanning intra-row
    if ((matcher.row == null) || !peeked.matchingRow(matcher.row)) {
      matcher.setRow(peeked.getRow());
    }

    KeyValue kv;
    KeyValue prevKV = null;

    // Only do a sanity-check if store and comparator are available.
    KeyValue.KVComparator comparator =
        store != null ? store.getComparator() : null;

    long cumulativeMetric = 0;
    int count = 0;
    try {
      LOOP: while((kv = this.heap.peek()) != null) {
        // Check that the heap gives us KVs in an increasing order.
        assert prevKV == null || comparator == null || comparator.compare(prevKV, kv) <= 0 :
          "Key " + prevKV + " followed by a " + "smaller key " + kv + " in cf " + store;
        prevKV = kv;
        ScanQueryMatcher.MatchCode qcode = matcher.match(kv);
        switch(qcode) {
          case INCLUDE:
          case INCLUDE_AND_SEEK_NEXT_ROW:
          case INCLUDE_AND_SEEK_NEXT_COL:

            // Let the filter transform the cell before emitting it, if any.
            Filter f = matcher.getFilter();
            outResult.add(f == null ? kv : f.transform(kv));
            count++;

            if (qcode == ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_ROW) {
              if (!matcher.moreRowsMayExistAfter(kv)) {
                return false;
              }
              reseek(matcher.getKeyForNextRow(kv));
            } else if (qcode == ScanQueryMatcher.MatchCode.INCLUDE_AND_SEEK_NEXT_COL) {
              reseek(matcher.getKeyForNextColumn(kv));
            } else {
              this.heap.next();
            }

            cumulativeMetric += kv.getLength();
            if (limit > 0 && (count == limit)) {
              break LOOP;
            }
            continue;

          case DONE:
            return true;

          case DONE_SCAN:
            close();

            return false;

          case SEEK_NEXT_ROW:
            // This is just a relatively simple end of scan fix, to short-cut end
            // us if there is an endKey in the scan.
            if (!matcher.moreRowsMayExistAfter(kv)) {
              return false;
            }

            reseek(matcher.getKeyForNextRow(kv));
            break;

          case SEEK_NEXT_COL:
            reseek(matcher.getKeyForNextColumn(kv));
            break;

          case SKIP:
            this.heap.next();
            break;

          case SEEK_NEXT_USING_HINT:
            KeyValue nextKV = matcher.getNextKeyHint(kv);
            if (nextKV != null) {
              reseek(nextKV);
            } else {
              heap.next();
            }
            break;

          default:
            throw new RuntimeException("UNEXPECTED");
        }
      }
    } finally {
      // Record bytes returned even when we exit via return/exception above.
      if (cumulativeMetric > 0 && metric != null) {
        RegionMetricsStorage.incrNumericMetric(this.metricNamePrefix + metric,
            cumulativeMetric);
      }
    }

    if (count > 0) {
      return true;
    }

    // No more keys
    close();
    return false;
  }

  @Override
  public synchronized boolean next(List<KeyValue> outResult) throws IOException {
    return next(outResult, -1, null);
  }

  @Override
  public synchronized boolean next(List<KeyValue> outResult, String metric)
      throws IOException {
    return next(outResult, -1, metric);
  }

  // Implementation of ChangedReadersObserver
  @Override
  public synchronized void updateReaders() throws IOException {
    if (this.closing) return;

    // All public synchronized API calls will call 'checkReseek' which will cause
    // the scanner stack to reseek if this.heap==null && this.lastTop != null.
    // But if two calls to updateReaders() happen without a 'next' or 'peek' then we
    // will end up calling this.peek() which would cause a reseek in the middle of a updateReaders
    // which is NOT what we want, not to mention could cause an NPE. So we early out here.
    if (this.heap == null) return;

    // this could be null.
    this.lastTop = this.peek();

    //DebugPrint.println("SS updateReaders, topKey = " + lastTop);

    // close scanners to old obsolete Store files
    this.heap.close(); // bubble thru and close all scanners.
    this.heap = null; // the re-seeks could be slow (access HDFS) free up memory ASAP

    // Let the next() call handle re-creating and seeking
  }

  /**
   * @return true if top of heap has changed (and KeyValueHeap has to try the
   *         next KV)
   * @throws IOException
   */
  private boolean checkReseek() throws IOException {
    if (this.heap == null && this.lastTop != null) {
      resetScannerStack(this.lastTop);
      // If the new stack's top row differs from the remembered one, the
      // caller must re-drive the scan from the heap.
      if (this.heap.peek() == null
          || store.comparator.compareRows(this.lastTop, this.heap.peek()) != 0) {
        LOG.debug("Storescanner.peek() is changed where before = "
            + this.lastTop.toString() + ",and after = " + this.heap.peek());
        this.lastTop = null;
        return true;
      }
      this.lastTop = null; // gone!
    }
    // else dont need to reseek
    return false;
  }

  // Rebuilds the heap over a fresh set of scanners seeked to lastTopKey; only
  // legal when the old heap has already been torn down (heap == null).
  private void resetScannerStack(KeyValue lastTopKey) throws IOException {
    if (heap != null) {
      throw new RuntimeException("StoreScanner.reseek run on an existing heap!");
    }

    /* When we have the scan object, should we not pass it to getScanners()
     * to get a limited set of scanners? We did so in the constructor and we
     * could have done it now by storing the scan object from the constructor */
    List<KeyValueScanner> scanners = getScannersNoCompaction();

    for(KeyValueScanner scanner : scanners) {
      scanner.seek(lastTopKey);
    }

    // Combine all seeked scanners with a heap
    heap = new KeyValueHeap(scanners, store.comparator);

    // Reset the state of the Query Matcher and set to top row.
    // Only reset and call setRow if the row changes; avoids confusing the
    // query matcher if scanning intra-row.
    KeyValue kv = heap.peek();
    if (kv == null) {
      kv = lastTopKey;
    }
    if ((matcher.row == null) || !kv.matchingRow(matcher.row)) {
      matcher.reset();
      matcher.setRow(kv.getRow());
    }
  }

  @Override
  public synchronized boolean reseek(KeyValue kv) throws IOException {
    //Heap cannot be null, because this is only called from next() which
    //guarantees that heap will never be null before this call.
    if (explicitColumnQuery && lazySeekEnabledGlobally) {
      return heap.requestSeek(kv, true, useRowColBloom);
    } else {
      return heap.reseek(kv);
    }
  }

  @Override
  public long getSequenceID() {
    return 0;
  }

  /**
   * Used in testing.
   * @return all scanners in no particular order
   */
  List<KeyValueScanner> getAllScannersForTesting() {
    List<KeyValueScanner> allScanners = new ArrayList<KeyValueScanner>();
    KeyValueScanner current = heap.getCurrentForTesting();
    if (current != null)
      allScanners.add(current);
    for (KeyValueScanner scanner : heap.getHeap())
      allScanners.add(scanner);
    return allScanners;
  }

  static void enableLazySeekGlobally(boolean enable) {
    lazySeekEnabledGlobally = enable;
  }
}
/******************************************************************************* * * Pentaho Big Data * * Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package com.pentaho.big.data.bundles.impl.shim.hdfs; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.security.AccessControlException; import org.pentaho.bigdata.api.hdfs.HadoopFileStatus; import org.pentaho.bigdata.api.hdfs.HadoopFileSystem; import org.pentaho.bigdata.api.hdfs.HadoopFileSystemPath; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; /** * Created by bryan on 5/28/15. 
*/
public class HadoopFileSystemImpl implements HadoopFileSystem {

  // Supplier of the live org.apache.hadoop.fs.FileSystem; resolved freshly on every
  // operation via getFileSystem() rather than cached here.
  private HadoopFileSystemCallable hadoopFileSystemCallable;

  public HadoopFileSystemImpl( HadoopFileSystemCallable hadoopFileSystemCallable ) {
    this.hadoopFileSystemCallable = hadoopFileSystemCallable;
  }

  /**
   * Opens an output stream that appends to the file at {@code path}.
   * Hadoop's AccessControlException is translated to the API's own type by
   * {@link #callAndWrapExceptions}.
   */
  @Override
  public OutputStream append( final HadoopFileSystemPath path ) throws IOException {
    return callAndWrapExceptions( new IOExceptionCallable<OutputStream>() {
      @Override
      public OutputStream call() throws IOException {
        return getFileSystem().append( new Path( path.getPath() ) );
      }
    } );
  }

  /** Creates (or overwrites, per Hadoop's default create semantics) the file at {@code path}. */
  @Override
  public OutputStream create( final HadoopFileSystemPath path ) throws IOException {
    return callAndWrapExceptions( new IOExceptionCallable<OutputStream>() {
      @Override
      public OutputStream call() throws IOException {
        return getFileSystem().create( new Path( path.getPath() ) );
      }
    } );
  }

  /**
   * Deletes {@code path}.
   *
   * @param arg1 passed straight through as Hadoop's second delete argument
   *             (presumably the "recursive" flag — TODO confirm against HadoopFileSystem javadoc)
   */
  @Override
  public boolean delete( final HadoopFileSystemPath path, final boolean arg1 ) throws IOException {
    return callAndWrapExceptions( new IOExceptionCallable<Boolean>() {
      @Override
      public Boolean call() throws IOException {
        return getFileSystem().delete( new Path( path.getPath() ), arg1 );
      }
    } );
  }

  /** Fetches the Hadoop FileStatus for {@code path}, wrapped in the API's HadoopFileStatusImpl. */
  @Override
  public HadoopFileStatus getFileStatus( final HadoopFileSystemPath path ) throws IOException {
    return callAndWrapExceptions( new IOExceptionCallable<HadoopFileStatus>() {
      @Override
      public HadoopFileStatus call() throws IOException {
        return new HadoopFileStatusImpl( getFileSystem().getFileStatus( new Path( path.getPath() ) ) );
      }
    } );
  }

  /** Creates the directory {@code path} (and, per Hadoop mkdirs semantics, any missing parents). */
  @Override
  public boolean mkdirs( final HadoopFileSystemPath path ) throws IOException {
    return callAndWrapExceptions( new IOExceptionCallable<Boolean>() {
      @Override
      public Boolean call() throws IOException {
        return getFileSystem().mkdirs( new Path( path.getPath() ) );
      }
    } );
  }

  /** Opens the file at {@code path} for reading. */
  @Override
  public InputStream open( final HadoopFileSystemPath path ) throws IOException {
    return callAndWrapExceptions( new IOExceptionCallable<InputStream>() {
      @Override
      public InputStream call() throws IOException {
        return getFileSystem().open( new Path( path.getPath() ) );
      }
    } );
  }

  /** Renames/moves {@code path} to {@code path2}. */
  @Override
  public boolean rename( final HadoopFileSystemPath path, final HadoopFileSystemPath path2 ) throws IOException {
    return callAndWrapExceptions( new IOExceptionCallable<Boolean>() {
      @Override
      public Boolean call() throws IOException {
        return getFileSystem().rename( new Path( path.getPath() ), new Path( path2.getPath() ) );
      }
    } );
  }

  /** Sets modification ({@code mtime}) and access ({@code atime}) times on {@code path}. */
  @Override
  public void setTimes( final HadoopFileSystemPath path, final long mtime, final long atime ) throws IOException {
    callAndWrapExceptions( new IOExceptionCallable<Void>() {
      @Override
      public Void call() throws IOException {
        getFileSystem().setTimes( new Path( path.getPath() ), mtime, atime );
        return null;
      }
    } );
  }

  /**
   * Lists the direct children of {@code path}, converting each Hadoop FileStatus into
   * a HadoopFileStatusImpl. Returns null when the underlying call returned null.
   */
  @Override
  public HadoopFileStatus[] listStatus( final HadoopFileSystemPath path ) throws IOException {
    FileStatus[] fileStatuses = callAndWrapExceptions( new IOExceptionCallable<FileStatus[]>() {
      @Override
      public FileStatus[] call() throws IOException {
        return getFileSystem().listStatus( new Path( path.getPath() ) );
      }
    } );
    if ( fileStatuses == null ) {
      return null;
    }
    HadoopFileStatus[] result = new HadoopFileStatus[ fileStatuses.length ];
    for ( int i = 0; i < fileStatuses.length; i++ ) {
      result[ i ] = new HadoopFileStatusImpl( fileStatuses[ i ] );
    }
    return result;
  }

  /** Wraps a raw path string as a HadoopFileSystemPath. */
  @Override
  public HadoopFileSystemPath getPath( String path ) {
    return new HadoopFileSystemPathImpl( new Path( path ) );
  }

  /** Returns the current user's home directory on this file system. */
  @Override
  public HadoopFileSystemPath getHomeDirectory() {
    return new HadoopFileSystemPathImpl( getFileSystem().getHomeDirectory() );
  }

  /** Qualifies the given path with this file system's scheme/authority. */
  @Override
  public HadoopFileSystemPath makeQualified( HadoopFileSystemPath hadoopFileSystemPath ) {
    return new HadoopFileSystemPathImpl( getFileSystem()
      .makeQualified( HadoopFileSystemPathImpl.toHadoopFileSystemPathImpl( hadoopFileSystemPath ).getRawPath() ) );
  }

  /**
   * Applies POSIX-style permissions to a path.
   * <p>
   * {@code permissions} is a base-10 integer whose decimal digits encode the octal mode:
   * e.g. pass {@code 755} (the int seven-hundred-fifty-five) for rwxr-xr-x. Each digit is
   * validated to the 0-7 range and mapped onto FsAction via its ordinal (FsAction's enum
   * order runs NONE..ALL, matching the octal digit values).
   * Note: 4-digit modes (setuid/setgid/sticky, e.g. 1755) are rejected by the owner-digit
   * range check — only the three rwx digit groups are supported.
   */
  @Override
  public void chmod( final HadoopFileSystemPath hadoopFileSystemPath, int permissions ) throws IOException {
    final int owner = permissions / 100;
    if ( owner < 0 || owner > 7 ) {
      throw new IllegalArgumentException( "Expected owner permissions between 0 and 7" );
    }
    final int group = ( permissions - ( owner * 100 ) ) / 10;
    if ( group < 0 || group > 7 ) {
      throw new IllegalArgumentException( "Expected group permissions between 0 and 7" );
    }
    final int other = permissions - ( owner * 100 ) - ( group * 10 );
    if ( other < 0 || other > 7 ) {
      throw new IllegalArgumentException( "Expected other permissions between 0 and 7" );
    }
    callAndWrapExceptions( new IOExceptionCallable<Void>() {
      @Override
      public Void call() throws IOException {
        getFileSystem().setPermission(
          HadoopFileSystemPathImpl.toHadoopFileSystemPathImpl( hadoopFileSystemPath ).getRawPath(),
          new FsPermission( FsAction.values()[ owner ], FsAction.values()[ group ], FsAction.values()[ other ] ) );
        return null;
      }
    } );
  }

  /** Returns true when {@code path} exists on the file system. */
  @Override
  public boolean exists( final HadoopFileSystemPath path ) throws IOException {
    return callAndWrapExceptions( new IOExceptionCallable<Boolean>() {
      @Override
      public Boolean call() throws IOException {
        return getFileSystem().exists( HadoopFileSystemPathImpl.toHadoopFileSystemPathImpl( path ).getRawPath() );
      }
    } );
  }

  /**
   * Resolves {@code path} to its canonical form by round-tripping through getFileStatus()
   * and taking the path recorded on the returned status.
   */
  @Override
  public HadoopFileSystemPath resolvePath( final HadoopFileSystemPath path ) throws IOException {
    return callAndWrapExceptions( new IOExceptionCallable<HadoopFileSystemPath>() {
      @Override
      public HadoopFileSystemPath call() throws IOException {
        return new HadoopFileSystemPathImpl(
          getFileSystem().getFileStatus( HadoopFileSystemPathImpl.toHadoopFileSystemPathImpl( path ).getRawPath() )
            .getPath() );
      }
    } );
  }

  /**
   * Returns the default file-system URI from the Hadoop configuration: reads the modern
   * "fs.defaultFS" key, falling back to the deprecated "fs.default.name" key.
   */
  @Override
  public String getFsDefaultName() {
    return getFileSystem().getConf().get( "fs.defaultFS", getFileSystem().getConf().get( "fs.default.name" ) );
  }

  /** Sets a key/value pair on the live Hadoop Configuration backing this file system. */
  @Override
  public void setProperty( String name, String value ) {
    getFileSystem().getConf().set( name, value );
  }

  /** Reads a configuration value, returning {@code defaultValue} when the key is unset. */
  @Override
  public String getProperty( String name, String defaultValue ) {
    return getFileSystem().getConf().get( name, defaultValue );
  }

  /**
   * Runs the callable, translating Hadoop's AccessControlException into the
   * org.pentaho.bigdata.api.hdfs.exceptions variant expected by API callers.
   * Other IOExceptions propagate unchanged.
   */
  private <T> T callAndWrapExceptions( IOExceptionCallable<T> ioExceptionCallable ) throws IOException {
    try {
      return ioExceptionCallable.call();
    } catch ( AccessControlException e ) {
      // Preserve both message and cause when re-wrapping.
      throw new org.pentaho.bigdata.api.hdfs.exceptions.AccessControlException( e.getMessage(), e );
    }
  }

  // Internal callable that may throw IOException (pre-java.util.concurrent.Callable shape
  // specialised to IOException so callers need no broad catch).
  private interface IOExceptionCallable<T> {
    T call() throws IOException;
  }

  // Always re-fetches the FileSystem from the callable; lifetime/caching is the callable's concern.
  private FileSystem getFileSystem() {
    return hadoopFileSystemCallable.getFileSystem();
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.action.admin.indices.rollover;

import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesClusterStateUpdateRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexClusterStateUpdateRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.ActiveShardsObserver;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.cluster.metadata.AliasOrIndex;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.IndexTemplateMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.MetaDataCreateIndexService;
import org.elasticsearch.cluster.metadata.MetaDataIndexAliasesService;
import org.elasticsearch.cluster.metadata.MetaDataIndexTemplateService;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.shard.DocsStats;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static java.util.Collections.unmodifiableList;

/**
 * Main class to swap the index pointed to by an alias, given some conditions.
 *
 * Rollover flow implemented here: validate the source alias, compute the target index
 * name (explicit, or derived by incrementing a trailing "-NNNNNN" counter), gather doc
 * stats for the source index, evaluate the request's conditions against those stats,
 * and — if any condition matched (or none were given) — create the new index and
 * atomically repoint the alias from the old index to the new one.
 */
public class TransportRolloverAction extends TransportMasterNodeAction<RolloverRequest, RolloverResponse> {

    // Matches index names that end in "-<digits>", the shape rollover knows how to increment.
    private static final Pattern INDEX_NAME_PATTERN = Pattern.compile("^.*-\\d+$");
    private final MetaDataCreateIndexService createIndexService;
    private final MetaDataIndexAliasesService indexAliasesService;
    private final ActiveShardsObserver activeShardsObserver;
    private final Client client;

    @Inject
    public TransportRolloverAction(Settings settings, TransportService transportService, ClusterService clusterService,
                                   ThreadPool threadPool, MetaDataCreateIndexService createIndexService,
                                   ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
                                   MetaDataIndexAliasesService indexAliasesService, Client client) {
        super(settings, RolloverAction.NAME, transportService, clusterService, threadPool, actionFilters,
            indexNameExpressionResolver, RolloverRequest::new);
        this.createIndexService = createIndexService;
        this.indexAliasesService = indexAliasesService;
        this.client = client;
        this.activeShardsObserver = new ActiveShardsObserver(settings, clusterService, threadPool);
    }

    @Override
    protected String executor() {
        // we go async right away
        return ThreadPool.Names.SAME;
    }

    @Override
    protected RolloverResponse newResponse() {
        return new RolloverResponse();
    }

    /**
     * Checks METADATA_WRITE cluster blocks for the concrete indices behind the request.
     * Unavailable/wildcard handling is forced lenient (ignoreUnavailable=true,
     * allowNoIndices=true) while keeping the caller's expand-wildcards choices.
     */
    @Override
    protected ClusterBlockException checkBlock(RolloverRequest request, ClusterState state) {
        IndicesOptions indicesOptions = IndicesOptions.fromOptions(true, true,
            request.indicesOptions().expandWildcardsOpen(), request.indicesOptions().expandWildcardsClosed());
        return state.blocks().indicesBlockedException(ClusterBlockLevel.METADATA_WRITE,
            indexNameExpressionResolver.concreteIndexNames(state, indicesOptions, request.indices()));
    }

    /**
     * Executes the rollover on the master node.
     *
     * Steps: validate the alias (must exist, be an alias, point at exactly one index);
     * derive the new index name; fail fast if that index already exists or an index
     * template would duplicate the rollover alias; then asynchronously fetch doc stats
     * and, in the stats callback, either report a dry run, report unmet conditions, or
     * create the new index, swap the alias, and wait for active shards before responding.
     * Every async failure path funnels into listener::onFailure.
     */
    @Override
    protected void masterOperation(final RolloverRequest rolloverRequest, final ClusterState state,
                                   final ActionListener<RolloverResponse> listener) {
        final MetaData metaData = state.metaData();
        validate(metaData, rolloverRequest);
        final AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(rolloverRequest.getAlias());
        // validate() guarantees exactly one index behind the alias, so get(0) is safe.
        final IndexMetaData indexMetaData = aliasOrIndex.getIndices().get(0);
        // Prefer the originally-provided (possibly date-math) name over the resolved concrete name.
        final String sourceProvidedName = indexMetaData.getSettings().get(IndexMetaData.SETTING_INDEX_PROVIDED_NAME,
            indexMetaData.getIndex().getName());
        final String sourceIndexName = indexMetaData.getIndex().getName();
        final String unresolvedName = (rolloverRequest.getNewIndexName() != null)
            ? rolloverRequest.getNewIndexName()
            : generateRolloverIndexName(sourceProvidedName, indexNameExpressionResolver);
        final String rolloverIndexName = indexNameExpressionResolver.resolveDateMathExpression(unresolvedName);
        MetaDataCreateIndexService.validateIndexName(rolloverIndexName, state); // will fail if the index already exists
        checkNoDuplicatedAliasInIndexTemplate(metaData, rolloverIndexName, rolloverRequest.getAlias());
        client.admin().indices().prepareStats(sourceIndexName).clear().setDocs(true).execute(
            new ActionListener<IndicesStatsResponse>() {
                @Override
                public void onResponse(IndicesStatsResponse statsResponse) {
                    final Set<Condition.Result> conditionResults = evaluateConditions(rolloverRequest.getConditions(),
                        metaData.index(sourceIndexName), statsResponse);
                    if (rolloverRequest.isDryRun()) {
                        // Dry run: report condition results without creating or switching anything.
                        listener.onResponse(
                            new RolloverResponse(sourceIndexName, rolloverIndexName, conditionResults, true, false,
                                false, false));
                        return;
                    }
                    // An empty condition set means "always roll over".
                    if (conditionResults.size() == 0 || conditionResults.stream().anyMatch(result -> result.matched)) {
                        CreateIndexClusterStateUpdateRequest updateRequest = prepareCreateIndexRequest(unresolvedName,
                            rolloverIndexName, rolloverRequest);
                        createIndexService.createIndex(updateRequest,
                            ActionListener.wrap(createIndexClusterStateUpdateResponse -> {
                                // switch the alias to point to the newly created index
                                indexAliasesService.indicesAliases(
                                    prepareRolloverAliasesUpdateRequest(sourceIndexName, rolloverIndexName,
                                        rolloverRequest),
                                    ActionListener.wrap(aliasClusterStateUpdateResponse -> {
                                        if (aliasClusterStateUpdateResponse.isAcknowledged()) {
                                            // Index creation deferred its shard wait (see
                                            // prepareCreateIndexRequest); wait here, after the alias switch.
                                            activeShardsObserver.waitForActiveShards(new String[]{rolloverIndexName},
                                                rolloverRequest.getCreateIndexRequest().waitForActiveShards(),
                                                rolloverRequest.masterNodeTimeout(),
                                                isShardsAcknowledged -> listener.onResponse(new RolloverResponse(
                                                    sourceIndexName, rolloverIndexName, conditionResults, false, true,
                                                    true, isShardsAcknowledged)),
                                                listener::onFailure);
                                        } else {
                                            // Alias switch not acknowledged within the timeout.
                                            listener.onResponse(new RolloverResponse(sourceIndexName, rolloverIndexName,
                                                conditionResults, false, true, false, false));
                                        }
                                    }, listener::onFailure));
                            }, listener::onFailure));
                    } else {
                        // conditions not met
                        listener.onResponse(
                            new RolloverResponse(sourceIndexName, rolloverIndexName, conditionResults, false, false,
                                false, false)
                        );
                    }
                }

                @Override
                public void onFailure(Exception e) {
                    listener.onFailure(e);
                }
            }
        );
    }

    /**
     * Builds the alias update that atomically adds the alias to the new index and
     * removes it from the old one, inheriting the request's timeouts.
     */
    static IndicesAliasesClusterStateUpdateRequest prepareRolloverAliasesUpdateRequest(String oldIndex, String newIndex,
                                                                                      RolloverRequest request) {
        List<AliasAction> actions = unmodifiableList(Arrays.asList(
            new AliasAction.Add(newIndex, request.getAlias(), null, null, null),
            new AliasAction.Remove(oldIndex, request.getAlias())));
        final IndicesAliasesClusterStateUpdateRequest updateRequest = new IndicesAliasesClusterStateUpdateRequest(actions)
            .ackTimeout(request.ackTimeout())
            .masterNodeTimeout(request.masterNodeTimeout());
        return updateRequest;
    }

    /**
     * Derives the next index name by incrementing the trailing "-NNN" counter,
     * zero-padded to six digits (e.g. "logs-000001" -> "logs-000002").
     * For date-math sources (e.g. "&lt;logs-{now/d}-1&gt;") the counter is parsed up to the
     * trailing '&gt;', which is re-appended after incrementing so the result stays a
     * date-math expression. Throws IllegalArgumentException when the (resolved) name
     * has no numeric suffix to increment.
     */
    static String generateRolloverIndexName(String sourceIndexName, IndexNameExpressionResolver indexNameExpressionResolver) {
        String resolvedName = indexNameExpressionResolver.resolveDateMathExpression(sourceIndexName);
        final boolean isDateMath = sourceIndexName.equals(resolvedName) == false;
        if (INDEX_NAME_PATTERN.matcher(resolvedName).matches()) {
            int numberIndex = sourceIndexName.lastIndexOf("-");
            assert numberIndex != -1 : "no separator '-' found";
            int counter = Integer.parseInt(sourceIndexName.substring(numberIndex + 1,
                isDateMath ? sourceIndexName.length()-1 : sourceIndexName.length()));
            String newName = sourceIndexName.substring(0, numberIndex) + "-"
                + String.format(Locale.ROOT, "%06d", ++counter) + (isDateMath ? ">" : "");
            return newName;
        } else {
            throw new IllegalArgumentException("index name [" + sourceIndexName + "] does not match pattern '^.*-\\d+$'");
        }
    }

    /**
     * Evaluates each rollover condition against a stats snapshot (doc count, index
     * creation date, total size in bytes). A null DocsStats is treated as zero docs
     * and zero bytes.
     */
    static Set<Condition.Result> evaluateConditions(final Set<Condition> conditions, final DocsStats docsStats,
                                                    final IndexMetaData metaData) {
        final long numDocs = docsStats == null ? 0 : docsStats.getCount();
        final long indexSize = docsStats == null ? 0 : docsStats.getTotalSizeInBytes();
        final Condition.Stats stats = new Condition.Stats(numDocs, metaData.getCreationDate(), new ByteSizeValue(indexSize));
        return conditions.stream()
            .map(condition -> condition.evaluate(stats))
            .collect(Collectors.toSet());
    }

    /** Convenience overload: evaluates conditions using the primaries' doc stats from a stats response. */
    static Set<Condition.Result> evaluateConditions(final Set<Condition> conditions, final IndexMetaData metaData,
                                                    final IndicesStatsResponse statsResponse) {
        return evaluateConditions(conditions, statsResponse.getPrimaries().getDocs(), metaData);
    }

    /**
     * Rejects requests whose source is missing, is a concrete index rather than an
     * alias, or is an alias spanning more than one index.
     */
    static void validate(MetaData metaData, RolloverRequest request) {
        final AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(request.getAlias());
        if (aliasOrIndex == null) {
            throw new IllegalArgumentException("source alias does not exist");
        }
        if (aliasOrIndex.isAlias() == false) {
            throw new IllegalArgumentException("source alias is a concrete index");
        }
        if (aliasOrIndex.getIndices().size() != 1) {
            throw new IllegalArgumentException("source alias maps to multiple indices");
        }
    }

    /**
     * Builds the create-index cluster-state request for the rollover target, copying
     * settings/aliases/mappings from the request's embedded CreateIndexRequest.
     * Shard-activation waiting is disabled here (ActiveShardCount.NONE) because
     * masterOperation waits explicitly after the alias switch instead.
     */
    static CreateIndexClusterStateUpdateRequest prepareCreateIndexRequest(final String providedIndexName,
                                                                          final String targetIndexName,
                                                                          final RolloverRequest rolloverRequest) {
        final CreateIndexRequest createIndexRequest = rolloverRequest.getCreateIndexRequest();
        createIndexRequest.cause("rollover_index");
        createIndexRequest.index(targetIndexName);
        return new CreateIndexClusterStateUpdateRequest(createIndexRequest, "rollover_index", targetIndexName,
            providedIndexName)
            .ackTimeout(createIndexRequest.timeout())
            .masterNodeTimeout(createIndexRequest.masterNodeTimeout())
            .settings(createIndexRequest.settings())
            .aliases(createIndexRequest.aliases())
            .waitForActiveShards(ActiveShardCount.NONE) // not waiting for shards here, will wait on the alias switch operation
            .mappings(createIndexRequest.mappings());
    }

    /**
     * If the newly created index matches with an index template whose aliases contains the rollover alias,
     * the rollover alias will point to multiple indices. This causes indexing requests to be rejected.
     * To avoid this, we make sure that there is no duplicated alias in index templates before creating a new index.
     */
    static void checkNoDuplicatedAliasInIndexTemplate(MetaData metaData, String rolloverIndexName,
                                                      String rolloverRequestAlias) {
        final List<IndexTemplateMetaData> matchedTemplates = MetaDataIndexTemplateService.findTemplates(metaData,
            rolloverIndexName);
        for (IndexTemplateMetaData template : matchedTemplates) {
            if (template.aliases().containsKey(rolloverRequestAlias)) {
                throw new IllegalArgumentException(String.format(Locale.ROOT,
                    "Rollover alias [%s] can point to multiple indices, found duplicated alias [%s] in index template [%s]",
                    rolloverRequestAlias, template.aliases().keys(), template.name()));
            }
        }
    }
}
/*
 * Copyright 2007 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.collect.ImmutableSet;

import java.util.*;

/**
 * Tests for {@link AliasStrings}.
 *
 * The pass hoists repeated string literals into global {@code $$S_<encoded>} variables;
 * the expected outputs below are golden strings, so their exact contents (including
 * spacing inside the literals) are significant.
 */
public class AliasStringsTest extends CompilerTestCase {

  private static final String EXTERNS = "alert";
  // null means "alias every string", mirroring the pass's contract for a null set.
  private static final Set<String> ALL_STRINGS = null;

  // Per-test configuration consumed by getProcessor(); reset by each test method.
  private Set<String> strings = ALL_STRINGS;
  private JSModuleGraph moduleGraph = null;
  private boolean hashReduction = false;

  public AliasStringsTest() {
    super(EXTERNS);
  }

  @Override
  public void setUp() {
    super.enableLineNumberCheck(false);
  }

  @Override
  public CompilerPass getProcessor(Compiler compiler) {
    // "(?i)secret" is the blacklist pattern: strings matching it are never aliased.
    AliasStrings pass =
        new AliasStrings(compiler, moduleGraph, strings, "(?i)secret", false);
    if (hashReduction)
      pass.unitTestHashReductionMask = 0;
    return pass;
  }

  // Aliasing of strings appearing on the right-hand side of assignments.
  public void testAssignment() {
    strings = ImmutableSet.of("none", "width", "overimaginative");

    // Strings not in alias list
    testSame("var foo='foo'");
    testSame("a='titanium',b='titanium',c='titanium',d='titanium'");

    // Not worth aliasing:
    testSame("myStr='width'");
    testSame("Bar.prototype.start='none'");
    // Worth aliasing:
    test("a='overimaginative';b='overimaginative'",
         "var $$S_overimaginative='overimaginative';" +
         "a=$$S_overimaginative;b=$$S_overimaginative");

    testSame("var width=1234");
    testSame("width=1234;width=10000;width=9900;width=17;");
  }

  // Several aliasable strings in one expression, including the empty string.
  public void testSeveral() {
    strings = ImmutableSet.of("", "px", "none", "width");

    test("function() {var styles=['width',100,'px','display','none'].join('')}",
         "var $$S_width='width';" +
         "var $$S_px='px';" +
         "var $$S_none='none';" +
         "var $$S_='';" +
         "function() {var styles=[$$S_width,100,$$S_px,'display'," +
         "$$S_none].join($$S_)}");
  }

  // Quoted object-literal keys are unquoted, not aliased; values are aliased.
  // Non-identifier characters are %-style encoded ($xx) in the alias name.
  public void testObjectLiterals() {
    strings = ImmutableSet.of("px", "!@#$%^&*()");

    test("var foo={px:435}", "var foo={px:435}");

    // string as key
    test("var foo={'px':435}", "var foo={px:435}");
    test("bar=function(){return {'px':435}}", "bar=function(){return {px:435}}");

    test("function() {var foo={bar:'!@#$%^&*()'}}",
         "var $$S_$21$40$23$24$25$5e$26$2a$28$29='!@#$%^&*()';" +
         "function() {var foo={bar:$$S_$21$40$23$24$25$5e$26$2a$28$29}}");
    test("function() {var foo={px:435,foo:'px',bar:'baz'}}",
         "var $$S_px='px';" +
         "function() {var foo={px:435,foo:$$S_px,bar:'baz'}}");
  }

  // Property names are never aliased; strings used as computed keys are.
  public void testGetProp() {
    strings = ImmutableSet.of("px", "width");

    testSame("function(){element.style.px=1234}");

    test("function f(){shape.width.units='px'}",
         "var $$S_px='px';function f(){shape.width.units=$$S_px}");

    test("function f(){shape['width'].units='pt'}",
         "var $$S_width='width';" +
         "function f(){shape[$$S_width].units='pt'}");
  }

  // Aliasing of string arguments in function calls.
  public void testFunctionCalls() {
    strings = ImmutableSet.of("", ",", "overimaginative");

    // Not worth aliasing
    testSame("alert('')");
    testSame("var a=[1,2,3];a.join(',')");
    // worth aliasing
    test("f('overimaginative', 'overimaginative')",
         "var $$S_overimaginative='overimaginative';" +
         "f($$S_overimaginative,$$S_overimaginative)");
  }

  // Regex literals and their match arguments are left alone.
  public void testRegularExpressions() {
    strings = ImmutableSet.of("px");

    testSame("/px/.match('10px')");
  }

  // Strings matching the case-insensitive "secret" blacklist pattern are not aliased.
  public void testBlackList() {
    test("function(){var f=\'sec ret\';g=\"TOPseCreT\"}",
         "var $$S_sec$20ret='sec ret';" +
         "function(){var f=$$S_sec$20ret;g=\"TOPseCreT\"}");
  }

  // Long strings get a truncated name plus a stable hash-code suffix.
  public void testLongStableAlias() {
    strings = ALL_STRINGS;

    // Check long strings get a hash code

    test("a='Antidisestablishmentarianism';" +
         "b='Antidisestablishmentarianism';",
         "var $$S_Antidisestablishment_506eaf9c=" +
         " 'Antidisestablishmentarianism';" +
         "a=$$S_Antidisestablishment_506eaf9c;" +
         "b=$$S_Antidisestablishment_506eaf9c");

    // Check that small changes give different hash codes
    test("a='AntidisestablishmentarianIsm';" +
         "b='AntidisestablishmentarianIsm';",
         "var $$S_Antidisestablishment_6823e97c=" +
         " 'AntidisestablishmentarianIsm';" +
         "a=$$S_Antidisestablishment_6823e97c;" +
         "b=$$S_Antidisestablishment_6823e97c");

    // TODO(user): check that hash code collisions are handled.
  }

  // With the hash reduced to a single value, colliding aliases get a _N uniquifier.
  public void testLongStableAliasHashCollision() {
    strings = ALL_STRINGS;
    hashReduction = true;

    // Check that hash code collisions generate different alias
    // variable names

    test("f('Antidisestablishmentarianism');"  +
         "f('Antidisestablishmentarianism');"  +
         "f('Antidisestablishmentarianismo');" +
         "f('Antidisestablishmentarianismo');",
         "var $$S_Antidisestablishment_0=" +
         " 'Antidisestablishmentarianism';"  +
         "f($$S_Antidisestablishment_0);"   +
         "f($$S_Antidisestablishment_0);"   +
         "var $$S_Antidisestablishment_0_1=" +
         " 'Antidisestablishmentarianismo';"  +
         "f($$S_Antidisestablishment_0_1);"   +
         "f($$S_Antidisestablishment_0_1);");
  }

  // Strings that are already simple global variable/array/object values stay inline;
  // only occurrences inside functions are aliased.
  public void testStringsThatAreGlobalVarValues() {
    strings = ALL_STRINGS;

    testSame("var foo='foo'; var bar='';");

    // Regular array
    testSame("var foo=['foo','bar'];");

    // Nested array
    testSame("var foo=['foo',['bar']];");

    // Same string is in a global array and a local in a function
    test("var foo=['foo', 'bar'];function bar() {return 'foo';}",
         "var $$S_foo='foo';" +
         "var foo=[$$S_foo, 'bar'];function bar() {return $$S_foo;}");

    // Regular object literal
    testSame("var foo={'foo': 'bar'};");

    // Nested object literal
    testSame("var foo={'foo': {'bar': 'baz'}};");

    // Same string is in a global object literal (as key) and local in a
    // function
    test("var foo={'foo': 'bar'};function bar() {return 'foo';}",
         "var foo={'foo': 'bar'};" +
         "var $$S_foo='foo';function bar() {return $$S_foo;}");

    // Same string is in a global object literal (as value) and local in a
    // function
    test("var foo={'foo': 'foo'};function bar() {return 'foo';}",
         "var $$S_foo='foo';" +
         "var foo={'foo': $$S_foo};function bar() {return $$S_foo;}");
  }

  // Aliases are hoisted into the lowest common ancestor module of their uses.
  public void testStringsInModules() {
    strings = ALL_STRINGS;

    // Aliases must be placed in the correct module. The alias for
    // '------adios------' must be lifted from m2 and m3 and go in the
    // common parent module m1

    JSModule[] modules = createModules(
        // m0
        "function f(a) { alert('f:' + a); }" +
        "function g() { alert('ciao'); }",
        // m1
        "f('-------hi-------');" +
        "f('bye');" +
        "function h(a) { alert('h:' + a); }",
        // m2
        "f('-------hi-------');" +
        "h('ciao' + '------adios------');" +
        "(function() { alert('zzz'); })();",
        // m3
        "f('-------hi-------'); alert('------adios------');" +
        "h('-----peaches-----'); h('-----peaches-----');");

    modules[1].addDependency(modules[0]);
    modules[2].addDependency(modules[1]);
    modules[3].addDependency(modules[1]);
    moduleGraph = new JSModuleGraph(modules);

    test(modules, new String[] {
        // m1
        "var $$S_f$3a = 'f:';" +
        "function f(a) { alert($$S_f$3a + a); }" +
        "var $$S_ciao = 'ciao';" +
        "function g() { alert($$S_ciao); }",
        // m2
        "var $$S_$2d$2d$2d$2d$2d$2d$2dhi$2d$2d$2d$2d$2d$2d$2d" +
        " = '-------hi-------';" +
        "var $$S_$2d$2d$2d$2d$2d$2d_adios$2d$2d$2d$2d$2d$2d" +
        " = '------adios------'; " +
        "f($$S_$2d$2d$2d$2d$2d$2d$2dhi$2d$2d$2d$2d$2d$2d$2d);" +
        "f('bye');" +
        "var $$S_h$3a = 'h:';" +
        "function h(a) { alert($$S_h$3a + a); }",
        // m3
        "f($$S_$2d$2d$2d$2d$2d$2d$2dhi$2d$2d$2d$2d$2d$2d$2d);" +
        "h($$S_ciao + $$S_$2d$2d$2d$2d$2d$2d_adios$2d$2d$2d$2d$2d$2d);" +
        "var $$S_zzz = 'zzz';" +
        "(function() { alert($$S_zzz) })();",
        // m4
        "f($$S_$2d$2d$2d$2d$2d$2d$2dhi$2d$2d$2d$2d$2d$2d$2d);" +
        "alert($$S_$2d$2d$2d$2d$2d$2d_adios$2d$2d$2d$2d$2d$2d);" +
        "var $$S_$2d$2d$2d$2d$2dpeaches$2d$2d$2d$2d$2d" +
        " = '-----peaches-----';" +
        "h($$S_$2d$2d$2d$2d$2dpeaches$2d$2d$2d$2d$2d);" +
        "h($$S_$2d$2d$2d$2d$2dpeaches$2d$2d$2d$2d$2d);",
    });

    moduleGraph = null;
  }

  // An empty root module still receives the hoisted alias shared by its dependents.
  public void testEmptyModules() {
    JSModule[] modules = createModules(
        // m0
        "",
        // m1
        "function foo() { f('good') }",
        // m2
        "function foo() { f('good') }");

    modules[1].addDependency(modules[0]);
    modules[2].addDependency(modules[0]);
    moduleGraph = new JSModuleGraph(modules);
    test(modules, new String[] {
        // m0
        "var $$S_good='good'",
        // m1
        "function foo() {f($$S_good)}",
        // m2
        "function foo() {f($$S_good)}",});

    moduleGraph = null;
  }
}
package org.ml4j.nn.layers;

import java.util.Arrays;
import java.util.List;

import org.ml4j.EditableMatrix;

/*
 * Copyright 2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
import org.ml4j.Matrix;
import org.ml4j.MatrixFactory;
import org.ml4j.nn.activationfunctions.ActivationFunction;
import org.ml4j.nn.axons.AxonsConfig;
import org.ml4j.nn.axons.BiasVectorImpl;
import org.ml4j.nn.axons.FeaturesVectorFormat;
import org.ml4j.nn.axons.TrainableAxons;
import org.ml4j.nn.axons.WeightsFormatImpl;
import org.ml4j.nn.axons.WeightsMatrixImpl;
import org.ml4j.nn.axons.WeightsMatrixOrientation;
import org.ml4j.nn.axons.factories.AxonsFactory;
import org.ml4j.nn.neurons.Neurons;
import org.ml4j.nn.neurons.NeuronsActivation;
import org.ml4j.nn.neurons.NeuronsActivationImpl;
import org.ml4j.nn.neurons.format.NeuronsActivationFormat;
import org.ml4j.nn.neurons.format.features.Dimension;
import org.ml4j.nn.synapses.UndirectedSynapses;
import org.ml4j.nn.synapses.UndirectedSynapsesActivation;
import org.ml4j.nn.synapses.UndirectedSynapsesContext;
import org.ml4j.nn.synapses.UndirectedSynapsesImpl;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A Restricted Boltzmann Machine layer backed by a single set of trainable axons
 * connecting visible (left) neurons to hidden (right) neurons, wrapped in
 * undirected synapses that carry the visible and hidden activation functions.
 */
public class RestrictedBoltzmannLayerImpl implements RestrictedBoltzmannLayer<TrainableAxons<?, ?, ?>> {

  private static final Logger LOGGER = LoggerFactory.getLogger(RestrictedBoltzmannLayerImpl.class);

  /**
   * Default serialization id.
   */
  private static final long serialVersionUID = 1L;

  // The visible<->hidden connection weights (left = visible, right = hidden).
  private TrainableAxons<?, ?, ?> axons;
  // Undirected synapses pairing the axons with the visible/hidden activation functions.
  private UndirectedSynapses<?, ?> synapses;

  /**
   * @param axons The Axons.
   * @param visibleActivationFunction The visible ActivationFunction
   * @param hiddenActivationFunction The hidden ActivationFunction
   */
  public RestrictedBoltzmannLayerImpl(TrainableAxons<?, ?, ?> axons,
      ActivationFunction<?, ?> visibleActivationFunction, ActivationFunction<?, ?> hiddenActivationFunction) {
    this.axons = axons;
    this.synapses = new UndirectedSynapsesImpl<Neurons, Neurons>(axons, visibleActivationFunction,
        hiddenActivationFunction);
  }

  /**
   * Constructs the layer with factory-initialised (null / default) weights and no bias vector.
   *
   * @param axonsFactory A factory implementation to create axons.
   * @param visibleNeurons The visible Neurons.
   * @param hiddenNeurons The hidden Neurons.
   * @param visibleActivationFunction The visible ActivationFunction.
   * @param hiddenActivationFunction The hidden ActivationFunction.
   * @param matrixFactory The MatrixFactory.
   */
  public RestrictedBoltzmannLayerImpl(AxonsFactory axonsFactory, Neurons visibleNeurons, Neurons hiddenNeurons,
      ActivationFunction<?, ?> visibleActivationFunction, ActivationFunction<?, ?> hiddenActivationFunction,
      MatrixFactory matrixFactory) {
    this.axons = axonsFactory.createFullyConnectedAxons(new AxonsConfig<>(visibleNeurons, hiddenNeurons),
        new WeightsMatrixImpl(null,
            new WeightsFormatImpl(Arrays.asList(Dimension.INPUT_FEATURE), Arrays.asList(Dimension.OUTPUT_FEATURE),
                WeightsMatrixOrientation.ROWS_SPAN_OUTPUT_DIMENSIONS)),
        null);
    this.synapses = new UndirectedSynapsesImpl<Neurons, Neurons>(axons, visibleActivationFunction,
        hiddenActivationFunction);
  }

  /**
   * Constructs the layer with explicit initial weights and (optionally) left-to-right biases.
   * NOTE(review): initialRightToLeftBiases is accepted but never used below — confirm
   * whether right-to-left biases should be forwarded to the axons factory.
   *
   * @param axonsFactory A factory implementation to create axons.
   * @param visibleNeurons The visible Neurons.
   * @param hiddenNeurons The hidden Neurons.
   * @param visibleActivationFunction The visible ActivationFunction.
   * @param hiddenActivationFunction The hidden ActivationFunction.
   * @param matrixFactory The MatrixFactory.
   * @param initialWeights The initial weights.
   */
  public RestrictedBoltzmannLayerImpl(AxonsFactory axonsFactory, Neurons visibleNeurons, Neurons hiddenNeurons,
      ActivationFunction<?, ?> visibleActivationFunction, ActivationFunction<?, ?> hiddenActivationFunction,
      MatrixFactory matrixFactory, Matrix initialWeights, Matrix initialLeftToRightBiases,
      Matrix initialRightToLeftBiases) {
    this.axons = axonsFactory.createFullyConnectedAxons(new AxonsConfig<>(visibleNeurons, hiddenNeurons),
        new WeightsMatrixImpl(initialWeights,
            new WeightsFormatImpl(Arrays.asList(Dimension.INPUT_FEATURE), Arrays.asList(Dimension.OUTPUT_FEATURE),
                WeightsMatrixOrientation.ROWS_SPAN_OUTPUT_DIMENSIONS)),
        initialLeftToRightBiases == null ? null
            : new BiasVectorImpl(initialLeftToRightBiases, FeaturesVectorFormat.DEFAULT_BIAS_FORMAT));
    this.synapses = new UndirectedSynapsesImpl<Neurons, Neurons>(axons, visibleActivationFunction,
        hiddenActivationFunction);
  }

  /** Returns a deep copy built from duplicated axons and the same activation functions. */
  @Override
  public RestrictedBoltzmannLayer<TrainableAxons<?, ?, ?>> dup() {
    return new RestrictedBoltzmannLayerImpl(axons.dup(), synapses.getLeftActivationFunction(),
        synapses.getRightActivationFunction());
  }

  @Override
  public TrainableAxons<?, ?, ?> getPrimaryAxons() {
    return axons;
  }

  /*
   * @Override public List<UndirectedSynapses<?, ?>> getSynapses() { return
   * Arrays.asList(synapses); }
   */

  /**
   * Builds the visible-unit input vector that maximally activates one hidden neuron:
   * each feature is the neuron's incoming weight divided by the L2 norm of that hidden
   * neuron's weight column.
   * NOTE(review): when wij is 0 the norm is left at 0 and wij / sum is 0/0 (NaN) —
   * confirm whether zero weights can occur here.
   */
  @Override
  public NeuronsActivation getOptimalVisibleActivationsForHiddenNeuron(int hiddenNeuronIndex,
      UndirectedLayerContext undirectedLayerContext, MatrixFactory matrixFactory) {
    LOGGER.debug("Obtaining optimal input for hidden neuron with index:" + hiddenNeuronIndex);
    Matrix weights = getPrimaryAxons().getDetachedAxonWeights().getConnectionWeights().getMatrix();
    int countJ = weights.getColumns();
    float[] maximisingInputFeatures = new float[countJ];
    boolean hasBiasUnit = getPrimaryAxons().getLeftNeurons().hasBiasUnit();
    for (int j = 0; j < countJ; j++) {
      float wij = getWij(j, hiddenNeuronIndex, weights, hasBiasUnit);
      float sum = 0;
      if (wij != 0) {
        // L2 norm over this hidden neuron's non-zero weights.
        for (int j2 = 0; j2 < countJ; j2++) {
          float weight = getWij(j2, hiddenNeuronIndex, weights, hasBiasUnit);
          if (weight != 0) {
            sum = sum + (float) Math.pow(weight, 2);
          }
        }
        sum = (float) Math.sqrt(sum);
      }
      maximisingInputFeatures[j] = wij / sum;
    }
    return new NeuronsActivationImpl(getVisibleNeurons(),
        matrixFactory.createMatrixFromRows(new float[][] { maximisingInputFeatures }),
        NeuronsActivationFormat.ROWS_SPAN_FEATURE_SET);
  }

  // Weight lookup: rows span hidden (output) neurons, columns span visible (input) features.
  // hasBiasUnit is currently unused here (indICorrected == indI) — presumably a bias-offset
  // hook; TODO confirm.
  private float getWij(int indI, int indJ, Matrix weights, boolean hasBiasUnit) {
    int indICorrected = indI;
    return weights.get(indJ, indICorrected);
  }

  /** Visible data -> hidden activation (positive phase push, no preceding activation). */
  @Override
  public RestrictedBoltzmannLayerActivation activateHiddenNeuronsFromVisibleNeuronsData(
      NeuronsActivation visibleNeuronsActivation, UndirectedLayerContext layerContext) {
    NeuronsActivation synapsesInput = visibleNeuronsActivation;
    UndirectedSynapsesActivation hiddenNeuronsSynapseActivation = synapses.pushLeftToRight(synapsesInput, null,
        layerContext.createSynapsesContext(0));
    return new RestrictedBoltzmannLayerActivationImpl(hiddenNeuronsSynapseActivation, visibleNeuronsActivation,
        hiddenNeuronsSynapseActivation.getOutput());
  }

  /** Visible reconstruction -> hidden activation, reusing the previous synapses activation. */
  @Override
  public RestrictedBoltzmannLayerActivation activateHiddenNeuronsFromVisibleNeuronsReconstruction(
      RestrictedBoltzmannLayerActivation visibleNeuronsReconstruction, UndirectedLayerContext layerContext) {
    NeuronsActivation synapsesInput = new NeuronsActivationImpl(getHiddenNeurons(),
        visibleNeuronsReconstruction.getSynapsesActivation().getOutput()
            .getActivations(layerContext.getMatrixFactory()),
        NeuronsActivationFormat.ROWS_SPAN_FEATURE_SET);
    UndirectedSynapsesActivation hiddenNeuronsSynapseActivation = synapses.pushLeftToRight(synapsesInput,
        visibleNeuronsReconstruction.getSynapsesActivation(), layerContext.createSynapsesContext(0));
    return new RestrictedBoltzmannLayerActivationImpl(hiddenNeuronsSynapseActivation,
        visibleNeuronsReconstruction.getVisibleActivationProbablities(),
        hiddenNeuronsSynapseActivation.getOutput());
  }

  /** Hidden data -> visible activation (downward push, no preceding activation). */
  @Override
  public RestrictedBoltzmannLayerActivation activateVisibleNeuronsFromHiddenNeurons(
      NeuronsActivation hiddenNeuronsDataActivation, UndirectedLayerContext layerContext) {
    NeuronsActivation synapsesInput = hiddenNeuronsDataActivation;
    UndirectedSynapsesContext context = layerContext.createSynapsesContext(0);
    UndirectedSynapsesActivation visibleNeuronsSynapseActivation = synapses.pushRightToLeft(synapsesInput, null,
        context);
    return new RestrictedBoltzmannLayerActivationImpl(visibleNeuronsSynapseActivation,
        visibleNeuronsSynapseActivation.getOutput(), hiddenNeuronsDataActivation);
  }

  /** Hidden activation *probabilities* -> visible reconstruction. */
  @Override
  public RestrictedBoltzmannLayerActivation activateVisibleNeuronsFromHiddenNeuronsProbabilities(
      RestrictedBoltzmannLayerActivation previousVisibleToHiddenNeuronsActivation,
      UndirectedLayerContext layerContext) {
    NeuronsActivation synapsesInput =
        previousVisibleToHiddenNeuronsActivation.getHiddenActivationProbabilities();
    UndirectedSynapsesContext context = layerContext.createSynapsesContext(0);
    UndirectedSynapsesActivation visibleNeuronsSynapseActivation = synapses.pushRightToLeft(synapsesInput,
        previousVisibleToHiddenNeuronsActivation.getSynapsesActivation(), context);
    return new RestrictedBoltzmannLayerActivationImpl(visibleNeuronsSynapseActivation,
        visibleNeuronsSynapseActivation.getOutput(),
        previousVisibleToHiddenNeuronsActivation.getHiddenActivationProbabilities());
  }

  /** Hidden binary *sample* -> visible reconstruction (Gibbs sampling step). */
  @Override
  public RestrictedBoltzmannLayerActivation activateVisibleNeuronsFromHiddenNeuronsSample(
      RestrictedBoltzmannLayerActivation previousVisibleToHiddenNeuronsActivation,
      UndirectedLayerContext layerContext) {
    NeuronsActivation sample = previousVisibleToHiddenNeuronsActivation
        .getHiddenActivationBinarySample(layerContext.getMatrixFactory());
    NeuronsActivation synapsesInput = new NeuronsActivationImpl(getHiddenNeurons(),
        sample.getActivations(layerContext.getMatrixFactory()), NeuronsActivationFormat.ROWS_SPAN_FEATURE_SET);
    UndirectedSynapsesContext context = layerContext.createSynapsesContext(0);
    UndirectedSynapsesActivation visibleNeuronsSynapseActivation = synapses.pushRightToLeft(synapsesInput,
        previousVisibleToHiddenNeuronsActivation.getSynapsesActivation(), context);
    return new RestrictedBoltzmannLayerActivationImpl(visibleNeuronsSynapseActivation,
        new NeuronsActivationImpl(getVisibleNeurons(),
            visibleNeuronsSynapseActivation.getOutput().getActivations(layerContext.getMatrixFactory()),
            NeuronsActivationFormat.ROWS_SPAN_FEATURE_SET),
        previousVisibleToHiddenNeuronsActivation.getHiddenActivationProbabilities());
  }

  @Override
  public Neurons getHiddenNeurons() {
    return axons.getRightNeurons();
  }

  @Override
  public Neurons getVisibleNeurons() {
    return axons.getLeftNeurons();
  }

  /**
   * Standard RBM weight initialisation: small random Gaussian weights scaled by the
   * learning rate, hidden biases set to -4, and each visible bias set to
   * log(p / (1 - p)) where p is that unit's on-proportion in the training data
   * (clamped to >= 0.001 so the log stays finite).
   * NOTE(review): getProportionsOfOnUnits(...) is recomputed on every loop iteration
   * although its input does not change — loop-invariant; hoisting it would be a
   * pure performance win.
   *
   * @param data The training data.
   * @param visibleNeurons The visible neurons.
   * @param hiddenNeurons The hidden neurons.
   * @param learningRate The learning rate.
   * @param matrixFactory The matrix factory.
   * @return The initial connection weights.
   */
  public static Matrix generateInitialConnectionWeights(NeuronsActivation data, Neurons visibleNeurons,
      Neurons hiddenNeurons, float learningRate, MatrixFactory matrixFactory) {
    int initialHiddenUnitBiasWeight = -4;
    EditableMatrix thetas = matrixFactory
        .createRandn(visibleNeurons.getNeuronCountIncludingBias(), hiddenNeurons.getNeuronCountIncludingBias())
        .mul(learningRate).asEditableMatrix();
    for (int i = 1; i < thetas.getColumns(); i++) {
      thetas.put(0, i, initialHiddenUnitBiasWeight);
    }
    for (int i = 1; i < thetas.getRows(); i++) {
      double[] proportionsOfOnUnits = getProportionsOfOnUnits(data.getActivations(matrixFactory));
      double proportionOfTimeUnitActivated = proportionsOfOnUnits[i - 1];
      // Needed to add the following to limit p here, otherwise the log blows up
      proportionOfTimeUnitActivated = Math.max(proportionOfTimeUnitActivated, 0.001);
      float initialVisibleUnitBiasWeight = (float) Math
          .log(proportionOfTimeUnitActivated / (1 - proportionOfTimeUnitActivated));
      thetas.put(i, 0, initialVisibleUnitBiasWeight);
    }
    thetas.put(0, 0, 0);
    return thetas;
  }

  private static
double[] getProportionsOfOnUnits(Matrix data) { int[] counts = new int[data.getColumns()]; for (int row = 0; row < data.getRows(); row++) { float[] dat = data.getRow(row).getRowByRowArray(); for (int i = 0; i < counts.length; i++) { if (dat[i] == 1) { counts[i]++; } } } double[] props = new double[counts.length]; for (int i = 0; i < props.length; i++) { props[i] = counts[i] / data.getColumns(); } return props; } @Override public List<UndirectedSynapses<?, ?>> getComponents() { return Arrays.asList(synapses); } }
/* * * Copyright 2015-2017 IBM * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.ibm.util.merge; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.UnsupportedEncodingException; import java.net.URL; import java.nio.file.Files; import java.util.HashMap; import java.util.Map.Entry; import java.util.logging.Level; import java.util.logging.LogManager; import java.util.logging.Logger; import org.apache.commons.io.IOUtils; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.ibm.util.merge.data.DataElement; import com.ibm.util.merge.data.DataObject; import com.ibm.util.merge.data.DataPrimitive; import com.ibm.util.merge.data.parser.DataProxyJson; import com.ibm.util.merge.data.parser.Parsers; import com.ibm.util.merge.exception.Merge500; import com.ibm.util.merge.exception.MergeException; import com.ibm.util.merge.template.Template; import com.ibm.util.merge.template.content.Segment; import com.ibm.util.merge.template.directive.Enrich; import com.ibm.util.merge.template.directive.Insert; import com.ibm.util.merge.template.directive.ParseData; import com.ibm.util.merge.template.directive.Replace; import com.ibm.util.merge.template.directive.SaveFile; import com.ibm.util.merge.template.directive.enrich.provider.ProviderInterface; import com.ibm.util.merge.template.directive.enrich.provider.ProviderMeta; import 
com.ibm.util.merge.template.directive.enrich.provider.Providers; /** * Provides configuration information for the Merge Utility - * Implemented as a singleton with static convenience methods * * <p>All configuration constructor values accept a JSON string. * This string only needs to provide values when you want to override a default. </p> * <p> * Both the Default Providers and Parsers lists must be complete, providing any * value removes all default entries. If you add a custom provider, you will need to * list all of the default providers you want as well as your new provider.</p> * <p>Configuration Format JSON<blockquote><pre> * { * "nestLimit": n, * "insertLimit": n, * "tempFolder": "folder", * "loadFolder" : "folder", * "prettyJson" : true | false * "logLevel": "CRITICAL | SEVERE | WARN | INFO", * "envVars" : {"var":"value"}, * "defaultProviders" : ["providerClass","providerClass"], * "defaultParsers" : ["parserClass","parserClass"], * } * </pre></blockquote> * * @author Mike Storey * @since: v4.0.0.B1 */ public class Config { private final String version = "4.0.0.B3"; private int nestLimit = 2; private int insertLimit = 20; private String tempFolder = "/opt/ibm/idmu/v4/archives"; private String loadFolder = "/opt/ibm/idmu/v4/packages"; private String logLevel = "SEVERE"; private boolean prettyJson = true; private HashMap<String, String> envVars = new HashMap<String,String>(); private String[] defaultProviders = { "com.ibm.util.merge.template.directive.enrich.provider.CacheProvider", "com.ibm.util.merge.template.directive.enrich.provider.CloudantProvider", "com.ibm.util.merge.template.directive.enrich.provider.FileSystemProvider", "com.ibm.util.merge.template.directive.enrich.provider.JdbcProvider", "com.ibm.util.merge.template.directive.enrich.provider.JndiProvider", "com.ibm.util.merge.template.directive.enrich.provider.MongoProvider", "com.ibm.util.merge.template.directive.enrich.provider.RestProvider", 
"com.ibm.util.merge.template.directive.enrich.provider.StubProvider" }; private String[] defaultParsers = { "com.ibm.util.merge.data.parser.DataProxyCsv", "com.ibm.util.merge.data.parser.DataProxyJson", "com.ibm.util.merge.data.parser.DataProxyXmlStrict" }; /* * Transient Values - Providers, Parsers and a JSON Proxy */ private transient Providers providers = new Providers(); private transient Parsers proxies = new Parsers(); private transient static DataProxyJson proxy; /** * Provide a default configuration. If the enviornment variable * idmu-config exists it is parsed as a json configuration value. * If the config environment variable does not exist, all defaults * are provided. * * @throws MergeException on Processing Errors */ public Config() throws MergeException { String configString = ""; try { configString = this.getEnv("idmu-config"); } catch (Throwable e) { // ignore } Logger rootLogger = LogManager.getLogManager().getLogger(""); rootLogger.setLevel(Level.parse(this.logLevel)); loadConfig(configString); } /** * Get a configuration from the provided environment variable name. 
* * @param configString The configuration JSON * @throws MergeException on Processing Errors */ public Config(String configString) throws MergeException { Logger rootLogger = LogManager.getLogManager().getLogger(""); rootLogger.setLevel(Level.parse(this.logLevel)); loadConfig(configString); } /** * Read a configuration from a config file * * @param configFile The configuration file * @throws MergeException on Processing Errors */ public Config(File configFile) throws MergeException { String configString; try { configString = new String(Files.readAllBytes(configFile.toPath()), "ISO-8859-1"); } catch (UnsupportedEncodingException e) { throw new Merge500("Unsupported Encoding Exception reading config file: " + configFile.toString() + " Message: "+ e.getMessage()); } catch (IOException e) { throw new Merge500("IO Exception reading config file: " + configFile.toString() + " Message: " + e.getMessage()); } Logger rootLogger = LogManager.getLogManager().getLogger(""); rootLogger.setLevel(Level.parse(this.logLevel)); loadConfig(configString); } /** * Read a configuration from an anonymous http source * * @param url The URL to fetch a config from * @throws MergeException on Processing Errors */ public Config(URL url) throws MergeException { String configString; try { configString = IOUtils.toString( new BufferedReader( new InputStreamReader( url.openStream()))); } catch (IOException e) { throw new Merge500("Unable to laod config " + url.toString()); } Logger rootLogger = LogManager.getLogManager().getLogger(""); rootLogger.setLevel(Level.parse(this.logLevel)); loadConfig(configString); } /** * Load the configuration provided * * @param configString The configuration JSON * @throws MergeException on Processing Errors */ private void loadConfig(String configString) throws MergeException { proxy = new DataProxyJson(false); if (null != configString) { JsonElement ele = proxy.fromString(configString, JsonElement.class); if (null != ele && ele.isJsonObject()) { JsonObject me = 
ele.getAsJsonObject(); this.nestLimit = this.getIf(me, "nestLimit", this.nestLimit); this.insertLimit = this.getIf(me, "insertLimit", this.insertLimit); this.tempFolder = this.getIf(me, "tempFolder", this.tempFolder); this.loadFolder = this.getIf(me, "loadFolder", this.loadFolder); this.prettyJson = this.getIf(me, "prettyJson", true); this.logLevel = this.getIf(me, "logLevel", this.logLevel); if (me.has("envVars") && me.get("envVars").isJsonObject()) { this.envVars = new HashMap<String,String>(); for (Entry<String, JsonElement> var : me.get("envVars").getAsJsonObject().entrySet()) { this.envVars.put(var.getKey(), var.getValue().getAsString()); } } if (me.has("defaultProviders") && me.get("defaultProviders").isJsonArray()) { JsonArray list = me.get("defaultProviders").getAsJsonArray(); this.defaultProviders = new String[list.size()]; for (int i = 0; i < list.size(); i++ ) { this.defaultProviders[i] = list.get(i).getAsString(); } } if (me.has("defaultParsers")) { JsonArray list = me.get("defaultParsers").getAsJsonArray(); this.defaultParsers = new String[list.size()]; for (int i = 0; i < list.size(); i++ ) { this.defaultParsers[i] = list.get(i).getAsString(); } } } } this.registerDefaultProxies(); this.registerDefaultProviders(); Logger rootLogger = LogManager.getLogManager().getLogger(""); rootLogger.setLevel(Level.parse(this.logLevel)); proxy = new DataProxyJson(this.isPrettyJson()); } private int getIf(JsonObject me, String name, int value) { if (me.has(name)) { return me.get(name).getAsInt(); } else { return value; } } private String getIf(JsonObject object, String name, String value) { if (object.has(name)) { return object.get(name).getAsString(); } else { return value; } } private Boolean getIf(JsonObject object, String name, Boolean value) { if (object.has(name)) { return object.get(name).getAsBoolean(); } else { return value; } } /** * Abstraction of Environment access. Will leverage an entry from the * local Environment hashmap property. 
Environment Variables prefixed with * "VCAP:" will be treated as entries in the VCAP_SERVICES environment variable. * * You can provide environment values by adding entries to the envVars hashMap * * @param name The environment variable * @return The environment value * @throws MergeException on Processing Errors */ public String getEnv(String name) throws MergeException { if (envVars.containsKey(name)) { return envVars.get(name); } if (name.startsWith("VCAP:")) { return getVcapEntry(name.substring(5)); } String value = System.getenv(name); if (null == value) { throw new Merge500("enviornment variable not found"); } return value; } /** * @param serviceName The VCAP Service Name to lookup * @return the VCAP entry * @throws MergeException on Processing Errors */ public String getVcapEntry(String serviceName) throws MergeException { String VCAP_SERVICES = this.getEnv("VCAP_SERVICES"); String value = ""; if (null == VCAP_SERVICES) { throw new Merge500("VCAP_SERVICES enviornment variable missing"); } try { DataElement vcap = proxy.fromString(VCAP_SERVICES, DataElement.class); value = proxy.toString(vcap.getAsObject().get(serviceName).getAsList().get(0).getAsPrimitive()); } catch (Exception e) { throw new Merge500("VCAP_SERVICES contains malformed JSON or is missing service " + serviceName); } return value; } // // Simple Getters below here // public Boolean isPrettyJson() { return prettyJson; } public String getTempFolder() { return tempFolder; } public String getLoadFolder() { return loadFolder; } public int getNestLimit() { return nestLimit; } public int getInsertLimit() { return insertLimit; } public String getVersion() { return version; } // Parser Management public void registerDefaultProxies() throws MergeException { this.proxies.registerDefaultProxies(this.defaultParsers); } public DataElement parseString(int parseAs, String value, String options, Template context) throws MergeException { return this.proxies.parseString(parseAs, value, options, context); } // 
Provider Management public ProviderInterface getProviderInstance(String className, String source, String parameter) throws MergeException { return this.providers.getProviderInstance(className, source, parameter); } public void registerDefaultProviders() throws MergeException { this.providers.registerDefaultProviders(this.defaultProviders); } /* * Constants and Options */ public static final int PARSE_NONE = 4; public static final int PARSE_CSV = 1; public static final int PARSE_JSON = 3; public static final int PARSE_XML = 5; public static final HashMap<Integer, String> PARSE_OPTIONS() { HashMap<Integer, String> values = new HashMap<Integer, String>(); values.put(PARSE_CSV, "csv"); values.put(PARSE_JSON, "json"); values.put(PARSE_NONE, "none"); values.put(PARSE_XML, "xml"); return values; } public static final HashMap<String,HashMap<Integer, String>> getOptions() { HashMap<String,HashMap<Integer, String>> options = new HashMap<String,HashMap<Integer, String>>(); options.put("Parse Formats", PARSE_OPTIONS()); return options; } /** * Build a configuration object and return the json * @return A json string of all configuration and template/directive options * @throws MergeException on build errors. 
*/ public String get() throws MergeException { // Build the return object and Config values DataObject returnObject = new DataObject(); DataObject config = new DataObject(); config.put("version", new DataPrimitive(version)); config.put("nestLimit", new DataPrimitive(nestLimit)); config.put("insertLimit", new DataPrimitive(insertLimit)); config.put("tempFolder", new DataPrimitive(tempFolder)); config.put("logLevel", new DataPrimitive(logLevel)); config.put("prettyJson", new DataPrimitive(prettyJson)); DataObject env = new DataObject(); for (String key : envVars.keySet()) { env.put(key, new DataPrimitive(envVars.get(key))); } returnObject.put("config", config); // Build the Providers List DataObject providers = new DataObject(); for (String provider : this.providers.keySet()) { ProviderMeta meta = this.getProviderInstance(provider, "source", "parameters").getMetaInfo(); DataObject providerData = new DataObject(); providerData.put("optionName", new DataPrimitive(meta.optionName)); providerData.put("sourceJson", new DataPrimitive(meta.sourceEnv)); providerData.put("commandHelp", new DataPrimitive(meta.commandHelp)); providerData.put("parseHelp", new DataPrimitive(meta.parseHelp)); providerData.put("returnHelp", new DataPrimitive(meta.returnHelp)); providers.put(provider, providerData); } returnObject.put("providers", providers); // Build Parser List DataObject parsers = new DataObject(); for (Integer parser : this.proxies.keySet()) { parsers.put(Integer.toString(parser), new DataPrimitive(proxies.get(parser).getClass().getName())); } returnObject.put("parsers", parsers); // Build the Object Enum Options list returnObject.put("template", theOptions(Template.getOptions())); returnObject.put("encoding", theOptions(Segment.getOptions())); returnObject.put("enrich", theOptions(Enrich.getOptions())); returnObject.put("insert", theOptions(Insert.getOptions())); returnObject.put("parse", theOptions(ParseData.getOptions())); returnObject.put("replace", 
theOptions(Replace.getOptions())); returnObject.put("save", theOptions(SaveFile.getOptions())); return proxy.toString(returnObject); } /** * Helper for "get" method * @param values Options from an Object getOptions * @return the Options converted to a DataElement */ public DataElement theOptions(HashMap<String, HashMap<Integer, String>> values) { DataObject enums = new DataObject(); for (String name : values.keySet()) { DataObject options = new DataObject(); enums.put(name, options); for (Integer option : values.get(name).keySet()) { options.put(Integer.toString(option), new DataPrimitive(values.get(name).get(option))); } } return enums; } /** * Check if the parse as parser supported * @param parseAs the parse format * @return true if the parse format is supported */ public boolean hasParser(int parseAs) { return (this.proxies.containsKey(new Integer(parseAs))); } /** * Check if the provider is supported * @param key provider class * @return true if it's a supported provider */ public boolean hasProvider(String key) { return (this.providers.containsKey(key)); } }
/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is part of dcm4che, an implementation of DICOM(TM) in * Java(TM), available at http://sourceforge.net/projects/dcm4che. * * The Initial Developer of the Original Code is * TIANI Medgraph AG. * Portions created by the Initial Developer are Copyright (C) 2003-2005 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Gunter Zeilinger <gunter.zeilinger@tiani.com> * Franz Willer <franz.willer@gwi-ag.com> * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. 
* * ***** END LICENSE BLOCK ***** */ package org.dcm4chex.archive.ejb.entity; import java.util.ArrayList; import java.util.Collection; import java.util.List; import javax.ejb.CreateException; import javax.ejb.EntityBean; import javax.ejb.FinderException; import org.apache.log4j.Logger; import org.dcm4chex.archive.ejb.interfaces.AEDTO; /** * Application Entity bean. * * @author <a href="mailto:gunter@tiani.com">Gunter Zeilinger</a> * * @ejb.bean name="AE" type="CMP" view-type="local" primkey-field="pk" * local-jndi-name="ejb/AE" * @jboss.container-configuration name="Instance Per Transaction CMP 2.x EntityBean" * * @ejb.transaction type="Required" * * @ejb.persistence table-name="ae" * * @jboss.entity-command name="hsqldb-fetch-key" * * @ejb.finder * signature="Collection findAll()" * query="SELECT OBJECT(a) FROM AE AS a" transaction-type="Supports" * @jboss.query * signature="Collection findAll()" * strategy="on-find" * eager-load-group="*" * * @ejb.finder * signature="org.dcm4chex.archive.ejb.interfaces.AELocal findByAET(java.lang.String aet)" * query="SELECT OBJECT(a) FROM AE AS a WHERE a.title = ?1" * transaction-type="Supports" * @jboss.query * signature="org.dcm4chex.archive.ejb.interfaces.AELocal findByAET(java.lang.String aet)" * strategy="on-find" * eager-load-group="*" * * @jboss.query signature="java.util.Collection ejbSelectGeneric(java.lang.String jbossQl, java.lang.Object[] args)" * dynamic="true" strategy="on-load" page-size="20" * eager-load-group="*" * */ public abstract class AEBean implements EntityBean { private static final Logger log = Logger.getLogger(AEBean.class); /** * Auto-generated Primary Key * * @ejb.interface-method * @ejb.pk-field * @ejb.persistence column-name="pk" * @jboss.persistence auto-increment="true" * */ public abstract Long getPk(); public abstract void getPk(Long pk); /** * Application Entity Title * * @ejb.interface-method * @ejb.persistence column-name="aet" */ public abstract String getTitle(); /** * 
@ejb.interface-method */ public abstract void setTitle(String title); /** * @ejb.interface-method * @ejb.persistence column-name="hostname" */ public abstract String getHostName(); /** * @ejb.interface-method */ public abstract void setHostName(String name); /** * @ejb.interface-method * @ejb.persistence column-name="port" */ public abstract int getPort(); /** * @ejb.interface-method */ public abstract void setPort(int port); /** * @ejb.interface-method * @ejb.persistence column-name="cipher_suites" */ public abstract String getCipherSuites(); /** * @ejb.interface-method */ public abstract void setCipherSuites(String cipherSuites); /** * @ejb.interface-method * @ejb.persistence column-name="pat_id_issuer" */ public abstract String getIssuerOfPatientID(); /** * @ejb.interface-method */ public abstract void setIssuerOfPatientID(String issuer); /** * @ejb.interface-method * @ejb.persistence column-name="station_name" */ public abstract String getStationName(); /** * @ejb.interface-method */ public abstract void setStationName(String stationName); /** * @ejb.interface-method * @ejb.persistence column-name="institution" */ public abstract String getInstitution(); /** * @ejb.interface-method */ public abstract void setInstitution(String institution); /** * @ejb.interface-method * @ejb.persistence column-name="department" */ public abstract String getDepartment(); /** * @ejb.interface-method */ public abstract void setDepartment(String department); /** * @ejb.interface-method * @ejb.persistence column-name="installed" */ public abstract boolean getInstalled(); /** * @ejb.interface-method */ public abstract void setInstalled(boolean installed); /** * @ejb.interface-method * @ejb.persistence column-name="user_id" */ public abstract String getUserID(); /** * @ejb.interface-method */ public abstract void setUserID(String user); /** * @ejb.interface-method * @ejb.persistence column-name="passwd" */ public abstract String getPassword(); /** * @ejb.interface-method */ public 
abstract void setPassword(String passwd); /** * @ejb.interface-method * @ejb.persistence column-name="fs_group_id" */ public abstract String getFileSystemGroupID(); /** * @ejb.interface-method */ public abstract void setFileSystemGroupID(String id); /** * @ejb.interface-method * @ejb.persistence column-name="ae_desc" */ public abstract String getDescription(); /** * @ejb.interface-method */ public abstract void setDescription(String desc); /** * @ejb.interface-method * @ejb.persistence column-name="wado_url" */ public abstract String getWadoURL(); /** * @ejb.interface-method */ public abstract void setWadoURL(String desc); /** * @ejb.interface-method * @ejb.persistence column-name="vendor_data" */ public abstract byte[] getVendorData(); /** * @ejb.interface-method */ public abstract void setVendorData(byte[] vendorData); /** * @ejb.create-method */ public Long ejbCreate(AEDTO dto) throws CreateException { if (log.isDebugEnabled()) { log.debug("create AEBean(" + dto.getTitle() + ")"); } update(dto); return null; } public void ejbPostCreate(AEDTO dto) throws CreateException { } /** * @ejb.interface-method */ public void update(AEDTO dto) { setTitle(dto.getTitle().trim()); setHostName(dto.getHostName()); setPort(dto.getPort()); setCipherSuites(dto.getCipherSuitesAsString()); setIssuerOfPatientID(dto.getIssuerOfPatientID()); setUserID(dto.getUserID()); setPassword(dto.getPassword()); setFileSystemGroupID(dto.getFileSystemGroupID()); setDescription(dto.getDescription()); setWadoURL(dto.getWadoURL()); setStationName(dto.getStationName()); setInstitution(dto.getInstitution()); setDepartment(dto.getDepartment()); setVendorData(dto.getVendorData()); setInstalled(dto.isInstalled()); } /** * @ejb.interface-method * @ejb.transaction type="Supports" */ public AEDTO toDTO() { AEDTO dto = new AEDTO(); dto.setPk(getPk().longValue()); dto.setTitle(getTitle()); dto.setHostName(getHostName()); dto.setPort(getPort()); dto.setCipherSuitesAsString(getCipherSuites()); 
dto.setIssuerOfPatientID(getIssuerOfPatientID()); dto.setUserID(getUserID()); dto.setPassword(getPassword()); dto.setFileSystemGroupID(getFileSystemGroupID()); dto.setDescription(getDescription()); dto.setWadoURL(getWadoURL()); dto.setStationName(getStationName()); dto.setInstitution(getInstitution()); dto.setDepartment(getDepartment()); dto.setVendorData(getVendorData()); dto.setInstalled(getInstalled()); return dto; } /** * @ejb.interface-method * @ejb.transaction type="Supports" */ public String asString() { StringBuffer sb = new StringBuffer(64); sb.append(getProtocol()).append("://").append(getTitle()).append('@') .append(getHostName()).append(':').append(getPort()); return sb.toString(); } private String getProtocol() { String cipherSuites = getCipherSuites(); if (cipherSuites == null || cipherSuites.length() == 0) { return "dicom"; } if ("SSL_RSA_WITH_NULL_SHA".equals(cipherSuites)) { return "dicom-tls.nodes"; } if ("SSL_RSA_WITH_3DES_EDE_CBC_SHA".equals(cipherSuites)) { return "dicom-tls.3des"; } if ("TLS_RSA_WITH_AES_128_CBC_SHA,SSL_RSA_WITH_3DES_EDE_CBC_SHA" .equals(cipherSuites)) { return "dicom-tls.aes"; } return "dicom-tls"; } /** * @ejb.select query="" transaction-type="Supports" */ public abstract Collection ejbSelectGeneric(String jbossQl, Object[] args) throws FinderException; /** * @ejb.home-method */ public Collection ejbHomeListByAETAndPort(Collection aets, int port) throws FinderException { if ( aets == null || aets.size() == 0 ) { throw new IllegalArgumentException("Request must include at least 1 ae title"); } StringBuffer jbossQl = new StringBuffer(); jbossQl.append("SELECT OBJECT(a) FROM AE AS a WHERE a.port = ?1 AND a.title IN("); for ( int i = 1; i < aets.size(); i++ ) { jbossQl.append("?").append(i + 1).append(", "); } jbossQl.append("?").append(aets.size() + 1).append(")"); List<Object> values = new ArrayList<Object>(); values.add(port); values.addAll(aets); return ejbSelectGeneric(jbossQl.toString(), values.toArray(new 
Object[values.size()])); } }
/*
 * Zed Attack Proxy (ZAP) and its related class files.
 *
 * ZAP is an HTTP/HTTPS proxy for assessing web application security.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.zaproxy.zap.extension.api;

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * Generates the Node.js client API files ({@code zapv2/*.js}) from the ZAP API
 * implementors. One JS file is written per {@link ApiImplementor} prefix.
 */
public class NodeJSAPIGenerator extends AbstractAPIGenerator {

    /** License header emitted verbatim at the top of every generated JS file. */
    // NOTE: made static final — it is a constant shared by all instances, not per-instance state.
    private static final String HEADER = "/* Zed Attack Proxy (ZAP) and its related class files.\n" +
            " *\n" +
            " * ZAP is an HTTP/HTTPS proxy for assessing web application security.\n" +
            " *\n" +
            " * Copyright 2016 the ZAP development team\n" +
            " *\n" +
            " * Licensed under the Apache License, Version 2.0 (the \"License\");\n" +
            " * you may not use this file except in compliance with the License.\n" +
            " * You may obtain a copy of the License at\n" +
            " *\n" +
            " * http://www.apache.org/licenses/LICENSE-2.0\n" +
            " *\n" +
            " * Unless required by applicable law or agreed to in writing, software\n" +
            " * distributed under the License is distributed on an \"AS IS\" BASIS,\n" +
            " * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n" +
            " * See the License for the specific language governing permissions and\n" +
            " * limitations under the License.\n" +
            " */\n" +
            "\n\n";

    /**
     * Maps API parameter names that are reserved words (in the generated JavaScript)
     * to a legal replacement identifier.
     */
    private static final Map<String, String> nameMap;

    static {
        Map<String, String> initMap = new HashMap<>();
        initMap.put("break", "brk");
        initMap.put("boolean", "bool");
        initMap.put("continue", "cont");
        nameMap = Collections.unmodifiableMap(initMap);
    }

    /** Creates the generator writing into the default {@code nodejs/api/zapv2} directory. */
    public NodeJSAPIGenerator() {
        super("nodejs/api/zapv2");
    }

    /**
     * Creates the generator writing into the given directory.
     *
     * @param path output directory for the generated files
     * @param optional whether the generated endpoints belong to an optional add-on
     */
    public NodeJSAPIGenerator(String path, boolean optional) {
        super(path, optional);
    }

    /**
     * Writes one JS prototype function for the given API element (view/action/other).
     *
     * @param element the API endpoint being generated
     * @param component the implementor prefix, e.g. {@code "core"}
     * @param type one of the *_ENDPOINT constants
     * @param out destination writer
     * @throws IOException on write failure
     */
    private void generateNodeJSElement(ApiElement element, String component, String type, Writer out)
            throws IOException {
        String className = createClassName(component);
        boolean hasParams = false;

        // Add description if defined
        String descTag = element.getDescriptionTag();
        if (descTag == null) {
            // This is the default, but it can be overriden by the getDescriptionTag method if required
            descTag = component + ".api." + type + "." + element.getName();
        }
        try {
            String desc = getMessages().getString(descTag);
            out.write("/**\n");
            out.write(" * " + desc + "\n");
            if (isOptional()) {
                out.write(" * " + OPTIONAL_MESSAGE + "\n");
            }
            out.write(" **/\n");
        } catch (Exception e) {
            // Might not be set, so just print out the ones that are missing
            System.out.println("No i18n for: " + descTag);
            if (isOptional()) {
                out.write("/**\n");
                out.write(" * " + OPTIONAL_MESSAGE + "\n");
                out.write(" **/\n");
            }
        }

        out.write(className + ".prototype." + createMethodName(element.getName()) + " = function (");
        if (element.getMandatoryParamNames() != null) {
            for (String param : element.getMandatoryParamNames()) {
                if (! hasParams) {
                    hasParams = true;
                } else {
                    out.write(", ");
                }
                out.write(safeName(param.toLowerCase()));
            }
        }
        if (element.getOptionalParamNames() != null) {
            for (String param : element.getOptionalParamNames()) {
                if (! hasParams) {
                    hasParams = true;
                } else {
                    out.write(", ");
                }
                out.write(safeName(param.toLowerCase()));
            }
        }
        if (type.equals(ACTION_ENDPOINT) || type.equals(OTHER_ENDPOINT)) {
            // Always add the API key - we've no way of knowing if it will be required or not
            if (hasParams) {
                out.write(", ");
            }
            hasParams = true;
            out.write(API.API_KEY_PARAM);
        }
        if (hasParams) {
            out.write(", ");
        }
        out.write("callback) {\n");

        if (type.equals(ACTION_ENDPOINT) || type.equals(OTHER_ENDPOINT)) {
            // Make the API key optional
            out.write(" if (!callback && typeof(" + API.API_KEY_PARAM + ") === 'function') {\n");
            out.write(" callback = " + API.API_KEY_PARAM + ";\n");
            out.write(" " + API.API_KEY_PARAM + " = null;\n");
            out.write(" }\n");
        }

        // , {'url': url}))
        StringBuilder reqParams = new StringBuilder();
        if (hasParams) {
            reqParams.append("{");
            boolean first = true;
            if (element.getMandatoryParamNames() != null) {
                for (String param : element.getMandatoryParamNames()) {
                    if (first) {
                        first = false;
                    } else {
                        reqParams.append(", ");
                    }
                    reqParams.append("'" + param + "' : " + safeName(param.toLowerCase()));
                }
            }
            if (type.equals(ACTION_ENDPOINT) || type.equals(OTHER_ENDPOINT)) {
                // Always add the API key - we've no way of knowing if it will be required or not
                if (!first) {
                    reqParams.append(", ");
                }
                reqParams.append("'" + API.API_KEY_PARAM + "' : " + API.API_KEY_PARAM);
            }
            reqParams.append("}");

            if (element.getOptionalParamNames() != null && !element.getOptionalParamNames().isEmpty()) {
                // Optional params are added to a 'params' object only when actually supplied.
                out.write(" var params = ");
                out.write(reqParams.toString());
                out.write(";\n");
                reqParams.replace(0, reqParams.length(), "params");
                for (String param : element.getOptionalParamNames()) {
                    out.write(" if ("+safeName(param.toLowerCase())+" && "+safeName(param.toLowerCase())+" !== null) {\n");
                    out.write(" params['" + param + "'] = " + safeName(param.toLowerCase())+";\n");
                    out.write(" }\n");
                }
            }
        }

        // "other" endpoints return raw responses and go through a different client call.
        String method = "request";
        if (type.equals(OTHER_ENDPOINT)) {
            method = "requestOther";
        }
        out.write(" this.api." + method + "('/" + component + "/" + type + "/" + element.getName() + "/'");
        if (hasParams) {
            out.write(", ");
            out.write(reqParams.toString());
        }
        out.write(", callback);\n");
        out.write("};\n\n");
    }

    /** Returns a JS-safe identifier for the given name (maps reserved words). */
    private static String safeName(String name) {
        // getOrDefault replaces the containsKey/get pair — single lookup, same result.
        return nameMap.getOrDefault(name, name);
    }

    private static String createFileName(String name) {
        return safeName(name) + ".js";
    }

    private static String createMethodName(String name) {
        return removeAllFullStopCharacters(safeName(name));
    }

    /** Capitalizes the first character, e.g. {@code core -> Core}. */
    private static String createClassName(String name) {
        return name.substring(0, 1).toUpperCase() + name.substring(1);
    }

    private static String removeAllFullStopCharacters(String string) {
        return string.replaceAll("\\.", "");
    }

    /**
     * Generates one {@code <prefix>.js} file with a constructor function and one
     * prototype method per view/action/other endpoint of the implementor.
     */
    @Override
    protected void generateAPIFiles(ApiImplementor imp) throws IOException {
        String className = createClassName(imp.getPrefix());
        Path file = getDirectory().resolve(createFileName(imp.getPrefix()));
        System.out.println("Generating " + file.toAbsolutePath());
        try (BufferedWriter out = Files.newBufferedWriter(file, StandardCharsets.UTF_8)) {
            out.write(HEADER);
            out.write("'use strict';\n\n");
            out.write("/**\n");
            out.write(" * This file was automatically generated.\n");
            out.write(" */\n");
            out.write("function " + className + "(clientApi) {\n");
            out.write(" this.api = clientApi;\n");
            out.write("}\n\n");

            for (ApiElement view : imp.getApiViews()) {
                this.generateNodeJSElement(view, imp.getPrefix(), VIEW_ENDPOINT, out);
            }
            for (ApiElement action : imp.getApiActions()) {
                this.generateNodeJSElement(action, imp.getPrefix(), ACTION_ENDPOINT, out);
            }
            for (ApiElement other : imp.getApiOthers()) {
                this.generateNodeJSElement(other, imp.getPrefix(), OTHER_ENDPOINT, out);
            }
            out.write("module.exports = " + className + ";\n");
        }
    }

    public static void main(String[] args) throws Exception {
        // Command for generating a Node.js version of the ZAP API
        NodeJSAPIGenerator wapi = new NodeJSAPIGenerator();
        wapi.generateCoreAPIFiles();
    }
}
package mytown.entities;

import myessentials.chat.api.ChatComponentFormatted;
import myessentials.chat.api.ChatManager;
import myessentials.chat.api.IChatFormat;
import myessentials.localization.api.LocalManager;
import myessentials.teleport.Teleport;
import myessentials.utils.PlayerUtils;
import mypermissions.permission.api.proxy.PermissionProxy;
import mypermissions.permission.core.entities.PermissionLevel;
import mytown.MyTown;
import mytown.api.container.ResidentRankMap;
import mytown.api.container.TicketMap;
import mytown.config.Config;
import mytown.entities.flag.Flag;
import mytown.entities.flag.FlagType;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.util.ChatComponentText;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.util.IChatComponent;
import net.minecraftforge.common.ForgeChunkManager;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

/**
 * Defines a Town. A Town is made up of Residents, Ranks, Blocks, and Plots.
 */
public class Town implements Comparable<Town>, IChatFormat {

    // oldName is non-null only between a rename and the datasource save (see rename/resetOldName).
    private String name, oldName = null;
    protected int maxFarClaims = Config.instance.maxFarClaims.get();
    private Nation nation;
    private Teleport spawn;

    public final TicketMap ticketMap = new TicketMap(this);
    public final ResidentRankMap residentsMap = new ResidentRankMap();
    public final Rank.Container ranksContainer = new Rank.Container();
    public final Plot.Container plotsContainer = new Plot.Container(Config.instance.defaultMaxPlots.get());
    public final Flag.Container flagsContainer = new Flag.Container();
    public final TownBlock.Container townBlocksContainer = new TownBlock.Container();
    public final BlockWhitelist.Container blockWhitelistsContainer = new BlockWhitelist.Container();
    public final Bank bank = new Bank(this);

    public Town(String name) {
        this.name = name;
    }

    /**
     * Notifies every resident in this town sending a message.
     */
    public void notifyEveryone(IChatComponent message) {
        for (Resident r : residentsMap.keySet()) {
            ChatManager.send(r.getPlayer(), message);
        }
    }

    /** Chat component naming the town's owner: the mayor, or an "admins" placeholder. */
    public IChatComponent getOwnerComponent() {
        Resident mayor = residentsMap.getMayor();
        return mayor == null ? LocalManager.get("mytown.notification.town.owners.admins") : mayor.toChatMessage();
    }

    /**
     * Checks if the Resident is allowed to do the action specified by the FlagType at the coordinates given.
     * This method will go through all the plots and prioritize the plot's flags over town flags.
     */
    public boolean hasPermission(Resident res, FlagType<Boolean> flagType, int dim, int x, int y, int z) {
        Plot plot = plotsContainer.get(dim, x, y, z);
        if (plot == null) {
            return hasPermission(res, flagType);
        } else {
            return plot.hasPermission(res, flagType);
        }
    }

    /**
     * Checks if the Resident is allowed to do the action specified by the FlagType in this town.
     * Sends a denial message (and the town owners) to the resident when permission is refused.
     */
    public boolean hasPermission(Resident res, FlagType<Boolean> flagType) {
        // A flag that currently evaluates to true allows the action for everyone.
        if (flagType.configurable ? flagsContainer.getValue(flagType) : flagType.defaultValue) {
            return true;
        }
        if (res == null || res.getFakePlayer()) {
            return false;
        }

        if (residentsMap.containsKey(res)) {
            // Town members: when the town is restricted, the resident must additionally
            // bypass the RESTRICTIONS flag before the specific flag's bypass is checked.
            if (flagsContainer.getValue(FlagType.RESTRICTIONS)
                    && !canBypass(res, FlagType.RESTRICTIONS.getBypassPermission(), true)) {
                sendDenial(res, flagType);
                return false;
            }
            if (!canBypass(res, flagType.getBypassPermission(), true)) {
                sendDenial(res, flagType);
                return false;
            }
        } else if (!canBypass(res, flagType.getBypassPermission(), false)) {
            // Outsiders can only bypass through the external permission system.
            sendDenial(res, flagType);
            return false;
        }
        return true;
    }

    /**
     * True if the resident can bypass via their town rank (only when {@code checkRank})
     * or via the external permission system.
     */
    private boolean canBypass(Resident res, String permission, boolean checkRank) {
        if (checkRank && hasPermission(res, permission)) {
            return true;
        }
        return PermissionProxy.getPermissionManager().hasPermission(res.getUUID(), permission);
    }

    /** Sends the flag's denial message plus the town owners to the resident. */
    private void sendDenial(Resident res, FlagType<Boolean> flagType) {
        ChatManager.send(res.getPlayer(), flagType.getDenialKey());
        ChatManager.send(res.getPlayer(), "mytown.notification.town.owners", getOwnerComponent());
    }

    /**
     * Permission node check for Residents
     */
    public boolean hasPermission(Resident res, String permission) {
        if (!residentsMap.containsKey(res)) {
            return false;
        }
        Rank rank = residentsMap.get(res);
        return rank.permissionsContainer.hasPermission(permission) == PermissionLevel.ALLOWED;
    }

    /** Flag value at the given coordinates; plot flags take priority for plot-level perms. */
    public <T> T getValueAtCoords(int dim, int x, int y, int z, FlagType<T> flagType) {
        Plot plot = plotsContainer.get(dim, x, y, z);
        if (plot == null || !flagType.isPlotPerm) {
            return flagsContainer.getValue(flagType);
        } else {
            return plot.flagsContainer.getValue(flagType);
        }
    }

    /**
     * Used to get the owners of a plot (or a town) at the position given
     * Returns an empty container if position is not in town
     */
    public Resident.Container getOwnersAtPosition(int dim, int x, int y, int z) {
        Resident.Container result = new Resident.Container();
        Plot plot = plotsContainer.get(dim, x, y, z);
        if (plot == null) {
            // No plot: the mayor owns the whole town (admin towns have no owner).
            if (isPointInTown(dim, x, z) && !(this instanceof AdminTown) && !residentsMap.isEmpty()) {
                Resident mayor = residentsMap.getMayor();
                if (mayor != null) {
                    result.add(mayor);
                }
            }
        } else {
            for (Resident res : plot.ownersContainer) {
                result.add(res);
            }
        }
        return result;
    }

    /**
     * Teleports the resident to the town spawn and starts their teleport cooldown.
     * NOTE(review): assumes a spawn is set — callers should check hasSpawn() first, or this NPEs.
     */
    public void sendToSpawn(Resident res) {
        EntityPlayer pl = res.getPlayer();
        if (pl != null) {
            PlayerUtils.teleport((EntityPlayerMP) pl, spawn.getDim(), spawn.getX(), spawn.getY(), spawn.getZ());
            res.setTeleportCooldown(Config.instance.teleportCooldown.get());
        }
    }

    public int getMaxFarClaims() {
        return maxFarClaims + townBlocksContainer.getExtraFarClaims();
    }

    /** Maximum claimable blocks: mayor allowance + per-resident allowance + all extras. */
    public int getMaxBlocks() {
        int mayorBlocks = Config.instance.blocksMayor.get();
        int residentsBlocks = Config.instance.blocksResident.get() * (residentsMap.size() - 1);
        // Extras (per-resident + town-wide) are exactly what getExtraBlocks() computes.
        return mayorBlocks + residentsBlocks + getExtraBlocks();
    }

    /** Extra blocks from residents' personal bonuses plus the town-wide bonus. */
    public int getExtraBlocks() {
        int residentsExtra = 0;
        for (Resident res : residentsMap.keySet()) {
            residentsExtra += res.getExtraBlocks();
        }
        return residentsExtra + townBlocksContainer.getExtraBlocks();
    }

    /* ----- Comparable ----- */

    @Override
    public int compareTo(Town t) {
        // Rank towns by resident count, descending: more residents sorts first.
        // Replaces the previous if/else chain (which ended in an unreachable return).
        return Integer.compare(t.residentsMap.size(), residentsMap.size());
    }

    public String getName() {
        return name;
    }

    public String getOldName() {
        return oldName;
    }

    /**
     * Renames this current Town setting oldName to the previous name. You MUST set oldName to null after saving it in the Datasource
     */
    public void rename(String newName) {
        oldName = name;
        name = newName;
    }

    /**
     * Resets the oldName to null. You MUST call this after a name change in the Datasource!
     */
    public void resetOldName() {
        oldName = null;
    }

    public Nation getNation() {
        return nation;
    }

    public void setNation(Nation nation) {
        this.nation = nation;
    }

    public boolean hasSpawn() {
        return spawn != null;
    }

    public Teleport getSpawn() {
        return spawn;
    }

    public void setSpawn(Teleport spawn) {
        this.spawn = spawn;
    }

    /**
     * Checks if the given block in non-chunk coordinates is in this Town
     */
    public boolean isPointInTown(int dim, int x, int z) {
        // >> 4 converts block coordinates to chunk coordinates.
        return isChunkInTown(dim, x >> 4, z >> 4);
    }

    public boolean isChunkInTown(int dim, int chunkX, int chunkZ) {
        return townBlocksContainer.contains(dim, chunkX, chunkZ);
    }

    @Override
    public String toString() {
        return toChatMessage().getUnformattedText();
    }

    @Override
    public IChatComponent toChatMessage() {
        IChatComponent header = LocalManager.get("myessentials.format.list.header", new ChatComponentFormatted("{9|%s}", getName()));
        IChatComponent hoverComponent = ((ChatComponentFormatted) LocalManager.get("mytown.format.town.long", header, residentsMap.size(),
                townBlocksContainer.size(), getMaxBlocks(), plotsContainer.size(),
                residentsMap, ranksContainer)).applyDelimiter("\n");
        return LocalManager.get("mytown.format.town.short", name, hoverComponent);
    }

    /** Ordered collection of towns with a designated "main" town (defaults to the first added). */
    public static class Container extends ArrayList<Town> implements IChatFormat {

        private Town mainTown;
        public boolean isSelectedTownSaved = false;

        @Override
        public boolean add(Town town) {
            // First town added becomes the main town by default.
            if (mainTown == null) {
                mainTown = town;
            }
            return super.add(town);
        }

        public Town get(String name) {
            for (Town town : this) {
                if (town.getName().equals(name)) {
                    return town;
                }
            }
            return null;
        }

        public void remove(String name) {
            for (Iterator<Town> it = iterator(); it.hasNext(); ) {
                Town town = it.next();
                if (town.getName().equals(name)) {
                    it.remove();
                }
            }
        }

        public boolean contains(String name) {
            for (Town town : this) {
                if (town.getName().equals(name)) {
                    return true;
                }
            }
            return false;
        }

        public void setMainTown(Town town) {
            if (contains(town)) {
                mainTown = town;
            }
        }

        /** Returns the main town, falling back to the first element when unset or stale. */
        public Town getMainTown() {
            // Null-check first (previously checked after contains(), which worked only
            // because ArrayList.contains(null) is safe).
            if (mainTown == null || !contains(mainTown)) {
                if (isEmpty()) {
                    return null;
                }
                mainTown = get(0);
            }
            return mainTown;
        }

        @Override
        public IChatComponent toChatMessage() {
            IChatComponent root = new ChatComponentText("");
            for (Town town : this) {
                if (root.getSiblings().size() > 0) {
                    root.appendSibling(new ChatComponentFormatted("{7|, }"));
                }
                root.appendSibling(town.toChatMessage());
            }
            return root;
        }
    }
}
/*! ****************************************************************************** * * Pentaho Data Integration * * Copyright (C) 2002-2019 by Hitachi Vantara : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.di.trans.steps.xmlinputstream; import java.util.List; import org.pentaho.di.core.CheckResult; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.annotations.Step; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleStepException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.row.value.ValueMetaInteger; import org.pentaho.di.core.row.value.ValueMetaString; import org.pentaho.di.core.util.Utils; import org.pentaho.di.core.variables.VariableSpace; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.repository.ObjectId; import org.pentaho.di.repository.Repository; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStepMeta; import org.pentaho.di.trans.step.StepDataInterface; import 
org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.metastore.api.IMetaStore; import org.w3c.dom.Node; @Step( id = "XMLInputStream", image = "xml_input_stream.svg", i18nPackageName = "org.pentaho.di.trans.steps.xmlinputstream", name = "XMLInputStream.name", description = "XMLInputStream.description", categoryDescription = "XMLInputStream.category", documentationUrl = "Products/XML_Input_Stream_(StAX)" ) public class XMLInputStreamMeta extends BaseStepMeta implements StepMetaInterface { private static final int DEFAULT_STRING_LEN_FILENAME = 256; // default length for XML path private static final int DEFAULT_STRING_LEN_PATH = 1024; // default length for XML path public static final String DEFAULT_STRING_LEN = "1024"; // used by defaultStringLen public static final String DEFAULT_ENCODING = "UTF-8"; // used by encoding private String filename; private boolean addResultFile; /** The number of rows to ignore before sending rows to the next step */ private String nrRowsToSkip; // String for variable usage, enables chunk loading defined in an outer loop /** The maximum number of lines to read */ private String rowLimit; // String for variable usage, enables chunk loading defined in an outer loop /** This is the default String length for name/value elements & attributes */ private String defaultStringLen; // default set to DEFAULT_STRING_LEN /** Encoding to be used */ private String encoding; // default set to DEFAULT_ENCODING /** Enable Namespaces in the output? (will be slower) */ private boolean enableNamespaces; /** Trim all name/value elements & attributes? 
*/ private boolean enableTrim; // trim is also eliminating white spaces, tab, cr, lf at the beginning and end of the // string // The fields in the output stream private boolean includeFilenameField; private String filenameField; private boolean includeRowNumberField; private String rowNumberField; private boolean includeXmlDataTypeNumericField; private String xmlDataTypeNumericField; private boolean includeXmlDataTypeDescriptionField; private String xmlDataTypeDescriptionField; private boolean includeXmlLocationLineField; private String xmlLocationLineField; private boolean includeXmlLocationColumnField; private String xmlLocationColumnField; private boolean includeXmlElementIDField; private String xmlElementIDField; private boolean includeXmlParentElementIDField; private String xmlParentElementIDField; private boolean includeXmlElementLevelField; private String xmlElementLevelField; private boolean includeXmlPathField; private String xmlPathField; private boolean includeXmlParentPathField; private String xmlParentPathField; private boolean includeXmlDataNameField; private String xmlDataNameField; private boolean includeXmlDataValueField; private String xmlDataValueField; /** Are we accepting filenames in input rows? 
*/ public boolean sourceFromInput; /** The field in which the filename is placed */ public String sourceFieldName; public XMLInputStreamMeta() { super(); // allocate BaseStepMeta } @Override public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { int defaultStringLenNameValueElements = Const.toInt( space.environmentSubstitute( defaultStringLen ), new Integer( DEFAULT_STRING_LEN ) ); if ( includeFilenameField ) { ValueMetaInterface v = new ValueMetaString( space.environmentSubstitute( filenameField ) ); v.setLength( DEFAULT_STRING_LEN_FILENAME ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeRowNumberField ) { ValueMetaInterface v = new ValueMetaInteger( space.environmentSubstitute( rowNumberField ) ); v.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH ); v.setOrigin( name ); r.addValueMeta( v ); } if ( includeXmlDataTypeNumericField ) { ValueMetaInterface vdtn = new ValueMetaInteger( space.environmentSubstitute( xmlDataTypeNumericField ) ); vdtn.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH ); vdtn.setOrigin( name ); r.addValueMeta( vdtn ); } if ( includeXmlDataTypeDescriptionField ) { ValueMetaInterface vdtd = new ValueMetaString( space.environmentSubstitute( xmlDataTypeDescriptionField ) ); vdtd.setLength( 25 ); vdtd.setOrigin( name ); r.addValueMeta( vdtd ); } if ( includeXmlLocationLineField ) { ValueMetaInterface vline = new ValueMetaInteger( space.environmentSubstitute( xmlLocationLineField ) ); vline.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH ); vline.setOrigin( name ); r.addValueMeta( vline ); } if ( includeXmlLocationColumnField ) { ValueMetaInterface vcol = new ValueMetaInteger( space.environmentSubstitute( xmlLocationColumnField ) ); vcol.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH ); vcol.setOrigin( name ); r.addValueMeta( vcol ); } if ( includeXmlElementIDField ) { ValueMetaInterface vdid = new 
ValueMetaInteger( "xml_element_id" ); vdid.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH ); vdid.setOrigin( name ); r.addValueMeta( vdid ); } if ( includeXmlParentElementIDField ) { ValueMetaInterface vdparentid = new ValueMetaInteger( "xml_parent_element_id" ); vdparentid.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH ); vdparentid.setOrigin( name ); r.addValueMeta( vdparentid ); } if ( includeXmlElementLevelField ) { ValueMetaInterface vdlevel = new ValueMetaInteger( "xml_element_level" ); vdlevel.setLength( ValueMetaInterface.DEFAULT_INTEGER_LENGTH ); vdlevel.setOrigin( name ); r.addValueMeta( vdlevel ); } if ( includeXmlPathField ) { ValueMetaInterface vdparentxp = new ValueMetaString( "xml_path" ); vdparentxp.setLength( DEFAULT_STRING_LEN_PATH ); vdparentxp.setOrigin( name ); r.addValueMeta( vdparentxp ); } if ( includeXmlParentPathField ) { ValueMetaInterface vdparentpxp = new ValueMetaString( "xml_parent_path" ); vdparentpxp.setLength( DEFAULT_STRING_LEN_PATH ); vdparentpxp.setOrigin( name ); r.addValueMeta( vdparentpxp ); } if ( includeXmlDataNameField ) { ValueMetaInterface vdname = new ValueMetaString( "xml_data_name" ); vdname.setLength( defaultStringLenNameValueElements ); vdname.setOrigin( name ); r.addValueMeta( vdname ); } if ( includeXmlDataValueField ) { ValueMetaInterface vdval = new ValueMetaString( "xml_data_value" ); vdval.setLength( defaultStringLenNameValueElements ); vdval.setOrigin( name ); r.addValueMeta( vdval ); } } @Override public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException { try { sourceFromInput = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "sourceFromInput" ) ); sourceFieldName = Const.NVL( XMLHandler.getTagValue( stepnode, "sourceFieldName" ), "" ); filename = Const.NVL( XMLHandler.getTagValue( stepnode, "filename" ), "" ); addResultFile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "addResultFile" ) ); nrRowsToSkip = Const.NVL( 
XMLHandler.getTagValue( stepnode, "nrRowsToSkip" ), "0" ); rowLimit = Const.NVL( XMLHandler.getTagValue( stepnode, "rowLimit" ), "0" ); defaultStringLen = Const.NVL( XMLHandler.getTagValue( stepnode, "defaultStringLen" ), DEFAULT_STRING_LEN ); encoding = Const.NVL( XMLHandler.getTagValue( stepnode, "encoding" ), DEFAULT_ENCODING ); enableNamespaces = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "enableNamespaces" ) ); enableTrim = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "enableTrim" ) ); // The fields in the output stream // When they are undefined (checked with NVL) the original default value will be taken includeFilenameField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeFilenameField" ) ); filenameField = Const.NVL( XMLHandler.getTagValue( stepnode, "filenameField" ), filenameField ); includeRowNumberField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeRowNumberField" ) ); rowNumberField = Const.NVL( XMLHandler.getTagValue( stepnode, "rowNumberField" ), rowNumberField ); includeXmlDataTypeNumericField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeDataTypeNumericField" ) ); xmlDataTypeNumericField = Const.NVL( XMLHandler.getTagValue( stepnode, "dataTypeNumericField" ), xmlDataTypeNumericField ); includeXmlDataTypeDescriptionField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeDataTypeDescriptionField" ) ); xmlDataTypeDescriptionField = Const.NVL( XMLHandler.getTagValue( stepnode, "dataTypeDescriptionField" ), xmlDataTypeDescriptionField ); includeXmlLocationLineField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeXmlLocationLineField" ) ); xmlLocationLineField = Const.NVL( XMLHandler.getTagValue( stepnode, "xmlLocationLineField" ), xmlLocationLineField ); includeXmlLocationColumnField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeXmlLocationColumnField" ) ); xmlLocationColumnField = Const.NVL( XMLHandler.getTagValue( 
stepnode, "xmlLocationColumnField" ), xmlLocationColumnField ); includeXmlElementIDField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeXmlElementIDField" ) ); xmlElementIDField = Const.NVL( XMLHandler.getTagValue( stepnode, "xmlElementIDField" ), xmlElementIDField ); includeXmlParentElementIDField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeXmlParentElementIDField" ) ); xmlParentElementIDField = Const.NVL( XMLHandler.getTagValue( stepnode, "xmlParentElementIDField" ), xmlParentElementIDField ); includeXmlElementLevelField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeXmlElementLevelField" ) ); xmlElementLevelField = Const.NVL( XMLHandler.getTagValue( stepnode, "xmlElementLevelField" ), xmlElementLevelField ); includeXmlPathField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeXmlPathField" ) ); xmlPathField = Const.NVL( XMLHandler.getTagValue( stepnode, "xmlPathField" ), xmlPathField ); includeXmlParentPathField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeXmlParentPathField" ) ); xmlParentPathField = Const.NVL( XMLHandler.getTagValue( stepnode, "xmlParentPathField" ), xmlParentPathField ); includeXmlDataNameField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeXmlDataNameField" ) ); xmlDataNameField = Const.NVL( XMLHandler.getTagValue( stepnode, "xmlDataNameField" ), xmlDataNameField ); includeXmlDataValueField = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "includeXmlDataValueField" ) ); xmlDataValueField = Const.NVL( XMLHandler.getTagValue( stepnode, "xmlDataValueField" ), xmlDataValueField ); } catch ( Exception e ) { throw new KettleXMLException( "Unable to load step info from XML", e ); } } @Override public Object clone() { XMLInputStreamMeta retval = (XMLInputStreamMeta) super.clone(); // TODO check return retval; } @Override public String getXML() { StringBuffer retval = new StringBuffer(); retval.append( " " + 
XMLHandler.addTagValue( "sourceFromInput", sourceFromInput ) ); retval.append( " " + XMLHandler.addTagValue( "sourceFieldName", sourceFieldName ) ); retval.append( " " + XMLHandler.addTagValue( "filename", filename ) ); retval.append( " " + XMLHandler.addTagValue( "addResultFile", addResultFile ) ); retval.append( " " + XMLHandler.addTagValue( "nrRowsToSkip", nrRowsToSkip ) ); retval.append( " " + XMLHandler.addTagValue( "rowLimit", rowLimit ) ); retval.append( " " + XMLHandler.addTagValue( "defaultStringLen", defaultStringLen ) ); retval.append( " " + XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " + XMLHandler.addTagValue( "enableNamespaces", enableNamespaces ) ); retval.append( " " + XMLHandler.addTagValue( "enableTrim", enableTrim ) ); // The fields in the output stream retval.append( " " + XMLHandler.addTagValue( "includeFilenameField", includeFilenameField ) ); retval.append( " " + XMLHandler.addTagValue( "filenameField", filenameField ) ); retval.append( " " + XMLHandler.addTagValue( "includeRowNumberField", includeRowNumberField ) ); retval.append( " " + XMLHandler.addTagValue( "rowNumberField", rowNumberField ) ); retval.append( " " + XMLHandler.addTagValue( "includeDataTypeNumericField", includeXmlDataTypeNumericField ) ); retval.append( " " + XMLHandler.addTagValue( "dataTypeNumericField", xmlDataTypeNumericField ) ); retval.append( " " + XMLHandler.addTagValue( "includeDataTypeDescriptionField", includeXmlDataTypeDescriptionField ) ); retval.append( " " + XMLHandler.addTagValue( "dataTypeDescriptionField", xmlDataTypeDescriptionField ) ); retval.append( " " + XMLHandler.addTagValue( "includeXmlLocationLineField", includeXmlLocationLineField ) ); retval.append( " " + XMLHandler.addTagValue( "xmlLocationLineField", xmlLocationLineField ) ); retval.append( " " + XMLHandler.addTagValue( "includeXmlLocationColumnField", includeXmlLocationColumnField ) ); retval.append( " " + XMLHandler.addTagValue( "xmlLocationColumnField", 
xmlLocationColumnField ) ); retval.append( " " + XMLHandler.addTagValue( "includeXmlElementIDField", includeXmlElementIDField ) ); retval.append( " " + XMLHandler.addTagValue( "xmlElementIDField", xmlElementIDField ) ); retval.append( " " + XMLHandler.addTagValue( "includeXmlParentElementIDField", includeXmlParentElementIDField ) ); retval.append( " " + XMLHandler.addTagValue( "xmlParentElementIDField", xmlParentElementIDField ) ); retval.append( " " + XMLHandler.addTagValue( "includeXmlElementLevelField", includeXmlElementLevelField ) ); retval.append( " " + XMLHandler.addTagValue( "xmlElementLevelField", xmlElementLevelField ) ); retval.append( " " + XMLHandler.addTagValue( "includeXmlPathField", includeXmlPathField ) ); retval.append( " " + XMLHandler.addTagValue( "xmlPathField", xmlPathField ) ); retval.append( " " + XMLHandler.addTagValue( "includeXmlParentPathField", includeXmlParentPathField ) ); retval.append( " " + XMLHandler.addTagValue( "xmlParentPathField", xmlParentPathField ) ); retval.append( " " + XMLHandler.addTagValue( "includeXmlDataNameField", includeXmlDataNameField ) ); retval.append( " " + XMLHandler.addTagValue( "xmlDataNameField", xmlDataNameField ) ); retval.append( " " + XMLHandler.addTagValue( "includeXmlDataValueField", includeXmlDataValueField ) ); retval.append( " " + XMLHandler.addTagValue( "xmlDataValueField", xmlDataValueField ) ); return retval.toString(); } @Override public void setDefault() { filename = ""; addResultFile = false; nrRowsToSkip = "0"; rowLimit = "0"; defaultStringLen = DEFAULT_STRING_LEN; encoding = DEFAULT_ENCODING; enableNamespaces = false; enableTrim = true; // The fields in the output stream includeFilenameField = false; filenameField = "xml_filename"; includeRowNumberField = false; rowNumberField = "xml_row_number"; includeXmlDataTypeNumericField = false; xmlDataTypeNumericField = "xml_data_type_numeric"; includeXmlDataTypeDescriptionField = true; xmlDataTypeDescriptionField = "xml_data_type_description"; 
// --- Tail of a defaults-initialization method (its start is before this chunk; presumably
// setDefault() — TODO confirm against the full file). Location fields default to OFF,
// the structural/XML-data fields default to ON.
  includeXmlLocationLineField = false;
  xmlLocationLineField = "xml_location_line";
  includeXmlLocationColumnField = false;
  xmlLocationColumnField = "xml_location_column";
  includeXmlElementIDField = true;
  xmlElementIDField = "xml_element_id";
  includeXmlParentElementIDField = true;
  xmlParentElementIDField = "xml_parent_element_id";
  includeXmlElementLevelField = true;
  xmlElementLevelField = "xml_element_level";
  includeXmlPathField = true;
  xmlPathField = "xml_path";
  includeXmlParentPathField = true;
  xmlParentPathField = "xml_parent_path";
  includeXmlDataNameField = true;
  xmlDataNameField = "xml_data_name";
  includeXmlDataValueField = true;
  xmlDataValueField = "xml_data_value";
}

/**
 * Reads this step's configuration from a Kettle repository.
 *
 * <p>String attributes are wrapped in {@code Const.NVL(...)} so that an attribute missing from
 * the repository falls back to a sensible value: "" or "0" for the source settings, and the
 * field's current (default) value for the output-field names.
 *
 * <p>NOTE: the data-type attributes use legacy names WITHOUT the "Xml" prefix
 * ("includeDataTypeNumericField", "dataTypeNumericField", ...) even though the Java fields carry
 * the prefix. These names must match {@link #saveRep} exactly; do not "fix" them.
 *
 * @param id_step the repository object id of this step
 * @throws KettleException if any repository read fails (cause preserved)
 */
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    sourceFromInput = rep.getStepAttributeBoolean( id_step, "sourceFromInput" );
    sourceFieldName = Const.NVL( rep.getStepAttributeString( id_step, "sourceFieldName" ), "" );
    filename = Const.NVL( rep.getStepAttributeString( id_step, "filename" ), "" );
    addResultFile = rep.getStepAttributeBoolean( id_step, "addResultFile" );
    nrRowsToSkip = Const.NVL( rep.getStepAttributeString( id_step, "nrRowsToSkip" ), "0" );
    rowLimit = Const.NVL( rep.getStepAttributeString( id_step, "rowLimit" ), "0" );
    defaultStringLen = Const.NVL( rep.getStepAttributeString( id_step, "defaultStringLen" ), DEFAULT_STRING_LEN );
    encoding = Const.NVL( rep.getStepAttributeString( id_step, "encoding" ), DEFAULT_ENCODING );
    enableNamespaces = rep.getStepAttributeBoolean( id_step, "enableNamespaces" );
    enableTrim = rep.getStepAttributeBoolean( id_step, "enableTrim" );

    // The fields in the output stream
    // When they are undefined (checked with NVL) the original default value will be taken
    includeFilenameField = rep.getStepAttributeBoolean( id_step, "includeFilenameField" );
    filenameField = Const.NVL( rep.getStepAttributeString( id_step, "filenameField" ), filenameField );
    includeRowNumberField = rep.getStepAttributeBoolean( id_step, "includeRowNumberField" );
    rowNumberField = Const.NVL( rep.getStepAttributeString( id_step, "rowNumberField" ), rowNumberField );
    // Legacy attribute names (no "Xml" prefix) — kept for repository compatibility.
    includeXmlDataTypeNumericField = rep.getStepAttributeBoolean( id_step, "includeDataTypeNumericField" );
    xmlDataTypeNumericField = Const.NVL( rep.getStepAttributeString( id_step, "dataTypeNumericField" ), xmlDataTypeNumericField );
    includeXmlDataTypeDescriptionField = rep.getStepAttributeBoolean( id_step, "includeDataTypeDescriptionField" );
    xmlDataTypeDescriptionField = Const.NVL( rep.getStepAttributeString( id_step, "dataTypeDescriptionField" ), xmlDataTypeDescriptionField );
    includeXmlLocationLineField = rep.getStepAttributeBoolean( id_step, "includeXmlLocationLineField" );
    xmlLocationLineField = Const.NVL( rep.getStepAttributeString( id_step, "xmlLocationLineField" ), xmlLocationLineField );
    includeXmlLocationColumnField = rep.getStepAttributeBoolean( id_step, "includeXmlLocationColumnField" );
    xmlLocationColumnField = Const.NVL( rep.getStepAttributeString( id_step, "xmlLocationColumnField" ), xmlLocationColumnField );
    includeXmlElementIDField = rep.getStepAttributeBoolean( id_step, "includeXmlElementIDField" );
    xmlElementIDField = Const.NVL( rep.getStepAttributeString( id_step, "xmlElementIDField" ), xmlElementIDField );
    includeXmlParentElementIDField = rep.getStepAttributeBoolean( id_step, "includeXmlParentElementIDField" );
    xmlParentElementIDField = Const.NVL( rep.getStepAttributeString( id_step, "xmlParentElementIDField" ), xmlParentElementIDField );
    includeXmlElementLevelField = rep.getStepAttributeBoolean( id_step, "includeXmlElementLevelField" );
    xmlElementLevelField = Const.NVL( rep.getStepAttributeString( id_step, "xmlElementLevelField" ), xmlElementLevelField );
    includeXmlPathField = rep.getStepAttributeBoolean( id_step, "includeXmlPathField" );
    xmlPathField = Const.NVL( rep.getStepAttributeString( id_step, "xmlPathField" ), xmlPathField );
    includeXmlParentPathField = rep.getStepAttributeBoolean( id_step, "includeXmlParentPathField" );
    xmlParentPathField = Const.NVL( rep.getStepAttributeString( id_step, "xmlParentPathField" ), xmlParentPathField );
    includeXmlDataNameField = rep.getStepAttributeBoolean( id_step, "includeXmlDataNameField" );
    xmlDataNameField = Const.NVL( rep.getStepAttributeString( id_step, "xmlDataNameField" ), xmlDataNameField );
    includeXmlDataValueField = rep.getStepAttributeBoolean( id_step, "includeXmlDataValueField" );
    xmlDataValueField = Const.NVL( rep.getStepAttributeString( id_step, "xmlDataValueField" ), xmlDataValueField );
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}

/**
 * Writes this step's configuration to a Kettle repository.
 *
 * <p>Attribute names mirror {@link #readRep} one-for-one — including the legacy data-type names
 * without the "Xml" prefix. Keep both methods in sync when adding attributes.
 *
 * @throws KettleException if any repository write fails (cause preserved, id_step included in
 *     the message for debugging)
 */
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    rep.saveStepAttribute( id_transformation, id_step, "sourceFromInput", sourceFromInput );
    rep.saveStepAttribute( id_transformation, id_step, "sourceFieldName", sourceFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "filename", filename );
    rep.saveStepAttribute( id_transformation, id_step, "addResultFile", addResultFile );
    rep.saveStepAttribute( id_transformation, id_step, "nrRowsToSkip", nrRowsToSkip );
    rep.saveStepAttribute( id_transformation, id_step, "rowLimit", rowLimit );
    rep.saveStepAttribute( id_transformation, id_step, "defaultStringLen", defaultStringLen );
    rep.saveStepAttribute( id_transformation, id_step, "encoding", encoding );
    rep.saveStepAttribute( id_transformation, id_step, "enableNamespaces", enableNamespaces );
    rep.saveStepAttribute( id_transformation, id_step, "enableTrim", enableTrim );

    // The fields in the output stream
    rep.saveStepAttribute( id_transformation, id_step, "includeFilenameField", includeFilenameField );
    rep.saveStepAttribute( id_transformation, id_step, "filenameField", filenameField );
    rep.saveStepAttribute( id_transformation, id_step, "includeRowNumberField", includeRowNumberField );
    rep.saveStepAttribute( id_transformation, id_step, "rowNumberField", rowNumberField );
    // Legacy attribute names (no "Xml" prefix) — must match readRep().
    rep.saveStepAttribute( id_transformation, id_step, "includeDataTypeNumericField", includeXmlDataTypeNumericField );
    rep.saveStepAttribute( id_transformation, id_step, "dataTypeNumericField", xmlDataTypeNumericField );
    rep.saveStepAttribute( id_transformation, id_step, "includeDataTypeDescriptionField", includeXmlDataTypeDescriptionField );
    rep.saveStepAttribute( id_transformation, id_step, "dataTypeDescriptionField", xmlDataTypeDescriptionField );
    rep.saveStepAttribute( id_transformation, id_step, "includeXmlLocationLineField", includeXmlLocationLineField );
    rep.saveStepAttribute( id_transformation, id_step, "xmlLocationLineField", xmlLocationLineField );
    rep.saveStepAttribute( id_transformation, id_step, "includeXmlLocationColumnField", includeXmlLocationColumnField );
    rep.saveStepAttribute( id_transformation, id_step, "xmlLocationColumnField", xmlLocationColumnField );
    rep.saveStepAttribute( id_transformation, id_step, "includeXmlElementIDField", includeXmlElementIDField );
    rep.saveStepAttribute( id_transformation, id_step, "xmlElementIDField", xmlElementIDField );
    rep.saveStepAttribute( id_transformation, id_step, "includeXmlParentElementIDField", includeXmlParentElementIDField );
    rep.saveStepAttribute( id_transformation, id_step, "xmlParentElementIDField", xmlParentElementIDField );
    rep.saveStepAttribute( id_transformation, id_step, "includeXmlElementLevelField", includeXmlElementLevelField );
    rep.saveStepAttribute( id_transformation, id_step, "xmlElementLevelField", xmlElementLevelField );
    rep.saveStepAttribute( id_transformation, id_step, "includeXmlPathField", includeXmlPathField );
    rep.saveStepAttribute( id_transformation, id_step, "xmlPathField", xmlPathField );
    rep.saveStepAttribute( id_transformation, id_step, "includeXmlParentPathField", includeXmlParentPathField );
    rep.saveStepAttribute( id_transformation, id_step, "xmlParentPathField", xmlParentPathField );
    rep.saveStepAttribute( id_transformation, id_step, "includeXmlDataNameField", includeXmlDataNameField );
    rep.saveStepAttribute( id_transformation, id_step, "xmlDataNameField", xmlDataNameField );
    rep.saveStepAttribute( id_transformation, id_step, "includeXmlDataValueField", includeXmlDataValueField );
    rep.saveStepAttribute( id_transformation, id_step, "xmlDataValueField", xmlDataValueField );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e );
  }
}

/**
 * Validates this step's configuration and appends the results to {@code remarks}.
 *
 * <p>Checks performed: a filename is set; if there are previous steps, the field exists in the
 * incoming row; at least one data-type output field is enabled; both data-name and data-value
 * output fields are enabled.
 */
@Override
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
  // TODO externalize messages
  CheckResult cr;
  if ( Utils.isEmpty( filename ) ) {
    cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, "Filename is not given", stepMeta );
  } else {
    cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, "Filename is given", stepMeta );
  }
  remarks.add( cr );
  if ( transMeta.findNrPrevSteps( stepMeta ) > 0 ) {
    RowMetaInterface previousFields;
    try {
      previousFields = transMeta.getPrevStepFields( stepMeta );
      // NOTE(review): this searches for 'filename' among the previous step's fields, even though
      // the field carrying the XML when sourceFromInput is set is 'sourceFieldName'. Looks like it
      // should be sourceFieldName when sourceFromInput is true — confirm against the step dialog.
      if ( null == previousFields.searchValueMeta( filename ) ) {
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, "Field name is not in previous step", stepMeta );
      } else {
        cr = new CheckResult( CheckResultInterface.TYPE_RESULT_OK, "Field name is in previous step", stepMeta );
      }
    } catch ( KettleStepException e ) {
      cr = new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, "Could not find previous step", stepMeta );
    }
    remarks.add( cr );
  }
  if ( includeXmlDataTypeNumericField || includeXmlDataTypeDescriptionField ) {
    cr = new CheckResult( CheckResultInterface.TYPE_RESULT_COMMENT,
        "At least one Data Type field (numeric or description) is in the data stream", stepMeta );
  } else {
    cr = new CheckResult( CheckResultInterface.TYPE_RESULT_WARNING,
        "Data Type field (numeric or description) is missing in the data stream", stepMeta );
  }
  remarks.add( cr );
  if ( includeXmlDataValueField && includeXmlDataNameField ) {
    cr = new CheckResult( CheckResultInterface.TYPE_RESULT_COMMENT,
        "Data Name and Data Value fields are in the data stream", stepMeta );
  } else {
    cr = new CheckResult( CheckResultInterface.TYPE_RESULT_WARNING,
        "Both Data Name and Data Value fields should be in the data stream", stepMeta );
  }
  remarks.add( cr );
}

/** Creates the runtime step instance for this metadata. */
@Override
public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ) {
  return new XMLInputStream( stepMeta, stepDataInterface, cnr, transMeta, trans );
}

/** Creates the per-execution data holder for this step. */
@Override
public StepDataInterface getStepData() {
  return new XMLInputStreamData();
}

// --- Plain accessors for the step settings and output-field names (no logic). ---

public String getFilename() { return filename; }

public void setFilename( String filename ) { this.filename = filename; }

public boolean isAddResultFile() { return addResultFile; }

public void setAddResultFile( boolean addResultFile ) { this.addResultFile = addResultFile; }

public String getNrRowsToSkip() { return nrRowsToSkip; }

public void setNrRowsToSkip( String nrRowsToSkip ) { this.nrRowsToSkip = nrRowsToSkip; }

public String getRowLimit() { return rowLimit; }

public void setRowLimit( String rowLimit ) { this.rowLimit = rowLimit; }

public String getDefaultStringLen() { return defaultStringLen; }

public void setDefaultStringLen( String defaultStringLen ) { this.defaultStringLen = defaultStringLen; }

public String getEncoding() { return encoding; }

public void setEncoding( String encoding ) { this.encoding = encoding; }

public boolean isEnableNamespaces() { return enableNamespaces; }

public void setEnableNamespaces( boolean enableNamespaces ) { this.enableNamespaces = enableNamespaces; }

public boolean isEnableTrim() { return enableTrim; }

public void setEnableTrim( boolean enableTrim ) { this.enableTrim = enableTrim; }

public boolean isIncludeFilenameField() { return includeFilenameField; }

public void setIncludeFilenameField( boolean includeFilenameField ) { this.includeFilenameField = includeFilenameField; }

public String getFilenameField() { return filenameField; }

public void setFilenameField( String filenameField ) { this.filenameField = filenameField; }

public boolean isIncludeRowNumberField() { return includeRowNumberField; }

public void setIncludeRowNumberField( boolean includeRowNumberField ) { this.includeRowNumberField = includeRowNumberField; }

public String getRowNumberField() { return rowNumberField; }

public void setRowNumberField( String rowNumberField ) { this.rowNumberField = rowNumberField; }

public boolean isIncludeXmlDataTypeNumericField() { return includeXmlDataTypeNumericField; }

public void setIncludeXmlDataTypeNumericField( boolean includeXmlDataTypeNumericField ) { this.includeXmlDataTypeNumericField = includeXmlDataTypeNumericField; }

public String getXmlDataTypeNumericField() { return xmlDataTypeNumericField; }

public void setXmlDataTypeNumericField( String xmlDataTypeNumericField ) { this.xmlDataTypeNumericField = xmlDataTypeNumericField; }

public boolean isIncludeXmlDataTypeDescriptionField() { return includeXmlDataTypeDescriptionField; }

public void setIncludeXmlDataTypeDescriptionField( boolean includeXmlDataTypeDescriptionField ) { this.includeXmlDataTypeDescriptionField = includeXmlDataTypeDescriptionField; }

public String getXmlDataTypeDescriptionField() { return xmlDataTypeDescriptionField; }

public void setXmlDataTypeDescriptionField( String xmlDataTypeDescriptionField ) { this.xmlDataTypeDescriptionField = xmlDataTypeDescriptionField; }

public boolean isIncludeXmlLocationLineField() { return includeXmlLocationLineField; }

public void setIncludeXmlLocationLineField( boolean includeXmlLocationLineField ) { this.includeXmlLocationLineField = includeXmlLocationLineField; }

public String getXmlLocationLineField() { return xmlLocationLineField; }

public void setXmlLocationLineField( String xmlLocationLineField ) { this.xmlLocationLineField = xmlLocationLineField; }

public boolean isIncludeXmlLocationColumnField() { return includeXmlLocationColumnField; }

public void setIncludeXmlLocationColumnField( boolean includeXmlLocationColumnField ) { this.includeXmlLocationColumnField = includeXmlLocationColumnField; }

public String getXmlLocationColumnField() { return xmlLocationColumnField; }

public void setXmlLocationColumnField( String xmlLocationColumnField ) { this.xmlLocationColumnField = xmlLocationColumnField; }

public boolean isIncludeXmlElementIDField() { return includeXmlElementIDField; }

public void setIncludeXmlElementIDField( boolean includeXmlElementIDField ) { this.includeXmlElementIDField = includeXmlElementIDField; }

public String getXmlElementIDField() { return xmlElementIDField; }

public void setXmlElementIDField( String xmlElementIDField ) { this.xmlElementIDField = xmlElementIDField; }

public boolean isIncludeXmlParentElementIDField() { return includeXmlParentElementIDField; }

public void setIncludeXmlParentElementIDField( boolean includeXmlParentElementIDField ) { this.includeXmlParentElementIDField = includeXmlParentElementIDField; }

public String getXmlParentElementIDField() { return xmlParentElementIDField; }

public void setXmlParentElementIDField( String xmlParentElementIDField ) { this.xmlParentElementIDField = xmlParentElementIDField; }

public boolean isIncludeXmlElementLevelField() { return includeXmlElementLevelField; }

public void setIncludeXmlElementLevelField( boolean includeXmlElementLevelField ) { this.includeXmlElementLevelField = includeXmlElementLevelField; }

public String getXmlElementLevelField() { return xmlElementLevelField; }

public void setXmlElementLevelField( String xmlElementLevelField ) { this.xmlElementLevelField = xmlElementLevelField; }

public boolean isIncludeXmlPathField() { return includeXmlPathField; }

public void setIncludeXmlPathField( boolean includeXmlPathField ) { this.includeXmlPathField = includeXmlPathField; }

public String getXmlPathField() { return xmlPathField; }

public void setXmlPathField( String xmlPathField ) { this.xmlPathField = xmlPathField; }

public boolean isIncludeXmlParentPathField() { return includeXmlParentPathField; }

public void setIncludeXmlParentPathField( boolean includeXmlParentPathField ) { this.includeXmlParentPathField = includeXmlParentPathField; }

public String getXmlParentPathField() { return xmlParentPathField; }

public void setXmlParentPathField( String xmlParentPathField ) { this.xmlParentPathField = xmlParentPathField; }

public boolean isIncludeXmlDataNameField() { return includeXmlDataNameField; }

public void setIncludeXmlDataNameField( boolean includeXmlDataNameField ) { this.includeXmlDataNameField = includeXmlDataNameField; }

public String getXmlDataNameField() { return xmlDataNameField; }

public void setXmlDataNameField( String xmlDataNameField ) { this.xmlDataNameField = xmlDataNameField; }

public boolean isIncludeXmlDataValueField() { return includeXmlDataValueField; }

public void setIncludeXmlDataValueField( boolean includeXmlDataValueField ) { this.includeXmlDataValueField = includeXmlDataValueField; }

public String getXmlDataValueField() { return xmlDataValueField; }

public void setXmlDataValueField( String xmlDataValueField ) { this.xmlDataValueField = xmlDataValueField; }
}
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.extractor;

import static com.google.android.exoplayer2.C.RESULT_END_OF_INPUT;
import static com.google.common.truth.Truth.assertThat;
import static java.util.Arrays.copyOf;
import static java.util.Arrays.copyOfRange;
import static org.junit.Assert.fail;

import android.net.Uri;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.testutil.FakeDataSource;
import com.google.android.exoplayer2.upstream.DataSpec;
import java.io.EOFException;
import java.io.IOException;
import java.util.Arrays;
import org.junit.Test;
import org.junit.runner.RunWith;

/**
 * Test for {@link DefaultExtractorInput}.
 *
 * <p>Exercises read/skip/peek and their "fully" variants against a {@link FakeDataSource} that
 * serves {@link #TEST_DATA} in three 3-byte chunks (see {@link #buildDataSource()}), including
 * end-of-input, mid-stream error, and peek-buffer interaction cases.
 */
@RunWith(AndroidJUnit4.class)
public class DefaultExtractorInputTest {

  private static final String TEST_URI = "http://www.google.com";
  // 9 bytes, served in three 3-byte chunks by buildDataSource().
  private static final byte[] TEST_DATA = new byte[] {0, 1, 2, 3, 4, 5, 6, 7, 8};
  private static final int LARGE_TEST_DATA_LENGTH = 8192;

  @Test
  public void testInitialPosition() throws Exception {
    FakeDataSource testDataSource = buildDataSource();
    // The position passed to the constructor is the absolute stream position.
    DefaultExtractorInput input = new DefaultExtractorInput(testDataSource, 123, C.LENGTH_UNSET);
    assertThat(input.getPosition()).isEqualTo(123);
  }

  @Test
  public void testRead() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];
    // We expect to perform three reads of three bytes, as setup in buildTestDataSource.
    int bytesRead = 0;
    bytesRead += input.read(target, 0, TEST_DATA.length);
    assertThat(bytesRead).isEqualTo(3);
    bytesRead += input.read(target, 3, TEST_DATA.length);
    assertThat(bytesRead).isEqualTo(6);
    bytesRead += input.read(target, 6, TEST_DATA.length);
    assertThat(bytesRead).isEqualTo(9);
    // Check the read data is correct.
    assertThat(Arrays.equals(TEST_DATA, target)).isTrue();
    // Check we're now indicated that the end of input is reached.
    int expectedEndOfInput = input.read(target, 0, TEST_DATA.length);
    assertThat(expectedEndOfInput).isEqualTo(RESULT_END_OF_INPUT);
  }

  @Test
  public void testReadPeeked() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];

    input.advancePeekPosition(TEST_DATA.length);

    // A read after peeking is served from the peek buffer in a single call.
    int bytesRead = input.read(target, 0, TEST_DATA.length);
    assertThat(bytesRead).isEqualTo(TEST_DATA.length);

    // Check the read data is correct.
    assertThat(Arrays.equals(TEST_DATA, target)).isTrue();
  }

  @Test
  public void testReadMoreDataPeeked() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];

    input.advancePeekPosition(TEST_DATA.length);

    // Requesting more than is peeked returns only the peeked bytes.
    int bytesRead = input.read(target, 0, TEST_DATA.length + 1);
    assertThat(bytesRead).isEqualTo(TEST_DATA.length);

    // Check the read data is correct.
    assertThat(Arrays.equals(TEST_DATA, target)).isTrue();
  }

  @Test
  public void testReadFullyOnce() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];
    input.readFully(target, 0, TEST_DATA.length);
    // Check that we read the whole of TEST_DATA.
    assertThat(Arrays.equals(TEST_DATA, target)).isTrue();
    assertThat(input.getPosition()).isEqualTo(TEST_DATA.length);
    // Check that we see end of input if we read again with allowEndOfInput set.
    boolean result = input.readFully(target, 0, 1, true);
    assertThat(result).isFalse();
    // Check that we fail with EOFException we read again with allowEndOfInput unset.
    try {
      input.readFully(target, 0, 1);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
  }

  @Test
  public void testReadFullyTwice() throws Exception {
    // Read TEST_DATA in two parts.
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[5];
    input.readFully(target, 0, 5);
    assertThat(Arrays.equals(copyOf(TEST_DATA, 5), target)).isTrue();
    assertThat(input.getPosition()).isEqualTo(5);
    target = new byte[4];
    input.readFully(target, 0, 4);
    assertThat(Arrays.equals(copyOfRange(TEST_DATA, 5, 9), target)).isTrue();
    assertThat(input.getPosition()).isEqualTo(5 + 4);
  }

  @Test
  public void testReadFullyTooMuch() throws Exception {
    // Read more than TEST_DATA. Should fail with an EOFException. Position should not update.
    DefaultExtractorInput input = createDefaultExtractorInput();
    try {
      byte[] target = new byte[TEST_DATA.length + 1];
      input.readFully(target, 0, TEST_DATA.length + 1);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
    assertThat(input.getPosition()).isEqualTo(0);

    // Read more than TEST_DATA with allowEndOfInput set. Should fail with an EOFException because
    // the end of input isn't encountered immediately. Position should not update.
    input = createDefaultExtractorInput();
    try {
      byte[] target = new byte[TEST_DATA.length + 1];
      input.readFully(target, 0, TEST_DATA.length + 1, true);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
    assertThat(input.getPosition()).isEqualTo(0);
  }

  @Test
  public void testReadFullyWithFailingDataSource() throws Exception {
    FakeDataSource testDataSource = buildFailingDataSource();
    DefaultExtractorInput input = new DefaultExtractorInput(testDataSource, 0, C.LENGTH_UNSET);
    try {
      byte[] target = new byte[TEST_DATA.length];
      input.readFully(target, 0, TEST_DATA.length);
      fail();
    } catch (IOException e) {
      // Expected.
    }
    // The position should not have advanced.
    assertThat(input.getPosition()).isEqualTo(0);
  }

  @Test
  public void testReadFullyHalfPeeked() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];

    // Part of the data is served from the peek buffer, the rest from the source.
    input.advancePeekPosition(4);

    input.readFully(target, 0, TEST_DATA.length);

    // Check the read data is correct.
    assertThat(Arrays.equals(TEST_DATA, target)).isTrue();
    assertThat(input.getPosition()).isEqualTo(TEST_DATA.length);
  }

  @Test
  public void testSkip() throws Exception {
    FakeDataSource testDataSource = buildDataSource();
    DefaultExtractorInput input = new DefaultExtractorInput(testDataSource, 0, C.LENGTH_UNSET);
    // We expect to perform three skips of three bytes, as setup in buildTestDataSource.
    for (int i = 0; i < 3; i++) {
      assertThat(input.skip(TEST_DATA.length)).isEqualTo(3);
    }
    // Check we're now indicated that the end of input is reached.
    int expectedEndOfInput = input.skip(TEST_DATA.length);
    assertThat(expectedEndOfInput).isEqualTo(RESULT_END_OF_INPUT);
  }

  @Test
  public void testLargeSkip() throws Exception {
    FakeDataSource testDataSource = buildLargeDataSource();
    DefaultExtractorInput input = new DefaultExtractorInput(testDataSource, 0, C.LENGTH_UNSET);
    // Check that skipping the entire data source succeeds.
    int bytesToSkip = LARGE_TEST_DATA_LENGTH;
    while (bytesToSkip > 0) {
      bytesToSkip -= input.skip(bytesToSkip);
    }
  }

  @Test
  public void testSkipFullyOnce() throws Exception {
    // Skip TEST_DATA.
    DefaultExtractorInput input = createDefaultExtractorInput();
    input.skipFully(TEST_DATA.length);
    assertThat(input.getPosition()).isEqualTo(TEST_DATA.length);
    // Check that we see end of input if we skip again with allowEndOfInput set.
    boolean result = input.skipFully(1, true);
    assertThat(result).isFalse();
    // Check that we fail with EOFException we skip again.
    try {
      input.skipFully(1);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
  }

  @Test
  public void testSkipFullyTwice() throws Exception {
    // Skip TEST_DATA in two parts.
    DefaultExtractorInput input = createDefaultExtractorInput();
    input.skipFully(5);
    assertThat(input.getPosition()).isEqualTo(5);
    input.skipFully(4);
    assertThat(input.getPosition()).isEqualTo(5 + 4);
  }

  @Test
  public void testSkipFullyTwicePeeked() throws Exception {
    // Skip TEST_DATA.
    DefaultExtractorInput input = createDefaultExtractorInput();

    input.advancePeekPosition(TEST_DATA.length);

    int halfLength = TEST_DATA.length / 2;
    input.skipFully(halfLength);
    assertThat(input.getPosition()).isEqualTo(halfLength);

    input.skipFully(TEST_DATA.length - halfLength);
    assertThat(input.getPosition()).isEqualTo(TEST_DATA.length);
  }

  @Test
  public void testSkipFullyTooMuch() throws Exception {
    // Skip more than TEST_DATA. Should fail with an EOFException. Position should not update.
    DefaultExtractorInput input = createDefaultExtractorInput();
    try {
      input.skipFully(TEST_DATA.length + 1);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
    assertThat(input.getPosition()).isEqualTo(0);

    // Skip more than TEST_DATA with allowEndOfInput set. Should fail with an EOFException because
    // the end of input isn't encountered immediately. Position should not update.
    input = createDefaultExtractorInput();
    try {
      input.skipFully(TEST_DATA.length + 1, true);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
    assertThat(input.getPosition()).isEqualTo(0);
  }

  @Test
  public void testSkipFullyWithFailingDataSource() throws Exception {
    FakeDataSource testDataSource = buildFailingDataSource();
    DefaultExtractorInput input = new DefaultExtractorInput(testDataSource, 0, C.LENGTH_UNSET);
    try {
      input.skipFully(TEST_DATA.length);
      fail();
    } catch (IOException e) {
      // Expected.
    }
    // The position should not have advanced.
    assertThat(input.getPosition()).isEqualTo(0);
  }

  @Test
  public void testSkipFullyLarge() throws Exception {
    // Tests skipping an amount of data that's larger than any internal scratch space.
    int largeSkipSize = 1024 * 1024;
    FakeDataSource testDataSource = new FakeDataSource();
    testDataSource.getDataSet().newDefaultData().appendReadData(new byte[largeSkipSize]);
    testDataSource.open(new DataSpec(Uri.parse(TEST_URI)));
    DefaultExtractorInput input = new DefaultExtractorInput(testDataSource, 0, C.LENGTH_UNSET);

    input.skipFully(largeSkipSize);
    assertThat(input.getPosition()).isEqualTo(largeSkipSize);
    // Check that we fail with EOFException we skip again.
    try {
      input.skipFully(1);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
  }

  @Test
  public void testPeekFully() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];
    input.peekFully(target, 0, TEST_DATA.length);

    // Check that we read the whole of TEST_DATA.
    assertThat(Arrays.equals(TEST_DATA, target)).isTrue();
    assertThat(input.getPosition()).isEqualTo(0);
    assertThat(input.getPeekPosition()).isEqualTo(TEST_DATA.length);

    // Check that we can read again from the buffer
    byte[] target2 = new byte[TEST_DATA.length];
    input.readFully(target2, 0, TEST_DATA.length);
    assertThat(Arrays.equals(TEST_DATA, target2)).isTrue();
    assertThat(input.getPosition()).isEqualTo(TEST_DATA.length);
    assertThat(input.getPeekPosition()).isEqualTo(TEST_DATA.length);

    // Check that we fail with EOFException if we peek again
    try {
      input.peekFully(target, 0, 1);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
  }

  @Test
  public void testPeekFullyAfterEofExceptionPeeksAsExpected() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length + 10];
    try {
      input.peekFully(target, /* offset= */ 0, target.length);
      fail();
    } catch (EOFException expected) {
      // Do nothing. Expected.
    }

    // A failed over-long peek must not corrupt the peek buffer for subsequent peeks.
    input.peekFully(target, /* offset= */ 0, /* length= */ TEST_DATA.length);

    assertThat(input.getPeekPosition()).isEqualTo(TEST_DATA.length);
    assertThat(Arrays.equals(TEST_DATA, Arrays.copyOf(target, TEST_DATA.length))).isTrue();
  }

  @Test
  public void testResetPeekPosition() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];
    input.peekFully(target, 0, TEST_DATA.length);

    // Check that we read the whole of TEST_DATA.
    assertThat(Arrays.equals(TEST_DATA, target)).isTrue();
    assertThat(input.getPosition()).isEqualTo(0);

    // Check that we can peek again after resetting.
    input.resetPeekPosition();
    byte[] target2 = new byte[TEST_DATA.length];
    input.peekFully(target2, 0, TEST_DATA.length);
    assertThat(Arrays.equals(TEST_DATA, target2)).isTrue();

    // Check that we fail with EOFException if we peek past the end of the input.
    try {
      input.peekFully(target, 0, 1);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
  }

  @Test
  public void testPeekFullyAtEndOfStreamWithAllowEndOfInputSucceeds() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];
    // Check peeking up to the end of input succeeds.
    assertThat(input.peekFully(target, 0, TEST_DATA.length, true)).isTrue();
    // Check peeking at the end of input with allowEndOfInput signals the end of input.
    assertThat(input.peekFully(target, 0, 1, true)).isFalse();
  }

  @Test
  public void testPeekFullyAtEndThenReadEndOfInput() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];
    // Peek up to the end of the input.
    assertThat(input.peekFully(target, 0, TEST_DATA.length, false)).isTrue();
    // Peek the end of the input.
    assertThat(input.peekFully(target, 0, 1, true)).isFalse();
    // Read up to the end of the input.
    assertThat(input.readFully(target, 0, TEST_DATA.length, false)).isTrue();
    // Read the end of the input.
    assertThat(input.readFully(target, 0, 1, true)).isFalse();
  }

  @Test
  public void testPeekFullyAcrossEndOfInputWithAllowEndOfInputFails() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];
    // Check peeking before the end of input with allowEndOfInput succeeds.
    assertThat(input.peekFully(target, 0, TEST_DATA.length - 1, true)).isTrue();
    // Check peeking across the end of input with allowEndOfInput throws.
    try {
      input.peekFully(target, 0, 2, true);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
  }

  @Test
  public void testResetAndPeekFullyPastEndOfStreamWithAllowEndOfInputFails() throws Exception {
    DefaultExtractorInput input = createDefaultExtractorInput();
    byte[] target = new byte[TEST_DATA.length];
    // Check peeking up to the end of input succeeds.
    assertThat(input.peekFully(target, 0, TEST_DATA.length, true)).isTrue();
    input.resetPeekPosition();
    try {
      // Check peeking one more byte throws.
      input.peekFully(target, 0, TEST_DATA.length + 1, true);
      fail();
    } catch (EOFException e) {
      // Expected.
    }
  }

  /** Builds an open {@link FakeDataSource} serving {@link #TEST_DATA} in three 3-byte chunks. */
  private static FakeDataSource buildDataSource() throws Exception {
    FakeDataSource testDataSource = new FakeDataSource();
    testDataSource.getDataSet().newDefaultData()
        .appendReadData(Arrays.copyOfRange(TEST_DATA, 0, 3))
        .appendReadData(Arrays.copyOfRange(TEST_DATA, 3, 6))
        .appendReadData(Arrays.copyOfRange(TEST_DATA, 6, 9));
    testDataSource.open(new DataSpec(Uri.parse(TEST_URI)));
    return testDataSource;
  }

  /** Builds an open {@link FakeDataSource} that throws an {@link IOException} after 6 bytes. */
  private static FakeDataSource buildFailingDataSource() throws Exception {
    FakeDataSource testDataSource = new FakeDataSource();
    testDataSource.getDataSet().newDefaultData()
        .appendReadData(Arrays.copyOfRange(TEST_DATA, 0, 6))
        .appendReadError(new IOException())
        .appendReadData(Arrays.copyOfRange(TEST_DATA, 6, 9));
    testDataSource.open(new DataSpec(Uri.parse(TEST_URI)));
    return testDataSource;
  }

  /** Builds an open {@link FakeDataSource} serving {@link #LARGE_TEST_DATA_LENGTH} zero bytes. */
  private static FakeDataSource buildLargeDataSource() throws Exception {
    FakeDataSource testDataSource = new FakeDataSource();
    testDataSource.getDataSet().newDefaultData()
        .appendReadData(new byte[LARGE_TEST_DATA_LENGTH]);
    testDataSource.open(new DataSpec(Uri.parse(TEST_URI)));
    return testDataSource;
  }

  /** Creates an input over {@link #buildDataSource()} starting at position 0, unknown length. */
  private static DefaultExtractorInput createDefaultExtractorInput() throws Exception {
    FakeDataSource testDataSource = buildDataSource();
    return new DefaultExtractorInput(testDataSource, 0, C.LENGTH_UNSET);
  }
}
package com.option_u.stolpersteine.api.model; import android.os.Parcel; import android.os.Parcelable; import com.google.android.gms.maps.model.LatLng; public class Location implements Parcelable { private String street; private String zipCode; private String city; private LatLng coordinates; public Location() { } public Location(Parcel orig) { readFromParcel(orig); } public String getStreet() { return street; } public void setStreet(String street) { this.street = street; } public String getZipCode() { return zipCode; } public void setZipCode(String zipCode) { this.zipCode = zipCode; } public String getCity() { return city; } public void setCity(String city) { this.city = city; } public LatLng getCoordinates() { return coordinates; } public void setCoordinates(LatLng coordinates) { this.coordinates = coordinates; } public String getAddressAsString() { AddressContext addressContext = new AddressContext(this); return addressContext.getAddress(); } private class AddressContext { private AddressFormatter addressFormatter; private AddressContext(Location location) { if (location.street != null && location.zipCode != null && location.city != null) { addressFormatter = new AddressWithAllFields(location); } else if (location.street == null && location.zipCode != null && location.city != null) { addressFormatter = new AddressWithoutStreet(location); } else if (location.street != null && location.zipCode == null && location.city != null) { addressFormatter = new AddressWithoutZipCode(location); } else if (location.street != null && location.zipCode != null && location.city == null) { addressFormatter = new AddressWithoutCity(location); } else if (location.street == null && location.zipCode == null && location.city != null) { addressFormatter = new AddressWithoutStreetAndZipCode(location); } else if (location.street == null && location.zipCode != null && location.city == null) { addressFormatter = new AddressWithoutStreetAndCity(location); } else if (location.street != null && 
location.zipCode == null && location.city == null) { addressFormatter = new AddressWithoutZipCodeAndCity(location); } else { addressFormatter = new AddressWithoutAnything(location); } } private String getAddress() { return addressFormatter.formatAddress(); } } private abstract class AddressFormatter { Location location; abstract String formatAddress(); } private class AddressWithAllFields extends AddressFormatter { private AddressWithAllFields(Location location) { this.location = location; } @Override String formatAddress() { return location.street + ", " + location.zipCode + " " + location.city; } } private class AddressWithoutStreet extends AddressFormatter { private AddressWithoutStreet(Location location) { this.location = location; } @Override String formatAddress() { return location.zipCode + " " + location.city; } } private class AddressWithoutZipCode extends AddressFormatter { private AddressWithoutZipCode(Location location) { this.location = location; } @Override String formatAddress() { return location.street + ", " + location.city; } } private class AddressWithoutCity extends AddressFormatter { private AddressWithoutCity(Location location) { this.location = location; } @Override String formatAddress() { return location.street + ", " + location.zipCode; } } private class AddressWithoutStreetAndZipCode extends AddressFormatter { private AddressWithoutStreetAndZipCode(Location location) { this.location = location; } @Override String formatAddress() { return location.city; } } private class AddressWithoutStreetAndCity extends AddressFormatter { private AddressWithoutStreetAndCity(Location location) { this.location = location; } @Override String formatAddress() { return location.zipCode; } } private class AddressWithoutZipCodeAndCity extends AddressFormatter { private AddressWithoutZipCodeAndCity(Location location) { this.location = location; } @Override String formatAddress() { return location.street; } } private class AddressWithoutAnything extends 
AddressFormatter { private AddressWithoutAnything(Location location) { this.location = location; } @Override String formatAddress() { return ""; } } @Override public int describeContents() { return 0; } @Override public void writeToParcel(Parcel dest, int flags) { dest.writeString(street); dest.writeString(zipCode); dest.writeString(city); dest.writeParcelable(coordinates, flags); } private void readFromParcel(Parcel orig) { street = orig.readString(); zipCode = orig.readString(); city = orig.readString(); coordinates = orig.readParcelable(LatLng.class.getClassLoader()); } public static final Parcelable.Creator<Location> CREATOR = new Parcelable.Creator<Location>() { @Override public Location createFromParcel(Parcel source) { return new Location(source); } @Override public Location[] newArray(int size) { return new Location[size]; } }; }
/* * Copyright (C) 2014 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.genomics.utils; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import com.google.api.services.genomics.Genomics; import com.google.api.services.genomics.model.LinearAlignment; import com.google.api.services.genomics.model.Position; import com.google.api.services.genomics.model.Read; import com.google.api.services.genomics.model.ReadGroupSet; import com.google.api.services.genomics.model.SearchReadGroupSetsRequest; import com.google.api.services.genomics.model.SearchReadGroupSetsResponse; import com.google.api.services.genomics.model.SearchReadsRequest; import com.google.api.services.genomics.model.SearchReadsResponse; import com.google.api.services.genomics.model.SearchVariantsRequest; import com.google.api.services.genomics.model.SearchVariantsResponse; import com.google.api.services.genomics.model.Variant; import com.google.common.collect.Lists; import org.hamcrest.CoreMatchers; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.mockito.Mock; import org.mockito.Mockito; import org.mockito.MockitoAnnotations; import java.util.Arrays; import java.util.List; 
@RunWith(JUnit4.class) public class PaginatorTest { @Mock Genomics genomics; @Mock Genomics.Readgroupsets readGroupSets; @Mock Genomics.Readgroupsets.Search readGroupSetSearch; @Mock Genomics.Variants variants; @Mock Genomics.Variants.Search variantsSearch; @Mock Genomics.Reads reads; @Mock Genomics.Reads.Search readsSearch; @Rule public ExpectedException thrown = ExpectedException.none(); @Before public void initMocks() { MockitoAnnotations.initMocks(this); Mockito.when(genomics.readgroupsets()).thenReturn(readGroupSets); Mockito.when(genomics.variants()).thenReturn(variants); Mockito.when(genomics.reads()).thenReturn(reads); } @Test public void testPagination() throws Exception { // Page 1 Mockito.when(readGroupSets.search(new SearchReadGroupSetsRequest().setName("HG"))) .thenReturn(readGroupSetSearch); // Page 2 Mockito.when(readGroupSets.search(new SearchReadGroupSetsRequest().setName("HG") .setPageToken("page2"))) .thenReturn(readGroupSetSearch); Mockito.when(readGroupSetSearch.execute()).thenReturn( new SearchReadGroupSetsResponse() .setReadGroupSets(Lists.newArrayList(new ReadGroupSet().setId("r1"))) .setNextPageToken("page2"), new SearchReadGroupSetsResponse() .setReadGroupSets(Lists.newArrayList(new ReadGroupSet().setId("r2")))); Paginator.ReadGroupSets paginator = Paginator.ReadGroupSets.create(genomics); List<String> ids = Lists.newArrayList(); for (ReadGroupSet readGroupSet : paginator.search( new SearchReadGroupSetsRequest().setName("HG"))) { ids.add(readGroupSet.getId()); } assertEquals(Lists.newArrayList("r1", "r2"), ids); } @Test public void testPagination_withNullResults() throws Exception { Mockito.when(readGroupSets.search(new SearchReadGroupSetsRequest())) .thenReturn(readGroupSetSearch); Mockito.when(readGroupSetSearch.execute()).thenReturn(new SearchReadGroupSetsResponse()); Paginator.ReadGroupSets paginator = Paginator.ReadGroupSets.create(genomics); List<String> ids = Lists.newArrayList(); for (ReadGroupSet readGroupSet : 
paginator.search(new SearchReadGroupSetsRequest())) { ids.add(readGroupSet.getId()); } // No results and no exceptions assertTrue(ids.isEmpty()); } @Test public void testFields() throws Exception { Mockito.when(readGroupSets.search(new SearchReadGroupSetsRequest().setName("HG"))) .thenReturn(readGroupSetSearch); Mockito.when(readGroupSetSearch.setFields(Mockito.anyString())) .thenReturn(readGroupSetSearch); Mockito.when(readGroupSetSearch.execute()).thenReturn( new SearchReadGroupSetsResponse() .setReadGroupSets(Lists.newArrayList(new ReadGroupSet().setId("r1")))); Paginator.ReadGroupSets paginator = Paginator.ReadGroupSets.create(genomics); List<String> ids = Lists.newArrayList(); for (ReadGroupSet set : paginator.search( new SearchReadGroupSetsRequest().setName("HG"), "nextPageToken,readGroupSets(id,name)")) { ids.add(set.getId()); } assertEquals(Lists.newArrayList("r1"), ids); // Make sure the fields parameter actually gets passed along Mockito.verify(readGroupSetSearch, Mockito.atLeastOnce()).setFields("nextPageToken,readGroupSets(id,name)"); } @Test public void testFieldsMissingNextPageToken() throws Exception { Mockito.when(readGroupSets.search(new SearchReadGroupSetsRequest().setName("HG"))) .thenReturn(readGroupSetSearch); Paginator.ReadGroupSets paginator = Paginator.ReadGroupSets.create(genomics); thrown.expect(IllegalArgumentException.class); paginator.search(new SearchReadGroupSetsRequest().setName("HG"), "readGroupSets(id,name)").iterator().next(); } @Test public void testVariantPagination() throws Exception { SearchVariantsRequest request = new SearchVariantsRequest().setStart(1000L).setEnd(2000L); Mockito.when(variants.search(request)).thenReturn(variantsSearch); Variant overlapStartWithinExtent = new Variant().setStart(900L).setEnd(1005L); Variant overlapStartExtent = new Variant().setStart(999L).setEnd(5000L); Variant atStartWithinExtent = new Variant().setStart(1000L).setEnd(1002L); Variant atStartOverlapExtent = new 
Variant().setStart(1000L).setEnd(5000L); Variant beyondStartWithinExtent = new Variant().setStart(1500L).setEnd(1502L); Variant beyondOverlapExtent = new Variant().setStart(1500L).setEnd(5000L); Variant[] input = new Variant[] { overlapStartWithinExtent, overlapStartExtent, atStartWithinExtent, atStartOverlapExtent, beyondStartWithinExtent, beyondOverlapExtent }; Mockito.when(variantsSearch.execute()).thenReturn( new SearchVariantsResponse().setVariants(Arrays.asList(input))); Paginator.Variants filteredPaginator = Paginator.Variants.create(genomics, ShardBoundary.Requirement.STRICT); List<Variant> filteredVariants = Lists.newArrayList(); for (Variant variant : filteredPaginator.search(request)) { filteredVariants.add(variant); } assertEquals(4, filteredVariants.size()); assertThat(filteredVariants, CoreMatchers.hasItems(atStartWithinExtent, atStartOverlapExtent, beyondStartWithinExtent, beyondOverlapExtent)); // Ensure searches with fields verify the preconditions for strict shards. final String nullFields = null; assertNotNull(filteredPaginator.search(request, nullFields).iterator().next()); assertNotNull(filteredPaginator.search(request, "nextPageToken,variants(start,id,calls(genotype,callSetName))").iterator().next()); assertNotNull(filteredPaginator.search(request, "id,nextPageToken,variants(start,id,calls(genotype,callSetName))").iterator().next()); assertNotNull(filteredPaginator.search(request, "variants(start,id,calls(genotype,callSetName)),nextPageToken").iterator().next()); try { filteredPaginator.search(request, "nextPageToken,variants(id,calls(genotype,callSetName))").iterator().next(); fail("should have thrown an IllegalArgumentxception"); } catch (IllegalArgumentException e) {} Paginator.Variants overlappingPaginator = Paginator.Variants.create(genomics, ShardBoundary.Requirement.OVERLAPS); List<Variant> overlappingVariants = Lists.newArrayList(); for (Variant variant : overlappingPaginator.search(request)) { overlappingVariants.add(variant); } 
assertEquals(6, overlappingVariants.size()); assertThat(overlappingVariants, CoreMatchers.hasItems(input)); // Ensure searches with fields verify the preconditions for overlapping shards. assertNotNull(overlappingPaginator.search(request, nullFields).iterator().next()); assertNotNull(overlappingPaginator.search(request, "nextPageToken,variants(start,id,calls(genotype,callSetName))").iterator().next()); assertNotNull(overlappingPaginator.search(request, "nextPageToken,variants(id,calls(genotype,callSetName))").iterator().next()); assertNotNull(overlappingPaginator.search(request, "id,nextPageToken,variants(start,id,calls(genotype,callSetName))").iterator().next()); assertNotNull(overlappingPaginator.search(request, "variants(id,calls(genotype,callSetName)),nextPageToken").iterator().next()); } @Test public void testVariantPaginationEmptyShard() throws Exception { SearchVariantsRequest request = new SearchVariantsRequest().setStart(1000L).setEnd(2000L); Mockito.when(variants.search(request)).thenReturn(variantsSearch); Mockito.when(variantsSearch.execute()).thenReturn( new SearchVariantsResponse()); Paginator.Variants filteredPaginator = Paginator.Variants.create(genomics, ShardBoundary.Requirement.STRICT); assertNotNull(filteredPaginator.search(request)); } static Read readHelper(int start, int end) { Position position = new Position().setPosition((long) start); LinearAlignment alignment = new LinearAlignment().setPosition(position); return new Read().setAlignment(alignment).setFragmentLength(end-start); } @Test public void testReadPagination() throws Exception { SearchReadsRequest request = new SearchReadsRequest().setStart(1000L).setEnd(2000L); Mockito.when(reads.search(request)).thenReturn(readsSearch); Read overlapStartWithinExtent = readHelper(900,1005); Read overlapStartExtent = readHelper(999, 5000); Read atStartWithinExtent = readHelper(1000, 1002); Read atStartOverlapExtent = readHelper(1000, 5000); Read beyondStartWithinExtent = readHelper(1500, 1502); 
Read beyondOverlapExtent = readHelper(1500, 5000); Read[] input = new Read[] { overlapStartWithinExtent, overlapStartExtent, atStartWithinExtent, atStartOverlapExtent, beyondStartWithinExtent, beyondOverlapExtent }; Mockito.when(readsSearch.execute()).thenReturn( new SearchReadsResponse().setAlignments(Arrays.asList(input))); Paginator.Reads filteredPaginator = Paginator.Reads.create(genomics, ShardBoundary.Requirement.STRICT); List<Read> filteredReads = Lists.newArrayList(); for (Read read : filteredPaginator.search(request)) { filteredReads.add(read); } assertEquals(4, filteredReads.size()); assertThat(filteredReads, CoreMatchers.hasItems(atStartWithinExtent, atStartOverlapExtent, beyondStartWithinExtent, beyondOverlapExtent)); Paginator.Reads overlappingPaginator = Paginator.Reads.create(genomics, ShardBoundary.Requirement.OVERLAPS); List<Read> overlappingReads = Lists.newArrayList(); for (Read read : overlappingPaginator.search(request)) { overlappingReads.add(read); } assertEquals(6, overlappingReads.size()); assertThat(overlappingReads, CoreMatchers.hasItems(input)); } @Test public void testReadPaginationStrictShardPrecondition() throws Exception { SearchReadsRequest request = new SearchReadsRequest().setStart(1000L).setEnd(2000L); Mockito.when(reads.search(request)).thenReturn(readsSearch); Paginator.Reads filteredPaginator = Paginator.Reads.create(genomics, ShardBoundary.Requirement.STRICT); thrown.expect(IllegalArgumentException.class); filteredPaginator.search(request, "nextPageToken,reads(id,alignment(cigar))").iterator().next(); } @Test public void testStrictReadPaginationNextPageTokenPrecondition() throws Exception { SearchReadsRequest request = new SearchReadsRequest().setStart(1000L).setEnd(2000L); Mockito.when(reads.search(request)).thenReturn(readsSearch); Paginator.Reads filteredPaginator = Paginator.Reads.create(genomics, ShardBoundary.Requirement.STRICT); thrown.expect(IllegalArgumentException.class); filteredPaginator.search(request, 
"reads(id,alignment(cigar,position))").iterator().next(); } @Test public void testOverlappingReadPaginationNextPageTokenPrecondition() throws Exception { SearchReadsRequest request = new SearchReadsRequest().setStart(1000L).setEnd(2000L); Mockito.when(reads.search(request)).thenReturn(readsSearch); Paginator.Reads overlappingPaginator = Paginator.Reads.create(genomics, ShardBoundary.Requirement.OVERLAPS); thrown.expect(IllegalArgumentException.class); overlappingPaginator.search(request, "reads(id,alignment(cigar,position))").iterator().next(); } }
package com.Ms.todoreminder.Controller;

/**
 * @author SPEGAGNE Mathieu on 13/03/15.
 * @author https://github.com/mspegagne
 */

import java.util.Calendar;
import java.util.Locale;

import android.app.AlarmManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.app.ActionBar;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.app.FragmentPagerAdapter;
import android.os.Bundle;
import android.support.v4.view.ViewPager;
import android.view.Menu;
import android.view.MenuItem;

import com.Ms.todoreminder.Alarm.AlarmReceiver;
import com.Ms.todoreminder.Model.ToDo;
import com.Ms.todoreminder.R;

import static com.Ms.todoreminder.R.*;

/**
 * Main Activity controller, Fragment Manager, Alarm Manager.
 *
 * Hosts a three-tab ViewPager (History / To Do / Add To Do) and exposes static
 * helpers to schedule and cancel the daily reminder alarms for to-do items.
 */
public class MainActivity extends ActionBarActivity implements ActionBar.TabListener {

    // NOTE(review): static references to the adapter and the ViewPager can
    // outlive the Activity across configuration changes and leak it --
    // TODO confirm whether other components really need static access.
    public static SectionsPagerAdapter mSectionsPagerAdapter;
    public static ViewPager mViewPager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(layout.activity_main);

        // Set up the action bar in tab-navigation mode (no home icon/title).
        final ActionBar actionBar = getSupportActionBar();
        actionBar.setDisplayShowHomeEnabled(false);
        actionBar.setDisplayShowTitleEnabled(false);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_TABS);

        // Create the adapter that will return a fragment for each of the three
        // primary sections of the activity.
        mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager());

        // Set up the ViewPager with the sections adapter.
        mViewPager = (ViewPager) findViewById(id.pager);
        mViewPager.setAdapter(mSectionsPagerAdapter);

        // When swiping between different sections, select the corresponding
        // tab. We can also use ActionBar.Tab#select() to do this if we have
        // a reference to the Tab.
        mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() {
            @Override
            public void onPageSelected(int position) {
                actionBar.setSelectedNavigationItem(position);
            }
        });

        // For each of the sections in the app, add a tab to the action bar.
        for (int i = 0; i < mSectionsPagerAdapter.getCount(); i++) {
            // Create a tab with text corresponding to the page title defined by
            // the adapter. Also specify this Activity object, which implements
            // the TabListener interface, as the callback (listener) for when
            // this tab is selected.
            actionBar.addTab(
                    actionBar.newTab()
                            .setText(mSectionsPagerAdapter.getPageTitle(i))
                            .setTabListener(this));
        }

        // Start on the middle "To Do" tab rather than the first (History) tab.
        mViewPager.setCurrentItem(1);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    @Override
    public void onTabSelected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
        // When the given tab is selected, switch to the corresponding page in
        // the ViewPager.
        mViewPager.setCurrentItem(tab.getPosition());
    }

    @Override
    public void onTabUnselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
    }

    @Override
    public void onTabReselected(ActionBar.Tab tab, FragmentTransaction fragmentTransaction) {
    }

    /**
     * A {@link FragmentPagerAdapter} that returns a fragment corresponding to
     * one of the sections/tabs/pages.
     */
    public class SectionsPagerAdapter extends FragmentPagerAdapter {

        public SectionsPagerAdapter(FragmentManager fm) {
            super(fm);
        }

        @Override
        public Fragment getItem(int index) {
            switch (index) {
                case 0:
                    // History fragment activity
                    return new HistoryFragment();
                case 1:
                    // To Do fragment activity
                    return new TodoFragment();
                case 2:
                    // Add To Do fragment activity
                    return new AddFragment();
            }
            return null;
        }

        @Override
        public int getCount() {
            // Show 3 total pages.
            return 3;
        }

        @Override
        public CharSequence getPageTitle(int position) {
            Locale l = Locale.getDefault();
            switch (position) {
                case 0:
                    return getString(string.title_section1).toUpperCase(l);
                case 1:
                    return getString(string.title_section2).toUpperCase(l);
                case 2:
                    return getString(string.title_section3).toUpperCase(l);
            }
            return null;
        }
    }

    /**
     * Set Alarm to display the reminder notification.
     *
     * Schedules a repeating alarm (interval from {@link #getInterval()}) whose
     * first trigger is the to-do's date.
     *
     * @param context context used to access the {@link AlarmManager}
     * @param todo    to-do item; carried as an extra for the receiver and
     *                supplies the first trigger time
     * @param id      unique id of the to-do; used as the intent category so
     *                several pending alarms can coexist (same request code 0)
     */
    public static void setAlarm(Context context, ToDo todo, int id) {
        //Get the alarm manager
        AlarmManager alarmManager = (AlarmManager) context.getSystemService(ALARM_SERVICE);

        //Set the notification intent
        Intent alarmIntent = new Intent(context, AlarmReceiver.class);
        //Category is here to differentiate two alarm intent
        alarmIntent.addCategory("" + id);
        alarmIntent.putExtra("todo", todo);
        alarmIntent.putExtra("id", id);
        PendingIntent pendingIntent = PendingIntent.getBroadcast(context, 0, alarmIntent,
                PendingIntent.FLAG_CANCEL_CURRENT);

        //Set the alarm
        Calendar alarmStartTime = todo.getDate();
        alarmManager.setRepeating(AlarmManager.RTC, alarmStartTime.getTimeInMillis(), getInterval(),
                pendingIntent);
    }

    /**
     * Delete an alarm previously scheduled for the to-do with the given id.
     *
     * @param context context used to access the {@link AlarmManager}
     * @param id      id of the to-do whose alarm should be cancelled
     */
    public static void deleteAlarm(Context context, int id) {
        AlarmManager alarmManager = (AlarmManager) context.getSystemService(ALARM_SERVICE);
        Intent alarmIntent = new Intent(context, AlarmReceiver.class);
        //the alarmManager doesn't care about Extras, only Category. cf Intent#filterEquals
        alarmIntent.addCategory("" + id);
        PendingIntent pendingIntent = PendingIntent.getBroadcast(context, 0, alarmIntent,
                PendingIntent.FLAG_CANCEL_CURRENT);
        alarmManager.cancel(pendingIntent);
    }

    /**
     * Set the interval for repeating.
     *
     * @return one day expressed in milliseconds (86,400,000)
     */
    private static int getInterval() {
        int days = 1;
        int hours = 24;
        int minutes = 60;
        int seconds = 60;
        int milliseconds = 1000;
        return days * hours * minutes * seconds * milliseconds;
    }
}
package i5.las2peer.classLoaders.libraries; import i5.las2peer.classLoaders.UnresolvedDependenciesException; import i5.las2peer.classLoaders.helpers.LibraryDependency; import i5.las2peer.classLoaders.helpers.LibraryIdentifier; import i5.las2peer.classLoaders.helpers.LibraryVersion; import i5.las2peer.tools.SimpleTools; import java.io.File; import java.util.Collection; import java.util.Enumeration; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; /** * implements a repository which loads all libraries from a given directory or from severeal ones. * The search for library files (jars) may be recursive. * * @author Holger Jan&szlig;en * */ public class FileSystemRepository implements Repository { private String[] directories; private boolean recursive = false; private Hashtable <String,Hashtable<LibraryVersion,String>> htFoundJars; /** * create a repository for the given directory, non-recursive * @param directory */ public FileSystemRepository ( String directory ) { this ( new String[] { directory }, false ); } /** * create a repository for the given directory * * @param directory * @param recursive */ public FileSystemRepository ( String directory, boolean recursive ) { this ( new String[] { directory } , recursive ); } /** * create a repository for the given directories, non-recursive * @param directories */ public FileSystemRepository ( String [] directories ) { this ( directories, false ); } /** * create a repository for the given directories * * @param directories * @param recursive */ public FileSystemRepository ( String[] directories, boolean recursive ) { this.directories = directories; this.recursive = recursive; initJarList(); } /** * get the newest library for the given name * * @param name * @return a LoadedLibrary for the requested library name * @throws NotFoundException * @throws UnresolvedDependenciesException */ public LoadedLibrary findLibrary(String name) throws NotFoundException, UnresolvedDependenciesException { 
Hashtable<LibraryVersion, String> htVersions = htFoundJars.get( name ); if ( htVersions == null ) { System.err.println ( this + " could not find " + name ); throw new NotFoundException(name); } else { System.err.println( this + " has " + htVersions.size() + " versions of " + name ); } LibraryVersion version = null; for ( Enumeration <LibraryVersion> en = htVersions.keys(); en.hasMoreElements(); ) { LibraryVersion v = en.nextElement(); if ( version == null || v.isLargerThan(version)) version = v; } try { return LoadedJarLibrary.createFromJar( htVersions.get ( version ) ); } catch ( IllegalArgumentException e ) { // somthing's wrong with the manifest throw new UnresolvedDependenciesException ( "Somethings seems wrong with the dependency information of " + name + ": " + e.getMessage(), e ); } catch ( Exception e ) { throw new NotFoundException ( "Error opening library jar " + htVersions.get(version), e); } } /** * get a library matching name and version of the given identifier * * @param lib * * @return a LoadedLibrary for the requested library identifier * * @throws NotFoundException */ public LoadedLibrary findLibrary(LibraryIdentifier lib) throws NotFoundException { Hashtable <LibraryVersion, String> htVersions = htFoundJars.get( lib.getName() ); if ( htVersions == null ) throw new NotFoundException(lib.toString()); String jar = htVersions.get( lib.getVersion() ); if ( jar == null ) throw new NotFoundException(lib.toString()); try { return LoadedJarLibrary.createFromJar( jar ); } catch (Exception e) { throw new NotFoundException ( lib.toString(), e ); } } /** * get the newest library matching the given library dependency (name and version range) * @param dep * @return a LoadedLibray matching the given library dependency * @throws NotFoundException */ public LoadedLibrary findMatchingLibrary(LibraryDependency dep) throws NotFoundException { // TODO: find better search solution: Search sorted and find always newest version Hashtable <LibraryVersion, String> 
htVersions = htFoundJars.get( dep.getName() ); if ( htVersions == null ) throw new NotFoundException(dep.getName()); for (LibraryVersion version: htVersions.keySet() ) { if ( dep.fits ( version )) { try { return LoadedJarLibrary.createFromJar( htVersions.get(version)); } catch ( Exception e ) { System.out.println ( "Error loading jar: " + e); e.printStackTrace(); } } //else System.out.println ( "--> does not fit"); } throw new NotFoundException ( dep.toString() ); } /** * get an array with all versions found for the given library name * @param libraryName * @return array with all available versions of the given library */ public String[] getAvailableVersions ( String libraryName ) { return getAvailableVersionSet( libraryName ).toArray(new String[0]); } /** * get a collection with all versions found for the given library name * @param libraryName * @return a collections with all versions of the given library */ public Collection<LibraryVersion> getAvailableVersionSet ( String libraryName ) { Hashtable<LibraryVersion,String> htFound = htFoundJars.get(libraryName ); if ( htFound == null) return new HashSet<LibraryVersion> (); return htFound.keySet(); } /** * get an array with found jar files within this repository * @return an array with all libraries in this repository */ public String[] getAllLibraries () { Collection<String> libs = getLibraryCollection (); return libs.toArray(new String[0]); } /** * get a collection with all found jar files within this repository * @return a collection with all libraries in this repository */ public Collection<String> getLibraryCollection () { HashSet<String> hsTemp = new HashSet<String> (); Enumeration<String> eLibs = htFoundJars.keys(); while ( eLibs.hasMoreElements() ) { String lib = eLibs.nextElement(); Iterator<String> jars = htFoundJars.get(lib).values().iterator(); while ( jars.hasNext() ) { String jar = jars.next(); hsTemp.add(jar); } } return hsTemp; } /** * initialize the list if jars */ private void initJarList () { 
htFoundJars = new Hashtable < String, Hashtable<LibraryVersion,String >>(); for ( int i=0; i < directories.length; i++ ) searchJars ( directories[i] ); } /** * look for jars in the given directory, search recursive, if flag is set * @param directory */ private void searchJars ( String directory ) { File f = new File ( directory ); if ( ! f.isDirectory() ) throw new IllegalArgumentException( "Given path is not a directory" ); File[] entries = f.listFiles (); for ( int i=0; i<entries.length; i++ ) { if ( entries[i].isDirectory () ) { if ( recursive ) searchJars ( entries[i].toString() ); } else if ( entries[i].getPath().endsWith(".jar")) { if ( entries[i].getName().contains("-")) { String[] split = entries[i].getName().substring (0, entries[i].getName().length()-4).split("-", 2); try { LibraryVersion version = new LibraryVersion ( split[1]); registerJar(entries[i].getPath(), split[0], version); } catch ( IllegalArgumentException e ) { // ok, version info not correct // TODO: print warning about missing version info? System.out.println ( "Error registering library " + entries[i] + ": " + e); } } else { // TODO: print warning about missing version info? // maybe depending on log level System.out.println ( "library " + entries[i] + " has no version info in it's name! - Won't be used!"); } } } } /** * register a found jar file to the hashtable of available jars in this repository * * @param file * @param name * @param version */ private void registerJar(String file, String name, LibraryVersion version) { Hashtable <LibraryVersion,String > htNameEntries = htFoundJars.get( name); if ( htNameEntries == null ) { htNameEntries = new Hashtable<LibraryVersion, String>(); htFoundJars.put( name,htNameEntries); } htNameEntries.put ( version, file ); } /** * @return a simple string representation of this object */ public String toString () { return "FS-Repository at " + SimpleTools.join ( directories, ":"); } }
package fr.jmini.asciidoctorj.testcases; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.asciidoctor.OptionsBuilder; import org.asciidoctor.ast.Cell; import org.asciidoctor.ast.Column; import org.asciidoctor.ast.Document; import org.asciidoctor.ast.Row; import org.asciidoctor.ast.Table; public class TableCaptionCustomCounterTestCase implements AdocTestCase { public static final String ASCIIDOC = "" + ":table-number: 10\n" + "\n" + ".this is the first caption\n" + "|=== \n" + "\n" + "|Cell in column 1, T 1 |Cell in column 2, T 1 |Cell in column 3, T 1\n" + "\n" + "|=== \n" + "\n" + ".this is the second caption\n" + "|=== \n" + "\n" + "|Cell in column 1, T 2 |Cell in column 2, T 2 |Cell in column 3, T 2\n" + "\n" + "|==="; @Override public String getAdocInput() { return ASCIIDOC; } @Override public Map<String, Object> getInputOptions() { return OptionsBuilder.options() .asMap(); } // tag::expected-html[] public static final String EXPECTED_HTML = "" + "<table class=\"tableblock frame-all grid-all spread\">\n" + "<caption class=\"title\">\n" + "Table 11. 
this is the first caption\n" + "</caption>\n" + "<colgroup>\n" + "<col style=\"width: 33.3333%;\" />\n" + "<col style=\"width: 33.3333%;\" />\n" + "<col style=\"width: 33.3334%;\" />\n" + "</colgroup>\n" + "<tbody>\n" + "<tr>\n" + "<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 1, T 1</p></td>\n" + "<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 2, T 1</p></td>\n" + "<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 3, T 1</p></td>\n" + "</tr>\n" + "</tbody>\n" + "</table>\n" + "<table class=\"tableblock frame-all grid-all spread\">\n" + "<caption class=\"title\">\n" + "Table 12. this is the second caption\n" + "</caption>\n" + "<colgroup>\n" + "<col style=\"width: 33.3333%;\" />\n" + "<col style=\"width: 33.3333%;\" />\n" + "<col style=\"width: 33.3334%;\" />\n" + "</colgroup>\n" + "<tbody>\n" + "<tr>\n" + "<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 1, T 2</p></td>\n" + "<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 2, T 2</p></td>\n" + "<td class=\"tableblock halign-left valign-top\"><p class=\"tableblock\">Cell in column 3, T 2</p></td>\n" + "</tr>\n" + "</tbody>\n" + "</table>"; // end::expected-html[] @Override public String getHtmlOutput() { return EXPECTED_HTML; } @Override // tag::assert-code[] public void checkAst(Document astDocument) { Document document1 = astDocument; assertThat(document1.getId()).isNull(); assertThat(document1.getNodeName()).isEqualTo("document"); assertThat(document1.getParent()).isNull(); assertThat(document1.getContext()).isEqualTo("document"); assertThat(document1.getDocument()).isSameAs(document1); assertThat(document1.isInline()).isFalse(); assertThat(document1.isBlock()).isTrue(); assertThat(document1.getAttributes()).containsEntry("doctype", "article") .containsEntry("example-caption", "Example") .containsEntry("figure-caption", 
"Figure") .containsEntry("filetype", "html") .containsEntry("notitle", "") .containsEntry("prewrap", "") .containsEntry("table-caption", "Table") .containsEntry("table-number", "10"); assertThat(document1.getRoles()).isNullOrEmpty(); assertThat(document1.isReftext()).isFalse(); assertThat(document1.getReftext()).isNull(); assertThat(document1.getCaption()).isNull(); assertThat(document1.getTitle()).isNull(); assertThat(document1.getStyle()).isNull(); assertThat(document1.getLevel()).isEqualTo(0); assertThat(document1.getContentModel()).isEqualTo("compound"); assertThat(document1.getSourceLocation()).isNull(); assertThat(document1.getSubstitutions()).isNullOrEmpty(); assertThat(document1.getBlocks()).hasSize(2); Table table1 = (Table) document1.getBlocks() .get(0); assertThat(table1.getId()).isNull(); assertThat(table1.getNodeName()).isEqualTo("table"); assertThat(table1.getParent()).isSameAs(document1); assertThat(table1.getContext()).isEqualTo("table"); assertThat(table1.getDocument()).isSameAs(document1); assertThat(table1.isInline()).isFalse(); assertThat(table1.isBlock()).isTrue(); assertThat(table1.getAttributes()).containsEntry("colcount", 3L) .containsEntry("rowcount", 1L) .containsEntry("style", "table") .containsEntry("tablepcwidth", 100L); assertThat(table1.getRoles()).isNullOrEmpty(); assertThat(table1.isReftext()).isFalse(); assertThat(table1.getReftext()).isNull(); assertThat(table1.getCaption()).isEqualTo("Table 11. 
"); assertThat(table1.getTitle()).isEqualTo("this is the first caption"); assertThat(table1.getStyle()).isEqualTo("table"); assertThat(table1.getLevel()).isEqualTo(0); assertThat(table1.getContentModel()).isEqualTo("compound"); assertThat(table1.getSourceLocation()).isNull(); assertThat(table1.getSubstitutions()).isNullOrEmpty(); assertThat(table1.getBlocks()).isNullOrEmpty(); assertThat(table1.hasHeaderOption()).isFalse(); assertThat(table1.getColumns()).hasSize(3); Column column1 = (Column) table1.getColumns() .get(0); assertThat(column1.getId()).isNull(); assertThat(column1.getNodeName()).isEqualTo("column"); assertThat(column1.getParent()).isSameAs(table1); assertThat(column1.getContext()).isEqualTo("column"); assertThat(column1.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { column1.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { column1.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(column1.getAttributes()).containsEntry("colnumber", 1L) .containsEntry("colpcwidth", 33.3333) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(column1.getRoles()).isNullOrEmpty(); assertThat(column1.isReftext()).isFalse(); assertThat(column1.getReftext()).isNull(); assertThat(column1.getStyle()).isNull(); assertThat(column1.getTable()).isSameAs(table1); assertThat(column1.getColumnNumber()).isEqualTo(1); assertThat(column1.getWidth()).isEqualTo(1); assertThat(column1.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(column1.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); Column column2 = (Column) table1.getColumns() .get(1); assertThat(column2.getId()).isNull(); assertThat(column2.getNodeName()).isEqualTo("column"); assertThat(column2.getParent()).isSameAs(table1); assertThat(column2.getContext()).isEqualTo("column"); assertThat(column2.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { 
column2.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { column2.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(column2.getAttributes()).containsEntry("colnumber", 2L) .containsEntry("colpcwidth", 33.3333) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(column2.getRoles()).isNullOrEmpty(); assertThat(column2.isReftext()).isFalse(); assertThat(column2.getReftext()).isNull(); assertThat(column2.getStyle()).isNull(); assertThat(column2.getTable()).isSameAs(table1); assertThat(column2.getColumnNumber()).isEqualTo(2); assertThat(column2.getWidth()).isEqualTo(1); assertThat(column2.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(column2.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); Column column3 = (Column) table1.getColumns() .get(2); assertThat(column3.getId()).isNull(); assertThat(column3.getNodeName()).isEqualTo("column"); assertThat(column3.getParent()).isSameAs(table1); assertThat(column3.getContext()).isEqualTo("column"); assertThat(column3.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { column3.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { column3.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(column3.getAttributes()).containsEntry("colnumber", 3L) .containsEntry("colpcwidth", 33.3334) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(column3.getRoles()).isNullOrEmpty(); assertThat(column3.isReftext()).isFalse(); assertThat(column3.getReftext()).isNull(); assertThat(column3.getStyle()).isNull(); assertThat(column3.getTable()).isSameAs(table1); assertThat(column3.getColumnNumber()).isEqualTo(3); assertThat(column3.getWidth()).isEqualTo(1); assertThat(column3.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); 
assertThat(column3.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); assertThat(table1.getHeader()).isNullOrEmpty(); assertThat(table1.getFooter()).isNullOrEmpty(); assertThat(table1.getBody()).hasSize(1); Row row1 = (Row) table1.getBody() .get(0); assertThat(row1.getCells()).hasSize(3); Cell cell1 = (Cell) row1.getCells() .get(0); assertThat(cell1.getId()).isNull(); assertThat(cell1.getNodeName()).isEqualTo("cell"); assertThat(cell1.getParent()).isSameAs(column1); assertThat(cell1.getContext()).isEqualTo("cell"); assertThat(cell1.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { cell1.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { cell1.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(cell1.getAttributes()).containsEntry("colnumber", 1L) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(cell1.getRoles()).isNullOrEmpty(); assertThat(cell1.isReftext()).isFalse(); assertThat(cell1.getReftext()).isNull(); assertThat(cell1.getColumn()).isSameAs(column1); assertThat(cell1.getColspan()).isEqualTo(0); assertThat(cell1.getRowspan()).isEqualTo(0); assertThat(cell1.getText()).isEqualTo("Cell in column 1, T 1"); assertThat(cell1.getSource()).isEqualTo("Cell in column 1, T 1"); assertThat(cell1.getStyle()).isNull(); assertThat(cell1.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(cell1.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); assertThat(cell1.getInnerDocument()).isNull(); Cell cell2 = (Cell) row1.getCells() .get(1); assertThat(cell2.getId()).isNull(); assertThat(cell2.getNodeName()).isEqualTo("cell"); assertThat(cell2.getParent()).isSameAs(column2); assertThat(cell2.getContext()).isEqualTo("cell"); assertThat(cell2.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { cell2.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { 
cell2.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(cell2.getAttributes()).containsEntry("colnumber", 2L) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(cell2.getRoles()).isNullOrEmpty(); assertThat(cell2.isReftext()).isFalse(); assertThat(cell2.getReftext()).isNull(); assertThat(cell2.getColumn()).isSameAs(column2); assertThat(cell2.getColspan()).isEqualTo(0); assertThat(cell2.getRowspan()).isEqualTo(0); assertThat(cell2.getText()).isEqualTo("Cell in column 2, T 1"); assertThat(cell2.getSource()).isEqualTo("Cell in column 2, T 1"); assertThat(cell2.getStyle()).isNull(); assertThat(cell2.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(cell2.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); assertThat(cell2.getInnerDocument()).isNull(); Cell cell3 = (Cell) row1.getCells() .get(2); assertThat(cell3.getId()).isNull(); assertThat(cell3.getNodeName()).isEqualTo("cell"); assertThat(cell3.getParent()).isSameAs(column3); assertThat(cell3.getContext()).isEqualTo("cell"); assertThat(cell3.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { cell3.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { cell3.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(cell3.getAttributes()).containsEntry("colnumber", 3L) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(cell3.getRoles()).isNullOrEmpty(); assertThat(cell3.isReftext()).isFalse(); assertThat(cell3.getReftext()).isNull(); assertThat(cell3.getColumn()).isSameAs(column3); assertThat(cell3.getColspan()).isEqualTo(0); assertThat(cell3.getRowspan()).isEqualTo(0); assertThat(cell3.getText()).isEqualTo("Cell in column 3, T 1"); assertThat(cell3.getSource()).isEqualTo("Cell in column 3, T 1"); assertThat(cell3.getStyle()).isNull(); 
assertThat(cell3.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(cell3.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); assertThat(cell3.getInnerDocument()).isNull(); assertThat(table1.getFrame()).isEqualTo("all"); assertThat(table1.getGrid()).isEqualTo("all"); Table table2 = (Table) document1.getBlocks() .get(1); assertThat(table2.getId()).isNull(); assertThat(table2.getNodeName()).isEqualTo("table"); assertThat(table2.getParent()).isSameAs(document1); assertThat(table2.getContext()).isEqualTo("table"); assertThat(table2.getDocument()).isSameAs(document1); assertThat(table2.isInline()).isFalse(); assertThat(table2.isBlock()).isTrue(); assertThat(table2.getAttributes()).containsEntry("colcount", 3L) .containsEntry("rowcount", 1L) .containsEntry("style", "table") .containsEntry("tablepcwidth", 100L); assertThat(table2.getRoles()).isNullOrEmpty(); assertThat(table2.isReftext()).isFalse(); assertThat(table2.getReftext()).isNull(); assertThat(table2.getCaption()).isEqualTo("Table 12. 
"); assertThat(table2.getTitle()).isEqualTo("this is the second caption"); assertThat(table2.getStyle()).isEqualTo("table"); assertThat(table2.getLevel()).isEqualTo(0); assertThat(table2.getContentModel()).isEqualTo("compound"); assertThat(table2.getSourceLocation()).isNull(); assertThat(table2.getSubstitutions()).isNullOrEmpty(); assertThat(table2.getBlocks()).isNullOrEmpty(); assertThat(table2.hasHeaderOption()).isFalse(); assertThat(table2.getColumns()).hasSize(3); Column column4 = (Column) table2.getColumns() .get(0); assertThat(column4.getId()).isNull(); assertThat(column4.getNodeName()).isEqualTo("column"); assertThat(column4.getParent()).isSameAs(table2); assertThat(column4.getContext()).isEqualTo("column"); assertThat(column4.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { column4.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { column4.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(column4.getAttributes()).containsEntry("colnumber", 1L) .containsEntry("colpcwidth", 33.3333) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(column4.getRoles()).isNullOrEmpty(); assertThat(column4.isReftext()).isFalse(); assertThat(column4.getReftext()).isNull(); assertThat(column4.getStyle()).isNull(); assertThat(column4.getTable()).isSameAs(table2); assertThat(column4.getColumnNumber()).isEqualTo(1); assertThat(column4.getWidth()).isEqualTo(1); assertThat(column4.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(column4.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); Column column5 = (Column) table2.getColumns() .get(1); assertThat(column5.getId()).isNull(); assertThat(column5.getNodeName()).isEqualTo("column"); assertThat(column5.getParent()).isSameAs(table2); assertThat(column5.getContext()).isEqualTo("column"); assertThat(column5.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { 
column5.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { column5.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(column5.getAttributes()).containsEntry("colnumber", 2L) .containsEntry("colpcwidth", 33.3333) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(column5.getRoles()).isNullOrEmpty(); assertThat(column5.isReftext()).isFalse(); assertThat(column5.getReftext()).isNull(); assertThat(column5.getStyle()).isNull(); assertThat(column5.getTable()).isSameAs(table2); assertThat(column5.getColumnNumber()).isEqualTo(2); assertThat(column5.getWidth()).isEqualTo(1); assertThat(column5.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(column5.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); Column column6 = (Column) table2.getColumns() .get(2); assertThat(column6.getId()).isNull(); assertThat(column6.getNodeName()).isEqualTo("column"); assertThat(column6.getParent()).isSameAs(table2); assertThat(column6.getContext()).isEqualTo("column"); assertThat(column6.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { column6.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { column6.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(column6.getAttributes()).containsEntry("colnumber", 3L) .containsEntry("colpcwidth", 33.3334) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(column6.getRoles()).isNullOrEmpty(); assertThat(column6.isReftext()).isFalse(); assertThat(column6.getReftext()).isNull(); assertThat(column6.getStyle()).isNull(); assertThat(column6.getTable()).isSameAs(table2); assertThat(column6.getColumnNumber()).isEqualTo(3); assertThat(column6.getWidth()).isEqualTo(1); assertThat(column6.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); 
assertThat(column6.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); assertThat(table2.getHeader()).isNullOrEmpty(); assertThat(table2.getFooter()).isNullOrEmpty(); assertThat(table2.getBody()).hasSize(1); Row row2 = (Row) table2.getBody() .get(0); assertThat(row2.getCells()).hasSize(3); Cell cell4 = (Cell) row2.getCells() .get(0); assertThat(cell4.getId()).isNull(); assertThat(cell4.getNodeName()).isEqualTo("cell"); assertThat(cell4.getParent()).isSameAs(column4); assertThat(cell4.getContext()).isEqualTo("cell"); assertThat(cell4.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { cell4.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { cell4.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(cell4.getAttributes()).containsEntry("colnumber", 1L) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(cell4.getRoles()).isNullOrEmpty(); assertThat(cell4.isReftext()).isFalse(); assertThat(cell4.getReftext()).isNull(); assertThat(cell4.getColumn()).isSameAs(column4); assertThat(cell4.getColspan()).isEqualTo(0); assertThat(cell4.getRowspan()).isEqualTo(0); assertThat(cell4.getText()).isEqualTo("Cell in column 1, T 2"); assertThat(cell4.getSource()).isEqualTo("Cell in column 1, T 2"); assertThat(cell4.getStyle()).isNull(); assertThat(cell4.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(cell4.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); assertThat(cell4.getInnerDocument()).isNull(); Cell cell5 = (Cell) row2.getCells() .get(1); assertThat(cell5.getId()).isNull(); assertThat(cell5.getNodeName()).isEqualTo("cell"); assertThat(cell5.getParent()).isSameAs(column5); assertThat(cell5.getContext()).isEqualTo("cell"); assertThat(cell5.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { cell5.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { 
cell5.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(cell5.getAttributes()).containsEntry("colnumber", 2L) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(cell5.getRoles()).isNullOrEmpty(); assertThat(cell5.isReftext()).isFalse(); assertThat(cell5.getReftext()).isNull(); assertThat(cell5.getColumn()).isSameAs(column5); assertThat(cell5.getColspan()).isEqualTo(0); assertThat(cell5.getRowspan()).isEqualTo(0); assertThat(cell5.getText()).isEqualTo("Cell in column 2, T 2"); assertThat(cell5.getSource()).isEqualTo("Cell in column 2, T 2"); assertThat(cell5.getStyle()).isNull(); assertThat(cell5.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(cell5.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); assertThat(cell5.getInnerDocument()).isNull(); Cell cell6 = (Cell) row2.getCells() .get(2); assertThat(cell6.getId()).isNull(); assertThat(cell6.getNodeName()).isEqualTo("cell"); assertThat(cell6.getParent()).isSameAs(column6); assertThat(cell6.getContext()).isEqualTo("cell"); assertThat(cell6.getDocument()).isSameAs(document1); assertThatThrownBy(() -> { cell6.isInline(); }).hasMessageContaining("NotImplementedError"); assertThatThrownBy(() -> { cell6.isBlock(); }).hasMessageContaining("NotImplementedError"); assertThat(cell6.getAttributes()).containsEntry("colnumber", 3L) .containsEntry("halign", "left") .containsEntry("valign", "top") .containsEntry("width", 1L); assertThat(cell6.getRoles()).isNullOrEmpty(); assertThat(cell6.isReftext()).isFalse(); assertThat(cell6.getReftext()).isNull(); assertThat(cell6.getColumn()).isSameAs(column6); assertThat(cell6.getColspan()).isEqualTo(0); assertThat(cell6.getRowspan()).isEqualTo(0); assertThat(cell6.getText()).isEqualTo("Cell in column 3, T 2"); assertThat(cell6.getSource()).isEqualTo("Cell in column 3, T 2"); assertThat(cell6.getStyle()).isNull(); 
assertThat(cell6.getHorizontalAlignment()).isEqualTo(Table.HorizontalAlignment.LEFT); assertThat(cell6.getVerticalAlignment()).isEqualTo(Table.VerticalAlignment.TOP); assertThat(cell6.getInnerDocument()).isNull(); assertThat(table2.getFrame()).isEqualTo("all"); assertThat(table2.getGrid()).isEqualTo("all"); assertThat(document1.getStructuredDoctitle()).isNull(); assertThat(document1.getDoctitle()).isNull(); assertThat(document1.getOptions()).containsEntry("header_footer", false); } // end::assert-code[] @Override // tag::mock-code[] public Document createMock() { Document mockDocument1 = mock(Document.class); when(mockDocument1.getId()).thenReturn(null); when(mockDocument1.getNodeName()).thenReturn("document"); when(mockDocument1.getParent()).thenReturn(null); when(mockDocument1.getContext()).thenReturn("document"); when(mockDocument1.getDocument()).thenReturn(mockDocument1); when(mockDocument1.isInline()).thenReturn(false); when(mockDocument1.isBlock()).thenReturn(true); Map<String, Object> map1 = new HashMap<>(); map1.put("doctype", "article"); map1.put("example-caption", "Example"); map1.put("figure-caption", "Figure"); map1.put("filetype", "html"); map1.put("notitle", ""); map1.put("prewrap", ""); map1.put("table-caption", "Table"); map1.put("table-number", "10"); when(mockDocument1.getAttributes()).thenReturn(map1); when(mockDocument1.getRoles()).thenReturn(Collections.emptyList()); when(mockDocument1.isReftext()).thenReturn(false); when(mockDocument1.getReftext()).thenReturn(null); when(mockDocument1.getCaption()).thenReturn(null); when(mockDocument1.getTitle()).thenReturn(null); when(mockDocument1.getStyle()).thenReturn(null); when(mockDocument1.getLevel()).thenReturn(0); when(mockDocument1.getContentModel()).thenReturn("compound"); when(mockDocument1.getSourceLocation()).thenReturn(null); when(mockDocument1.getSubstitutions()).thenReturn(Collections.emptyList()); Table mockTable1 = mock(Table.class); when(mockTable1.getId()).thenReturn(null); 
when(mockTable1.getNodeName()).thenReturn("table"); when(mockTable1.getParent()).thenReturn(mockDocument1); when(mockTable1.getContext()).thenReturn("table"); when(mockTable1.getDocument()).thenReturn(mockDocument1); when(mockTable1.isInline()).thenReturn(false); when(mockTable1.isBlock()).thenReturn(true); Map<String, Object> map2 = new HashMap<>(); map2.put("colcount", 3L); map2.put("rowcount", 1L); map2.put("style", "table"); map2.put("tablepcwidth", 100L); when(mockTable1.getAttributes()).thenReturn(map2); when(mockTable1.getRoles()).thenReturn(Collections.emptyList()); when(mockTable1.isReftext()).thenReturn(false); when(mockTable1.getReftext()).thenReturn(null); when(mockTable1.getCaption()).thenReturn("Table 11. "); when(mockTable1.getTitle()).thenReturn("this is the first caption"); when(mockTable1.getStyle()).thenReturn("table"); when(mockTable1.getLevel()).thenReturn(0); when(mockTable1.getContentModel()).thenReturn("compound"); when(mockTable1.getSourceLocation()).thenReturn(null); when(mockTable1.getSubstitutions()).thenReturn(Collections.emptyList()); when(mockTable1.getBlocks()).thenReturn(Collections.emptyList()); when(mockTable1.hasHeaderOption()).thenReturn(false); Column mockColumn1 = mock(Column.class); when(mockColumn1.getId()).thenReturn(null); when(mockColumn1.getNodeName()).thenReturn("column"); when(mockColumn1.getParent()).thenReturn(mockTable1); when(mockColumn1.getContext()).thenReturn("column"); when(mockColumn1.getDocument()).thenReturn(mockDocument1); when(mockColumn1.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockColumn1.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map3 = new HashMap<>(); map3.put("colnumber", 1L); map3.put("colpcwidth", 33.3333); map3.put("halign", "left"); map3.put("valign", "top"); map3.put("width", 1L); when(mockColumn1.getAttributes()).thenReturn(map3); 
when(mockColumn1.getRoles()).thenReturn(Collections.emptyList()); when(mockColumn1.isReftext()).thenReturn(false); when(mockColumn1.getReftext()).thenReturn(null); when(mockColumn1.getStyle()).thenReturn(null); when(mockColumn1.getTable()).thenReturn(mockTable1); when(mockColumn1.getColumnNumber()).thenReturn(1); when(mockColumn1.getWidth()).thenReturn(1); when(mockColumn1.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockColumn1.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); Column mockColumn2 = mock(Column.class); when(mockColumn2.getId()).thenReturn(null); when(mockColumn2.getNodeName()).thenReturn("column"); when(mockColumn2.getParent()).thenReturn(mockTable1); when(mockColumn2.getContext()).thenReturn("column"); when(mockColumn2.getDocument()).thenReturn(mockDocument1); when(mockColumn2.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockColumn2.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map4 = new HashMap<>(); map4.put("colnumber", 2L); map4.put("colpcwidth", 33.3333); map4.put("halign", "left"); map4.put("valign", "top"); map4.put("width", 1L); when(mockColumn2.getAttributes()).thenReturn(map4); when(mockColumn2.getRoles()).thenReturn(Collections.emptyList()); when(mockColumn2.isReftext()).thenReturn(false); when(mockColumn2.getReftext()).thenReturn(null); when(mockColumn2.getStyle()).thenReturn(null); when(mockColumn2.getTable()).thenReturn(mockTable1); when(mockColumn2.getColumnNumber()).thenReturn(2); when(mockColumn2.getWidth()).thenReturn(1); when(mockColumn2.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockColumn2.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); Column mockColumn3 = mock(Column.class); when(mockColumn3.getId()).thenReturn(null); when(mockColumn3.getNodeName()).thenReturn("column"); when(mockColumn3.getParent()).thenReturn(mockTable1); 
when(mockColumn3.getContext()).thenReturn("column"); when(mockColumn3.getDocument()).thenReturn(mockDocument1); when(mockColumn3.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockColumn3.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map5 = new HashMap<>(); map5.put("colnumber", 3L); map5.put("colpcwidth", 33.3334); map5.put("halign", "left"); map5.put("valign", "top"); map5.put("width", 1L); when(mockColumn3.getAttributes()).thenReturn(map5); when(mockColumn3.getRoles()).thenReturn(Collections.emptyList()); when(mockColumn3.isReftext()).thenReturn(false); when(mockColumn3.getReftext()).thenReturn(null); when(mockColumn3.getStyle()).thenReturn(null); when(mockColumn3.getTable()).thenReturn(mockTable1); when(mockColumn3.getColumnNumber()).thenReturn(3); when(mockColumn3.getWidth()).thenReturn(1); when(mockColumn3.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockColumn3.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); when(mockTable1.getColumns()).thenReturn(Arrays.asList(mockColumn1, mockColumn2, mockColumn3)); when(mockTable1.getHeader()).thenReturn(Collections.emptyList()); when(mockTable1.getFooter()).thenReturn(Collections.emptyList()); Row mockRow1 = mock(Row.class); Cell mockCell1 = mock(Cell.class); when(mockCell1.getId()).thenReturn(null); when(mockCell1.getNodeName()).thenReturn("cell"); when(mockCell1.getParent()).thenReturn(mockColumn1); when(mockCell1.getContext()).thenReturn("cell"); when(mockCell1.getDocument()).thenReturn(mockDocument1); when(mockCell1.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockCell1.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map6 = new HashMap<>(); map6.put("colnumber", 1L); map6.put("halign", "left"); map6.put("valign", "top"); map6.put("width", 1L); 
when(mockCell1.getAttributes()).thenReturn(map6); when(mockCell1.getRoles()).thenReturn(Collections.emptyList()); when(mockCell1.isReftext()).thenReturn(false); when(mockCell1.getReftext()).thenReturn(null); when(mockCell1.getColumn()).thenReturn(mockColumn1); when(mockCell1.getColspan()).thenReturn(0); when(mockCell1.getRowspan()).thenReturn(0); when(mockCell1.getText()).thenReturn("Cell in column 1, T 1"); when(mockCell1.getSource()).thenReturn("Cell in column 1, T 1"); when(mockCell1.getStyle()).thenReturn(null); when(mockCell1.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockCell1.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); when(mockCell1.getInnerDocument()).thenReturn(null); Cell mockCell2 = mock(Cell.class); when(mockCell2.getId()).thenReturn(null); when(mockCell2.getNodeName()).thenReturn("cell"); when(mockCell2.getParent()).thenReturn(mockColumn2); when(mockCell2.getContext()).thenReturn("cell"); when(mockCell2.getDocument()).thenReturn(mockDocument1); when(mockCell2.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockCell2.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map7 = new HashMap<>(); map7.put("colnumber", 2L); map7.put("halign", "left"); map7.put("valign", "top"); map7.put("width", 1L); when(mockCell2.getAttributes()).thenReturn(map7); when(mockCell2.getRoles()).thenReturn(Collections.emptyList()); when(mockCell2.isReftext()).thenReturn(false); when(mockCell2.getReftext()).thenReturn(null); when(mockCell2.getColumn()).thenReturn(mockColumn2); when(mockCell2.getColspan()).thenReturn(0); when(mockCell2.getRowspan()).thenReturn(0); when(mockCell2.getText()).thenReturn("Cell in column 2, T 1"); when(mockCell2.getSource()).thenReturn("Cell in column 2, T 1"); when(mockCell2.getStyle()).thenReturn(null); when(mockCell2.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); 
when(mockCell2.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); when(mockCell2.getInnerDocument()).thenReturn(null); Cell mockCell3 = mock(Cell.class); when(mockCell3.getId()).thenReturn(null); when(mockCell3.getNodeName()).thenReturn("cell"); when(mockCell3.getParent()).thenReturn(mockColumn3); when(mockCell3.getContext()).thenReturn("cell"); when(mockCell3.getDocument()).thenReturn(mockDocument1); when(mockCell3.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockCell3.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map8 = new HashMap<>(); map8.put("colnumber", 3L); map8.put("halign", "left"); map8.put("valign", "top"); map8.put("width", 1L); when(mockCell3.getAttributes()).thenReturn(map8); when(mockCell3.getRoles()).thenReturn(Collections.emptyList()); when(mockCell3.isReftext()).thenReturn(false); when(mockCell3.getReftext()).thenReturn(null); when(mockCell3.getColumn()).thenReturn(mockColumn3); when(mockCell3.getColspan()).thenReturn(0); when(mockCell3.getRowspan()).thenReturn(0); when(mockCell3.getText()).thenReturn("Cell in column 3, T 1"); when(mockCell3.getSource()).thenReturn("Cell in column 3, T 1"); when(mockCell3.getStyle()).thenReturn(null); when(mockCell3.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockCell3.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); when(mockCell3.getInnerDocument()).thenReturn(null); when(mockRow1.getCells()).thenReturn(Arrays.asList(mockCell1, mockCell2, mockCell3)); when(mockTable1.getBody()).thenReturn(Collections.singletonList(mockRow1)); when(mockTable1.getFrame()).thenReturn("all"); when(mockTable1.getGrid()).thenReturn("all"); Table mockTable2 = mock(Table.class); when(mockTable2.getId()).thenReturn(null); when(mockTable2.getNodeName()).thenReturn("table"); when(mockTable2.getParent()).thenReturn(mockDocument1); when(mockTable2.getContext()).thenReturn("table"); 
when(mockTable2.getDocument()).thenReturn(mockDocument1); when(mockTable2.isInline()).thenReturn(false); when(mockTable2.isBlock()).thenReturn(true); Map<String, Object> map9 = new HashMap<>(); map9.put("colcount", 3L); map9.put("rowcount", 1L); map9.put("style", "table"); map9.put("tablepcwidth", 100L); when(mockTable2.getAttributes()).thenReturn(map9); when(mockTable2.getRoles()).thenReturn(Collections.emptyList()); when(mockTable2.isReftext()).thenReturn(false); when(mockTable2.getReftext()).thenReturn(null); when(mockTable2.getCaption()).thenReturn("Table 12. "); when(mockTable2.getTitle()).thenReturn("this is the second caption"); when(mockTable2.getStyle()).thenReturn("table"); when(mockTable2.getLevel()).thenReturn(0); when(mockTable2.getContentModel()).thenReturn("compound"); when(mockTable2.getSourceLocation()).thenReturn(null); when(mockTable2.getSubstitutions()).thenReturn(Collections.emptyList()); when(mockTable2.getBlocks()).thenReturn(Collections.emptyList()); when(mockTable2.hasHeaderOption()).thenReturn(false); Column mockColumn4 = mock(Column.class); when(mockColumn4.getId()).thenReturn(null); when(mockColumn4.getNodeName()).thenReturn("column"); when(mockColumn4.getParent()).thenReturn(mockTable2); when(mockColumn4.getContext()).thenReturn("column"); when(mockColumn4.getDocument()).thenReturn(mockDocument1); when(mockColumn4.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockColumn4.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map10 = new HashMap<>(); map10.put("colnumber", 1L); map10.put("colpcwidth", 33.3333); map10.put("halign", "left"); map10.put("valign", "top"); map10.put("width", 1L); when(mockColumn4.getAttributes()).thenReturn(map10); when(mockColumn4.getRoles()).thenReturn(Collections.emptyList()); when(mockColumn4.isReftext()).thenReturn(false); when(mockColumn4.getReftext()).thenReturn(null); when(mockColumn4.getStyle()).thenReturn(null); 
when(mockColumn4.getTable()).thenReturn(mockTable2); when(mockColumn4.getColumnNumber()).thenReturn(1); when(mockColumn4.getWidth()).thenReturn(1); when(mockColumn4.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockColumn4.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); Column mockColumn5 = mock(Column.class); when(mockColumn5.getId()).thenReturn(null); when(mockColumn5.getNodeName()).thenReturn("column"); when(mockColumn5.getParent()).thenReturn(mockTable2); when(mockColumn5.getContext()).thenReturn("column"); when(mockColumn5.getDocument()).thenReturn(mockDocument1); when(mockColumn5.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockColumn5.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map11 = new HashMap<>(); map11.put("colnumber", 2L); map11.put("colpcwidth", 33.3333); map11.put("halign", "left"); map11.put("valign", "top"); map11.put("width", 1L); when(mockColumn5.getAttributes()).thenReturn(map11); when(mockColumn5.getRoles()).thenReturn(Collections.emptyList()); when(mockColumn5.isReftext()).thenReturn(false); when(mockColumn5.getReftext()).thenReturn(null); when(mockColumn5.getStyle()).thenReturn(null); when(mockColumn5.getTable()).thenReturn(mockTable2); when(mockColumn5.getColumnNumber()).thenReturn(2); when(mockColumn5.getWidth()).thenReturn(1); when(mockColumn5.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockColumn5.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); Column mockColumn6 = mock(Column.class); when(mockColumn6.getId()).thenReturn(null); when(mockColumn6.getNodeName()).thenReturn("column"); when(mockColumn6.getParent()).thenReturn(mockTable2); when(mockColumn6.getContext()).thenReturn("column"); when(mockColumn6.getDocument()).thenReturn(mockDocument1); when(mockColumn6.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); 
when(mockColumn6.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map12 = new HashMap<>(); map12.put("colnumber", 3L); map12.put("colpcwidth", 33.3334); map12.put("halign", "left"); map12.put("valign", "top"); map12.put("width", 1L); when(mockColumn6.getAttributes()).thenReturn(map12); when(mockColumn6.getRoles()).thenReturn(Collections.emptyList()); when(mockColumn6.isReftext()).thenReturn(false); when(mockColumn6.getReftext()).thenReturn(null); when(mockColumn6.getStyle()).thenReturn(null); when(mockColumn6.getTable()).thenReturn(mockTable2); when(mockColumn6.getColumnNumber()).thenReturn(3); when(mockColumn6.getWidth()).thenReturn(1); when(mockColumn6.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockColumn6.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); when(mockTable2.getColumns()).thenReturn(Arrays.asList(mockColumn4, mockColumn5, mockColumn6)); when(mockTable2.getHeader()).thenReturn(Collections.emptyList()); when(mockTable2.getFooter()).thenReturn(Collections.emptyList()); Row mockRow2 = mock(Row.class); Cell mockCell4 = mock(Cell.class); when(mockCell4.getId()).thenReturn(null); when(mockCell4.getNodeName()).thenReturn("cell"); when(mockCell4.getParent()).thenReturn(mockColumn4); when(mockCell4.getContext()).thenReturn("cell"); when(mockCell4.getDocument()).thenReturn(mockDocument1); when(mockCell4.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockCell4.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map13 = new HashMap<>(); map13.put("colnumber", 1L); map13.put("halign", "left"); map13.put("valign", "top"); map13.put("width", 1L); when(mockCell4.getAttributes()).thenReturn(map13); when(mockCell4.getRoles()).thenReturn(Collections.emptyList()); when(mockCell4.isReftext()).thenReturn(false); when(mockCell4.getReftext()).thenReturn(null); 
when(mockCell4.getColumn()).thenReturn(mockColumn4); when(mockCell4.getColspan()).thenReturn(0); when(mockCell4.getRowspan()).thenReturn(0); when(mockCell4.getText()).thenReturn("Cell in column 1, T 2"); when(mockCell4.getSource()).thenReturn("Cell in column 1, T 2"); when(mockCell4.getStyle()).thenReturn(null); when(mockCell4.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockCell4.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); when(mockCell4.getInnerDocument()).thenReturn(null); Cell mockCell5 = mock(Cell.class); when(mockCell5.getId()).thenReturn(null); when(mockCell5.getNodeName()).thenReturn("cell"); when(mockCell5.getParent()).thenReturn(mockColumn5); when(mockCell5.getContext()).thenReturn("cell"); when(mockCell5.getDocument()).thenReturn(mockDocument1); when(mockCell5.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockCell5.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map14 = new HashMap<>(); map14.put("colnumber", 2L); map14.put("halign", "left"); map14.put("valign", "top"); map14.put("width", 1L); when(mockCell5.getAttributes()).thenReturn(map14); when(mockCell5.getRoles()).thenReturn(Collections.emptyList()); when(mockCell5.isReftext()).thenReturn(false); when(mockCell5.getReftext()).thenReturn(null); when(mockCell5.getColumn()).thenReturn(mockColumn5); when(mockCell5.getColspan()).thenReturn(0); when(mockCell5.getRowspan()).thenReturn(0); when(mockCell5.getText()).thenReturn("Cell in column 2, T 2"); when(mockCell5.getSource()).thenReturn("Cell in column 2, T 2"); when(mockCell5.getStyle()).thenReturn(null); when(mockCell5.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockCell5.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); when(mockCell5.getInnerDocument()).thenReturn(null); Cell mockCell6 = mock(Cell.class); when(mockCell6.getId()).thenReturn(null); 
when(mockCell6.getNodeName()).thenReturn("cell"); when(mockCell6.getParent()).thenReturn(mockColumn6); when(mockCell6.getContext()).thenReturn("cell"); when(mockCell6.getDocument()).thenReturn(mockDocument1); when(mockCell6.isInline()).thenThrow(new UnsupportedOperationException("NotImplementedError")); when(mockCell6.isBlock()).thenThrow(new UnsupportedOperationException("NotImplementedError")); Map<String, Object> map15 = new HashMap<>(); map15.put("colnumber", 3L); map15.put("halign", "left"); map15.put("valign", "top"); map15.put("width", 1L); when(mockCell6.getAttributes()).thenReturn(map15); when(mockCell6.getRoles()).thenReturn(Collections.emptyList()); when(mockCell6.isReftext()).thenReturn(false); when(mockCell6.getReftext()).thenReturn(null); when(mockCell6.getColumn()).thenReturn(mockColumn6); when(mockCell6.getColspan()).thenReturn(0); when(mockCell6.getRowspan()).thenReturn(0); when(mockCell6.getText()).thenReturn("Cell in column 3, T 2"); when(mockCell6.getSource()).thenReturn("Cell in column 3, T 2"); when(mockCell6.getStyle()).thenReturn(null); when(mockCell6.getHorizontalAlignment()).thenReturn(Table.HorizontalAlignment.LEFT); when(mockCell6.getVerticalAlignment()).thenReturn(Table.VerticalAlignment.TOP); when(mockCell6.getInnerDocument()).thenReturn(null); when(mockRow2.getCells()).thenReturn(Arrays.asList(mockCell4, mockCell5, mockCell6)); when(mockTable2.getBody()).thenReturn(Collections.singletonList(mockRow2)); when(mockTable2.getFrame()).thenReturn("all"); when(mockTable2.getGrid()).thenReturn("all"); when(mockDocument1.getBlocks()).thenReturn(Arrays.asList(mockTable1, mockTable2)); when(mockDocument1.getStructuredDoctitle()).thenReturn(null); when(mockDocument1.getDoctitle()).thenReturn(null); Map<Object, Object> map16 = new HashMap<>(); map16.put("attributes", "{}"); map16.put("header_footer", false); when(mockDocument1.getOptions()).thenReturn(map16); return mockDocument1; } // end::mock-code[] }
package com.baidubce.services.bmr.model;

import java.util.Date;
import java.util.List;

import com.baidubce.model.AbstractBceResponse;
import com.fasterxml.jackson.annotation.JsonFormat;

/**
 * Response model describing a BMR cluster template: its step configuration,
 * billing and termination options, network placement (VPC/subnet/security
 * group), image information and instance group layout.
 *
 * <p>All fields are populated by JSON deserialization of the service
 * response; the timestamp fields are serialized in ISO-8601 UTC form
 * ({@code yyyy-MM-dd'T'HH:mm:ss'Z'}).
 */
public class TemplateInfoResponse extends AbstractBceResponse {
    // private for consistency with every other field in this class;
    // access goes through getSteps()/setSteps()
    private List<StepConfig> steps;
    private Boolean alarmEnabled;
    private Boolean autoTerminate;
    private String logUri;
    private String payType;
    private Reminder reminder;
    private Boolean sendMessage;
    private Boolean terminationProtected;
    private String availabilityZone;
    private VpcInfo vpc;
    private SubnetInfo subnet;
    private BmrSecurityGroupVo systemSecurityGroup;
    private Boolean serviceHaEnabled;
    private Boolean safeModeEnabled;
    private String imageDescription;
    private boolean isCopyable;
    private boolean abandoned;
    private List<Application> applications;
    // Timestamps are exchanged as ISO-8601 strings in UTC.
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ss'Z'", timezone = "UTC")
    private Date creationDateTime;
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ss'Z'", timezone = "UTC")
    private Date updateDateTime;
    private String id;
    private String name;
    private boolean shared;
    private String imageType;
    private String imageVersion;
    private List<InstanceGroupConfig> instanceGroups;

    /** @return the template's step configurations, or null if none were returned */
    public List<StepConfig> getSteps() {
        return steps;
    }

    public void setSteps(List<StepConfig> steps) {
        this.steps = steps;
    }

    public Boolean getAlarmEnabled() {
        return alarmEnabled;
    }

    public void setAlarmEnabled(Boolean alarmEnabled) {
        this.alarmEnabled = alarmEnabled;
    }

    public Boolean getAutoTerminate() {
        return autoTerminate;
    }

    public void setAutoTerminate(Boolean autoTerminate) {
        this.autoTerminate = autoTerminate;
    }

    public String getLogUri() {
        return logUri;
    }

    public void setLogUri(String logUri) {
        this.logUri = logUri;
    }

    public String getPayType() {
        return payType;
    }

    public void setPayType(String payType) {
        this.payType = payType;
    }

    public Reminder getReminder() {
        return reminder;
    }

    public void setReminder(Reminder reminder) {
        this.reminder = reminder;
    }

    public Boolean getSendMessage() {
        return sendMessage;
    }

    public void setSendMessage(Boolean sendMessage) {
        this.sendMessage = sendMessage;
    }

    public Boolean getTerminationProtected() {
        return terminationProtected;
    }

    public void setTerminationProtected(Boolean terminationProtected) {
        this.terminationProtected = terminationProtected;
    }

    public String getAvailabilityZone() {
        return availabilityZone;
    }

    public void setAvailabilityZone(String availabilityZone) {
        this.availabilityZone = availabilityZone;
    }

    public VpcInfo getVpc() {
        return vpc;
    }

    public void setVpc(VpcInfo vpc) {
        this.vpc = vpc;
    }

    public SubnetInfo getSubnet() {
        return subnet;
    }

    public void setSubnet(SubnetInfo subnet) {
        this.subnet = subnet;
    }

    public BmrSecurityGroupVo getSystemSecurityGroup() {
        return systemSecurityGroup;
    }

    public void setSystemSecurityGroup(BmrSecurityGroupVo systemSecurityGroup) {
        this.systemSecurityGroup = systemSecurityGroup;
    }

    public Boolean getServiceHaEnabled() {
        return serviceHaEnabled;
    }

    public void setServiceHaEnabled(Boolean serviceHaEnabled) {
        this.serviceHaEnabled = serviceHaEnabled;
    }

    public Boolean getSafeModeEnabled() {
        return safeModeEnabled;
    }

    public void setSafeModeEnabled(Boolean safeModeEnabled) {
        this.safeModeEnabled = safeModeEnabled;
    }

    public String getImageDescription() {
        return imageDescription;
    }

    public void setImageDescription(String imageDescription) {
        this.imageDescription = imageDescription;
    }

    public boolean isCopyable() {
        return isCopyable;
    }

    public void setCopyable(boolean copyable) {
        isCopyable = copyable;
    }

    public boolean isAbandoned() {
        return abandoned;
    }

    public void setAbandoned(boolean abandoned) {
        this.abandoned = abandoned;
    }

    public List<Application> getApplications() {
        return applications;
    }

    public void setApplications(List<Application> applications) {
        this.applications = applications;
    }

    /**
     * @return the creation timestamp, or null. A defensive copy is returned
     *         because {@link Date} is mutable; callers cannot corrupt this
     *         response's internal state through the returned object.
     */
    public Date getCreationDateTime() {
        return creationDateTime == null ? null : new Date(creationDateTime.getTime());
    }

    public void setCreationDateTime(Date creationDateTime) {
        // Defensive copy: don't let later mutation of the caller's Date leak in.
        this.creationDateTime = creationDateTime == null ? null : new Date(creationDateTime.getTime());
    }

    /**
     * @return the last-update timestamp, or null. Returned as a defensive
     *         copy for the same reason as {@link #getCreationDateTime()}.
     */
    public Date getUpdateDateTime() {
        return updateDateTime == null ? null : new Date(updateDateTime.getTime());
    }

    public void setUpdateDateTime(Date updateDateTime) {
        // Defensive copy: don't let later mutation of the caller's Date leak in.
        this.updateDateTime = updateDateTime == null ? null : new Date(updateDateTime.getTime());
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public boolean isShared() {
        return shared;
    }

    public void setShared(boolean shared) {
        this.shared = shared;
    }

    public String getImageType() {
        return imageType;
    }

    public void setImageType(String imageType) {
        this.imageType = imageType;
    }

    public String getImageVersion() {
        return imageVersion;
    }

    public void setImageVersion(String imageVersion) {
        this.imageVersion = imageVersion;
    }

    public List<InstanceGroupConfig> getInstanceGroups() {
        return instanceGroups;
    }

    public void setInstanceGroups(List<InstanceGroupConfig> instanceGroups) {
        this.instanceGroups = instanceGroups;
    }
}
/*
 * Copyright 2009 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.javascript.jscomp;

import com.google.common.base.Preconditions;
import com.google.javascript.jscomp.NodeTraversal.ScopedCallback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;

/**
 * Optimization for functions that have {@code var_args} or access the
 * arguments array.
 *
 * <p>Example:
 * <pre>
 * function() { alert(arguments[0] + argument[1]) }
 * </pre>
 * to:
 * <pre>
 * function(a, b) { alert(a, b) }
 * </pre>
 *
 * Each newly inserted variable name will be unique very much like the output
 * of the AST found after the {@link Normalize} pass.
 *
 */
class OptimizeArgumentsArray implements CompilerPass, ScopedCallback {

  // The arguments object as described by ECMAScript version 3
  // section 10.1.8
  private static final String ARGUMENTS = "arguments";

  // To ensure that the newly introduced parameter names are unique. We will
  // use this string as prefix unless the caller specify a different prefix.
  private static final String PARAMETER_PREFIX =
      "JSCompiler_OptimizeArgumentsArray_p";

  // The prefix for the newly introduced parameter name.
  // (Renamed from the misspelled "paramPredix".)
  private final String paramPrefix;

  // To make each parameter name unique in the function. We append an
  // unique integer at the end.
  private int uniqueId = 0;

  // Reference to the compiler object to notify any changes to source code AST.
  private final AbstractCompiler compiler;

  // A stack of arguments access list to the corresponding outer functions.
  private final Deque<List<Node>> argumentsAccessStack = new ArrayDeque<>();

  // This stores a list of argument access in the current scope.
  private List<Node> currentArgumentsAccess = null;

  /**
   * Construct this pass and use {@link #PARAMETER_PREFIX} as the prefix for
   * all parameter names that it introduces.
   */
  OptimizeArgumentsArray(AbstractCompiler compiler) {
    this(compiler, PARAMETER_PREFIX);
  }

  /**
   * @param paramPrefix the prefix to use for all parameter names that this
   *     pass introduces
   */
  OptimizeArgumentsArray(AbstractCompiler compiler, String paramPrefix) {
    this.compiler = Preconditions.checkNotNull(compiler);
    this.paramPrefix = Preconditions.checkNotNull(paramPrefix);
  }

  @Override
  public void process(Node externs, Node root) {
    NodeTraversal.traverseEs6(compiler, Preconditions.checkNotNull(root), this);
  }

  @Override
  public void enterScope(NodeTraversal traversal) {
    Preconditions.checkNotNull(traversal);

    // This optimization is valid only within a function so we are going to
    // skip over the initial entry to the global scope.
    Node function = traversal.getScopeRoot();
    if (!function.isFunction()) {
      return;
    }

    // Introduces a new access list and stores the access list of the outer
    // scope in the stack if necessary.
    if (currentArgumentsAccess != null) {
      argumentsAccessStack.push(currentArgumentsAccess);
    }
    currentArgumentsAccess = new LinkedList<>();
  }

  @Override
  public void exitScope(NodeTraversal traversal) {
    Preconditions.checkNotNull(traversal);

    // This is the case when we are exiting the global scope where we had never
    // collected argument access list. Since we do not perform this optimization
    // for the global scope, we will skip this exit point.
    if (currentArgumentsAccess == null) {
      return;
    }

    Node function = traversal.getScopeRoot();
    if (!function.isFunction()) {
      return;
    }

    // Attempt to replace the argument access and if the AST has been change,
    // report back to the compiler.
    if (tryReplaceArguments(traversal.getScope())) {
      traversal.getCompiler().reportCodeChange();
    }

    // After the attempt to replace the arguments. The currentArgumentsAccess
    // is stale and as we exit the Scope, no longer holds all the access to the
    // current scope anymore. We'll pop the access list from the outer scope
    // and set it as currentArgumentsAccess if the outer scope is not the global
    // scope.
    if (!argumentsAccessStack.isEmpty()) {
      currentArgumentsAccess = argumentsAccessStack.pop();
    } else {
      currentArgumentsAccess = null;
    }
  }

  @Override
  public boolean shouldTraverse(
      NodeTraversal nodeTraversal, Node node, Node parent) {
    // We will continuously recurse down the AST regardless of the node types.
    return true;
  }

  @Override
  public void visit(NodeTraversal traversal, Node node, Node parent) {
    Preconditions.checkNotNull(traversal);
    Preconditions.checkNotNull(node);

    // Searches for all the references to the arguments array.

    // We don't have an arguments list set up for this scope. This implies we
    // are currently in the global scope so we will not record any arguments
    // array access.
    if (currentArgumentsAccess == null) {
      return;
    }

    // Otherwise, we are in a function scope and we should record if the current
    // name is referring to the implicit arguments array.
    if (node.isName() && ARGUMENTS.equals(node.getString())) {
      currentArgumentsAccess.add(node);
    }
  }

  /**
   * Tries to optimize all the arguments array access in this scope by assigning
   * a name to each element.
   *
   * @param scope scope of the function
   * @return true if any modification has been done to the AST
   */
  private boolean tryReplaceArguments(Scope scope) {
    Node parametersList = scope.getRootNode().getSecondChild();
    Preconditions.checkState(parametersList.isParamList());

    // Keep track of rather this function modified the AST and needs to be
    // reported back to the compiler later.
    boolean changed = false;

    // Number of parameter that can be accessed without using the arguments
    // array.
    int numNamedParameter = parametersList.getChildCount();

    // We want to guess what the highest index that has been access from the
    // arguments array. We will guess that it does not use anything index higher
    // than the named parameter list first until we see other wise.
    int highestIndex = numNamedParameter - 1;

    // Iterate through all the references to arguments array in the function to
    // determine the real highestIndex.
    for (Node ref : currentArgumentsAccess) {

      Node getElem = ref.getParent();

      // Bail on anything but argument[c] access where c is a constant.
      // TODO(user): We might not need to bail out all the time, there might
      // be more cases that we can cover.
      if (!getElem.isGetElem() || ref != getElem.getFirstChild()) {
        return false;
      }

      Node index = ref.getNext();

      // We have something like arguments[x] where x is not a constant. That
      // means at least one of the access is not known.
      //
      // Bug fix: also bail on a non-integral numeric index such as
      // arguments[1.5]. The original code let it through and later truncated
      // it with an (int) cast, which would incorrectly rewrite
      // arguments[1.5] to the parameter at index 1 even though the access
      // evaluates to undefined at runtime.
      if (!index.isNumber() || index.getDouble() < 0
          || index.getDouble() != (int) index.getDouble()) {
        // TODO(user): Its possible not to give up just yet. The type
        // inference did a 'semi value propagation'. If we know that string
        // is never a subclass of the type of the index. We'd know that
        // it is never 'callee'.
        return false; // Give up.
      }

      Node getElemParent = getElem.getParent();
      // When we have argument[0](), replacing it with a() is semantically
      // different if argument[0] is a function call that refers to 'this'
      if (getElemParent.isCall() && getElemParent.getFirstChild() == getElem) {
        // TODO(user): We can consider using .call() if aliasing that
        // argument allows shorter alias for other arguments.
        return false;
      }

      // Replace the highest index if we see an access that has a higher index
      // than all the one we saw before.
      int value = (int) index.getDouble();
      if (value > highestIndex) {
        highestIndex = value;
      }
    }

    // Number of extra arguments we need.
    // For example: function() { arguments[3] } access index 3 so
    // it will need 4 extra named arguments to changed into:
    // function(a,b,c,d) { d }.
    int numExtraArgs = highestIndex - numNamedParameter + 1;

    // Temporary holds the new names as string for quick access later.
    String[] argNames = new String[numExtraArgs];

    // Insert the formal parameter to the method's signature.
    // Example: function() --> function(r0, r1, r2)
    for (int i = 0; i < numExtraArgs; i++) {
      String name = getNewName();
      argNames[i] = name;
      parametersList.addChildToBack(
          IR.name(name).useSourceInfoIfMissingFrom(parametersList));
      changed = true;
    }

    // This loop performs the replacement of arguments[x] -> a if x is known.
    for (Node ref : currentArgumentsAccess) {
      Node index = ref.getNext();

      // Skip if it is unknown.
      if (!index.isNumber()) {
        continue;
      }
      int value = (int) index.getDouble();

      // Unnamed parameter.
      if (value >= numNamedParameter) {
        ref.getParent().getParent().replaceChild(ref.getParent(),
            IR.name(argNames[value - numNamedParameter]));
      } else {

        // Here, for no apparent reason, the user is accessing a named parameter
        // with arguments[idx]. We can replace it with the actual name for them.
        Node name = parametersList.getFirstChild();

        // This is a linear search for the actual name from the signature.
        // It is not necessary to make this fast because chances are the user
        // will not deliberately write code like this.
        for (int i = 0; i < value; i++) {
          name = name.getNext();
        }
        ref.getParent().getParent().replaceChild(ref.getParent(),
            IR.name(name.getString()));
      }
      changed = true;
    }

    return changed;
  }

  /**
   * Generate a unique name for the next parameter.
   */
  private String getNewName() {
    return paramPrefix + uniqueId++;
  }
}
/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jakewharton.disklrucache; import java.io.BufferedWriter; import java.io.Closeable; import java.io.EOFException; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * A cache that uses a bounded amount of space on a filesystem. Each cache * entry has a string key and a fixed number of values. Each key must match * the regex <strong>[a-z0-9_-]{1,120}</strong>. Values are byte sequences, * accessible as streams or files. Each value must be between {@code 0} and * {@code Integer.MAX_VALUE} bytes in length. * * <p>The cache stores its data in a directory on the filesystem. This * directory must be exclusive to the cache; the cache may delete or overwrite * files from its directory. 
It is an error for multiple processes to use the * same cache directory at the same time. * * <p>This cache limits the number of bytes that it will store on the * filesystem. When the number of stored bytes exceeds the limit, the cache will * remove entries in the background until the limit is satisfied. The limit is * not strict: the cache may temporarily exceed it while waiting for files to be * deleted. The limit does not include filesystem overhead or the cache * journal so space-sensitive applications should set a conservative limit. * * <p>Clients call {@link #edit} to create or update the values of an entry. An * entry may have only one editor at one time; if a value is not available to be * edited then {@link #edit} will return null. * <ul> * <li>When an entry is being <strong>created</strong> it is necessary to * supply a full set of values; the empty value should be used as a * placeholder if necessary. * <li>When an entry is being <strong>edited</strong>, it is not necessary * to supply data for every value; values default to their previous * value. * </ul> * Every {@link #edit} call must be matched by a call to {@link com.jakewharton.disklrucache.DiskLruCache.Editor#commit} * or {@link com.jakewharton.disklrucache.DiskLruCache.Editor#abort}. Committing is atomic: a read observes the full set * of values as they were before or after the commit, but never a mix of values. * * <p>Clients call {@link #get} to read a snapshot of an entry. The read will * observe the value at the time that {@link #get} was called. Updates and * removals after the call do not impact ongoing reads. * * <p>This class is tolerant of some I/O errors. If files are missing from the * filesystem, the corresponding entries will be dropped from the cache. If * an error occurs while writing a cache value, the edit will fail silently. * Callers should handle other problems by catching {@code IOException} and * responding appropriately. 
*/ public final class DiskLruCache implements Closeable { static final String JOURNAL_FILE = "journal"; static final String JOURNAL_FILE_TEMP = "journal.tmp"; static final String JOURNAL_FILE_BACKUP = "journal.bkp"; static final String MAGIC = "libcore.io.DiskLruCache"; static final String VERSION_1 = "1"; static final long ANY_SEQUENCE_NUMBER = -1; static final String STRING_KEY_PATTERN = "[a-z0-9_-]{1,120}"; static final Pattern LEGAL_KEY_PATTERN = Pattern.compile(STRING_KEY_PATTERN); private static final String CLEAN = "CLEAN"; private static final String DIRTY = "DIRTY"; private static final String REMOVE = "REMOVE"; private static final String READ = "READ"; /* * This cache uses a journal file named "journal". A typical journal file * looks like this: * libcore.io.DiskLruCache * 1 * 100 * 2 * * CLEAN 3400330d1dfc7f3f7f4b8d4d803dfcf6 832 21054 * DIRTY 335c4c6028171cfddfbaae1a9c313c52 * CLEAN 335c4c6028171cfddfbaae1a9c313c52 3934 2342 * REMOVE 335c4c6028171cfddfbaae1a9c313c52 * DIRTY 1ab96a171faeeee38496d8b330771a7a * CLEAN 1ab96a171faeeee38496d8b330771a7a 1600 234 * READ 335c4c6028171cfddfbaae1a9c313c52 * READ 3400330d1dfc7f3f7f4b8d4d803dfcf6 * * The first five lines of the journal form its header. They are the * constant string "libcore.io.DiskLruCache", the disk cache's version, * the application's version, the value count, and a blank line. * * Each of the subsequent lines in the file is a record of the state of a * cache entry. Each line contains space-separated values: a state, a key, * and optional state-specific values. * o DIRTY lines track that an entry is actively being created or updated. * Every successful DIRTY action should be followed by a CLEAN or REMOVE * action. DIRTY lines without a matching CLEAN or REMOVE indicate that * temporary files may need to be deleted. * o CLEAN lines track a cache entry that has been successfully published * and may be read. A publish line is followed by the lengths of each of * its values. 
* o READ lines track accesses for LRU. * o REMOVE lines track entries that have been deleted. * * The journal file is appended to as cache operations occur. The journal may * occasionally be compacted by dropping redundant lines. A temporary file named * "journal.tmp" will be used during compaction; that file should be deleted if * it exists when the cache is opened. */ private final File directory; private final File journalFile; private final File journalFileTmp; private final File journalFileBackup; private final int appVersion; private long maxSize; private final int valueCount; private long size = 0; private Writer journalWriter; private final LinkedHashMap<String, Entry> lruEntries = new LinkedHashMap<String, Entry>(0, 0.75f, true); private int redundantOpCount; /** * To differentiate between old and current snapshots, each entry is given * a sequence number each time an edit is committed. A snapshot is stale if * its sequence number is not equal to its entry's sequence number. */ private long nextSequenceNumber = 0; /** This cache uses a single background thread to evict entries. */ final ThreadPoolExecutor executorService = new ThreadPoolExecutor(0, 1, 60L, TimeUnit.SECONDS, new LinkedBlockingQueue<Runnable>()); private final Callable<Void> cleanupCallable = new Callable<Void>() { public Void call() throws Exception { synchronized (DiskLruCache.this) { if (journalWriter == null) { return null; // Closed. 
} trimToSize(); if (journalRebuildRequired()) { rebuildJournal(); redundantOpCount = 0; } } return null; } }; private DiskLruCache(File directory, int appVersion, int valueCount, long maxSize) { this.directory = directory; this.appVersion = appVersion; this.journalFile = new File(directory, JOURNAL_FILE); this.journalFileTmp = new File(directory, JOURNAL_FILE_TEMP); this.journalFileBackup = new File(directory, JOURNAL_FILE_BACKUP); this.valueCount = valueCount; this.maxSize = maxSize; } /** * Opens the cache in {@code directory}, creating a cache if none exists * there. * * @param directory a writable directory * @param valueCount the number of values per cache entry. Must be positive. * @param maxSize the maximum number of bytes this cache should use to store * @throws java.io.IOException if reading or writing the cache directory fails */ public static DiskLruCache open(File directory, int appVersion, int valueCount, long maxSize) throws IOException { if (maxSize <= 0) { throw new IllegalArgumentException("maxSize <= 0"); } if (valueCount <= 0) { throw new IllegalArgumentException("valueCount <= 0"); } // If a bkp file exists, use it instead. File backupFile = new File(directory, JOURNAL_FILE_BACKUP); if (backupFile.exists()) { File journalFile = new File(directory, JOURNAL_FILE); // If journal file also exists just delete backup file. if (journalFile.exists()) { backupFile.delete(); } else { renameTo(backupFile, journalFile, false); } } // Prefer to pick up where we left off. DiskLruCache cache = new DiskLruCache(directory, appVersion, valueCount, maxSize); if (cache.journalFile.exists()) { try { cache.readJournal(); cache.processJournal(); return cache; } catch (IOException journalIsCorrupt) { System.out .println("DiskLruCache " + directory + " is corrupt: " + journalIsCorrupt.getMessage() + ", removing"); cache.delete(); } } // Create a new empty cache. 
directory.mkdirs(); cache = new DiskLruCache(directory, appVersion, valueCount, maxSize); cache.rebuildJournal(); return cache; } private void readJournal() throws IOException { StrictLineReader reader = new StrictLineReader(new FileInputStream(journalFile), Util.US_ASCII); try { String magic = reader.readLine(); String version = reader.readLine(); String appVersionString = reader.readLine(); String valueCountString = reader.readLine(); String blank = reader.readLine(); if (!MAGIC.equals(magic) || !VERSION_1.equals(version) || !Integer.toString(appVersion).equals(appVersionString) || !Integer.toString(valueCount).equals(valueCountString) || !"".equals(blank)) { throw new IOException("unexpected journal header: [" + magic + ", " + version + ", " + valueCountString + ", " + blank + "]"); } int lineCount = 0; while (true) { try { readJournalLine(reader.readLine()); lineCount++; } catch (EOFException endOfJournal) { break; } } redundantOpCount = lineCount - lruEntries.size(); // If we ended on a truncated line, rebuild the journal before appending to it. 
if (reader.hasUnterminatedLine()) { rebuildJournal(); } else { journalWriter = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(journalFile, true), Util.US_ASCII)); } } finally { Util.closeQuietly(reader); } } private void readJournalLine(String line) throws IOException { int firstSpace = line.indexOf(' '); if (firstSpace == -1) { throw new IOException("unexpected journal line: " + line); } int keyBegin = firstSpace + 1; int secondSpace = line.indexOf(' ', keyBegin); final String key; if (secondSpace == -1) { key = line.substring(keyBegin); if (firstSpace == REMOVE.length() && line.startsWith(REMOVE)) { lruEntries.remove(key); return; } } else { key = line.substring(keyBegin, secondSpace); } Entry entry = lruEntries.get(key); if (entry == null) { entry = new Entry(key); lruEntries.put(key, entry); } if (secondSpace != -1 && firstSpace == CLEAN.length() && line.startsWith(CLEAN)) { String[] parts = line.substring(secondSpace + 1).split(" "); entry.readable = true; entry.currentEditor = null; entry.setLengths(parts); } else if (secondSpace == -1 && firstSpace == DIRTY.length() && line.startsWith(DIRTY)) { entry.currentEditor = new Editor(entry); } else if (secondSpace == -1 && firstSpace == READ.length() && line.startsWith(READ)) { // This work was already done by calling lruEntries.get(). } else { throw new IOException("unexpected journal line: " + line); } } /** * Computes the initial size and collects garbage as a part of opening the * cache. Dirty entries are assumed to be inconsistent and will be deleted. 
*/ private void processJournal() throws IOException { deleteIfExists(journalFileTmp); for (Iterator<Entry> i = lruEntries.values().iterator(); i.hasNext(); ) { Entry entry = i.next(); if (entry.currentEditor == null) { for (int t = 0; t < valueCount; t++) { size += entry.lengths[t]; } } else { entry.currentEditor = null; for (int t = 0; t < valueCount; t++) { deleteIfExists(entry.getCleanFile(t)); deleteIfExists(entry.getDirtyFile(t)); } i.remove(); } } } /** * Creates a new journal that omits redundant information. This replaces the * current journal if it exists. */ private synchronized void rebuildJournal() throws IOException { if (journalWriter != null) { journalWriter.close(); } Writer writer = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(journalFileTmp), Util.US_ASCII)); try { writer.write(MAGIC); writer.write("\n"); writer.write(VERSION_1); writer.write("\n"); writer.write(Integer.toString(appVersion)); writer.write("\n"); writer.write(Integer.toString(valueCount)); writer.write("\n"); writer.write("\n"); for (Entry entry : lruEntries.values()) { if (entry.currentEditor != null) { writer.write(DIRTY + ' ' + entry.key + '\n'); } else { writer.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n'); } } } finally { writer.close(); } if (journalFile.exists()) { renameTo(journalFile, journalFileBackup, true); } renameTo(journalFileTmp, journalFile, false); journalFileBackup.delete(); journalWriter = new BufferedWriter( new OutputStreamWriter(new FileOutputStream(journalFile, true), Util.US_ASCII)); } private static void deleteIfExists(File file) throws IOException { if (file.exists() && !file.delete()) { throw new IOException(); } } private static void renameTo(File from, File to, boolean deleteDestination) throws IOException { if (deleteDestination) { deleteIfExists(to); } if (!from.renameTo(to)) { throw new IOException(); } } /** * Returns a snapshot of the entry named {@code key}, or null if it doesn't * exist is not currently readable. 
If a value is returned, it is moved to * the head of the LRU queue. */ public synchronized Snapshot get(String key) throws IOException { checkNotClosed(); validateKey(key); Entry entry = lruEntries.get(key); if (entry == null) { return null; } if (!entry.readable) { return null; } // Open all streams eagerly to guarantee that we see a single published // snapshot. If we opened streams lazily then the streams could come // from different edits. InputStream[] ins = new InputStream[valueCount]; try { for (int i = 0; i < valueCount; i++) { ins[i] = new FileInputStream(entry.getCleanFile(i)); } } catch (FileNotFoundException e) { // A file must have been deleted manually! for (int i = 0; i < valueCount; i++) { if (ins[i] != null) { Util.closeQuietly(ins[i]); } else { break; } } return null; } redundantOpCount++; journalWriter.append(READ + ' ' + key + '\n'); if (journalRebuildRequired()) { executorService.submit(cleanupCallable); } return new Snapshot(key, entry.sequenceNumber, ins, entry.lengths); } /** * Returns an editor for the entry named {@code key}, or null if another * edit is in progress. */ public Editor edit(String key) throws IOException { return edit(key, ANY_SEQUENCE_NUMBER); } private synchronized Editor edit(String key, long expectedSequenceNumber) throws IOException { checkNotClosed(); validateKey(key); Entry entry = lruEntries.get(key); if (expectedSequenceNumber != ANY_SEQUENCE_NUMBER && (entry == null || entry.sequenceNumber != expectedSequenceNumber)) { return null; // Snapshot is stale. } if (entry == null) { entry = new Entry(key); lruEntries.put(key, entry); } else if (entry.currentEditor != null) { return null; // Another edit is in progress. } Editor editor = new Editor(entry); entry.currentEditor = editor; // Flush the journal before creating files to prevent file leaks. journalWriter.write(DIRTY + ' ' + key + '\n'); journalWriter.flush(); return editor; } /** Returns the directory where this cache stores its data. 
*/ public File getDirectory() { return directory; } /** * Returns the maximum number of bytes that this cache should use to store * its data. */ public synchronized long getMaxSize() { return maxSize; } /** * Changes the maximum number of bytes the cache can store and queues a job * to trim the existing store, if necessary. */ public synchronized void setMaxSize(long maxSize) { this.maxSize = maxSize; executorService.submit(cleanupCallable); } /** * Returns the number of bytes currently being used to store the values in * this cache. This may be greater than the max size if a background * deletion is pending. */ public synchronized long size() { return size; } private synchronized void completeEdit(Editor editor, boolean success) throws IOException { Entry entry = editor.entry; if (entry.currentEditor != editor) { throw new IllegalStateException(); } // If this edit is creating the entry for the first time, every index must have a value. if (success && !entry.readable) { for (int i = 0; i < valueCount; i++) { if (!editor.written[i]) { editor.abort(); throw new IllegalStateException("Newly created entry didn't create value for index " + i); } if (!entry.getDirtyFile(i).exists()) { editor.abort(); return; } } } for (int i = 0; i < valueCount; i++) { File dirty = entry.getDirtyFile(i); if (success) { if (dirty.exists()) { File clean = entry.getCleanFile(i); dirty.renameTo(clean); long oldLength = entry.lengths[i]; long newLength = clean.length(); entry.lengths[i] = newLength; size = size - oldLength + newLength; } } else { deleteIfExists(dirty); } } redundantOpCount++; entry.currentEditor = null; if (entry.readable | success) { entry.readable = true; journalWriter.write(CLEAN + ' ' + entry.key + entry.getLengths() + '\n'); if (success) { entry.sequenceNumber = nextSequenceNumber++; } } else { lruEntries.remove(entry.key); journalWriter.write(REMOVE + ' ' + entry.key + '\n'); } journalWriter.flush(); if (size > maxSize || journalRebuildRequired()) { 
executorService.submit(cleanupCallable); } } /** * We only rebuild the journal when it will halve the size of the journal * and eliminate at least 2000 ops. */ private boolean journalRebuildRequired() { final int redundantOpCompactThreshold = 2000; return redundantOpCount >= redundantOpCompactThreshold // && redundantOpCount >= lruEntries.size(); } /** * Drops the entry for {@code key} if it exists and can be removed. Entries * actively being edited cannot be removed. * * @return true if an entry was removed. */ public synchronized boolean remove(String key) throws IOException { checkNotClosed(); validateKey(key); Entry entry = lruEntries.get(key); if (entry == null || entry.currentEditor != null) { return false; } for (int i = 0; i < valueCount; i++) { File file = entry.getCleanFile(i); if (file.exists() && !file.delete()) { throw new IOException("failed to delete " + file); } size -= entry.lengths[i]; entry.lengths[i] = 0; } redundantOpCount++; journalWriter.append(REMOVE + ' ' + key + '\n'); lruEntries.remove(key); if (journalRebuildRequired()) { executorService.submit(cleanupCallable); } return true; } /** Returns true if this cache has been closed. */ public synchronized boolean isClosed() { return journalWriter == null; } private void checkNotClosed() { if (journalWriter == null) { throw new IllegalStateException("cache is closed"); } } /** Force buffered operations to the filesystem. */ public synchronized void flush() throws IOException { checkNotClosed(); trimToSize(); journalWriter.flush(); } /** Closes this cache. Stored values will remain on the filesystem. */ public synchronized void close() throws IOException { if (journalWriter == null) { return; // Already closed. 
} for (Entry entry : new ArrayList<Entry>(lruEntries.values())) { if (entry.currentEditor != null) { entry.currentEditor.abort(); } } trimToSize(); journalWriter.close(); journalWriter = null; } private void trimToSize() throws IOException { while (size > maxSize) { Map.Entry<String, Entry> toEvict = lruEntries.entrySet().iterator().next(); remove(toEvict.getKey()); } } /** * Closes the cache and deletes all of its stored values. This will delete * all files in the cache directory including files that weren't created by * the cache. */ public void delete() throws IOException { close(); Util.deleteContents(directory); } private void validateKey(String key) { Matcher matcher = LEGAL_KEY_PATTERN.matcher(key); if (!matcher.matches()) { throw new IllegalArgumentException("keys must match regex " + STRING_KEY_PATTERN + ": \"" + key + "\""); } } private static String inputStreamToString(InputStream in) throws IOException { return Util.readFully(new InputStreamReader(in, Util.UTF_8)); } /** A snapshot of the values for an entry. */ public final class Snapshot implements Closeable { private final String key; private final long sequenceNumber; private final InputStream[] ins; private final long[] lengths; private Snapshot(String key, long sequenceNumber, InputStream[] ins, long[] lengths) { this.key = key; this.sequenceNumber = sequenceNumber; this.ins = ins; this.lengths = lengths; } /** * Returns an editor for this snapshot's entry, or null if either the * entry has changed since this snapshot was created or if another edit * is in progress. */ public Editor edit() throws IOException { return DiskLruCache.this.edit(key, sequenceNumber); } /** Returns the unbuffered stream with the value for {@code index}. */ public InputStream getInputStream(int index) { return ins[index]; } /** Returns the string value for {@code index}. 
*/ public String getString(int index) throws IOException { return inputStreamToString(getInputStream(index)); } /** Returns the byte length of the value for {@code index}. */ public long getLength(int index) { return lengths[index]; } public void close() { for (InputStream in : ins) { Util.closeQuietly(in); } } } private static final OutputStream NULL_OUTPUT_STREAM = new OutputStream() { @Override public void write(int b) throws IOException { // Eat all writes silently. Nom nom. } }; /** Edits the values for an entry. */ public final class Editor { private final Entry entry; private final boolean[] written; private boolean hasErrors; private boolean committed; private Editor(Entry entry) { this.entry = entry; this.written = (entry.readable) ? null : new boolean[valueCount]; } /** * Returns an unbuffered input stream to read the last committed value, * or null if no value has been committed. */ public InputStream newInputStream(int index) throws IOException { synchronized (DiskLruCache.this) { if (entry.currentEditor != this) { throw new IllegalStateException(); } if (!entry.readable) { return null; } try { return new FileInputStream(entry.getCleanFile(index)); } catch (FileNotFoundException e) { return null; } } } /** * Returns the last committed value as a string, or null if no value * has been committed. */ public String getString(int index) throws IOException { InputStream in = newInputStream(index); return in != null ? inputStreamToString(in) : null; } /** * Returns a new unbuffered output stream to write the value at * {@code index}. If the underlying output stream encounters errors * when writing to the filesystem, this edit will be aborted when * {@link #commit} is called. The returned output stream does not throw * IOExceptions. 
*/ public OutputStream newOutputStream(int index) throws IOException { if (index < 0 || index >= valueCount) { throw new IllegalArgumentException("Expected index " + index + " to " + "be greater than 0 and less than the maximum value count " + "of " + valueCount); } synchronized (DiskLruCache.this) { if (entry.currentEditor != this) { throw new IllegalStateException(); } if (!entry.readable) { written[index] = true; } File dirtyFile = entry.getDirtyFile(index); FileOutputStream outputStream; try { outputStream = new FileOutputStream(dirtyFile); } catch (FileNotFoundException e) { // Attempt to recreate the cache directory. directory.mkdirs(); try { outputStream = new FileOutputStream(dirtyFile); } catch (FileNotFoundException e2) { // We are unable to recover. Silently eat the writes. return NULL_OUTPUT_STREAM; } } return new FaultHidingOutputStream(outputStream); } } /** Sets the value at {@code index} to {@code value}. */ public void set(int index, String value) throws IOException { Writer writer = null; try { writer = new OutputStreamWriter(newOutputStream(index), Util.UTF_8); writer.write(value); } finally { Util.closeQuietly(writer); } } /** * Commits this edit so it is visible to readers. This releases the * edit lock so another edit may be started on the same key. */ public void commit() throws IOException { if (hasErrors) { completeEdit(this, false); remove(entry.key); // The previous entry is stale. } else { completeEdit(this, true); } committed = true; } /** * Aborts this edit. This releases the edit lock so another edit may be * started on the same key. 
*/ public void abort() throws IOException { completeEdit(this, false); } public void abortUnlessCommitted() { if (!committed) { try { abort(); } catch (IOException ignored) { } } } private class FaultHidingOutputStream extends FilterOutputStream { private FaultHidingOutputStream(OutputStream out) { super(out); } @Override public void write(int oneByte) { try { out.write(oneByte); } catch (IOException e) { hasErrors = true; } } @Override public void write(byte[] buffer, int offset, int length) { try { out.write(buffer, offset, length); } catch (IOException e) { hasErrors = true; } } @Override public void close() { try { out.close(); } catch (IOException e) { hasErrors = true; } } @Override public void flush() { try { out.flush(); } catch (IOException e) { hasErrors = true; } } } } private final class Entry { private final String key; /** Lengths of this entry's files. */ private final long[] lengths; /** True if this entry has ever been published. */ private boolean readable; /** The ongoing edit or null if this entry is not being edited. */ private Editor currentEditor; /** The sequence number of the most recently committed edit to this entry. */ private long sequenceNumber; private Entry(String key) { this.key = key; this.lengths = new long[valueCount]; } public String getLengths() throws IOException { StringBuilder result = new StringBuilder(); for (long size : lengths) { result.append(' ').append(size); } return result.toString(); } /** Set lengths using decimal numbers like "10123". 
*/ private void setLengths(String[] strings) throws IOException { if (strings.length != valueCount) { throw invalidLengths(strings); } try { for (int i = 0; i < strings.length; i++) { lengths[i] = Long.parseLong(strings[i]); } } catch (NumberFormatException e) { throw invalidLengths(strings); } } private IOException invalidLengths(String[] strings) throws IOException { throw new IOException("unexpected journal line: " + java.util.Arrays.toString(strings)); } public File getCleanFile(int i) { return new File(directory, key + "." + i); } public File getDirtyFile(int i) { return new File(directory, key + "." + i + ".tmp"); } } }
/* * [New BSD License] * Copyright (c) 2011-2012, Brackit Project Team <info@brackit.org> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Brackit Project Team nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.brackit.server.xquery.function.xmark; import java.text.SimpleDateFormat; import java.util.Random; import org.brackit.server.metadata.DBCollection; import org.brackit.server.metadata.TXQueryContext; import org.brackit.server.xquery.function.FunUtil; import org.brackit.xquery.QueryContext; import org.brackit.xquery.QueryException; import org.brackit.xquery.atomic.QNm; import org.brackit.xquery.atomic.Str; import org.brackit.xquery.function.AbstractFunction; import org.brackit.xquery.module.StaticContext; import org.brackit.xquery.util.annotation.FunctionAnnotation; import org.brackit.xquery.xdm.DocumentException; import org.brackit.xquery.xdm.Sequence; import org.brackit.xquery.xdm.Signature; import org.brackit.xquery.xdm.type.AtomicType; import org.brackit.xquery.xdm.type.Cardinality; import org.brackit.xquery.xdm.type.SequenceType; /** * @author Sebastian Baechle */ @FunctionAnnotation(description = "Performs a randomized 'business operation' on an xmark document:\n" + "{PLACEBID REGISTER | READSELLERINFO}", parameters = { "$operation", "$document", "$optimized" }) public class XMark extends AbstractFunction { public static final QNm DEFAULT_NAME = new QNm(XMarkFun.XMARK_NSURI, XMarkFun.XMARK_PREFIX, "workload"); protected static final SimpleDateFormat dateFormat = new SimpleDateFormat( "MM/dd/yyyy"); protected static final SimpleDateFormat timeFormat = new SimpleDateFormat( "HH:mm:ss"); protected static Random elementRandom = new Random(); protected static Random amountRandom = new Random(); public XMark() { super(DEFAULT_NAME, new Signature(new SequenceType(AtomicType.STR, Cardinality.One), new SequenceType(AtomicType.STR, Cardinality.One), new SequenceType(AtomicType.STR, Cardinality.One), new SequenceType(AtomicType.BOOL, Cardinality.ZeroOrOne)), true); } public Sequence execute(StaticContext sctx, QueryContext ctx, Sequence[] args) throws QueryException { DBCollection<?> coll; int procedureCode; String procedure; boolean optimized; 
TXQueryContext txctx = (TXQueryContext) ctx; coll = (DBCollection<?>) txctx.getStore().lookup( FunUtil.getString(args, 1, "$document", null, null, true)); procedure = FunUtil.getString(args, 0, "$procedure", null, null, true); optimized = FunUtil.getBoolean(args, 2, "$optimized", false, false); procedure = procedure.toUpperCase(); // System.out.println(String.format("[%s] Start %s %s", // Thread.currentThread().getName(), // context.getTransaction().toShortString(), procedure)); if (procedure.equals("PLACEBID")) procedureCode = 0; else if (procedure.equals("REGISTER")) procedureCode = 1; else if (procedure.equals("READSELLERINFO")) procedureCode = 2; else if (procedure.equals("CHANGEUSERINFO")) procedureCode = 3; else if (procedure.equals("CHECKMAILS")) procedureCode = 4; else if (procedure.equals("READITEM")) procedureCode = 5; else if (procedure.equals("DELETEMAIL")) procedureCode = 6; else if (procedure.equals("ADDMAIL")) procedureCode = 7; else if (procedure.equals("ADDITEM")) procedureCode = 8; else if (procedure.equals("UPDATEITEMDESCRIPTION")) procedureCode = 9; else if (procedure.equals("CHANGESELLER")) procedureCode = 10; else if (procedure.equals("LOOKUPSELLER")) procedureCode = 11; else if (procedure.equals("UPDATEITEM")) procedureCode = 12; else throw new QueryException(XMarkFun.ERR_UNKNOWN_OPERATION, "Unknown operation: %s", procedure); try { Sequence result = null; XMarkUtil util = new XMarkUtil(); // System.out.println(String.format("[%s]: running op %s for %s", // Thread.currentThread().getName(), procedureCode, // context.getTransaction().toString())); try { switch (procedureCode) { case 0: result = util.placeBid(txctx, coll); break; case 1: result = util.register(txctx, coll, optimized); break; case 2: result = util.readSellerInfo(txctx, coll); break; case 3: result = util.changeUserData(txctx, coll); break; case 4: result = util.checkMails(txctx, coll); break; case 5: result = util.readItem(txctx, coll); break; case 6: result = 
util.deleteMail(txctx, coll); break; case 7: result = util.addMail(txctx, coll); break; case 8: result = util.addItem(txctx, coll, optimized); break; case 9: result = util.updateItemDescription(txctx, coll); break; case 10: result = util.changeSeller(txctx, coll, optimized); break; case 11: result = util.lookupSeller(txctx, coll); break; case 12: result = util.updateItem(txctx, coll); break; default: throw new QueryException(XMarkFun.ERR_UNKNOWN_OPERATION, "Unknown operation code: %s", procedureCode); } } catch (DocumentException e) { result = new Str(e.getMessage()); } catch (Throwable e) { e.printStackTrace(); result = new Str(e.getMessage()); } finally { // Collection<LockServiceStatistics> statisticsList = // ctx.getTX().getLockCB().getStatistics(); // // for (LockServiceStatistics stat : statisticsList) // { // result.addInfoObject(stat); // //System.out.println(stat.getName() + " -> " + // stat.getRequestCount()) ; // } } // System.out.println(String.format("[%s]: finishing op %s for %s", // Thread.currentThread().getName(), procedureCode, // context.getTransaction().toString())); // System.out.println(String.format("[%s] End %s %s", // Thread.currentThread().getName(), // context.getTransaction().toShortString(), procedure)); return result; } finally { // System.out.println(String.format("[%s]: failed op %s for %s", // Thread.currentThread().getName(), procedureCode, // context.getTransaction().toString())); } } }