repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
madanadit/alluxio
core/transport/src/main/java/alluxio/grpc/PAclEntryOrBuilder.java
1364
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: grpc/file_system_master.proto package alluxio.grpc; public interface PAclEntryOrBuilder extends // @@protoc_insertion_point(interface_extends:alluxio.grpc.file.PAclEntry) com.google.protobuf.MessageOrBuilder { /** * <code>optional .alluxio.grpc.file.PAclEntryType type = 1;</code> */ boolean hasType(); /** * <code>optional .alluxio.grpc.file.PAclEntryType type = 1;</code> */ alluxio.grpc.PAclEntryType getType(); /** * <code>optional string subject = 2;</code> */ boolean hasSubject(); /** * <code>optional string subject = 2;</code> */ java.lang.String getSubject(); /** * <code>optional string subject = 2;</code> */ com.google.protobuf.ByteString getSubjectBytes(); /** * <code>repeated .alluxio.grpc.file.PAclAction actions = 3;</code> */ java.util.List<alluxio.grpc.PAclAction> getActionsList(); /** * <code>repeated .alluxio.grpc.file.PAclAction actions = 3;</code> */ int getActionsCount(); /** * <code>repeated .alluxio.grpc.file.PAclAction actions = 3;</code> */ alluxio.grpc.PAclAction getActions(int index); /** * <code>optional bool isDefault = 4;</code> */ boolean hasIsDefault(); /** * <code>optional bool isDefault = 4;</code> */ boolean getIsDefault(); }
apache-2.0
Ragnarokma/metropolis
src/main/java/fundamental/games/metropolis/connection/global/MessagesQueue.java
780
package fundamental.games.metropolis.connection.global; import java.util.LinkedList; public class MessagesQueue<T> { private LinkedList<T> messages = new LinkedList<>(); private Object queueLock = new Object(); //******************************** public MessagesQueue() { } //********************************* public void addMessage(T message) { synchronized (queueLock) { messages.add(message); } } //********************************* public T popMessage() { T message; synchronized (queueLock) { if(messages.isEmpty()) message = null; else message = messages.remove(); } return message; } }
apache-2.0
bshp/midPoint
repo/repo-sql-impl/src/main/java/com/evolveum/midpoint/repo/sql/data/audit/RAuditPropertyValue.java
4382
/*
 * Copyright (c) 2010-2017 Evolveum and contributors
 *
 * This work is dual-licensed under the Apache License 2.0
 * and European Union Public License. See LICENSE file for details.
 */
package com.evolveum.midpoint.repo.sql.data.audit;

import com.evolveum.midpoint.audit.api.AuditService;
import com.evolveum.midpoint.repo.sql.data.InsertQueryBuilder;
import com.evolveum.midpoint.repo.sql.data.SingleSqlQuery;
import com.evolveum.midpoint.repo.sql.helpers.modify.Ignore;
import com.evolveum.midpoint.repo.sql.util.EntityState;

// NOTE(review): ArrayList/Collection/List appear unused in this class — confirm before removing.
import javax.persistence.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;

import static com.evolveum.midpoint.repo.sql.data.audit.RAuditPropertyValue.COLUMN_RECORD_ID;
import static com.evolveum.midpoint.repo.sql.data.audit.RAuditPropertyValue.TABLE_NAME;

/**
 * JPA entity holding one name/value property attached to an audit event record
 * (table {@code m_audit_prop_value}). Mapping is property (getter) based, so the
 * JPA annotations live on the getters below.
 */
@Ignore
@Entity
@Table(name = TABLE_NAME, indexes = {
        @Index(name = "iAuditPropValRecordId", columnList = COLUMN_RECORD_ID)})
public class RAuditPropertyValue implements EntityState {

    public static final String TABLE_NAME = "m_audit_prop_value";
    public static final String COLUMN_RECORD_ID = "record_id";
    public static final String NAME_COLUMN_NAME = "name";
    public static final String VALUE_COLUMN_NAME = "value";

    // EntityState transient flag (not persisted).
    private Boolean trans;

    private long id;
    // Owning audit record; recordId mirrors its id for direct SQL inserts.
    private RAuditEventRecord record;
    private Long recordId;
    private String name;
    private String value;

    @Transient
    @Override
    public Boolean isTransient() {
        return trans;
    }

    @Override
    public void setTransient(Boolean trans) {
        this.trans = trans;
    }

    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    //@ForeignKey(name = "none")
    @MapsId("record")
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumns({
            @JoinColumn(name = COLUMN_RECORD_ID, referencedColumnName = "id")
    })
    public RAuditEventRecord getRecord() {
        return record;
    }

    @Column(name = COLUMN_RECORD_ID)
    public Long getRecordId() {
        // Lazily resolve the FK value from the associated record if it was not
        // captured at setRecord() time (e.g. record id assigned after persist).
        if (recordId == null && record != null) {
            recordId = record.getId();
        }
        return recordId;
    }

    public void setRecord(RAuditEventRecord record) {
        // Only copy the id when the record already has one; 0 means "not yet persisted".
        if (record.getId() != 0) {
            this.recordId = record.getId();
        }
        this.record = record;
    }

    public void setRecordId(Long recordId) {
        this.recordId = recordId;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    @Column(length = AuditService.MAX_PROPERTY_SIZE)
    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    /** Builds an entity instance linked to {@code record} (used by the ORM path). */
    public static RAuditPropertyValue toRepo(RAuditEventRecord record, String name, String value) {
        RAuditPropertyValue property = new RAuditPropertyValue();
        property.setRecord(record);
        property.setName(name);
        property.setValue(value);
        return property;
    }

    /** Builds a raw INSERT statement for the direct-SQL audit path (no entity involved). */
    public static SingleSqlQuery toRepo(Long recordId, String name, String value) {
        InsertQueryBuilder queryBuilder = new InsertQueryBuilder(TABLE_NAME);
        queryBuilder.addParameter(COLUMN_RECORD_ID, recordId);
        queryBuilder.addParameter(NAME_COLUMN_NAME, name);
        queryBuilder.addParameter(VALUE_COLUMN_NAME, value);
        return queryBuilder.build();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof RAuditPropertyValue)) return false;
        RAuditPropertyValue that = (RAuditPropertyValue) o;
        return id == that.id
                && Objects.equals(recordId, that.recordId)
                && Objects.equals(name, that.name)
                && Objects.equals(value, that.value);
    }

    @Override
    public int hashCode() {
        // Deliberately omits 'value' (equals includes it); the contract still holds
        // because equal objects necessarily agree on id/recordId/name.
        return Objects.hash(id, recordId, name);
    }

    @Override
    public String toString() {
        return "RAuditPropertyValue{" + "id=" + id
                + ", recordId=" + recordId
                + ", name='" + name + '\''
                + ", value='" + value + '\''
                + '}';
    }
}
apache-2.0
lumeng689/luapp
practise/src/main/java/org/luapp/practise/utils/SplitText.java
1358
package org.luapp.practise.utils;

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
 * Splits a large text file into {@code q} roughly equal-sized parts named
 * {@code part_0}, {@code part_1}, ... in the same directory, preserving line
 * boundaries (a line is never split across parts).
 *
 * Created by lum on 2015/6/18.
 */
public class SplitText {

    /** Number of lines to buffer before each batched write. */
    private static final int FLUSH_THRESHOLD = 100;

    public static void main(String[] args) throws IOException {
        String filePath = "D:\\tmp\\catalina.out";
        int q = 10;
        File f = new File(filePath);
        long perSize = f.length() / q;

        LineIterator li = FileUtils.lineIterator(f);
        int i = 0;
        File curFile = new File(f.getParent(), "part_" + i++);
        FileOutputStream curOs = new FileOutputStream(curFile);
        List<String> lines = new ArrayList<String>();
        try {
            while (li.hasNext()) {
                lines.add(li.nextLine());
                if (lines.size() >= FLUSH_THRESHOLD) {
                    IOUtils.writeLines(lines, null, curOs, "UTF-8");
                    lines.clear();
                }
                if (curFile.length() > perSize) {
                    // BUG FIX: flush any buffered lines into the *current* part
                    // before rotating; previously they were carried over and
                    // written to the next part instead.
                    IOUtils.writeLines(lines, null, curOs, "UTF-8");
                    lines.clear();
                    IOUtils.closeQuietly(curOs);
                    curFile = new File(f.getParent(), "part_" + i++);
                    curOs = new FileOutputStream(curFile);
                }
            }
            // BUG FIX: the original silently dropped up to FLUSH_THRESHOLD trailing
            // lines still sitting in the buffer when the iterator was exhausted.
            IOUtils.writeLines(lines, null, curOs, "UTF-8");
        } finally {
            // Always release the output stream and the iterator's underlying reader,
            // even if an I/O error occurs mid-split (the iterator was never closed before).
            IOUtils.closeQuietly(curOs);
            LineIterator.closeQuietly(li);
        }
    }
}
apache-2.0
consulo/consulo-python
python-impl/src/main/java/com/jetbrains/python/codeInsight/override/PyMethodMember.java
2984
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.codeInsight.override; import java.util.List; import javax.swing.JTree; import com.intellij.codeInsight.generation.ClassMember; import com.intellij.codeInsight.generation.MemberChooserObject; import com.intellij.codeInsight.generation.PsiElementMemberChooserObject; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.ui.SimpleColoredComponent; import com.jetbrains.python.PyNames; import com.jetbrains.python.psi.PyClass; import com.jetbrains.python.psi.PyElement; import com.jetbrains.python.psi.PyFunction; import com.jetbrains.python.psi.PyParameter; import com.jetbrains.python.psi.PyUtil; import com.jetbrains.python.psi.types.TypeEvalContext; import consulo.awt.TargetAWT; import consulo.ide.IconDescriptorUpdaters; /** * @author Alexey.Ivanov */ public class PyMethodMember extends PsiElementMemberChooserObject implements ClassMember { private final String myFullName; private static String buildNameFor(final PyElement element) { if(element instanceof PyFunction) { final TypeEvalContext context = TypeEvalContext.userInitiated(element.getProject(), element.getContainingFile()); final List<PyParameter> parameters = PyUtil.getParameters((PyFunction) element, context); return element.getName() + "(" + StringUtil.join(parameters, parameter -> PyUtil.getReadableRepr(parameter, false), ", ") + ")"; } if(element 
instanceof PyClass && PyNames.FAKE_OLD_BASE.equals(element.getName())) { return "<old-style class>"; } return element.getName(); } public PyMethodMember(final PyElement element) { super(element, trimUnderscores(buildNameFor(element)), IconDescriptorUpdaters.getIcon(element, 0)); myFullName = buildNameFor(element); } public static String trimUnderscores(String s) { return StringUtil.trimStart(StringUtil.trimStart(s, "_"), "_"); } public MemberChooserObject getParentNodeDelegate() { final PyElement element = (PyElement) getPsiElement(); final PyClass parent = PsiTreeUtil.getParentOfType(element, PyClass.class, false); assert (parent != null); return new PyMethodMember(parent); } @Override public void renderTreeNode(SimpleColoredComponent component, JTree tree) { component.append(myFullName, getTextAttributes(tree)); component.setIcon(IconDescriptorUpdaters.getIcon(getPsiElement(), 0)); } }
apache-2.0
liqiang199696/PetrolStation
src/com/juhe/petrolstation/activity/StationListActivity.java
1943
package com.juhe.petrolstation.activity; import java.util.List; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.ImageView; import android.widget.ListView; import com.juhe.petrolstation.R; import com.juhe.petrolstation.adapter.StationListAdapter; import com.juhe.petrolstation.bean.Station; public class StationListActivity extends Activity { private Context mContext; private ListView lv_station; private ImageView iv_back; @Override protected void onCreate(Bundle savedInstanceState) { // TODO Auto-generated method stub super.onCreate(savedInstanceState); setContentView(R.layout.activity_list); mContext = this; initView(); } private void initView() { iv_back = (ImageView) findViewById(R.id.iv_back); iv_back.setVisibility(View.VISIBLE); iv_back.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { // TODO Auto-generated method stub finish(); } }); lv_station = (ListView) findViewById(R.id.lv_station); final List<Station> list = getIntent().getParcelableArrayListExtra("list"); StationListAdapter adapter = new StationListAdapter(mContext, list); lv_station.setAdapter(adapter); lv_station.setOnItemClickListener(new OnItemClickListener() { @Override public void onItemClick(AdapterView<?> arg0, View arg1, int position, long arg3) { // TODO Auto-generated method stub Intent intent = new Intent(mContext,StationInfoActivity.class); intent.putExtra("s", list.get(position)); intent.putExtra("locLat", getIntent().getDoubleExtra("locLat", 0)); intent.putExtra("locLon", getIntent().getDoubleExtra("locLon", 0)); startActivity(intent); } }); } }
apache-2.0
modaclouds/modaclouds-sla-mediator
src/main/java/eu/atos/sla/client/ResourceCollection.java
10880
/**
 * Copyright 2014 Atos
 * Contact: Atos <roman.sosa@atos.net>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package eu.atos.sla.client;

import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.List;

import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.UriBuilder;

import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.core.util.MultivaluedMapImpl;
import com.sun.jersey.core.util.UnmodifiableMultivaluedMap;

import eu.atos.sla.client.RestClient.ExceptionFactory;
import eu.atos.sla.client.RestClient.RequestResponse;

/**
 * Base class that provides basic operations to all resources.
 *
 * To use this class, extend and implement the additional (optional overriding existing) operations.
 *
 * <br/>Example:
 * <pre><code>
 * @Path("agreements")
 * public static class AgreementResource&lt;E&gt; extends ResourceCollection&lt;E&gt; {
 *   public AgreementResource(final Class&lt;E&gt; clazz, String baseUrl, MediaType preferredType) {
 *     super(AgreementResource.class, clazz, baseUrl, preferredType);
 *   }
 *
 *   @Path("active")
 *   public Agreement getActive() {}
 *
 *   @Path("{uuid}/status")
 *   public Status getAgreementStatus(String uuid) {}
 * }
 * </code></pre>
 *
 * @param <E> entity class that this class is going to handle (i.e., create, retrieve...)
 *
 * @author rsosa
 */
public abstract class ResourceCollection<E> {

    // NOTE(review): resourceUrl is never read or assigned in this class — looks dead; confirm before removing.
    private String resourceUrl;
    // Low-level HTTP client bound to the collection's base URL.
    private final RestClient client;
    // Entity class used to deserialize single-entity responses.
    private Class<E> clazz;
    @SuppressWarnings("rawtypes")
    private final ResourceHelper<? extends ResourceCollection> helper;
    private final MediaType preferredType;
    // Generic list type token so Jersey can deserialize List<E> responses.
    private final GenericType<List<E>> listType;
    // Content-type/Accept headers sent on every request; immutable.
    protected final MultivaluedMap<String, String> defaultHeaders;

    /*
     * Local class trick to get current method from:
     * http://stackoverflow.com/a/15377634 (Getting the name of the current executing method)
     */

    /**
     * @param collectionClass class extending {@link ResourceCollection.ResourceHelper} (see example in class header)
     * @param entityClass class that acts as resource (see example in class header); if a jaxb annotated class,
     *        the class will be de/serialized on each request/response.
     * @param baseUrl base url of the resource collection.
     * @param preferredType preferred type to use; it will be used as content-type and accept headers.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public ResourceCollection(
            final Class<? extends ResourceCollection> collectionClass,
            final Class<E> entityClass,
            String baseUrl,
            MediaType preferredType) {
        this.clazz = entityClass;
        this.client = new RestClient(baseUrl);
        this.helper = new ResourceHelper<ResourceCollection>((Class<ResourceCollection>) collectionClass);
        this.preferredType = preferredType;
        this.defaultHeaders = buildHeaders(preferredType);
        /*
         * http://jersey.576304.n2.nabble.com/Using-GenericType-in-a-generic-method-td6650758.html
         */
        this.listType = new GenericType<List<E>>(new ListParameterizedType(entityClass)){};
    }

    /**
     * Does a GET on a collection with optional query params.
     * @return list of entities.
     */
    @Path("")
    public List<E> get(MultivaluedMap<String, String> queryParams) {
        // Local class trick (see comment above): gives us this very Method for @Path lookup.
        class Local{};
        String relativeUrl = helper.getPath(Local.class.getEnclosingMethod());
        RequestResponse requestResponse = client.get(relativeUrl, queryParams, defaultHeaders);
        checkResponse(requestResponse);
        List<E> result = (List<E>)requestResponse.getEntity(listType);
        return result;
    }

    /**
     * Does a GET on a collection with no query parameters (probably equivalent to a getAll()).
     * @return list of entities
     */
    public List<E> get() {
        return this.get(RestClient.EMPTY_MAP);
    }

    /**
     * Does a GET on an entity.
     * @param id identifier substituted into the {uuid} path placeholder
     * @return entity
     */
    @Path("{uuid}")
    public E getById(String id) {
        class Local{};
        String relativeUrl = helper.getPath(Local.class.getEnclosingMethod(), id);
        RequestResponse requestResponse = client.get(relativeUrl, RestClient.EMPTY_MAP, defaultHeaders);
        checkResponse(requestResponse);
        E result = requestResponse.getEntity(this.clazz);
        return result;
    }

    /**
     * Does a POST on a resource collection.
     * @param entity entity to create
     * @return body of the response
     */
    @Path("")
    public String create(E entity) {
        class Local{};
        String relativeUrl = helper.getPath(Local.class.getEnclosingMethod());
        RequestResponse requestResponse = client.post(relativeUrl, entity, RestClient.EMPTY_MAP, defaultHeaders);
        checkResponse(requestResponse);
        String result = requestResponse.getEntity(String.class);
        return result;
    }

    /** Throws the mapped client exception when the HTTP response is not a success. */
    protected final void checkResponse(RequestResponse requestResponse) {
        if (!requestResponse.isOk()) {
            throw ExceptionFactory.getException(requestResponse);
        }
    }

    @SuppressWarnings("rawtypes")
    protected final ResourceHelper<? extends ResourceCollection> getHelper() {
        return helper;
    }

    protected final RestClient getClient() {
        return client;
    }

    protected final GenericType<List<E>> getListType() {
        return listType;
    }

    /** Builds the immutable default header map (Content-type and Accept set to preferredType). */
    private UnmodifiableMultivaluedMap<String, String> buildHeaders(MediaType preferredType) {
        MultivaluedMapImpl auxHeaders = new MultivaluedMapImpl();
        auxHeaders.putSingle("Content-type", preferredType.toString());
        auxHeaders.putSingle("Accept", preferredType.toString());
        return new UnmodifiableMultivaluedMap<>(auxHeaders);
    }

    /**
     * Implements some operations for ResourceCollection regarding @Path annotations.
     *
     * @param <R> {@link ResourceCollection} class that uses this helper.
     *
     * @author rsosa
     */
    static class ResourceHelper<R> {
        private final Class<R> resourceClass;

        public ResourceHelper(Class<R> resourceClass) {
            this.resourceClass = resourceClass;
        }

        /**
         * Build a relative path joining the @Path in ResourceCollection and the @Path
         * in the <code>method</code>.
         *
         * <br/>Example that returns "/providers/p1":
         * <code><pre>
         *
         * @Path("/providers")
         * class Providers {
         *   @Path("{uuid}")
         *   public Provider getByUuid(String providerId) {
         *     class Local{};
         *     String relativeUrl =
         *         helper.getPath(Local.class.getEnclosingMethod(), providerId);
         *     ...
         *   }
         * }
         * Providers().getByUuid("p1")
         * </pre></code>
         *
         * @param method
         *            Method with a @Path annotation
         * @param values
         *            to replace in the placeholders.
         * @return final relative url
         */
        public String getPath(Method method, Object... values) {
            String classPath = getPathValue(resourceClass.getAnnotation(Path.class));
            String methodPath = getPathValue(method.getAnnotation(Path.class));
            String path = join(classPath, methodPath);
            String result = replaceValues(path, values);
            return result;
        }

        // Substitutes {placeholder} segments with the supplied values via UriBuilder.
        private String replaceValues(String path, Object... values) {
            return UriBuilder.fromPath(path).build(values).toString();
        }

        /**
         * Build a relative path joining the @Path in ResourceCollection and the @Path
         * in the <code>method</code>.
         *
         * The method is obtained through reflection by its name and parameters.
         * {@link #getPath(Method, Object...)} is preferred as it is a lot safer than this.
         *
         * @see #getPath(Method, Object...)
         */
        public String getPath(String methodName, Class<?> parameterTypes) {
            Method method = getMethod(methodName, parameterTypes);
            String result = getPath(method);
            return result;
        }

        // Missing @Path annotation contributes an empty segment rather than an NPE.
        private String getPathValue(Path path) {
            return path == null? "" : path.value();
        }

        private Method getMethod(String methodName, Class<?> parameterTypes) {
            Method method;
            try {
                method = resourceClass.getMethod(methodName, parameterTypes);
                return method;
            } catch (NoSuchMethodException | SecurityException e) {
                // Wrap reflective failures; callers cannot recover from a missing method.
                throw new RuntimeException(e.getMessage(), e);
            }
        }

        // Joins non-empty path segments with '/'; a leading separator only appears
        // after the first element, so join("", "x") yields "/x".
        private static String join(String... elems) {
            StringBuilder joined = new StringBuilder();
            String sep = "";
            for (String elem : elems) {
                joined.append(sep);
                joined.append(elem);
                sep = "/";
            }
            return joined.toString();
        }
    }

    /**
     * Needed to deserialize a list of entities.
     */
    static class ListParameterizedType<E> implements ParameterizedType {
        private Class<E> clazz;

        public ListParameterizedType(Class<E> clazz) {
            this.clazz = clazz;
        }

        public Type[] getActualTypeArguments() {
            return new Type[] { clazz };
        }

        public Type getRawType() {
            return List.class;
        }

        public Type getOwnerType() {
            // NOTE(review): ParameterizedType.getOwnerType() is specified to return null
            // for top-level types like List; returning List.class is unusual — confirm the
            // Jersey deserializer tolerates it before changing.
            return List.class;
        }
    }
}
apache-2.0
jcamachor/calcite
piglet/src/test/java/org/apache/calcite/test/PigRelOpTest.java
72776
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to you under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.calcite.test; import org.apache.calcite.rel.RelNode; import org.apache.calcite.sql.SqlDialect; import org.apache.calcite.sql.dialect.CalciteSqlDialect; import org.apache.calcite.util.TestUtil; import org.apache.calcite.util.Util; import org.hamcrest.Matcher; import org.junit.jupiter.api.Test; import java.io.File; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.StringWriter; import java.nio.charset.StandardCharsets; import java.util.List; import static org.apache.calcite.test.Matchers.hasTree; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.MatcherAssert.assertThat; /** * Tests for {@code PigRelOpVisitor}. */ class PigRelOpTest extends PigRelTestBase { /** * SQL dialect for the tests. 
*/ private static class PigRelSqlDialect extends SqlDialect { static final SqlDialect DEFAULT = new CalciteSqlDialect(SqlDialect.EMPTY_CONTEXT .withDatabaseProduct(DatabaseProduct.CALCITE)); private PigRelSqlDialect(Context context) { super(context); } } /** Contains a Pig script and has various methods to translate and * run that script and check the results. Each method returns * this, so that method calls for the same script can be * chained. */ class Fluent { private final String script; Fluent(String script) { this.script = script; } private Fluent assertRel(String pigAlias, boolean optimized, Matcher<RelNode> relMatcher) { try { final RelNode rel; final List<RelNode> relNodes = converter.pigQuery2Rel(script, optimized, true, optimized); if (pigAlias == null) { rel = relNodes.get(0); } else { rel = converter.getBuilder().getRel(pigAlias); } assertThat(rel, relMatcher); } catch (IOException e) { throw TestUtil.rethrow(e); } return this; } private Fluent assertRel(Matcher<RelNode> relMatcher) { return assertRel(null, false, relMatcher); } private Fluent assertOptimizedRel(Matcher<RelNode> relMatcher) { return assertRel(null, true, relMatcher); } private Fluent assertSql(Matcher<String> sqlMatcher) { try { final String sql = converter.pigToSql(script, PigRelSqlDialect.DEFAULT).get(0); assertThat(sql, sqlMatcher); return this; } catch (IOException e) { throw TestUtil.rethrow(e); } } private Fluent assertSql(Matcher<String> sqlMatcher, int pos) { try { final String sql = converter.pigToSql(script, PigRelSqlDialect.DEFAULT).get(pos); assertThat(sql, sqlMatcher); return this; } catch (IOException e) { throw TestUtil.rethrow(e); } } private Fluent assertResult(Matcher<String> resultMatcher) { final RelNode rel; try { rel = converter.pigQuery2Rel(script, false, true, false).get(0); } catch (IOException e) { throw TestUtil.rethrow(e); } final StringWriter sw = new StringWriter(); CalciteHandler.dump(rel, new PrintWriter(sw)); assertThat(Util.toLinux(sw.toString()), 
resultMatcher); return this; } } private static void writeToFile(File f, String[] inputData) { try (PrintWriter pw = new PrintWriter( new OutputStreamWriter(new FileOutputStream(f), StandardCharsets.UTF_8))) { for (String input : inputData) { pw.print(input); pw.print("\n"); } } catch (FileNotFoundException e) { throw TestUtil.rethrow(e); } } /** Creates a {@link Fluent} containing a script, that can then be used to * translate and execute that script. */ private Fluent pig(String script) { return new Fluent(script); } @Test void testLoadFromFile() { final String datadir = "/tmp/pigdata"; final String schema = "{\"fields\":[" + "{\"name\":\"x\",\"type\":55,\"schema\":null}," + "{\"name\":\"y\",\"type\":10,\"schema\":null}," + "{\"name\":\"z\",\"type\":25,\"schema\":null}]," + "\"version\":0,\"sortKeys\":[],\"sortKeyOrders\":[]}"; final File inputDir = new File(datadir, "testTable"); inputDir.mkdirs(); final File inputSchemaFile = new File(inputDir, ".pig_schema"); writeToFile(inputSchemaFile, new String[]{schema}); final String script = "" + "A = LOAD '" + inputDir.getAbsolutePath() + "' using PigStorage();\n" + "B = FILTER A BY z > 5.5;\n" + "C = GROUP B BY x;\n"; final String plan = "" + "LogicalProject(group=[$0], B=[$1])\n" + " LogicalAggregate(group=[{0}], B=[COLLECT($1)])\n" + " LogicalProject(x=[$0], $f1=[ROW($0, $1, $2)])\n" + " LogicalFilter(condition=[>($2, 5.5E0)])\n" + " LogicalTableScan(table=[[/tmp/pigdata/testTable]])\n"; pig(script).assertRel(hasTree(plan)); } @Test void testLoadWithoutSchema() { final String script = "A = LOAD 'scott.DEPT';"; final String plan = "LogicalTableScan(table=[[scott, DEPT]])\n"; final String result = "" + "(10,ACCOUNTING,NEW YORK)\n" + "(20,RESEARCH,DALLAS)\n" + "(30,SALES,CHICAGO)\n" + "(40,OPERATIONS,BOSTON)\n"; pig(script).assertRel(hasTree(plan)) .assertResult(is(result)); } @Test void testLoadWithSchema() { final String script = "" + "A = LOAD 'testSchema.testTable' as (a:int, b:long, c:float, " + "d:double, 
e:chararray, " + "f:bytearray, g:boolean, " + "h:datetime, i:biginteger, j:bigdecimal, k1:tuple(), k2:tuple" + "(k21:int, k22:float), " + "l1:bag{}, " + "l2:bag{l21:(l22:int, l23:float)}, m1:map[], m2:map[int], m3:map[" + "(m3:float)])\n;"; final String plan = "LogicalTableScan(table=[[testSchema, testTable]])\n"; pig(script).assertRel(hasTree(plan)); final String script1 = "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);"; pig(script1) .assertRel(hasTree("LogicalTableScan(table=[[scott, DEPT]])\n")); } @Test void testFilter() { final String script = "" + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n" + "B = FILTER A BY DEPTNO == 10;\n"; final String plan = "" + "LogicalFilter(condition=[=($0, 10)])\n" + " LogicalTableScan(table=[[scott, DEPT]])\n"; final String result = "(10,ACCOUNTING,NEW YORK)\n"; final String sql = "SELECT *\n" + "FROM scott.DEPT\n" + "WHERE DEPTNO = 10"; pig(script).assertRel(hasTree(plan)) .assertResult(is(result)) .assertSql(is(sql)); } @Test void testSample() { final String script = "" + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n" + "B = SAMPLE A 0.5;\n"; final String plan = "" + "LogicalFilter(condition=[<(RAND(), 5E-1)])\n" + " LogicalTableScan(table=[[scott, DEPT]])\n"; final String sql = "" + "SELECT *\n" + "FROM scott.DEPT\n" + "WHERE RAND() < 0.5"; pig(script).assertRel(hasTree(plan)) .assertSql(is(sql)); } @Test void testSplit() { String script = "" + "A = LOAD 'scott.EMP'as (EMPNO:int, ENAME:chararray,\n" + " JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n" + " COMM:bigdecimal, DEPTNO:int);\n" + "SPLIT A INTO B1 IF DEPTNO == 10, B2 IF DEPTNO == 20;\n" + "B = UNION B1, B2;\n"; final String scan = " LogicalTableScan(table=[[scott, EMP]])\n"; final String plan = "" + "LogicalUnion(all=[true])\n" + " LogicalFilter(condition=[=($7, 10)])\n" + " LogicalTableScan(table=[[scott, EMP]])\n" + " LogicalFilter(condition=[=($7, 20)])\n" + " 
LogicalTableScan(table=[[scott, EMP]])\n"; final String result = "" + "(7782,CLARK,MANAGER,7839,1981-06-09,2450.00,null,10)\n" + "(7839,KING,PRESIDENT,null,1981-11-17,5000.00,null,10)\n" + "(7934,MILLER,CLERK,7782,1982-01-23,1300.00,null,10)\n" + "(7369,SMITH,CLERK,7902,1980-12-17,800.00,null,20)\n" + "(7566,JONES,MANAGER,7839,1981-02-04,2975.00,null,20)\n" + "(7788,SCOTT,ANALYST,7566,1987-04-19,3000.00,null,20)\n" + "(7876,ADAMS,CLERK,7788,1987-05-23,1100.00,null,20)\n" + "(7902,FORD,ANALYST,7566,1981-12-03,3000.00,null,20)\n"; final String sql = "" + "SELECT *\n" + "FROM scott.EMP\n" + "WHERE DEPTNO = 10\n" + "UNION ALL\n" + "SELECT *\n" + "FROM scott.EMP\n" + "WHERE DEPTNO = 20"; pig(script) .assertRel("B1", false, hasTree("LogicalFilter(condition=[=($7, 10)])\n" + scan)) .assertRel("B2", false, hasTree("LogicalFilter(condition=[=($7, 20)])\n" + scan)) .assertRel(hasTree(plan)) .assertResult(is(result)) .assertSql(is(sql)); } @Test void testUdf() { final String script = "" + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n" + "B = FILTER A BY ENDSWITH(DNAME, 'LES');\n"; final String plan = "" + "LogicalFilter(condition=[ENDSWITH(PIG_TUPLE($1, 'LES'))])\n" + " LogicalTableScan(table=[[scott, DEPT]])\n"; final String result = "(30,SALES,CHICAGO)\n"; final String sql = "" + "SELECT *\n" + "FROM scott.DEPT\n" + "WHERE ENDSWITH(PIG_TUPLE(DNAME, 'LES'))"; pig(script).assertRel(hasTree(plan)) .assertResult(is(result)) .assertSql(is(sql)); } @Test void testSimpleForEach1() { String script = "" + "A = LOAD 'testSchema.testTable' as (a:int, b:long, c:float, " + "d:double, e:chararray, f:bytearray, g:boolean, " + "h:datetime, i:biginteger, j:bigdecimal, k1:tuple(), " + "k2:tuple(k21:int, k22:float), l1:bag{}, " + "l2:bag{l21:(l22:int, l23:float)}, " + "m1:map[], m2:map[int], m3:map[(m3:float)]);\n" + "B = FOREACH A GENERATE a, a as a2, b, c, d, e, f, g, h, i, j, k2, " + "l2, m2, null as n:chararray;\n"; final String plan = "" + 
"LogicalProject(a=[$0], a2=[$0], b=[$1], c=[$2], d=[$3], e=[$4], "
        + "f=[$5], g=[$6], h=[$7], i=[$8], j=[$9], k2=[$11], l2=[$13], "
        + "m2=[$15], n=[null:VARCHAR])\n"
        + "  LogicalTableScan(table=[[testSchema, testTable]])\n";
    final String sql = ""
        + "SELECT a, a AS a2, b, c, d, e, f, g, h, i, j, k2, l2, m2, "
        + "CAST(NULL AS VARCHAR CHARACTER SET ISO-8859-1) AS n\n"
        + "FROM testSchema.testTable";
    pig(script).assertRel(hasTree(plan))
        .assertSql(is(sql));
  }

  /** Tests FOREACH ... GENERATE with an arithmetic expression and an alias;
   * the expression becomes a computed column of a LogicalProject. */
  @Test void testSimpleForEach2() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = FOREACH A GENERATE DEPTNO + 10 as dept, MGR;\n";
    final String plan = ""
        + "LogicalProject(dept=[+($7, 10)], MGR=[$3])\n"
        + "  LogicalTableScan(table=[[scott, EMP]])\n";
    final String result = ""
        + "(30,7902)\n"
        + "(40,7698)\n"
        + "(40,7698)\n"
        + "(30,7839)\n"
        + "(40,7698)\n"
        + "(40,7839)\n"
        + "(20,7839)\n"
        + "(30,7566)\n"
        + "(20,null)\n"
        + "(40,7698)\n"
        + "(30,7788)\n"
        + "(40,7698)\n"
        + "(30,7566)\n"
        + "(20,7782)\n";
    final String sql = ""
        + "SELECT DEPTNO + 10 AS dept, MGR\n"
        + "FROM scott.EMP";
    pig(script).assertRel(hasTree(plan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests FOREACH with flatten(group) and flatten(bag) after a GROUP BY;
   * flattening an inner bag is translated into a LogicalCorrelate plus
   * Uncollect over the collected multiset. */
  @Test void testSimpleForEach3() {
    String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = FILTER A BY JOB != 'CLERK';\n"
        + "C = GROUP B BY (DEPTNO, JOB);\n"
        + "D = FOREACH C GENERATE flatten(group) as (dept, job), flatten(B);\n"
        + "E = ORDER D BY dept, job;\n";
    final String plan = ""
        + "LogicalSort(sort0=[$0], sort1=[$1], dir0=[ASC], dir1=[ASC])\n"
        + "  LogicalProject(dept=[$0], job=[$1], EMPNO=[$3], ENAME=[$4], "
        + "JOB=[$5], MGR=[$6], HIREDATE=[$7], SAL=[$8], COMM=[$9], "
        + "DEPTNO=[$10])\n"
        + "    LogicalCorrelate(correlation=[$cor0], joinType=[inner], "
        + "requiredColumns=[{2}])\n"
        + "      LogicalProject(dept=[$0.DEPTNO], job=[$0.JOB], B=[$1])\n"
        + "        LogicalProject(group=[ROW($0, $1)], B=[$2])\n"
        + "          LogicalAggregate(group=[{0, 1}], B=[COLLECT($2)])\n"
        + "            LogicalProject(DEPTNO=[$7], JOB=[$2], $f2=[ROW($0, "
        + "$1, $2, $3, $4, $5, $6, $7)])\n"
        + "              LogicalFilter(condition=[<>($2, 'CLERK')])\n"
        + "                LogicalTableScan(table=[[scott, EMP]])\n"
        + "      Uncollect\n"
        + "        LogicalProject($f0=[$cor0.B])\n"
        + "          LogicalValues(tuples=[[{ 0 }]])\n";
    final String sql = ""
        + "SELECT $cor1.DEPTNO AS dept, $cor1.JOB AS job, $cor1.EMPNO,"
        + " $cor1.ENAME, $cor1.JOB0 AS JOB, $cor1.MGR, $cor1.HIREDATE,"
        + " $cor1.SAL, $cor1.COMM, $cor1.DEPTNO0 AS DEPTNO\n"
        + "FROM (SELECT DEPTNO, JOB, COLLECT(ROW(EMPNO, ENAME, JOB, MGR, "
        + "HIREDATE, SAL, COMM, DEPTNO)) AS $f2\n"
        + "    FROM scott.EMP\n"
        + "    WHERE JOB <> 'CLERK'\n"
        + "    GROUP BY DEPTNO, JOB) AS $cor1,\n"
        + "  LATERAL UNNEST (SELECT $cor1.$f2 AS $f0\n"
        + "      FROM (VALUES (0)) AS t (ZERO)) AS t3 (EMPNO, ENAME, JOB,"
        + " MGR, HIREDATE, SAL, COMM, DEPTNO) AS t30\n"
        + "ORDER BY $cor1.DEPTNO, $cor1.JOB";
    pig(script).assertRel(hasTree(plan))
        .assertSql(is(sql));

    // TODO fix Calcite execution
    final String result = ""
        + "(10,7782,CLARK,MANAGER,7839,1981-06-09,2450.00,null,10)\n"
        + "(10,7839,KING,PRESIDENT,null,1981-11-17,5000.00,null,10)\n"
        + "(20,7566,JONES,MANAGER,7839,1981-02-04,2975.00,null,20)\n"
        + "(20,7788,SCOTT,ANALYST,7566,1987-04-19,3000.00,null,20)\n"
        + "(20,7902,FORD,ANALYST,7566,1981-12-03,3000.00,null,20)\n"
        + "(30,7499,ALLEN,SALESMAN,7698,1981-02-20,1600.00,300.00,30)\n"
        + "(30,7521,WARD,SALESMAN,7698,1981-02-22,1250.00,500.00,30)\n"
        + "(30,7654,MARTIN,SALESMAN,7698,1981-09-28,1250.00,1400.00,30)\n"
        + "(30,7698,BLAKE,MANAGER,7839,1981-01-05,2850.00,null,30)\n"
        + "(30,7844,TURNER,SALESMAN,7698,1981-09-08,1500.00,0.00,30)\n";
    // Execution of this plan is disabled until the Calcite issue above is fixed.
    if (false) {
      pig(script).assertResult(is(result));
    }
  }

  /** Tests a nested FOREACH block (inner FILTER, FOREACH, ORDER, and an
   * aggregate over the inner bag); each nested relation becomes a correlated
   * sub-plan under a LogicalCorrelate. */
  @Test void testForEachNested() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = GROUP A BY DEPTNO;\n"
        + "C = FOREACH B {\n"
        + "  S = FILTER A BY JOB != 'CLERK';\n"
        + "  Y = FOREACH S GENERATE ENAME, JOB, DEPTNO, SAL;\n"
        + "  X = ORDER Y BY SAL;\n"
        + "  GENERATE group, COUNT(X) as cnt, flatten(X), BigDecimalMax(X.SAL);\n"
        + "}\n"
        + "D = ORDER C BY $0;\n";
    final String plan = ""
        + "LogicalSort(sort0=[$0], dir0=[ASC])\n"
        + "  LogicalProject(group=[$0], cnt=[$1], ENAME=[$4], JOB=[$5], "
        + "DEPTNO=[$6], SAL=[$7], $f3=[$3])\n"
        + "    LogicalCorrelate(correlation=[$cor1], joinType=[inner], "
        + "requiredColumns=[{2}])\n"
        + "      LogicalProject(group=[$0], cnt=[COUNT(PIG_BAG($2))], X=[$2], "
        + "$f3=[BigDecimalMax(PIG_BAG(MULTISET_PROJECTION($2, 3)))])\n"
        + "        LogicalCorrelate(correlation=[$cor0], joinType=[inner], "
        + "requiredColumns=[{1}])\n"
        + "          LogicalProject(group=[$0], A=[$1])\n"
        + "            LogicalAggregate(group=[{0}], A=[COLLECT($1)])\n"
        + "              LogicalProject(DEPTNO=[$7], $f1=[ROW($0, $1, $2, "
        + "$3, $4, $5, $6, $7)])\n"
        + "                LogicalTableScan(table=[[scott, EMP]])\n"
        + "          LogicalProject(X=[$1])\n"
        + "            LogicalAggregate(group=[{0}], X=[COLLECT($1)])\n"
        + "              LogicalProject($f0=['all'], $f1=[ROW($0, $1, $2, $3)])\n"
        + "                LogicalSort(sort0=[$3], dir0=[ASC])\n"
        + "                  LogicalProject(ENAME=[$1], JOB=[$2], "
        + "DEPTNO=[$7], SAL=[$5])\n"
        + "                    LogicalFilter(condition=[<>($2, 'CLERK')])\n"
        + "                      Uncollect\n"
        + "                        LogicalProject($f0=[$cor0.A])\n"
        + "                          LogicalValues(tuples=[[{ 0 }]])\n"
        + "      Uncollect\n"
        + "        LogicalProject($f0=[$cor1.X])\n"
        + "          LogicalValues(tuples=[[{ 0 }]])\n";
    final String result = ""
        + "(10,2,CLARK,MANAGER,10,2450.00,5000.00)\n"
        + "(10,2,KING,PRESIDENT,10,5000.00,5000.00)\n"
        + "(20,3,JONES,MANAGER,20,2975.00,3000.00)\n"
        + "(20,3,SCOTT,ANALYST,20,3000.00,3000.00)\n"
        + "(20,3,FORD,ANALYST,20,3000.00,3000.00)\n"
        + "(30,5,WARD,SALESMAN,30,1250.00,2850.00)\n"
        + "(30,5,MARTIN,SALESMAN,30,1250.00,2850.00)\n"
        + "(30,5,TURNER,SALESMAN,30,1500.00,2850.00)\n" +
"(30,5,ALLEN,SALESMAN,30,1600.00,2850.00)\n"
        + "(30,5,BLAKE,MANAGER,30,2850.00,2850.00)\n";
    // Expected SQL: the nested FOREACH becomes correlated LATERAL sub-queries.
    final String sql = ""
        + "SELECT $cor5.group, $cor5.cnt, $cor5.ENAME, $cor5.JOB, "
        + "$cor5.DEPTNO, $cor5.SAL, $cor5.$f3\n"
        + "FROM (SELECT $cor4.DEPTNO AS group, "
        + "COUNT(PIG_BAG($cor4.X)) AS cnt, $cor4.X, "
        + "BigDecimalMax(PIG_BAG(MULTISET_PROJECTION($cor4.X, 3))) AS $f3\n"
        + "    FROM (SELECT DEPTNO, COLLECT(ROW(EMPNO, ENAME, JOB, MGR, "
        + "HIREDATE, SAL, COMM, DEPTNO)) AS A\n"
        + "        FROM scott.EMP\n"
        + "        GROUP BY DEPTNO) AS $cor4,\n"
        + "      LATERAL (SELECT COLLECT(ROW(ENAME, JOB, DEPTNO, SAL)) AS X\n"
        + "        FROM (SELECT ENAME, JOB, DEPTNO, SAL\n"
        + "            FROM UNNEST (SELECT $cor4.A AS $f0\n"
        + "                FROM (VALUES (0)) AS t (ZERO)) "
        + "AS t2 (EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO)\n"
        + "            WHERE JOB <> 'CLERK'\n"
        + "            ORDER BY SAL) AS t5\n"
        + "        GROUP BY 'all') AS t8) AS $cor5,\n"
        + "  LATERAL UNNEST (SELECT $cor5.X AS $f0\n"
        + "      FROM (VALUES (0)) AS t (ZERO)) "
        + "AS t11 (ENAME, JOB, DEPTNO, SAL) AS t110\n"
        + "ORDER BY $cor5.group";
    pig(script).assertRel(hasTree(plan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests UNION of two relations with identical schemas; it maps directly
   * to a LogicalUnion(all=[true]) over the two filtered scans. */
  @Test void testUnionSameSchema() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = FILTER A BY DEPTNO == 10;\n"
        + "C = FILTER A BY DEPTNO == 20;\n"
        + "D = UNION B, C;\n";
    final String plan = ""
        + "LogicalUnion(all=[true])\n"
        + "  LogicalFilter(condition=[=($7, 10)])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n"
        + "  LogicalFilter(condition=[=($7, 20)])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n";
    final String result = ""
        + "(7782,CLARK,MANAGER,7839,1981-06-09,2450.00,null,10)\n"
        + "(7839,KING,PRESIDENT,null,1981-11-17,5000.00,null,10)\n"
        + "(7934,MILLER,CLERK,7782,1982-01-23,1300.00,null,10)\n"
        + "(7369,SMITH,CLERK,7902,1980-12-17,800.00,null,20)\n"
        + "(7566,JONES,MANAGER,7839,1981-02-04,2975.00,null,20)\n" +
"(7788,SCOTT,ANALYST,7566,1987-04-19,3000.00,null,20)\n"
        + "(7876,ADAMS,CLERK,7788,1987-05-23,1100.00,null,20)\n"
        + "(7902,FORD,ANALYST,7566,1981-12-03,3000.00,null,20)\n";
    final String sql = ""
        + "SELECT *\n"
        + "FROM scott.EMP\n"
        + "WHERE DEPTNO = 10\n"
        + "UNION ALL\n"
        + "SELECT *\n"
        + "FROM scott.EMP\n"
        + "WHERE DEPTNO = 20";
    pig(script).assertRel(hasTree(plan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests UNION ONSCHEMA where one input lacks a column; the missing column
   * is padded with a typed null so the union inputs line up. */
  @Test void testUnionDifferentSchemas1() {
    final String script = ""
        + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n"
        + "B = FOREACH A GENERATE DEPTNO, DNAME;\n"
        + "C = UNION ONSCHEMA A, B;\n";
    final String plan = ""
        + "LogicalUnion(all=[true])\n"
        + "  LogicalTableScan(table=[[scott, DEPT]])\n"
        + "  LogicalProject(DEPTNO=[$0], DNAME=[$1], LOC=[null:VARCHAR])\n"
        + "    LogicalProject(DEPTNO=[$0], DNAME=[$1])\n"
        + "      LogicalTableScan(table=[[scott, DEPT]])\n";
    // After optimization the two stacked projects collapse into one.
    final String optimizedPlan = ""
        + "LogicalUnion(all=[true])\n"
        + "  LogicalTableScan(table=[[scott, DEPT]])\n"
        + "  LogicalProject(DEPTNO=[$0], DNAME=[$1], LOC=[null:VARCHAR])\n"
        + "    LogicalTableScan(table=[[scott, DEPT]])\n";
    final String result = ""
        + "(10,ACCOUNTING,NEW YORK)\n"
        + "(20,RESEARCH,DALLAS)\n"
        + "(30,SALES,CHICAGO)\n"
        + "(40,OPERATIONS,BOSTON)\n"
        + "(10,ACCOUNTING,null)\n"
        + "(20,RESEARCH,null)\n"
        + "(30,SALES,null)\n"
        + "(40,OPERATIONS,null)\n";
    final String sql = ""
        + "SELECT *\n"
        + "FROM scott.DEPT\n"
        + "UNION ALL\n"
        + "SELECT DEPTNO, DNAME, "
        + "CAST(NULL AS VARCHAR CHARACTER SET ISO-8859-1) AS LOC\n"
        + "FROM scott.DEPT";
    pig(script).assertRel(hasTree(plan))
        .assertOptimizedRel(hasTree(optimizedPlan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests UNION ONSCHEMA of two relations with disjoint columns; each side
   * is padded with typed nulls for the columns it does not provide. */
  @Test void testUnionDifferentSchemas2() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = FILTER A BY DEPTNO == 10;\n"
        + "C = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n"
        + "D = UNION ONSCHEMA B, C;\n";
    final String plan = ""
        + "LogicalUnion(all=[true])\n"
        + "  LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3], "
        + "HIREDATE=[$4], SAL=[$5], COMM=[$6], DEPTNO=[$7], "
        + "DNAME=[null:VARCHAR], LOC=[null:VARCHAR])\n"
        + "    LogicalFilter(condition=[=($7, 10)])\n"
        + "      LogicalTableScan(table=[[scott, EMP]])\n"
        + "  LogicalProject(EMPNO=[null:INTEGER], ENAME=[null:VARCHAR], "
        + "JOB=[null:VARCHAR], MGR=[null:INTEGER], HIREDATE=[null:DATE], "
        + "SAL=[null:DECIMAL(19, 0)], COMM=[null:DECIMAL(19, 0)], DEPTNO=[$0], "
        + "DNAME=[$1], LOC=[$2])\n"
        + "    LogicalTableScan(table=[[scott, DEPT]])\n";
    final String result = ""
        + "(7782,CLARK,MANAGER,7839,1981-06-09,2450.00,null,10,null,null)\n"
        + "(7839,KING,PRESIDENT,null,1981-11-17,5000.00,null,10,null,"
        + "null)\n"
        + "(7934,MILLER,CLERK,7782,1982-01-23,1300.00,null,10,null,null)\n"
        + "(null,null,null,null,null,null,null,10,ACCOUNTING,NEW YORK)\n"
        + "(null,null,null,null,null,null,null,20,RESEARCH,DALLAS)\n"
        + "(null,null,null,null,null,null,null,30,SALES,CHICAGO)\n"
        + "(null,null,null,null,null,null,null,40,OPERATIONS,BOSTON)\n";
    final String sql = ""
        + "SELECT EMPNO, ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO, "
        + "CAST(NULL AS VARCHAR CHARACTER SET ISO-8859-1) AS DNAME, "
        + "CAST(NULL AS VARCHAR CHARACTER SET ISO-8859-1) AS LOC\n"
        + "FROM scott.EMP\n"
        + "WHERE DEPTNO = 10\n"
        + "UNION ALL\n"
        + "SELECT CAST(NULL AS INTEGER) AS EMPNO, "
        + "CAST(NULL AS VARCHAR CHARACTER SET ISO-8859-1) AS ENAME, "
        + "CAST(NULL AS VARCHAR CHARACTER SET ISO-8859-1) AS JOB, "
        + "CAST(NULL AS INTEGER) AS MGR, "
        + "CAST(NULL AS DATE) AS HIREDATE, CAST(NULL AS DECIMAL(19, 0)) AS SAL, "
        + "CAST(NULL AS DECIMAL(19, 0)) AS COMM, DEPTNO, DNAME, LOC\n"
        + "FROM scott.DEPT";
    pig(script).assertRel(hasTree(plan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests two-relation JOIN with all four Pig join modes (inner, LEFT OUTER,
   * RIGHT OUTER, FULL); each becomes a LogicalJoin of the matching type. */
  @Test void testJoin2Rels() {
    final String scanScript = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n";
    final String scanPlan = ""
        + "  LogicalTableScan(table=[[scott, EMP]])\n"
        + "  LogicalTableScan(table=[[scott, DEPT]])\n";
    final String innerScript = scanScript
        + "C = JOIN A BY DEPTNO, B BY DEPTNO;\n";
    final String plan = ""
        + "LogicalJoin(condition=[=($7, $8)], joinType=[inner])\n"
        + scanPlan;
    final String innerSql = ""
        + "SELECT *\n"
        + "FROM scott.EMP\n"
        + "  INNER JOIN scott.DEPT ON EMP.DEPTNO = DEPT.DEPTNO";
    pig(innerScript).assertRel(hasTree(plan))
        .assertSql(is(innerSql));

    final String leftScript = scanScript
        + "C = JOIN A BY DEPTNO LEFT OUTER, B BY DEPTNO;\n";
    final String leftSql = ""
        + "SELECT *\n"
        + "FROM scott.EMP\n"
        + "  LEFT JOIN scott.DEPT ON EMP.DEPTNO = DEPT.DEPTNO";
    final String leftPlan = ""
        + "LogicalJoin(condition=[=($7, $8)], joinType=[left])\n"
        + scanPlan;
    pig(leftScript).assertRel(hasTree(leftPlan))
        .assertSql(is(leftSql));

    final String rightScript = scanScript
        + "C = JOIN A BY DEPTNO RIGHT OUTER, B BY DEPTNO;\n";
    final String rightSql = ""
        + "SELECT *\n"
        + "FROM scott.EMP\n"
        + "  RIGHT JOIN scott.DEPT ON EMP.DEPTNO = DEPT.DEPTNO";
    final String rightPlan = "LogicalJoin(condition=[=($7, $8)], joinType=[right])\n"
        + scanPlan;
    pig(rightScript)
        .assertRel(hasTree(rightPlan))
        .assertSql(is(rightSql));

    final String fullScript = scanScript
        + "C = JOIN A BY DEPTNO FULL, B BY DEPTNO;\n";
    final String fullPlan = ""
        + "LogicalJoin(condition=[=($7, $8)], joinType=[full])\n"
        + scanPlan;
    final String fullSql = ""
        + "SELECT *\n"
        + "FROM scott.EMP\n"
        + "  FULL JOIN scott.DEPT ON EMP.DEPTNO = DEPT.DEPTNO";
    // The full join also emits the unmatched department (40, OPERATIONS).
    final String fullResult = ""
        + "(7369,SMITH,CLERK,7902,1980-12-17,800.00,null,20,20,"
        + "RESEARCH,DALLAS)\n"
        + "(7499,ALLEN,SALESMAN,7698,1981-02-20,1600.00,300.00,30,30,"
        + "SALES,CHICAGO)\n"
        + "(7521,WARD,SALESMAN,7698,1981-02-22,1250.00,500.00,30,30,"
        + "SALES,CHICAGO)\n" +
"(7566,JONES,MANAGER,7839,1981-02-04,2975.00,null,20,20,"
        + "RESEARCH,DALLAS)\n"
        + "(7654,MARTIN,SALESMAN,7698,1981-09-28,1250.00,1400.00,30,30,"
        + "SALES,CHICAGO)\n"
        + "(7698,BLAKE,MANAGER,7839,1981-01-05,2850.00,null,30,30,"
        + "SALES,CHICAGO)\n"
        + "(7782,CLARK,MANAGER,7839,1981-06-09,2450.00,null,10,10,"
        + "ACCOUNTING,NEW YORK)\n"
        + "(7788,SCOTT,ANALYST,7566,1987-04-19,3000.00,null,20,20,"
        + "RESEARCH,DALLAS)\n"
        + "(7839,KING,PRESIDENT,null,1981-11-17,5000.00,null,10,10,"
        + "ACCOUNTING,NEW YORK)\n"
        + "(7844,TURNER,SALESMAN,7698,1981-09-08,1500.00,0.00,30,30,"
        + "SALES,CHICAGO)\n"
        + "(7876,ADAMS,CLERK,7788,1987-05-23,1100.00,null,20,20,"
        + "RESEARCH,DALLAS)\n"
        + "(7900,JAMES,CLERK,7698,1981-12-03,950.00,null,30,30,SALES,"
        + "CHICAGO)\n"
        + "(7902,FORD,ANALYST,7566,1981-12-03,3000.00,null,20,20,"
        + "RESEARCH,DALLAS)\n"
        + "(7934,MILLER,CLERK,7782,1982-01-23,1300.00,null,10,10,"
        + "ACCOUNTING,NEW YORK)\n"
        + "(null,null,null,null,null,null,null,null,40,OPERATIONS,"
        + "BOSTON)\n";
    pig(fullScript)
        .assertRel(hasTree(fullPlan))
        .assertSql(is(fullSql))
        .assertResult(is(fullResult));
  }

  /** Tests a three-relation JOIN, both on single keys and on compound keys;
   * it is planned as two nested LogicalJoins. */
  @Test void testJoin3Rels() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n"
        + "C = FILTER B BY LOC == 'CHICAGO';\n"
        + "D = JOIN A BY DEPTNO, B BY DEPTNO, C BY DEPTNO;\n";
    final String plan = ""
        + "LogicalJoin(condition=[=($7, $11)], joinType=[inner])\n"
        + "  LogicalJoin(condition=[=($7, $8)], joinType=[inner])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n"
        + "    LogicalTableScan(table=[[scott, DEPT]])\n"
        + "  LogicalFilter(condition=[=($2, 'CHICAGO')])\n"
        + "    LogicalTableScan(table=[[scott, DEPT]])\n";
    final String sql = ""
        + "SELECT *\n"
        + "FROM scott.EMP\n"
        + "  INNER JOIN scott.DEPT ON EMP.DEPTNO = DEPT.DEPTNO\n"
        + "  INNER JOIN (SELECT *\n"
        + "      FROM scott.DEPT\n"
        + "      WHERE LOC = 'CHICAGO') AS t ON EMP.DEPTNO = t.DEPTNO";
    final String result = ""
        + "(7499,ALLEN,SALESMAN,7698,1981-02-20,1600.00,300.00,30,30,"
        + "SALES,CHICAGO,30,SALES,"
        + "CHICAGO)\n"
        + "(7521,WARD,SALESMAN,7698,1981-02-22,1250.00,500.00,30,30,"
        + "SALES,CHICAGO,30,SALES,"
        + "CHICAGO)\n"
        + "(7654,MARTIN,SALESMAN,7698,1981-09-28,1250.00,1400.00,30,30,"
        + "SALES,CHICAGO,30,"
        + "SALES,CHICAGO)\n"
        + "(7698,BLAKE,MANAGER,7839,1981-01-05,2850.00,null,30,30,SALES,"
        + "CHICAGO,30,SALES,"
        + "CHICAGO)\n"
        + "(7844,TURNER,SALESMAN,7698,1981-09-08,1500.00,0.00,30,30,"
        + "SALES,CHICAGO,30,SALES,"
        + "CHICAGO)\n"
        + "(7900,JAMES,CLERK,7698,1981-12-03,950.00,null,30,30,SALES,"
        + "CHICAGO,30,SALES,"
        + "CHICAGO)\n";
    pig(script).assertRel(hasTree(plan))
        .assertSql(is(sql))
        .assertResult(is(result));

    // Same three-way join but on compound (DEPTNO, name) keys.
    final String script2 = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray,\n"
        + "    LOC:CHARARRAY);\n"
        + "C = FILTER B BY LOC == 'CHICAGO';\n"
        + "D = JOIN A BY (DEPTNO, ENAME), B BY (DEPTNO, DNAME),\n"
        + "    C BY (DEPTNO, DNAME);\n";
    final String plan2 = ""
        + "LogicalJoin(condition=[AND(=($7, $11), =($9, $12))], "
        + "joinType=[inner])\n"
        + "  LogicalJoin(condition=[AND(=($7, $8), =($1, $9))], "
        + "joinType=[inner])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n"
        + "    LogicalTableScan(table=[[scott, DEPT]])\n"
        + "  LogicalFilter(condition=[=($2, 'CHICAGO')])\n"
        + "    LogicalTableScan(table=[[scott, DEPT]])\n";
    final String sql2 = ""
        + "SELECT *\n"
        + "FROM scott.EMP\n"
        + "  INNER JOIN scott.DEPT ON EMP.DEPTNO = DEPT.DEPTNO "
        + "AND EMP.ENAME = DEPT.DNAME\n"
        + "  INNER JOIN (SELECT *\n"
        + "      FROM scott.DEPT\n"
        + "      WHERE LOC = 'CHICAGO') AS t ON EMP.DEPTNO = t.DEPTNO "
        + "AND DEPT.DNAME = t.DNAME";
    pig(script2).assertRel(hasTree(plan2))
        .assertSql(is(sql2));
  }

  /** Tests CROSS of two and three relations; each CROSS becomes a
   * LogicalJoin with a literal TRUE condition (a cartesian product). */
  @Test void testCross() {
    final String script = ""
        + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray,\n"
        + "    LOC:CHARARRAY);\n"
        + "B = FOREACH A GENERATE DEPTNO;\n"
        + "C = FILTER B BY DEPTNO <= 20;\n"
        + "D = CROSS B, C;\n";
    final String plan = ""
        + "LogicalJoin(condition=[true], joinType=[inner])\n"
        + "  LogicalProject(DEPTNO=[$0])\n"
        + "    LogicalTableScan(table=[[scott, DEPT]])\n"
        + "  LogicalFilter(condition=[<=($0, 20)])\n"
        + "    LogicalProject(DEPTNO=[$0])\n"
        + "      LogicalTableScan(table=[[scott, DEPT]])\n";
    final String sql = ""
        + "SELECT *\n"
        + "FROM (SELECT DEPTNO\n"
        + "    FROM scott.DEPT) AS t,\n"
        + "  (SELECT DEPTNO\n"
        + "    FROM scott.DEPT\n"
        + "    WHERE DEPTNO <= 20) AS t1";
    final String result = ""
        + "(10,10)\n"
        + "(10,20)\n"
        + "(20,10)\n"
        + "(20,20)\n"
        + "(30,10)\n"
        + "(30,20)\n"
        + "(40,10)\n"
        + "(40,20)\n";
    pig(script).assertRel(hasTree(plan))
        .assertSql(is(sql))
        .assertResult(is(result));

    // Three-way cross product.
    final String script2 = ""
        + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray,"
        + " LOC:CHARARRAY);\n"
        + "B = FOREACH A GENERATE DEPTNO;\n"
        + "C = FILTER B BY DEPTNO <= 20;\n"
        + "D = FILTER B BY DEPTNO > 20;\n"
        + "E = CROSS B, C, D;\n";
    final String plan2 = ""
        + "LogicalJoin(condition=[true], joinType=[inner])\n"
        + "  LogicalJoin(condition=[true], joinType=[inner])\n"
        + "    LogicalProject(DEPTNO=[$0])\n"
        + "      LogicalTableScan(table=[[scott, DEPT]])\n"
        + "    LogicalFilter(condition=[<=($0, 20)])\n"
        + "      LogicalProject(DEPTNO=[$0])\n"
        + "        LogicalTableScan(table=[[scott, DEPT]])\n"
        + "  LogicalFilter(condition=[>($0, 20)])\n"
        + "    LogicalProject(DEPTNO=[$0])\n"
        + "      LogicalTableScan(table=[[scott, DEPT]])\n";
    final String result2 = ""
        + "(10,10,30)\n"
        + "(10,10,40)\n"
        + "(10,20,30)\n"
        + "(10,20,40)\n"
        + "(20,10,30)\n"
        + "(20,10,40)\n"
        + "(20,20,30)\n"
        + "(20,20,40)\n"
        + "(30,10,30)\n"
        + "(30,10,40)\n"
        + "(30,20,30)\n"
        + "(30,20,40)\n"
        + "(40,10,30)\n"
        + "(40,10,40)\n"
        + "(40,20,30)\n"
        + "(40,20,40)\n";
    final String sql2 = ""
        + "SELECT *\n"
        + "FROM (SELECT DEPTNO\n"
        + "    FROM scott.DEPT) AS t,\n"
        + "  (SELECT DEPTNO\n"
        + "    FROM scott.DEPT\n"
        + "    WHERE DEPTNO <= 20) AS t1,\n"
        + "  (SELECT DEPTNO\n"
        + "    FROM scott.DEPT\n"
        + "    WHERE DEPTNO > 20) AS t3";
    pig(script2).assertRel(hasTree(plan2))
        .assertResult(is(result2))
        .assertSql(is(sql2));
  }

  /** Tests GROUP BY on a single key and GROUP ALL; grouping collects the
   * source rows into a bag via COLLECT over a ROW constructor. */
  @Test void testGroupby() {
    final String baseScript = "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n";
    final String basePlan = "      LogicalTableScan(table=[[scott, DEPT]])\n";
    final String script = baseScript + "B = GROUP A BY DEPTNO;\n";
    final String plan = ""
        + "LogicalProject(group=[$0], A=[$1])\n"
        + "  LogicalAggregate(group=[{0}], A=[COLLECT($1)])\n"
        + "    LogicalProject(DEPTNO=[$0], $f1=[ROW($0, $1, $2)])\n"
        + basePlan;
    final String result = ""
        + "(20,{(20,RESEARCH,DALLAS)})\n"
        + "(40,{(40,OPERATIONS,BOSTON)})\n"
        + "(10,{(10,ACCOUNTING,NEW YORK)})\n"
        + "(30,{(30,SALES,CHICAGO)})\n";
    final String sql = ""
        + "SELECT DEPTNO, COLLECT(ROW(DEPTNO, DNAME, LOC)) AS A\n"
        + "FROM scott.DEPT\n"
        + "GROUP BY DEPTNO";
    pig(script).assertRel(hasTree(plan))
        .assertResult(is(result))
        .assertSql(is(sql));

    // GROUP ALL collapses everything into a single 'all' group.
    final String script1 = baseScript + "B = GROUP A ALL;\n";
    final String plan1 = ""
        + "LogicalProject(group=[$0], A=[$1])\n"
        + "  LogicalAggregate(group=[{0}], A=[COLLECT($1)])\n"
        + "    LogicalProject($f0=['all'], $f1=[ROW($0, $1, $2)])\n"
        + basePlan;
    final String result1 = ""
        + "(all,{(10,ACCOUNTING,NEW YORK),(20,RESEARCH,DALLAS),"
        + "(30,SALES,CHICAGO),(40,OPERATIONS,BOSTON)})\n";
    pig(script1).assertResult(is(result1))
        .assertRel(hasTree(plan1));
  }

  /** Tests GROUP BY on a compound (DEPTNO, JOB) key; the group column is a
   * ROW of the two keys, and the rows are collected into a bag. */
  @Test void testGroupby2() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = FOREACH A GENERATE EMPNO, ENAME, JOB, MGR, SAL, COMM, DEPTNO;\n"
        + "C = GROUP B BY (DEPTNO, JOB);\n";
    final String plan = ""
        + "LogicalProject(group=[ROW($0, $1)], B=[$2])\n"
        + "  LogicalAggregate(group=[{0, 1}], B=[COLLECT($2)])\n"
        + "    LogicalProject(DEPTNO=[$6], JOB=[$2], $f2=[ROW($0, $1, $2, "
        + "$3, $4, $5, $6)])\n" +
"      LogicalProject(EMPNO=[$0], ENAME=[$1], JOB=[$2], MGR=[$3],"
        + " SAL=[$5], COMM=[$6], DEPTNO=[$7])\n"
        + "        LogicalTableScan(table=[[scott, EMP]])\n";
    final String result = ""
        + "({10, MANAGER},{(7782,CLARK,MANAGER,7839,2450.00,null,10)})\n"
        + "({10, PRESIDENT},{(7839,KING,PRESIDENT,null,5000.00,null,10)})\n"
        + "({20, CLERK},{(7369,SMITH,CLERK,7902,800.00,null,20),"
        + "(7876,ADAMS,CLERK,7788,1100.00,null,20)})\n"
        + "({30, MANAGER},{(7698,BLAKE,MANAGER,7839,2850.00,null,30)})\n"
        + "({20, ANALYST},{(7788,SCOTT,ANALYST,7566,3000.00,null,20),"
        + "(7902,FORD,ANALYST,7566,3000.00,null,20)})\n"
        + "({30, SALESMAN},{(7499,ALLEN,SALESMAN,7698,1600.00,300.00,30),"
        + "(7521,WARD,SALESMAN,7698,1250.00,500.00,30),"
        + "(7654,MARTIN,SALESMAN,7698,1250.00,1400.00,30),"
        + "(7844,TURNER,SALESMAN,7698,1500.00,0.00,30)})\n"
        + "({30, CLERK},{(7900,JAMES,CLERK,7698,950.00,null,30)})\n"
        + "({20, MANAGER},{(7566,JONES,MANAGER,7839,2975.00,null,20)})\n"
        + "({10, CLERK},{(7934,MILLER,CLERK,7782,1300.00,null,10)})\n";
    pig(script).assertRel(hasTree(plan))
        .assertResult(is(result));
  }

  /** Tests CUBE ... BY CUBE(...); the cube becomes a LogicalAggregate with
   * all four grouping sets, and the optimizer pushes COUNT below the project. */
  @Test void testCubeCube() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = CUBE A BY CUBE(DEPTNO, JOB);\n"
        + "C = FOREACH B GENERATE group, COUNT(cube.ENAME);\n";
    final String plan = ""
        + "LogicalProject(group=[$0], $f1=[COUNT(PIG_BAG"
        + "(MULTISET_PROJECTION($1, 3)))])\n"
        + "  LogicalProject(group=[ROW($0, $1)], cube=[$2])\n"
        + "    LogicalAggregate(group=[{0, 1}], "
        + "groups=[[{0, 1}, {0}, {1}, {}]], cube=[COLLECT($2)])\n"
        + "      LogicalProject(DEPTNO=[$7], JOB=[$2], "
        + "$f2=[ROW($7, $2, $0, $1, $3, $4, $5, $6)])\n"
        + "        LogicalTableScan(table=[[scott, EMP]])\n";
    final String optimizedPlan = ""
        + "LogicalProject(group=[ROW($0, $1)], $f1=[CAST($2):BIGINT])\n"
        + "  LogicalAggregate(group=[{0, 1}], "
        + "groups=[[{0, 1}, {0}, {1}, {}]], agg#0=[COUNT($2)])\n"
        + "    LogicalProject(DEPTNO=[$7], JOB=[$2], ENAME=[$1])\n"
        + "      LogicalTableScan(table=[[scott, EMP]])\n";
    final String result = ""
        + "({30, SALESMAN},4)\n"
        + "({30, null},6)\n"
        + "({10, null},3)\n"
        + "({null, PRESIDENT},1)\n"
        + "({30, MANAGER},1)\n"
        + "({20, MANAGER},1)\n"
        + "({20, ANALYST},2)\n"
        + "({10, MANAGER},1)\n"
        + "({null, CLERK},4)\n"
        + "({null, null},14)\n"
        + "({20, null},5)\n"
        + "({10, PRESIDENT},1)\n"
        + "({null, ANALYST},2)\n"
        + "({null, SALESMAN},4)\n"
        + "({30, CLERK},1)\n"
        + "({10, CLERK},1)\n"
        + "({20, CLERK},2)\n"
        + "({null, MANAGER},3)\n";
    final String sql = ""
        + "SELECT ROW(DEPTNO, JOB) AS group,"
        + " CAST(COUNT(ENAME) AS BIGINT) AS $f1\n"
        + "FROM scott.EMP\n"
        + "GROUP BY CUBE(DEPTNO, JOB)";
    pig(script).assertRel(hasTree(plan))
        .assertOptimizedRel(hasTree(optimizedPlan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests CUBE ... BY ROLLUP(...); like {@code testCubeCube} but with the
   * three rollup grouping sets instead of the full cube. */
  @Test void testCubeRollup() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = CUBE A BY ROLLUP(DEPTNO, JOB);\n"
        + "C = FOREACH B GENERATE group, COUNT(cube.ENAME);\n";
    final String plan = ""
        + "LogicalProject(group=[$0], $f1=[COUNT(PIG_BAG"
        + "(MULTISET_PROJECTION($1, 3)))])\n"
        + "  LogicalProject(group=[ROW($0, $1)], cube=[$2])\n"
        + "    LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {1}, {}]],"
        + " cube=[COLLECT($2)])\n"
        + "      LogicalProject(DEPTNO=[$7], JOB=[$2], $f2=[ROW($7, $2, $0,"
        + " $1, $3, $4, $5, $6)])\n"
        + "        LogicalTableScan(table=[[scott, EMP]])\n";
    final String optimizedPlan = ""
        + "LogicalProject(group=[ROW($0, $1)], $f1=[CAST($2):BIGINT])\n"
        + "  LogicalAggregate(group=[{0, 1}], groups=[[{0, 1}, {1}, {}]], "
        + "agg#0=[COUNT($2)])\n"
        + "    LogicalProject(DEPTNO=[$7], JOB=[$2], ENAME=[$1])\n"
        + "      LogicalTableScan(table=[[scott, EMP]])\n";
    final String result = ""
        + "({30, SALESMAN},4)\n"
        + "({null, PRESIDENT},1)\n"
        + "({30, MANAGER},1)\n"
        + "({20, MANAGER},1)\n"
        + "({20, ANALYST},2)\n" +
"({10, MANAGER},1)\n"
        + "({null, CLERK},4)\n"
        + "({null, null},14)\n"
        + "({10, PRESIDENT},1)\n"
        + "({null, ANALYST},2)\n"
        + "({null, SALESMAN},4)\n"
        + "({30, CLERK},1)\n"
        + "({10, CLERK},1)\n"
        + "({20, CLERK},2)\n"
        + "({null, MANAGER},3)\n";
    final String sql = ""
        + "SELECT ROW(DEPTNO, JOB) AS group, "
        + "CAST(COUNT(ENAME) AS BIGINT) AS $f1\n"
        + "FROM scott.EMP\n"
        + "GROUP BY ROLLUP(DEPTNO, JOB)";
    pig(script).assertRel(hasTree(plan))
        .assertOptimizedRel(hasTree(optimizedPlan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests projecting columns out of a grouped bag, i.e. {@code A.(c1, c2)};
   * this is planned with MULTISET_PROJECTION and optimized into collecting
   * only the projected columns. */
  @Test void testMultisetProjection() {
    final String script = ""
        + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray,\n"
        + "    LOC:CHARARRAY);\n"
        + "B = GROUP A BY DEPTNO;\n"
        + "C = FOREACH B GENERATE A.(DEPTNO, DNAME);\n";
    final String plan = ""
        + "LogicalProject($f0=[MULTISET_PROJECTION($1, 0, 1)])\n"
        + "  LogicalProject(group=[$0], A=[$1])\n"
        + "    LogicalAggregate(group=[{0}], A=[COLLECT($1)])\n"
        + "      LogicalProject(DEPTNO=[$0], $f1=[ROW($0, $1, $2)])\n"
        + "        LogicalTableScan(table=[[scott, DEPT]])\n";
    final String optimizedPlan = ""
        + "LogicalProject($f0=[$1])\n"
        + "  LogicalAggregate(group=[{0}], agg#0=[COLLECT($1)])\n"
        + "    LogicalProject(DEPTNO=[$0], $f2=[ROW($0, $1)])\n"
        + "      LogicalTableScan(table=[[scott, DEPT]])\n";
    final String result = ""
        + "({(20,RESEARCH)})\n"
        + "({(40,OPERATIONS)})\n"
        + "({(10,ACCOUNTING)})\n"
        + "({(30,SALES)})\n";
    final String sql = ""
        + "SELECT COLLECT(ROW(DEPTNO, DNAME)) AS $f0\n"
        + "FROM scott.DEPT\n"
        + "GROUP BY DEPTNO";
    pig(script).assertRel(hasTree(plan))
        .assertOptimizedRel(hasTree(optimizedPlan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests ORDER BY: single column ascending/descending, multiple columns,
   * and the '*' form (ascending and descending); each becomes a LogicalSort. */
  @Test void testOrderBy() {
    final String scan = ""
        + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray,\n"
        + "    LOC:CHARARRAY);\n";
    final String scanPlan = "  LogicalTableScan(table=[[scott, DEPT]])\n";
    final String plan0 = "LogicalSort(sort0=[$1], dir0=[ASC])\n"
        + scanPlan;
    final String script0 = scan + "B = ORDER A BY DNAME;\n";
    final String sql0 = "SELECT *\n" +
"FROM scott.DEPT\n"
        + "ORDER BY DNAME";
    final String result0 = ""
        + "(10,ACCOUNTING,NEW YORK)\n"
        + "(40,OPERATIONS,BOSTON)\n"
        + "(20,RESEARCH,DALLAS)\n"
        + "(30,SALES,CHICAGO)\n";
    pig(script0).assertRel(hasTree(plan0))
        .assertSql(is(sql0))
        .assertResult(is(result0));

    final String plan1 = "LogicalSort(sort0=[$1], dir0=[DESC])\n"
        + scanPlan;
    final String script1 = scan + "B = ORDER A BY DNAME DESC;\n";
    final String sql1 = "SELECT *\n"
        + "FROM scott.DEPT\n"
        + "ORDER BY DNAME DESC";
    pig(script1).assertRel(hasTree(plan1))
        .assertSql(is(sql1));

    final String plan2 = ""
        + "LogicalSort(sort0=[$2], sort1=[$0], dir0=[DESC], dir1=[ASC])\n"
        + scanPlan;
    final String script2 = scan + "B = ORDER A BY LOC DESC, DEPTNO;\n";
    final String sql2 = "SELECT *\n"
        + "FROM scott.DEPT\n"
        + "ORDER BY LOC DESC, DEPTNO";
    pig(script2).assertRel(hasTree(plan2))
        .assertSql(is(sql2));

    // 'ORDER A BY *' expands to all columns in schema order.
    final String plan3 = ""
        + "LogicalSort(sort0=[$0], sort1=[$1], sort2=[$2], dir0=[ASC], dir1=[ASC], dir2=[ASC])\n"
        + scanPlan;
    final String script3 = scan + "B = ORDER A BY *;\n";
    final String sql3 = "SELECT *\n"
        + "FROM scott.DEPT\n"
        + "ORDER BY DEPTNO, DNAME, LOC";
    pig(script3).assertRel(hasTree(plan3))
        .assertSql(is(sql3));

    final String plan4 = ""
        + "LogicalSort(sort0=[$0], sort1=[$1], sort2=[$2], dir0=[DESC], dir1=[DESC], dir2=[DESC])\n"
        + scanPlan;
    final String script4 = scan + "B = ORDER A BY * DESC;\n";
    final String result4 = ""
        + "(40,OPERATIONS,BOSTON)\n"
        + "(30,SALES,CHICAGO)\n"
        + "(20,RESEARCH,DALLAS)\n"
        + "(10,ACCOUNTING,NEW YORK)\n";
    pig(script4).assertRel(hasTree(plan4))
        .assertResult(is(result4));
  }

  /** Tests RANK (gaps after ties) and RANK ... DENSE (no gaps); both become
   * window aggregates (RANK / DENSE_RANK) in a LogicalWindow after
   * optimization. */
  @Test void testRank() {
    final String base = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = FOREACH A GENERATE EMPNO, JOB, DEPTNO;\n";
    final String basePlan = ""
        + "  LogicalProject(EMPNO=[$0], JOB=[$2], DEPTNO=[$7])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n";
    final String optimizedPlan = ""
        + "LogicalProject(rank_B=[$3], EMPNO=[$0], JOB=[$1], DEPTNO=[$2])\n"
        + "  LogicalWindow(window#0=[window(order by [2, 1 DESC] "
        + "aggs [RANK()])])\n"
        + "    LogicalProject(EMPNO=[$0], JOB=[$2], DEPTNO=[$7])\n"
        + "      LogicalTableScan(table=[[scott, EMP]])\n";
    final String script = base + "C = RANK B BY DEPTNO ASC, JOB DESC;\n";
    final String plan = ""
        + "LogicalProject(rank_B=[RANK() OVER (ORDER BY $2, $1 DESC)], "
        + "EMPNO=[$0], JOB=[$1], DEPTNO=[$2])\n"
        + basePlan;
    final String result = ""
        + "(1,7839,PRESIDENT,10)\n"
        + "(2,7782,MANAGER,10)\n"
        + "(3,7934,CLERK,10)\n"
        + "(4,7566,MANAGER,20)\n"
        + "(5,7369,CLERK,20)\n"
        + "(5,7876,CLERK,20)\n"
        + "(7,7788,ANALYST,20)\n"
        + "(7,7902,ANALYST,20)\n"
        + "(9,7499,SALESMAN,30)\n"
        + "(9,7521,SALESMAN,30)\n"
        + "(9,7654,SALESMAN,30)\n"
        + "(9,7844,SALESMAN,30)\n"
        + "(13,7698,MANAGER,30)\n"
        + "(14,7900,CLERK,30)\n";
    final String sql = ""
        + "SELECT RANK() OVER (ORDER BY DEPTNO, JOB DESC RANGE BETWEEN "
        + "UNBOUNDED PRECEDING AND CURRENT ROW) AS rank_B, EMPNO, JOB, DEPTNO\n"
        + "FROM scott.EMP";
    pig(script).assertRel(hasTree(plan))
        .assertOptimizedRel(hasTree(optimizedPlan))
        .assertResult(is(result))
        .assertSql(is(sql));

    // DENSE variant: no gaps in rank values after ties.
    final String script2 = base + "C = RANK B BY DEPTNO ASC, JOB DESC DENSE;\n";
    final String optimizedPlan2 = ""
        + "LogicalProject(rank_B=[$3], EMPNO=[$0], JOB=[$1], DEPTNO=[$2])\n"
        + "  LogicalWindow(window#0=[window(order by [2, 1 DESC] "
        + "aggs [DENSE_RANK()])"
        + "])\n"
        + "    LogicalProject(EMPNO=[$0], JOB=[$2], DEPTNO=[$7])\n"
        + "      LogicalTableScan(table=[[scott, EMP]])\n";
    final String plan2 = ""
        + "LogicalProject(rank_B=[DENSE_RANK() OVER (ORDER BY $2, $1 DESC)], "
        + "EMPNO=[$0], JOB=[$1], DEPTNO=[$2])\n"
        + basePlan;
    final String result2 = ""
        + "(1,7839,PRESIDENT,10)\n"
        + "(2,7782,MANAGER,10)\n"
        + "(3,7934,CLERK,10)\n"
        + "(4,7566,MANAGER,20)\n"
        + "(5,7369,CLERK,20)\n"
        + "(5,7876,CLERK,20)\n"
        + "(6,7788,ANALYST,20)\n"
        + "(6,7902,ANALYST,20)\n"
        + "(7,7499,SALESMAN,30)\n"
        + "(7,7521,SALESMAN,30)\n" +
"(7,7654,SALESMAN,30)\n"
        + "(7,7844,SALESMAN,30)\n"
        + "(8,7698,MANAGER,30)\n"
        + "(9,7900,CLERK,30)\n";
    final String sql2 = ""
        + "SELECT DENSE_RANK() OVER (ORDER BY DEPTNO, JOB DESC RANGE BETWEEN "
        + "UNBOUNDED PRECEDING AND CURRENT ROW) AS rank_B, EMPNO, JOB, DEPTNO\n"
        + "FROM scott.EMP";
    pig(script2).assertRel(hasTree(plan2))
        .assertOptimizedRel(hasTree(optimizedPlan2))
        .assertResult(is(result2))
        .assertSql(is(sql2));
  }

  /** Tests LIMIT, both after an ORDER BY (sorted fetch) and on its own
   * (bare fetch); both become LogicalSort with a fetch attribute. */
  @Test void testLimit() {
    final String scan = ""
        + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray,\n"
        + "    LOC:CHARARRAY);\n";
    final String scanPlan = "  LogicalTableScan(table=[[scott, DEPT]])\n";
    final String plan1 = "LogicalSort(sort0=[$1], dir0=[ASC], fetch=[2])\n"
        + scanPlan;
    final String script1 = scan
        + "B = ORDER A BY DNAME;\n"
        + "C = LIMIT B 2;\n";
    final String sql1 = "SELECT *\n"
        + "FROM scott.DEPT\n"
        + "ORDER BY DNAME\n"
        + "FETCH NEXT 2 ROWS ONLY";
    final String result1 = ""
        + "(10,ACCOUNTING,NEW YORK)\n"
        + "(40,OPERATIONS,BOSTON)\n";
    pig(script1).assertRel(hasTree(plan1))
        .assertSql(is(sql1))
        .assertResult(is(result1));

    final String plan2 = "LogicalSort(fetch=[2])\n"
        + scanPlan;
    final String script2 = scan + "B = LIMIT A 2;\n";
    final String sql2 = "SELECT *\n"
        + "FROM scott.DEPT\n"
        + "FETCH NEXT 2 ROWS ONLY";
    pig(script2).assertRel(hasTree(plan2))
        .assertSql(is(sql2));
  }

  /** Tests DISTINCT; it is planned as a LogicalAggregate grouping on all
   * projected columns, and unparsed as GROUP BY. */
  @Test void testDistinct() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = FOREACH A GENERATE DEPTNO;\n"
        + "C = DISTINCT B;\n";
    final String plan = ""
        + "LogicalAggregate(group=[{0}])\n"
        + "  LogicalProject(DEPTNO=[$7])\n"
        + "    LogicalTableScan(table=[[scott, EMP]])\n";
    final String result = ""
        + "(20)\n"
        + "(10)\n"
        + "(30)\n";
    final String sql = "SELECT DEPTNO\n"
        + "FROM scott.EMP\n"
        + "GROUP BY DEPTNO";
    pig(script).assertRel(hasTree(plan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests COUNT and BigDecimalSum over grouped bags; the optimizer rewrites
   * the bag aggregates into plain COUNT/SUM on the grouped columns. */
  @Test void testAggregate() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = GROUP A BY DEPTNO;\n"
        + "C = FOREACH B GENERATE group, COUNT(A), BigDecimalSum(A.SAL);\n";
    final String plan = ""
        + "LogicalProject(group=[$0], $f1=[COUNT(PIG_BAG($1))], "
        + "$f2=[BigDecimalSum(PIG_BAG(MULTISET_PROJECTION($1, 5)))])\n"
        + "  LogicalProject(group=[$0], A=[$1])\n"
        + "    LogicalAggregate(group=[{0}], A=[COLLECT($1)])\n"
        + "      LogicalProject(DEPTNO=[$7], $f1=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n"
        + "        LogicalTableScan(table=[[scott, EMP]])\n";
    final String optimizedPlan = ""
        + "LogicalProject(group=[$0], $f1=[CAST($1):BIGINT], $f2=[CAST($2):DECIMAL(19, 0)])\n"
        + "  LogicalAggregate(group=[{0}], agg#0=[COUNT()], agg#1=[SUM($1)])\n"
        + "    LogicalProject(DEPTNO=[$7], SAL=[$5])\n"
        + "      LogicalTableScan(table=[[scott, EMP]])\n";
    final String result = ""
        + "(20,5,10875.00)\n"
        + "(10,3,8750.00)\n"
        + "(30,6,9400.00)\n";
    final String sql = ""
        + "SELECT DEPTNO AS group, CAST(COUNT(*) AS BIGINT) AS $f1, CAST(SUM(SAL) AS "
        + "DECIMAL(19, 0)) AS $f2\n"
        + "FROM scott.EMP\n"
        + "GROUP BY DEPTNO";
    pig(script).assertRel(hasTree(plan))
        .assertOptimizedRel(hasTree(optimizedPlan))
        .assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests aggregates over a compound (DEPTNO, MGR, HIREDATE) group key,
   * followed by an ORDER BY on the aggregated column. */
  @Test void testAggregate2() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = GROUP A BY (DEPTNO, MGR, HIREDATE);\n"
        + "C = FOREACH B GENERATE group, COUNT(A), SUM(A.SAL) as salSum;\n"
        + "D = ORDER C BY salSum;\n";
    final String plan = ""
        + "LogicalSort(sort0=[$2], dir0=[ASC])\n"
        + "  LogicalProject(group=[$0], $f1=[COUNT(PIG_BAG($1))], "
        + "salSum=[BigDecimalSum(PIG_BAG(MULTISET_PROJECTION($1, 5)))])\n"
        + "    LogicalProject(group=[ROW($0, $1, $2)], A=[$3])\n"
        + "      LogicalAggregate(group=[{0, 1, 2}], A=[COLLECT($3)])\n"
        + "        LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], "
        + "$f3=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n"
        + "          LogicalTableScan(table=[[scott, EMP]])\n";
    final String optimizedPlan = ""
        + "LogicalSort(sort0=[$2], dir0=[ASC])\n"
        + "  LogicalProject(group=[ROW($0, $1, $2)], $f1=[CAST($3):BIGINT], "
        + "salSum=[CAST($4):DECIMAL(19, 0)])\n"
        + "    LogicalAggregate(group=[{0, 1, 2}], agg#0=[COUNT()], agg#1=[SUM($3)])\n"
        + "      LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], SAL=[$5])\n"
        + "        LogicalTableScan(table=[[scott, EMP]])\n";
    pig(script).assertRel(hasTree(plan))
        .assertOptimizedRel(hasTree(optimizedPlan));

    final String result = ""
        + "({20, 7902, 1980-12-17},1,800.00)\n"
        + "({30, 7698, 1981-12-03},1,950.00)\n"
        + "({20, 7788, 1987-05-23},1,1100.00)\n"
        + "({30, 7698, 1981-09-28},1,1250.00)\n"
        + "({30, 7698, 1981-02-22},1,1250.00)\n"
        + "({10, 7782, 1982-01-23},1,1300.00)\n"
        + "({30, 7698, 1981-09-08},1,1500.00)\n"
        + "({30, 7698, 1981-02-20},1,1600.00)\n"
        + "({10, 7839, 1981-06-09},1,2450.00)\n"
        + "({30, 7839, 1981-01-05},1,2850.00)\n"
        + "({20, 7839, 1981-02-04},1,2975.00)\n"
        + "({20, 7566, 1981-12-03},1,3000.00)\n"
        + "({20, 7566, 1987-04-19},1,3000.00)\n"
        + "({10, null, 1981-11-17},1,5000.00)\n";
    final String sql = ""
        + "SELECT ROW(DEPTNO, MGR, HIREDATE) AS group, CAST(COUNT(*) AS "
        + "BIGINT) AS $f1, CAST(SUM(SAL) AS DECIMAL(19, 0)) AS salSum\n"
        + "FROM scott.EMP\n"
        + "GROUP BY DEPTNO, MGR, HIREDATE\n"
        + "ORDER BY CAST(SUM(SAL) AS DECIMAL(19, 0))";
    pig(script).assertResult(is(result))
        .assertSql(is(sql));
  }

  /** Tests flatten(group) with multiple aggregates (COUNT, SUM, MAX, MIN)
   * over the same grouped bag, ordered by one of the aggregates. */
  @Test void testAggregate2half() {
    final String script = ""
        + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n"
        + "    JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n"
        + "    COMM:bigdecimal, DEPTNO:int);\n"
        + "B = GROUP A BY (DEPTNO, MGR, HIREDATE);\n"
        + "C = FOREACH B GENERATE flatten(group) as (DEPTNO, MGR, HIREDATE),\n"
        + "    COUNT(A), SUM(A.SAL) as salSum, MAX(A.DEPTNO) as maxDep,\n"
        + "    MIN(A.HIREDATE) as minHire;\n"
        + "D = ORDER C BY salSum;\n";
    final
String plan = "" + "LogicalSort(sort0=[$4], dir0=[ASC])\n" + " LogicalProject(DEPTNO=[$0.DEPTNO], MGR=[$0.MGR], HIREDATE=[$0.HIREDATE], " + "$f3=[COUNT(PIG_BAG($1))], salSum=[BigDecimalSum(PIG_BAG(MULTISET_PROJECTION($1, 5)))]" + ", maxDep=[IntMax(PIG_BAG(MULTISET_PROJECTION($1, 7)))], minHire=[DateTimeMin(PIG_BAG" + "(MULTISET_PROJECTION($1, 4)))])\n" + " LogicalProject(group=[ROW($0, $1, $2)], A=[$3])\n" + " LogicalAggregate(group=[{0, 1, 2}], A=[COLLECT($3)])\n" + " LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], " + "$f3=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; final String optimizedPlan = "" + "LogicalSort(sort0=[$4], dir0=[ASC])\n" + " LogicalProject(DEPTNO=[$0], MGR=[$1], HIREDATE=[$2], $f3=[CAST($3):BIGINT], " + "salSum=[CAST($4):DECIMAL(19, 0)], maxDep=[CAST($5):INTEGER], minHire=[$6])\n" + " LogicalAggregate(group=[{0, 1, 2}], agg#0=[COUNT()" + "], agg#1=[SUM($3)], agg#2=[MAX($0)], agg#3=[MIN($2)])\n" + " LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], SAL=[$5])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; pig(script).assertRel(hasTree(plan)) .assertOptimizedRel(hasTree(optimizedPlan)); final String script2 = "" + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n" + " JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n" + " COMM:bigdecimal, DEPTNO:int);\n" + "B = GROUP A BY (DEPTNO, MGR, HIREDATE);\n" + "C = FOREACH B GENERATE group.DEPTNO, COUNT(A), SUM(A.SAL) as salSum, " + "group.MGR, MAX(A.DEPTNO) as maxDep, MIN(A.HIREDATE) as minHire;\n" + "D = ORDER C BY salSum;\n"; final String plan2 = "" + "LogicalSort(sort0=[$2], dir0=[ASC])\n" + " LogicalProject(DEPTNO=[$0.DEPTNO], $f1=[COUNT(PIG_BAG($1))], " + "salSum=[BigDecimalSum(PIG_BAG(MULTISET_PROJECTION($1, 5)))], " + "MGR=[$0.MGR], maxDep=[IntMax(PIG_BAG(MULTISET_PROJECTION($1, 7)" + "))], minHire=[DateTimeMin(PIG_BAG(MULTISET_PROJECTION($1, 4)))])\n" + " LogicalProject(group=[ROW($0, $1, $2)], A=[$3])\n" + " 
LogicalAggregate(group=[{0, 1, 2}], A=[COLLECT($3)])\n" + " LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], " + "$f3=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; final String optimizedPlan2 = "" + "LogicalSort(sort0=[$2], dir0=[ASC])\n" + " LogicalProject(DEPTNO=[$0], $f1=[CAST($3):BIGINT], salSum=[CAST($4):DECIMAL(19, 0)]" + ", MGR=[$1], maxDep=[CAST($5):INTEGER], minHire=[$6])\n" + " LogicalAggregate(group=[{0, 1, 2}], agg#0=[COUNT()], agg#1=[SUM($3)], " + "agg#2=[MAX($0)], agg#3=[MIN($2)])\n" + " LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], SAL=[$5])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; pig(script2).assertRel(hasTree(plan2)) .assertOptimizedRel(hasTree(optimizedPlan2)); } @Test void testAggregate3() { final String script = "" + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n" + " JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n" + " COMM:bigdecimal, DEPTNO:int);\n" + "B = GROUP A BY (DEPTNO, MGR, HIREDATE);\n" + "C = FOREACH B GENERATE group, COUNT(A) + 1, BigDecimalSum(A.SAL) as " + "salSum, BigDecimalSum(A.SAL) / COUNT(A) as salAvg;\n" + "D = ORDER C BY salSum;\n"; final String plan = "" + "LogicalSort(sort0=[$2], dir0=[ASC])\n" + " LogicalProject(group=[$0], $f1=[+(COUNT(PIG_BAG($1)), 1)], " + "salSum=[BigDecimalSum(PIG_BAG(MULTISET_PROJECTION($1, 5)))], " + "salAvg=[/(BigDecimalSum(PIG_BAG(MULTISET_PROJECTION($1, 5))), " + "CAST(COUNT(PIG_BAG($1))):DECIMAL(19, 0))])\n" + " LogicalProject(group=[ROW($0, $1, $2)], A=[$3])\n" + " LogicalAggregate(group=[{0, 1, 2}], A=[COLLECT($3)])\n" + " LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], " + "$f3=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; final String optimizedPlan = "" + "LogicalSort(sort0=[$2], dir0=[ASC])\n" + " LogicalProject(group=[ROW($0, $1, $2)], $f1=[+($3, 1)], salSum=[CAST($4):DECIMAL(19," + " 0)], salAvg=[/(CAST($4):DECIMAL(19, 0), CAST($3):DECIMAL(19, 0))])\n" + 
" LogicalAggregate(group=[{0, 1, 2}], agg#0=[COUNT()], agg#1=[SUM($3)])\n" + " LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], SAL=[$5])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; final String result = "" + "({20, 7902, 1980-12-17},2,800.00,800.00)\n" + "({30, 7698, 1981-12-03},2,950.00,950.00)\n" + "({20, 7788, 1987-05-23},2,1100.00,1100.00)\n" + "({30, 7698, 1981-09-28},2,1250.00,1250.00)\n" + "({30, 7698, 1981-02-22},2,1250.00,1250.00)\n" + "({10, 7782, 1982-01-23},2,1300.00,1300.00)\n" + "({30, 7698, 1981-09-08},2,1500.00,1500.00)\n" + "({30, 7698, 1981-02-20},2,1600.00,1600.00)\n" + "({10, 7839, 1981-06-09},2,2450.00,2450.00)\n" + "({30, 7839, 1981-01-05},2,2850.00,2850.00)\n" + "({20, 7839, 1981-02-04},2,2975.00,2975.00)\n" + "({20, 7566, 1981-12-03},2,3000.00,3000.00)\n" + "({20, 7566, 1987-04-19},2,3000.00,3000.00)\n" + "({10, null, 1981-11-17},2,5000.00,5000.00)\n"; final String sql = "" + "SELECT ROW(DEPTNO, MGR, HIREDATE) AS group, COUNT(*) + 1 AS $f1, CAST(SUM(SAL) AS " + "DECIMAL(19, 0)) AS salSum, CAST(SUM(SAL) AS DECIMAL(19, 0)) / CAST(COUNT(*) AS DECIMAL" + "(19, 0)) AS salAvg\n" + "FROM scott.EMP\n" + "GROUP BY DEPTNO, MGR, HIREDATE\n" + "ORDER BY CAST(SUM(SAL) AS DECIMAL(19, 0))"; pig(script).assertRel(hasTree(plan)) .assertOptimizedRel(hasTree(optimizedPlan)) .assertResult(is(result)) .assertSql(is(sql)); final String script2 = "" + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n" + " JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n" + " COMM:bigdecimal, DEPTNO:int);\n" + "B = GROUP A BY (DEPTNO, MGR, HIREDATE);\n" + "C = FOREACH B GENERATE group, COUNT(A) + 1, BigDecimalSum(A.SAL) as salSum, " + "BigDecimalSum(A.SAL) / COUNT(A) as salAvg, A;\n" + "D = ORDER C BY salSum;\n"; final String sql2 = "" + "SELECT ROW(DEPTNO, MGR, HIREDATE) AS group, COUNT(*) + 1 AS $f1, CAST(SUM(SAL) AS " + "DECIMAL(19, 0)) AS salSum, CAST(SUM(SAL) AS DECIMAL(19, 0)) / CAST(COUNT(*) AS DECIMAL" + "(19, 0)) AS salAvg, COLLECT(ROW(EMPNO, 
ENAME, JOB, MGR, HIREDATE, SAL, COMM, DEPTNO)) " + "AS A\n" + "FROM scott.EMP\n" + "GROUP BY DEPTNO, MGR, HIREDATE\n" + "ORDER BY CAST(SUM(SAL) AS DECIMAL(19, 0))"; pig(script2).assertSql(is(sql2)); final String script3 = "" + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n" + " JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n" + " COMM:bigdecimal, DEPTNO:int);\n" + "B = GROUP A BY (DEPTNO, MGR, HIREDATE);\n" + "C = FOREACH B GENERATE group, A, COUNT(A);\n"; final String sql3 = "" + "SELECT ROW(DEPTNO, MGR, HIREDATE) AS group, COLLECT(ROW(EMPNO, ENAME, " + "JOB, MGR, HIREDATE, SAL, COMM, DEPTNO)) AS A, CAST(COUNT(*) AS BIGINT) " + "AS $f2\n" + "FROM scott.EMP\n" + "GROUP BY DEPTNO, MGR, HIREDATE"; pig(script3).assertSql(is(sql3)); } @Test void testAggregate4() { final String script = "" + "A = LOAD 'scott.EMP' as (EMPNO:int, ENAME:chararray,\n" + " JOB:chararray, MGR:int, HIREDATE:datetime, SAL:bigdecimal,\n" + " COMM:bigdecimal, DEPTNO:int);\n" + "B = GROUP A BY (DEPTNO, MGR, HIREDATE);\n" + "C = FOREACH B GENERATE FLATTEN(group) as (DEPTNO, MGR, HIREDATE), " + "COUNT(A), 1L as newCol, A.COMM as comArray, SUM(A.SAL) as salSum;\n" + "D = ORDER C BY salSum;\n"; final String plan = "" + "LogicalSort(sort0=[$6], dir0=[ASC])\n" + " LogicalProject(DEPTNO=[$0.DEPTNO], MGR=[$0.MGR], HIREDATE=[$0.HIREDATE], " + "$f3=[COUNT(PIG_BAG($1))], newCol=[1:BIGINT], comArray=[MULTISET_PROJECTION($1, 6)], " + "salSum=[BigDecimalSum(PIG_BAG(MULTISET_PROJECTION($1, 5)))])\n" + " LogicalProject(group=[ROW($0, $1, $2)], A=[$3])\n" + " LogicalAggregate(group=[{0, 1, 2}], A=[COLLECT($3)])\n" + " LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], " + "$f3=[ROW($0, $1, $2, $3, $4, $5, $6, $7)])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; final String optimizedPlan = "" + "LogicalSort(sort0=[$6], dir0=[ASC])\n" + " LogicalProject(DEPTNO=[$0], MGR=[$1], HIREDATE=[$2], $f3=[CAST($3):BIGINT], " + "newCol=[1:BIGINT], comArray=[$4], salSum=[CAST($5):DECIMAL(19, 0)])\n" 
+ " LogicalAggregate(group=[{0, 1, 2}], agg#0=[COUNT()], agg#1=[COLLECT($3)], " + "agg#2=[SUM($4)])\n" + " LogicalProject(DEPTNO=[$7], MGR=[$3], HIREDATE=[$4], COMM=[$6], SAL=[$5])\n" + " LogicalTableScan(table=[[scott, EMP]])\n"; final String sql = "" + "SELECT DEPTNO, MGR, HIREDATE, CAST(COUNT(*) AS BIGINT) AS $f3, 1 AS newCol, " + "COLLECT(COMM) AS comArray, CAST(SUM(SAL) AS DECIMAL(19, 0)) AS salSum\n" + "FROM scott.EMP\n" + "GROUP BY DEPTNO, MGR, HIREDATE\n" + "ORDER BY CAST(SUM(SAL) AS DECIMAL(19, 0))"; pig(script).assertRel(hasTree(plan)) .assertOptimizedRel(hasTree(optimizedPlan)) .assertSql(is(sql)); } @Test void testCoGroup() { final String script = "" + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n" + "B = FILTER A BY DEPTNO <= 30;\n" + "C = FILTER A BY DEPTNO >= 20;\n" + "D = GROUP A BY DEPTNO + 10, B BY (int) DEPTNO, C BY (int) DEPTNO;\n" + "E = ORDER D BY $0;\n"; final String plan = "" + "LogicalSort(sort0=[$0], dir0=[ASC])\n" + " LogicalProject(group=[$0], A=[$1], B=[$2], C=[$3])\n" + " LogicalProject(DEPTNO=[CASE(IS NOT NULL($0), $0, $3)], A=[$1], B=[$2], C=[$4])\n" + " LogicalJoin(condition=[=($0, $3)], joinType=[full])\n" + " LogicalProject(DEPTNO=[CASE(IS NOT NULL($0), $0, $2)], A=[$1], B=[$3])\n" + " LogicalJoin(condition=[=($0, $2)], joinType=[full])\n" + " LogicalAggregate(group=[{0}], A=[COLLECT($1)])\n" + " LogicalProject($f0=[+($0, 10)], $f1=[ROW($0, $1, $2)])\n" + " LogicalTableScan(table=[[scott, DEPT]])\n" + " LogicalAggregate(group=[{0}], B=[COLLECT($1)])\n" + " LogicalProject(DEPTNO=[CAST($0):INTEGER], $f1=[ROW($0, $1, $2)])\n" + " LogicalFilter(condition=[<=($0, 30)])\n" + " LogicalTableScan(table=[[scott, DEPT]])\n" + " LogicalAggregate(group=[{0}], C=[COLLECT($1)])\n" + " LogicalProject(DEPTNO=[CAST($0):INTEGER], $f1=[ROW($0, $1, $2)])\n" + " LogicalFilter(condition=[>=($0, 20)])\n" + " LogicalTableScan(table=[[scott, DEPT]])\n"; final String result = "" + "(10,{},{(10,ACCOUNTING,NEW YORK)},{})\n" + 
"(20,{(10,ACCOUNTING,NEW YORK)},{(20,RESEARCH,DALLAS)},{(20,RESEARCH,DALLAS)})\n" + "(30,{(20,RESEARCH,DALLAS)},{(30,SALES,CHICAGO)},{(30,SALES,CHICAGO)})\n" + "(40,{(30,SALES,CHICAGO)},{},{(40,OPERATIONS,BOSTON)})\n" + "(50,{(40,OPERATIONS,BOSTON)},{},{})\n"; final String sql = "" + "SELECT CASE WHEN t4.DEPTNO IS NOT NULL THEN t4.DEPTNO ELSE t7.DEPTNO END " + "AS DEPTNO, t4.A, t4.B, t7.C\n" + "FROM (SELECT CASE WHEN t0.$f0 IS NOT NULL THEN t0.$f0 ELSE t3.DEPTNO END " + "AS DEPTNO, t0.A, t3.B\n" + " FROM (SELECT DEPTNO + 10 AS $f0, " + "COLLECT(ROW(DEPTNO, DNAME, LOC)) AS A\n" + " FROM scott.DEPT\n" + " GROUP BY DEPTNO + 10) AS t0\n" + " FULL JOIN (SELECT CAST(DEPTNO AS INTEGER) AS DEPTNO, " + "COLLECT(ROW(DEPTNO, DNAME, LOC)) AS B\n" + " FROM scott.DEPT\n" + " WHERE DEPTNO <= 30\n" + " GROUP BY CAST(DEPTNO AS INTEGER)) AS t3 " + "ON t0.$f0 = t3.DEPTNO) AS t4\n" + " FULL JOIN (SELECT CAST(DEPTNO AS INTEGER) AS DEPTNO, COLLECT(ROW(DEPTNO, DNAME, " + "LOC)) AS C\n" + " FROM scott.DEPT\n" + " WHERE DEPTNO >= 20\n" + " GROUP BY CAST(DEPTNO AS INTEGER)) AS t7 ON t4.DEPTNO = t7.DEPTNO\n" + "ORDER BY CASE WHEN t4.DEPTNO IS NOT NULL THEN t4.DEPTNO ELSE t7.DEPTNO END"; pig(script).assertRel(hasTree(plan)) .assertResult(is(result)) .assertSql(is(sql)); } @Test void testFlattenStrSplit() { final String script = "" + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n" + "B = FOREACH A GENERATE FLATTEN(STRSPLIT(DNAME, ',')) as NAMES;\n"; final String plan = "" + "LogicalProject(NAMES=[CAST(ITEM(STRSPLIT(PIG_TUPLE($1, ',')), 1)):BINARY(1)])\n" + " LogicalTableScan(table=[[scott, DEPT]])\n"; final String sql = "" + "SELECT CAST(STRSPLIT(PIG_TUPLE(DNAME, ','))[1] AS BINARY(1)) AS NAMES\n" + "FROM scott.DEPT"; pig(script).assertRel(hasTree(plan)) .assertSql(is(sql)); } @Test void testMultipleStores() { final String script = "" + "A = LOAD 'scott.DEPT' as (DEPTNO:int, DNAME:chararray, LOC:CHARARRAY);\n" + "B = FILTER A BY DEPTNO <= 30;\n" + "STORE B into 
'output.csv';\n" + "C = FILTER A BY DEPTNO >= 20;\n" + "STORE C into 'output1.csv';\n"; final String plan = "" + "LogicalFilter(condition=[<=($0, 30)])\n" + " LogicalTableScan(table=[[scott, DEPT]])\n"; final String sql0 = "" + "SELECT *\n" + "FROM scott.DEPT\n" + "WHERE DEPTNO <= 30"; final String sql1 = "" + "SELECT *\n" + "FROM scott.DEPT\n" + "WHERE DEPTNO >= 20"; pig(script).assertRel(hasTree(plan)) .assertSql(is(sql0), 0) .assertSql(is(sql1), 1); } @Test void testRankAndFilter() { final String script = "" + "A = LOAD 'emp1' USING PigStorage(',') as (" + " id:int, name:chararray, age:int, city:chararray);\n" + "B = rank A;\n" + "C = FILTER B by ($0 > 1);"; final String plan = "" + "LogicalFilter(condition=[>($0, 1)])\n" + " LogicalProject(rank_A=[RANK() OVER ()], id=[$0]," + " name=[$1], age=[$2], city=[$3])\n" + " LogicalTableScan(table=[[emp1]])\n"; final String sql = "SELECT w0$o0 AS rank_A, id, name, age, city\n" + "FROM (SELECT id, name, age, city, RANK() OVER (RANGE BETWEEN " + "UNBOUNDED PRECEDING AND CURRENT ROW)\n" + " FROM emp1) AS t\n" + "WHERE w0$o0 > 1"; pig(script).assertRel(hasTree(plan)) .assertSql(is(sql)); } }
apache-2.0
guci314/DataNucleusJpaSample
src/main/java/org/datanucleus/samples/jpa/tutorial/Product.java
3305
/********************************************************************** Copyright (c) 2006 Andy Jefferson and others. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Contributors: ... **********************************************************************/ package org.datanucleus.samples.jpa.tutorial; import javax.persistence.Basic; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Id; import javax.persistence.Inheritance; import javax.persistence.InheritanceType; /** * Definition of a Product * Represents a product, and contains the key aspects of the item. * * @version $Revision: 1.3 $ **/ @Entity @Inheritance(strategy=InheritanceType.TABLE_PER_CLASS) public class Product { /** Id for the product. */ @Id protected long id; /** Name of the Product. */ @Basic protected String name=null; /** Description of the Product. */ @Basic protected String description=null; /** Price of the Product. */ @Basic @Column (name="THE_PRICE") protected double price=0.0; /** * Default constructor. */ protected Product() { } /** * Constructor. * @param name name of product * @param description description of product * @param price Price **/ public Product(String name, String description, double price) { this.name = name; this.description = description; this.price = price; } /** * Accessor for the name of the product. * @return Name of the product. */ public String getName() { return name; } /** * Accessor for the description of the product. 
* @return Description of the product. */ public String getDescription() { return description; } /** * Accessor for the price of the product. * @return Price of the product. */ public double getPrice() { return price; } /** * Accessor for the id * @return The identity */ public long getId() { return id; } /** * Mutator for the name of the product. * @param name Name of the product. */ public void setName(String name) { this.name = name; } /** * Mutator for the description of the product. * @param description Description of the product. */ public void setDescription(String description) { this.description = description; } /** * Mutator for the price of the product. * @param price price of the product. */ public void setPrice(double price) { this.price = price; } public String toString() { return "Product : " + name + " [" + description + "]"; } }
apache-2.0
options-util/options-util
src/test/java/org/plukh/options/interfaces/TransientUnsupportedClassTestOptions.java
952
/*
 * Copyright 2012-2014 by Victor Denisov (vdenisov@plukh.org).
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.plukh.options.interfaces;

import org.plukh.options.Option;
import org.plukh.options.Options;

import java.io.File;

/**
 * Test fixture: an {@link Options} interface whose single option is declared
 * {@code transientOption = true} but typed as {@link File}.
 * NOTE(review): judging by the class name, {@code File} is presumably a class
 * the options framework does not support for transient options, so this
 * interface is expected to trigger an error in a test — confirm against the
 * test that instantiates it.
 */
public interface TransientUnsupportedClassTestOptions extends Options {
    /** Transient option accessor; the {@link File} return type is the point of the fixture. */
    @Option(transientOption = true)
    File getFile();

    /** Mutator paired with {@link #getFile()}. */
    void setFile(File file);
}
apache-2.0
bmwcarit/joynr
java/core/libjoynr/src/main/java/io/joynr/dispatching/RequestCaller.java
1828
/*
 * #%L
 * %%
 * Copyright (C) 2011 - 2017 BMW Car IT GmbH
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package io.joynr.dispatching;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import io.joynr.provider.AbstractJoynrProvider;
import io.joynr.provider.CallContext;
import io.joynr.provider.JoynrProvider;

/**
 * Wraps a provider object and its proxy so that incoming requests can be
 * dispatched reflectively onto the proxy, optionally propagating a
 * {@link CallContext} when the provider is an {@link AbstractJoynrProvider}.
 */
public class RequestCaller implements JoynrProvider {
    private final Object provider;
    private final Object proxy;

    public RequestCaller(Object proxy, Object provider) {
        this.provider = provider;
        this.proxy = proxy;
    }

    /**
     * Publishes the given call context for the current invocation. Only takes
     * effect when the wrapped provider is an {@link AbstractJoynrProvider} and
     * the context is non-null; otherwise it is a no-op.
     */
    public void setContext(CallContext context) {
        if (context == null || !(provider instanceof AbstractJoynrProvider)) {
            return;
        }
        AbstractJoynrProvider.setCallContext(context);
    }

    /** Clears any previously published call context (no-op for plain providers). */
    public void removeContext() {
        boolean contextAware = provider instanceof AbstractJoynrProvider;
        if (contextAware) {
            AbstractJoynrProvider.removeCallContext();
        }
    }

    /**
     * Invokes the given method reflectively on the proxy with the supplied
     * arguments and returns whatever the method returns.
     */
    public Object invoke(Method method, Object[] params) throws IllegalAccessException,
                                                        IllegalArgumentException, InvocationTargetException {
        final Object result = method.invoke(proxy, params);
        return result;
    }

    /** Exposes the wrapped proxy object. */
    public Object getProxy() {
        return proxy;
    }
}
apache-2.0
immutables/immutables
value-processor/src/org/immutables/value/processor/meta/LongBits.java
4078
/*
   Copyright 2014 Immutables Authors and Contributors

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */
package org.immutables.value.processor.meta;

import com.google.common.base.Function;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimaps;
import com.google.common.primitives.Longs;
import java.util.Collection;
import java.util.IdentityHashMap;
import java.util.Set;
import javax.annotation.Nullable;
import static com.google.common.base.Preconditions.checkArgument;

/**
 * Structure to calculate bit packing: assigns each input element a bit position
 * inside a sequence of 64-bit longs, so that a set of elements can be tracked as
 * a compact bit mask spread over one or more long fields.
 */
public final class LongBits implements Function<Iterable<? extends Object>, LongBits.LongPositions> {
  // 8 bytes * 8 bits = 64 addressable bits per long
  private static final int BITS_IN_LONG = Longs.BYTES * Byte.SIZE;

  /** Packs the elements using the full 64 bits of each long. */
  @Override
  public LongPositions apply(Iterable<? extends Object> input) {
    return forIterable(input, BITS_IN_LONG);
  }

  /**
   * Packs the elements using at most {@code bitPerLong} bits of each long
   * (must be &lt;= 64; validated in the {@link LongPositions} constructor).
   */
  public LongPositions forIterable(Iterable<? extends Object> input, int bitPerLong) {
    return new LongPositions(input, bitPerLong);
  }

  /**
   * Mapping from element to its {@link BitPosition}. Elements are compared by
   * identity (backed by an {@link IdentityHashMap}), so equal-but-distinct
   * objects get distinct positions.
   */
  public static final class LongPositions implements Function<Object, BitPosition> {
    private final IdentityHashMap<Object, BitPosition> positions = Maps.newIdentityHashMap();
    private final ImmutableList<Object> elements;
    // long index -> the set of bit positions that live in that long
    private final ImmutableMap<Integer, LongSet> longPositions;

    LongPositions(Iterable<? extends Object> elements, final int bitPerLong) {
      this.elements = ImmutableList.copyOf(elements);
      checkArgument(bitPerLong <= BITS_IN_LONG, bitPerLong);
      // Element i occupies bit (i % bitPerLong) of long number (i / bitPerLong),
      // in iteration order of the input.
      for (int i = 0; i < this.elements.size(); i++) {
        positions.put(
            this.elements.get(i),
            new BitPosition(
                i / bitPerLong,
                i % bitPerLong));
      }
      // Group the computed positions by their long index, then wrap each group
      // in a LongSet; ImmutableSortedMap keeps the long indices in ascending order.
      this.longPositions = ImmutableSortedMap.copyOf(
          Maps.transformEntries(
              Multimaps.index(positions.values(), ToLongIndex.FUNCTION).asMap(),
              new Maps.EntryTransformer<Integer, Collection<BitPosition>, LongSet>() {
                @Override
                public LongSet transformEntry(Integer key, Collection<BitPosition> position) {
                  return new LongSet(key, position);
                }
              }));
    }

    // NOTE(review): method name misspells "indices"; kept as-is because it is
    // part of the public API (likely referenced from code-generation templates).
    public Set<Integer> longsIndeces() {
      return longPositions.keySet();
    }

    /** All long slots in ascending index order. */
    public Collection<LongSet> longs() {
      return longPositions.values();
    }

    /** Returns the bit position for the element, or null if it was not packed. */
    @Nullable
    @Override
    public BitPosition apply(Object input) {
      return positions.get(input);
    }
  }

  /** One long slot: its index in the sequence and the OR of all member bit masks. */
  public static final class LongSet {
    public final int index;
    public final long occupation;
    public final Iterable<BitPosition> positions;

    LongSet(int index, Iterable<BitPosition> positions) {
      this.index = index;
      this.positions = ImmutableList.copyOf(positions);
      this.occupation = computeOccupation();
    }

    // OR together the single-bit masks of every position in this long
    private long computeOccupation() {
      long occupation = 0;
      for (BitPosition position : this.positions) {
        occupation |= position.mask;
      }
      return occupation;
    }
  }

  /** Extracts the long index of a bit position, for grouping with Multimaps.index. */
  enum ToLongIndex implements Function<BitPosition, Integer> {
    FUNCTION;
    @Override
    public Integer apply(BitPosition input) {
      return input.index;
    }
  }

  /** A single bit: which long it lives in ({@code index}), which bit ({@code bit}), and its mask. */
  public static final class BitPosition {
    public final int index;
    public final int bit;
    public final long mask;

    BitPosition(int index, int bit) {
      this.index = index;
      this.bit = bit;
      this.mask = 1L << bit;
    }
  }
}
apache-2.0
sduskis/cloud-bigtable-client
bigtable-dataflow-parent/bigtable-beam-import/src/test/java/com/google/cloud/bigtable/beam/it/CloudBigtableBeamITTest.java
10984
/*
 * Copyright 2019 Google LLC. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.bigtable.beam.it;

import static com.google.bigtable.repackaged.com.google.cloud.bigtable.config.BigtableOptions.BIGTABLE_ADMIN_HOST_DEFAULT;
import static com.google.bigtable.repackaged.com.google.cloud.bigtable.config.BigtableOptions.BIGTABLE_BATCH_DATA_HOST_DEFAULT;
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_ADMIN_HOST_KEY;
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.BIGTABLE_HOST_KEY;
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.INSTANCE_ID_KEY;
import static com.google.cloud.bigtable.hbase.BigtableOptionsFactory.PROJECT_ID_KEY;

import com.google.bigtable.repackaged.com.google.common.base.Preconditions;
import com.google.cloud.bigtable.beam.CloudBigtableIO;
import com.google.cloud.bigtable.beam.CloudBigtableScanConfiguration;
import com.google.cloud.bigtable.beam.CloudBigtableTableConfiguration;
import com.google.cloud.bigtable.config.Logger;
import com.google.cloud.bigtable.hbase.BigtableConfiguration;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.beam.runners.dataflow.DataflowRunner;
import org.apache.beam.runners.dataflow.options.DataflowPipelineOptions;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.PipelineResult;
import org.apache.beam.sdk.io.Read;
import org.apache.beam.sdk.metrics.Counter;
import org.apache.beam.sdk.metrics.Metrics;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Count;
import org.apache.beam.sdk.transforms.Create;
import org.apache.beam.sdk.transforms.DoFn;
import org.apache.beam.sdk.transforms.ParDo;
import org.apache.beam.sdk.values.PCollection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
import org.apache.hadoop.hbase.shaded.org.apache.commons.lang.RandomStringUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * This class contains integration test for Beam Dataflow. It creates dataflow pipelines that
 * perform the following task using pipeline chain process:
 *
 * <pre>
 * <ol>
 *   <li> Creates records and performs a Bigtable Put on each record.
 *   <li> Creates Scan and perform count for each Row of Bigtable.
 * </ol>
 * </pre>
 *
 * Arguments to configure in this integration test. The first four arguments are required when
 * running the test case on Google Cloud Platform:
 * -Dgoogle.bigtable.project.id=[bigtable project] \\
 * -Dgoogle.bigtable.instance.id=[bigtable instance id] \\
 * -DdataflowStagingLocation=gs://[your google storage bucket] \\
 * -DdataflowZoneId=[dataflow zone Id] \\
 *
 * <p>These options are optional; if not provided they fall back to defaults:
 * -Dgoogle.bigtable.endpoint.host=[bigtable batch host] \\
 * -Dgoogle.bigtable.admin.endpoint.host=[bigtable admin host] \\
 * -DtableName=[tableName to be used] \\
 * -Dtotal_row_count=[number of rows to write and read] \\
 * -Dprefix_count=[cell prefix count] \\
 */
@RunWith(JUnit4.class)
public class CloudBigtableBeamITTest {

  private final Logger LOG = new Logger(getClass());

  // System-property keys for the required Dataflow arguments.
  private static final String STAGING_LOCATION_KEY = "dataflowStagingLocation";
  private static final String ZONE_ID_KEY = "dataflowZoneId";

  // All test parameters come from system properties; the first four are
  // mandatory and validated in setUpConfiguration().
  private static final String projectId = System.getProperty(PROJECT_ID_KEY);
  private static final String instanceId = System.getProperty(INSTANCE_ID_KEY);
  private static final String stagingLocation = System.getProperty(STAGING_LOCATION_KEY);
  private static final String zoneId = System.getProperty(ZONE_ID_KEY);

  private static final String workerMachineType =
      System.getProperty("workerMachineType", "n1" + "-standard-8");

  // Data/admin endpoints default to the Bigtable batch and admin hosts.
  private static final String dataEndpoint =
      System.getProperty(BIGTABLE_HOST_KEY, BIGTABLE_BATCH_DATA_HOST_DEFAULT);
  private static final String adminEndpoint =
      System.getProperty(BIGTABLE_ADMIN_HOST_KEY, BIGTABLE_ADMIN_HOST_DEFAULT);

  private static final String TABLE_NAME_STR =
      System.getProperty("tableName", "BeamCloudBigtableIOIntegrationTest");
  private static final TableName TABLE_NAME = TableName.valueOf(TABLE_NAME_STR);
  private static final byte[] FAMILY = Bytes.toBytes("test-family");
  private static final byte[] QUALIFIER = Bytes.toBytes("test-qualifier");

  // Sizing knobs: bytes per cell, total rows written, and how many distinct
  // row-key prefixes those rows are spread across.
  private static final int CELL_SIZE = Integer.getInteger("cell_size", 1_00);
  private static final long TOTAL_ROW_COUNT = Integer.getInteger("total_row_count", 10_000);
  private static final int PREFIX_COUNT = Integer.getInteger("prefix_count", 1_00);

  /** Fails fast if any of the four mandatory system properties is missing. */
  @BeforeClass
  public static void setUpConfiguration() {
    Preconditions.checkArgument(stagingLocation != null, "Set -D" + STAGING_LOCATION_KEY + ".");
    Preconditions.checkArgument(zoneId != null, "Set -D" + ZONE_ID_KEY + ".");
    Preconditions.checkArgument(projectId != null, "Set -D" + PROJECT_ID_KEY + ".");
    Preconditions.checkArgument(instanceId != null, "Set -D" + INSTANCE_ID_KEY + ".");
  }

  /** Recreates the test table before each test, dropping any previous contents. */
  @Before
  public void setUp() throws IOException {
    Configuration config = BigtableConfiguration.configure(projectId, instanceId);
    config.set(BIGTABLE_HOST_KEY, dataEndpoint);
    config.set(BIGTABLE_ADMIN_HOST_KEY, adminEndpoint);
    try (Connection conn = BigtableConfiguration.connect(config);
        Admin admin = conn.getAdmin()) {
      if (admin.tableExists(TABLE_NAME)) {
        admin.deleteTable(TABLE_NAME);
      }
      admin.createTable(new HTableDescriptor(TABLE_NAME).addFamily(new HColumnDescriptor(FAMILY)));
      LOG.info("Created a table to perform batching: %s", TABLE_NAME);
    }
  }

  /**
   * For each input prefix, emits TOTAL_ROW_COUNT / PREFIX_COUNT Puts keyed
   * "prefix_i" with one random-valued cell, counting every emitted Put in the
   * "sent_puts" metric.
   */
  private static final DoFn<String, Mutation> WRITE_ONE_TENTH_PERCENT =
      new DoFn<String, Mutation>() {
        private static final long serialVersionUID = 1L;

        private Counter rowCounter = Metrics.counter(CloudBigtableBeamITTest.class, "sent_puts");

        @ProcessElement
        public void processElement(ProcessContext context) throws Exception {
          String prefix = context.element() + "_";
          int max = (int) (TOTAL_ROW_COUNT / PREFIX_COUNT);
          for (int i = 0; i < max; i++) {
            rowCounter.inc();
            context.output(
                new Put(Bytes.toBytes(prefix + i))
                    .addColumn(FAMILY, QUALIFIER, createRandomValue()));
          }
        }
      };

  /**
   * Runs a Dataflow pipeline that writes TOTAL_ROW_COUNT rows (PREFIX_COUNT
   * random prefixes fanned out by WRITE_ONE_TENTH_PERCENT) into the test table
   * and blocks until the job finishes, asserting it completed DONE.
   */
  private void testWriteToBigtable() {
    DataflowPipelineOptions options = createOptions();
    options.setAppName("testWriteToBigtable-" + System.currentTimeMillis());
    LOG.info("Started writeToBigtable test with jobName as: %s", options.getAppName());

    CloudBigtableTableConfiguration config =
        new CloudBigtableTableConfiguration.Builder()
            .withProjectId(projectId)
            .withInstanceId(instanceId)
            .withTableId(TABLE_NAME.getNameAsString())
            .withConfiguration(BIGTABLE_ADMIN_HOST_KEY, adminEndpoint)
            .withConfiguration(BIGTABLE_HOST_KEY, dataEndpoint)
            .build();

    List<String> keys = new ArrayList<>();
    for (int i = 0; i < PREFIX_COUNT; i++) {
      keys.add(RandomStringUtils.randomAlphanumeric(10));
    }

    PipelineResult.State result =
        Pipeline.create(options)
            .apply("Keys", Create.of(keys))
            .apply("Create Puts", ParDo.of(WRITE_ONE_TENTH_PERCENT))
            .apply("Write to BT", CloudBigtableIO.writeToTable(config))
            .getPipeline()
            .run()
            .waitUntilFinish();

    Assert.assertEquals(PipelineResult.State.DONE, result);
  }

  /**
   * Builds (but does not run) a pipeline that scans the test table with a
   * FirstKeyOnlyFilter, counts the rows, and PAsserts the count equals
   * TOTAL_ROW_COUNT. The caller runs the returned pipeline.
   */
  private Pipeline testReadFromBigtable() {
    PipelineOptions options = createOptions();
    options.setJobName("testReadFromBigtable-" + System.currentTimeMillis());
    LOG.info("Started readFromBigtable test with jobName as: %s", options.getJobName());

    Scan scan = new Scan();
    // Only the first cell per row is needed for counting rows.
    scan.setFilter(new FirstKeyOnlyFilter());

    CloudBigtableScanConfiguration config =
        new CloudBigtableScanConfiguration.Builder()
            .withProjectId(projectId)
            .withInstanceId(instanceId)
            .withTableId(TABLE_NAME.getNameAsString())
            .withScan(scan)
            .withConfiguration(BIGTABLE_ADMIN_HOST_KEY, adminEndpoint)
            .withConfiguration(BIGTABLE_HOST_KEY, dataEndpoint)
            .build();

    Pipeline pipeLine = Pipeline.create(options);
    PCollection<Long> count =
        pipeLine
            .apply("Read from BT", Read.from(CloudBigtableIO.read(config)))
            .apply("Count", Count.<Result>globally());

    PAssert.thatSingleton(count).isEqualTo(TOTAL_ROW_COUNT);
    return pipeLine;
  }

  /** End-to-end test: write pipeline first, then the read/count pipeline. */
  @Test
  public void testRunner() {
    try {
      // Submitted write pipeline to mutate the Bigtable.
      testWriteToBigtable();
      Pipeline result = testReadFromBigtable();
      PipelineResult.State readJobStatue = result.run().waitUntilFinish();
      Assert.assertEquals(PipelineResult.State.DONE, readJobStatue);
    } catch (Exception ex) {
      ex.printStackTrace();
      throw new AssertionError("Exception occurred while pipeline execution");
    }
  }

  /** Returns CELL_SIZE random bytes for a cell value. */
  private static byte[] createRandomValue() {
    byte[] bytes = new byte[CELL_SIZE];
    new Random().nextBytes(bytes);
    return bytes;
  }

  /** Common Dataflow options: project, zone, staging/temp GCS paths, runner, machine type. */
  private DataflowPipelineOptions createOptions() {
    DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
    options.setProject(projectId);
    options.setZone(zoneId);
    options.setStagingLocation(stagingLocation + "/stage");
    options.setTempLocation(stagingLocation + "/temp");
    options.setRunner(DataflowRunner.class);
    options.setWorkerMachineType(workerMachineType);
    return options;
  }
}
apache-2.0
DebalinaDey/AuraDevelopDeb
aura-impl/src/main/java/org/auraframework/impl/javascript/controller/JavascriptActionDef.java
2770
/*
 * Copyright (C) 2013 salesforce.com, inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.auraframework.impl.javascript.controller;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

import org.auraframework.def.ActionDef;
import org.auraframework.def.DefDescriptor;
import org.auraframework.def.TypeDef;
import org.auraframework.def.ValueDef;
import org.auraframework.impl.system.DefinitionImpl;
import org.auraframework.system.SubDefDescriptor;
import org.auraframework.util.json.JsFunction;
import org.auraframework.util.json.Json;

/**
 * A javascript (client-side) action definition. Can't be instantiated server side; the server
 * only holds the function body and serializes it to the client.
 */
public class JavascriptActionDef extends DefinitionImpl<ActionDef> implements ActionDef {
    private static final long serialVersionUID = 2121724820799466774L;

    // The raw client-side function; its name is rewritten in the constructor below.
    private final JsFunction function;

    protected JavascriptActionDef(Builder builder) {
        super(builder);
        this.function = builder.function;
        // Qualify the JS function name as "<namespace>$<parentName>_<actionName>" so the
        // serialized function is uniquely named across components on the client.
        SubDefDescriptor<?, ?> desc = (SubDefDescriptor<?, ?>) descriptor;
        function.setName(String.format("%s$%s_%s", desc.getParentDescriptor().getNamespace(), desc
                .getParentDescriptor().getName(), desc.getName()));
    }

    /** Always a CLIENT action: this definition represents browser-side javascript. */
    @Override
    public ActionType getActionType() {
        return ActionType.CLIENT;
    }

    /** @return always empty; see comment below on why parameters are not inferred. */
    @Override
    public List<ValueDef> getParameters() {
        // if we do allow extra params, they must somehow be annotated as we
        // have no way to infer the type from the code
        return Collections.emptyList();
    }

    /** @return always null; a client action's return type cannot be inferred from its code. */
    @Override
    public DefDescriptor<TypeDef> getReturnType() {
        // same as above. probably shouldn't have a return value
        return null;
    }

    /** Serializes only the (renamed) function body to the client. */
    @Override
    public void serialize(Json json) throws IOException {
        json.writeValue(function);
    }

    /** Builder used by the parser to assemble a JavascriptActionDef. */
    public static class Builder extends DefinitionImpl.BuilderImpl<ActionDef> {

        public Builder() {
            super(ActionDef.class);
        }

        // The parsed client-side function; consumed (and renamed) by the JavascriptActionDef ctor.
        public JsFunction function;

        @Override
        public JavascriptActionDef build() {
            return new JavascriptActionDef(this);
        }
    }

    /** @return always empty; client actions log no parameters server side. */
    @Override
    public List<String> getLoggableParams() {
        return Collections.emptyList();
    }
}
apache-2.0
luanalbineli/baking-time
app/src/main/java/com/udacity/bakingtime/recipedetail/ingredientlistdialog/IngredientListDialog.java
3396
package com.udacity.bakingtime.recipedetail.ingredientlistdialog;

import android.app.DialogFragment;
import android.app.Fragment;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.widget.TextView;

import com.udacity.bakingtime.R;
import com.udacity.bakingtime.model.RecipeIngredientModel;
import com.udacity.bakingtime.recipedetail.ingredientlist.RecipeIngredientListFragment;

import java.security.InvalidParameterException;
import java.util.ArrayList;
import java.util.List;

import butterknife.ButterKnife;

/**
 * Dialog that shows a recipe's ingredient list by embedding a
 * {@link RecipeIngredientListFragment} as a child fragment.
 * Create instances via {@link #getInstance(List)} so the ingredient list travels in the
 * arguments Bundle and survives recreation.
 */
public class IngredientListDialog extends DialogFragment {
    // Ingredients to display; restored from the arguments Bundle in onCreate().
    private List<RecipeIngredientModel> mRecipeIngredientList;
    // Optional external listener, forwarded from onDismiss().
    private DialogInterface.OnDismissListener onDismissListener;

    /** Registers a listener notified when this dialog is dismissed (may be null to clear). */
    public void setOnDismissListener(DialogInterface.OnDismissListener onDismissListener) {
        this.onDismissListener = onDismissListener;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // The ingredient list is mandatory: fail fast if the dialog was built without getInstance().
        if (getArguments() == null || !getArguments().containsKey(INGREDIENT_LIST_BUNDLE_KEY)) {
            throw new InvalidParameterException(INGREDIENT_LIST_BUNDLE_KEY);
        }

        mRecipeIngredientList = getArguments().getParcelableArrayList(INGREDIENT_LIST_BUNDLE_KEY);
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, Bundle savedInstanceState) {
        // Drop the default dialog title bar; the layout supplies its own chrome.
        if (getDialog() != null) {
            getDialog().requestWindowFeature(Window.FEATURE_NO_TITLE);
        }
        return inflater.inflate(R.layout.ingredient_list_dialog, container, false);
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);

        // Only add the child fragment once; on recreation the fragment manager restores it by tag.
        Fragment fragment = getChildFragmentManager().findFragmentByTag(INGREDIENT_LIST_FRAGMENT_TAG);
        if (fragment == null) {
            RecipeIngredientListFragment recipeIngredientListFragment = RecipeIngredientListFragment.getInstance(mRecipeIngredientList);
            getChildFragmentManager().beginTransaction()
                    .add(R.id.flIngredientListDialogContainer, recipeIngredientListFragment, INGREDIENT_LIST_FRAGMENT_TAG)
                    .commit();
        }

        TextView closeButton = ButterKnife.findById(view, R.id.btQuantityPickerDialogConfirm);
        closeButton.setOnClickListener(v -> dismiss());
    }

    @Override
    public void onDismiss(DialogInterface dialog) {
        super.onDismiss(dialog);
        // Forward dismissal to the externally registered listener, if any.
        if (onDismissListener != null) {
            onDismissListener.onDismiss(dialog);
        }
    }

    /**
     * Factory method: packs the ingredient list into the arguments Bundle so it is retained
     * across configuration changes.
     *
     * @param recipeIngredientList ingredients to display (copied into an ArrayList for the Bundle)
     * @return a configured dialog instance
     */
    public static IngredientListDialog getInstance(List<RecipeIngredientModel> recipeIngredientList) {
        Bundle arguments = new Bundle();
        arguments.putParcelableArrayList(INGREDIENT_LIST_BUNDLE_KEY, new ArrayList<>(recipeIngredientList));

        IngredientListDialog instance = new IngredientListDialog();
        instance.setArguments(arguments);

        return instance;
    }

    private static final String INGREDIENT_LIST_FRAGMENT_TAG = "ingredient_list_fragment_tag";
    private static final String INGREDIENT_LIST_BUNDLE_KEY = "ingredient_list_bundle_key";
}
apache-2.0
onhate/schemorger
src/main/java/org/schema/WPSideBar.java
223
package org.schema; /** * * A sidebar section of the page. * * @fullPath Thing > CreativeWork > WebPageElement > WPSideBar * * @author Texelz (by Onhate) * */ public class WPSideBar extends WebPageElement { }
apache-2.0
googleads/google-ads-java
google-ads-stubs-v10/src/main/java/com/google/ads/googleads/v10/resources/AdGroupCriterionCustomizerOrBuilder.java
5365
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/resources/ad_group_criterion_customizer.proto

package com.google.ads.googleads.v10.resources;

// NOTE(review): machine-generated file — do not hand-edit. To change it, edit the .proto
// named above and regenerate with protoc.
public interface AdGroupCriterionCustomizerOrBuilder extends
    // @@protoc_insertion_point(interface_extends:google.ads.googleads.v10.resources.AdGroupCriterionCustomizer)
    com.google.protobuf.MessageOrBuilder {

  /**
   * <pre>
   * Immutable. The resource name of the ad group criterion customizer.
   * Ad group criterion customizer resource names have the form:
   * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}`
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The resourceName.
   */
  java.lang.String getResourceName();
  /**
   * <pre>
   * Immutable. The resource name of the ad group criterion customizer.
   * Ad group criterion customizer resource names have the form:
   * `customers/{customer_id}/adGroupCriterionCustomizers/{ad_group_id}~{criterion_id}~{customizer_attribute_id}`
   * </pre>
   *
   * <code>string resource_name = 1 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for resourceName.
   */
  com.google.protobuf.ByteString getResourceNameBytes();

  /**
   * <pre>
   * Immutable. The ad group criterion to which the customizer attribute is linked.
   * It must be a keyword criterion.
   * </pre>
   *
   * <code>optional string ad_group_criterion = 2 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return Whether the adGroupCriterion field is set.
   */
  boolean hasAdGroupCriterion();
  /**
   * <pre>
   * Immutable. The ad group criterion to which the customizer attribute is linked.
   * It must be a keyword criterion.
   * </pre>
   *
   * <code>optional string ad_group_criterion = 2 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The adGroupCriterion.
   */
  java.lang.String getAdGroupCriterion();
  /**
   * <pre>
   * Immutable. The ad group criterion to which the customizer attribute is linked.
   * It must be a keyword criterion.
   * </pre>
   *
   * <code>optional string ad_group_criterion = 2 [(.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for adGroupCriterion.
   */
  com.google.protobuf.ByteString getAdGroupCriterionBytes();

  /**
   * <pre>
   * Required. Immutable. The customizer attribute which is linked to the ad group criterion.
   * </pre>
   *
   * <code>string customizer_attribute = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The customizerAttribute.
   */
  java.lang.String getCustomizerAttribute();
  /**
   * <pre>
   * Required. Immutable. The customizer attribute which is linked to the ad group criterion.
   * </pre>
   *
   * <code>string customizer_attribute = 3 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE, (.google.api.resource_reference) = { ... }</code>
   * @return The bytes for customizerAttribute.
   */
  com.google.protobuf.ByteString getCustomizerAttributeBytes();

  /**
   * <pre>
   * Output only. The status of the ad group criterion customizer.
   * </pre>
   *
   * <code>.google.ads.googleads.v10.enums.CustomizerValueStatusEnum.CustomizerValueStatus status = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The enum numeric value on the wire for status.
   */
  int getStatusValue();
  /**
   * <pre>
   * Output only. The status of the ad group criterion customizer.
   * </pre>
   *
   * <code>.google.ads.googleads.v10.enums.CustomizerValueStatusEnum.CustomizerValueStatus status = 4 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
   * @return The status.
   */
  com.google.ads.googleads.v10.enums.CustomizerValueStatusEnum.CustomizerValueStatus getStatus();

  /**
   * <pre>
   * Required. The value to associate with the customizer attribute at this level. The
   * value must be of the type specified for the CustomizerAttribute.
   * </pre>
   *
   * <code>.google.ads.googleads.v10.common.CustomizerValue value = 5 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return Whether the value field is set.
   */
  boolean hasValue();
  /**
   * <pre>
   * Required. The value to associate with the customizer attribute at this level. The
   * value must be of the type specified for the CustomizerAttribute.
   * </pre>
   *
   * <code>.google.ads.googleads.v10.common.CustomizerValue value = 5 [(.google.api.field_behavior) = REQUIRED];</code>
   * @return The value.
   */
  com.google.ads.googleads.v10.common.CustomizerValue getValue();
  /**
   * <pre>
   * Required. The value to associate with the customizer attribute at this level. The
   * value must be of the type specified for the CustomizerAttribute.
   * </pre>
   *
   * <code>.google.ads.googleads.v10.common.CustomizerValue value = 5 [(.google.api.field_behavior) = REQUIRED];</code>
   */
  com.google.ads.googleads.v10.common.CustomizerValueOrBuilder getValueOrBuilder();
}
apache-2.0
brunocvcunha/mdmconnectors
mdm-rest-client/src/main/java/com/totvslabs/mdm/restclient/command/CommandListDatasource.java
1178
package com.totvslabs.mdm.restclient.command; import java.util.HashMap; import java.util.Map; import com.totvslabs.mdm.restclient.vo.CommandTypeEnum; import com.totvslabs.mdm.restclient.vo.EnvelopeVO; public class CommandListDatasource implements ICommand, AuthenticationRequired { private String tenantId; public CommandListDatasource(String domain) { this.tenantId = domain; } @Override public Map<String, String> getParametersHeader() { Map<String, String> parameters = new HashMap<String, String>(); parameters.put("tenant", this.tenantId); return parameters; } @Override public void processReturn() { } @Override public String getCommandURL() { return "api/v1/dataSources?offset=0&pageSize=10&sortBy=_mdmId&sortOrder=ASC"; } @SuppressWarnings({ "rawtypes", "unchecked" }) @Override public Class getResponseType() { return EnvelopeVO.class; } @Override public Map<String, String> getParameterPath() { return null; } @Override public CommandTypeEnum getType() { return CommandTypeEnum.GET; } @Override public Object getData() { return null; } @Override public Map<String, String> getFormData() { return null; } }
apache-2.0
mohit/elasticsearch
core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java
19454
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.search.sort;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSelector;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.search.SortedSetSelector;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static org.elasticsearch.search.sort.NestedSortBuilderTests.createRandomNestedSort;
import static org.hamcrest.Matchers.instanceOf;

/**
 * Tests that {@link FieldSortBuilder} serializes, mutates, and builds into the expected
 * Lucene {@link SortField}s (including nested, missing-value, order and mode handling).
 */
public class FieldSortBuilderTests extends AbstractSortTestCase<FieldSortBuilder> {
    /**
     * {@link #provideMappedFieldType(String)} will return a keyword field type for this name,
     * so tests can exercise sorting on a mapped, doc-valued string field.
     */
    private static String MAPPED_STRING_FIELDNAME = "_stringField";

    @Override
    protected FieldSortBuilder createTestItem() {
        return randomFieldSortBuilder();
    }

    // Valid "missing" parameter values: the two sentinel strings, a numeric string, and a number.
    private List<Object> missingContent = Arrays.asList(
            "_last",
            "_first",
            Integer.toString(randomInt()),
            randomInt());

    /** Builds a randomized sort builder covering order/missing/unmappedType/mode/nested options. */
    public FieldSortBuilder randomFieldSortBuilder() {
        String fieldName = rarely() ? FieldSortBuilder.DOC_FIELD_NAME : randomAlphaOfLengthBetween(1, 10);
        FieldSortBuilder builder = new FieldSortBuilder(fieldName);
        if (randomBoolean()) {
            builder.order(randomFrom(SortOrder.values()));
        }
        if (randomBoolean()) {
            builder.missing(randomFrom(missingContent));
        }
        if (randomBoolean()) {
            builder.unmappedType(randomAlphaOfLengthBetween(1, 10));
        }
        if (randomBoolean()) {
            builder.sortMode(randomFrom(SortMode.values()));
        }
        if (randomBoolean()) {
            if (randomBoolean()) {
                builder.setNestedSort(createRandomNestedSort(3));
            } else {
                // the following are alternative ways to setNestedSort for nested sorting
                if (randomBoolean()) {
                    builder.setNestedFilter(randomNestedFilter());
                }
                if (randomBoolean()) {
                    builder.setNestedPath(randomAlphaOfLengthBetween(1, 10));
                }
            }
        }
        return builder;
    }

    /** Returns a copy of {@code original} with exactly one randomly chosen property changed. */
    @Override
    protected FieldSortBuilder mutate(FieldSortBuilder original) throws IOException {
        FieldSortBuilder mutated = new FieldSortBuilder(original);
        int parameter = randomIntBetween(0, 4);
        switch (parameter) {
        case 0:
            // nested sort and nested path/filter are mutually exclusive; mutate whichever is in use
            if (original.getNestedPath() == null && original.getNestedFilter() == null) {
                mutated.setNestedSort(
                        randomValueOtherThan(original.getNestedSort(), () -> NestedSortBuilderTests.createRandomNestedSort(3)));
            } else {
                if (randomBoolean()) {
                    mutated.setNestedPath(randomValueOtherThan(original.getNestedPath(), () -> randomAlphaOfLengthBetween(1, 10)));
                } else {
                    mutated.setNestedFilter(randomValueOtherThan(original.getNestedFilter(), () -> randomNestedFilter()));
                }
            }
            break;
        case 1:
            mutated.sortMode(randomValueOtherThan(original.sortMode(), () -> randomFrom(SortMode.values())));
            break;
        case 2:
            mutated.unmappedType(randomValueOtherThan(
                    original.unmappedType(),
                    () -> randomAlphaOfLengthBetween(1, 10)));
            break;
        case 3:
            mutated.missing(randomValueOtherThan(original.missing(), () -> randomFrom(missingContent)));
            break;
        case 4:
            mutated.order(randomValueOtherThan(original.order(), () -> randomFrom(SortOrder.values())));
            break;
        default:
            throw new IllegalStateException("Unsupported mutation.");
        }
        return mutated;
    }

    @Override
    protected void sortFieldAssertions(FieldSortBuilder builder, SortField sortField, DocValueFormat format) throws IOException {
        SortField.Type expectedType;
        if (builder.getFieldName().equals(FieldSortBuilder.DOC_FIELD_NAME)) {
            expectedType = SortField.Type.DOC;
        } else {
            expectedType = SortField.Type.CUSTOM;
        }
        assertEquals(expectedType, sortField.getType());
        // DESC maps to a reversed Lucene sort
        assertEquals(builder.order() == SortOrder.ASC ? false : true, sortField.getReverse());
        if (expectedType == SortField.Type.CUSTOM) {
            assertEquals(builder.getFieldName(), sortField.getField());
        }
        assertEquals(DocValueFormat.RAW, format);
    }

    /**
     * Test that missing values get transfered correctly to the SortField
     */
    public void testBuildSortFieldMissingValue() throws IOException {
        QueryShardContext shardContextMock = createMockShardContext();
        // "_first" in ascending order sorts missing docs first, i.e. as -Infinity
        FieldSortBuilder fieldSortBuilder = new FieldSortBuilder("value").missing("_first");
        SortField sortField = fieldSortBuilder.build(shardContextMock).field;
        SortedNumericSortField expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE);
        expectedSortField.setMissingValue(Double.NEGATIVE_INFINITY);
        assertEquals(expectedSortField, sortField);

        fieldSortBuilder = new FieldSortBuilder("value").missing("_last");
        sortField = fieldSortBuilder.build(shardContextMock).field;
        expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE);
        expectedSortField.setMissingValue(Double.POSITIVE_INFINITY);
        assertEquals(expectedSortField, sortField);

        // an explicit numeric missing value is used verbatim, whether given as number or string
        Double randomDouble = randomDouble();
        fieldSortBuilder = new FieldSortBuilder("value").missing(randomDouble);
        sortField = fieldSortBuilder.build(shardContextMock).field;
        expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE);
        expectedSortField.setMissingValue(randomDouble);
        assertEquals(expectedSortField, sortField);

        fieldSortBuilder = new FieldSortBuilder("value").missing(randomDouble.toString());
        sortField = fieldSortBuilder.build(shardContextMock).field;
        expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE);
        expectedSortField.setMissingValue(randomDouble);
        assertEquals(expectedSortField, sortField);
    }

    /**
     * Test that the sort builder order gets transfered correctly to the SortField
     */
    public void testBuildSortFieldOrder() throws IOException {
        QueryShardContext shardContextMock = createMockShardContext();
        // default order is ascending with missing docs last (+Infinity)
        FieldSortBuilder fieldSortBuilder = new FieldSortBuilder("value");
        SortField sortField = fieldSortBuilder.build(shardContextMock).field;
        SortedNumericSortField expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE, false);
        expectedSortField.setMissingValue(Double.POSITIVE_INFINITY);
        assertEquals(expectedSortField, sortField);

        fieldSortBuilder = new FieldSortBuilder("value").order(SortOrder.ASC);
        sortField = fieldSortBuilder.build(shardContextMock).field;
        expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE, false);
        expectedSortField.setMissingValue(Double.POSITIVE_INFINITY);
        assertEquals(expectedSortField, sortField);

        // descending flips both the reverse flag, the selector (MAX) and the missing value
        fieldSortBuilder = new FieldSortBuilder("value").order(SortOrder.DESC);
        sortField = fieldSortBuilder.build(shardContextMock).field;
        expectedSortField = new SortedNumericSortField("value", SortField.Type.DOUBLE, true, SortedNumericSelector.Type.MAX);
        expectedSortField.setMissingValue(Double.NEGATIVE_INFINITY);
        assertEquals(expectedSortField, sortField);
    }

    /**
     * Test that the sort builder mode gets transfered correctly to the SortField
     */
    public void testMultiValueMode() throws IOException {
        QueryShardContext shardContextMock = createMockShardContext();
        // MIN/MAX map directly onto Lucene's SortedNumericSelector
        FieldSortBuilder sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.MIN);
        SortField sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField, instanceOf(SortedNumericSortField.class));
        SortedNumericSortField numericSortField = (SortedNumericSortField) sortField;
        assertEquals(SortedNumericSelector.Type.MIN, numericSortField.getSelector());

        sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.MAX);
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField, instanceOf(SortedNumericSortField.class));
        numericSortField = (SortedNumericSortField) sortField;
        assertEquals(SortedNumericSelector.Type.MAX, numericSortField.getSelector());

        // SUM/AVG/MEDIAN need the elasticsearch comparator source rather than a plain Lucene sort
        sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.SUM);
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class));
        XFieldComparatorSource comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource();
        assertEquals(MultiValueMode.SUM, comparatorSource.sortMode());

        sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.AVG);
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class));
        comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource();
        assertEquals(MultiValueMode.AVG, comparatorSource.sortMode());

        sortBuilder = new FieldSortBuilder("value").sortMode(SortMode.MEDIAN);
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class));
        comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource();
        assertEquals(MultiValueMode.MEDIAN, comparatorSource.sortMode());

        // sort mode should also be set by build() implicitely to MIN or MAX if not set explicitely on builder
        sortBuilder = new FieldSortBuilder("value");
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField, instanceOf(SortedNumericSortField.class));
        numericSortField = (SortedNumericSortField) sortField;
        assertEquals(SortedNumericSelector.Type.MIN, numericSortField.getSelector());

        sortBuilder = new FieldSortBuilder("value").order(SortOrder.DESC);
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField, instanceOf(SortedNumericSortField.class));
        numericSortField = (SortedNumericSortField) sortField;
        assertEquals(SortedNumericSelector.Type.MAX, numericSortField.getSelector());
    }

    /**
     * Test that the sort builder nested object gets created in the SortField
     */
    public void testBuildNested() throws IOException {
        QueryShardContext shardContextMock = createMockShardContext();

        // nested sort with an explicit filter: the filter becomes the inner query
        FieldSortBuilder sortBuilder = new FieldSortBuilder("fieldName")
                .setNestedSort(new NestedSortBuilder("path").setFilter(QueryBuilders.termQuery(MAPPED_STRING_FIELDNAME, "value")));
        SortField sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class));
        XFieldComparatorSource comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource();
        Nested nested = comparatorSource.nested();
        assertNotNull(nested);
        assertEquals(new TermQuery(new Term(MAPPED_STRING_FIELDNAME, "value")), nested.getInnerQuery());

        // nested path without a filter: the inner query defaults to a type/path term query
        sortBuilder = new FieldSortBuilder("fieldName").setNestedPath("path");
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class));
        comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource();
        nested = comparatorSource.nested();
        assertNotNull(nested);
        assertEquals(new TermQuery(new Term(TypeFieldMapper.NAME, "__path")), nested.getInnerQuery());

        sortBuilder = new FieldSortBuilder("fieldName").setNestedPath("path")
                .setNestedFilter(QueryBuilders.termQuery(MAPPED_STRING_FIELDNAME, "value"));
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField.getComparatorSource(), instanceOf(XFieldComparatorSource.class));
        comparatorSource = (XFieldComparatorSource) sortField.getComparatorSource();
        nested = comparatorSource.nested();
        assertNotNull(nested);
        assertEquals(new TermQuery(new Term(MAPPED_STRING_FIELDNAME, "value")), nested.getInnerQuery());

        // if nested path is missing, we omit any filter and return a SortedNumericSortField
        sortBuilder = new FieldSortBuilder("fieldName").setNestedFilter(QueryBuilders.termQuery(MAPPED_STRING_FIELDNAME, "value"));
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField, instanceOf(SortedNumericSortField.class));
    }

    /** An unrecognized field in the sort JSON (e.g. the removed "reverse" option) must fail parsing. */
    public void testUnknownOptionFails() throws IOException {
        String json = "{ \"post_date\" : {\"reverse\" : true} },\n";

        XContentParser parser = createParser(JsonXContent.jsonXContent, json);
        // need to skip until parser is located on second START_OBJECT
        parser.nextToken();
        parser.nextToken();
        parser.nextToken();

        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> FieldSortBuilder.fromXContent(parser, ""));
        assertEquals("[field_sort] unknown field [reverse], parser not found", e.getMessage());
    }

    @Override
    protected MappedFieldType provideMappedFieldType(String name) {
        // map MAPPED_STRING_FIELDNAME to a doc-valued keyword field; everything else is numeric
        if (name.equals(MAPPED_STRING_FIELDNAME)) {
            KeywordFieldMapper.KeywordFieldType fieldType = new KeywordFieldMapper.KeywordFieldType();
            fieldType.setName(name);
            fieldType.setHasDocValues(true);
            return fieldType;
        } else {
            return super.provideMappedFieldType(name);
        }
    }

    /**
     * Test that MIN, MAX mode work on non-numeric fields, but other modes throw exception
     */
    public void testModeNonNumericField() throws IOException {
        QueryShardContext shardContextMock = createMockShardContext();

        FieldSortBuilder sortBuilder = new FieldSortBuilder(MAPPED_STRING_FIELDNAME).sortMode(SortMode.MIN);
        SortField sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField, instanceOf(SortedSetSortField.class));
        assertEquals(SortedSetSelector.Type.MIN, ((SortedSetSortField) sortField).getSelector());

        sortBuilder = new FieldSortBuilder(MAPPED_STRING_FIELDNAME).sortMode(SortMode.MAX);
        sortField = sortBuilder.build(shardContextMock).field;
        assertThat(sortField, instanceOf(SortedSetSortField.class));
        assertEquals(SortedSetSelector.Type.MAX, ((SortedSetSortField) sortField).getSelector());

        String expectedError = "we only support AVG, MEDIAN and SUM on number based fields";
        QueryShardException e = expectThrows(QueryShardException.class,
                () -> new FieldSortBuilder(MAPPED_STRING_FIELDNAME).sortMode(SortMode.AVG).build(shardContextMock));
        assertEquals(expectedError, e.getMessage());

        e = expectThrows(QueryShardException.class,
                () -> new FieldSortBuilder(MAPPED_STRING_FIELDNAME).sortMode(SortMode.SUM).build(shardContextMock));
        assertEquals(expectedError, e.getMessage());

        e = expectThrows(QueryShardException.class,
                () -> new FieldSortBuilder(MAPPED_STRING_FIELDNAME).sortMode(SortMode.MEDIAN).build(shardContextMock));
        assertEquals(expectedError, e.getMessage());
    }

    /**
     * Test we can either set nested sort via path/filter or via nested sort builder, not both
     */
    public void testNestedSortBothThrows() throws IOException {
        FieldSortBuilder sortBuilder = new FieldSortBuilder(MAPPED_STRING_FIELDNAME);
        IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
                () -> sortBuilder.setNestedPath("nestedPath").setNestedSort(new NestedSortBuilder("otherPath")));
        assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage());
        iae = expectThrows(IllegalArgumentException.class,
                () -> sortBuilder.setNestedSort(new NestedSortBuilder("otherPath")).setNestedPath("nestedPath"));
        assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage());
        iae = expectThrows(IllegalArgumentException.class,
                () -> sortBuilder.setNestedSort(new NestedSortBuilder("otherPath")).setNestedFilter(QueryBuilders.matchAllQuery()));
        assertEquals("Setting both nested_path/nested_filter and nested not allowed", iae.getMessage());
    }

    /** Expects deprecation warnings whenever the legacy nested_path/nested_filter options are used. */
    @Override
    protected void assertWarnings(FieldSortBuilder testItem) {
        List<String> expectedWarnings = new ArrayList<>();
        if (testItem.getNestedFilter() != null) {
            expectedWarnings.add("[nested_filter] has been deprecated in favour for the [nested] parameter");
        }
        if (testItem.getNestedPath() != null) {
            expectedWarnings.add("[nested_path] has been deprecated in favor of the [nested] parameter");
        }
        if (expectedWarnings.isEmpty() == false) {
            assertWarnings(expectedWarnings.toArray(new String[expectedWarnings.size()]));
        }
    }

    @Override
    protected FieldSortBuilder fromXContent(XContentParser parser, String fieldName) throws IOException {
        return FieldSortBuilder.fromXContent(parser, fieldName);
    }
}
apache-2.0
DenverM80/ds3_java_sdk
ds3-sdk/src/main/java/com/spectralogic/ds3client/utils/Predicate.java
859
/* * ****************************************************************************** * Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"). You may not use * this file except in compliance with the License. A copy of the License is located at * * http://www.apache.org/licenses/LICENSE-2.0 * * or in the "license" file accompanying this file. * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * **************************************************************************** */ package com.spectralogic.ds3client.utils; public interface Predicate<T> { boolean test(T t); }
apache-2.0
cruisechang/javautils
com/cruise/utils/CheckUtil.java
4063
package com.cruise.utils; import java.util.*; /** * @author cruisechang * @description * Check utils. */ public class CheckUtil { public static final String DATA_TYPE_INT="int"; public static final String DATA_TYPE_STRING="string"; public static final String DATA_TYPE_BOOLEAN="bool"; public static final String DATA_TYPE_DOUBLE="dou"; public static final String DATA_TYPE_LONG="lon"; private static boolean dataType(Object data,String type) { if(type.equals(DATA_TYPE_INT)){ if(data instanceof Integer){ return true; } }else if(type.equals(DATA_TYPE_STRING)){ if(data instanceof String){ return true; } }else if(type.equals(DATA_TYPE_BOOLEAN)){ if(data instanceof Boolean){ return true; } }else if(type.equals(DATA_TYPE_DOUBLE)){ if(data instanceof Double){ return true; } }else if(type.equals(DATA_TYPE_LONG)){ if(data instanceof Long){ return true; } } return false; } public static boolean checkLong(long lon,int min) { if(dataType(lon,DATA_TYPE_LONG)){ if(lon>=min) return true; } return false; } public static boolean checkString(String str) { if(dataType(str,DATA_TYPE_STRING)){ return true; } return false; } public static boolean checkString(String str,int lenMin) { if(dataType(str,DATA_TYPE_STRING)){ if(str.length()>=lenMin) return true; } return false; } public static boolean checkBool(Object data) { return dataType(data,DATA_TYPE_BOOLEAN); } public static boolean checkInt(int num) { if(dataType(num,DATA_TYPE_INT)){ return true; } return false; } /** * check if param is int and if >=min && < =max * @param num * @param min * @param max * @return */ public static boolean checkInt(int num,int min,int max) { if(dataType(num,DATA_TYPE_INT)){ if(num>=min && num<=max){ return true; } } return false; } /** * @param num * @param min * @return */ public static boolean checkInt(int num,int min) { if(dataType(num,DATA_TYPE_INT)){ if(num>=min){ return true; } } return false; } public static boolean checkId(int id) { if(dataType(id,DATA_TYPE_INT)){ if(id>=0){ return true; } } return 
false; } public static boolean checkPlayerNum(int playerNum) { if(dataType(playerNum,DATA_TYPE_INT)){ if(playerNum>0 && playerNum<7){ return true; } } return false; } public static boolean checkLatLng(double lat,double lng) { if(dataType(lat,DATA_TYPE_DOUBLE)){ if(lat>=0 && lat<=90 ){ return true; } } return false; } public static String getRandomKey(int length) { String s=""; List <String> aList=getStringArrayList(); int size=aList.size(); //����摨衣��葡 for(int i=0;i<length;i++){ String r=aList.get(MathUtil.random(0, size-1)); s+=r; } return s; } private static List<String> getStringArrayList() { List<String> list = new ArrayList<String>(); list.add("~"); list.add("!"); list.add("@"); list.add("$"); list.add("^"); list.add("&"); list.add("*"); list.add("("); list.add(")"); list.add("_"); list.add("+"); list.add("a"); list.add("b"); list.add("c"); list.add("d"); list.add("e"); list.add("f"); list.add("g"); list.add("h"); list.add("i"); list.add("j"); list.add("k"); list.add("l"); list.add("m"); list.add("n"); list.add("o"); list.add("p"); list.add("q"); list.add("r"); list.add("s"); list.add("t"); list.add("u"); list.add("v"); list.add("w"); list.add("x"); list.add("y"); list.add("z"); return list; } public static boolean checkTimeEnough(long allTime,long nowTime) { long ex=nowTime-allTime; // 30sec if(ex>=-30000){ return true; } return false; } }
apache-2.0
masonmei/java-agent
profiler/src/main/java/com/baidu/oped/apm/profiler/interceptor/registry/GlobalInterceptorRegistryBinder.java
1375
/*
 * Copyright 2014 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.baidu.oped.apm.profiler.interceptor.registry;

import com.baidu.oped.apm.bootstrap.interceptor.registry.GlobalInterceptorRegistry;
import com.baidu.oped.apm.bootstrap.interceptor.registry.InterceptorRegistryAdaptor;

/**
 * Test-only {@link InterceptorRegistryBinder} that exposes the shared
 * {@link GlobalInterceptorRegistry} rather than managing a registry of its
 * own; {@link #bind()} and {@link #unbind()} are therefore intentional no-ops.
 *
 * for test
 * @author emeroad
 */
@Deprecated
public class GlobalInterceptorRegistryBinder implements InterceptorRegistryBinder {

    public GlobalInterceptorRegistryBinder() {
    }

    // No-op: the global registry has static lifecycle, nothing to bind.
    @Override
    public void bind() {
    }

    // No-op: nothing was bound, so nothing needs releasing.
    @Override
    public void unbind() {
    }

    /** Hands out the process-wide registry adaptor. */
    public InterceptorRegistryAdaptor getInterceptorRegistryAdaptor() {
        return GlobalInterceptorRegistry.REGISTRY;
    }

    /** Identifies the registry implementation by fully-qualified class name. */
    @Override
    public String getInterceptorRegistryClassName() {
        return GlobalInterceptorRegistry.class.getName();
    }
}
apache-2.0
IvanOleynik/java_pft
cain-tests/src/test/java/ru/cain/tests/TestBase.java
502
package ru.cain.tests;

import org.openqa.selenium.remote.BrowserType;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import ru.cain.appmanager.ApplicationManager;

/**
 * Base class for the UI test suite: owns the {@link ApplicationManager}
 * lifecycle, starting the Chrome-backed application before each test method
 * and stopping it afterwards.
 *
 * Created by www on 16.11.2016.
 */
public class TestBase {

    // Shared fixture; subclasses drive the application under test through this field.
    protected final ApplicationManager app = new ApplicationManager(BrowserType.CHROME);

    @BeforeMethod
    public void setUp() throws Exception {
        app.init();
    }

    /**
     * FIX: {@code alwaysRun = true} guarantees the browser is stopped even
     * when the test method fails or is skipped; without it, TestNG skips this
     * after-method on failure and the WebDriver session leaks.
     */
    @AfterMethod(alwaysRun = true)
    public void tearDown() {
        app.stop();
    }
}
apache-2.0
google-ar/sceneform-android-sdk
sceneformsrc/sceneform/src/main/java/com/google/ar/sceneform/rendering/ResourceManager.java
3736
package com.google.ar.sceneform.rendering; import android.support.annotation.Nullable; import com.google.ar.sceneform.resources.ResourceHolder; import com.google.ar.sceneform.resources.ResourceRegistry; import java.util.ArrayList; /** * Minimal resource manager. Maintains mappings from ids to created resources and a task executor * dedicated to loading resources asynchronously. * * @hide */ @SuppressWarnings("initialization") // Suppress @UnderInitialization warning. public class ResourceManager { @Nullable private static ResourceManager instance = null; private final ArrayList<ResourceHolder> resourceHolders = new ArrayList<>(); private final ResourceRegistry<Texture> textureRegistry = new ResourceRegistry<>(); private final ResourceRegistry<Material> materialRegistry = new ResourceRegistry<>(); private final ResourceRegistry<ModelRenderable> modelRenderableRegistry = new ResourceRegistry<>(); private final ResourceRegistry<ViewRenderable> viewRenderableRegistry = new ResourceRegistry<>(); private final CleanupRegistry<CameraStream> cameraStreamCleanupRegistry = new CleanupRegistry<>(); private final CleanupRegistry<ExternalTexture> externalTextureCleanupRegistry = new CleanupRegistry<>(); private final CleanupRegistry<Material> materialCleanupRegistry = new CleanupRegistry<>(); private final CleanupRegistry<RenderableInstance> renderableInstanceCleanupRegistry = new CleanupRegistry<>(); private final CleanupRegistry<Texture> textureCleanupRegistry = new CleanupRegistry<>(); ResourceRegistry<Texture> getTextureRegistry() { return textureRegistry; } ResourceRegistry<Material> getMaterialRegistry() { return materialRegistry; } ResourceRegistry<ModelRenderable> getModelRenderableRegistry() { return modelRenderableRegistry; } ResourceRegistry<ViewRenderable> getViewRenderableRegistry() { return viewRenderableRegistry; } CleanupRegistry<CameraStream> getCameraStreamCleanupRegistry() { return cameraStreamCleanupRegistry; } CleanupRegistry<ExternalTexture> 
getExternalTextureCleanupRegistry() { return externalTextureCleanupRegistry; } CleanupRegistry<Material> getMaterialCleanupRegistry() { return materialCleanupRegistry; } CleanupRegistry<RenderableInstance> getRenderableInstanceCleanupRegistry() { return renderableInstanceCleanupRegistry; } CleanupRegistry<Texture> getTextureCleanupRegistry() { return textureCleanupRegistry; } public long reclaimReleasedResources() { long resourcesInUse = 0; for (ResourceHolder registry : resourceHolders) { resourcesInUse += registry.reclaimReleasedResources(); } return resourcesInUse; } /** Forcibly deletes all tracked references */ public void destroyAllResources() { for (ResourceHolder resourceHolder : resourceHolders) { resourceHolder.destroyAllResources(); } } public void addResourceHolder(ResourceHolder resource) { resourceHolders.add(resource); } public static ResourceManager getInstance() { if (instance == null) { instance = new ResourceManager(); } return instance; } private ResourceManager() { addResourceHolder(textureRegistry); addResourceHolder(materialRegistry); addResourceHolder(modelRenderableRegistry); addViewRenderableRegistry(); addResourceHolder(cameraStreamCleanupRegistry); addResourceHolder(externalTextureCleanupRegistry); addResourceHolder(materialCleanupRegistry); addResourceHolder(renderableInstanceCleanupRegistry); addResourceHolder(textureCleanupRegistry); } private void addViewRenderableRegistry() { addResourceHolder(viewRenderableRegistry); } }
apache-2.0
estatio/estatio
estatioapp/app/src/main/java/org/estatio/module/lease/fixtures/LeaseTypeForItalyRefData.java
2513
/* * * Copyright 2012-2014 Eurocommercial Properties NV * * * Licensed under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.estatio.module.lease.fixtures; import javax.inject.Inject; import org.apache.isis.applib.annotation.Programmatic; import org.apache.isis.applib.fixturescripts.FixtureScript; import org.isisaddons.module.security.dom.tenancy.ApplicationTenancyRepository; import org.estatio.module.lease.dom.LeaseType; import org.estatio.module.lease.dom.LeaseTypeRepository; @Programmatic public class LeaseTypeForItalyRefData extends FixtureScript { private enum LeaseTypeData { AA("Apparecchiature Automatic"), AD("Affitto d'Azienda"), CG("Comodato Gratuito"), CO("Comodato"), DH("Dehors"), LO("Locazione"), OA("Occup. Abusiva Affito"), OL("Occup. Abusiva Locazione"), PA("Progroga Affitto"), PL("Progroga Locazione"), PP("Pannelli Pubblicitari"), PR("Precaria"), SA("Scritt. Privata Affitto"), SL("Scritt. 
Privata Locazione"); private final String title; private LeaseTypeData(final String title) { this.title = title; } public String title() { return title; } } @Override protected void execute(ExecutionContext fixtureResults) { for (LeaseTypeData ltd : LeaseTypeData.values()) { createLeaseType(fixtureResults, ltd); } } private void createLeaseType(ExecutionContext fixtureResults, LeaseTypeData ltd) { final LeaseType leaseType = leaseTypeRepository.findOrCreate(ltd.name(), ltd.title(), applicationTenancyRepository.findByPath("/ITA")); fixtureResults.addResult(this, leaseType.getReference(), leaseType); } // ////////////////////////////////////// @Inject private LeaseTypeRepository leaseTypeRepository; @Inject private ApplicationTenancyRepository applicationTenancyRepository; }
apache-2.0
Jamling/AFDemo
library/src/cn/ieclipse/af/adapter/delegate/AdapterDelegate.java
4002
/* * Copyright (C) 2015-2016 QuickAF * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package cn.ieclipse.af.adapter.delegate; import android.support.v7.widget.RecyclerView; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import java.lang.reflect.Constructor; import cn.ieclipse.af.adapter.AfRecyclerAdapter; import cn.ieclipse.af.adapter.AfViewHolder; /** * Description * * @author Jamling */ public abstract class AdapterDelegate<T> implements Comparable<AdapterDelegate> { protected int viewType; private LayoutInflater layoutInflater; private AfRecyclerAdapter<T> adapter; private RecyclerView.ViewHolder viewHolder; public AdapterDelegate(int viewType) { this.viewType = viewType; } public AdapterDelegate() { this(0); } void setViewType(int viewType) { this.viewType = viewType; } int getViewType() { return viewType; } void setAdapter(AfRecyclerAdapter<T> adapter) { this.adapter = adapter; } public AfRecyclerAdapter<T> getAdapter() { return adapter; } public RecyclerView.ViewHolder getViewHolder() { return viewHolder; } public boolean isForViewType(T info, int position) { return true; } public Class<? 
extends RecyclerView.ViewHolder> getViewHolderClass() { return AfViewHolder.class; } public abstract int getLayout(); public abstract void onUpdateView(RecyclerView.ViewHolder holder, T info, int position); public void onBindViewHolder(T info, int position, RecyclerView.ViewHolder holder) { onUpdateView(holder, info, position); } public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent) { if (layoutInflater == null) { layoutInflater = LayoutInflater.from(parent.getContext()); } int layout = getLayout(); View view = null; if (layout > 0) { view = layoutInflater.inflate(layout, parent, false); } viewHolder = instanceViewHolder(view); if (viewHolder != null && viewHolder instanceof AfViewHolder) { ((AfViewHolder) viewHolder).setAdapter(getAdapter()); } return viewHolder; } /** * Instance ViewHolder with reflection, you can override to new concrete ViewHolder * * @param itemView item view of ViewHolder * * @return ViewHolder * @see #onCreateViewHolder(android.view.ViewGroup) * @see #getViewHolderClass() * @since 3.0.1 */ protected RecyclerView.ViewHolder instanceViewHolder(View itemView) { Class<?> cls = getViewHolderClass(); if (cls != null) { try { Constructor c = cls.getConstructor(View.class); c.setAccessible(true); return (RecyclerView.ViewHolder) c.newInstance(itemView); } catch (Exception e) { String msg = String.format("Can't instance ViewHolder(%s) in %s, is it an assessable (public/static) " + "class? \nPlease see more info in https://github.com/Jamling/QuickAF/issues/41\n root cause " + "message: %s", cls, getClass(), e.getMessage()); throw new IllegalAccessError(msg); } } return null; } @Override public int compareTo(AdapterDelegate delegate) { return viewType - delegate.viewType; } }
apache-2.0
ryanseys/escape-mouseville
EscapeFromMouseville/Dot.java
360
/** * A dot item is eaten! * * @author Ryan Seys * @version 1.0 */ public class Dot implements Item { // instance variables - replace the example below with your own private static final String LETTER = "o"; /** * Returns the string representation of the object. **/ public String getLetter() { return LETTER; } }
apache-2.0
casid/mazebert-ladder
src/main/java/com/mazebert/gateways/fake/FakeGameGateway.java
776
package com.mazebert.gateways.fake; import com.mazebert.entities.Game; import com.mazebert.gateways.GameGateway; import java.util.List; import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.stream.Collectors; public class FakeGameGateway implements GameGateway { private final ConcurrentMap<UUID, Game> byId = new ConcurrentHashMap<>(); @Override public void add(Game game) { byId.put(game.id, game); } @Override public Game findById(UUID id) { return byId.get(id); } @Override public List<Game> getGames() { return byId.values().stream().sorted((o1, o2) -> Long.compare(o2.created, o1.created)).collect(Collectors.toList()); } }
apache-2.0
cocoJamboo/jboss-eap-6.4.0.GA-quickstarts
logging-tools/target/generated-sources/annotations/org/jboss/as/quickstarts/loggingToolsQS/exceptions/DateExceptionsBundle_$bundle_sv.java
677
package org.jboss.as.quickstarts.loggingToolsQS.exceptions;

import javax.annotation.Generated;

/**
 * Swedish ("sv") translation of the date-exceptions message bundle.
 *
 * Warning this class consists of generated code.
 *
 */
@Generated(value = "org.jboss.logging.processor.generator.model.MessageBundleTranslator", date = "2015-10-14T23:22:02+0200")
public class DateExceptionsBundle_$bundle_sv extends DateExceptionsBundle_$bundle implements DateExceptionsBundle {

    // Shared singleton: message bundles are stateless, so one instance serves all callers.
    public final static DateExceptionsBundle_$bundle_sv INSTANCE = new DateExceptionsBundle_$bundle_sv();

    protected DateExceptionsBundle_$bundle_sv() {
        super();
    }

    // Preserves the singleton across Java serialization: deserialized copies
    // are replaced with INSTANCE.
    @Override
    protected Object readResolve() {
        return INSTANCE;
    }
}
apache-2.0
xsolla/xsolla-sdk-android
xsollasdk/src/main/java/com/xsolla/android/sdk/util/cache/Util.java
2408
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.xsolla.android.sdk.util.cache;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.Reader;
import java.io.StringWriter;
import java.nio.charset.Charset;

/**
 * Junk drawer of utility methods.
 */
final class Util {
    static final Charset US_ASCII = Charset.forName("US-ASCII");
    static final Charset UTF_8 = Charset.forName("UTF-8");

    private Util() {
    }

    /** Drains {@code reader} into a String, always closing it afterwards. */
    static String readFully(Reader reader) throws IOException {
        try {
            StringWriter sink = new StringWriter();
            char[] chunk = new char[1024];
            for (int read = reader.read(chunk); read != -1; read = reader.read(chunk)) {
                sink.write(chunk, 0, read);
            }
            return sink.toString();
        } finally {
            reader.close();
        }
    }

    /**
     * Deletes the contents of {@code dir}. Throws an IOException if any file
     * could not be deleted, or if {@code dir} is not a readable directory.
     */
    static void deleteContents(File dir) throws IOException {
        File[] entries = dir.listFiles();
        if (entries == null) {
            throw new IOException("not a readable directory: " + dir);
        }
        for (File entry : entries) {
            // Recurse first so the directory is empty before it is deleted.
            if (entry.isDirectory()) {
                deleteContents(entry);
            }
            if (!entry.delete()) {
                throw new IOException("failed to delete file: " + entry);
            }
        }
    }

    // /*Auto*/ marks where the upstream code took AutoCloseable; this backport
    // uses Closeable. Runtime exceptions propagate; checked ones are ignored.
    static void closeQuietly(/*Auto*/Closeable closeable) {
        if (closeable == null) {
            return;
        }
        try {
            closeable.close();
        } catch (RuntimeException rethrown) {
            throw rethrown;
        } catch (Exception ignored) {
        }
    }
}
apache-2.0
0570dev/flickr-glass
src/com/googlecode/flickrjandroid/interestingness/InterestingnessInterface.java
5423
/* *------------------------------------------------------- * (c) 2006 Das B&uuml;ro am Draht GmbH - All Rights reserved *------------------------------------------------------- */ package com.googlecode.flickrjandroid.interestingness; import java.io.IOException; import java.security.InvalidKeyException; import java.security.NoSuchAlgorithmException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.Set; import org.json.JSONException; import com.googlecode.flickrjandroid.FlickrException; import com.googlecode.flickrjandroid.Parameter; import com.googlecode.flickrjandroid.Response; import com.googlecode.flickrjandroid.Transport; import com.googlecode.flickrjandroid.photos.Extras; import com.googlecode.flickrjandroid.photos.PhotoList; import com.googlecode.flickrjandroid.photos.PhotoUtils; import com.googlecode.flickrjandroid.util.StringUtilities; /** * * @author till * @version $Id: InterestingnessInterface.java,v 1.9 2009/07/11 20:30:27 x-mago Exp $ */ public class InterestingnessInterface { public static final String METHOD_GET_LIST = "flickr.interestingness.getList"; private static final String KEY_METHOD = "method"; private static final String KEY_API_KEY = "api_key"; private static final String KEY_DATE = "date"; private static final String KEY_EXTRAS = "extras"; private static final String KEY_PER_PAGE = "per_page"; private static final String KEY_PAGE = "page"; private static final ThreadLocal<SimpleDateFormat> DATE_FORMATS = new ThreadLocal<SimpleDateFormat>() { protected synchronized SimpleDateFormat initialValue() { return new SimpleDateFormat("yyyy-MM-dd"); } }; private String apiKey; private String sharedSecret; private Transport transportAPI; public InterestingnessInterface( String apiKey, String sharedSecret, Transport transportAPI ) { this.apiKey = apiKey; this.sharedSecret = sharedSecret; this.transportAPI = transportAPI; } /** * Returns the list 
of interesting photos for the most recent day or a user-specified date. * * This method does not require authentication. * * @param date * @param extras A set of Strings controlling the extra information to fetch for each returned record. Currently supported fields are: license, date_upload, date_taken, owner_name, icon_server, original_format, last_update, geo. Set to null or an empty set to not specify any extras. * @param perPage The number of photos to show per page * @param page The page offset * @return PhotoList * @throws FlickrException * @throws IOException * @throws JSONException * @throws NoSuchAlgorithmException * @throws InvalidKeyException * @see com.googlecode.flickrjandroid.photos.Extras */ public PhotoList getList(String date, Set<String> extras, int perPage, int page) throws FlickrException, IOException, InvalidKeyException, NoSuchAlgorithmException, JSONException { List<Parameter> parameters = new ArrayList<Parameter>(); parameters.add(new Parameter(KEY_METHOD, METHOD_GET_LIST)); parameters.add(new Parameter(KEY_API_KEY, apiKey)); if (date != null) { parameters.add(new Parameter(KEY_DATE, date)); } if (extras != null) { parameters.add(new Parameter(KEY_EXTRAS, StringUtilities.join(extras, ","))); } if (perPage > 0) { parameters.add(new Parameter(KEY_PER_PAGE, String.valueOf(perPage))); } if (page > 0) { parameters.add(new Parameter(KEY_PAGE, String.valueOf(page))); } Response response = transportAPI.get(transportAPI.getPath(), parameters); if (response.isError()) { throw new FlickrException(response.getErrorCode(), response.getErrorMessage()); } return PhotoUtils.createPhotoList(response.getData()); } /** * * @param date * @param extras * @param perPage * @param page * @return PhotoList * @throws FlickrException * @throws IOException * @throws JSONException * @throws NoSuchAlgorithmException * @throws InvalidKeyException * @see com.googlecode.flickrjandroid.photos.Extras */ public PhotoList getList(Date date, Set<String> extras, int perPage, int 
page) throws FlickrException, IOException, InvalidKeyException, NoSuchAlgorithmException, JSONException { String dateString = null; if (date != null) { DateFormat df = (DateFormat)DATE_FORMATS.get(); dateString = df.format(date); } return getList(dateString, extras, perPage, page); } /** * convenience method to get the list of all 500 most recent photos * in flickr explore with all known extra attributes. * * @return a List of Photos * @throws FlickrException * @throws IOException * @throws JSONException * @throws NoSuchAlgorithmException * @throws InvalidKeyException */ public PhotoList getList() throws FlickrException, IOException, InvalidKeyException, NoSuchAlgorithmException, JSONException { return getList((String) null, Extras.ALL_EXTRAS, 500, 1); } }
apache-2.0
fedevelatec/asic-core
src/main/java/com/fedevela/core/workflow/pojos/AuthorityWorkflowSecurityEntity.java
3122
package com.fedevela.core.workflow.pojos; /** * Created by fvelazquez on 31/03/14. */ import java.io.Serializable; import javax.persistence.Basic; import javax.persistence.Column; import javax.persistence.Entity; import javax.persistence.Id; import javax.xml.bind.annotation.XmlRootElement; @Entity @XmlRootElement public class AuthorityWorkflowSecurityEntity implements Serializable { private static final long serialVersionUID = 1L; @Id @Column(name = "ID") private Long id; @Basic(optional = false) @Column(name = "ID_AUTHORITY") private String idAuthority; @Basic(optional = false) @Column(name = "DESCRIPTION") private String description; @Basic(optional = false) @Column(name = "ENABLED") private Character enabled; @Column(name = "ID_WORKFLOW_CONFIG") private Long idWorkflowConfig; @Column(name = "ALLOW_STATE") private Character allowState; @Column(name = "ALLOW_EVENT") private Character allowEvent; public AuthorityWorkflowSecurityEntity() { } public Character getAllowEvent() { return allowEvent; } public void setAllowEvent(Character allowEvent) { this.allowEvent = allowEvent; } public Character getAllowState() { return allowState; } public void setAllowState(Character allowState) { this.allowState = allowState; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public Character getEnabled() { return enabled; } public void setEnabled(Character enabled) { this.enabled = enabled; } public Long getId() { return id; } public void setId(Long id) { this.id = id; } public String getIdAuthority() { return idAuthority; } public void setIdAuthority(String idAuthority) { this.idAuthority = idAuthority; } public Long getIdWorkflowConfig() { return idWorkflowConfig; } public void setIdWorkflowConfig(Long idWorkflowConfig) { this.idWorkflowConfig = idWorkflowConfig; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return 
false; } final AuthorityWorkflowSecurityEntity other = (AuthorityWorkflowSecurityEntity) obj; if (this.id != other.id && (this.id == null || !this.id.equals(other.id))) { return false; } return true; } @Override public int hashCode() { int hash = 3; hash = 37 * hash + (this.id != null ? this.id.hashCode() : 0); return hash; } @Override public String toString() { return "AuthorityWorkflowSecurityEntity{" + "id=" + id + ", idAuthority=" + idAuthority + ", description=" + description + ", enabled=" + enabled + ", idWorkflowConfig=" + idWorkflowConfig + ", allowState=" + allowState + ", allowEvent=" + allowEvent + '}'; } }
apache-2.0
vjanmey/EpicMudfia
com/planet_ink/coffee_mud/Abilities/Prayers/Prayer_Paralyze.java
4716
package com.planet_ink.coffee_mud.Abilities.Prayers;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;

import java.util.*;

/*
   Copyright 2000-2014 Bo Zimmerman

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/

/**
 * Malicious prayer that pins a target MOB in place for a few ticks.
 * While the effect is active, {@link #affectPhyStats} flags the target
 * with CAN_NOT_MOVE; {@link #unInvoke} lifts the flag and notifies the
 * victim. Success chance and duration both scale against the level gap
 * between caster and target and the target's wisdom.
 */
@SuppressWarnings("rawtypes")
public class Prayer_Paralyze extends Prayer
{
	@Override public String ID() { return "Prayer_Paralyze"; }
	private final static String localizedName = CMLib.lang()._("Paralyze");
	@Override public String name() { return localizedName; }
	// Classified as an unholy corruption-domain prayer that harms its target.
	@Override public int classificationCode(){return Ability.ACODE_PRAYER|Ability.DOMAIN_CORRUPTION;}
	@Override public int abstractQuality(){ return Ability.QUALITY_MALICIOUS;}
	@Override public long flags(){return Ability.FLAG_UNHOLY|Ability.FLAG_PARALYZING;}
	private final static String localizedStaticDisplay = CMLib.lang()._("(Paralyzed)");
	@Override public String displayText() { return localizedStaticDisplay; }
	// Only MOBs can carry or be targeted by this effect.
	@Override protected int canAffectCode(){return Ability.CAN_MOBS;}
	@Override protected int canTargetCode(){return Ability.CAN_MOBS;}

	/**
	 * While this affect is attached to a MOB, marks it immobile by setting
	 * the CAN_NOT_MOVE sense bit on its adjusted stats.
	 */
	@Override
	public void affectPhyStats(Physical affected, PhyStats affectableStats)
	{
		super.affectPhyStats(affected,affectableStats);
		if(affected==null) return;
		if(!(affected instanceof MOB)) return;
		affectableStats.setSensesMask(affectableStats.sensesMask()|PhyStats.CAN_NOT_MOVE);
	}

	/**
	 * Removes the paralysis; if the effect was removable (not permanent),
	 * tells the victim it has worn off.
	 */
	@Override
	public void unInvoke()
	{
		// undo the affects of this spell
		if(!(affected instanceof MOB)) return;
		final MOB mob=(MOB)affected;
		super.unInvoke();
		if(canBeUninvoked()) mob.tell(_("The paralysis eases out of your muscles."));
	}

	/**
	 * AI hint: casting this on a target that already cannot move is
	 * pointless, so report indifference in that case.
	 */
	@Override
	public int castingQuality(MOB mob, Physical target)
	{
		if(mob!=null)
		{
			if(target instanceof MOB)
			{
				if(!CMLib.flags().canMove(((MOB)target)))
					return Ability.QUALITY_INDIFFERENT;
			}
		}
		return super.castingQuality(mob,target);
	}

	/**
	 * Attempts the paralysis. Flow: resolve target; compute the level
	 * disadvantage (clamped to 0..6); run the base invoke (mana, position,
	 * etc.); roll proficiency penalized by target wisdom and level gap; on
	 * success, emit the cast and paralyze messages and, if the room accepts
	 * both, attach the affect for 2..8 ticks (shorter against higher-level
	 * targets). On a failed roll, reports a fizzle instead.
	 */
	@Override
	public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
	{
		final MOB target=this.getTarget(mob,commands,givenTarget);
		if(target==null) return false;

		// levelDiff: how far the target out-levels the caster (expertise
		// counts double for the caster); clamped to the 0..6 band used by
		// both the success roll and the duration below.
		int levelDiff=target.phyStats().level()-(mob.phyStats().level()+(2*super.getXLEVELLevel(mob)));
		if(levelDiff<0) levelDiff=0;
		if(levelDiff>6) levelDiff=6;
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;

		// Base -25 penalty, eased by caster expertise, worsened by 2x target
		// wisdom and 5 per level of disadvantage.
		boolean success=proficiencyCheck(mob,-25+(super.getXLEVELLevel(mob))-((target.charStats().getStat(CharStats.STAT_WISDOM)*2)+(levelDiff*5)),auto);

		if(success)
		{
			// it worked, so build a copy of this ability,
			// and add it to the affects list of the
			// affected MOB.  Then tell everyone else
			// what happened.
			final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":_("^S<S-NAME> invoke(s) an unholy paralysis upon <T-NAMESELF>.^?"));
			final CMMsg msg2=CMClass.getMsg(mob,target,this,CMMsg.MASK_MALICIOUS|CMMsg.TYP_PARALYZE|(auto?CMMsg.MASK_ALWAYS:0),null);
			// Both the cast message and the paralyze message must be approved
			// by the room before anything takes effect.
			if((mob.location().okMessage(mob,msg))&&(mob.location().okMessage(mob,msg2)))
			{
				mob.location().send(mob,msg);
				mob.location().send(mob,msg2);
				// value()<=0 on both messages means nothing (e.g. a save or
				// resistance) cancelled or mitigated the effect.
				if((msg.value()<=0)&&(msg2.value()<=0))
				{
					// Duration 8 ticks minus level disadvantage, floor of 2.
					int duration = 8 - levelDiff;
					if(duration < 2) duration = 2;
					success=maliciousAffect(mob,target,asLevel,duration,-1);
					mob.location().show(target,null,CMMsg.MSG_OK_VISUAL,_("<S-NAME> can't move!"));
				}
			}
		}
		else
			return maliciousFizzle(mob,target,_("<S-NAME> attempt(s) to paralyze <T-NAMESELF>, but flub(s) it."));

		// return whether it worked
		return success;
	}
}
apache-2.0
Evolveum/midpoint-ide-plugins
com.evolveum.midpoint.eclipse.parent/com.evolveum.midpoint.eclipse.ui/src/com/evolveum/midpoint/eclipse/ui/components/browser/ShowGenerator.java
1303
package com.evolveum.midpoint.eclipse.ui.components.browser; import java.util.List; import javax.xml.namespace.QName; import org.w3c.dom.Document; import org.w3c.dom.Element; import com.evolveum.midpoint.eclipse.runtime.api.Constants; import com.evolveum.midpoint.eclipse.runtime.api.resp.ServerObject; import com.evolveum.midpoint.eclipse.ui.util.Console; import com.evolveum.midpoint.util.DOMUtil; public class ShowGenerator extends Generator { public ShowGenerator() { } @Override public String getLabel() { return null; // not needed } @Override public String generate(List<ServerObject> objects, GeneratorOptions options) { if (objects.isEmpty()) { return null; } if (objects.size() == 1) { return objects.get(0).getXml(); } try { Document doc = DOMUtil.getDocument(new QName(Constants.COMMON_NS, "objects", "c")); Element root = doc.getDocumentElement(); for (ServerObject object : objects) { Element obj = DOMUtil.parseDocument(object.getXml()).getDocumentElement(); root.appendChild(doc.importNode(obj, true)); } return DOMUtil.serializeDOMToString(root); } catch (Throwable t) { Console.logError("Couldn't copy selected objects to new XML document", t); return null; } } }
apache-2.0
mkopylec/errorest-spring-boot-starter
src/main/java/com/github/mkopylec/errorest/handling/errordata/http/ServletRequestBindingErrorDataProvider.java
1527
package com.github.mkopylec.errorest.handling.errordata.http; import com.github.mkopylec.errorest.configuration.ErrorestProperties; import com.github.mkopylec.errorest.handling.errordata.ErrorData; import org.springframework.boot.web.servlet.error.ErrorAttributes; import org.springframework.http.HttpStatus; import org.springframework.web.bind.ServletRequestBindingException; import org.springframework.web.context.request.WebRequest; import javax.servlet.http.HttpServletRequest; import static org.apache.commons.lang3.StringUtils.uncapitalize; import static org.springframework.http.HttpStatus.BAD_REQUEST; public class ServletRequestBindingErrorDataProvider extends HttpClientErrorDataProvider<ServletRequestBindingException> { public ServletRequestBindingErrorDataProvider(ErrorestProperties errorestProperties) { super(errorestProperties); } @Override public ErrorData getErrorData(ServletRequestBindingException ex, HttpServletRequest request) { return getErrorData(ex, request, BAD_REQUEST); } @Override public ErrorData getErrorData(ServletRequestBindingException ex, HttpServletRequest request, HttpStatus responseHttpStatus, ErrorAttributes errorAttributes, WebRequest webRequest) { return super.getErrorData(ex, request, BAD_REQUEST, errorAttributes, webRequest); } @Override protected String getErrorDescription(ServletRequestBindingException ex) { return BAD_REQUEST.getReasonPhrase() + ", " + uncapitalize(ex.getMessage()); } }
apache-2.0
raphaelazzolini/mercurius
mercurius/datatype/src/main/java/br/unicamp/ic/lsd/mercurius/datatype/factory/ProductImageFactory.java
186
package br.unicamp.ic.lsd.mercurius.datatype.factory; import br.unicamp.ic.lsd.mercurius.datatype.ProductImage; public interface ProductImageFactory extends Factory<ProductImage> { }
apache-2.0
i386/chesterfield
src/test/java/chesterfield/DatabaseTest.java
2821
package chesterfield; public class DatabaseTest extends ChesterfieldTestCase { public void testGetDocumentCount() throws Exception { assertEquals(0, getDatabase().getDocumentCount()); MyDocument document = new MyDocument(); document.setHelloWorld("Hello, world!"); document.setId("123"); MyDocument document2 = new MyDocument(); document2.setHelloWorld("Hello, world!"); document2.setId("456"); getDatabase().forDocument(document).save(); getDatabase().forDocument(document2).save(); assertEquals(2, getDatabase().getDocumentCount()); } public void testCreateSaveUpdateDelete() throws Exception { MyDocument document = new MyDocument(); document.setHelloWorld("Hello, world!"); document.setId("123"); document.getMyBean().setFoo("I like cake"); document.getMyBean().setBar("This is my bean"); getDatabase().forDocument(document).save(); MyDocument savedDocument = getDatabase().get("123", MyDocument.class); assertNotNull(savedDocument); assertEquals(document.getId(), savedDocument.getId()); assertEquals(document.getRev(), savedDocument.getRev()); assertEquals("Hello, world!", savedDocument.getHelloWorld()); assertEquals(document.getMyBean().getFoo(), savedDocument.getMyBean().getFoo()); assertEquals(document.getMyBean().getBar(), savedDocument.getMyBean().getBar()); assertTrue(getDatabase().forDocument(document).delete()); assertNull(getDatabase().get("123", MyDocument.class)); } public void testCreateSaveUpdateDeleteWithServerAssignedId() throws Exception { MyDocument document = new MyDocument(); document.setHelloWorld("Hello, world!"); getDatabase().forDocument(document).save(); MyDocument savedDocument = getDatabase().get(document.getId(), MyDocument.class); assertNotNull(savedDocument); assertEquals(document.getId(), savedDocument.getId()); assertEquals(document.getRev(), savedDocument.getRev()); assertEquals("Hello, world!", savedDocument.getHelloWorld()); assertTrue(getDatabase().forDocument(document).delete()); assertNull(getDatabase().get("123", MyDocument.class)); } 
public void testCopy() throws Exception { MyDocument document = new MyDocument(); document.setId("123"); document.setHelloWorld("copycat!"); getDatabase().forDocument(document).save(); getDatabase().forDocument(document).copy("987"); MyDocument copiedDocument = getDatabase().get("987", MyDocument.class); assertNotNull(copiedDocument); assertEquals(document.getHelloWorld(), copiedDocument.getHelloWorld()); } }
apache-2.0
cap-framework/cap-http
src/main/java/capframework/http/constant/ResponseType.java
1293
//region Copyright /*Copyright 2015-2016 尚尔路(sel8616@gmail.com/philshang@163.com) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ //endregion package capframework.http.constant; import capframework.http.annotation.CapAction; import capframework.http.publics.AppConfigStore; /** * Any action's ResponseType should be corresponding with the method return type except DEFAULT.<br/> * <li>{@link #HTML}/{@link #JSP}/{@link #FILE} --> {@link String}</li> * <li>{@link #TEXT}/{@link #JSON}/{@link #XML} --> {@link Object}</li> * <li>{@link #DEFAULT} --> This item is only used as default value of {@link CapAction#response_type()}, which means {@link AppConfigStore#getDefaultResponseType()} will take effect on the Action.</li> */ public enum ResponseType { DEFAULT, TEXT, XML, JSON, HTML, JSP, FILE }
apache-2.0
driver-pete/driver-pete-server
src/test/java/com/otognan/driverpete/logic/TrajectoryReaderTest.java
2221
package com.otognan.driverpete.logic; import static org.junit.Assert.assertEquals; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.text.ParseException; import java.util.List; import org.hamcrest.collection.IsIterableContainingInOrder; import org.junit.Assert; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3Client; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.S3Object; import com.otognan.driverpete.BaseStatelesSecurityITTest; public class TrajectoryReaderTest extends BaseStatelesSecurityITTest{ @Autowired AWSCredentials awsCredentials; @Test public void testTrajectoryReader() throws IOException, ParseException { AmazonS3 s3client = new AmazonS3Client(awsCredentials); S3Object object = s3client.getObject( new GetObjectRequest("driverpete-storage", "_testing/testing_merged_0")); InputStream objectData = object.getObjectContent(); List<Location> locations = TrajectoryReader.readTrajectory(objectData); //Process the objectData stream. objectData.close(); assertEquals(2423, locations.size()); } @Test public void testTrajectoryWriter() throws Exception { AmazonS3 s3client = new AmazonS3Client(awsCredentials); S3Object object = s3client.getObject( new GetObjectRequest("driverpete-storage", "_testing/testing_raw_0")); InputStream objectData = object.getObjectContent(); List<Location> locations = TrajectoryReader.readTrajectory(objectData); //Process the objectData stream. objectData.close(); byte[] compressedBytes = TrajectoryReader.writeTrajectory(locations); List<Location> locationsCopy = TrajectoryReader.readTrajectory(compressedBytes); Assert.assertThat(locationsCopy, IsIterableContainingInOrder.contains(locations.toArray())); } }
apache-2.0
alexruiz/fest-assert-2.x
src/test/java/org/fest/assertions/internal/Strings_assertContains_Test.java
2755
/* * Created on Dec 24, 2010 * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright @2010-2013 the original author or authors. */ package org.fest.assertions.internal; import static org.fest.assertions.test.FailureMessages.actualIsNull; import static org.fest.assertions.test.TestFailures.expectedAssertionErrorNotThrown; import static org.fest.test.ExpectedException.none; import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.any; import static org.mockito.Matchers.same; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; import org.fest.assertions.description.Description; import org.fest.assertions.error.ErrorMessageFactory; import org.fest.test.ExpectedException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; /** * Tests for {@link Strings#assertContains(Description, String, String)}. 
* * @author Alex Ruiz * @author Yvonne Wang */ public class Strings_assertContains_Test { @Rule public ExpectedException thrown = none(); private Failures failures; private Strings strings; @Before public void setUp() { failures = spy(new Failures()); strings = new Strings(); strings.failures = failures; } @Test public void should_pass_if_actual_contains_sequence() { strings.assertContains(mock(Description.class), "Yoda", "Yo"); } @Test public void should_throw_error_if_sequence_is_null() { thrown.expect(NullPointerException.class); strings.assertContains(mock(Description.class), "Yoda", null); } @Test public void should_fail_if_actual_is_null() { thrown.expect(AssertionError.class, actualIsNull()); strings.assertContains(mock(Description.class), null, "Yoda"); } @Test public void should_fail_if_actual_does_not_contain_sequence() { Description description = new TestDescription("Testing"); try { strings.assertContains(description, "Yoda", "Luke"); } catch (AssertionError e) { assertEquals("[Testing] expecting:<'Yoda'> to contain:<'Luke'>", e.getMessage()); verify(failures).failure(same(description), any(ErrorMessageFactory.class)); return; } throw expectedAssertionErrorNotThrown(); } }
apache-2.0
mgtechsoftware/smockin
src/main/java/com/smockin/admin/exception/MockExportException.java
281
package com.smockin.admin.exception; public class MockExportException extends RuntimeException { public MockExportException(final String msg) { super(msg); } public MockExportException(final String msg, Throwable cause) { super(msg, cause); } }
apache-2.0
sammcveety/DataflowJavaSDK
sdk/src/main/java/com/google/cloud/dataflow/sdk/coders/ListCoder.java
2395
/* * Copyright (C) 2015 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.dataflow.sdk.coders; import static com.google.common.base.Preconditions.checkArgument; import com.google.cloud.dataflow.sdk.util.PropertyNames; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; /** * A {@link Coder} for {@link List}, using the format of {@link IterableLikeCoder}. * * @param <T> the type of the elements of the Lists being transcoded */ public class ListCoder<T> extends IterableLikeCoder<T, List<T>> { public static <T> ListCoder<T> of(Coder<T> elemCoder) { return new ListCoder<>(elemCoder); } ///////////////////////////////////////////////////////////////////////////// // Internal operations below here. @Override protected final List<T> decodeToIterable(List<T> decodedElements) { return decodedElements; } @JsonCreator public static ListCoder<?> of( @JsonProperty(PropertyNames.COMPONENT_ENCODINGS) List<Coder<?>> components) { checkArgument(components.size() == 1, "Expecting 1 component, got " + components.size()); return of((Coder<?>) components.get(0)); } /** * Returns the first element in this list if it is non-empty, * otherwise returns {@code null}. 
*/ public static <T> List<Object> getInstanceComponents(List<T> exampleValue) { return getInstanceComponentsHelper(exampleValue); } protected ListCoder(Coder<T> elemCoder) { super(elemCoder, "List"); } /** * List sizes are always known, so ListIterable may be deterministic while * the general IterableLikeCoder is not. */ @Override public void verifyDeterministic() throws NonDeterministicException { verifyDeterministic( "ListCoder.elemCoder must be deterministic", getElemCoder()); } }
apache-2.0
xingstarx/TinkerDemo
app/src/main/java/com/star/tinker/reporter/SampleLoadReporter.java
3899
/* * Tencent is pleased to support the open source community by making Tinker available. * * Copyright (C) 2016 THL A29 Limited, a Tencent company. All rights reserved. * * Licensed under the BSD 3-Clause License (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * https://opensource.org/licenses/BSD-3-Clause * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions and * limitations under the License. */ package com.star.tinker.reporter; import android.content.Context; import android.os.Looper; import android.os.MessageQueue; import com.tencent.tinker.lib.reporter.DefaultLoadReporter; import com.tencent.tinker.lib.util.UpgradePatchRetry; import com.tencent.tinker.loader.shareutil.ShareConstants; import java.io.File; /** * optional, you can just use DefaultLoadReporter * Created by zhangshaowen on 16/4/13. 
*/ public class SampleLoadReporter extends DefaultLoadReporter { private final static String TAG = "Tinker.SampleLoadReporter"; public SampleLoadReporter(Context context) { super(context); } @Override public void onLoadPatchListenerReceiveFail(final File patchFile, int errorCode) { super.onLoadPatchListenerReceiveFail(patchFile, errorCode); SampleTinkerReport.onTryApplyFail(errorCode); } @Override public void onLoadResult(File patchDirectory, int loadCode, long cost) { super.onLoadResult(patchDirectory, loadCode, cost); switch (loadCode) { case ShareConstants.ERROR_LOAD_OK: SampleTinkerReport.onLoaded(cost); break; } Looper.getMainLooper().myQueue().addIdleHandler(new MessageQueue.IdleHandler() { @Override public boolean queueIdle() { if (UpgradePatchRetry.getInstance(context).onPatchRetryLoad()) { SampleTinkerReport.onReportRetryPatch(); } return false; } }); } @Override public void onLoadException(Throwable e, int errorCode) { super.onLoadException(e, errorCode); SampleTinkerReport.onLoadException(e, errorCode); } @Override public void onLoadFileMd5Mismatch(File file, int fileType) { super.onLoadFileMd5Mismatch(file, fileType); SampleTinkerReport.onLoadFileMisMatch(fileType); } /** * try to recover patch oat file * * @param file * @param fileType * @param isDirectory */ @Override public void onLoadFileNotFound(File file, int fileType, boolean isDirectory) { super.onLoadFileNotFound(file, fileType, isDirectory); SampleTinkerReport.onLoadFileNotFound(fileType); } @Override public void onLoadPackageCheckFail(File patchFile, int errorCode) { super.onLoadPackageCheckFail(patchFile, errorCode); SampleTinkerReport.onLoadPackageCheckFail(errorCode); } @Override public void onLoadPatchInfoCorrupted(String oldVersion, String newVersion, File patchInfoFile) { super.onLoadPatchInfoCorrupted(oldVersion, newVersion, patchInfoFile); SampleTinkerReport.onLoadInfoCorrupted(); } @Override public void onLoadInterpret(int type, Throwable e) { super.onLoadInterpret(type, e); 
SampleTinkerReport.onLoadInterpretReport(type, e); } @Override public void onLoadPatchVersionChanged(String oldVersion, String newVersion, File patchDirectoryFile, String currentPatchName) { super.onLoadPatchVersionChanged(oldVersion, newVersion, patchDirectoryFile, currentPatchName); } }
apache-2.0
mminella/jsr-352-ri-tck
JSR352.Annotations/src/javax/batch/annotation/BatchProperty.java
1090
/* * Copyright 2012 International Business Machines Corp. * * See the NOTICE file distributed with this work for additional information * regarding copyright ownership. Licensed under the Apache License, * Version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.batch.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; @Target(ElementType.FIELD) @Retention(RetentionPolicy.RUNTIME) public @interface BatchProperty { public String name() default ""; }
apache-2.0
xiaomi-passport/oauth-java-sdk
src/test/java/com/xiaomi/passport/api/RefreshAccessTokenHelperTest.java
1899
package com.xiaomi.passport.api; import com.xiaomi.passport.exception.OAuthSdkException; import com.xiaomi.passport.pojo.AccessToken; import com.xiaomi.passport.pojo.Client; import com.xiaomi.passport.utils.OAuthTestUtils; import org.apache.commons.lang.RandomStringUtils; import org.apache.commons.lang3.StringUtils; import org.junit.Assert; import org.junit.Before; import org.junit.Test; /** * @author zhenchao.wang 2017-04-21 11:43 * @version 1.0.0 */ public class RefreshAccessTokenHelperTest { private Client client; private RefreshAccessTokenHelper helper; @Before public void setUp() throws Exception { client = OAuthTestUtils.getTestClient(); helper = new RefreshAccessTokenHelper(client); } @Test public void refreshAccessTokenWithErrorTest() throws Exception { String refreshToken = RandomStringUtils.randomAlphanumeric(32); try { helper.refreshAccessToken(refreshToken); Assert.fail(); } catch (OAuthSdkException e) { Assert.assertEquals(96009, e.getErrorCode()); } } @Test public void refreshAccessTokenTest() throws Exception { String refreshToken = "your refresh token here"; try { AccessToken accessToken = helper.refreshAccessToken(refreshToken); Assert.assertNotNull(accessToken); System.out.println("access token : " + accessToken); Assert.assertTrue(StringUtils.isNotBlank(accessToken.getToken())); Assert.assertEquals(AccessToken.TokenType.MAC.getType(), accessToken.getTokenType()); Assert.assertTrue(StringUtils.isNotBlank(accessToken.getOpenId())); Assert.assertTrue(StringUtils.isNotBlank(accessToken.getRefreshToken())); } catch (OAuthSdkException e) { e.printStackTrace(); Assert.fail(); } } }
apache-2.0
vjanmey/EpicMudfia
com/planet_ink/coffee_mud/Abilities/Diseases/Disease_HeatExhaustion.java
4533
package com.planet_ink.coffee_mud.Abilities.Diseases; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2014 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ public class Disease_HeatExhaustion extends Disease { @Override public String ID() { return "Disease_HeatExhaustion"; } private final static String localizedName = CMLib.lang()._("Heat Exhaustion"); @Override public String name() { return localizedName; } private final static String localizedStaticDisplay = CMLib.lang()._("(Heat Exhaustion)"); @Override public String displayText() { return localizedStaticDisplay; } @Override protected int canAffectCode(){return CAN_MOBS;} @Override protected int canTargetCode(){return CAN_MOBS;} @Override public int abstractQuality(){return Ability.QUALITY_MALICIOUS;} @Override public boolean putInCommandlist(){return false;} @Override public int difficultyLevel(){return 1;} @Override protected int DISEASE_TICKS(){return 300;} @Override protected int DISEASE_DELAY(){return 3;} @Override protected String DISEASE_DONE(){return "Your head stops spinning.";} @Override protected String DISEASE_START(){return "^G<S-NAME> <S-IS-ARE> overcome by the heat.^?";} @Override protected String DISEASE_AFFECT(){return "";} @Override public int abilityCode(){return 0;} protected Room theRoom=null; protected int changeDown=300; public Room room(Room R) { if((theRoom==null) &&(R!=null) &&(!R.getArea().isProperlyEmpty())) theRoom=R.getArea().getRandomProperRoom(); theRoom=CMLib.map().getRoom(theRoom); if(R==theRoom) theRoom=null; return theRoom; } @Override public boolean okMessage(final Environmental myHost, final CMMsg msg) { if((affected!=null) &&(affected==msg.source()) &&(msg.amITarget(msg.source().location())) &&((msg.targetMinor()==CMMsg.TYP_LOOK)||(msg.targetMinor()==CMMsg.TYP_EXAMINE))) { final Room R=room(msg.source().location()); if((R==null)||(R==msg.source().location())) return true; final CMMsg msg2=CMClass.getMsg(msg.source(),R,msg.tool(), msg.sourceCode(),msg.sourceMessage(), msg.targetCode(),msg.targetMessage(), msg.othersCode(),msg.othersMessage()); if(R.okMessage(msg.source(),msg2)) { R.executeMsg(msg.source(),msg2); return 
false; } } return super.okMessage(myHost,msg); } @Override public boolean tick(Tickable ticking, int tickID) { if(!super.tick(ticking,tickID)) return false; if((affected instanceof MOB) &&(canBeUninvoked())) { final MOB M=((MOB)affected); if(M.location()!=null) { final Area A=M.location().getArea(); switch(M.location().domainType()) { case Room.DOMAIN_INDOORS_UNDERWATER: case Room.DOMAIN_OUTDOORS_UNDERWATER: unInvoke(); return false; } Climate C=null; if(A!=null) C=A.getClimateObj(); if(C!=null) switch(C.weatherType(M.location())) { case Climate.WEATHER_BLIZZARD: case Climate.WEATHER_HAIL: case Climate.WEATHER_RAIN: case Climate.WEATHER_SNOW: case Climate.WEATHER_THUNDERSTORM: case Climate.WEATHER_WINTER_COLD: { unInvoke(); return false; } default: break; } } } if((--changeDown)<=0) { changeDown=300; theRoom=null; } return true; } }
apache-2.0
mgtechsoftware/smockin
src/main/java/com/smockin/admin/persistence/migration/DataMigrationService.java
3224
package com.smockin.admin.persistence.migration; import com.smockin.admin.exception.MigrationException; import com.smockin.admin.persistence.migration.version.*; import com.smockin.utils.GeneralUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; import javax.annotation.PostConstruct; import java.util.*; /** * Created by gallina. */ @Service public class DataMigrationService { private final Logger logger = LoggerFactory.getLogger(DataMigrationService.class); @Autowired private MigrationPatch_121 migrationPatch_121; @Autowired private MigrationPatch_130 migrationPatch_130; @Autowired private MigrationPatch_150 migrationPatch_150; @Autowired private MigrationPatch_151 migrationPatch_151; @Autowired private MigrationPatch_167 migrationPatch_167; @Autowired private MigrationPatch_170 migrationPatch_170; @Autowired private MigrationPatch_190 migrationPatch_190; @Autowired private MigrationPatch_200 migrationPatch_200; @Autowired private MigrationPatch_210 migrationPatch_210; @Autowired private MigrationPatch_290 migrationPatch_290; @Autowired private MigrationPatch_2100 migrationPatch_2100; @Autowired private MigrationPatch_2160 migrationPatch_2160; private final Set<MigrationPatch> patches = new HashSet<>(); @Transactional public void applyVersionChanges(final String currentVersion, final String latestVersion) throws MigrationException { if (currentVersion == null) { // new app, no need to migrate return; } final int currentVersionNo = GeneralUtils.exactVersionNo(currentVersion); final int latestVersionNo = GeneralUtils.exactVersionNo(latestVersion); if (logger.isInfoEnabled()) { logger.info("Current Version No: " + currentVersionNo); logger.info("Latest Version No: " + latestVersionNo); } if (latestVersionNo == currentVersionNo) { // no app version change found, so nothing to migrate. 
return; } // Apply all patches for versions later then the previous version. for (MigrationPatch p : patches) { if (GeneralUtils.exactVersionNo(p.versionNo()) > currentVersionNo) { logger.info("Running data migration patch for app version " + p.versionNo()); p.execute(); } } } @PostConstruct public void after() { patches.add(migrationPatch_121); patches.add(migrationPatch_130); patches.add(migrationPatch_150); patches.add(migrationPatch_151); patches.add(migrationPatch_167); patches.add(migrationPatch_170); patches.add(migrationPatch_190); patches.add(migrationPatch_200); patches.add(migrationPatch_210); patches.add(migrationPatch_290); patches.add(migrationPatch_2100); patches.add(migrationPatch_2160); } }
apache-2.0
nafae/developer
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201306/ApiVersionError.java
1585
package com.google.api.ads.dfp.jaxws.v201306;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;

/**
 * Errors related to the usage of API versions.
 *
 * <p>JAXB binding for the {@code ApiVersionError} complex type of the
 * {@code https://www.google.com/apis/ads/publisher/v201306} schema. It extends
 * {@code ApiError} with a single optional {@code reason} element.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ApiVersionError", propOrder = {
    "reason"
})
public class ApiVersionError
    extends ApiError
{

    // Optional reason element; may be null when the element is absent.
    protected ApiVersionErrorReason reason;

    /**
     * Returns the reason for this API version error.
     *
     * @return the reason, or {@code null} if none was set
     */
    public ApiVersionErrorReason getReason() {
        return this.reason;
    }

    /**
     * Sets the reason for this API version error.
     *
     * @param newReason the reason to store; {@code null} clears it
     */
    public void setReason(ApiVersionErrorReason newReason) {
        this.reason = newReason;
    }

}
apache-2.0
alexryndin/ambari
ambari-server/src/main/java/org/apache/ambari/server/checks/StormShutdownWarning.java
2786
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.server.checks; import java.util.Arrays; import org.apache.ambari.server.AmbariException; import org.apache.ambari.server.controller.PrereqCheckRequest; import org.apache.ambari.server.state.stack.PrereqCheckStatus; import org.apache.ambari.server.state.stack.PrerequisiteCheck; import org.apache.ambari.server.state.stack.upgrade.UpgradeType; import com.google.inject.Singleton; /** * The {@link StormShutdownWarning} to see if Storm is installed and if the * upgrade type is {@link UpgradeType#ROLLING}. If so, then a * {@link PrereqCheckStatus#WARNING} is produced which will let the operator * know that Storm cannot be rolling on ceratin versions of the HDP stack. * <p/> * The upgrade packs must include this check where it is applicable. It contains * no logic for determine stack versions and only checks for the presence of * Storm and the type of upgrade. */ @Singleton @UpgradeCheck(group = UpgradeCheckGroup.INFORMATIONAL_WARNING, required = UpgradeType.ROLLING) public class StormShutdownWarning extends AbstractCheckDescriptor { /** * Constructor. 
*/ public StormShutdownWarning() { super(CheckDescription.SERVICES_STORM_ROLLING_WARNING); } /** * {@inheritDoc} * <p/> * This check is only applicable if Storm is installed and the upgrade type is * {@link UpgradeType#ROLLING}. */ @Override public boolean isApplicable(PrereqCheckRequest request) throws AmbariException { boolean isApplicable = super.isApplicable(request, Arrays.asList("STORM"), true); return isApplicable && request.getUpgradeType() == UpgradeType.ROLLING; } /** * {@inheritDoc} */ @Override public void perform(PrerequisiteCheck prerequisiteCheck, PrereqCheckRequest request) throws AmbariException { prerequisiteCheck.getFailedOn().add("STORM"); prerequisiteCheck.setStatus(PrereqCheckStatus.WARNING); prerequisiteCheck.setFailReason(getFailReason(prerequisiteCheck, request)); } }
apache-2.0
YoungDigitalPlanet/empiria.player
src/main/java/eu/ydp/empiria/player/client/components/event/InputEventRegistrar.java
1386
/*
 * Copyright 2017 Young Digital Planet S.A.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package eu.ydp.empiria.player.client.components.event;

import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.dom.client.Element;
import com.google.gwt.user.client.ui.IsWidget;

/**
 * Wires a native DOM {@code input} event handler onto a widget's element.
 */
public class InputEventRegistrar {

    /**
     * Registers {@code listener} to be notified on every DOM {@code input}
     * event fired by the widget's underlying element.
     *
     * @param widget   the widget whose element should report input events
     * @param listener callback invoked on each input event
     */
    public void registerInputHandler(IsWidget widget, InputEventListener listener) {
        InputEventListenerJsWrapper listenerJs = new InputEventListenerJsWrapper(listener);
        registerInputHandler(widget.asWidget().getElement(), listenerJs.getJavaScriptObject());
    }

    // JSNI: assigns element.oninput directly, so it REPLACES any handler
    // previously registered via the oninput property on that element.
    // (The original body captured `var self = this;` but never used it — a
    // dead reference to the Java instance inside the JS closure; removed.)
    private native void registerInputHandler(Element element, JavaScriptObject listenerJs)/*-{
        element.oninput = function () {
            listenerJs.onInput();
        }
    }-*/;
}
apache-2.0
nafae/developer
modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201403/ContendingLineItem.java
6010
/**
 * ContendingLineItem.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

package com.google.api.ads.dfp.axis.v201403;


/**
 * Describes contending line items for a {@link Forecast}.
 *
 * Auto-generated Axis bean: field access goes through the generated
 * getters/setters, and the static type metadata below drives Axis
 * (de)serialization — do not hand-edit the metadata.
 */
public class ContendingLineItem  implements java.io.Serializable {
    /* The {@link LineItem#id Id} of the contending line item. */
    private java.lang.Long lineItemId;

    /* Number of impressions contended for by both the forecasted
     * line item and this line item,
     * but served to this line item in the forecast simulation. */
    private java.lang.Long contendingImpressions;

    public ContendingLineItem() {
    }

    public ContendingLineItem(
           java.lang.Long lineItemId,
           java.lang.Long contendingImpressions) {
           this.lineItemId = lineItemId;
           this.contendingImpressions = contendingImpressions;
    }


    /**
     * Gets the lineItemId value for this ContendingLineItem.
     *
     * @return lineItemId
     * The {@link LineItem#id Id} of the contending line item.
     */
    public java.lang.Long getLineItemId() {
        return lineItemId;
    }


    /**
     * Sets the lineItemId value for this ContendingLineItem.
     *
     * @param lineItemId
     * The {@link LineItem#id Id} of the contending line item.
     */
    public void setLineItemId(java.lang.Long lineItemId) {
        this.lineItemId = lineItemId;
    }


    /**
     * Gets the contendingImpressions value for this ContendingLineItem.
     *
     * @return contendingImpressions
     * Number of impressions contended for by both the forecasted
     * line item and this line item,
     * but served to this line item in the forecast simulation.
     */
    public java.lang.Long getContendingImpressions() {
        return contendingImpressions;
    }


    /**
     * Sets the contendingImpressions value for this ContendingLineItem.
     *
     * @param contendingImpressions
     * Number of impressions contended for by both the forecasted
     * line item and this line item,
     * but served to this line item in the forecast simulation.
     */
    public void setContendingImpressions(java.lang.Long contendingImpressions) {
        this.contendingImpressions = contendingImpressions;
    }

    // Re-entrancy guard used by the generated equals() below: while an
    // equals() call is in progress this holds the object being compared,
    // so cyclic object graphs do not recurse forever.
    private java.lang.Object __equalsCalc = null;

    // Generated field-by-field equality; synchronized because the
    // __equalsCalc guard is shared mutable state on this instance.
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof ContendingLineItem)) return false;
        ContendingLineItem other = (ContendingLineItem) obj;
        // NOTE: unreachable after the instanceof check above (a null obj
        // would already have returned false) — generated-code artifact.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            // Already comparing against some object: equal only if it is
            // the very same reference (cycle detected).
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = true &&
            ((this.lineItemId==null && other.getLineItemId()==null) ||
             (this.lineItemId!=null &&
              this.lineItemId.equals(other.getLineItemId()))) &&
            ((this.contendingImpressions==null && other.getContendingImpressions()==null) ||
             (this.contendingImpressions!=null &&
              this.contendingImpressions.equals(other.getContendingImpressions())));
        __equalsCalc = null;
        return _equals;
    }

    // Re-entrancy guard for hashCode(), analogous to __equalsCalc: a
    // nested call on the same instance short-circuits to 0 to break cycles.
    private boolean __hashCodeCalc = false;

    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = 1;
        if (getLineItemId() != null) {
            _hashCode += getLineItemId().hashCode();
        }
        if (getContendingImpressions() != null) {
            _hashCode += getContendingImpressions().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata
    // Maps each Java field to its XML element for Axis (de)serialization;
    // built once in the static initializer below.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ContendingLineItem.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201403", "ContendingLineItem"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("lineItemId");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201403", "lineItemId"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
        elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("contendingImpressions");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201403", "contendingImpressions"));
        elemField.setXmlType(new javax.xml.namespace.QName("http://www.w3.org/2001/XMLSchema", "long"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
apache-2.0
hamnis/funclite
src/main/java/net/hamnaberg/funclite/Monoids.java
1851
package net.hamnaberg.funclite; import java.util.ArrayList; import java.util.List; public class Monoids { public static <A> Monoid<List<A>> listMonoid() { return new Monoid<List<A>>() { @Override public List<A> zero() { return new ArrayList<>(); } @Override public List<A> append(List<A> as, List<A> b) { ArrayList<A> list = new ArrayList<>(as); list.addAll(b); return list; } }; } public static Monoid<Integer> intPlusMonoid() { return new Monoid<Integer>() { @Override public Integer zero() { return 0; } @Override public Integer append(Integer a, Integer b) { return a + b; } }; } public static Monoid<Integer> intProductMonoid() { return new Monoid<Integer>() { @Override public Integer zero() { return 1; } @Override public Integer append(Integer a, Integer b) { return a * b; } }; } public static Monoid<Long> longProductMonoid() { return new Monoid<Long>() { @Override public Long zero() { return 1L; } @Override public Long append(Long a, Long b) { return a * b; } }; } public static Monoid<Long> longPlusMonoid() { return new Monoid<Long>() { @Override public Long zero() { return 0L; } @Override public Long append(Long a, Long b) { return a + b; } }; } }
apache-2.0
pklall/countit
countIt/src/com/example/countit/ImgGallery.java
1086
package com.example.countit; import android.support.v7.app.ActionBarActivity; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; public class ImgGallery extends ActionBarActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_img_gallery); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.img_gallery, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); if (id == R.id.action_settings) { return true; } return super.onOptionsItemSelected(item); } }
apache-2.0
liuyongfeng90/NSCGApp
app/src/main/java/com/nscg/app/model/domain/ListReport.java
2348
package com.nscg.app.model.domain;

import java.io.Serializable;

/**
 * One row of a per-district task/rectification report.
 *
 * NOTE(review): field and accessor names are Chinese identifiers — presumably
 * they mirror the Chinese JSON keys of the server payload and are mapped by
 * reflection, so they must not be renamed (TODO confirm against the API).
 * All values are kept as Strings; rate fields use "-" when undefined.
 *
 * Created by liuyongfeng on 2018/3/16.
 */
public class ListReport implements Serializable {

    private String 街道名称;      // street/sub-district name, e.g. "大岗镇"
    private String 任务数;        // total task count, e.g. 0
    private String 自发任务数;    // self-initiated task count, e.g. 0
    private String 紧急任务数;    // urgent task count, e.g. 0
    private String 已整改任务数;  // rectified task count, e.g. 0
    private String 已整改紧急任务数; // rectified urgent task count, e.g. 0
    private String 总整改率;      // overall rectification rate, "-" when undefined
    private String 自发任务率;    // self-initiated task rate, "-" when undefined
    private String 紧急任务整改率; // urgent-task rectification rate, "-" when undefined

    // Getter/setter pairs below are plain accessors with no extra logic.

    public String get街道名称() {
        return 街道名称;
    }

    public void set街道名称(String 街道名称) {
        this.街道名称 = 街道名称;
    }

    public String get任务数() {
        return 任务数;
    }

    public void set任务数(String 任务数) {
        this.任务数 = 任务数;
    }

    public String get自发任务数() {
        return 自发任务数;
    }

    public void set自发任务数(String 自发任务数) {
        this.自发任务数 = 自发任务数;
    }

    public String get紧急任务数() {
        return 紧急任务数;
    }

    public void set紧急任务数(String 紧急任务数) {
        this.紧急任务数 = 紧急任务数;
    }

    public String get已整改任务数() {
        return 已整改任务数;
    }

    public void set已整改任务数(String 已整改任务数) {
        this.已整改任务数 = 已整改任务数;
    }

    public String get已整改紧急任务数() {
        return 已整改紧急任务数;
    }

    public void set已整改紧急任务数(String 已整改紧急任务数) {
        this.已整改紧急任务数 = 已整改紧急任务数;
    }

    public String get总整改率() {
        return 总整改率;
    }

    public void set总整改率(String 总整改率) {
        this.总整改率 = 总整改率;
    }

    public String get自发任务率() {
        return 自发任务率;
    }

    public void set自发任务率(String 自发任务率) {
        this.自发任务率 = 自发任务率;
    }

    public String get紧急任务整改率() {
        return 紧急任务整改率;
    }

    public void set紧急任务整改率(String 紧急任务整改率) {
        this.紧急任务整改率 = 紧急任务整改率;
    }
}
apache-2.0
leafclick/intellij-community
plugins/gradle/src/org/jetbrains/plugins/gradle/service/GradleInstallationManager.java
24326
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.plugins.gradle.service; import com.intellij.openapi.components.ServiceManager; import com.intellij.openapi.externalSystem.ExternalSystemModulePropertyManager; import com.intellij.openapi.externalSystem.service.execution.ExternalSystemJdkException; import com.intellij.openapi.externalSystem.service.execution.ExternalSystemJdkUtil; import com.intellij.openapi.externalSystem.service.notification.callback.OpenExternalSystemSettingsCallback; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.JdkUtil; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.OrderEnumerator; import com.intellij.openapi.util.Pair; import com.intellij.openapi.util.Ref; import com.intellij.openapi.util.SystemInfo; import com.intellij.openapi.util.Version; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.JarFileSystem; import com.intellij.openapi.vfs.LocalFileSystem; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.util.containers.ContainerUtil; import org.gradle.StartParameter; import org.gradle.util.DistributionLocator; import org.gradle.util.GradleVersion; import org.gradle.wrapper.PathAssembler; import org.gradle.wrapper.WrapperConfiguration; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.jetbrains.plugins.gradle.settings.DistributionType; import org.jetbrains.plugins.gradle.settings.GradleLocalSettings; import org.jetbrains.plugins.gradle.settings.GradleProjectSettings; import org.jetbrains.plugins.gradle.settings.GradleSettings; import org.jetbrains.plugins.gradle.util.GradleEnvironment; import org.jetbrains.plugins.gradle.util.GradleLog; 
import org.jetbrains.plugins.gradle.util.GradleUtil; import java.io.File; import java.net.URI; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * Encapsulates algorithm of gradle libraries discovery. * <p/> * Thread-safe. * * @author Denis Zhdanov */ @SuppressWarnings("MethodMayBeStatic") public class GradleInstallationManager { public static final Pattern GRADLE_JAR_FILE_PATTERN; public static final Pattern ANY_GRADLE_JAR_FILE_PATTERN; public static final Pattern ANT_JAR_PATTERN = Pattern.compile("ant(-(.*))?\\.jar"); public static final Pattern IVY_JAR_PATTERN = Pattern.compile("ivy(-(.*))?\\.jar"); private static final String[] GRADLE_START_FILE_NAMES; @NonNls private static final String GRADLE_ENV_PROPERTY_NAME; private static final Path BREW_GRADLE_LOCATION = Paths.get("/usr/local/Cellar/gradle/"); private static final String LIBEXEC = "libexec"; static { // Init static data with ability to redefine it locally. GRADLE_JAR_FILE_PATTERN = Pattern.compile(System.getProperty("gradle.pattern.core.jar", "gradle-(core-)?(\\d.*)\\.jar")); ANY_GRADLE_JAR_FILE_PATTERN = Pattern.compile(System.getProperty("gradle.pattern.core.jar", "gradle-(.*)\\.jar")); GRADLE_START_FILE_NAMES = System.getProperty("gradle.start.file.names", "gradle:gradle.cmd:gradle.sh").split(":"); GRADLE_ENV_PROPERTY_NAME = System.getProperty("gradle.home.env.key", "GRADLE_HOME"); } @Nullable private Ref<File> myCachedGradleHomeFromPath; /** * Allows to get file handles for the gradle binaries to use. 
* * @param gradleHome gradle sdk home * @return file handles for the gradle binaries; {@code null} if gradle is not discovered */ @Nullable public Collection<File> getAllLibraries(@Nullable File gradleHome) { if (gradleHome == null || !gradleHome.isDirectory()) { return null; } List<File> result = new ArrayList<>(); File libs = new File(gradleHome, "lib"); File[] files = libs.listFiles(); if (files != null) { for (File file : files) { if (file.getName().endsWith(".jar")) { result.add(file); } } } File plugins = new File(libs, "plugins"); files = plugins.listFiles(); if (files != null) { for (File file : files) { if (file.getName().endsWith(".jar")) { result.add(file); } } } return result.isEmpty() ? null : result; } @Nullable public File getGradleHome(@Nullable Project project, @NotNull String linkedProjectPath) { return doGetGradleHome(project, linkedProjectPath); } @Nullable public Sdk getGradleJdk(@Nullable Project project, @NotNull String linkedProjectPath) { return doGetGradleJdk(project, linkedProjectPath); } @Nullable private Sdk doGetGradleJdk(@Nullable Project project, String linkedProjectPath) { if (project == null) { return null; } final GradleProjectSettings settings = GradleSettings.getInstance(project).getLinkedProjectSettings(linkedProjectPath); if (settings == null) { Pair<String, Sdk> sdkPair = ExternalSystemJdkUtil.getAvailableJdk(project); if (!ExternalSystemJdkUtil.USE_INTERNAL_JAVA.equals(sdkPair.first) || ExternalSystemJdkUtil.isValidJdk(sdkPair.second)) { return sdkPair.second; } else { return null; } } final String gradleJvm = settings.getGradleJvm(); Sdk sdk; try { sdk = ExternalSystemJdkUtil.getJdk(project, gradleJvm); } catch (ExternalSystemJdkException e) { throw new ExternalSystemJdkException( String.format("Invalid Gradle JDK configuration found. 
<a href='%s'>Open Gradle Settings</a> \n", OpenExternalSystemSettingsCallback.ID), e, OpenExternalSystemSettingsCallback.ID); } if (sdk == null && gradleJvm != null) { throw new ExternalSystemJdkException( String.format("Invalid Gradle JDK configuration found. <a href='%s'>Open Gradle Settings</a> \n", OpenExternalSystemSettingsCallback.ID), null, OpenExternalSystemSettingsCallback.ID); } String sdkHomePath = sdk != null ? sdk.getHomePath() : null; if (sdkHomePath != null && JdkUtil.checkForJre(sdkHomePath) && !JdkUtil.checkForJdk(sdkHomePath)) { throw new ExternalSystemJdkException( String.format("Please, use JDK instead of JRE for Gradle importer. <a href='%s'>Open Gradle Settings</a> \n", OpenExternalSystemSettingsCallback.ID), null, OpenExternalSystemSettingsCallback.ID); } return sdk; } /** * Tries to return file handle that points to the gradle installation home. * * @param project target project (if any) * @param linkedProjectPath path to the target linked project config * @return file handle that points to the gradle installation home (if any) */ @Nullable private File doGetGradleHome(@Nullable Project project, @NotNull String linkedProjectPath) { if (project == null) { return null; } GradleProjectSettings settings = GradleSettings.getInstance(project).getLinkedProjectSettings(linkedProjectPath); if (settings == null || settings.getDistributionType() == null) { return null; } String gradleHome = settings.getDistributionType() == DistributionType.WRAPPED ? 
GradleLocalSettings.getInstance(project).getGradleHome(linkedProjectPath) : settings.getGradleHome(); return getGradleHome(settings.getDistributionType(), linkedProjectPath, gradleHome); } @Nullable private File getGradleHome(@NotNull DistributionType distributionType, @NotNull String linkedProjectPath, @Nullable String gradleHome) { File candidate = null; switch (distributionType) { case LOCAL: case WRAPPED: if (gradleHome != null) { candidate = new File(gradleHome); } break; case DEFAULT_WRAPPED: WrapperConfiguration wrapperConfiguration = GradleUtil.getWrapperConfiguration(linkedProjectPath); candidate = getWrappedGradleHome(linkedProjectPath, wrapperConfiguration); break; case BUNDLED: WrapperConfiguration bundledWrapperSettings = new WrapperConfiguration(); DistributionLocator distributionLocator = new DistributionLocator(); bundledWrapperSettings.setDistribution(distributionLocator.getDistributionFor(GradleVersion.current())); candidate = getWrappedGradleHome(linkedProjectPath, bundledWrapperSettings); break; } File result = null; if (candidate != null) { result = isGradleSdkHome(candidate) ? candidate : null; } if (result != null) { return result; } return getAutodetectedGradleHome(); } /** * Tries to deduce gradle location from current environment. 
* * @return gradle home deduced from the current environment (if any); {@code null} otherwise */ @Nullable public File getAutodetectedGradleHome() { File result = getGradleHomeFromPath(); if (result != null) return result; result = getGradleHomeFromEnvProperty(); if (result != null) return result; if (SystemInfo.isMac) { return getGradleHomeFromBrew(); } return null; } @Nullable private File getGradleHomeFromBrew() { try { try (DirectoryStream<Path> ds = Files.newDirectoryStream(BREW_GRADLE_LOCATION)) { Path bestPath = null; Version highestVersion = null; for (Path path : ds) { String fileName = path.getFileName().toString(); try { Version version = Version.parseVersion(fileName); if (version == null) continue; if (highestVersion == null || version.compareTo(highestVersion) > 0) { highestVersion = version; bestPath = path; } } catch (NumberFormatException ignored) { } } if (bestPath != null) { Path libexecPath = bestPath.resolve(LIBEXEC); if (Files.exists(libexecPath)) { return libexecPath.toFile(); } } } } catch (Exception ignored) { } return null; } /** * Tries to suggest better path to gradle home * @param homePath expected path to gradle home * @return proper in terms of {@link #isGradleSdkHome(File)} path or {@code null} if it is impossible to fix path */ public String suggestBetterGradleHomePath(@NotNull String homePath) { Path path = Paths.get(homePath); if (path.startsWith(BREW_GRADLE_LOCATION)) { Path libexecPath = path.resolve(LIBEXEC); File libexecFile = libexecPath.toFile(); if (isGradleSdkHome(libexecFile)) { return libexecPath.toString(); } } return null; } /** * Tries to return gradle home that is defined as a dependency to the given module. 
* * @param module target module * @return file handle that points to the gradle installation home defined as a dependency of the given module (if any) */ @Nullable public VirtualFile getGradleHome(@Nullable Module module) { if (module == null) { return null; } final VirtualFile[] roots = OrderEnumerator.orderEntries(module).getAllLibrariesAndSdkClassesRoots(); for (VirtualFile root : roots) { if (root != null && isGradleSdkHome(root)) { return root; } } return null; } /** * Tries to return gradle home defined as a dependency of the given module; falls back to the project-wide settings otherwise. * * @param module target module that can have gradle home as a dependency * @param project target project which gradle home setting should be used if module-specific gradle location is not defined * @return gradle home derived from the settings of the given entities (if any); {@code null} otherwise */ @Nullable public VirtualFile getGradleHome(@Nullable Module module, @Nullable Project project, @NotNull String linkedProjectPath) { final VirtualFile result = getGradleHome(module); if (result != null) { return result; } final File home = getGradleHome(project, linkedProjectPath); return home == null ? 
null : LocalFileSystem.getInstance().refreshAndFindFileByIoFile(home); } /** * Tries to discover gradle installation path from the configured system path * * @return file handle for the gradle directory if it's possible to deduce from the system path; {@code null} otherwise */ @Nullable public File getGradleHomeFromPath() { Ref<File> ref = myCachedGradleHomeFromPath; if (ref != null) { return ref.get(); } String path = System.getenv("PATH"); if (path == null) { return null; } for (String pathEntry : path.split(File.pathSeparator)) { File dir = new File(pathEntry); if (!dir.isDirectory()) { continue; } for (String fileName : GRADLE_START_FILE_NAMES) { File startFile = new File(dir, fileName); if (startFile.isFile()) { File candidate = dir.getParentFile(); if (isGradleSdkHome(candidate)) { myCachedGradleHomeFromPath = new Ref<>(candidate); return candidate; } } } } return null; } /** * Tries to discover gradle installation via environment property. * * @return file handle for the gradle directory deduced from the system property (if any) */ @Nullable public File getGradleHomeFromEnvProperty() { String path = System.getenv(GRADLE_ENV_PROPERTY_NAME); if (path == null) { return null; } File candidate = new File(path); return isGradleSdkHome(candidate) ? candidate : null; } /** * Does the same job as {@link #isGradleSdkHome(File)} for the given virtual file. * * @param file gradle installation home candidate * @return {@code true} if given file points to the gradle installation; {@code false} otherwise */ public boolean isGradleSdkHome(@Nullable VirtualFile file) { if (file == null) { return false; } return isGradleSdkHome(new File(file.getPath())); } /** * Allows to answer if given virtual file points to the gradle installation root. 
* * @param file gradle installation root candidate * @return {@code true} if we consider that given file actually points to the gradle installation root; * {@code false} otherwise */ public boolean isGradleSdkHome(@Nullable File file) { if (file == null) { return false; } final File libs = new File(file, "lib"); if (!libs.isDirectory()) { if (GradleEnvironment.DEBUG_GRADLE_HOME_PROCESSING) { GradleLog.LOG.info(String.format( "Gradle sdk check failed for the path '%s'. Reason: it doesn't have a child directory named 'lib'", file.getAbsolutePath() )); } return false; } final boolean found = isGradleSdk(libs.listFiles()); if (GradleEnvironment.DEBUG_GRADLE_HOME_PROCESSING) { GradleLog.LOG.info(String.format("Gradle home check %s for the path '%s'", found ? "passed" : "failed", file.getAbsolutePath())); } return found; } /** * Allows to answer if given virtual file points to the gradle installation root. * * @param gradleHomePath gradle installation root candidate * @return {@code true} if we consider that given file actually points to the gradle installation root; * {@code false} otherwise */ public boolean isGradleSdkHome(String gradleHomePath) { return isGradleSdkHome(new File(gradleHomePath)); } /** * Allows to answer if given files contain the one from gradle installation. * * @param files files to process * @return {@code true} if one of the given files is from the gradle installation; {@code false} otherwise */ public boolean isGradleSdk(VirtualFile @Nullable ... files) { if (files == null) { return false; } File[] arg = new File[files.length]; for (int i = 0; i < files.length; i++) { arg[i] = new File(files[i].getPath()); } return isGradleSdk(arg); } private boolean isGradleSdk(File @Nullable ... files) { return findGradleJar(files) != null; } @Nullable private File findGradleJar(File @Nullable ... 
files) { if (files == null) { return null; } for (File file : files) { if (GRADLE_JAR_FILE_PATTERN.matcher(file.getName()).matches()) { return file; } } if (GradleEnvironment.DEBUG_GRADLE_HOME_PROCESSING) { StringBuilder filesInfo = new StringBuilder(); for (File file : files) { filesInfo.append(file.getAbsolutePath()).append(';'); } if (filesInfo.length() > 0) { filesInfo.setLength(filesInfo.length() - 1); } GradleLog.LOG.info(String.format( "Gradle sdk check fails. Reason: no one of the given files matches gradle JAR pattern (%s). Files: %s", GRADLE_JAR_FILE_PATTERN.toString(), filesInfo )); } return null; } /** * Allows to ask for the classpath roots of the classes that are additionally provided by the gradle integration (e.g. gradle class * files, bundled groovy-all jar etc). * * @param project target project to use for gradle home retrieval * @return classpath roots of the classes that are additionally provided by the gradle integration (if any); * {@code null} otherwise */ @Nullable public List<VirtualFile> getClassRoots(@Nullable Project project) { List<File> files = getClassRoots(project, null); if(files == null) return null; final LocalFileSystem localFileSystem = LocalFileSystem.getInstance(); final JarFileSystem jarFileSystem = JarFileSystem.getInstance(); return ContainerUtil.mapNotNull(files, file -> { final VirtualFile virtualFile = localFileSystem.refreshAndFindFileByIoFile(file); return virtualFile != null ? 
jarFileSystem.getJarRootForLocalFile(virtualFile) : null; }); } @Nullable public List<File> getClassRoots(@Nullable Project project, @Nullable String rootProjectPath) { if (project == null) return null; if(rootProjectPath == null) { for (Module module : ModuleManager.getInstance(project).getModules()) { rootProjectPath = ExternalSystemModulePropertyManager.getInstance(module).getRootProjectPath(); List<File> result = findGradleSdkClasspath(project, rootProjectPath); if(!result.isEmpty()) return result; } } else { return findGradleSdkClasspath(project, rootProjectPath); } return null; } @Nullable public static String getGradleVersion(@Nullable String gradleHome) { if (gradleHome == null) return null; File libs = new File(gradleHome, "lib"); if(!libs.isDirectory()) return null; File[] files = libs.listFiles(); if (files != null) { for (File file : files) { final Matcher matcher = GRADLE_JAR_FILE_PATTERN.matcher(file.getName()); if (matcher.matches()) { return matcher.group(2); } } } return null; } private List<File> findGradleSdkClasspath(Project project, String rootProjectPath) { List<File> result = new ArrayList<>(); if (StringUtil.isEmpty(rootProjectPath)) return result; File gradleHome = getGradleHome(project, rootProjectPath); if (gradleHome == null || !gradleHome.isDirectory()) { return result; } File src = new File(gradleHome, "src"); if (src.isDirectory()) { if(new File(src, "org").isDirectory()) { addRoots(result, src); } else { addRoots(result, src.listFiles()); } } final Collection<File> libraries = getAllLibraries(gradleHome); if (libraries == null) { return result; } for (File file : libraries) { if (isGradleBuildClasspathLibrary(file)) { ContainerUtil.addIfNotNull(result, file); } } return result; } private boolean isGradleBuildClasspathLibrary(File file) { String fileName = file.getName(); return ANY_GRADLE_JAR_FILE_PATTERN.matcher(fileName).matches() || ANT_JAR_PATTERN.matcher(fileName).matches() || IVY_JAR_PATTERN.matcher(fileName).matches() || 
isGroovyJar(fileName); } private void addRoots(@NotNull List<? super File> result, File @Nullable ... files) { if (files == null) return; for (File file : files) { if (file == null || !file.isDirectory()) continue; result.add(file); } } private File getWrappedGradleHome(String linkedProjectPath, @Nullable final WrapperConfiguration wrapperConfiguration) { if (wrapperConfiguration == null) { return null; } File gradleSystemDir; if ("PROJECT".equals(wrapperConfiguration.getDistributionBase())) { gradleSystemDir = new File(linkedProjectPath, ".gradle"); } else { gradleSystemDir = StartParameter.DEFAULT_GRADLE_USER_HOME; } if (!gradleSystemDir.isDirectory()) { return null; } PathAssembler.LocalDistribution localDistribution = new PathAssembler(gradleSystemDir).getDistribution(wrapperConfiguration); if (localDistribution.getDistributionDir() == null) { return null; } File[] distFiles = localDistribution.getDistributionDir().listFiles( f -> f.isDirectory() && StringUtil.startsWith(f.getName(), "gradle-")); return distFiles == null || distFiles.length == 0 ? 
null : distFiles[0]; } private static boolean isGroovyJar(@NotNull String name) { name = StringUtil.toLowerCase(name); return name.startsWith("groovy-all-") && name.endsWith(".jar") && !name.contains("src") && !name.contains("doc"); } @Nullable public static GradleVersion getGradleVersion(@NotNull GradleProjectSettings settings) { GradleVersion version = null; DistributionType distributionType = settings.getDistributionType(); if (distributionType == null) return null; if (distributionType == DistributionType.LOCAL) { String gradleVersion = getGradleVersion(settings.getGradleHome()); if (gradleVersion != null) { version = getGradleVersionSafe(gradleVersion); } } else if (distributionType == DistributionType.BUNDLED) { return GradleVersion.current(); } else if (distributionType == DistributionType.DEFAULT_WRAPPED) { WrapperConfiguration wrapperConfiguration = GradleUtil.getWrapperConfiguration(settings.getExternalProjectPath()); GradleInstallationManager installationManager = ServiceManager.getService(GradleInstallationManager.class); File gradleHome = installationManager.getWrappedGradleHome(settings.getExternalProjectPath(), wrapperConfiguration); if (gradleHome != null) { String gradleVersion = getGradleVersion(settings.getGradleHome()); if (gradleVersion != null) { version = getGradleVersionSafe(gradleVersion); } } if (version == null && wrapperConfiguration != null) { URI uri = wrapperConfiguration.getDistribution(); if (uri != null) { String path = uri.getRawPath(); if (path != null) { version = parseDistributionVersion(path); } } } } return version; } @Nullable public static GradleVersion parseDistributionVersion(@NotNull String path) { path = StringUtil.substringAfterLast(path, "/"); if (path == null) return null; path = StringUtil.substringAfterLast(path, "gradle-"); if (path == null) return null; int i = path.lastIndexOf('-'); if (i <= 0) return null; return getGradleVersionSafe(path.substring(0, i)); } @Nullable private static GradleVersion 
getGradleVersionSafe(String gradleVersion) { try { return GradleVersion.version(gradleVersion); } catch (IllegalArgumentException e) { // GradleVersion.version(gradleVersion) might throw exception for custom Gradle versions // https://youtrack.jetbrains.com/issue/IDEA-216892 return null; } } }
apache-2.0
whummer/scaleDOM
src/main/java/at/ac/tuwien/dsg/scaledom/ScaleDomDocumentBuilderFactory.java
4673
package at.ac.tuwien.dsg.scaledom; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import java.util.HashMap; import java.util.Map; import javax.xml.XMLConstants; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import at.ac.tuwien.dsg.scaledom.io.ReaderFactory; import at.ac.tuwien.dsg.scaledom.lazy.LazyLoadingStrategy; import at.ac.tuwien.dsg.scaledom.parser.XmlParser; import at.ac.tuwien.dsg.scaledom.util.ComponentFactory; /** * ScaleDOM implementation of the <code>DocumentBuilderFactory</code>. Utilizes the <code>attributes</code> map for * configuration, see {@link ScaleDom} for possible configuration options.<br/> * Warnings: * <ul> * <li>DocumentBuilderFactory.isXIncludeAware is not supported</li> * <li>DocumentBuilderFactory.schema is not supported</li> * <li>http://javax.xml.XMLConstants/feature/secure-processing feature has no impact</li> * </ul> * * @author Dominik Rauch * @see DocumentBuilderFactory * @see ScaleDom */ public class ScaleDomDocumentBuilderFactory extends DocumentBuilderFactory { private final Map<String, Object> attributes; private final Map<String, Boolean> features; /** * Default constructor. 
*/ public ScaleDomDocumentBuilderFactory() { attributes = new HashMap<String, Object>(); features = new HashMap<String, Boolean>(); // Add features which are required to be supported features.put(XMLConstants.FEATURE_SECURE_PROCESSING, false); // Add all configuration options using defaults as initial values attributes.put(ScaleDom.ATTRIBUTE_XMLPARSER_IMPLEMENTATION, ScaleDom.DEFAULT_XMLPARSER_IMPLEMENTATION); attributes.put(ScaleDom.ATTRIBUTE_READERFACTORY_IMPLEMENTATION, ScaleDom.DEFAULT_READERFACTORY_IMPLEMENTATION); attributes.put(ScaleDom.ATTRIBUTE_LAZYLOADINGSTRATEGY_IMPLEMENTATION, ScaleDom.DEFAULT_LAZYLOADINGSTRATEGY_IMPLEMENTATION); attributes.put(ScaleDom.ATTRIBUTE_DEFAULTENCODING, ScaleDom.DEFAULT_DEFAULTENCODING); } @Override @SuppressWarnings("unchecked") public DocumentBuilder newDocumentBuilder() throws ParserConfigurationException { try { final ComponentFactory componentFactory = new ComponentFactory(); // Bind XmlParser instance componentFactory.bind(XmlParser.class, (Class<? extends XmlParser>) attributes.get(ScaleDom.ATTRIBUTE_XMLPARSER_IMPLEMENTATION), this); // Bind ReaderFactory implementation type componentFactory.bind(ReaderFactory.class, (Class<? extends ReaderFactory>) attributes.get(ScaleDom.ATTRIBUTE_READERFACTORY_IMPLEMENTATION)); // Bind LazyLoadingStrategy implementation type componentFactory.bind(LazyLoadingStrategy.class, (Class<? 
extends LazyLoadingStrategy>) attributes .get(ScaleDom.ATTRIBUTE_LAZYLOADINGSTRATEGY_IMPLEMENTATION)); final String defaultEncoding = (String) attributes.get(ScaleDom.ATTRIBUTE_DEFAULTENCODING); return new ScaleDomDocumentBuilder(componentFactory, defaultEncoding); } catch (final InstantiationException ex) { throw new ParserConfigurationException("Component 'XmlParser' could not be instantiated."); } } @Override public Object getAttribute(final String name) throws IllegalArgumentException { checkNotNull(name, "Argument name must not be null."); checkArgument(attributes.containsKey(name), "Attribute '%s' is not recognized.", name); return attributes.get(name); } @Override public void setAttribute(final String name, final Object value) throws IllegalArgumentException { checkNotNull(name, "Argument name must not be null."); checkArgument(attributes.containsKey(name), "Attribute '%s' is not recognized.", name); checkNotNull(value, "Argument value must not be null."); attributes.put(name, value); } @Override public boolean getFeature(final String name) throws ParserConfigurationException { checkNotNull(name, "Argument name must not be null."); if (!features.containsKey(name)) { throw new ParserConfigurationException("Feature '" + name + "' is not supported."); } return features.get(name); } @Override public void setFeature(final String name, final boolean value) throws ParserConfigurationException { checkNotNull(name, "Argument name must not be null."); if (!features.containsKey(name)) { throw new ParserConfigurationException("Feature '" + name + "' is not supported."); } checkNotNull(value, "Argument value must not be null."); features.put(name, value); } }
apache-2.0
alincalinciuc/nubomedia-paas
src/main/java/org/project/openbaton/nubomedia/api/openshift/json/SshGitChallengeSecret.java
915
package org.project.openbaton.nubomedia.api.openshift.json; import org.apache.commons.codec.binary.Base64; /** * Created by maa on 01.10.15. */ public class SshGitChallengeSecret implements SecretType{ private String username; private String password; public SshGitChallengeSecret(String username, String password) { this.username = Base64.encodeBase64String(username.getBytes()); this.password = Base64.encodeBase64String(password.getBytes()); } public SshGitChallengeSecret() { } public String getUsername() { return username; } public void setUsername(String username) { this.username = Base64.encodeBase64String(username.getBytes()); } public String getPassword() { return password; } public void setPassword(String password) { this.password = Base64.encodeBase64String(password.getBytes()); } }
apache-2.0
rrbutani/redolent-waddle
source/AudioVisualizer/src/Microphone.java
6371
import javax.sound.sampled.*;

import java.io.File;

/***************************************************************************
 * Microphone class that contains methods to capture audio from microphone
 *
 * @author Luke Kuza, Aaron Gokaslan
 ***************************************************************************/
public class Microphone {

    /**
     * TargetDataLine variable to receive data from microphone
     */
    private TargetDataLine targetDataLine;

    /**
     * Enum for current Microphone state
     */
    public enum CaptureState {
        PROCESSING_AUDIO, STARTING_CAPTURE, CLOSED
    }

    /**
     * Current capture state; see {@link CaptureState} for the meaning of each value.
     */
    CaptureState state;

    /**
     * Audio container format (e.g. WAVE) used when saving captured audio.
     */
    private AudioFileFormat.Type fileType;

    /**
     * Destination file that holds the saved audio.
     */
    private File audioFile;

    /**
     * Gets the current state of Microphone
     *
     * @return PROCESSING_AUDIO is returned when the Thread is recording Audio and/or saving it to a file<br>
     *         STARTING_CAPTURE is returned if the Thread is setting variables<br>
     *         CLOSED is returned if the Thread is not doing anything/not capturing audio
     */
    public CaptureState getState() {
        return state;
    }

    /**
     * Sets the current state of Microphone
     *
     * @param state State from enum
     */
    private void setState(CaptureState state) {
        this.state = state;
    }

    public File getAudioFile() {
        return audioFile;
    }

    public void setAudioFile(File audioFile) {
        this.audioFile = audioFile;
    }

    public AudioFileFormat.Type getFileType() {
        return fileType;
    }

    public void setFileType(AudioFileFormat.Type fileType) {
        this.fileType = fileType;
    }

    public TargetDataLine getTargetDataLine() {
        return targetDataLine;
    }

    public void setTargetDataLine(TargetDataLine targetDataLine) {
        this.targetDataLine = targetDataLine;
    }

    /**
     * Constructor
     *
     * @param fileType File type to save the audio in<br>
     *                 Example, to save as WAVE use AudioFileFormat.Type.WAVE
     */
    public Microphone(AudioFileFormat.Type fileType) {
        setState(CaptureState.CLOSED);
        setFileType(fileType);
        initTargetDataLine();
    }

    /**
     * Initializes the target data line.
     * On LineUnavailableException the error is logged and the line stays null,
     * matching the original best-effort behavior.
     */
    private void initTargetDataLine() {
        DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, getAudioFormat());
        try {
            setTargetDataLine((TargetDataLine) AudioSystem.getLine(dataLineInfo));
        } catch (LineUnavailableException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    /**
     * Captures audio from the microphone and saves it a file
     *
     * @param audioFile The File to save the audio to
     * @throws Exception Throws an exception if something went wrong
     */
    public void captureAudioToFile(File audioFile) throws Exception {
        setState(CaptureState.STARTING_CAPTURE);
        setAudioFile(audioFile);

        // Acquire a fresh line for this capture session.
        DataLine.Info dataLineInfo = new DataLine.Info(TargetDataLine.class, getAudioFormat());
        setTargetDataLine((TargetDataLine) AudioSystem.getLine(dataLineInfo));

        // Capture on a background thread so this call does not block.
        new Thread(new CaptureThread()).start();
    }

    /**
     * Captures audio from the microphone and saves it a file
     *
     * @param audioFile The fully path (String) to a file you want to save the audio in
     * @throws Exception Throws an exception if something went wrong
     */
    public void captureAudioToFile(String audioFile) throws Exception {
        // Delegate to the File overload to avoid duplicating the capture setup.
        captureAudioToFile(new File(audioFile));
    }

    /**
     * The audio format to save in
     *
     * @return Returns AudioFormat to be used later when capturing audio from microphone
     */
    public AudioFormat getAudioFormat() {
        float sampleRate = 44100.0F;    // 8000,11025,16000,22050,44100
        int sampleSizeInBits = 16;      // 8,16
        int channels = 1;               // 1,2
        boolean signed = true;          // true,false
        boolean bigEndian = false;      // true,false
        return new AudioFormat(sampleRate, sampleSizeInBits, channels, signed, bigEndian);
    }

    /**
     * Opens the microphone, starting the targetDataLine.
     * If it's already open, it does nothing.
     */
    public void open() {
        if (getTargetDataLine() == null) {
            initTargetDataLine();
        }
        if (!getTargetDataLine().isOpen()) {
            try {
                setState(CaptureState.PROCESSING_AUDIO);
                getTargetDataLine().open(getAudioFormat());
                getTargetDataLine().start();
            } catch (LineUnavailableException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }

    /**
     * Close the microphone capture, saving all processed audio to the specified file.<br>
     * If already closed, this does nothing
     */
    public void close() {
        // Guard clause instead of the previous empty if-branch.
        if (getState() == CaptureState.CLOSED) {
            return;
        }
        getTargetDataLine().stop();
        getTargetDataLine().close();
        setState(CaptureState.CLOSED);
    }

    /**
     * Thread to capture the audio from the microphone and save it to a file
     */
    private class CaptureThread implements Runnable {

        /**
         * Run method for thread
         */
        public void run() {
            try {
                AudioFileFormat.Type fileType = getFileType();
                File audioFile = getAudioFile();
                open();
                // Writes to the file until the line is closed.
                AudioSystem.write(new AudioInputStream(getTargetDataLine()), fileType, audioFile);
            } catch (Exception ex) {
                ex.printStackTrace();
            }
        }
    }
}
artistic-2.0
shwenzhang/apk-parser
apk-parser-lib/src/main/java/tinker/net/dongliu/apk/parser/struct/xml/XmlNamespaceEndTag.java
518
package tinker.net.dongliu.apk.parser.struct.xml; /** * @author dongliu */ public class XmlNamespaceEndTag { private String prefix; private String uri; public String getPrefix() { return prefix; } public void setPrefix(String prefix) { this.prefix = prefix; } public String getUri() { return uri; } public void setUri(String uri) { this.uri = uri; } @Override public String toString() { return prefix + "=" + uri; } }
bsd-2-clause
kbsriram/keypan
src/core/com/kbsriram/keypan/core/IVerifier.java
603
package com.kbsriram.keypan.core; import java.io.IOException; public interface IVerifier { /** * Return a profile from a key uid, and use the original query to * set the is-critical flag on the profile if necessary. * Return null if you cannot handle this uid. */ public AProfile fromUid(String uid, String query); /** * Return the profile (with any additional info) if you were able * to find a confirming fingerprint. Otherwise, return null. */ public AProfile verify(AProfile in, String fingerprint, IGetter getter) throws IOException; }
bsd-2-clause
finikes/sider
src/main/java/org/finikes/sider/async/AsyncRedisPipelindMGetHandler.java
749
package org.finikes.sider.async; import java.util.Collection; import java.util.List; import java.util.Map; public abstract class AsyncRedisPipelindMGetHandler extends AsyncRedisPipelindHandlerAdapter { protected String unrealizedMsg = "Method unrealized."; public abstract Map<String, String> mget(String... keys); public abstract Map<String, String> mget(List<String> keys); @Override public final void mset(String[] keys, String[] values) { throw new RuntimeException(unrealizedMsg); } @Override public final void mset(List<String> keys, Collection<String> values) { throw new RuntimeException(unrealizedMsg); } @Override public final void mset(Map<String, String> entrys) { throw new RuntimeException(unrealizedMsg); } }
bsd-2-clause
cortical-io/java-client-sdk
retina-service-rest-model/src/main/java/io/cortical/rest/model/Text.java
1776
/******************************************************************************* * Copyright (c) cortical.io GmbH. All rights reserved. * * This software is confidential and proprietary information. * You shall use it only in accordance with the terms of the * license agreement you entered into with cortical.io GmbH. ******************************************************************************/ package io.cortical.rest.model; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonRootName; /** * * Text model. */ @JsonRootName("text") public class Text extends Model { private String text; private Fingerprint fingerprint; /** * * Creates a new instance of {@link Text}. * */ public Text() { } /** * Creates a new instance of {@link Text}. * * @param text the actual text of the element * @param positions the positions of the fingerprint representation of the text */ public Text(String text, int[] positions) { this.text = text; this.fingerprint = new Fingerprint(positions); } /** * Creates a new instance of {@link Text}. * * @param text the actual text of the element */ public Text(String text) { this.text = text; } /** * Gets the text. * * @return the text */ @JsonProperty(SerializationConstants.TEXT_STRING_PROPERTYLABEL) public String getText() { return text; } /** * Gets the fingerprint. * * @return the fingerprint */ @JsonProperty(SerializationConstants.FINGERPRINT_PROPERTY_LABEL) public Fingerprint getFingerprint() { return fingerprint; } }
bsd-2-clause
weisong44/weisong-object-copy
src/test/java/com/weisong/common/vodo/DummyGrandChildVo.java
568
package com.weisong.common.vodo; import lombok.Getter; import lombok.Setter; import com.weisong.common.value.BaseValueObject; import com.weisong.common.vodo.annotation.BindToClass; import com.weisong.common.vodo.annotation.BindToField; import com.weisong.common.vodo.annotation.VoOnly; @Getter @Setter @BindToClass("com.weisong.common.vodo.DummyGrandChild") public class DummyGrandChildVo extends BaseValueObject { private String grandChildName; @VoOnly @BindToField("id") private Long longId; @VoOnly @BindToField("id") private String strId; }
bsd-2-clause
jinterval/jinterval
ru-nsc-globalopt/src/main/java/ru/nsc/interval/globalopt/Main.java
5500
package ru.nsc.interval.globalopt; import java.io.IOException; import java.nio.file.Paths; import net.java.jinterval.expression.OptimizationProblem; import net.java.jinterval.expression.example.FireRisk; import net.java.jinterval.interval.set.*; import net.java.jinterval.rational.ExtendedRational; public class Main { public static void test(String msg, OptimizationProblem problem, SetIntervalContext ic, double... tolerances) { System.out.println(msg); for (double tolerance : tolerances) { SetInterval[] box = new SetInterval[problem.getNumInps()]; for (int i = 0; i < box.length; i++) { box[i] = ic.textToInterval(problem.getInpRange(i)); } IAOGO algorithm = new IAOGO(problem.getObjective()); long startTime = System.currentTimeMillis(); algorithm.start(box, ExtendedRational.valueOf(tolerance), ic); long stopTime = System.currentTimeMillis(); System.out.println("tol=" + tolerance + " time=" + (stopTime - startTime) / 1E3 + "s"); } } public static void main(String[] args) { OptimizationProblem lit = FireRisk.createOptimizationProblem(FireRisk.From.tpac_T_uw_d, FireRisk.To.Fsqr, true); OptimizationProblem num = Functions.createOptimizationProblem(); SetIntervalContext plain = SetIntervalContexts.getPlain(); SetIntervalContext accur = SetIntervalContexts.getAccur64(); switch (2) { case 1: test("num plain", num, plain, 1E-9); break; case 2: test("num accur", num, accur, 1E-9); break; case 3: test("lit plain", lit, plain, 1E-9); break; case 4: test("lit accur", lit, accur, 1E-9); break; } } private static void Help() { System.out.println("Usage: [-p] problemfile tolerance"); System.out.println(" -p plain mode (faster but without guarantee"); System.exit(1); } public static void mainWithArgs(String[] args) { SetIntervalContext ic = SetIntervalContexts.getDefault(); String problemFile = null; Double tolerance = null; for (String arg : args) { if (arg.startsWith("-")) { if (arg.equals("-p")) { ic = SetIntervalContexts.getPlain(); } else { Help(); } } else if (problemFile == 
null) { problemFile = arg; } else if (tolerance == null) { tolerance = new Double(arg); } else { Help(); } } if (tolerance == null) { Help(); } OptimizationProblem problem = null; try { problem = OptimizationProblem.load(Paths.get(problemFile)); } catch (IOException e) { System.out.println("File " + problemFile + " not found"); Help(); } SetInterval[] box = new SetInterval[problem.getNumInps()]; for (int i = 0; i < box.length; i++) { box[i] = ic.textToInterval(problem.getInpRange(i)); } IAOGO algorithm = new IAOGO(problem.getObjective()); long startTime = System.currentTimeMillis(); algorithm.start(box, ExtendedRational.valueOf(tolerance), ic); long stopTime = System.currentTimeMillis(); System.out.println("tol=" + tolerance + " time=" + (stopTime - startTime) / 1E3 + "s"); } } /* -Fsqr num plain 83573 -0.007023416664737814 [170.000000000000000000, 170.000000000000000000] [299.000000000000000000, 299.000000000000000000] [9.000000000000000000, 9.000000000000000000] [34.800000000000000000, 34.800000000000000000] -0.007023416664737814 [170.000000000000000000, 170.000000000000000000] [299.000000000000000000, 299.000000000000000000] [9.000000000000000000, 9.000000000000000000] [34.800000000000000000, 34.800000000000000000] tol=1.0E-9 time=393.574s num accur 83573 -0.007023416664739057 [170.000000000000000000, 170.000000000000000000] [299.000000000000000000, 299.000000000000000000] [9.000000000000000000, 9.000000000000000000] [34.800000000000004000, 34.800000000000004000] -0.007023416664739057 [170.000000000000000000, 170.000000000000000000] [299.000000000000000000, 299.000000000000000000] [9.000000000000000000, 9.000000000000000000] [34.800000000000004000, 34.800000000000004000] tol=1.0E-9 time=487.459s lit plain 83573 -0.007023416664738106 [170.000000000000000000, 170.000000000000000000] [299.000000000000000000, 299.000000000000000000] [9.000000000000000000, 9.000000000000000000] [34.800000000000000000, 34.800000000000000000] -0.007023416664738106 
[170.000000000000000000, 170.000000000000000000] [299.000000000000000000, 299.000000000000000000] [9.000000000000000000, 9.000000000000000000] [34.800000000000000000, 34.800000000000000000] tol=1.0E-9 time=265.774s lit accur 83573 -0.007023416664740045 [170.000000000000000000, 170.000000000000000000] [299.000000000000000000, 299.000000000000000000] [9.000000000000000000, 9.000000000000000000] [34.800000000000004000, 34.800000000000004000] -0.007023416664740045 [170.000000000000000000, 170.000000000000000000] [299.000000000000000000, 299.000000000000000000] [9.000000000000000000, 9.000000000000000000] [34.800000000000004000, 34.800000000000004000] tol=1.0E-9 time=412.011s */
bsd-2-clause
atlassian/commonmark-java
commonmark/src/main/java/org/commonmark/internal/InlineParserImpl.java
28061
package org.commonmark.internal; import org.commonmark.internal.inline.Scanner; import org.commonmark.internal.inline.*; import org.commonmark.internal.util.Escaping; import org.commonmark.internal.util.LinkScanner; import org.commonmark.internal.util.Parsing; import org.commonmark.node.*; import org.commonmark.parser.InlineParser; import org.commonmark.parser.InlineParserContext; import org.commonmark.parser.delimiter.DelimiterProcessor; import java.util.*; import java.util.regex.Pattern; public class InlineParserImpl implements InlineParser, InlineParserState { private static final String ASCII_PUNCTUATION = "!\"#\\$%&'\\(\\)\\*\\+,\\-\\./:;<=>\\?@\\[\\\\\\]\\^_`\\{\\|\\}~"; private static final Pattern PUNCTUATION = Pattern .compile("^[" + ASCII_PUNCTUATION + "\\p{Pc}\\p{Pd}\\p{Pe}\\p{Pf}\\p{Pi}\\p{Po}\\p{Ps}]"); private static final Pattern UNICODE_WHITESPACE_CHAR = Pattern.compile("^[\\p{Zs}\t\r\n\f]"); private final BitSet specialCharacters; private final BitSet delimiterCharacters; private final Map<Character, DelimiterProcessor> delimiterProcessors; private final InlineParserContext context; private final Map<Character, List<InlineContentParser>> inlineParsers; private Scanner scanner; private int trailingSpaces; /** * Top delimiter (emphasis, strong emphasis or custom emphasis). (Brackets are on a separate stack, different * from the algorithm described in the spec.) */ private Delimiter lastDelimiter; /** * Top opening bracket (<code>[</code> or <code>![)</code>). 
*/ private Bracket lastBracket; public InlineParserImpl(InlineParserContext inlineParserContext) { this.delimiterProcessors = calculateDelimiterProcessors(inlineParserContext.getCustomDelimiterProcessors()); this.context = inlineParserContext; this.inlineParsers = new HashMap<>(); this.inlineParsers.put('\\', Collections.<InlineContentParser>singletonList(new BackslashInlineParser())); this.inlineParsers.put('`', Collections.<InlineContentParser>singletonList(new BackticksInlineParser())); this.inlineParsers.put('&', Collections.<InlineContentParser>singletonList(new EntityInlineParser())); this.inlineParsers.put('<', Arrays.asList(new AutolinkInlineParser(), new HtmlInlineParser())); this.delimiterCharacters = calculateDelimiterCharacters(this.delimiterProcessors.keySet()); this.specialCharacters = calculateSpecialCharacters(delimiterCharacters, inlineParsers.keySet()); } public static BitSet calculateDelimiterCharacters(Set<Character> characters) { BitSet bitSet = new BitSet(); for (Character character : characters) { bitSet.set(character); } return bitSet; } public static BitSet calculateSpecialCharacters(BitSet delimiterCharacters, Set<Character> characters) { BitSet bitSet = new BitSet(); bitSet.or(delimiterCharacters); for (Character c : characters) { bitSet.set(c); } bitSet.set('['); bitSet.set(']'); bitSet.set('!'); bitSet.set('\n'); return bitSet; } public static Map<Character, DelimiterProcessor> calculateDelimiterProcessors(List<DelimiterProcessor> delimiterProcessors) { Map<Character, DelimiterProcessor> map = new HashMap<>(); addDelimiterProcessors(Arrays.<DelimiterProcessor>asList(new AsteriskDelimiterProcessor(), new UnderscoreDelimiterProcessor()), map); addDelimiterProcessors(delimiterProcessors, map); return map; } @Override public Scanner scanner() { return scanner; } private static void addDelimiterProcessors(Iterable<DelimiterProcessor> delimiterProcessors, Map<Character, DelimiterProcessor> map) { for (DelimiterProcessor delimiterProcessor : 
delimiterProcessors) { char opening = delimiterProcessor.getOpeningCharacter(); char closing = delimiterProcessor.getClosingCharacter(); if (opening == closing) { DelimiterProcessor old = map.get(opening); if (old != null && old.getOpeningCharacter() == old.getClosingCharacter()) { StaggeredDelimiterProcessor s; if (old instanceof StaggeredDelimiterProcessor) { s = (StaggeredDelimiterProcessor) old; } else { s = new StaggeredDelimiterProcessor(opening); s.add(old); } s.add(delimiterProcessor); map.put(opening, s); } else { addDelimiterProcessorForChar(opening, delimiterProcessor, map); } } else { addDelimiterProcessorForChar(opening, delimiterProcessor, map); addDelimiterProcessorForChar(closing, delimiterProcessor, map); } } } private static void addDelimiterProcessorForChar(char delimiterChar, DelimiterProcessor toAdd, Map<Character, DelimiterProcessor> delimiterProcessors) { DelimiterProcessor existing = delimiterProcessors.put(delimiterChar, toAdd); if (existing != null) { throw new IllegalArgumentException("Delimiter processor conflict with delimiter char '" + delimiterChar + "'"); } } /** * Parse content in block into inline children, appending them to the block node. */ @Override public void parse(List<CharSequence> lines, Node block) { reset(lines); while (true) { Node node = parseInline(); if (node != null) { block.appendChild(node); } else { break; } } processDelimiters(null); mergeChildTextNodes(block); } void reset(List<CharSequence> lines) { this.scanner = Scanner.of(lines); this.trailingSpaces = 0; this.lastDelimiter = null; this.lastBracket = null; } private Text text(String text) { return new Text(text); } /** * Parse the next inline element in subject, advancing our position. * On success, return the new inline node. * On failure, return null. 
*/ private Node parseInline() { char c = scanner.peek(); if (c == Scanner.END) { return null; } Position position = scanner.position(); List<InlineContentParser> inlineParsers = this.inlineParsers.get(c); if (inlineParsers != null) { for (InlineContentParser inlineParser : inlineParsers) { ParsedInline parsedInline = inlineParser.tryParse(this); if (parsedInline instanceof ParsedInlineImpl) { ParsedInlineImpl parsedInlineImpl = (ParsedInlineImpl) parsedInline; scanner.setPosition(parsedInlineImpl.getPosition()); return parsedInlineImpl.getNode(); } else { // Reset position scanner.setPosition(position); } } } switch (c) { case '[': return parseOpenBracket(); case '!': return parseBang(); case ']': return parseCloseBracket(); case '\n': return parseLineBreak(); } boolean isDelimiter = delimiterCharacters.get(c); if (isDelimiter) { DelimiterProcessor delimiterProcessor = delimiterProcessors.get(c); Node delimiterNode = parseDelimiters(delimiterProcessor, c); if (delimiterNode != null) { return delimiterNode; } } // If we get here, even for a special/delimiter character, we will just treat it as text. return parseText(); } /** * Attempt to parse delimiters like emphasis, strong emphasis or custom delimiters. */ private Node parseDelimiters(DelimiterProcessor delimiterProcessor, char delimiterChar) { DelimiterData res = scanDelimiters(delimiterProcessor, delimiterChar); if (res == null) { return null; } Text node = res.text; // Add entry to stack for this opener lastDelimiter = new Delimiter(node, delimiterChar, res.canOpen, res.canClose, lastDelimiter); lastDelimiter.length = res.count; lastDelimiter.originalLength = res.count; if (lastDelimiter.previous != null) { lastDelimiter.previous.next = lastDelimiter; } return node; } /** * Add open bracket to delimiter stack and add a text node to block's children. 
*/ private Node parseOpenBracket() { scanner.next(); Position start = scanner.position(); Text node = text("["); // Add entry to stack for this opener addBracket(Bracket.link(node, start, lastBracket, lastDelimiter)); return node; } /** * If next character is [, and ! delimiter to delimiter stack and add a text node to block's children. * Otherwise just add a text node. */ private Node parseBang() { scanner.next(); if (scanner.next('[')) { Text node = text("!["); // Add entry to stack for this opener addBracket(Bracket.image(node, scanner.position(), lastBracket, lastDelimiter)); return node; } else { return text("!"); } } /** * Try to match close bracket against an opening in the delimiter stack. Return either a link or image, or a * plain [ character. If there is a matching delimiter, remove it from the delimiter stack. */ private Node parseCloseBracket() { Position beforeClose = scanner.position(); scanner.next(); Position afterClose = scanner.position(); // Get previous `[` or `![` Bracket opener = lastBracket; if (opener == null) { // No matching opener, just return a literal. return text("]"); } if (!opener.allowed) { // Matching opener but it's not allowed, just return a literal. removeLastBracket(); return text("]"); } // Check to see if we have a link/image String dest = null; String title = null; // Maybe a inline link like `[foo](/uri "title")` if (scanner.next('(')) { scanner.whitespace(); dest = parseLinkDestination(scanner); if (dest == null) { scanner.setPosition(afterClose); } else { int whitespace = scanner.whitespace(); // title needs a whitespace before if (whitespace >= 1) { title = parseLinkTitle(scanner); scanner.whitespace(); } if (!scanner.next(')')) { // Don't have a closing `)`, so it's not a destination and title -> reset. // Note that something like `[foo](` could be valid, `(` will just be text. scanner.setPosition(afterClose); dest = null; title = null; } } } // Maybe a reference link like `[foo][bar]`, `[foo][]` or `[foo]`. 
// Note that even `[foo](` could be a valid link if there's a reference, which is why this is not just an `else` // here. if (dest == null) { // See if there's a link label like `[bar]` or `[]` String ref = parseLinkLabel(scanner); if (ref == null) { scanner.setPosition(afterClose); } if ((ref == null || ref.isEmpty()) && !opener.bracketAfter) { // If the second label is empty `[foo][]` or missing `[foo]`, then the first label is the reference. // But it can only be a reference when there's no (unescaped) bracket in it. // If there is, we don't even need to try to look up the reference. This is an optimization. ref = scanner.textBetween(opener.contentPosition, beforeClose).toString(); } if (ref != null) { String label = Escaping.normalizeLabelContent(ref); LinkReferenceDefinition definition = context.getLinkReferenceDefinition(label); if (definition != null) { dest = definition.getDestination(); title = definition.getTitle(); } } } if (dest != null) { // If we got here, open is a potential opener Node linkOrImage = opener.image ? new Image(dest, title) : new Link(dest, title); Node node = opener.node.getNext(); while (node != null) { Node next = node.getNext(); linkOrImage.appendChild(node); node = next; } // Process delimiters such as emphasis inside link/image processDelimiters(opener.previousDelimiter); mergeChildTextNodes(linkOrImage); // We don't need the corresponding text node anymore, we turned it into a link/image node opener.node.unlink(); removeLastBracket(); // Links within links are not allowed. We found this link, so there can be no other link around it. if (!opener.image) { Bracket bracket = lastBracket; while (bracket != null) { if (!bracket.image) { // Disallow link opener. It will still get matched, but will not result in a link. 
bracket.allowed = false; } bracket = bracket.previous; } } return linkOrImage; } else { // No link or image, parse just the bracket as text and continue removeLastBracket(); scanner.setPosition(afterClose); return text("]"); } } private void addBracket(Bracket bracket) { if (lastBracket != null) { lastBracket.bracketAfter = true; } lastBracket = bracket; } private void removeLastBracket() { lastBracket = lastBracket.previous; } /** * Attempt to parse link destination, returning the string or null if no match. */ private String parseLinkDestination(Scanner scanner) { char delimiter = scanner.peek(); Position start = scanner.position(); if (!LinkScanner.scanLinkDestination(scanner)) { return null; } String dest; if (delimiter == '<') { // chop off surrounding <..>: CharSequence rawDestination = scanner.textBetween(start, scanner.position()); dest = rawDestination.subSequence(1, rawDestination.length() - 1).toString(); } else { dest = scanner.textBetween(start, scanner.position()).toString(); } return Escaping.unescapeString(dest); } /** * Attempt to parse link title (sans quotes), returning the string or null if no match. */ private String parseLinkTitle(Scanner scanner) { Position start = scanner.position(); if (!LinkScanner.scanLinkTitle(scanner)) { return null; } // chop off ', " or parens CharSequence rawTitle = scanner.textBetween(start, scanner.position()); String title = rawTitle.subSequence(1, rawTitle.length() - 1).toString(); return Escaping.unescapeString(title); } /** * Attempt to parse a link label, returning the label between the brackets or null. */ String parseLinkLabel(Scanner scanner) { if (!scanner.next('[')) { return null; } Position start = scanner.position(); if (!LinkScanner.scanLinkLabelContent(scanner)) { return null; } Position end = scanner.position(); if (!scanner.next(']')) { return null; } String content = scanner.textBetween(start, end).toString(); // spec: A link label can have at most 999 characters inside the square brackets. 
if (content.length() > 999) { return null; } return content; } private Node parseLineBreak() { scanner.next(); if (trailingSpaces >= 2) { return new HardLineBreak(); } else { return new SoftLineBreak(); } } /** * Parse the next character as plain text, and possibly more if the following characters are non-special. */ private Node parseText() { Position start = scanner.position(); scanner.next(); while (scanner.hasNext()) { if (specialCharacters.get(scanner.peek())) { break; } scanner.next(); } String text = scanner.textBetween(start, scanner.position()).toString(); char c = scanner.peek(); if (c == '\n') { // We parsed until the end of the line. Trim any trailing spaces and remember them (for hard line breaks). int end = Parsing.skipBackwards(' ', text, text.length() - 1, 0) + 1; trailingSpaces = text.length() - end; text = text.substring(0, end); } else if (c == Scanner.END) { // For the last line, both tabs and spaces are trimmed for some reason (checked with commonmark.js). int end = Parsing.skipSpaceTabBackwards(text, text.length() - 1, 0) + 1; text = text.substring(0, end); } return text(text); } /** * Scan a sequence of characters with code delimiterChar, and return information about the number of delimiters * and whether they are positioned such that they can open and/or close emphasis or strong emphasis. * * @return information about delimiter run, or {@code null} */ private DelimiterData scanDelimiters(DelimiterProcessor delimiterProcessor, char delimiterChar) { char charBefore = scanner.peekPrevious(); Position start = scanner.position(); int delimiterCount = scanner.matchMultiple(delimiterChar); if (delimiterCount < delimiterProcessor.getMinLength()) { scanner.setPosition(start); return null; } char charAfter = scanner.peek(); String before = charBefore == Scanner.END ? "\n" : String.valueOf(charBefore); String after = charAfter == Scanner.END ? 
"\n" : String.valueOf(charAfter); // We could be more lazy here, in most cases we don't need to do every match case. boolean beforeIsPunctuation = PUNCTUATION.matcher(before).matches(); boolean beforeIsWhitespace = UNICODE_WHITESPACE_CHAR.matcher(before).matches(); boolean afterIsPunctuation = PUNCTUATION.matcher(after).matches(); boolean afterIsWhitespace = UNICODE_WHITESPACE_CHAR.matcher(after).matches(); boolean leftFlanking = !afterIsWhitespace && (!afterIsPunctuation || beforeIsWhitespace || beforeIsPunctuation); boolean rightFlanking = !beforeIsWhitespace && (!beforeIsPunctuation || afterIsWhitespace || afterIsPunctuation); boolean canOpen; boolean canClose; if (delimiterChar == '_') { canOpen = leftFlanking && (!rightFlanking || beforeIsPunctuation); canClose = rightFlanking && (!leftFlanking || afterIsPunctuation); } else { canOpen = leftFlanking && delimiterChar == delimiterProcessor.getOpeningCharacter(); canClose = rightFlanking && delimiterChar == delimiterProcessor.getClosingCharacter(); } String text = scanner.textBetween(start, scanner.position()).toString(); return new DelimiterData(delimiterCount, canOpen, canClose, new Text(text)); } private void processDelimiters(Delimiter stackBottom) { Map<Character, Delimiter> openersBottom = new HashMap<>(); // find first closer above stackBottom: Delimiter closer = lastDelimiter; while (closer != null && closer.previous != stackBottom) { closer = closer.previous; } // move forward, looking for closers, and handling each while (closer != null) { char delimiterChar = closer.delimiterChar; DelimiterProcessor delimiterProcessor = delimiterProcessors.get(delimiterChar); if (!closer.canClose || delimiterProcessor == null) { closer = closer.next; continue; } char openingDelimiterChar = delimiterProcessor.getOpeningCharacter(); // Found delimiter closer. Now look back for first matching opener. 
int useDelims = 0; boolean openerFound = false; boolean potentialOpenerFound = false; Delimiter opener = closer.previous; while (opener != null && opener != stackBottom && opener != openersBottom.get(delimiterChar)) { if (opener.canOpen && opener.delimiterChar == openingDelimiterChar) { potentialOpenerFound = true; useDelims = delimiterProcessor.getDelimiterUse(opener, closer); if (useDelims > 0) { openerFound = true; break; } } opener = opener.previous; } if (!openerFound) { if (!potentialOpenerFound) { // Set lower bound for future searches for openers. // Only do this when we didn't even have a potential // opener (one that matches the character and can open). // If an opener was rejected because of the number of // delimiters (e.g. because of the "multiple of 3" rule), // we want to consider it next time because the number // of delimiters can change as we continue processing. openersBottom.put(delimiterChar, closer.previous); if (!closer.canOpen) { // We can remove a closer that can't be an opener, // once we've seen there's no matching opener: removeDelimiterKeepNode(closer); } } closer = closer.next; continue; } Text openerNode = opener.node; Text closerNode = closer.node; // Remove number of used delimiters from stack and inline nodes. opener.length -= useDelims; closer.length -= useDelims; openerNode.setLiteral( openerNode.getLiteral().substring(0, openerNode.getLiteral().length() - useDelims)); closerNode.setLiteral( closerNode.getLiteral().substring(0, closerNode.getLiteral().length() - useDelims)); removeDelimitersBetween(opener, closer); // The delimiter processor can re-parent the nodes between opener and closer, // so make sure they're contiguous already. Exclusive because we want to keep opener/closer themselves. mergeTextNodesBetweenExclusive(openerNode, closerNode); delimiterProcessor.process(openerNode, closerNode, useDelims); // No delimiter characters left to process, so we can remove delimiter and the now empty node. 
if (opener.length == 0) { removeDelimiterAndNode(opener); } if (closer.length == 0) { Delimiter next = closer.next; removeDelimiterAndNode(closer); closer = next; } } // remove all delimiters while (lastDelimiter != null && lastDelimiter != stackBottom) { removeDelimiterKeepNode(lastDelimiter); } } private void removeDelimitersBetween(Delimiter opener, Delimiter closer) { Delimiter delimiter = closer.previous; while (delimiter != null && delimiter != opener) { Delimiter previousDelimiter = delimiter.previous; removeDelimiterKeepNode(delimiter); delimiter = previousDelimiter; } } /** * Remove the delimiter and the corresponding text node. For used delimiters, e.g. `*` in `*foo*`. */ private void removeDelimiterAndNode(Delimiter delim) { Text node = delim.node; node.unlink(); removeDelimiter(delim); } /** * Remove the delimiter but keep the corresponding node as text. For unused delimiters such as `_` in `foo_bar`. */ private void removeDelimiterKeepNode(Delimiter delim) { removeDelimiter(delim); } private void removeDelimiter(Delimiter delim) { if (delim.previous != null) { delim.previous.next = delim.next; } if (delim.next == null) { // top of stack lastDelimiter = delim.previous; } else { delim.next.previous = delim.previous; } } private void mergeTextNodesBetweenExclusive(Node fromNode, Node toNode) { // No nodes between them if (fromNode == toNode || fromNode.getNext() == toNode) { return; } mergeTextNodesInclusive(fromNode.getNext(), toNode.getPrevious()); } private void mergeChildTextNodes(Node node) { // No children or just one child node, no need for merging if (node.getFirstChild() == node.getLastChild()) { return; } mergeTextNodesInclusive(node.getFirstChild(), node.getLastChild()); } private void mergeTextNodesInclusive(Node fromNode, Node toNode) { Text first = null; Text last = null; int length = 0; Node node = fromNode; while (node != null) { if (node instanceof Text) { Text text = (Text) node; if (first == null) { first = text; } length += 
text.getLiteral().length(); last = text; } else { mergeIfNeeded(first, last, length); first = null; last = null; length = 0; } if (node == toNode) { break; } node = node.getNext(); } mergeIfNeeded(first, last, length); } private void mergeIfNeeded(Text first, Text last, int textLength) { if (first != null && last != null && first != last) { StringBuilder sb = new StringBuilder(textLength); sb.append(first.getLiteral()); Node node = first.getNext(); Node stop = last.getNext(); while (node != stop) { sb.append(((Text) node).getLiteral()); Node unlink = node; node = node.getNext(); unlink.unlink(); } String literal = sb.toString(); first.setLiteral(literal); } } private static class DelimiterData { final int count; final boolean canClose; final boolean canOpen; final Text text; DelimiterData(int count, boolean canOpen, boolean canClose, Text text) { this.count = count; this.canOpen = canOpen; this.canClose = canClose; this.text = text; } } }
bsd-2-clause
imagej/imagej-common
src/main/java/net/imagej/types/DataType64BitSignedComplexFloat.java
3884
/*
 * #%L
 * ImageJ2 software for multidimensional image processing and analysis.
 * %%
 * Copyright (C) 2009 - 2022 ImageJ2 developers.
 * %%
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 * #L%
 */

package net.imagej.types;

import net.imglib2.type.numeric.complex.ComplexFloatType;

import org.scijava.AbstractContextual;

//TODO - uncomment when we are ready to support
//@Plugin(type = DataType.class)

/**
 * {@link DataType} definition for 64-bit complex float numbers.
 * <p>
 * Each value is a pair of 32-bit floats (real and imaginary parts) backed by
 * ImgLib2's {@link ComplexFloatType}, for 64 bits total.
 * </p>
 *
 * @author Barry DeZonia
 */
public class DataType64BitSignedComplexFloat extends AbstractContextual
	implements DataType<ComplexFloatType>
{

	// -- fields --

	// Shared prototype instance; callers needing an independent value should
	// use createVariable() instead.
	private ComplexFloatType type = new ComplexFloatType();

	// -- DataType methods --

	@Override
	public ComplexFloatType getType() {
		return type;
	}

	@Override
	public String shortName() {
		return "64-bit complex";
	}

	@Override
	public String longName() {
		return "64-bit complex float";
	}

	@Override
	public String description() {
		return "A complex floating data type with 32-bit float subcomponents";
	}

	// Complex, floating-point and signed by definition of the type.

	@Override
	public boolean isComplex() {
		return true;
	}

	@Override
	public boolean isFloat() {
		return true;
	}

	@Override
	public boolean isSigned() {
		return true;
	}

	// Complex numbers have no natural ordering, hence no bounds.

	@Override
	public boolean isBounded() {
		return false;
	}

	@Override
	public void lowerBound(ComplexFloatType dest) {
		throw new UnsupportedOperationException("complex numbers are unbounded");
	}

	@Override
	public void upperBound(ComplexFloatType dest) {
		throw new UnsupportedOperationException("complex numbers are unbounded");
	}

	// Two 32-bit float subcomponents = 64 bits per value.
	@Override
	public int bitCount() {
		return 64;
	}

	@Override
	public ComplexFloatType createVariable() {
		return new ComplexFloatType();
	}

	// Widening cast: float components are stored losslessly in the BigComplex.
	@Override
	public void cast(ComplexFloatType val, BigComplex dest) {
		dest.setReal(val.getRealFloat());
		dest.setImag(val.getImaginaryFloat());
	}

	// Narrowing cast: truncating to float may lose precision and range.
	@Override
	public void cast(BigComplex val, ComplexFloatType dest) {
		dest.setReal(val.getReal().floatValue());
		dest.setImaginary(val.getImag().floatValue());
	}

	// A complex value cannot be represented by a single double or long.

	@Override
	public boolean hasDoubleRepresentation() {
		return false;
	}

	@Override
	public boolean hasLongRepresentation() {
		return false;
	}

	@Override
	public double asDouble(ComplexFloatType val) {
		throw new UnsupportedOperationException();
	}

	@Override
	public long asLong(ComplexFloatType val) {
		throw new UnsupportedOperationException();
	}

	// Setting from a real scalar zeroes the imaginary part.

	@Override
	public void setDouble(ComplexFloatType val, double v) {
		val.setReal(v);
		val.setImaginary(0);
	}

	@Override
	public void setLong(ComplexFloatType val, long v) {
		val.setReal(v);
		val.setImaginary(0);
	}

}
bsd-2-clause
sixshot626/h2o
h2o-utils/src/main/java/h2o/utils/log/AbstractTagLogger.java
1841
package h2o.utils.log;

import h2o.common.exception.ExceptionUtil;
import h2o.common.util.collection.CollectionUtil;
import h2o.common.util.date.DateUtil;
import h2o.common.util.format.FormattingTuple;
import h2o.common.util.format.MessageFormatter;

import java.util.Date;

/**
 * Skeleton implementation of {@link TagLogger}.
 * <p>
 * Subclasses only have to implement {@link #log(LogLevel, String[], String, Object)};
 * this class supplies SLF4J-style {@code {}} message formatting and a standard
 * single-line rendering of level, tags, prompt and payload.
 * </p>
 */
public abstract class AbstractTagLogger implements TagLogger {

    @Override
    public abstract void log(LogLevel level, String[] tags, String prompt, Object log);

    /**
     * Formats {@code fmt} with {@code args} (SLF4J-style placeholders) and
     * forwards the result to {@link #log}. If the formatter extracted a
     * trailing {@link Throwable} from the arguments, the throwable itself is
     * logged instead of the formatted message.
     */
    @Override
    public void fmtLog(LogLevel level, String[] tags, String prompt, String fmt, Object... args) {
        FormattingTuple formatted = MessageFormatter.arrayFormat(fmt, args);
        Object payload = formatted.getThrowable() != null
                ? formatted.getThrowable()
                : formatted.getMessage();
        this.log(level, tags, prompt, payload);
    }

    /**
     * Renders one log line: timestamp, level, comma-separated tag list, prompt
     * and the payload (throwables are expanded via {@link #exceptionToString}).
     */
    protected String formatLog(LogLevel level, String[] tags, String prompt, Object log) {

        StringBuilder line = new StringBuilder();

        line.append(DateUtil.toLongString(new Date()))
                .append(" [")
                .append(level)
                .append("] tags : {");

        // Guard first: the tag array may be null or empty.
        if (!CollectionUtil.argsIsBlank(tags)) {
            line.append(String.join(",", tags));
        }

        line.append("} ")
                .append(prompt)
                .append(" ");

        if (log != null) {
            if (log instanceof Throwable) {
                line.append(this.exceptionToString((Throwable) log));
            } else {
                line.append(log.toString());
            }
        }

        return line.toString();
    }

    /** Expands a throwable (including its cause chain) into a string. */
    protected String exceptionToString(Throwable e) {
        return ExceptionUtil.exceptionChainToString(e);
    }

}
bsd-2-clause
iotoasis/SI
si-modules/LWM2M_IPE_Server/leshan-server-core/src/main/java/org/eclipse/leshan/server/model/LwM2mModelProvider.java
1717
/*******************************************************************************
 * Copyright (c) 2015 Sierra Wireless and others.
 *
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * and Eclipse Distribution License v1.0 which accompany this distribution.
 *
 * The Eclipse Public License is available at
 *    http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 *    http://www.eclipse.org/org/documents/edl-v10.html.
 *
 * Contributors:
 *     Sierra Wireless - initial API and implementation
 *******************************************************************************/
package org.eclipse.leshan.server.model;

import org.eclipse.leshan.core.model.LwM2mModel;
import org.eclipse.leshan.core.node.codec.LwM2mNodeDecoder;
import org.eclipse.leshan.core.node.codec.LwM2mNodeEncoder;
import org.eclipse.leshan.server.client.Client;

/**
 * A <code>LwM2mModelProvider</code> implementation is in charge of returning the description of the LWM2M objects for
 * each registered client.
 * <p>
 * The description of each object is mainly used by the {@link LwM2mNodeEncoder}/{@link LwM2mNodeDecoder} to
 * encode/decode the requests/responses payload.
 * </p>
 * <p>
 * A typical use case to implement a custom provider is the need to support several version of the specification.
 * </p>
 */
public interface LwM2mModelProvider {

    /**
     * Returns the description of the objects supported by the given client.
     * <p>
     * Called per request, so implementations should be cheap or cache their result.
     * </p>
     *
     * @param client the registered client
     * @return the list of object descriptions
     */
    LwM2mModel getObjectModel(Client client);

}
bsd-2-clause
javaakademie/MovieStore
src/main/java/de/javaakademie/moviestore/model/Actor.java
1327
package de.javaakademie.moviestore.model;

import java.time.LocalDate;
import java.time.Period;

/**
 * Actor.
 * <p>
 * Simple mutable domain object holding an actor's identity, birthday and a
 * link to a Wikipedia article. The age is derived from the birthday on demand.
 * </p>
 *
 * @author Guido.Oelmann
 */
public class Actor {

	// Surrogate key, typically assigned by the persistence layer.
	private Integer id;
	private String name;
	private String surname;
	private LocalDate birthday;
	private String wikipediaUrl;

	/** Default constructor for frameworks requiring a no-arg constructor. */
	public Actor() {
	}

	/** Fully-initializing constructor. */
	public Actor(Integer id, String name, String surname, LocalDate birthday, String wikipediaUrl) {
		this.id = id;
		this.name = name;
		this.surname = surname;
		this.birthday = birthday;
		this.wikipediaUrl = wikipediaUrl;
	}

	public Integer getId() {
		return id;
	}

	public void setId(Integer id) {
		this.id = id;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getSurname() {
		return surname;
	}

	public void setSurname(String surname) {
		this.surname = surname;
	}

	public LocalDate getBirthday() {
		return birthday;
	}

	public void setBirthday(LocalDate birthday) {
		this.birthday = birthday;
	}

	/**
	 * Derives the actor's current age in whole years from the birthday.
	 * NOTE(review): throws NullPointerException when birthday is unset.
	 */
	public Integer getAge() {
		LocalDate today = LocalDate.now();
		return Period.between(birthday, today).getYears();
	}

	public String getWikipediaUrl() {
		return wikipediaUrl;
	}

	public void setWikipediaUrl(String wikipediaUrl) {
		this.wikipediaUrl = wikipediaUrl;
	}

}
bsd-2-clause
jochenseeber/gradle-project-config
src/main/java/me/seeber/gradle/ide/eclipse/annotations/JarReader.java
5838
/** * BSD 2-Clause License * * Copyright (c) 2016-2017, Jochen Seeber * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

package me.seeber.gradle.ide.eclipse.annotations;

import static java.lang.String.format;

import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;

import com.google.common.collect.ImmutableMap;
import com.google.common.io.Closeables;
import com.google.common.io.Files;

import me.seeber.gradle.util.Validate;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.dynamic.ClassFileLocator;
import net.bytebuddy.pool.TypePool;
import net.bytebuddy.pool.TypePool.CacheProvider;
import net.bytebuddy.pool.TypePool.Default.ReaderMode;

/**
 * Scan a dependency for nullability annotations
 *
 * @see <a href="https://wiki.eclipse.org/JDT_Core/Null_Analysis/External_Annotations">Eclipse JDT External
 *      Annotations</a>
 */
public class JarReader implements Closeable {

    /**
     * Class loader to resolve types
     */
    private final ClassLoader classLoader;

    /**
     * Class file locator to search for class files
     */
    private @Nullable ClassFileLocator classFileLocator;

    /**
     * Loaded types, keyed by fully qualified class name (null after {@link #close()})
     */
    private @Nullable Map<@NonNull String, @NonNull TypeDescription> types = new HashMap<>();

    /**
     * Create a new AnnotationsJarBuilder
     *
     * @param jarFile JAR file to read
     * @param classLoader Class loader to resolve types
     */
    public JarReader(File jarFile, ClassLoader classLoader) {
        this.classLoader = classLoader;

        try {
            openJarFile(jarFile);
        }
        catch (IOException e) {
            throw new RuntimeException("Cannot create annotations JAR builder", e);
        }
    }

    /**
     * Open a JAR file for reading
     *
     * <p>
     * Collects the names of all class files in the JAR, resolves them lazily through a ByteBuddy type pool (falling
     * back to {@link #getClassLoader()} for types outside the JAR) and stores the resulting type descriptions in
     * {@link #types}. Package-info pseudo-types are skipped.
     * </p>
     *
     * @param jarFile JAR file for reading
     * @throws IOException I'm so sorry...
     */
    protected void openJarFile(File jarFile) throws IOException {
        ClassFileLocator classFileLocator = null;

        try {
            List<String> classFileNames = new ArrayList<>();

            try (JarFile jar = new JarFile(jarFile)) {
                for (JarEntry entry : Collections.list(jar.entries())) {
                    if (!entry.isDirectory() && Files.getFileExtension(entry.getName()).equals("class")) {
                        classFileNames.add(entry.getName());
                    }
                }
            }
            catch (IOException e) {
                throw new RuntimeException(format("Could not scan jar file '%s' for contained classes", jarFile), e);
            }

            classFileLocator = ClassFileLocator.ForJarFile.of(jarFile);

            TypePool parentTypePool = TypePool.Default.of(getClassLoader());
            TypePool typePool = new TypePool.Default.WithLazyResolution(CacheProvider.Simple.withObjectType(),
                    classFileLocator, ReaderMode.FAST, parentTypePool);

            Map<@NonNull String, @NonNull TypeDescription> types = new HashMap<>();

            for (String classFileName : classFileNames) {
                // JAR entry names always use '/' separators (per the ZIP/JAR spec), so convert those into
                // package dots. The previous code replaced File.separatorChar, which is wrong on Windows.
                String className = Files.getNameWithoutExtension(classFileName.replace('/', '.'));
                TypeDescription type = typePool.describe(className).resolve();

                if (!type.isPackageType()) {
                    types.put(type.getName(), type);
                }
            }

            this.classFileLocator = classFileLocator;
            this.types = ImmutableMap.copyOf(types);
        }
        catch (Exception e) {
            // Best-effort cleanup; swallowIOException=true means close() cannot throw here.
            Closeables.close(classFileLocator, true);
            // Chain the original failure as the cause (it was previously dropped).
            throw new RuntimeException(format("Could not scan JAR file '%s'", jarFile), e);
        }
    }

    /**
     * Get the loaded types
     *
     * @return types Loaded types, keyed by fully qualified class name
     */
    public Map<String, TypeDescription> getTypes() {
        return Validate.notNull(this.types);
    }

    /**
     * Get the class loader
     *
     * @return classLoader Class loader
     */
    public ClassLoader getClassLoader() {
        return this.classLoader;
    }

    /**
     * @see java.lang.AutoCloseable#close()
     */
    @Override
    public void close() throws IOException {
        this.types = null;
        Closeables.close(this.classFileLocator, false);
    }

}
bsd-2-clause
CameronTolooee/galileo
src/galileo/net/MessageRouter.java
19491
/* Copyright (c) 2013, Colorado State University All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. This software is provided by the copyright holders and contributors "as is" and any express or implied warranties, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose are disclaimed. In no event shall the copyright holder or contributors be liable for any direct, indirect, incidental, special, exemplary, or consequential damages (including, but not limited to, procurement of substitute goods or services; loss of use, data, or profits; or business interruption) however caused and on any theory of liability, whether in contract, strict liability, or tort (including negligence or otherwise) arising in any way out of the use of this software, even if advised of the possibility of such damage. 
*/ package galileo.net; import java.io.IOException; import java.net.Socket; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.nio.channels.CancelledKeyException; import java.nio.channels.SelectionKey; import java.nio.channels.Selector; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Level; import java.util.logging.Logger; /** * Provides an abstract implementation for consuming and publishing messages on * both the server and client side. * * @author malensek */ public abstract class MessageRouter implements Runnable { protected static final Logger logger = Logger.getLogger("galileo"); /** The size (in bytes) of the message prefix used in the system. */ public static final int PREFIX_SZ = Integer.SIZE / Byte.SIZE; /** The default read buffer size is 8 MB. */ public static final int DEFAULT_READ_BUFFER_SIZE = 8388608; /** The default write queue allows 100 items to be inserted before it * starts blocking. This prevents situations where the MessageRouter is * overwhelmed by an extreme number of write requests, exhausting available * resources. */ public static final int DEFAULT_WRITE_QUEUE_SIZE = 100; /** System property that overrides the read buffer size. */ public static final String READ_BUFFER_PROPERTY = "galileo.net.MessageRouter.readBufferSize"; /** System property that overrides the write queue maximum size. 
*/ public static final String WRITE_QUEUE_PROPERTY = "galileo.net.MessageRouter.writeQueueSize"; /** Flag used to determine whether the Selector thread should run */ protected boolean online; private List<MessageListener> listeners = new ArrayList<>(); protected Selector selector; protected int readBufferSize; protected int writeQueueSize; private ByteBuffer readBuffer; protected ConcurrentHashMap<SelectionKey, Integer> changeInterest = new ConcurrentHashMap<>(); public MessageRouter() { this(DEFAULT_READ_BUFFER_SIZE, DEFAULT_WRITE_QUEUE_SIZE); } public MessageRouter(int readBufferSize, int maxWriteQueueSize) { String readSz = System.getProperty(READ_BUFFER_PROPERTY); if (readSz == null) { this.readBufferSize = readBufferSize; } else { this.readBufferSize = Integer.parseInt(readSz); } String queueSz = System.getProperty(WRITE_QUEUE_PROPERTY); if (queueSz == null) { this.writeQueueSize = maxWriteQueueSize; } else { this.writeQueueSize = Integer.parseInt(queueSz); } readBuffer = ByteBuffer.allocateDirect(this.readBufferSize); } /** * As long as the MessageRouter is online, monitor connection operations * through the Selector instance. */ @Override public void run() { while (online) { try { updateInterestOps(); processSelectionKeys(); } catch (IOException e) { logger.log(Level.WARNING, "Error in selector thread", e); } } } /** * Updates interest sets for any SelectionKey instances that require * changes. This allows external threads to queue up changes to the * interest sets that will be fulfilled by the selector thread. 
*/ protected void updateInterestOps() { Iterator<SelectionKey> it = changeInterest.keySet().iterator(); while (it.hasNext()) { SelectionKey key = it.next(); if (key.isValid()) { SocketChannel channel = (SocketChannel) key.channel(); if (channel.isConnected() == false || channel.isRegistered() == false) { continue; } key.interestOps(changeInterest.get(key)); } changeInterest.remove(key); } } /** * Performs a select operation, and then processes the resulting * SelectionKey set based on interest ops. */ protected void processSelectionKeys() throws IOException { selector.select(); Iterator<SelectionKey> keys = selector.selectedKeys().iterator(); while (keys.hasNext()) { SelectionKey key = keys.next(); keys.remove(); if (key.isValid() == false) { continue; } try { if (key.isAcceptable()) { accept(key); continue; } if (key.isConnectable()) { connect(key); continue; } if (key.isWritable()) { write(key); } if (key.isReadable()) { read(key); } } catch (CancelledKeyException e) { /* SelectionKey was cancelled by another thread. */ continue; } } } /** * Accepts new connections. * * @param key The SelectionKey for the connecting client. */ protected void accept(SelectionKey key) throws IOException { ServerSocketChannel servSocket = (ServerSocketChannel) key.channel(); SocketChannel channel = servSocket.accept(); logger.info("Accepted connection: " + getClientString(channel)); TransmissionTracker tracker = new TransmissionTracker(writeQueueSize); channel.configureBlocking(false); channel.register(selector, SelectionKey.OP_READ, tracker); dispatchConnect(getDestination(channel)); } /** * Finishes setting up a connection on a SocketChannel. * * @param key SelectionKey for the SocketChannel. 
*/ protected void connect(SelectionKey key) { try { SocketChannel channel = (SocketChannel) key.channel(); if (channel.finishConnect()) { TransmissionTracker tracker = TransmissionTracker.fromKey(key); if (tracker.hasPendingData() == false) { changeInterest.put(key, SelectionKey.OP_READ); } else { /* Data has already been queued up; start writing */ changeInterest.put(key, SelectionKey.OP_READ | SelectionKey.OP_WRITE); } } dispatchConnect(getDestination(channel)); } catch (IOException e) { logger.log(Level.INFO, "Connection finalization failed", e); disconnect(key); } } /** * Read data from a SocketChannel. * * @param key SelectionKey for the SocketChannel. */ protected void read(SelectionKey key) { SocketChannel channel = (SocketChannel) key.channel(); readBuffer.clear(); int bytesRead = 0; try { /* Read data from the channel */ while ((bytesRead = channel.read(readBuffer)) > 0) { readBuffer.flip(); processIncomingMessage(key); } } catch (IOException e) { logger.log(Level.FINE, "Abnormal remote termination", e); disconnect(key); return; } catch (BufferUnderflowException e) { /* Incoming packets lied about their size! */ logger.log(Level.WARNING, "Incoming packet size mismatch", e); } if (bytesRead == -1) { /* Connection was terminated by the client. */ logger.fine("Reached EOF in channel input stream"); disconnect(key); return; } } /** * Process data received from a client SocketChannel. This method is * chiefly concerned with processing incoming data streams into * GalileoMessage packets to be consumed by the system. * * @param key SelectionKey for the client. */ protected void processIncomingMessage(SelectionKey key) { TransmissionTracker transmission = TransmissionTracker.fromKey(key); if (transmission.expectedBytes == 0) { /* We don't know how much data the client is sending yet. * Read the message prefix to determine the payload size. */ boolean ready = readPrefix(readBuffer, transmission); /* Check if we have read the payload size prefix yet. 
If * not, then we're done for now. */ if (ready == false) { return; } } int readSize = transmission.expectedBytes - transmission.readPointer; if (readSize > readBuffer.remaining()) { readSize = readBuffer.remaining(); } readBuffer.get(transmission.payload, transmission.readPointer, readSize); transmission.readPointer += readSize; if (transmission.readPointer == transmission.expectedBytes) { /* The payload has been read */ GalileoMessage msg = new GalileoMessage( transmission.payload, new MessageContext(this, key)); dispatchMessage(msg); transmission.resetCounters(); if (readBuffer.hasRemaining()) { /* There is another payload to read */ processIncomingMessage(key); /* Note: this process continues until we reach the end of the * buffer. Not doing so would cause us to lose data. */ } } } /** * Read the payload size prefix from a channel. * Each message in Galileo is prefixed with a payload size field; this is * read to allocate buffers for the incoming message. * * @return true if the payload size has been determined; false otherwise. */ protected static boolean readPrefix(ByteBuffer buffer, TransmissionTracker transmission) { /* Make sure the prefix hasn't already been read. */ if (transmission.expectedBytes != 0) { return true; } /* Can we determine the payload size in one shot? (we must read at least * PREFIX_SZ bytes) */ if (transmission.prefixPointer == 0 && buffer.remaining() >= PREFIX_SZ) { transmission.expectedBytes = buffer.getInt(); transmission.allocatePayload(); return true; } else { /* Keep reading until we have at least PREFIX_SZ bytes to determine * the payload size. 
*/ int prefixLeft = PREFIX_SZ - transmission.prefixPointer; if (buffer.remaining() < prefixLeft) { prefixLeft = buffer.remaining(); } buffer.get(transmission.prefix, transmission.prefixPointer, prefixLeft); transmission.prefixPointer += prefixLeft; if (transmission.prefixPointer >= PREFIX_SZ) { ByteBuffer buf = ByteBuffer.wrap(transmission.prefix); transmission.expectedBytes = buf.getInt(); transmission.allocatePayload(); return true; } } return false; } /** * Wraps a given message in a {@link ByteBuffer}, including the payload size * prefix. Data produced by this method will be subsequently read by the * readPrefix() method. */ protected static ByteBuffer wrapWithPrefix(GalileoMessage message) { int messageSize = message.getPayload().length; ByteBuffer buffer = ByteBuffer.allocate(messageSize + 4); buffer.putInt(messageSize); buffer.put(message.getPayload()); buffer.flip(); return buffer; } /** * Adds a message to the pending write queue for a particular SelectionKey * and submits a change request for its interest set. Pending data is placed * in a blocking queue, so this function may block to prevent queueing an * excessive amount of data. * <p> * The system property <em>galileo.net.MessageRouter.writeQueueSize</em> * tunes the maximum amount of data that can be queued. * * @param key SelectionKey for the channel. * @param message GalileoMessage to publish on the channel. * * @return {@link Transmission} instance representing the send operation. 
*/ public Transmission sendMessage(SelectionKey key, GalileoMessage message) throws IOException { //TODO reduce the visibility of this method to protected if (this.isOnline() == false) { throw new IOException("MessageRouter is not online."); } TransmissionTracker tracker = TransmissionTracker.fromKey(key); ByteBuffer payload = wrapWithPrefix(message); Transmission trans = null; try { tracker.queueOutgoingData(payload); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new IOException("Interrupted while waiting to queue data"); } changeInterest.put(key, SelectionKey.OP_READ | SelectionKey.OP_WRITE); selector.wakeup(); return trans; } /** * When a {@link SelectionKey} is writable, push as much pending data * out on the channel as possible. * * @param key {@link SelectionKey} of the channel to write to. */ private void write(SelectionKey key) { TransmissionTracker tracker = TransmissionTracker.fromKey(key); SocketChannel channel = (SocketChannel) key.channel(); while (tracker.hasPendingData() == true) { Transmission trans = tracker.getNextTransmission(); ByteBuffer buffer = trans.getPayload(); if (buffer == null) { break; } int written = 0; while (buffer.hasRemaining()) { try { written = channel.write(buffer); } catch (IOException e) { /* Broken pipe */ disconnect(key); return; } if (buffer.hasRemaining() == false) { /* Done writing */ tracker.transmissionFinished(); } if (written == 0) { /* Return now, to keep our OP_WRITE interest op set. */ return; } } } /* At this point, the queue is empty. */ key.interestOps(SelectionKey.OP_READ); return; } /** * Handle termination of connections. * * @param key The SelectionKey of the SocketChannel that has disconnected. 
*/ protected void disconnect(SelectionKey key) { if (key.isValid() == false) { return; } SocketChannel channel = (SocketChannel) key.channel(); NetworkDestination destination = getDestination(channel); logger.info("Terminating connection: " + destination.toString()); try { key.cancel(); key.channel().close(); } catch (IOException e) { logger.log(Level.WARNING, "Failed to disconnect channel", e); } dispatchDisconnect(destination); } /** * Adds a message listener (consumer) to this MessageRouter. Listeners * receive messages that are published by this MessageRouter. * * @param listener {@link MessageListener} that will consume messages * published by this MessageRouter. */ public void addListener(MessageListener listener) { listeners.add(listener); } /** * Dispatches a message to all listening consumers. * * @param message {@link GalileoMessage} to dispatch. */ protected void dispatchMessage(GalileoMessage message) { for (MessageListener listener : listeners) { listener.onMessage(message); } } /** * Informs all listening consumers that a connection to a remote endpoint * has been made. */ protected void dispatchConnect(NetworkDestination endpoint) { for (MessageListener listener : listeners) { listener.onConnect(endpoint); } } /** * Informs all listening consumers that a connection to a remote endpoint * has been terminated. */ protected void dispatchDisconnect(NetworkDestination endpoint) { for (MessageListener listener : listeners) { listener.onDisconnect(endpoint); } } /** * Determines whether or not this MessageRouter is online. As long as the * router is online, the selector thread will continue to run. * * @return true if the MessageRouter instance is online and running. */ public boolean isOnline() { return this.online; } /** * Determines a connection's hostname and port, then concatenates the two * values, separated by a colon (:). * * @param channel Channel to get client information about. 
*/ protected static String getClientString(SocketChannel channel) { Socket socket = channel.socket(); return socket.getInetAddress().getHostName() + ":" + socket.getPort(); } /** * Determines a connection's endpoint information (hostname and port) and * encapsulates them in a {@link NetworkDestination}. * * @param channel The SocketChannel of the network endpoint. * * @return NetworkDestination representation of the endpoint. */ protected static NetworkDestination getDestination(SocketChannel channel) { Socket socket = channel.socket(); return new NetworkDestination( socket.getInetAddress().getHostName(), socket.getPort()); } }
bsd-2-clause
marcjansen/mapfish-print
core/src/main/java/org/mapfish/print/test/util/ImageSimilarity.java
15963
package org.mapfish.print.test.util; import com.google.common.io.Files; import net.sf.jasperreports.engine.JRException; import net.sf.jasperreports.engine.JasperPrint; import net.sf.jasperreports.engine.export.JRGraphics2DExporter; import net.sf.jasperreports.export.SimpleExporterInput; import net.sf.jasperreports.export.SimpleGraphics2DExporterOutput; import net.sf.jasperreports.export.SimpleGraphics2DReportConfiguration; import org.apache.batik.transcoder.TranscoderException; import org.mapfish.print.SvgUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.awt.Color; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.RenderingHints; import java.awt.image.BufferedImage; import java.io.ByteArrayInputStream; import java.io.File; import java.io.IOException; import java.net.URI; import java.util.Iterator; import java.util.List; import javax.imageio.ImageIO; import javax.imageio.ImageWriteParam; import javax.imageio.ImageWriter; import javax.imageio.stream.FileImageOutputStream; import javax.media.jai.iterator.RandomIter; import javax.media.jai.iterator.RandomIterFactory; /** * Class for comparing an expected image to an actual image. * <p> * CHECKSTYLE:OFF */ public final class ImageSimilarity { private static final Logger LOGGER = LoggerFactory.getLogger(ImageSimilarity.class); private static final boolean GENERATE_IN_SOURCE = true; private final BufferedImage expectedImage; private final BufferedImage maskImage; private final BufferedImage diffImage; private final File expectedPath; /** * The constructor, which creates the GUI and start the image processing task. */ public ImageSimilarity(final File expectedFile) throws IOException { this.expectedImage = expectedFile.exists() ? 
ImageIO.read(expectedFile) : null; if (GENERATE_IN_SOURCE) { this.expectedPath = new File(expectedFile.toString().replaceAll( "/out/", "/src/").replaceAll("/build/classes/test/", "/src/test/resources/")); } else { this.expectedPath = expectedFile; } final File maskFile = getRelatedFile("mask"); if (maskFile.exists()) { this.maskImage = ImageIO.read(maskFile); assert this.maskImage.getSampleModel().getNumBands() == 1; } else { this.maskImage = new BufferedImage( this.expectedImage.getWidth(), this.expectedImage.getHeight(), BufferedImage.TYPE_BYTE_GRAY); final Graphics2D graphics = this.maskImage.createGraphics(); try { graphics.setBackground(new Color(255, 255, 255)); graphics.clearRect(0, 0, this.expectedImage.getWidth(), this.expectedImage.getHeight()); } finally { graphics.dispose(); } } this.diffImage = new BufferedImage( this.expectedImage.getWidth(), this.expectedImage.getHeight(), BufferedImage.TYPE_INT_RGB); } /** * Write the image to a file in uncompressed tiff format. * * @param image image to write * @param file path and file name (extension will be ignored and changed to tiff. */ private static void writeUncompressedImage(BufferedImage image, String file) throws IOException { FileImageOutputStream out = null; try { final File parentFile = new File(file).getParentFile(); Iterator<ImageWriter> writers = ImageIO.getImageWritersBySuffix("png"); final ImageWriter next = writers.next(); final ImageWriteParam param = next.getDefaultWriteParam(); param.setCompressionMode(ImageWriteParam.MODE_DISABLED); final File outputFile = new File(parentFile, Files.getNameWithoutExtension(file) + ".png"); out = new FileImageOutputStream(outputFile); next.setOutput(out); next.write(image); } catch (Throwable e) { System.err.println(String.format( "Error writing the image generated by the test: %s%n\t", file)); e.printStackTrace(); } finally { if (out != null) { out.close(); } } } /** * Merges a list of graphic files into a single graphic. 
* * @param graphicFiles a list of graphic files * @param width the graphic width (required for svg files) * @param height the graphic height (required for svg files) * @return a single graphic */ public static BufferedImage mergeImages(List<URI> graphicFiles, int width, int height) throws IOException, TranscoderException { if (graphicFiles.isEmpty()) { throw new IllegalArgumentException("no graphics given"); } BufferedImage mergedImage = loadGraphic(graphicFiles.get(0), width, height); Graphics g = mergedImage.getGraphics(); for (int i = 1; i < graphicFiles.size(); i++) { BufferedImage image = loadGraphic(graphicFiles.get(i), width, height); g.drawImage(image, 0, 0, null); } g.dispose(); return mergedImage; } private static BufferedImage loadGraphic(final URI path, final int width, final int height) throws IOException, TranscoderException { File file = new File(path); if (file.getName().endsWith(".svg")) { return convertFromSvg(path, width, height); } else { BufferedImage originalImage = ImageIO.read(file); BufferedImage resizedImage = new BufferedImage(width, height, originalImage.getType()); Graphics2D g = resizedImage.createGraphics(); g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g.drawImage(originalImage, 0, 0, width, height, null); g.dispose(); return resizedImage; } } /** * Renders an SVG image into a {@link BufferedImage}. */ public static BufferedImage convertFromSvg(final URI svgFile, final int width, final int height) throws TranscoderException { return SvgUtil.convertFromSvg(svgFile, width, height); } /** * Exports a rendered {@link JasperPrint} to a {@link BufferedImage}. 
*/ public static BufferedImage exportReportToImage(final JasperPrint jasperPrint, final Integer page) throws JRException { BufferedImage pageImage = new BufferedImage(jasperPrint.getPageWidth(), jasperPrint.getPageHeight(), BufferedImage.TYPE_INT_RGB); JRGraphics2DExporter exporter = new JRGraphics2DExporter(); exporter.setExporterInput(new SimpleExporterInput(jasperPrint)); SimpleGraphics2DExporterOutput output = new SimpleGraphics2DExporterOutput(); output.setGraphics2D((Graphics2D) pageImage.getGraphics()); exporter.setExporterOutput(output); SimpleGraphics2DReportConfiguration configuration = new SimpleGraphics2DReportConfiguration(); configuration.setPageIndex(page); exporter.setConfiguration(configuration); exporter.exportReport(); return pageImage; } public static void main(final String args[]) throws IOException { final String path = "core/src/test/resources/map-data"; final File root = new File(path); final Iterable<File> files = Files.fileTraverser().depthFirstPostOrder(root); for (File file: files) { if (Files.getFileExtension(file.getName()).equals("png")) { final BufferedImage img = ImageIO.read(file); writeUncompressedImage(img, file.getAbsolutePath()); } } } private File getRelatedFile(final String name) { final String expectedFileName = this.expectedPath.getName(); return new File(this.expectedPath.getParentFile(), (expectedFileName.contains("expected") ? expectedFileName.replace("expected", name) : name + "-" + expectedFileName)); } /** * This method calculates the distance between the signatures of an image and the reference one. The * signatures for the image passed as the parameter are calculated inside the method. * * @return a number between 0 and 10000 or Double.MAX_VALUE on images format error. */ private double calcDistance(final BufferedImage actual) { // There are several ways to calculate distances between two vectors, // we will calculate the sum of the distances between the RGB values of // pixels in the same positions. 
if (actual.getWidth() != this.expectedImage.getWidth()) { LOGGER.error("Not the same width (expected: {}, actual: {})", this.expectedImage.getWidth(), actual.getWidth()); return Double.MAX_VALUE; } if (actual.getHeight() != this.expectedImage.getHeight()) { LOGGER.error("Not the same height (expected: {}, actual: {})", this.expectedImage.getHeight(), actual.getHeight()); return Double.MAX_VALUE; } if (actual.getSampleModel().getNumBands() != this.expectedImage.getSampleModel().getNumBands()) { LOGGER.error("Not the same number of bands (expected: {}, actual: {})", this.expectedImage.getSampleModel().getNumBands(), actual.getSampleModel().getNumBands()); return Double.MAX_VALUE; } double dist = 0; double[] expectedPixel = new double[this.expectedImage.getSampleModel().getNumBands()]; double[] actualPixel = new double[this.expectedImage.getSampleModel().getNumBands()]; int[] maskPixel = new int[1]; RandomIter expectedIterator = RandomIterFactory.create(this.expectedImage, null); RandomIter actualIterator = RandomIterFactory.create(actual, null); RandomIter maskIterator = RandomIterFactory.create(this.maskImage, null); Graphics2D diffGraphics = this.diffImage.createGraphics(); for (int x = 0; x < actual.getWidth(); x++) { for (int y = 0; y < actual.getHeight(); y++) { expectedIterator.getPixel(x, y, expectedPixel); actualIterator.getPixel(x, y, actualPixel); maskIterator.getPixel(x, y, maskPixel); double squareDist = 0.0; for (int i = 0; i < this.expectedImage.getSampleModel().getNumBands(); i++) { double colorDist = (expectedPixel[i] - actualPixel[i]) * (maskPixel[0] / 255.0); if (colorDist > 7.0) { // allow a small color change (JPEG compression, anti-aliasing, ...) 
squareDist += colorDist * colorDist; } } double pxDiff = Math.sqrt(squareDist) / Math.sqrt(this.expectedImage.getSampleModel().getNumBands()); dist += pxDiff / 255; diffGraphics.setColor(new Color((int) Math.round(pxDiff), 0, 0)); diffGraphics.drawRect(x, y, 1, 1); } } diffGraphics.dispose(); // Normalise dist = dist / this.expectedImage.getWidth() / this.expectedImage.getHeight() * 10000; LOGGER.debug("Current distance: {}", dist); return dist; } /** * Check that the actual image and the image calculated by this object are within the given distance. * * @param actual the image to compare to "this" image. */ public void assertSimilarity(final File actual) throws IOException { assertSimilarity(actual, 1); } /** * Check that the actual image and the image calculated by this object are within the given distance. * * @param maxDistance the maximum distance between the two images. */ public void assertSimilarity( final byte[] graphicData, final double maxDistance) throws IOException { assertSimilarity(ImageIO.read(new ByteArrayInputStream(graphicData)), maxDistance); } /** * Check that the actual image and the image calculated by this object are within the given distance. * * @param graphicFiles a list of graphic files * @param width the graphic width (required for svg files) * @param height the graphic height (required for svg files) * @param maxDistance the maximum distance between the two images. */ public void assertSimilarity( final List<URI> graphicFiles, final int width, final int height, final double maxDistance) throws IOException, TranscoderException { assertSimilarity(mergeImages(graphicFiles, width, height), maxDistance); } /** * Check that the actual image and the image calculated by this object are within the given distance. * * @param maxDistance the maximum distance between the two images. 
*/ public void assertSimilarity( final URI svgFile, final int width, final int height, final double maxDistance) throws IOException, TranscoderException { assertSimilarity(convertFromSvg(svgFile, width, height), maxDistance); } /** * Check that the actual image and the image calculated by this object are within the given distance. * * @param maxDistance the maximum distance between the two images. */ public void assertSimilarity( final JasperPrint jasperPrint, final Integer page, final double maxDistance) throws IOException, JRException { assertSimilarity(exportReportToImage(jasperPrint, page), maxDistance); } /** * Check that the actual image and the image calculated by this object are within the given distance. * * @param actualFile the file to compare to "this" image. * @param maxDistance the maximum distance between the two images. */ public void assertSimilarity(final File actualFile, final double maxDistance) throws IOException { assertSimilarity(ImageIO.read(actualFile), maxDistance); } /** * Check that the actual image and the image calculated by this object are within the given distance. * * @param actualImage the image to compare to "this" image. * @param maxDistance the maximum distance between the two images. 
*/ public void assertSimilarity( final BufferedImage actualImage, final double maxDistance) throws IOException { if (!this.expectedPath.exists()) { ImageIO.write(actualImage, "png", expectedPath); throw new AssertionError("The expected file was missing and has been generated: " + expectedPath.getAbsolutePath()); } final double distance = calcDistance(actualImage); if (distance > maxDistance) { final File actualOutput = getRelatedFile("actual"); ImageIO.write(actualImage, "png", actualOutput); final File diffOutput = getRelatedFile("diff"); ImageIO.write(this.diffImage, "png", diffOutput); throw new AssertionError(String.format("similarity difference between images is: %s which is " + "greater than the max distance of %s%n" + "actual=%s%n" + "expected=%s", distance, maxDistance, actualOutput.getAbsolutePath(), this.expectedPath.getAbsolutePath())); } } }
bsd-2-clause
yajunyang/BioImage
lib/VIB-lib-master/src/main/java/inference/DistributionInference.java
1654
package inference; public class DistributionInference extends Inference implements InferenceCaller { public DistributionInference() { caller=this; } boolean evidOnly; double curDiriDenom; int curBin; /* the distribution */ double maxProb,minProb; double[] distribution; public void doit(int M) { super.initCount(); evidOnly=true; super.doit(M); double normal=logEvidences[M]; evidOnly=false; distribution=new double[K()]; int k; curDiriDenom=N()+M+1; double totp=0; maxProb=-1.0; minProb=1e300; for(curBin=0;curBin<K();curBin++) { super.doit(M); distribution[curBin]=Math.exp(logEvidences[M]-normal); System.err.println("Dist "+curBin+": "+distribution[curBin]); totp+=distribution[curBin]; if(maxProb<distribution[curBin]) maxProb=distribution[curBin]; if(minProb>distribution[curBin]) minProb=distribution[curBin]; } System.err.println("Total probability "+totp); } /* after doit(), this function calculates the new bin index */ public /*data*/double getProbability(/*data*/int originalBin) { /* TODO: binary search */ if(originalBin<0 || originalBin>=K()) return 0.0; return distribution[originalBin]; } public double getMaxProb() {return maxProb;}; public double getMinProb() {return minProb;}; public double logExpectationFactor(int m, int lower_bound,int upper_bound) { if(evidOnly || curBin<=lower_bound || curBin>upper_bound) return 0.0; return Math.log((getCount(lower_bound,upper_bound)+1)/curDiriDenom/(upper_bound-lower_bound)); } public double logPrior(int m) { return defaultLogPrior(m); } }
bsd-2-clause
jcodec/jcodec
src/main/java/org/jcodec/codecs/vpx/vp8/data/QuantCommon.java
5008
package org.jcodec.codecs.vpx.vp8.data; import static org.jcodec.codecs.vpx.vp8.data.CommonData.Comp.AC; import static org.jcodec.codecs.vpx.vp8.data.CommonData.Comp.DC; import static org.jcodec.codecs.vpx.vp8.data.CommonData.Quant.UV; import static org.jcodec.codecs.vpx.vp8.data.CommonData.Quant.Y1; import static org.jcodec.codecs.vpx.vp8.data.CommonData.Quant.Y2; import java.util.EnumMap; import org.jcodec.codecs.vpx.vp8.CommonUtils; import org.jcodec.codecs.vpx.vp8.data.CommonData.Comp; import org.jcodec.codecs.vpx.vp8.data.CommonData.Quant; /** * This class is part of JCodec ( www.jcodec.org ) This software is distributed * under FreeBSD License. * * The class is a direct java port of libvpx's * (https://github.com/webmproject/libvpx) relevant VP8 code with significant * java oriented refactoring. * * @author The JCodec project * */ public class QuantCommon { static final short[] dc_qlookup = { 4, 5, 6, 7, 8, 9, 10, 10, 11, 12, 13, 14, 15, 16, 17, 17, 18, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 25, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 91, 93, 95, 96, 98, 100, 101, 102, 104, 106, 108, 110, 112, 114, 116, 118, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 143, 145, 148, 151, 154, 157, }; static final short[] ac_qlookup = { 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 119, 122, 125, 128, 131, 134, 137, 140, 143, 146, 149, 152, 155, 158, 161, 164, 167, 170, 173, 177, 181, 185, 189, 193, 197, 201, 205, 209, 213, 217, 221, 225, 229, 234, 239, 
245, 249, 254, 259, 264, 269, 274, 279, 284, }; public static interface QuantLookup { short call(short qindex, short delta); } public static EnumMap<Quant, EnumMap<Comp, QuantLookup>> lookup = new EnumMap<Quant, EnumMap<Comp, QuantLookup>>( Quant.class); static { // Preparing the Y1 quant functions EnumMap<Comp, QuantLookup> temp = new EnumMap<Comp, QuantLookup>(Comp.class); temp.put(DC, new QuantLookup() { @Override public short call(short qindex, short delta) { return dc_lookup(qindex, delta); } }); temp.put(AC, new QuantLookup() { @Override public short call(short qindex, short delta) { return ac_lookup(qindex, (short) 0); } }); lookup.put(Y1, temp); // Preparing the Y2 quant functions temp = new EnumMap<Comp, QuantLookup>(Comp.class); temp.put(DC, new QuantLookup() { @Override public short call(short qindex, short delta) { return (short) (dc_lookup(qindex, delta) << 1); } }); temp.put(AC, new QuantLookup() { @Override public short call(short qindex, short delta) { /* * For all x in [0..284], x*155/100 is bitwise equal to (x*101581) >> 16. The * smallest precision for that is '(x*6349) >> 12' but 16 is a good word size. 
*/ return (short) Math.max(8, (ac_lookup(qindex, delta) * 101581) >> 16); } }); lookup.put(Y2, temp); // Preparing the UV quant functions temp = new EnumMap<Comp, QuantLookup>(Comp.class); temp.put(DC, new QuantLookup() { @Override public short call(short qindex, short delta) { return CommonUtils.clamp(dc_lookup(qindex, delta), (short) 0, (short) 132); } }); temp.put(AC, new QuantLookup() { @Override public short call(short qindex, short delta) { return ac_lookup(qindex, delta); } }); lookup.put(UV, temp); } static short dc_lookup(short QIndex, short Delta) { return dc_qlookup[CommonUtils.clamp((short) (QIndex + Delta), (short) 0, (short) 127)]; } static short ac_lookup(short QIndex, short Delta) { return ac_qlookup[CommonUtils.clamp((short) (QIndex + Delta), (short) 0, (short) 127)]; } public static short doLookup(CommonData cm, Quant q, Comp c, short qindex) { return doLookup(q, c, qindex, cm.delta_q.get(q).get(c)); } public static short doLookup(Quant q, Comp c, short qindex, short deltaQ) { return lookup.get(q).get(c).call(qindex, deltaQ); } }
bsd-2-clause
lell/libnp
test/unit/TestSpecialFunctions.java
904
package unit;

import static org.junit.Assert.*;
import org.junit.Test;
import static libnp.statistics.SpecialFunctions.incompleteBeta;
import static libnp.util.Float.compareFloats;

/**
 * Unit tests for {@code SpecialFunctions.incompleteBeta}.
 */
public class TestSpecialFunctions {

    /** Absolute tolerance used by every comparison in this test. */
    private static final double TOLERANCE = 1e-10;

    /** Asserts that incompleteBeta(a, b, x) equals {@code expected} within TOLERANCE. */
    private static void assertBeta(double a, double b, double x, double expected) {
        assertTrue(compareFloats(incompleteBeta(a, b, x), expected, TOLERANCE) == 0);
    }

    @Test
    public void test_incompleteBeta() {
        // With both shape parameters equal to 1, incompleteBeta(1, 1, x) must equal x.
        for (double x = 0.0; x <= 1.0; x += 0.01) {
            assertBeta(1.0, 1.0, x, x);
        }

        // Disabled in the original source; kept here for reference:
        // assertEquals(966.05087898981337, incompleteBeta(1e-15, 1e-3, 1e-3), 1e-12);

        // At x = 1 the (regularized) incomplete beta must evaluate to exactly 1
        // regardless of the shape parameters.
        assertBeta(0.5, 7.0, 1.0, 1.0);
        assertBeta(7.0, 0.5, 1.0, 1.0);
        assertBeta(7.0, 7.0, 1.0, 1.0);
        assertBeta(0.5, 0.5, 1.0, 1.0);
    }
}
bsd-2-clause
CardinalDevelopment/Cardinal
src/main/java/in/twizmwaz/cardinal/module/region/parser/CircleRegionParser.java
2977
/* * Copyright (c) 2016, Kevin Phoenix * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package in.twizmwaz.cardinal.module.region.parser; import in.twizmwaz.cardinal.module.region.RegionException; import in.twizmwaz.cardinal.module.region.RegionParser; import in.twizmwaz.cardinal.module.region.exception.attribute.InvalidRegionAttributeException; import in.twizmwaz.cardinal.module.region.exception.attribute.MissingRegionAttributeException; import in.twizmwaz.cardinal.util.Numbers; import lombok.Getter; import org.bukkit.util.Vector; import org.jdom2.Element; @Getter public class CircleRegionParser implements RegionParser { private final Vector center; private final double radius; /** * Parses an element for a circle region. 
* * @param element The element. * @throws RegionException Thrown if the center or radius attributes are missing or invalid. */ public CircleRegionParser(Element element) throws RegionException { String centerValue = element.getAttributeValue("center"); if (centerValue == null) { throw new MissingRegionAttributeException("center", element); } double[] coords = Numbers.parseCoordinates(centerValue); if (coords == null || coords.length != 2) { throw new InvalidRegionAttributeException("center", element); } center = new Vector(coords[0], 0, coords[1]); String radiusValue = element.getAttributeValue("radius"); if (radiusValue == null) { throw new MissingRegionAttributeException("radius", element); } if (!Numbers.isDecimal(radiusValue)) { throw new InvalidRegionAttributeException("radius", element); } radius = Numbers.parseDouble(radiusValue); } }
bsd-2-clause
RealTimeGenomics/rtg-tools
test/com/rtg/usage/UsageLoggingTest.java
2750
/* * Copyright (c) 2014. Real Time Genomics Limited. * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the * distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.rtg.usage; import java.io.File; import java.io.IOException; import java.util.Date; import java.util.Properties; import java.util.UUID; import com.rtg.util.TestUtils; import com.rtg.util.io.FileUtils; import com.rtg.util.io.TestDirectory; import junit.framework.TestCase; /** */ public class UsageLoggingTest extends TestCase { public void testUsageTracking() throws IOException { final Properties prop = new Properties(); prop.setProperty(UsageLogging.REQUIRE_USAGE, "false"); prop.setProperty(UsageLogging.USAGE_DESTINATION, UsageLogging.UsageDestination.FILE_OR_SERVER.toString()); try (final TestDirectory dir = new TestDirectory()) { final File cfgFile = UsageConfiguration.createSimpleConfigurationFile(new File(dir, "cfgFile"), dir.getPath(), null); final UUID runId = UUID.randomUUID(); final UsageLogging usage = new UsageLogging(prop, "testModule", runId, cfgFile, false); usage.recordBeginning(); usage.recordEnd(428428791, true); final File usageFile = FileUsageLoggingClient.ensureUsageFile(dir, new Date()); final String res = FileUtils.fileToString(usageFile); TestUtils.containsAll(res, "testModule", "428428791", runId.toString(), "Success", "Start"); assertTrue(usage.allowFileLogging()); assertFalse(usage.requireUsage()); } } }
bsd-2-clause
igorbotian/rsskit
src/main/java/com/rhcloud/igorbotian/rsskit/rest/facebook/api/FacebookAPIImpl.java
7156
package com.rhcloud.igorbotian.rsskit.rest.facebook.api;

import com.fasterxml.jackson.databind.JsonNode;
import com.rhcloud.igorbotian.rsskit.db.RsskitDataSource;
import com.rhcloud.igorbotian.rsskit.db.facebook.FacebookEntityManager;
import com.rhcloud.igorbotian.rsskit.db.facebook.FacebookEntityManagerImpl;
import com.rhcloud.igorbotian.rsskit.rest.RestParseException;
import com.rhcloud.igorbotian.rsskit.rest.facebook.FacebookException;
import com.rhcloud.igorbotian.rsskit.rest.facebook.FacebookNotification;
import com.rhcloud.igorbotian.rsskit.rest.facebook.FacebookPost;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import java.net.URL;
import java.sql.SQLException;
import java.util.*;

/**
 * Facebook Graph API facade (API version 2.3) that combines the OAuth, home feed,
 * object and notifications endpoints, and persists access tokens through a
 * {@link FacebookEntityManager} backed by the supplied data source.
 *
 * @author Igor Botian
 */
public class FacebookAPIImpl implements FacebookAPI {

    private static final Logger LOGGER = LogManager.getLogger(FacebookAPIImpl.class);
    /** Graph API version this implementation targets; returned by {@link #version()}. */
    private static final String API_VERSION = "2.3";

    private final OAuthEndpoint oAuth = new OAuthEndpoint();
    private final HomeEndpoint home;
    private final ObjectEndpoint objects;
    private final NotificationsEndpoint notifications;
    /** Stores registered access tokens and per-token "since" markers. */
    private final FacebookEntityManager entityManager;

    /**
     * @param source data source backing the entity manager; must not be null
     * @throws FacebookException if the entity manager cannot be initialized
     */
    public FacebookAPIImpl(RsskitDataSource source) throws FacebookException {
        Objects.requireNonNull(source);

        try {
            this.entityManager = new FacebookEntityManagerImpl(source);
        } catch (SQLException e) {
            // NOTE(review): the SQLException cause is dropped here, which loses the
            // underlying failure detail — consider chaining `e` if FacebookException
            // supports a cause constructor. TODO confirm.
            throw new FacebookException("Failed to initialize Facebook entity manager");
        }

        this.home = new HomeEndpoint(this);
        this.objects = new ObjectEndpoint(this);
        this.notifications = new NotificationsEndpoint(this);
    }

    /**
     * Exchanges an OAuth code for an access token and registers it with the
     * entity manager; returns the manager-issued token identifier.
     */
    @Override
    public String requestAccessToken(String clientID, String clientSecret, String code, URL callbackURL)
            throws FacebookException {

        Objects.requireNonNull(clientID);
        Objects.requireNonNull(clientSecret);
        Objects.requireNonNull(code);
        Objects.requireNonNull(callbackURL);

        AccessToken token = oAuth.requestAccessToken(clientID, clientSecret, code, callbackURL);
        return entityManager.registerAccessToken(token.token, token.expires);
    }

    /** Builds the OAuth authorization URL for the given client, scopes and callback. */
    @Override
    public URL getAuthorizationURL(String clientID, Set<String> permissions, URL callbackURL)
            throws FacebookException {

        Objects.requireNonNull(clientID);
        Objects.requireNonNull(permissions);
        Objects.requireNonNull(callbackURL);

        return oAuth.getAuthorizationURL(clientID, permissions, callbackURL);
    }

    /** Returns the targeted Graph API version string. */
    @Override
    public String version() {
        return API_VERSION;
    }

    /** Checks token expiry via the entity manager. */
    @Override
    public boolean isAccessTokenExpired(String token) throws FacebookException {
        Objects.requireNonNull(token);
        // NOTE(review): "isAcessTokenExpired" is a typo in the entity-manager API
        // ("Acess"); fixing it requires changing FacebookEntityManager, not this class.
        return entityManager.isAcessTokenExpired(token);
    }

    /**
     * Fetches the home news feed for the given registered token, advancing the
     * stored "since" marker so subsequent calls fetch newer posts.
     *
     * @throws FacebookException if the token is not registered
     */
    @Override
    public List<FacebookPost> getNewsFeed(String token) throws FacebookException {
        Objects.requireNonNull(token);

        String accessToken = entityManager.getAccessToken(token);

        if (accessToken == null) {
            throw new FacebookException("Access token is not registered: " + token);
        }

        Date since = entityManager.getSince(token);
        List<FacebookPost> feed = home.getNewsFeed(accessToken, since);

        // always returning at least one post
        // NOTE(review): the marker is taken from feed.get(1) (the second post), which
        // presumably keeps the newest post re-delivered on the next call — confirm
        // this is intentional rather than an off-by-one.
        if (feed.size() > 1) {
            FacebookPost post = feed.get(1);
            entityManager.setSince(token, post.createdTime);
        }

        return feed;
    }

    /**
     * Fetches up to {@code limit} notifications, resolves each one's target object
     * into a full post, and marks every unread notification as read. Failures on
     * individual notifications are logged and skipped rather than aborting the batch.
     */
    @Override
    public List<FacebookNotification> getNotifications(String token, Integer limit) throws FacebookException {
        Objects.requireNonNull(token);

        String accessToken = entityManager.getAccessToken(token);

        if (accessToken == null) {
            throw new FacebookException("Access token is not registered: " + token);
        }

        List<IncompleteFacebookNotification> incompleteNotifications = this.notifications.get(accessToken, limit);
        List<FacebookNotification> notifications = new ArrayList<>(incompleteNotifications.size());

        for (IncompleteFacebookNotification incompleteNotification : incompleteNotifications) {
            try {
                notifications.add(makeNotificationComplete(incompleteNotification, accessToken));
            } catch (FacebookException e) {
                // Best effort: a notification whose object cannot be resolved is dropped.
                LOGGER.error("Failed to obtain an object of a specified notification: " + incompleteNotification.id, e);
            }
        }

        for (FacebookNotification notification : notifications) {
            if (notification.unread) {
                try {
                    this.notifications.markAsRead(accessToken, notification);
                } catch (FacebookException e) {
                    // Best effort: failure to mark as read does not remove the notification.
                    LOGGER.error("Failed to mark a specified notification as read: " + notification.link, e);
                }
            }
        }

        return notifications;
    }

    /**
     * Turns an incomplete notification into a full one by parsing its embedded
     * object, or — if the embedded object is absent or unparseable — fetching the
     * object by ID from the objects endpoint.
     *
     * @throws FacebookException if the object can neither be parsed nor fetched
     */
    private FacebookNotification makeNotificationComplete(IncompleteFacebookNotification notification,
                                                          String accessToken) throws FacebookException {
        assert notification != null;
        assert accessToken != null;

        FacebookPost post = null;

        if (notification.isObjectComplete()) {
            try {
                post = FacebookPost.parse(notification.object);
            } catch (RestParseException e) {
                // Fall through to the by-ID fetch below.
                LOGGER.warn("Failed to parse a notification object: " + notification.id, e);
            }
        }

        if (post == null) {
            if (notification.isObjectIdentified()) {
                try {
                    post = FacebookPost.parse(objects.get(notification.getObjectID(), accessToken));
                } catch (RestParseException e) {
                    throw new FacebookException("Failed to get a Facebook object with a specified ID: "
                            + notification.getObjectID(), e);
                }
            } else {
                throw new FacebookException("Notifications object has no ID attribute: " + notification.id);
            }
        }

        return new FacebookNotification(
                notification.id,
                notification.from,
                notification.createdTime,
                notification.title,
                notification.link,
                notification.unread,
                post
        );
    }

    /** Convenience wrapper: returns only the resolved post objects of the notifications. */
    @Override
    public List<FacebookPost> getPostsFromNotifications(String token, Integer limit) throws FacebookException {
        Objects.requireNonNull(token);

        List<FacebookNotification> notifications = getNotifications(token, limit);
        List<FacebookPost> posts = new ArrayList<>(notifications.size());

        for(FacebookNotification notification : notifications) {
            posts.add(notification.object);
        }

        return posts;
    }

    /** Fetches a raw Graph API object by ID. */
    @Override
    public JsonNode getObject(String id, String accessToken) throws FacebookException {
        Objects.requireNonNull(id);
        return objects.get(id, accessToken);
    }
}
bsd-2-clause
openCage/loracle-maven-plugin
src/main/java/de/pfabulist/loracle/license/Coordinates2License.java
19881
package de.pfabulist.loracle.license;

import de.pfabulist.loracle.attribution.CopyrightHolder;
import de.pfabulist.loracle.maven.Coordinates;
import de.pfabulist.roast.nio.Files_;
import de.pfabulist.roast.nio.Paths_;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

import javax.annotation.Nullable;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.function.*;

import static de.pfabulist.kleinod.text.Strings.getBytes;
import static de.pfabulist.roast.NonnullCheck._nn;
import static de.pfabulist.roast.NonnullCheck.n_;

/**
 * Copyright (c) 2006 - 2016, Stephan Pfab
 * SPDX-License-Identifier: BSD-2-Clause
 */
// Central registry mapping Maven Coordinates to the license information (LiCo)
// gathered for them from several sources (pom, license text, headers, notices),
// plus reporting (summery) and NOTICE-file generation.
@SuppressWarnings( { "PMD.UnusedPrivateField" } )
public class Coordinates2License {

    // One license entry exactly as declared in a Maven pom, together with the
    // license mapped from each of its three fields ("-" means not mapped yet).
    @SuppressFBWarnings( { "URF_UNREAD_FIELD", "URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD" } )
    public static class MLicense {

        public String mavenLicenseName = "";
        public String byName = "-";
        public String mavenLicenseUrl = "";
        public String byUrl = "-";
        public String mavenLicenseComment = "";
        public String byComment = "-";

        public MLicense( String mavenLicenseName, String mavenLicenseUrl, String mavenLicenseComment ) {
            this.mavenLicenseName = mavenLicenseName;
            this.mavenLicenseUrl = mavenLicenseUrl;
            this.mavenLicenseComment = mavenLicenseComment;
        }

        public String getName() {
            return mavenLicenseName;
        }

        public String getUrl() {
            return mavenLicenseUrl;
        }

        public String getComment() {
            return mavenLicenseComment;
        }

        public void setByName( MappedLicense byName ) {
            this.byName = byName.toString();
        }

        public void setByUrl( MappedLicense byUrl ) {
            this.byUrl = byUrl.toString();
        }

        public void setByComment( MappedLicense byComment ) {
            this.byComment = byComment.toString();
        }
    }

    // All license-related findings for one artifact: the final license decision,
    // the per-source evidence (pom, license file, header, notice, coordinates),
    // the copyright holder, scope, and bookkeeping flags.
    @SuppressFBWarnings( { "URF_UNREAD_FIELD" } ) // txt only in toJson
    public static class LiCo {

        // Final mapped license (string form) and the reasoning that produced it.
        private Optional<String> license = Optional.empty();
        private String licenseReason = "";
        private List<MLicense> mavenLicenses = Collections.emptyList();
        private Optional<CopyrightHolder> copyrightHolder = Optional.empty();
        private String holderReason = "";
        // Maven scope; "plugin" is the weakest (see getScopeLevel).
        private String scope = "plugin";
        private String message = "";
        // Raw texts found in the artifact and the licenses mapped from them.
        private String licenseTxt = "";
        private String headerTxt = "";
        private String licenseTxtLicense = "";
        private String headerLicense = "";
        private String pomLicense = "";
        private String byCoordinates = "";
        private String notice = "";
        private String pomHeaderLicense = "";
        private List<String> licenseFilenames = Collections.emptyList();
        // Not serialized: whether the artifact is actually used, and by whom.
        transient private boolean used = false;
        // NOTE(review): "useedBy" is a typo of "usedBy"; left unchanged since it is a field name.
        transient private List<String> useedBy = new ArrayList<>();

        public String getPomHeader() {
            return pomHeader;
        }

        public void setPomHeader( String pomHeader ) {
            this.pomHeader = pomHeader;
        }

        private String pomHeader = "";

        public void setNoticeLicense( MappedLicense noticeLicense ) {
            this.noticeLicense = noticeLicense.toString();
        }

        private String noticeLicense = "";

        public Optional<String> getLicense() {
            return license;
        }

        public String getNotice() {
            return notice;
        }

        public void setNotice( String notice ) {
            this.notice = notice;
        }

        public Optional<CopyrightHolder> getCopyrightHolder() {
            return copyrightHolder;
        }

        // Sets or clears the final license; an absent MappedLicense resets both
        // the license and its reason.
        public void setLicense( MappedLicense mlicense ) {
            mlicense.ifPresent( l -> {
                license = Optional.of( l.toString() );
                this.licenseReason = mlicense.toString();
            } );

            if( !mlicense.isPresent() ) {
                licenseReason = "";
                license = Optional.empty();
            }
        }

        public String getScope() {
            return scope;
        }

        public void setMessage( String message ) {
            this.message = message;
        }

        public boolean isUsed() {
            return used;
        }

        public Optional<CopyrightHolder> getHolder() {
            return copyrightHolder;
        }

        public String getMessage() {
            return message;
        }

        public void setHolder( Optional<CopyrightHolder> holder ) {
            this.copyrightHolder = holder;
        }

        public void setLicenseTxt( String licenseTxt ) {
            this.licenseTxt = licenseTxt;
        }

        public void setHeaderTxt( String headerTxt ) {
            this.headerTxt = headerTxt;
        }

        public String getLicenseReason() {
            return licenseReason;
        }

        public void setMLicenses( List<MLicense> mLicenses ) {
            this.mavenLicenses = mLicenses;
        }

        public List<MLicense> getMavenLicenses() {
            return mavenLicenses;
        }

        public String getLicenseTxt() {
            return licenseTxt;
        }

        // Stores either the mapped license or (if unmapped) the reason why not.
        public void setLicenseTxtLicense( MappedLicense licenseTxtLicense ) {
            if( licenseTxtLicense.isPresent() ) {
                this.licenseTxtLicense = licenseTxtLicense.toString();
            } else {
                this.licenseTxtLicense = licenseTxtLicense.getReason();
            }
        }

        public void setHeaderLicense( MappedLicense headerLicense ) {
            if( headerLicense.isPresent() ) {
                this.headerLicense = headerLicense.toString();
            } else {
                this.headerLicense = headerLicense.getReason();
            }
        }

        public String getLicenseTextLicense() {
            return licenseTxtLicense;
        }

        public String getHeaderLicense() {
            return headerLicense;
        }

        public void setPomLicense( MappedLicense pomLicense ) {
            if( pomLicense.isPresent() ) {
                this.pomLicense = pomLicense.toString();
            } else {
                this.pomLicense = pomLicense.getReason();
            }
        }

        public String getPomLicense() {
            return pomLicense;
        }

        public String getHeaderTxt() {
            return headerTxt;
        }

        public void setByCoordinates( MappedLicense byCoordinates ) {
            if( byCoordinates.isPresent() ) {
                this.byCoordinates = byCoordinates.toString();
            } else {
                this.byCoordinates = byCoordinates.getReason();
            }
        }

        public String getByCoordinates() {
            return byCoordinates;
        }

        public String getNoticeLicense() {
            return noticeLicense;
        }

        public String getPomHeaderLicense() {
            return pomHeaderLicense;
        }

        public void setPomHeaderLicense( MappedLicense license ) {
            this.pomHeaderLicense = license.toString();
        }

        public void setMavenLicenses( List<MLicense> mavenLicenses ) {
            this.mavenLicenses = mavenLicenses;
        }

        public void setLicenseFilenames( List<String> licenseFilenames ) {
            this.licenseFilenames = licenseFilenames;
        }

        public List<String> getLicenseFilenames() {
            return licenseFilenames;
        }

        // Debug dump to stdout.
        public void print() {
            System.out.println( "lico (coordinates2license)" );
            System.out.println( "    licenseTxt " + licenseTxt );
            System.out.println( "    licenseTxtLicense " + licenseTxtLicense );
            System.out.println( "    coo " + byCoordinates );
        }
    }

    // All known artifacts and their findings.
    private Map<Coordinates, LiCo> list = new HashMap<>();
    // Whether "AND" in license expressions is treated like "OR".
    private boolean andIsOr = false;
    @Nullable transient private Findings log;
    // Coordinates of the project being analyzed itself (excluded from NOTICE deps).
    private transient Optional<Coordinates> self = Optional.empty();

    // Registers an artifact (idempotent) and marks it as used.
    public void add( Coordinates coo ) {
        list.putIfAbsent( coo, new LiCo() );
        _nn( list.get( coo ) ).used = true;
    }

    // Narrows the stored scope of a known artifact: a numerically lower level
    // (see getScopeLevel) wins, i.e. "compile" overrides "test".
    public void updateScope( Coordinates coo, String scope ) {
        if( !list.containsKey( coo ) ) {
            throw new IllegalStateException( "no such " + coo );
        }

        LiCo liCo = _nn( list.get( coo ) );

        String oldScope = _nn( liCo.scope );

        if( getScopeLevel( scope ) < getScopeLevel( oldScope ) ) {
            liCo.scope = scope;
        }
    }

    // Orders Maven scopes by relevance; lower = more relevant. Unknown scopes
    // sort last (100).
    public static int getScopeLevel( String scope ) {
        switch( scope ) {
            case "plugin":
                return 8;
            case "import":
                return 7;
            case "system":
                return 6;
            case "test":
                return 5;
            case "runtime":
                return 4;
            case "provided":
                return 3;
            case "optional":
                return 2;
            case "compile":
                return 1;
            default:
//                getLog().warn( "unexpected scope " + scope );
                return 100;
        }
    }

    // Fails fast if no logger was injected via setLog.
    private Findings getLog() {
        return n_( log, () -> new IllegalStateException( "no logger" ) );
    }

    // Applies f to every used artifact.
    public void update( BiConsumer<Coordinates, LiCo> f ) {
        list.forEach( ( c, coli ) -> {
            if( coli.isUsed() ) {
                f.accept( c, coli );
            }
        } );
    }

    // Applies f to every used artifact that also matches pred.
    public void update( Predicate<LiCo> pred, BiConsumer<Coordinates, LiCo> f ) {
        list.forEach( ( c, coli ) -> {
            if( coli.isUsed() && pred.test( coli ) ) {
                f.accept( c, coli );
            }
        } );
    }

    public void setSelf( Coordinates coo ) {
        self = Optional.of( coo );
    }

    public Optional<LiCo> get( Coordinates coordinates ) {
        return Optional.ofNullable( list.get( coordinates ) );
    }

    // Runs the compatibility check f on every used, licensed artifact; a
    // non-empty message is stored on the artifact and logged (severity depends
    // on scope, see scopeDependingLog).
    public void checkCompatibility( BiFunction<Coordinates, String, String> f ) {
        list.forEach( ( c, coli ) -> {
            if( coli.isUsed() ) {
                coli.getLicense().ifPresent( l -> {
                    String message = _nn( f.apply( c, l ) );
                    if( !message.isEmpty() ) {
                        scopeDependingLog( c, message );
                    }
                    coli.setMessage( message );
                } );
            }
        } );
    }

    // Errors for scopes that ship with the artifact; warnings for plugin/test.
    private void scopeDependingLog( Coordinates coo, String message ) {
        String scope = n_( list.get( coo ), () -> new IllegalStateException( "huh" ) ).getScope();

        if( !scope.equals( "plugin" ) && !scope.equals( "test" ) ) {
            getLog().error( message );
        } else {
            getLog().warn( message );
        }
    }

    // NOTE(review): "summery" is a typo of "summary"; left unchanged as it is public API.
    // Logs a per-artifact report (sorted by scope relevance, then name) with the
    // evidence from every license source at debug level.
    public void summery() {
        list.entrySet().stream().
                filter( e -> entryPred( e, ( coo, lico ) -> lico.isUsed() ) ).
                sorted( ( a, b ) -> {
                    int scope = getScopeLevel( _nn( a.getValue() ).getScope() ) - getScopeLevel( _nn( b.getValue() ).getScope() );
                    if( scope != 0 ) {
                        return scope;
                    }
                    return a.toString().compareTo( b.toString() );
                } ).
                forEach( e -> {
                    Coordinates c = _nn( e.getKey() );
                    LiCo lico = _nn( e.getValue() );

                    getLog().info( String.format( "%-80s %-10s %-50s ", c, lico.getScope(), lico.getLicense().map( Object::toString ).orElse( "-" ) ) + lico.getHolder().map( Object::toString ).orElse( "-" ) );

                    if( !lico.getLicense().isPresent() ) {
                        lico.useedBy.forEach( u -> getLog().error( "   used by: " + u ) );
                        getLog().error( "   no license found" );
                    }

                    if( !lico.getMessage().isEmpty() ) {
                        lico.useedBy.forEach( u -> getLog().error( "   used by " + u ) );
                        getLog().error( "   " + lico.getMessage() );
                    }

                    getLog().debug( "     [sum] " + lico.getLicenseReason() );
                    getLog().debug( "     by Coordinates " + lico.getByCoordinates() );
                    getLog().debug( "     by Pom " + lico.getPomLicense() );

                    lico.getMavenLicenses().forEach( ml -> {
                        getLog().debug( "     by Pom Licenses" );
                        getLog().debug( "        <" );
                        if( ml.byName.length() > 2 ) {
                            getLog().debug( "           " + ml.byName );
                        } else {
                            getLog().debug( "           [-] " + ml.getName() );
                        }
                        if( ml.byUrl.length() > 2 ) {
                            getLog().debug( "           " + ml.byUrl );
                        } else {
                            getLog().debug( "           [-] " + ml.getUrl() );
                        }
                        if( ml.byComment.length() > 2 ) {
                            getLog().debug( "           " + ml.byComment );
                        } else {
                            getLog().debug( "           [-] " + ml.getComment() );
                        }
                        getLog().debug( "        >" );
                    } );

                    getLog().debug( "     by Pom Header " + ( lico.getPomHeader().isEmpty() ? "" : "[+] " ) + lico.getPomHeaderLicense() );
                    getLog().debug( "     by License Text " + ( lico.getLicenseTxt().isEmpty() ? "" : "[+] " ) + lico.getLicenseTextLicense() );
                    getLog().debug( "     by Header " + ( lico.getHeaderTxt().isEmpty() ? "" : "[+] " ) + lico.getHeaderLicense() );
                    getLog().debug( "     by Notice " + ( lico.getNotice().isEmpty() ? "" : "[+] " ) + lico.getNoticeLicense() );
                    getLog().debug( "\n" );
                } );
    }

    // Writes target/generated-sources/loracle/licenses/.../NOTICE.txt listing this
    // project's license plus every used, non-test dependency with its license and
    // copyright holder.
    public void generateNotice() {
        StringBuilder sb = new StringBuilder();

        if( !self.isPresent() ) {
            sb.append( "what ???\n\n" );
        } else {
            LiCo selfLico = _nn( get( _nn( self.get() ) ).get() );
            sb.append( "This is " ).append( self.map( Object::toString ).orElse( "?" ) ).append( "\n" );
            sb.append( "it is licensed under: " ).append( selfLico.getLicense().orElse( "no license") ).append( "\n" );
            selfLico.getLicenseFilenames().forEach( fn -> sb.append( "see license file: " ).append( fn ).append( "\n" ) );
            sb.append( "copyright holder: " ).append( selfLico.getCopyrightHolder().map( Object::toString ).orElse( "" ) ).append( "\n\n" );
            sb.append( "It includes the following software:\n\n" );

//
//        sb.append(
//                "   =========================================================================\n" +
//                "   ==  NOTICE file corresponding Notice file standard as described in    ==\n" +
//                "   ==  the loracle-maven-plugin ,                                        ==\n" +
//                "   ==                                                                    ==\n" +
//                "   =========================================================================\n" +
//                "\n" +
//                "   This is <loracle-maven-plugin-version foo>\n" +
//                "   copyright pfabulist.de licensed under BSD-2-clause\n" +
//                "\n" +
//                "   It includes the following software\n" +
//                "   Please read the different LICENSE files present in the de.pfabulist.loracle directory of\n" +
//                "   this distribution.\n\n\n" );
//

            // Only dependencies more relevant than "test", excluding the project itself.
            list.entrySet().stream().
                    filter( e -> entryPred( e, ( coo, lico ) -> lico.isUsed() && getScopeLevel( lico.getScope() ) < getScopeLevel( "test" ) ) ).
                    filter( e -> !_nn( e.getKey() ).equals( self.get() ) ).
                    sorted( ( a, b ) -> entryComp( a, b, ( cooA, x, cooB, y ) -> cooA.toString().compareTo( cooB.toString() ) ) ).
                    forEach( ec( ( coo, lico ) -> {
                        sb.append( coo.getArtifactId() ).append( "\n" ).
                                append( "   full name: " ).append( coo.toString() ).append( "\n" ).
                                append( "   licensed under: " ).append( lico.getLicense().map( Object::toString ).orElse( "-" ) ).append( "\n" ).
                                append( "   copyright holder: " ).append( lico.getCopyrightHolder().map( Object::toString ).orElse( "" ) ).append( "\n" );
                        lico.getLicenseFilenames().forEach( fn -> sb.append( "   see license file: " ).append( fn ).append( "\n" ) );
                        sb.append( "\n" );
                    } ) );
        }

        Path ff = Paths_.get_( "target/generated-sources/loracle/licenses/" + self.map( c -> c.getArtifactId() + "/" ).orElse( "" ) + "NOTICE.txt" );
        Files_.createDirectories( _nn( ff.getParent()) );
        Files_.write( ff, getBytes( sb.toString() ) );
    }

    // Resolves a copyright holder for every used, licensed artifact via f.
    public void getHolders( BiFunction<Coordinates, String, Optional<CopyrightHolder>> f ) {
        list.forEach( ( c, lico ) -> {
            if( lico.isUsed() ) {
                lico.getLicense().ifPresent( l -> lico.setHolder( _nn( f.apply( c, l ) ) ) );
            }
        } );
    }

    // Applies f to every used artifact (source-derived evidence pass).
    public void fromSrc( BiConsumer<Coordinates, LiCo> f ) {
        list.forEach( ( c, lico ) -> {
            if( lico.isUsed() ) {
                f.accept( c, lico );
            }
        } );
    }

    // Applies f only to used artifacts that still have no license (jar-derived pass).
    public void fromJar( BiConsumer<Coordinates, LiCo> f ) {
        list.forEach( ( c, lico ) -> {
            if( lico.isUsed() && !lico.getLicense().isPresent() ) {
                f.accept( c, lico );
            }
        } );
    }

    public void setLog( Findings log ) {
        this.log = log;
    }

    public boolean andIsOr() {
        return andIsOr;
    }

    // Changing the and/or interpretation invalidates all previously decided licenses.
    public void setAndIsOr( boolean andIsOr ) {
        if( this.andIsOr != andIsOr ) {
            list.forEach( ( c, coli ) -> coli.setLicense( MappedLicense.empty() ) ); // todo just and, or ?
        }

        this.andIsOr = andIsOr;
    }

    // Map.Entry helpers so stream pipelines can use (key, value) lambdas.
    public static <K, V> boolean entryPred( Map.Entry<K, V> entry, BiPredicate<K, V> bipred ) {
        return bipred.test( entry.getKey(), entry.getValue() );
    }

    public static <K, V> void entryConsumer( Map.Entry<K, V> entry, BiConsumer<K, V> func ) {
        func.accept( entry.getKey(), entry.getValue() );
    }

    public static <K, V> Consumer<Map.Entry<K, V>> ec( BiConsumer<K, V> func ) {
        return e -> func.accept( e.getKey(), e.getValue() );
    }

    // 4-ary function used by entryComp; the JDK stops at BiFunction.
    public interface Function4<A, B, C, D, R> {
        R apply( A a, B b, C c, D d );
    }

    public static <K, V> int entryComp( Map.Entry<K, V> a, Map.Entry<K, V> b, Function4<K, V, K, V, Integer> f4 ) {
        return f4.apply( _nn( a.getKey() ), _nn( a.getValue() ), _nn( b.getKey() ), _nn( b.getValue() ) );
    }

    // Records that artifact coo is used by `use` (no-op for unknown coordinates).
    public void addUse( Coordinates coo, String use ) {
        get( coo ).ifPresent( liCo -> liCo.useedBy.add( use ) );
    }
}
bsd-2-clause
codebulb/LambdaOmega
src/main/java/ch/codebulb/lambdaomega/package-info.java
370
/** * Contains the main classes. Abstractions are placed in a sub-package.<p> * * The following image illustrates how the most important main classes and their abstractions are related: * * <div style="clear: both; text-align: center;" class="separator"> <img border="0" src="../../../resources/classDiagram.png"> </div> */ package ch.codebulb.lambdaomega;
bsd-3-clause
infina/chocolatecraft
com/cbouton/chocolatecraft/ModItems.java
6114
/*Copyright 2012, infina (C. Bouton) All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the creator nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL infina BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/ package com.cbouton.chocolatecraft; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraftforge.fluids.Fluid; import com.cbouton.chocolatecraft.fluids.FluidChocolate; import com.cbouton.chocolatecraft.items.ItemChocolate; import com.cbouton.chocolatecraft.items.ItemChocolateBar; import com.cbouton.chocolatecraft.items.ItemChocolateMilk; import com.cbouton.chocolatecraft.items.ItemGelatin; import com.cbouton.chocolatecraft.items.ItemGraham; import com.cbouton.chocolatecraft.items.ItemHotChocolateMarshmallow; import 
com.cbouton.chocolatecraft.items.ItemHotCocoa; import com.cbouton.chocolatecraft.items.ItemMarshmallow; import com.cbouton.chocolatecraft.items.ItemPowderedSugar; import com.cbouton.chocolatecraft.items.ItemRoastedMarshmallow; import com.cbouton.chocolatecraft.items.ItemSmore; import com.cbouton.chocolatecraft.lib.ItemStatics; import cpw.mods.fml.common.registry.GameRegistry; import cpw.mods.fml.common.registry.LanguageRegistry; public class ModItems { public static void init() { Item ItemChocolate = new ItemChocolate(ItemStatics.ITEM_CHOCOLATE); Item ItemPowderedSugar = new ItemPowderedSugar(ItemStatics.ITEM_POWDEREDSUGAR); Item ItemChocolateBar = new ItemChocolateBar(ItemStatics.ITEM_CHOCOLATEBAR, 10, true); Item ItemHotCocoa = new ItemHotCocoa(ItemStatics.ITEM_HOTCOCOA, 10, false); Item ItemHotChocoMarsh = new ItemHotChocolateMarshmallow(ItemStatics.ITEM_HOTCHOCMARSH, 15, false); Item ItemMarshmallow = new ItemMarshmallow(ItemStatics.ITEM_MARSHMALLOW); Item ItemGelatin = new ItemGelatin(ItemStatics.ITEM_GELATIN); Item ItemChocolateMilk = new ItemChocolateMilk(ItemStatics.ITEM_CHOCOLATEMILK, 4, false); Item ItemRoastedMarshmallow = new ItemRoastedMarshmallow(ItemStatics.ITEM_ROASTEDMARSHMALLOW, 4, false); Item ItemGraham = new ItemGraham(ItemStatics.ITEM_GRAHAM, 4, false); Item ItemSmore = new ItemSmore(ItemStatics.ITEM_SMORE, 10, false); ItemStack chocolate = new ItemStack(ItemChocolate); ItemStack powderedsugar = new ItemStack(ItemPowderedSugar); ItemStack powderedsugar6 = new ItemStack(ItemPowderedSugar, 6); ItemStack chocolatebar20 = new ItemStack(ItemChocolateBar, 20); ItemStack chocolatebar = new ItemStack(ItemChocolateBar); ItemStack hotcocoa = new ItemStack(ItemHotCocoa); ItemStack hotcocoamarsh = new ItemStack(ItemHotChocoMarsh); ItemStack cocoabeans = new ItemStack(351,1,3); ItemStack bonemeal = new ItemStack(351,1,15); ItemStack milk = new ItemStack(335,1,0); ItemStack blazerod = new ItemStack(369,1,0); ItemStack water = new ItemStack(326,1,0); 
ItemStack sugar = new ItemStack(Item.sugar); ItemStack wheat = new ItemStack(Item.wheat); ItemStack gelatin = new ItemStack(ItemGelatin); ItemStack marshmallow = new ItemStack(ItemMarshmallow); ItemStack marshmallow4 = new ItemStack(ItemMarshmallow, 4); ItemStack roastedmarshmallow = new ItemStack(ItemRoastedMarshmallow); ItemStack graham = new ItemStack(ItemGraham); ItemStack smore = new ItemStack(ItemSmore); GameRegistry.addRecipe(chocolate, "CCC", "PMP", "CCC", 'C', cocoabeans, 'P', powderedsugar, 'M', milk); GameRegistry.addShapelessRecipe(powderedsugar6, sugar, sugar, sugar, sugar); GameRegistry.addRecipe(chocolatebar20, "CCC", "CCC", "CCC", 'C', chocolate); GameRegistry.addRecipe(hotcocoa, "CCC", " B ", " W ", 'C', chocolate, 'B', blazerod, 'W', water); GameRegistry.addRecipe(graham, " W ", " M ", " W ", 'W', wheat, 'M', milk); GameRegistry.addRecipe(smore, " G ", "CRC", " G ", 'C', chocolatebar, 'G', graham, 'R', roastedmarshmallow); GameRegistry.addShapelessRecipe(marshmallow4, powderedsugar, gelatin, sugar, sugar); GameRegistry.addShapelessRecipe(gelatin, bonemeal, water); GameRegistry.addShapelessRecipe(hotcocoamarsh, hotcocoa, marshmallow); GameRegistry.addShapelessRecipe(new ItemStack(ItemChocolateMilk), milk, chocolate); GameRegistry.addSmelting(ItemMarshmallow.itemID, roastedmarshmallow, 0.5f); LanguageRegistry.addName(ItemChocolate, "Chocolate"); LanguageRegistry.addName(ItemPowderedSugar, "Powdered Sugar"); LanguageRegistry.addName(ItemChocolateBar, "Chocolate Bar"); LanguageRegistry.addName(ItemHotCocoa, "Hot Cocoa"); LanguageRegistry.addName(ItemHotChocoMarsh, "Hot Chocolate w/ Marshmallows"); LanguageRegistry.addName(ItemMarshmallow, "Marshmallow"); LanguageRegistry.addName(ItemGelatin, "Gelatin"); LanguageRegistry.addName(ItemChocolateMilk, "Chocolate Milk"); LanguageRegistry.addName(ItemRoastedMarshmallow, "Roasted Marshmallow"); LanguageRegistry.addName(ItemGraham, "Graham Cracker"); LanguageRegistry.addName(ItemSmore, "S'more"); } }
bsd-3-clause
NCIP/lexevs-grid
LexEVSAnalyiticalService/src/org/LexGrid/LexBIG/cagrid/iso21090/converter/TextTextCustomConverter.java
1549
/* * Copyright: (c) Mayo Foundation for Medical Education and * Research (MFMER). All rights reserved. MAYO, MAYO CLINIC, and the * triple-shield Mayo logo are trademarks and service marks of MFMER. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/lexevs-grid/LICENSE.txt for details. */ package org.LexGrid.LexBIG.cagrid.iso21090.converter; import gov.nih.nci.iso21090.ST; public class TextTextCustomConverter extends AbstractCustomConverter<org.LexGrid.commonTypes.Text,org.LexGrid.iso21090.commonTypes.Text>{ @Override protected org.LexGrid.iso21090.commonTypes.Text aToB(org.LexGrid.commonTypes.Text a) { ST contentSt = new ST(); contentSt.setValue(a.getContent()); ST dataTypeSt = new ST(); dataTypeSt.setValue(a.getDataType()); org.LexGrid.iso21090.commonTypes.Text text = new org.LexGrid.iso21090.commonTypes.Text(); text.setTextValue(contentSt); text.setDataType(dataTypeSt); return text; } @Override protected org.LexGrid.commonTypes.Text bToA(org.LexGrid.iso21090.commonTypes.Text b) { org.LexGrid.commonTypes.Text text = new org.LexGrid.commonTypes.Text(); text.setContent(b.getTextValue().getValue()); if(b.getDataType() != null){ text.setDataType(b.getDataType().getValue()); } return text; } @Override protected Class<?> getAClass() { return org.LexGrid.commonTypes.Text.class; } @Override protected Class<?> getBClass() { return org.LexGrid.iso21090.commonTypes.Text.class; } }
bsd-3-clause
markles/GeoGit
src/osm/src/test/java/org/geogit/osm/internal/OSMMapOpTest.java
25855
/* Copyright (c) 2013 OpenPlans. All rights reserved. * This code is licensed under the BSD New License, available at the root * application directory. */ package org.geogit.osm.internal; import java.io.File; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import org.geogit.api.NodeRef; import org.geogit.api.RevFeature; import org.geogit.api.RevFeatureType; import org.geogit.api.plumbing.LsTreeOp; import org.geogit.api.plumbing.ResolveFeatureType; import org.geogit.api.plumbing.RevObjectParse; import org.geogit.api.porcelain.AddOp; import org.geogit.api.porcelain.CommitOp; import org.geogit.osm.internal.MappingRule.DefaultField; import org.geogit.osm.internal.log.ResolveOSMMappingLogFolder; import org.geogit.repository.WorkingTree; import org.geogit.storage.FieldType; import org.geogit.test.integration.RepositoryTestCase; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import org.opengis.feature.type.PropertyDescriptor; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.collect.Maps; public class OSMMapOpTest extends RepositoryTestCase { @Rule public ExpectedException exception = ExpectedException.none(); @Override protected void setUpInternal() throws Exception { repo.configDatabase().put("user.name", "groldan"); repo.configDatabase().put("user.email", "groldan@opengeo.org"); } @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Test public void testMappingWays() throws Exception { // import and check that we have both ways and nodes String filename = OSMImportOp.class.getResource("ways.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = 
workTree.countUnstaged("way").getCount(); assertTrue(unstaged > 0); unstaged = workTree.countUnstaged("node").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); // Define mapping Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> filter = Maps.newHashMap(); filter.put("oneway", Lists.newArrayList("yes")); fields.put("geom", new AttributeDefinition("geom", FieldType.LINESTRING)); fields.put("lit", new AttributeDefinition("lit", FieldType.STRING)); Map<String, List<String>> filterExclude = Maps.newHashMap(); MappingRule mappingRule = new MappingRule("onewaystreets", filter, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); // Check that mapping was correctly performed Optional<RevFeature> revFeature = geogit.command(RevObjectParse.class) .setRefSpec("HEAD:onewaystreets/31045880").call(RevFeature.class); assertTrue(revFeature.isPresent()); ImmutableList<Optional<Object>> values = revFeature.get().getValues(); assertEquals(4, values.size()); String wkt = "LINESTRING (7.1923367 50.7395887, 7.1923127 50.7396946, 7.1923444 50.7397419, 7.1924199 50.7397781)"; assertEquals(wkt, values.get(2).get().toString()); assertEquals("yes", values.get(1).get()); // Check that the corresponding log files have been added File osmMapFolder = geogit.command(ResolveOSMMappingLogFolder.class).call(); file = new File(osmMapFolder, "onewaystreets"); assertTrue(file.exists()); file = new File(osmMapFolder, geogit.getRepository().workingTree().getTree().getId() .toString()); assertTrue(file.exists()); } @Test public void testMappingDefaultFields() throws Exception { // import and check that we have both ways and nodes String filename = OSMImportOp.class.getResource("ways.xml").getFile(); File file = new 
File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("way").getCount(); assertTrue(unstaged > 0); unstaged = workTree.countUnstaged("node").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); // Define mapping Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> filter = Maps.newHashMap(); filter.put("oneway", Lists.newArrayList("yes")); fields.put("geom", new AttributeDefinition("geom", FieldType.LINESTRING)); fields.put("lit", new AttributeDefinition("lit", FieldType.STRING)); ArrayList<DefaultField> defaultFields = Lists.newArrayList(); defaultFields.add(DefaultField.timestamp); defaultFields.add(DefaultField.visible); MappingRule mappingRule = new MappingRule("onewaystreets", filter, null, fields, defaultFields); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); // Check that mapping was correctly performed Optional<RevFeature> revFeature = geogit.command(RevObjectParse.class) .setRefSpec("HEAD:onewaystreets/31045880").call(RevFeature.class); assertTrue(revFeature.isPresent()); ImmutableList<Optional<Object>> values = revFeature.get().getValues(); assertEquals(6, values.size()); String wkt = "LINESTRING (7.1923367 50.7395887, 7.1923127 50.7396946, 7.1923444 50.7397419, 7.1924199 50.7397781)"; assertEquals(wkt, values.get(4).get().toString()); assertEquals("yes", values.get(3).get()); assertEquals(true, values.get(2).get()); assertEquals(1318750940000L, values.get(1).get()); Optional<RevFeatureType> revFeatureType = geogit.command(ResolveFeatureType.class) .setRefSpec("HEAD:onewaystreets/31045880").call(); assertTrue(revFeatureType.isPresent()); 
ImmutableList<PropertyDescriptor> descriptors = revFeatureType.get().sortedDescriptors(); assertEquals("timestamp", descriptors.get(1).getName().toString()); assertEquals("visible", descriptors.get(2).getName().toString()); } @Test public void testMappingOnlyClosedPolygons() throws Exception { // import and check that we have both ways and nodes String filename = OSMImportOp.class.getResource("ways_restriction.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("way").getCount(); assertTrue(unstaged > 0); unstaged = workTree.countUnstaged("node").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); // Define mapping Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> filter = Maps.newHashMap(); filter.put("geom", Lists.newArrayList("closed")); fields.put("geom", new AttributeDefinition("geom", FieldType.POLYGON)); fields.put("lit", new AttributeDefinition("lit", FieldType.STRING)); Map<String, List<String>> filterExclude = Maps.newHashMap(); MappingRule mappingRule = new MappingRule("polygons", filter, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); // Check that mapping was correctly performed Optional<RevFeature> revFeature = geogit.command(RevObjectParse.class) .setRefSpec("HEAD:polygons/31045880").call(RevFeature.class); assertTrue(revFeature.isPresent()); ImmutableList<Optional<Object>> values = revFeature.get().getValues(); assertEquals(4, values.size()); String wkt = "POLYGON ((7.1923367 50.7395887, 7.1923127 50.7396946, 7.1923444 50.7397419, 7.1924199 50.7397781, 7.1923367 50.7395887))"; 
assertEquals(wkt, values.get(2).get().toString()); assertEquals("yes", values.get(1).get()); revFeature = geogit.command(RevObjectParse.class).setRefSpec("HEAD:polygons/24777894") .call(RevFeature.class); assertFalse(revFeature.isPresent()); } @Test public void testExcludePoligonsWithLessThan3Points() throws Exception { // import and check that we have both ways and nodes String filename = OSMImportOp.class.getResource("ways_restriction.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("way").getCount(); assertTrue(unstaged > 0); unstaged = workTree.countUnstaged("node").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); // Define mapping Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> filter = Maps.newHashMap(); fields.put("geom", new AttributeDefinition("geom", FieldType.POLYGON)); fields.put("lit", new AttributeDefinition("lit", FieldType.STRING)); Map<String, List<String>> filterExclude = Maps.newHashMap(); MappingRule mappingRule = new MappingRule("polygons", filter, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); // Check that mapping was correctly performed Optional<RevFeature> revFeature = geogit.command(RevObjectParse.class) .setRefSpec("HEAD:polygons/31045880").call(RevFeature.class); assertTrue(revFeature.isPresent()); revFeature = geogit.command(RevObjectParse.class).setRefSpec("HEAD:polygons/24777894") .call(RevFeature.class); assertTrue(revFeature.isPresent()); revFeature = geogit.command(RevObjectParse.class).setRefSpec("HEAD:polygons/51502277") .call(RevFeature.class); 
assertFalse(revFeature.isPresent()); } @Test public void testMappingOnlyOpenLines() throws Exception { // import and check that we have both ways and nodes String filename = OSMImportOp.class.getResource("ways_restriction.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("way").getCount(); assertTrue(unstaged > 0); unstaged = workTree.countUnstaged("node").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); // Define mapping Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> filter = Maps.newHashMap(); filter.put("geom", Lists.newArrayList("open")); fields.put("geom", new AttributeDefinition("geom", FieldType.LINESTRING)); fields.put("lit", new AttributeDefinition("lit", FieldType.STRING)); Map<String, List<String>> filterExclude = Maps.newHashMap(); MappingRule mappingRule = new MappingRule("nonclosed", filter, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); // Check that mapping was correctly performed Optional<RevFeature> revFeature = geogit.command(RevObjectParse.class) .setRefSpec("HEAD:nonclosed/31045880").call(RevFeature.class); assertFalse(revFeature.isPresent()); revFeature = geogit.command(RevObjectParse.class).setRefSpec("HEAD:nonclosed/24777894") .call(RevFeature.class); assertTrue(revFeature.isPresent()); revFeature = geogit.command(RevObjectParse.class).setRefSpec("HEAD:nonclosed/51502277") .call(RevFeature.class); assertTrue(revFeature.isPresent()); } @Test public void testMappingNodes() throws Exception { // import and check that we have nodes String filename = 
OSMImportOp.class.getResource("nodes.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("node").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); // Define mapping Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> mappings = Maps.newHashMap(); mappings.put("highway", Lists.newArrayList("bus_stop")); fields.put("geom", new AttributeDefinition("geom", FieldType.POINT)); fields.put("name", new AttributeDefinition("name", FieldType.STRING)); Map<String, List<String>> filterExclude = Maps.newHashMap(); MappingRule mappingRule = new MappingRule("busstops", mappings, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); // Check that mapping was correctly performed Optional<RevFeature> revFeature = geogit.command(RevObjectParse.class) .setRefSpec("HEAD:busstops/507464799").call(RevFeature.class); assertTrue(revFeature.isPresent()); Optional<RevFeatureType> featureType = geogit.command(ResolveFeatureType.class) .setRefSpec("HEAD:busstops/507464799").call(); assertTrue(featureType.isPresent()); ImmutableList<Optional<Object>> values = revFeature.get().getValues(); assertEquals(3, values.size()); String wkt = "POINT (7.1959361 50.739397)"; assertEquals(wkt, values.get(2).get().toString()); assertEquals(507464799l, values.get(0).get()); // Check that the corresponding log files have been added File osmMapFolder = geogit.command(ResolveOSMMappingLogFolder.class).call(); file = new File(osmMapFolder, "busstops"); assertTrue(file.exists()); file = new File(osmMapFolder, geogit.getRepository().workingTree().getTree().getId() 
.toString()); assertTrue(file.exists()); } @Test public void testMappingWithExclusion() throws Exception { // import and check that we have nodes String filename = OSMImportOp.class.getResource("nodes.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("node").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); // Define mapping Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> filter = Maps.newHashMap(); Map<String, List<String>> filterExclude = Maps.newHashMap(); filter.put("highway", Lists.newArrayList("bus_stop")); filterExclude.put("public_transport", Lists.newArrayList("stop_position")); fields.put("geom", new AttributeDefinition("geom", FieldType.POINT)); fields.put("name", new AttributeDefinition("name", FieldType.STRING)); fields.put("name", new AttributeDefinition("name", FieldType.STRING)); MappingRule mappingRule = new MappingRule("busstops", filter, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); // Check that mapping was correctly performed Optional<RevFeature> revFeature = geogit.command(RevObjectParse.class) .setRefSpec("HEAD:busstops/507464799").call(RevFeature.class); assertTrue(revFeature.isPresent()); Optional<RevFeatureType> featureType = geogit.command(ResolveFeatureType.class) .setRefSpec("HEAD:busstops/507464799").call(); assertTrue(featureType.isPresent()); ImmutableList<Optional<Object>> values = revFeature.get().getValues(); assertEquals(3, values.size()); String wkt = "POINT (7.1959361 50.739397)"; assertEquals(wkt, values.get(2).get().toString()); assertEquals(507464799l, 
values.get(0).get()); // Check that the excluded feature is missing revFeature = geogit.command(RevObjectParse.class).setRefSpec("HEAD:busstops/507464865") .call(RevFeature.class); assertFalse(revFeature.isPresent()); } @Test public void testMappingNodesWithAlias() throws Exception { // import and check that we have nodes String filename = OSMImportOp.class.getResource("nodes.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("node").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); // Define mapping Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> mappings = Maps.newHashMap(); mappings.put("highway", Lists.newArrayList("bus_stop")); fields.put("geom", new AttributeDefinition("the_geometry", FieldType.POINT)); fields.put("name", new AttributeDefinition("the_name", FieldType.STRING)); Map<String, List<String>> filterExclude = Maps.newHashMap(); MappingRule mappingRule = new MappingRule("busstops", mappings, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); // Check that mapping was correctly performed Optional<RevFeature> revFeature = geogit.command(RevObjectParse.class) .setRefSpec("HEAD:busstops/507464799").call(RevFeature.class); assertTrue(revFeature.isPresent()); Optional<RevFeatureType> featureType = geogit.command(ResolveFeatureType.class) .setRefSpec("HEAD:busstops/507464799").call(); assertTrue(featureType.isPresent()); ImmutableList<Optional<Object>> values = revFeature.get().getValues(); ImmutableList<PropertyDescriptor> descriptors = featureType.get().sortedDescriptors(); assertEquals("the_name", 
descriptors.get(1).getName().getLocalPart()); assertEquals("Gielgen", values.get(1).get()); assertEquals("the_geometry", descriptors.get(2).getName().getLocalPart()); } @Test public void testMappingwithNoGeometry() throws Exception { // Test that an exception is thrown when the mapping does not contain a geometry field String filename = OSMImportOp.class.getResource("ways.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("way").getCount(); assertTrue(unstaged > 0); unstaged = workTree.countUnstaged("node").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); // Define a wrong mapping without geometry Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> filters = Maps.newHashMap(); filters.put("oneway", Lists.newArrayList("yes")); fields.put("lit", new AttributeDefinition("lit", FieldType.STRING)); Map<String, List<String>> filterExclude = Maps.newHashMap(); MappingRule mappingRule = new MappingRule("onewaystreets", filters, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); // Try to create a mapping try { geogit.command(OSMMapOp.class).setMapping(mapping).call(); fail(); } catch (NullPointerException e) { assertTrue(e.getMessage().startsWith( "The mapping rule does not define a geometry field")); } } @Test public void testMappingWithNoFilter() throws Exception { // Test that if no filter is specified in a mapping rule, all entities pass the filter String filename = OSMImportOp.class.getResource("ways.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = 
geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("way").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> filters = Maps.newHashMap(); fields.put("lit", new AttributeDefinition("lit", FieldType.STRING)); fields.put("geom", new AttributeDefinition("geom", FieldType.LINESTRING)); Map<String, List<String>> filterExclude = Maps.newHashMap(); MappingRule mappingRule = new MappingRule("allways", filters, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); Iterator<NodeRef> allways = geogit.command(LsTreeOp.class).setReference("HEAD:allways") .call(); assertTrue(allways.hasNext()); Iterator<NodeRef> ways = geogit.command(LsTreeOp.class).setReference("HEAD:allways").call(); ArrayList<NodeRef> listWays = Lists.newArrayList(ways); ArrayList<NodeRef> listAllways = Lists.newArrayList(allways); assertEquals(listWays.size(), listAllways.size()); } @Test public void testMappingWithEmptyTagValueList() throws Exception { // Test that when no tags are specified, all entities pass the filter String filename = OSMImportOp.class.getResource("ways.xml").getFile(); File file = new File(filename); geogit.command(OSMImportOp.class).setDataSource(file.getAbsolutePath()).call(); WorkingTree workTree = geogit.getRepository().workingTree(); long unstaged = workTree.countUnstaged("way").getCount(); assertTrue(unstaged > 0); geogit.command(AddOp.class).call(); geogit.command(CommitOp.class).setMessage("msg").call(); Map<String, AttributeDefinition> fields = Maps.newHashMap(); Map<String, List<String>> filters = Maps.newHashMap(); fields.put("lit", new AttributeDefinition("lit", FieldType.STRING)); fields.put("geom", new 
AttributeDefinition("geom", FieldType.POINT)); filters.put("highway", new ArrayList<String>()); Map<String, List<String>> filterExclude = Maps.newHashMap(); MappingRule mappingRule = new MappingRule("mapped", filters, filterExclude, fields, null); List<MappingRule> mappingRules = Lists.newArrayList(); mappingRules.add(mappingRule); Mapping mapping = new Mapping(mappingRules); geogit.command(OSMMapOp.class).setMapping(mapping).call(); Iterator<NodeRef> iter = geogit.command(LsTreeOp.class).setReference("HEAD:mapped").call(); ArrayList<NodeRef> list = Lists.newArrayList(iter); assertEquals(4, list.size()); } }
bsd-3-clause
HowardLander/DataBridge
util/src/test/java/org/renci/databridge/util/AmqpMessageMiniFrameworkTest.java
3100
package org.renci.databridge.util; import java.io.*; import java.util.logging.Logger; import java.util.logging.Level; import junit.framework.Assert; import junit.framework.TestCase; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.matchers.JUnitMatchers; import org.junit.Rule; import com.rabbitmq.client.*; import org.renci.databridge.message.TestMessage; public class AmqpMessageMiniFrameworkTest { @BeforeClass public static void setUpBeforeClass () throws Exception { } @AfterClass public static void tearDownAfterClass () throws Exception { } protected String receivedString; /** * @todo The first invocation of this test will fail, and subsequent succeed, because consumer receives from the exchange the message put in it by the last invocation. This is possibly related to the same process both producing and consuming. * @todo Ordering of producer/consumer seems very sensitive. Consumer/producer will not work. Again, possibly related to same process doing both. * * AMQPComms requires specification of a primary queue, so on the sender we don't do AMQPComms.bindTheQueue (headers) to prevent producer from getting the message it just produced sent to its own queue. 
*/ @Test public void testBaseTransport () throws Exception { System.out.println ("Testing message transport..."); // set up test message (which contains headers) AMQPMessageType amt = new TestMessage (); String headers = amt.getBindHeaders (); // send the test message InputStream is2 = getClass ().getResourceAsStream ("/AmqpMessageMiniFrameworkTest-sender.conf"); AMQPComms ac = new AMQPComms (is2); // ac.bindTheQueue (headers); String messageTestString = "Test sending and receiving TestMessage"; AMQPMessage theMessage = new AMQPMessage (messageTestString.getBytes ()); Boolean persistence = true; ac.publishMessage (theMessage, headers, persistence); // start handler thread InputStream is = getClass ().getResourceAsStream ("/AmqpMessageMiniFrameworkTest-handler.conf"); AMQPMessageHandler amh = new AMQPMessageHandler () { public void handle (AMQPMessage amqpMessage, Object extra) { receivedString = new String (amqpMessage.getBytes ()); System.out.println ("AMQPMessageHandler message received: " + receivedString); } public void handleException (Exception exception) { System.out.println ("AMQPMessageHandler received exception: "); exception.printStackTrace (); } }; Logger logger = null; AMQPMessageListener aml = new AMQPMessageListener (is, amt, amh, logger); aml.start (); Thread.sleep (2000); // just to be on the safe side... aml.terminate (); aml.join (); // did we receive the message? TestCase.assertEquals ("Received message content does not match what was sent.", messageTestString, receivedString); } @Rule public ExpectedException thrown = ExpectedException.none(); }
bsd-3-clause
broadinstitute/hellbender
src/main/java/org/broadinstitute/hellbender/tools/walkers/annotator/StrandBiasBySample.java
5373
package org.broadinstitute.hellbender.tools.walkers.annotator;

import com.google.common.annotations.VisibleForTesting;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.GenotypeBuilder;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFormatHeaderLine;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.broadinstitute.barclay.help.DocumentedFeature;
import org.broadinstitute.hellbender.engine.ReferenceContext;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.genotyper.AlleleLikelihoods;
import org.broadinstitute.hellbender.utils.help.HelpConstants;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.broadinstitute.hellbender.utils.variant.GATKVCFConstants;
import org.broadinstitute.hellbender.utils.variant.GATKVCFHeaderLines;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Number of forward and reverse reads that support REF and ALT alleles
 *
 * <p>Strand bias is a type of sequencing bias in which one DNA strand is favored over the other,
 * which can result in incorrect evaluation of the amount of evidence observed for one allele vs.
 * the other. The StrandBiasBySample annotation produces read counts per allele and per strand
 * that are used by other annotation modules (FisherStrand and StrandOddsRatio) to estimate strand
 * bias using statistical approaches.</p>
 *
 * <p>This annotation produces 4 values, corresponding to the number of reads that support the following (in that order):</p>
 * <ul>
 *     <li>the reference allele on the forward strand</li>
 *     <li>the reference allele on the reverse strand</li>
 *     <li>the alternate allele on the forward strand</li>
 *     <li>the alternate allele on the reverse strand</li>
 * </ul>
 *
 * <h3>Example</h3>
 * <pre>GT:AD:GQ:PL:SB  0/1:53,51:99:1758,0,1835:23,30,33,18</pre>
 * <p>In this example, the reference allele is supported by 23 forward reads and 30 reverse reads,
 * the alternate allele is supported by 33 forward reads and 18 reverse reads.</p>
 *
 * <h3>Caveats</h3>
 * <ul>
 *     <li>This annotation can only be generated by HaplotypeCaller (it will not work when called from VariantAnnotator).</li>
 * </ul>
 *
 * <h3>Related annotations</h3>
 * <ul>
 *     <li><b><a href="https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_annotator_FisherStrand.php">FisherStrand</a></b> uses Fisher's Exact Test to evaluate strand bias.</li>
 *     <li><b><a href="https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_walkers_annotator_StrandOddsRatio.php">StrandOddsRatio</a></b> is an updated form of FisherStrand that uses a symmetric odds ratio calculation.</li>
 * </ul>
 */
@DocumentedFeature(groupName=HelpConstants.DOC_CAT_ANNOTATORS, groupSummary=HelpConstants.DOC_CAT_ANNOTATORS_SUMMARY,
        summary="Number of forward and reverse reads that support REF and ALT alleles (SB)")
public final class StrandBiasBySample implements GenotypeAnnotation, StandardMutectAnnotation {
    // Conventional modifier order: "private static final" rather than "private final static".
    private static final Logger logger = LogManager.getLogger(StrandBiasBySample.class);

    /**
     * Attaches the per-sample SB annotation (four contingency-table counts) to the genotype
     * being built. Skips (with a warning) when likelihoods are absent or the genotype is
     * uncalled, since the contingency table cannot be computed in those cases.
     */
    @Override
    public void annotate(final ReferenceContext ref,
                         final VariantContext vc,
                         final Genotype g,
                         final GenotypeBuilder gb,
                         final AlleleLikelihoods<GATKRead, Allele> likelihoods) {
        Utils.nonNull(vc);
        Utils.nonNull(g);
        Utils.nonNull(gb);

        if ( likelihoods == null || !g.isCalled() ) {
            logger.warn(AnnotationUtils.generateMissingDataWarning(vc, g, likelihoods));
            return;
        }

        // singletonList is the idiomatic (and allocation-light) form for a one-element list;
        // it replaces Arrays.asList(g.getSampleName()).
        final int[][] table = FisherStrand.getContingencyTable(likelihoods, vc, 0, Collections.singletonList(g.getSampleName()));

        gb.attribute(GATKVCFConstants.STRAND_BIAS_BY_SAMPLE_KEY, getContingencyArray(table));
    }

    // For now this is only for 2x2 contingency tables.
    private static final int ARRAY_DIM = 2;

    /**
     * Helper function to turn the FisherStrand 2x2 table into the SB annotation array.
     * @param table the 2x2 table used by the FisherStrand annotation
     * @return the array used by the per-sample Strand Bias annotation, flattened row-major:
     *         [refFwd, refRev, altFwd, altRev]
     * @throws IllegalArgumentException if the table is not 2x2
     */
    @VisibleForTesting
    static List<Integer> getContingencyArray(final int[][] table) {
        if(table.length != ARRAY_DIM || table[0].length != ARRAY_DIM) {
            throw new IllegalArgumentException("Expecting a " + ARRAY_DIM + "x" + ARRAY_DIM + " strand bias table.");
        }

        final List<Integer> list = new ArrayList<>(ARRAY_DIM * ARRAY_DIM);
        list.add(table[0][0]);
        list.add(table[0][1]);
        list.add(table[1][0]);
        list.add(table[1][1]);
        return list;
    }

    @Override
    public List<String> getKeyNames() { return Collections.singletonList(GATKVCFConstants.STRAND_BIAS_BY_SAMPLE_KEY); }

    /** @return the ALT-forward count from a flattened [refFwd, refRev, altFwd, altRev] table. */
    public static int getAltForwardCountFromFlattenedContingencyTable(final int[] contingencyTable) {
        return contingencyTable[ARRAY_DIM];
    }

    /** @return the ALT-reverse count from a flattened [refFwd, refRev, altFwd, altRev] table. */
    public static int getAltReverseCountFromFlattenedContingencyTable(final int[] contingencyTable) {
        return contingencyTable[ARRAY_DIM+1];
    }
}
bsd-3-clause
ColonelThirtyTwo/FRC-Team-3676-Robot-Code
src/net/aisd/martin/frc/commands/ArcadePrecisionDriveCommand.java
1377
package net.aisd.martin.frc.commands; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.command.Command; import net.aisd.martin.frc.Subsystems; /** * * @author Colonel Thirty Two */ public class ArcadePrecisionDriveCommand extends Command { public ArcadePrecisionDriveCommand() { super(ArcadePrecisionDriveCommand.class.getName()); setInterruptible(true); requires(Subsystems.drivesystem); } protected void initialize() { } protected void execute() { Joystick stickCoarse = Subsystems.joystick1system.joystick; Joystick stickFine = Subsystems.joystick2system.joystick; //If the control is set to fine then use fine controls if(stickFine.getRawButton(5)) { //left-y=2 , right-y=5 , left-x=1 , right-x=4 // 3 is the triggers? Subsystems.drivesystem.driver.setMaxOutput(.5); Subsystems.drivesystem.driver.arcadeDrive(stickFine, 5, stickFine, 4); } else if (stickCoarse.getRawButton(6)) { Subsystems.drivesystem.driver.setMaxOutput(.5); Subsystems.drivesystem.driver.arcadeDrive(stickCoarse, 2, stickCoarse, 1); } else { Subsystems.drivesystem.driver.setMaxOutput(1); Subsystems.drivesystem.driver.arcadeDrive(stickCoarse, 5, stickCoarse, 4); } } protected boolean isFinished() { return false; } protected void end() { } protected void interrupted() { } }
bsd-3-clause
motech/motech-server-pillreminder
modules/message-campaign/message-campaign/src/main/java/org/motechproject/server/messagecampaign/scheduler/AbsoluteCampaignSchedulerService.java
2356
package org.motechproject.server.messagecampaign.scheduler;

import org.joda.time.LocalDate;
import org.motechproject.event.MotechEvent;
import org.motechproject.scheduler.MotechSchedulerService;
import org.motechproject.scheduler.domain.RunOnceSchedulableJob;
import org.motechproject.server.messagecampaign.EventKeys;
import org.motechproject.server.messagecampaign.dao.AllMessageCampaigns;
import org.motechproject.server.messagecampaign.domain.campaign.AbsoluteCampaign;
import org.motechproject.server.messagecampaign.domain.campaign.CampaignEnrollment;
import org.motechproject.server.messagecampaign.domain.message.AbsoluteCampaignMessage;
import org.motechproject.server.messagecampaign.domain.message.CampaignMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.Map;

import static org.motechproject.commons.date.util.DateUtil.newDateTime;

/**
 * Scheduler for absolute campaigns: each campaign message carries a fixed
 * calendar date, and a run-once job is scheduled for that date at the
 * enrollment's delivery time.
 */
@Component
public class AbsoluteCampaignSchedulerService extends CampaignSchedulerService<AbsoluteCampaignMessage, AbsoluteCampaign> {

    @Autowired
    public AbsoluteCampaignSchedulerService(MotechSchedulerService schedulerService, AllMessageCampaigns allMessageCampaigns) {
        super(schedulerService, allMessageCampaigns);
    }

    /**
     * Schedules one run-once SEND_MESSAGE job for the given message at
     * (message date, delivery time for this enrollment).
     */
    @Override
    protected void scheduleMessageJob(CampaignEnrollment enrollment, CampaignMessage campaignMessage) {
        Map<String, Object> eventParams = jobParams(campaignMessage.messageKey(), enrollment);
        MotechEvent sendEvent = new MotechEvent(EventKeys.SEND_MESSAGE, eventParams);

        LocalDate messageDate = ((AbsoluteCampaignMessage) campaignMessage).date();
        RunOnceSchedulableJob job = new RunOnceSchedulableJob(
                sendEvent,
                newDateTime(messageDate, deliverTimeFor(enrollment, campaignMessage)).toDate());

        getSchedulerService().scheduleRunOnceJob(job);
    }

    /**
     * Unschedules every run-once job that was created for this enrollment's
     * campaign messages.
     */
    @Override
    public void stop(CampaignEnrollment enrollment) {
        String campaignName = enrollment.getCampaignName();
        AbsoluteCampaign campaign = (AbsoluteCampaign) getAllMessageCampaigns().get(campaignName);
        for (AbsoluteCampaignMessage message : campaign.getMessages()) {
            String jobId = messageJobIdFor(message.messageKey(), enrollment.getExternalId(), campaignName);
            getSchedulerService().safeUnscheduleRunOnceJob(EventKeys.SEND_MESSAGE, jobId);
        }
    }
}
bsd-3-clause
iotsap/ngsi10testserver
TestClientToNGSI10TestServer/src/test/java/com/sap/research/fiware/ngsi10/test/querycontext/TestUnallowedMethodsOnQueryContext.java
1069
package com.sap.research.fiware.ngsi10.test.querycontext;

import java.net.HttpURLConnection;

import org.junit.Test;

import com.sap.research.fiware.ngsi10.test.Constants;
import com.sap.research.fiware.ngsi10.test.HttpRequestBuilder;
import com.sap.research.fiware.ngsi10.test.TestHelpers;

/**
 * Verifies that the queryContext resource rejects every HTTP method other than
 * POST with "405 Method Not Allowed" (the second assertion argument names the
 * one method the resource does allow: POST).
 */
public class TestUnallowedMethodsOnQueryContext {

	@Test
	public void getOnQueryContextMustResultInNotAllowed() throws Exception {
		// GET must be refused; only POST is permitted on this resource.
		HttpURLConnection connection =
				HttpRequestBuilder.GET.url(Constants.URL_QUERY_CONTEXT).build();
		TestHelpers.assertMethodNotAllowed(connection, "POST");
	}

	@Test
	public void putOnQueryContextMustResultInNotAllowed() throws Exception {
		// PUT must be refused; only POST is permitted on this resource.
		HttpURLConnection connection =
				HttpRequestBuilder.PUT.url(Constants.URL_QUERY_CONTEXT).build();
		TestHelpers.assertMethodNotAllowed(connection, "POST");
	}

	@Test
	public void deleteOnQueryContextMustResultInNotAllowed() throws Exception {
		// DELETE must be refused; only POST is permitted on this resource.
		HttpURLConnection connection =
				HttpRequestBuilder.DELETE.url(Constants.URL_QUERY_CONTEXT).build();
		TestHelpers.assertMethodNotAllowed(connection, "POST");
	}
}
bsd-3-clause
MjAbuz/carrot2
workbench/org.carrot2.workbench.core/src/org/carrot2/workbench/core/helpers/DisposeBin.java
6512
/*
 * Carrot2 project.
 *
 * Copyright (C) 2002-2015, Dawid Weiss, Stanisław Osiński.
 * All rights reserved.
 *
 * Refer to the full license file "carrot2.LICENSE"
 * in the root folder of the repository checkout or at:
 * http://www.carrot2.org/carrot2.LICENSE
 */

package org.carrot2.workbench.core.helpers;

import java.util.ArrayList;
import java.util.HashMap;

import org.carrot2.workbench.editors.IAttributeEventProvider;
import org.carrot2.workbench.editors.IAttributeListener;
import org.eclipse.core.commands.operations.OperationStatus;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Plugin;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.util.IPropertyChangeListener;
import org.eclipse.jface.viewers.IPostSelectionProvider;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.swt.graphics.Resource;
import org.eclipse.swt.widgets.Widget;
import org.eclipse.ui.actions.ActionFactory.IWorkbenchAction;
import org.eclipse.ui.forms.widgets.FormToolkit;

import org.carrot2.shaded.guava.common.collect.Lists;

/**
 * A collection of disposable resources (of multiple types that do not share a common
 * interface). The dispose bin can also register various kinds of listeners and unregister
 * them automatically at {@link #dispose()}.
 */
public final class DisposeBin
{
    /** Adapter that knows how to dispose one object of type T. */
    private static abstract class Disposer<T>
    {
        protected final T t;

        Disposer(T t)
        {
            this.t = t;
        }

        public abstract void dispose();
    }

    private static class ResourceDisposer extends Disposer<Resource>
    {
        public ResourceDisposer(Resource r)
        {
            super(r);
        }

        public void dispose()
        {
            t.dispose();
        }
    }

    private static class FormToolkitDisposer extends Disposer<FormToolkit>
    {
        public FormToolkitDisposer(FormToolkit t)
        {
            super(t);
        }

        public void dispose()
        {
            t.dispose();
        }
    }

    private static class WidgetDisposer extends Disposer<Widget>
    {
        public WidgetDisposer(Widget t)
        {
            super(t);
        }

        public void dispose()
        {
            t.dispose();
        }
    }

    private static class ActionDisposer extends Disposer<IWorkbenchAction>
    {
        public ActionDisposer(IWorkbenchAction t)
        {
            super(t);
        }

        public void dispose()
        {
            t.dispose();
        }
    }

    /** A (registrar, listener) pair remembered for unregistration at dispose time. */
    private static class ListenerPair
    {
        public final Object registrar;
        public final Object listener;

        public ListenerPair(Object registrar, Object listener)
        {
            this.registrar = registrar;
            this.listener = listener;
        }
    }

    /** Registered disposable objects, keyed by the object itself (deduplicates re-adds). */
    private final HashMap<Object, Disposer<?>> resources = new HashMap<Object, Disposer<?>>();

    /** Listener registrations to be undone at {@link #dispose()}. */
    private final ArrayList<ListenerPair> listeners = Lists.newArrayList();

    /** Optional plugin used for logging disposal failures (may be null). */
    private final Plugin plugin;

    public DisposeBin()
    {
        this(null);
    }

    public DisposeBin(Plugin plugin)
    {
        this.plugin = plugin;
    }

    public void add(Resource... resources)
    {
        for (Resource r : resources)
        {
            this.resources.put(r, new ResourceDisposer(r));
        }
    }

    public void add(FormToolkit toolkit)
    {
        this.resources.put(toolkit, new FormToolkitDisposer(toolkit));
    }

    public void add(Widget w)
    {
        this.resources.put(w, new WidgetDisposer(w));
    }

    public void add(IWorkbenchAction action)
    {
        this.resources.put(action, new ActionDisposer(action));
    }

    /**
     * Disposes all registered resources and unregisters all tracked listeners.
     * Failures are logged (when a plugin was supplied) and do not stop the
     * remaining clean-up.
     */
    public void dispose()
    {
        for (Disposer<?> disposer : resources.values())
        {
            try
            {
                disposer.dispose();
            }
            catch (Throwable e)
            {
                if (plugin != null)
                {
                    IStatus status = new OperationStatus(IStatus.ERROR, plugin
                        .getBundle().getSymbolicName(), -1, "Resource disposal failed.", e);
                    plugin.getLog().log(status);
                }
            }
        }
        resources.clear();

        for (ListenerPair p : listeners)
        {
            try
            {
                if (p.registrar instanceof IPreferenceStore)
                {
                    ((IPreferenceStore) p.registrar)
                        .removePropertyChangeListener((IPropertyChangeListener) p.listener);
                }
                else if (p.registrar instanceof IAttributeEventProvider)
                {
                    ((IAttributeEventProvider) p.registrar)
                        .removeAttributeListener((IAttributeListener) p.listener);
                }
                else if (p.registrar instanceof IPostSelectionProvider)
                {
                    ((IPostSelectionProvider) p.registrar)
                        .removePostSelectionChangedListener((ISelectionChangedListener) p.listener);
                }
                else
                {
                    throw new RuntimeException("Unhandled registrar: " + p.registrar);
                }
            }
            catch (Throwable t)
            {
                if (plugin != null)
                {
                    IStatus status = new OperationStatus(IStatus.ERROR, plugin
                        .getBundle().getSymbolicName(), -1, "Listener disposal failed.", t);
                    plugin.getLog().log(status);
                }
            }
        }
        listeners.clear();
    }

    /** Registers the listener and remembers the pair so dispose() can unregister it. */
    public void registerPropertyChangeListener(IPreferenceStore provider,
        IPropertyChangeListener l)
    {
        provider.addPropertyChangeListener(l);
        listeners.add(new ListenerPair(provider, l));
    }

    /** Registers the listener and remembers the pair so dispose() can unregister it. */
    public void registerAttributeChangeListener(IAttributeEventProvider provider,
        IAttributeListener l)
    {
        provider.addAttributeListener(l);
        listeners.add(new ListenerPair(provider, l));
    }

    /** Registers the listener and remembers the pair so dispose() can unregister it. */
    public void registerPostSelectionChangedListener(IPostSelectionProvider searchEditor,
        ISelectionChangedListener l)
    {
        searchEditor.addPostSelectionChangedListener(l);
        // BUG FIX: this registration was previously never recorded, so dispose()
        // could not remove the listener (a listener leak). dispose() already
        // knows how to unregister from IPostSelectionProvider, so record the pair
        // exactly like the other register* methods do.
        listeners.add(new ListenerPair(searchEditor, l));
    }
}
bsd-3-clause
marlonpg/EarthDefenders
server/RankSystem/src/com/gambasoft/service/RankSystemService.java
3753
package com.gambasoft.service;

import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;

import javax.ws.rs.core.Response;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;

import com.gambasoft.dao.RankSystemDAO;
import com.gambasoft.vo.Score;

/**
 * REST-facing service for the rank system: validates and stores submitted
 * scores (protected by a shared-secret MD5 hash) and returns rank listings.
 */
public class RankSystemService {

    static final Logger LOGGER = Logger.getLogger(RankSystemService.class);

    // Shared secret appended to (name + score) before hashing; must match the client.
    private final static String SECURITY = "PutYouRSecurityCode";

    /**
     * Validates and stores a score submission.
     *
     * <p>The request is accepted only if {@code hash} equals
     * {@code md5hex(name + score + SECURITY)}, i.e. the client knew the shared secret.</p>
     *
     * <p>SECURITY NOTE (review): MD5 is cryptographically broken and the hex-string
     * comparison below is not constant-time. This is kept for wire compatibility
     * with existing clients; consider an HMAC (e.g. HMAC-SHA256) and
     * {@code MessageDigest.isEqual} if the protocol can be revised.</p>
     *
     * @return 200 on success, 401 on hash mismatch, 400 on missing parameters
     * @throws NoSuchAlgorithmException if the JVM lacks MD5 (effectively never)
     */
    public static Response addScore(final String game, final String name, final String score,
            final String countrycode, final String hash) throws NoSuchAlgorithmException {
        final String METHOD = "addScore";
        LOGGER.info(String.format("[CLASS] ==== [METHOD] - %s ==== [PARAMETERS] - (academic) = %s,%s,%s,%s,%s",
                METHOD, game, name, score, countrycode, hash));

        Response response = paramValidation(game, name, score, countrycode, hash);
        if (response == null) {
            String securityString = name + score + SECURITY;
            MessageDigest md = MessageDigest.getInstance("MD5");
            md.update(securityString.getBytes());
            byte[] byteData = md.digest();

            // Convert the digest bytes to lowercase hex. StringBuilder replaces the
            // original StringBuffer: no synchronization is needed for this local.
            StringBuilder sb = new StringBuilder();
            for (int i = 0; i < byteData.length; i++) {
                sb.append(Integer.toString((byteData[i] & 0xff) + 0x100, 16).substring(1));
            }
            // Was System.out.println; use the logger and keep the expected digest out of stdout.
            LOGGER.debug("Digest(in hex format):: " + sb.toString());

            if (sb.toString().equals(hash)) {
                RankSystemDAO.getRankSystemDAO().addScore(game, name, score, countrycode);
                response = Response.status(200).header("Access-Control-Allow-Origin", "*").build();
            } else {
                response = Response.status(401).header("Access-Control-Allow-Origin", "*").build();
            }
        }
        return response;
    }

    /**
     * Returns a 400 response naming the first blank required parameter,
     * or {@code null} when all parameters are present.
     */
    private static Response paramValidation(final String game, final String name, final String score,
            final String countrycode, final String hash) {
        final String METHOD = "paramValidation";
        LOGGER.info(String.format("[CLASS] ==== [METHOD] - %s ==== [PARAMETERS] - (academic) = %s,%s,%s,%s,%s",
                METHOD, game, name, score, countrycode, hash));

        if (StringUtils.isBlank(game)) {
            return Response.status(400).header("Access-Control-Allow-Origin", "*").entity("Parameter game is required").build();
        } else if (StringUtils.isBlank(name)) {
            return Response.status(400).header("Access-Control-Allow-Origin", "*").entity("Parameter name is required").build();
        } else if (StringUtils.isBlank(score)) {
            return Response.status(400).header("Access-Control-Allow-Origin", "*").entity("Parameter score is required").build();
        } else if (StringUtils.isBlank(countrycode)) {
            return Response.status(400).header("Access-Control-Allow-Origin", "*").entity("Parameter countrycode is required").build();
        } else if (StringUtils.isBlank(hash)) {
            return Response.status(400).header("Access-Control-Allow-Origin", "*").entity("Parameter hash is required").build();
        }
        return null;
    }

    /** Returns the top-N rank as a preformatted string wrapped in a 200 response. */
    public static Response getRankTopX(final int numberOfBestPlayers) {
        final String METHOD = "getScore";
        LOGGER.info(String.format("[CLASS] ==== [METHOD] - %s ==== [PARAMETERS]", METHOD));
        String rank = RankSystemDAO.getRankSystemDAO().getRankTopX(numberOfBestPlayers);
        return Response.status(200).header("Access-Control-Allow-Origin", "*").entity(rank).build();
    }

    /** Returns the top-N rank as a list of Score entities (serialized as JSON) in a 200 response. */
    public static Response getRankJSON(final int numberOfBestPlayers) {
        final String METHOD = "getScore";
        LOGGER.info(String.format("[CLASS] ==== [METHOD] - %s ==== [PARAMETERS]", METHOD));
        List<Score> scores = RankSystemDAO.getRankSystemDAO().getRankJSON(numberOfBestPlayers);
        return Response.status(200).header("Access-Control-Allow-Origin", "*").entity(scores).build();
    }
}
bsd-3-clause
apptentive/apptentive-android
apptentive/src/testCommon/java/com/apptentive/android/sdk/network/MockHttpURLConnection.java
4005
/*
 * Copyright (c) 2017, Apptentive, Inc. All Rights Reserved.
 * Please refer to the LICENSE file for the terms and conditions
 * under which redistribution and use of this file is permitted.
 */

package com.apptentive.android.sdk.network;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.ProtocolException;
import java.util.HashMap;
import java.util.Map;

/**
 * In-memory {@link HttpURLConnection} test double. Responses come from a pluggable
 * {@link ResponseHandler}; no network I/O ever happens. Error injection is supported
 * via the {@code throwsExceptionOn*} flags.
 *
 * NOTE(review): {@link #getResponseMessage()} only works correctly AFTER
 * {@link #getResponseCode()} has been called, because it resolves the message
 * from {@code lastResponseCode}, which that call sets. Tests relying on the
 * message must preserve that call order.
 */
public class MockHttpURLConnection extends HttpURLConnection {
	// Reason phrases for the subset of status codes the tests use.
	private static final Map<Integer, String> statusLookup;

	static {
		statusLookup = new HashMap<>();
		statusLookup.put(200, "OK");
		statusLookup.put(204, "No Content");
		statusLookup.put(400, "Bad Request");
		statusLookup.put(500, "Internal Server Error");
	}

	// When true, getResponseCode() throws IOException (simulated connect failure).
	boolean throwsExceptionOnConnect;
	// When true, disconnect() throws RuntimeException (simulated disconnect failure).
	boolean throwsExceptionOnDisconnect;

	private ResponseHandler responseHandler = new DefaultResponseHandler(200, "", ""); // HTTP OK by default

	private int lastResponseCode; // remember the last returned HTTP response code to properly resolve response message

	protected MockHttpURLConnection() {
		// No real URL: this connection never touches the network.
		super(null);
	}

	@Override
	public boolean usingProxy() {
		return false;
	}

	@Override
	public void connect() throws IOException {
		connected = true;
	}

	@Override
	public void disconnect() {
		connected = false;
		if (throwsExceptionOnDisconnect) {
			throw new RuntimeException("Disconnection error");
		}
	}

	/** Streams the handler's response data as UTF-8 bytes. */
	@Override
	public InputStream getInputStream() throws IOException {
		return new ByteArrayInputStream(responseHandler.getResponseData().getBytes("UTF-8"));
	}

	/** Streams the handler's error data as UTF-8 bytes. UTF-8 is always present, hence AssertionError. */
	@Override
	public InputStream getErrorStream() {
		try {
			return new ByteArrayInputStream(responseHandler.getErrorData().getBytes("UTF-8"));
		} catch (UnsupportedEncodingException e) {
			throw new AssertionError(e);
		}
	}

	/** Request bodies are discarded: callers get a throwaway in-memory sink. */
	@Override
	public OutputStream getOutputStream() throws IOException {
		return new ByteArrayOutputStream();
	}

	/**
	 * Returns the handler's response code, or throws if connect-failure injection
	 * is enabled. Also records the code for {@link #getResponseMessage()}.
	 */
	@Override
	public int getResponseCode() throws IOException {
		if (throwsExceptionOnConnect) {
			throw new IOException("Connection error");
		}
		lastResponseCode = responseHandler.getResponseCode();
		return lastResponseCode;
	}

	/** Reason phrase for the code last returned by getResponseCode() (see class note on call order). */
	@Override
	public String getResponseMessage() throws IOException {
		return statusLookup.get(lastResponseCode);
	}

	@Override
	public void setRequestMethod(String method) throws ProtocolException {
	}

	// Convenience setters; only valid while the default handler is installed
	// (they cast responseHandler to DefaultResponseHandler).
	public void setMockResponseCode(int mockResponseCode) {
		((DefaultResponseHandler) responseHandler).setResponseCode(mockResponseCode);
	}

	public void setMockResponseData(String responseData) {
		((DefaultResponseHandler) responseHandler).setResponseData(responseData);
	}

	public void setMockResponseHandler(ResponseHandler handler) {
		responseHandler = handler;
	}

	/** Strategy interface supplying the mocked status code, body, and error body. */
	public interface ResponseHandler {
		int getResponseCode();

		String getResponseData();

		String getErrorData();
	}

	/** Simple mutable ResponseHandler with fluent setters; the default installed handler. */
	public static class DefaultResponseHandler implements ResponseHandler {
		private int responseCode;
		private String responseData;
		private String errorData;

		public DefaultResponseHandler() {
			this(200, "", "");
		}

		public DefaultResponseHandler(int responseCode, String responseData, String errorData) {
			this.responseCode = responseCode;
			this.responseData = responseData;
			this.errorData = errorData;
		}

		public DefaultResponseHandler setResponseCode(int responseCode) {
			this.responseCode = responseCode;
			return this;
		}

		@Override
		public int getResponseCode() {
			return responseCode;
		}

		public DefaultResponseHandler setResponseData(String responseData) {
			this.responseData = responseData;
			return this;
		}

		@Override
		public String getResponseData() {
			return responseData;
		}

		public DefaultResponseHandler setErrorData(String errorData) {
			this.errorData = errorData;
			return this;
		}

		@Override
		public String getErrorData() {
			return errorData;
		}
	}
}
bsd-3-clause
jjfiv/chai
src/main/java/ciir/jfoley/chai/collections/util/SetFns.java
2479
package ciir.jfoley.chai.collections.util;

import ciir.jfoley.chai.lang.Module;
import gnu.trove.set.hash.TIntHashSet;

import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Static set operations (intersection, union, difference) and set-similarity
 * measures (Jaccard, Dice) over java.util collections and Trove int sets.
 *
 * Empty-input convention: every similarity measure in this class returns 0
 * when both inputs are empty (rather than NaN from 0/0 division).
 *
 * @author jfoley.
 */
public class SetFns extends Module {

  /** Intersection of all the given collections; empty input list yields the empty set. */
  public static <T> Set<T> intersection(List<? extends Collection<? extends T>> sets) {
    if(sets.isEmpty()) return Collections.emptySet();
    Set<T> accum = new HashSet<>();
    accum.addAll(sets.get(0));
    for(int i=1; i<sets.size(); i++) {
      accum = intersection(accum, sets.get(i));
    }
    return accum;
  }

  /** Pairwise intersection; iterates the smaller side and probes the larger for speed. */
  public static <T> Set<T> intersection(Collection<? extends T> lhs, Collection<? extends T> rhs) {
    Collection<? extends T> minSet = lhs.size() < rhs.size() ? lhs : rhs;
    Collection<? extends T> maxSet = lhs.size() < rhs.size() ? rhs : lhs;
    HashSet<T> isect = new HashSet<>();
    for(T x : minSet) {
      if(maxSet.contains(x)) {
        isect.add(x);
      }
    }
    return isect;
  }

  /** Union of the two collections as a new HashSet. */
  public static <T> Set<T> union(Collection<T> lhs, Collection<T> rhs) {
    HashSet<T> results = new HashSet<>(lhs.size() + rhs.size());
    results.addAll(lhs);
    results.addAll(rhs);
    return results;
  }

  /**
   * Jaccard index |A∩B| / |A∪B| for Trove int sets.
   * Returns 0 when both sets are empty, matching the generic overload below
   * (previously this case divided 0/0 and returned NaN).
   */
  public static double jaccardIndex(TIntHashSet a, TIntHashSet b) {
    TIntHashSet union = new TIntHashSet(a);
    union.addAll(b);
    double unionSize = union.size();
    if(unionSize == 0) return 0; // both empty: consistent with jaccardIndex(Collection, Collection)

    // count up intersection:
    AtomicInteger count = new AtomicInteger();
    a.forEach(x -> {
      if(b.contains(x)) {
        count.incrementAndGet();
      }
      return true;
    });
    double isectSize = count.get();

    return isectSize / unionSize;
  }

  /** Jaccard index |A∩B| / |A∪B|; 0 when both inputs are empty. */
  public static <T> double jaccardIndex(Collection<T> a, Collection<T> b) {
    double unionSize = union(a,b).size();
    if(unionSize == 0) return 0;
    double isectSize = intersection(a, b).size();
    return isectSize / unionSize;
  }

  /** Jaccard distance = 1 - Jaccard index. */
  public static <T> double jaccardDistance(Collection<T> a, Collection<T> b) {
    return 1.0 - jaccardIndex(a, b);
  }

  /**
   * Dice coefficient 2|A∩B| / (|A|+|B|).
   * Returns 0 when both inputs are empty (previously 0/0 produced NaN),
   * consistent with this class's Jaccard convention.
   */
  public static <T> double diceCoefficient(Collection<T> a, Collection<T> b) {
    int denominator = a.size() + b.size();
    if(denominator == 0) return 0;
    double isectSize = intersection(a, b).size();
    return 2 * isectSize / denominator;
  }

  /** Return the items in A that are not in B. */
  public static <T> Set<T> difference(Collection<T> A, Collection<T> B) {
    HashSet<T> results = new HashSet<>(A);
    results.removeAll(B);
    return results;
  }

  /** True if any needle is present in the haystack. */
  public static <T> boolean containsAny(Set<T> haystack, Collection<? extends T> needles) {
    for (T needle : needles) {
      if(haystack.contains(needle)) return true;
    }
    return false;
  }
}
bsd-3-clause
DataBiosphere/terra-cloud-resource-lib
google-iam/src/test/java/bio/terra/cloudres/google/iam/ServiceAccountNameTest.java
982
package bio.terra.cloudres.google.iam; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrows; import org.junit.jupiter.api.Tag; import org.junit.jupiter.api.Test; @Tag("unit") public class ServiceAccountNameTest { @Test public void name() { String name = "projects/my-project/serviceAccounts/foo@bar.com"; ServiceAccountName serviceAccountName = ServiceAccountName.fromNameFormat(name); assertEquals("my-project", serviceAccountName.projectId()); assertEquals("foo@bar.com", serviceAccountName.email()); assertEquals(name, serviceAccountName.formatName()); } @Test public void invalidName() { assertThrows(IllegalArgumentException.class, () -> ServiceAccountName.fromNameFormat("foo")); } @Test public void emailAccountId() { assertEquals( "foo@my-project.iam.gserviceaccount.com", ServiceAccountName.emailFromAccountId("foo", "my-project")); } }
bsd-3-clause
NCIP/catissue-security-manager
software/SecurityManager/src/test/java/edu/wustl/securityManager/dbunit/test/TestSecurityManager.java
18371
/*L * Copyright Washington University in St. Louis * Copyright SemanticBits * Copyright Persistent Systems * Copyright Krishagni * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/catissue-security-manager/LICENSE.txt for details. */ package edu.wustl.securityManager.dbunit.test; import java.util.List; import org.junit.Test; import edu.wustl.common.domain.AbstractDomainObject; import edu.wustl.common.util.logger.Logger; import edu.wustl.security.exception.SMException; import edu.wustl.security.global.Constants; import edu.wustl.security.manager.SecurityManager; import edu.wustl.security.manager.SecurityManagerFactory; import gov.nih.nci.security.authorization.domainobjects.Role; import gov.nih.nci.security.authorization.domainobjects.User; /** * Test cases for SecurityManager class. * @author deepti_shelar */ public class TestSecurityManager extends SecurityManagerBaseTestCase { /** * logger Logger - Generic logger. */ protected static org.apache.log4j.Logger logger = Logger.getLogger(SecurityManager.class); public void setUp() throws Exception { count++; securityManager = SecurityManagerFactory.getSecurityManager(); removeAllUsers(); insertSampleCSMUser(); super.setUp(); } /** * Inserts a sample User. * @throws SMException e */ private void insertSampleCSMUser() throws SMException { User user = new User(); String newVal = loginName + count; user.setDepartment(newVal); user.setEmailId(newVal + "@test.com"); user.setFirstName(newVal); user.setLoginName(newVal); user.setOrganization(newVal); user.setPassword(newVal); user.setTitle(newVal); user.setLastName(newVal); securityManager.createUser(user); } /** * assigns the given group name to the user with the given login name. 
* @param loginName login name * @param groupName grp name * @throws SMException e */ private void assignGroupToUser(String loginName, String groupName) throws SMException { User user = securityManager.getUser(loginName); String userId = user.getUserId().toString(); securityManager.assignUserToGroup(groupName, userId); } /** * Removes all users from the system. */ private void removeAllUsers() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); securityManager.removeUser(userId.toString()); } } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * Returns the user matching with the login name. * @param loginName * name * @return User */ private User getUserByLoginName(String loginName) { User user = null; try { user = securityManager.getUser(loginName); } catch (SMException e) { logger.error(e.getStackTrace()); } return user; } /** * Test GetProtectionGroupByName. */ @Test public void testGetProtectionGroupByName() { AbstractDomainObject obj = new edu.wustl.catissuecore.domain.User(); obj.setId(Long.valueOf(Constants.INDEX_ONE)); try { String[] protGrp = securityManager.getProtectionGroupByName(obj); assertEquals("ADMINISTRATORS_DATA_GROUP", protGrp[0]); assertNotNull(protGrp); } catch (SMException e) { logger.error(e.getStackTrace()); } } /** * Test Create user method. */ public void testCreateUser() { User user = new User(); String newVal = loginName + ++count; user.setDepartment(newVal); user.setEmailId(newVal + "@test.com"); user.setFirstName(newVal); user.setLoginName(newVal); user.setOrganization(newVal); user.setPassword(newVal); user.setTitle(newVal); try { securityManager.createUser(user); User addedUser = getUserByLoginName(newVal); assertEquals(newVal, addedUser.getLoginName()); } catch (SMException e) { logger.error(e.getStackTrace()); } } /** * Test Create user method. 
*/ public void testCreateUserException() { User user = new User(); String newVal = loginName + count; user.setDepartment(newVal); user.setEmailId(newVal + "@test.com"); user.setFirstName(newVal); user.setLoginName(newVal); user.setOrganization(newVal); user.setPassword(newVal); user.setTitle(newVal); try { securityManager.createUser(user); User addedUser = getUserByLoginName(newVal); assertEquals(newVal, addedUser.getLoginName()); } catch (SMException e) { logger.error("User already exists so cant create a new user" + e.getStackTrace()); } } /** * Test getUser method. */ public void testGetUser() { try { User user = securityManager.getUser(loginName + count); assertEquals(user.getLoginName(), loginName + count); } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * Test getUser method. */ public void testGetUserException() { User user = null; try { user = securityManager.getUser(loginName + count); } catch (Exception e) { assertNull(user); logger.error(e.getStackTrace()); } } /** * test RemoveUser. */ public void testRemoveUser() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); securityManager.removeUser(userId.toString()); } allUsers = securityManager.getUsers(); assertEquals(allUsers.size(), 0); } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * test RemoveUser. */ public void testRemoveUserException() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); securityManager.removeUser(userId.toString() + "fail"); } allUsers = securityManager.getUsers(); assertEquals(allUsers.size(), 0); } catch (Exception e) { System.out.println("cant remove user"); logger.error(e.getStackTrace()); } } /** * test RemoveUser. 
*/ public void testRemoveUserException1() { try { List<User> allUsers = securityManager.getUsers(); System.setProperty("gov.nih.nci.security.configFile", null); for (User user : allUsers) { Long userId = user.getUserId(); securityManager.removeUser(userId.toString() + "fail"); } allUsers = securityManager.getUsers(); assertEquals(allUsers.size(), 0); } catch (Exception e) { System.out.println("cant remove user"); logger.error(e.getStackTrace()); } } /** * test getRoles(). */ public void testGetRoles() { try { List<Role> roles = securityManager.getRoles(); assertEquals(roles.size(), 4); } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * test getRoles(). */ public void testGetRolesException() { try { System.setProperty("gov.nih.nci.security.configFile", null); List<Role> roles = securityManager.getRoles(); assertEquals(roles.size(), 4); } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * test GetGroupIdForRole(). */ public void testGetGroupIdForRole() { try { List<Role> roles = securityManager.getRoles(); for (Role role : roles) { String groupIdForRole = securityManager.getGroupIdForRole(role.getId().toString()); assertNotNull(groupIdForRole); } } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * testGetUserById. */ public void testGetUserById() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); User userById = securityManager.getUserById(userId.toString()); assertNotNull(userById); } } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * Test Login method. 
*/ public void testGetUserByIdException() { User userById = null; try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); userById = securityManager.getUserById(userId.toString() + "1"); assertNotNull(userById); } } catch (SMException e) { assertNull(userById); System.out.println("****" + e.getMessage()); logger.error(e.getStackTrace()); } } /** * Test getUsers method. */ public void testGetUsers() { try { List<User> allUsers = securityManager.getUsers(); assertEquals(allUsers.size(), 1); } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * Test getUsers method. */ public void testGetUsersException() { try { System.setProperty("gov.nih.nci.security.configFile", null); List<User> allUsers = securityManager.getUsers(); assertEquals(allUsers.size(), 1); } catch (Exception e) { System.out.println("error in get Users"); logger.error(e.getStackTrace()); } } /** * test assign user to group. */ public void testAssignUserToGroup() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); securityManager.assignUserToGroup("TECHNICIAN_GROUP", userId.toString()); String userGroup = securityManager.getRoleName(userId); assertEquals(userGroup, "Technician"); } } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * test assign user to group. */ public void testAssignUserToGroupException() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); securityManager.assignUserToGroup("TECHNICIAN_GROUP", userId.toString() + "fail"); String userGroup = securityManager.getRoleName(userId); assertEquals(userGroup, "Technician"); } } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * test remove user from group. 
*/ public void testRemoveUserFromGroup() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); assignGroupToUser(loginName + count, adminGroup); securityManager.removeUserFromGroup(adminGroup, userId.toString()); String userGroup = securityManager.getRoleName(userId); assertEquals(userGroup, ""); } } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * Test modifyUser. */ public void testModifyUser() { try { User userByLoginName = getUserByLoginName(loginName + count); assertEquals(loginName + count, userByLoginName.getLastName()); userByLoginName.setLoginName("modifiedLastName"); securityManager.modifyUser(userByLoginName); User modifiedUser = getUserByLoginName("modifiedLastName"); assertNotNull(modifiedUser); assertEquals("modifiedLastName", modifiedUser.getLoginName()); assertEquals(loginName + count, modifiedUser.getLastName()); } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * Test modifyUser. */ public void testModifyUserException() { try { User userByLoginName = getUserByLoginName(loginName + count); userByLoginName.setUserId(userByLoginName.getUserId() + 1); assertEquals(loginName + count, userByLoginName.getLastName()); userByLoginName.setLoginName("modifiedLastName"); securityManager.modifyUser(userByLoginName); User modifiedUser = getUserByLoginName("modifiedLastName"); assertNotNull(modifiedUser); assertEquals("modifiedLastName", modifiedUser.getLoginName()); assertEquals(loginName + count, modifiedUser.getLastName()); } catch (SMException e) { System.out.println("exception in modifying"); logger.error(e.getStackTrace()); } } /** * test assignAdditionalGroupsToUser. 
*/ public void testAssignAdditionalGroupsToUser() { User userByLoginName = getUserByLoginName(loginName + count); String[] groupIds = {"1"}; try { assignGroupToUser(loginName + count, "PUBLIC_GROUP"); securityManager.assignAdditionalGroupsToUser(userByLoginName.getUserId().toString(), groupIds); // assertEquals(userByLoginName.getGroups().size(), 1); String userGroup = securityManager.getRoleName(userByLoginName.getUserId()); assertEquals(userGroup, "Administrator"); } catch (SMException e) { logger.error(e.getStackTrace()); } } /** * test assignAdditionalGroupsToUser. */ public void testAssignAdditionalGroupsToUserException() { User userByLoginName = getUserByLoginName(loginName + count); String[] groupIds = {"1"}; try { //assignGroupToUser(loginName+count, "PUBLIC_GROUP"); securityManager.assignAdditionalGroupsToUser(userByLoginName.getUserId().toString() + "fail", groupIds); // assertEquals(userByLoginName.getGroups().size(), 1); String userGroup = securityManager.getRoleName(userByLoginName.getUserId()); assertEquals(userGroup, "Administrator"); } catch (SMException e) { logger.error(e.getStackTrace()); } } /** * test AssignRoleToUser(). */ public void testAssignRoleToUser() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); assignGroupToUser(user.getLoginName(), adminGroup); securityManager.assignRoleToUser(userId.toString(), "1"); Role userRole = securityManager.getUserRole(userId); assertEquals("Administrator", userRole.getName()); } } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * Test testGetGroupIdForRoleFromExistingUser. 
*/ public void testGetGroupIdForRoleFromExistingUser() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); assignGroupToUser(loginName + count, adminGroup); String roleID = securityManager.getUserRole(userId).getId().toString(); String userGrp = securityManager.getGroupIdForRole(roleID); assertEquals(userGrp, "1"); } } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * Test Login method. */ public void testLogin() { try { String val = loginName + count; boolean loginSuccess = securityManager.login(val, val); assertTrue(loginSuccess); } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * Test Login method fail. */ public void testLoginFail() { try { String val = loginName + count; boolean loginSuccess = securityManager.login(val + "fail", val); assertFalse(loginSuccess); } catch (Exception e) { System.out.println("exception in login"); logger.error(e.getStackTrace()); } } /** * Test testGetRoleName1. */ public void testGetRoleName1() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); String userGrp = securityManager.getRoleName(userId + 1); assertNotNull(userGrp); } } catch (Exception e) { System.out.println("exception in getting role name"); logger.error(e.getStackTrace()); } } /** * Test testGetUserRole. */ public void testGetUserRole() { try { List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); assignGroupToUser(loginName + count, "PUBLIC_GROUP"); Role userRole = securityManager.getUserRole(userId); assertNotNull(userRole); } } catch (Exception e) { logger.error(e.getStackTrace()); } } /** * testGetUserRoleException. 
*/ public void testGetUserRoleException() { Role userRole = null; try { System.setProperty("gov.nih.nci.security.configFile", null); List<User> allUsers = securityManager.getUsers(); for (User user : allUsers) { Long userId = user.getUserId(); //assignGroupToUser(loginName+count, "PUBLIC_GROUP"); userRole = securityManager.getUserRole(userId); } } catch (Exception e) { assertNull(userRole); logger.error(e.getStackTrace()); } } /** * test getuserGroup() */ /* public void testGetRoleNameForAdmin() { try { User user = getUserByLoginName(loginName + count); assignGroupToUser(loginName + count, adminGroup); String userGroup = securityManager.getRoleName(user.getUserId()); assertEquals("Administrator", userGroup); } catch (Exception e) { logger.error(e.getStackTrace()); } } *//** * test getuserGroup() */ /* public void testGetRoleNameForScientist() { try { User user = getUserByLoginName(loginName + count); String userGroup = ""; assignGroupToUser(loginName + count, "PUBLIC_GROUP"); userGroup = securityManager.getRoleName(user.getUserId()); assertEquals("Scientist", userGroup); } catch (Exception e) { logger.error(e.getStackTrace()); } } *//** * test getuserGroup() */ /* public void testGetRoleNameForSupervisor() { try { User user = getUserByLoginName(loginName + count); assignGroupToUser(loginName + count, "SUPERVISOR_GROUP"); String userGroup = securityManager.getRoleName(user.getUserId()); assertEquals("Supervisor", userGroup); } catch (Exception e) { logger.error(e.getStackTrace()); } } *//** * test getuserGroup() */ /* public void testGetRoleNameForTech() { try { User user = getUserByLoginName(loginName + count); assignGroupToUser(loginName + count, "TECHNICIAN_GROUP"); String userGroup = securityManager.getRoleName(user.getUserId()); assertEquals("Technician", userGroup); } catch (Exception e) { logger.error(e.getStackTrace()); } }*/ }
bsd-3-clause
Civcraft/CivModCore
src/main/java/vg/civcraft/mc/civmodcore/command/Command.java
570
package vg.civcraft.mc.civmodcore.command; import org.bukkit.command.CommandSender; import java.util.List; public interface Command { boolean execute(CommandSender sender, String[] args); List<String> tabComplete(CommandSender sender, String[] args); String getName(); String getDescription(); String getUsage(); String getIdentifier(); int getMaxArguments(); int getMinArguments(); void postSetup(); boolean getSenderMustBePlayer(); boolean getErrorOnTooManyArgs(); void setSender(CommandSender sender); void setArgs(String[] args); }
bsd-3-clause
x-clone/brackit.brackitdb
brackitdb-server/src/test/java/org/brackit/server/node/el/ElementlessRecordAccessTest.java
2659
/* * [New BSD License] * Copyright (c) 2011-2012, Brackit Project Team <info@brackit.org> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Brackit Project Team nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.brackit.server.node.el; import static junit.framework.Assert.assertEquals; import static junit.framework.Assert.assertTrue; import org.junit.Test; /** * @author Sebastian Baechle * */ public class ElementlessRecordAccessTest { @Test public void testEncodeDecode() { String storedValue = "1,2,3,4"; for (byte type = 0; type < 6; type++) { for (int pcr = 1; pcr < 514; pcr++) { encodeDecode(storedValue, type, pcr); } for (int pcr = Integer.MAX_VALUE - 514; pcr < Integer.MAX_VALUE; pcr++) { encodeDecode(storedValue, type, pcr); } } } private void encodeDecode(String val, byte type, int pcr) { byte[] record = ElRecordAccess.createRecord(pcr, type, val); int rType = ElRecordAccess.getType(record); int rPCR = ElRecordAccess.getPCR(record); String rVal = ElRecordAccess.getValue(record); assertEquals("type is the same", type, rType); assertEquals("pcr is the same", pcr, rPCR); assertTrue("value is the same", val.equals(rVal)); } }
bsd-3-clause
groupon/nakala
src/main/java/com/groupon/nakala/analysis/Analysis.java
1729
/* Copyright (c) 2013, Groupon, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of GROUPON nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.groupon.nakala.analysis; import com.groupon.nakala.db.DataStore; import com.groupon.nakala.exceptions.StoreException; /** * @author npendar@groupon.com */ public interface Analysis { public void store(DataStore ds) throws StoreException; }
bsd-3-clause
googleapis/api-client-staging
generated/java/proto-google-common-protos/src/main/java/com/google/longrunning/ListOperationsRequest.java
26248
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/longrunning/operations.proto package com.google.longrunning; /** * <pre> * The request message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. * </pre> * * Protobuf type {@code google.longrunning.ListOperationsRequest} */ public final class ListOperationsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.longrunning.ListOperationsRequest) ListOperationsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListOperationsRequest.newBuilder() to construct. private ListOperationsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListOperationsRequest() { name_ = ""; filter_ = ""; pageToken_ = ""; } @Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private ListOperationsRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new NullPointerException(); } int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { String s = input.readStringRequireUtf8(); filter_ = s; break; } case 16: { pageSize_ = input.readInt32(); break; } case 26: { String s = input.readStringRequireUtf8(); pageToken_ = s; break; } case 34: { String s = input.readStringRequireUtf8(); name_ = s; break; } default: { if (!parseUnknownField( input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) 
{ throw new com.google.protobuf.InvalidProtocolBufferException( e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return OperationsProto.internal_static_google_longrunning_ListOperationsRequest_descriptor; } @Override protected FieldAccessorTable internalGetFieldAccessorTable() { return OperationsProto.internal_static_google_longrunning_ListOperationsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( ListOperationsRequest.class, Builder.class); } public static final int NAME_FIELD_NUMBER = 4; private volatile Object name_; /** * <pre> * The name of the operation's parent resource. * </pre> * * <code>string name = 4;</code> */ public String getName() { Object ref = name_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); name_ = s; return s; } } /** * <pre> * The name of the operation's parent resource. * </pre> * * <code>string name = 4;</code> */ public com.google.protobuf.ByteString getNameBytes() { Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 1; private volatile Object filter_; /** * <pre> * The standard list filter. * </pre> * * <code>string filter = 1;</code> */ public String getFilter() { Object ref = filter_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * <pre> * The standard list filter. 
* </pre> * * <code>string filter = 1;</code> */ public com.google.protobuf.ByteString getFilterBytes() { Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_; /** * <pre> * The standard list page size. * </pre> * * <code>int32 page_size = 2;</code> */ public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; private volatile Object pageToken_; /** * <pre> * The standard list page token. * </pre> * * <code>string page_token = 3;</code> */ public String getPageToken() { Object ref = pageToken_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * <pre> * The standard list page token. 
* </pre> * * <code>string page_token = 3;</code> */ public com.google.protobuf.ByteString getPageTokenBytes() { Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!getFilterBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, filter_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!getPageTokenBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!getNameBytes().isEmpty()) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, name_); } unknownFields.writeTo(output); } @Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!getFilterBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, filter_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream .computeInt32Size(2, pageSize_); } if (!getPageTokenBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!getNameBytes().isEmpty()) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, name_); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @Override public boolean equals(final Object obj) { if (obj == this) { return true; } if (!(obj instanceof ListOperationsRequest)) { return super.equals(obj); } ListOperationsRequest other = (ListOperationsRequest) obj; if (!getName() 
.equals(other.getName())) return false; if (!getFilter() .equals(other.getFilter())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken() .equals(other.getPageToken())) return false; if (!unknownFields.equals(other.unknownFields)) return false; return true; } @Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static ListOperationsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static ListOperationsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static ListOperationsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static ListOperationsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static ListOperationsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static ListOperationsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static ListOperationsRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static ListOperationsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static ListOperationsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static ListOperationsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static ListOperationsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static ListOperationsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(ListOperationsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @Override protected Builder newBuilderForType( BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * The request message for [Operations.ListOperations][google.longrunning.Operations.ListOperations]. * </pre> * * Protobuf type {@code google.longrunning.ListOperationsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.longrunning.ListOperationsRequest) com.google.longrunning.ListOperationsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return OperationsProto.internal_static_google_longrunning_ListOperationsRequest_descriptor; } @Override protected FieldAccessorTable internalGetFieldAccessorTable() { return OperationsProto.internal_static_google_longrunning_ListOperationsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( ListOperationsRequest.class, Builder.class); } // Construct using com.google.longrunning.ListOperationsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder( BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3 .alwaysUseFieldBuilders) { } } @Override public Builder clear() { super.clear(); name_ = ""; filter_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return OperationsProto.internal_static_google_longrunning_ListOperationsRequest_descriptor; } @Override public ListOperationsRequest getDefaultInstanceForType() { return ListOperationsRequest.getDefaultInstance(); } @Override public ListOperationsRequest build() { ListOperationsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); 
} return result; } @Override public ListOperationsRequest buildPartial() { ListOperationsRequest result = new ListOperationsRequest(this); result.name_ = name_; result.filter_ = filter_; result.pageSize_ = pageSize_; result.pageToken_ = pageToken_; onBuilt(); return result; } @Override public Builder clone() { return super.clone(); } @Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return super.setField(field, value); } @Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @Override public Builder clearOneof( com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, Object value) { return super.setRepeatedField(field, index, value); } @Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, Object value) { return super.addRepeatedField(field, value); } @Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof ListOperationsRequest) { return mergeFrom((ListOperationsRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(ListOperationsRequest other) { if (other == ListOperationsRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; onChanged(); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @Override public final boolean isInitialized() { return true; } @Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws java.io.IOException { ListOperationsRequest parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (ListOperationsRequest) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private Object name_ = ""; /** * <pre> * The name of the operation's parent resource. * </pre> * * <code>string name = 4;</code> */ public String getName() { Object ref = name_; if (!(ref instanceof String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); name_ = s; return s; } else { return (String) ref; } } /** * <pre> * The name of the operation's parent resource. * </pre> * * <code>string name = 4;</code> */ public com.google.protobuf.ByteString getNameBytes() { Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The name of the operation's parent resource. * </pre> * * <code>string name = 4;</code> */ public Builder setName( String value) { if (value == null) { throw new NullPointerException(); } name_ = value; onChanged(); return this; } /** * <pre> * The name of the operation's parent resource. * </pre> * * <code>string name = 4;</code> */ public Builder clearName() { name_ = getDefaultInstance().getName(); onChanged(); return this; } /** * <pre> * The name of the operation's parent resource. * </pre> * * <code>string name = 4;</code> */ public Builder setNameBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; onChanged(); return this; } private Object filter_ = ""; /** * <pre> * The standard list filter. 
* </pre> * * <code>string filter = 1;</code> */ public String getFilter() { Object ref = filter_; if (!(ref instanceof String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (String) ref; } } /** * <pre> * The standard list filter. * </pre> * * <code>string filter = 1;</code> */ public com.google.protobuf.ByteString getFilterBytes() { Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The standard list filter. * </pre> * * <code>string filter = 1;</code> */ public Builder setFilter( String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; onChanged(); return this; } /** * <pre> * The standard list filter. * </pre> * * <code>string filter = 1;</code> */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); onChanged(); return this; } /** * <pre> * The standard list filter. * </pre> * * <code>string filter = 1;</code> */ public Builder setFilterBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; onChanged(); return this; } private int pageSize_ ; /** * <pre> * The standard list page size. * </pre> * * <code>int32 page_size = 2;</code> */ public int getPageSize() { return pageSize_; } /** * <pre> * The standard list page size. * </pre> * * <code>int32 page_size = 2;</code> */ public Builder setPageSize(int value) { pageSize_ = value; onChanged(); return this; } /** * <pre> * The standard list page size. * </pre> * * <code>int32 page_size = 2;</code> */ public Builder clearPageSize() { pageSize_ = 0; onChanged(); return this; } private Object pageToken_ = ""; /** * <pre> * The standard list page token. 
* </pre> * * <code>string page_token = 3;</code> */ public String getPageToken() { Object ref = pageToken_; if (!(ref instanceof String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (String) ref; } } /** * <pre> * The standard list page token. * </pre> * * <code>string page_token = 3;</code> */ public com.google.protobuf.ByteString getPageTokenBytes() { Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * <pre> * The standard list page token. * </pre> * * <code>string page_token = 3;</code> */ public Builder setPageToken( String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; onChanged(); return this; } /** * <pre> * The standard list page token. * </pre> * * <code>string page_token = 3;</code> */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); onChanged(); return this; } /** * <pre> * The standard list page token. 
* </pre> * * <code>string page_token = 3;</code> */ public Builder setPageTokenBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; onChanged(); return this; } @Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.longrunning.ListOperationsRequest) } // @@protoc_insertion_point(class_scope:google.longrunning.ListOperationsRequest) private static final ListOperationsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new ListOperationsRequest(); } public static ListOperationsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListOperationsRequest> PARSER = new com.google.protobuf.AbstractParser<ListOperationsRequest>() { @Override public ListOperationsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new ListOperationsRequest(input, extensionRegistry); } }; public static com.google.protobuf.Parser<ListOperationsRequest> parser() { return PARSER; } @Override public com.google.protobuf.Parser<ListOperationsRequest> getParserForType() { return PARSER; } @Override public ListOperationsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
bsd-3-clause
groupon/nakala
src/main/java/com/groupon/util/URLUtils.java
1935
/* Copyright (c) 2013, Groupon, Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. Neither the name of GROUPON nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.groupon.util; import java.io.IOException; import java.net.URLEncoder; /** * @author alasdair@groupon.com */ public class URLUtils { public static String encode(String url, String charset) { try { return URLEncoder.encode(url, charset); } catch (IOException e) { throw new RuntimeException(e); } } public static String encode(String url) { return encode(url, "ISO-8859-1"); } }
bsd-3-clause
NCIP/c3pr
codebase/projects/ws-client/src/edu/duke/cabig/c3pr/webservice/iso21090/BAGCD.java
2098
/*******************************************************************************
 * Copyright Duke Comprehensive Cancer Center and SemanticBits
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/c3pr/LICENSE.txt for details.
 *******************************************************************************/
package edu.duke.cabig.c3pr.webservice.iso21090;

import java.util.ArrayList;
import java.util.List;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;

/**
 * <p>Java class for BAG_CD complex type.
 *
 * <p>The following schema fragment specifies the expected content contained
 * within this class.
 *
 * <pre>
 * &lt;complexType name="BAG_CD">
 *   &lt;complexContent>
 *     &lt;extension base="{uri:iso.org:21090}COLL_CD">
 *       &lt;sequence>
 *         &lt;element name="item" type="{uri:iso.org:21090}CD" maxOccurs="unbounded" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "BAG_CD", propOrder = {
    "item"
})
public class BAGCD
    extends COLLCD
{

    protected List<CD> item;

    /**
     * Returns the live list backing the item property, creating it lazily
     * on first access.
     *
     * <p>Because the returned list is the live backing collection (not a
     * snapshot), any modification made to it is reflected in this object;
     * that is also why no <CODE>set</CODE> method exists for the item
     * property. For example, to add a new item:
     * <pre>
     *    getItem().add(newItem);
     * </pre>
     *
     * <p>Objects of the following type(s) are allowed in the list:
     * {@link CD }
     *
     * @return the (possibly freshly created) mutable list of items
     */
    public List<CD> getItem() {
        List<CD> current = this.item;
        if (current == null) {
            current = new ArrayList<CD>();
            this.item = current;
        }
        return current;
    }

}
bsd-3-clause