repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
aws/aws-sdk-java | aws-java-sdk-memorydb/src/main/java/com/amazonaws/services/memorydb/model/transform/UpdateParameterGroupRequestMarshaller.java | 2481 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.memorydb.model.transform;
import java.util.List;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.memorydb.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
 * Marshaller for {@code UpdateParameterGroupRequest}: declares where each request field
 * lives in the JSON payload and feeds the field values to a {@link ProtocolMarshaller}.
 * Code-generated — regenerate instead of hand-editing.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class UpdateParameterGroupRequestMarshaller {

    // Binds the ParameterGroupName string to the "ParameterGroupName" payload member.
    private static final MarshallingInfo<String> PARAMETERGROUPNAME_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ParameterGroupName").build();
    // Binds the ParameterNameValues list to the "ParameterNameValues" payload member.
    private static final MarshallingInfo<List> PARAMETERNAMEVALUES_BINDING = MarshallingInfo.builder(MarshallingType.LIST)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ParameterNameValues").build();

    // The marshaller is stateless, so one shared instance is sufficient.
    private static final UpdateParameterGroupRequestMarshaller instance = new UpdateParameterGroupRequestMarshaller();

    /**
     * @return the shared singleton marshaller instance
     */
    public static UpdateParameterGroupRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     *
     * @param updateParameterGroupRequest the request to serialise; must not be {@code null}
     * @param protocolMarshaller receives each (value, binding) pair
     * @throws SdkClientException if the request is {@code null} or any underlying marshalling step fails
     */
    public void marshall(UpdateParameterGroupRequest updateParameterGroupRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateParameterGroupRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updateParameterGroupRequest.getParameterGroupName(), PARAMETERGROUPNAME_BINDING);
            protocolMarshaller.marshall(updateParameterGroupRequest.getParameterNameValues(), PARAMETERNAMEVALUES_BINDING);
        } catch (Exception e) {
            // Wrap any failure (including runtime exceptions) so callers see a single SDK exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
| apache-2.0 |
thingtrack/konekti | core/konekti.domain/src/main/java/com/thingtrack/konekti/domain/Area.java | 5722 | /*
* Copyright 2011 Thingtrack, S.L.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.thingtrack.konekti.domain;
/*
* #%L
* Konekti Domain Layer
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2010 - 2014 Thingtrack s.l.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import java.io.Serializable;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
/**
 * Entity class
 * <p>
 * Represents the third level in the <a href="http://www.isa-95.com">ISA 95</a>
 * organisational distribution.
 *
 * @author Thingtrack S.L.
 */
@SuppressWarnings("serial")
@Entity
@Table(name="AREA")
public class Area implements Serializable {
    /** Unique identifier (database-generated surrogate key). */
    @Id
    @Column(name="AREA_ID")
    @GeneratedValue(strategy=GenerationType.IDENTITY)
    private Integer areaId;

    /** Mandatory, unique business code. */
    @Column(name="CODE", nullable=false, unique=true, length=64)
    private String code;

    /** Display name. */
    @Column(name="NAME", length=64)
    private String name;

    /** Free-form description. */
    @Column(name="DESCRIPTION", length=512)
    private String description;

    /** Mandatory {@link AreaType} classification. */
    @ManyToOne
    @JoinColumn(name="AREA_TYPE_ID", nullable=false)
    private AreaType areaType;

    /** Mandatory owning {@link Location}. */
    @ManyToOne
    @JoinColumn(name="LOCATION_ID", nullable=false)
    private Location location;

    // Renamed from the non-conventional "Comment" to follow lowerCamelCase; the field is
    // private and the column name is explicit, so persistence mapping and the public
    // accessors below are unaffected.
    /** Free-form comment. */
    @Column(name="COMMENT", length=512)
    private String comment;

    /** Active flag; new areas are active by default. */
    @Column(name="ACTIVE", nullable=false)
    private Boolean active = true;

    /** Default constructor required by JPA. */
    public Area() {
    }

    /**
     * Creates an active area.
     *
     * @param code unique business code
     * @param areaType mandatory classification
     */
    public Area(String code, AreaType areaType) {
        this(code, areaType, true);
    }

    /**
     * Creates an area.
     *
     * @param code unique business code
     * @param areaType mandatory classification
     * @param active whether the area starts out active
     */
    public Area(String code, AreaType areaType, Boolean active) {
        this.code = code;
        this.areaType = areaType;
        this.active = active;
    }

    /**
     * @param areaId the areaId to set
     */
    public void setAreaId(Integer areaId) {
        this.areaId = areaId;
    }

    /**
     * @return the areaId
     */
    public Integer getAreaId() {
        return areaId;
    }

    /**
     * @param code the code to set
     */
    public void setCode(String code) {
        this.code = code;
    }

    /**
     * @return the code
     */
    public String getCode() {
        return code;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        this.name = name;
    }

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @param description the description to set
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return the description
     */
    public String getDescription() {
        return description;
    }

    /**
     * @param comment the comment to set
     */
    public void setComment(String comment) {
        this.comment = comment;
    }

    /**
     * @return the comment
     */
    public String getComment() {
        return comment;
    }

    /**
     * @param active the active to set
     */
    public void setActive(Boolean active) {
        this.active = active;
    }

    /**
     * @return the active
     */
    public Boolean getActive() {
        return active;
    }

    /**
     * @param areaType the areaType to set
     */
    public void setAreaType(AreaType areaType) {
        this.areaType = areaType;
    }

    /**
     * @return the areaType
     */
    public AreaType getAreaType() {
        return areaType;
    }

    /**
     * Sets the owning location and keeps the bidirectional association in sync
     * by registering this area on the location when necessary.
     *
     * @param location the location to set
     */
    public void setLocation(Location location) {
        this.location = location;
        if (!location.getAreas().contains(this)) {
            location.addArea(this);
        }
    }

    /**
     * @return the location
     */
    public Location getLocation() {
        return location;
    }

    // NOTE(review): equals/hashCode are based on the mutable code and the generated areaId;
    // hash-based collections holding unsaved entities may misbehave after persisting — confirm
    // this is the intended identity semantics before changing it.
    /* (non-Javadoc)
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((code == null) ? 0 : code.hashCode());
        result = prime * result
                + ((areaId == null) ? 0 : areaId.hashCode());
        return result;
    }

    /* (non-Javadoc)
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (!(obj instanceof Area))
            return false;
        Area other = (Area) obj;
        if (code == null) {
            if (other.code != null)
                return false;
        } else if (!code.equals(other.code))
            return false;
        if (areaId == null) {
            if (other.areaId != null)
                return false;
        } else if (!areaId.equals(other.areaId))
            return false;
        return true;
    }

    /* (non-Javadoc)
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        return "Area [areaId=" + areaId + ", code=" + code + "]";
    }
}
| apache-2.0 |
weld/core | tests-arquillian/src/test/java/org/jboss/weld/tests/ejb/proxy/privateMethods/SFSessionBean.java | 1097 | /*
* JBoss, Home of Professional Open Source
* Copyright 2016, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.ejb.proxy.privateMethods;
import jakarta.ejb.LocalBean;
import jakarta.ejb.Stateful;
import jakarta.inject.Inject;
/**
 * Stateful session bean used as a test subject: delegates to an injected CDI bean
 * through the EJB proxy.
 */
@Stateful
@LocalBean
public class SFSessionBean {

    // CDI-injected collaborator whose invocation through the proxy is under test.
    @Inject
    TestedBean testedBean;

    /**
     * Invokes the injected bean through the container proxy.
     *
     * @return always {@code true} once the delegated call completes without throwing
     */
    public boolean ping() {
        testedBean.ping();
        return true;
    }
}
| apache-2.0 |
ExtrabiomesXL/EBXS-AutumnWoods | src/main/java/extrabiomes/autumn/proxy/ClientProxy.java | 674 | package extrabiomes.autumn.proxy;
import net.minecraft.client.Minecraft;
import cpw.mods.fml.client.registry.ISimpleBlockRenderingHandler;
import cpw.mods.fml.client.registry.RenderingRegistry;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
/**
 * Client-side proxy: wires custom block renderers into the Forge rendering registry.
 * Only loaded on the client, never on a dedicated server.
 */
@SideOnly(Side.CLIENT)
public class ClientProxy extends CommonProxy {

    /** Convenience handle to the running Minecraft client instance. */
    public static final Minecraft MC = Minecraft.getMinecraft();

    /**
     * Registers a custom block rendering handler under a freshly allocated render id.
     *
     * @param handler the block renderer to register
     * @return the render id the handler was registered under
     */
    @Override
    public int registerBlockHandler(ISimpleBlockRenderingHandler handler) {
        final int id = RenderingRegistry.getNextAvailableRenderId();
        RenderingRegistry.registerBlockHandler(id, handler);
        return id;
    }
}
| apache-2.0 |
Sayi/poi-tl | poi-tl/src/main/java/com/deepoove/poi/data/Attachments.java | 3020 | /*
* Copyright 2014-2021 Sayi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.deepoove.poi.data;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.apache.poi.xwpf.usermodel.XWPFDocument;
import com.deepoove.poi.XWPFTemplate;
import com.deepoove.poi.exception.ResolverException;
import com.deepoove.poi.util.ByteUtils;
import com.deepoove.poi.util.PoitlIOUtils;
/**
 * Factory methods to create {@link AttachmentRenderData}.
 *
 * @author Sayi
 */
public class Attachments {

    // Static factory holder — not instantiable.
    private Attachments() {
    }

    /**
     * Builds an attachment from a file on the local filesystem.
     *
     * @param src path of the local file
     * @param fileType attachment type of the file content
     * @return a builder wrapping the file bytes
     */
    public static AttachmentBuilder ofLocal(String src, AttachmentType fileType) {
        return ofBytes(ByteUtils.getLocalByteArray(new File(src)), fileType);
    }

    /**
     * Builds a DOCX attachment from an in-memory Word document.
     *
     * @param src the Word document
     * @return a builder wrapping the serialised document
     * @throws ResolverException if the document cannot be serialised
     */
    public static AttachmentBuilder ofWord(XWPFDocument src) {
        try {
            return ofStream(PoitlIOUtils.docToInputStream(src), AttachmentType.DOCX);
        } catch (IOException e) {
            throw new ResolverException("Cannot compile attachment document", e);
        }
    }

    /**
     * Builds a DOCX attachment from a compiled word template.
     *
     * @param src the template
     * @return a builder wrapping the serialised template
     * @throws ResolverException if the template cannot be serialised
     */
    public static AttachmentBuilder ofWordTemplate(XWPFTemplate src) {
        try {
            return ofStream(PoitlIOUtils.templateToInputStream(src), AttachmentType.DOCX);
        } catch (IOException e) {
            throw new ResolverException("Cannot compile attachment document", e);
        }
    }

    /**
     * Builds an XLSX attachment from an in-memory Excel workbook.
     *
     * @param src the workbook
     * @return a builder wrapping the serialised workbook
     * @throws ResolverException if the workbook cannot be serialised
     */
    public static AttachmentBuilder ofWorkbook(XSSFWorkbook src) {
        try {
            return ofStream(PoitlIOUtils.docToInputStream(src), AttachmentType.XLSX);
        } catch (IOException e) {
            // Message previously said "document" — copy-paste from ofWord; this is a workbook.
            throw new ResolverException("Cannot compile attachment workbook", e);
        }
    }

    /**
     * Builds an attachment by draining an input stream.
     *
     * @param inputStream source stream (fully read; caller remains responsible for closing)
     * @param fileType attachment type of the stream content
     * @return a builder wrapping the stream bytes
     */
    public static AttachmentBuilder ofStream(InputStream inputStream, AttachmentType fileType) {
        return ofBytes(ByteUtils.toByteArray(inputStream), fileType);
    }

    /**
     * Builds an attachment from raw bytes.
     *
     * @param bytes attachment content
     * @param fileType attachment type of the bytes
     * @return a builder wrapping the bytes
     */
    public static AttachmentBuilder ofBytes(byte[] bytes, AttachmentType fileType) {
        return new AttachmentBuilder(bytes, fileType);
    }

    /**
     * Builder to build {@link AttachmentRenderData}.
     */
    public static class AttachmentBuilder implements RenderDataBuilder<AttachmentRenderData> {
        AttachmentRenderData data;

        private AttachmentBuilder(byte[] bytes, AttachmentType fileType) {
            data = new AttachmentRenderData(bytes);
            data.setFileType(fileType);
        }

        @Override
        public AttachmentRenderData create() {
            return data;
        }
    }
}
| apache-2.0 |
dorzey/assertj-core | src/main/java/org/assertj/core/error/ShouldNotContainValue.java | 1345 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2016 the original author or authors.
*/
package org.assertj.core.error;
/**
 * Creates an error message indicating that an assertion that verifies a map does not contain a
 * value failed.
 *
 * @author Nicolas François
 */
public class ShouldNotContainValue extends BasicErrorMessageFactory {

  /**
   * Creates a new <code>{@link ShouldNotContainValue}</code>.
   *
   * @param actual the actual value in the failed assertion.
   * @param value the value expected to be absent from {@code actual}.
   * @return the created {@code ErrorMessageFactory}.
   */
  public static ErrorMessageFactory shouldNotContainValue(Object actual, Object value) {
    return new ShouldNotContainValue(actual, value);
  }

  // Private: instances are only obtained through the static factory above.
  private ShouldNotContainValue(Object actual, Object value) {
    super("%nExpecting:%n <%s>%nnot to contain value:%n <%s>", actual, value);
  }
}
| apache-2.0 |
aravindc/databenecommons | src/main/java/org/databene/commons/operation/MaxNumberStringOperation.java | 1672 | /*
* Copyright (C) 2004-2015 Volker Bergmann (volker.bergmann@bergmann-it.de).
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.databene.commons.operation;
import org.databene.commons.Converter;
import org.databene.commons.Operation;
import org.databene.commons.converter.NumberParser;
/**
* Returns the maximum value of several number literals.
* Created: 08.03.2008 07:18:09
* @since 0.4.0
* @author Volker Bergmann
*/
@SuppressWarnings("unchecked")
public class MaxNumberStringOperation implements Operation<String, String> {
@SuppressWarnings("rawtypes")
private MaxOperation<ComparableWrapper> operation;
private Converter<String, ?> parser;
@SuppressWarnings("rawtypes")
public MaxNumberStringOperation() {
this.operation = new MaxOperation<ComparableWrapper>();
this.parser = new NumberParser();
}
@Override
public String perform(String... args) {
ComparableWrapper<String>[] wrappers = ComparableWrapper.wrapAll(args, parser);
ComparableWrapper<String> min = operation.perform(wrappers);
return min.realObject;
}
} | apache-2.0 |
Sellegit/j2objc | runtime/src/main/java/apple/foundation/NSURLProtocolClientAdapter.java | 2291 | package apple.foundation;
import java.io.*;
import java.nio.*;
import java.util.*;
import com.google.j2objc.annotations.*;
import com.google.j2objc.runtime.*;
import com.google.j2objc.runtime.block.*;
import apple.audiotoolbox.*;
import apple.corefoundation.*;
import apple.coregraphics.*;
import apple.coreservices.*;
import apple.uikit.*;
import apple.coreanimation.*;
import apple.coredata.*;
import apple.coremedia.*;
import apple.security.*;
import apple.dispatch.*;
/*<javadoc>*/
/*</javadoc>*/
/**
 * Convenience adapter for {@code NSURLProtocolClient}: every callback throws
 * {@link UnsupportedOperationException} until a subclass overrides it, so subclasses
 * only need to implement the callbacks they actually use. Each {@code @NotImplemented}
 * value names the corresponding Objective-C selector.
 */
@Adapter
public abstract class NSURLProtocolClientAdapter
    extends Object
    implements NSURLProtocolClient {

    @NotImplemented("URLProtocol:wasRedirectedToRequest:redirectResponse:")
    public void wasRedirectedToRequest(NSURLProtocol protocol, NSURLRequest request, NSURLResponse redirectResponse) { throw new UnsupportedOperationException(); }

    @NotImplemented("URLProtocol:cachedResponseIsValid:")
    public void cachedResponseIsValid(NSURLProtocol protocol, NSCachedURLResponse cachedResponse) { throw new UnsupportedOperationException(); }

    @NotImplemented("URLProtocol:didReceiveResponse:cacheStoragePolicy:")
    public void didReceiveResponse(NSURLProtocol protocol, NSURLResponse response, @Representing("NSURLCacheStoragePolicy") long policy) { throw new UnsupportedOperationException(); }

    @NotImplemented("URLProtocol:didLoadData:")
    public void didLoadData(NSURLProtocol protocol, NSData data) { throw new UnsupportedOperationException(); }

    @NotImplemented("URLProtocolDidFinishLoading:")
    public void didFinishLoading(NSURLProtocol protocol) { throw new UnsupportedOperationException(); }

    @NotImplemented("URLProtocol:didFailWithError:")
    public void didFail(NSURLProtocol protocol, NSError error) { throw new UnsupportedOperationException(); }

    @NotImplemented("URLProtocol:didReceiveAuthenticationChallenge:")
    public void didReceiveAuthenticationChallenge(NSURLProtocol protocol, NSURLAuthenticationChallenge challenge) { throw new UnsupportedOperationException(); }

    @NotImplemented("URLProtocol:didCancelAuthenticationChallenge:")
    public void didCancelAuthenticationChallenge(NSURLProtocol protocol, NSURLAuthenticationChallenge challenge) { throw new UnsupportedOperationException(); }
}
| apache-2.0 |
datalorax/datagrids | coherence-util/src/main/java/org/acc/coherence/versioning/temporal/VValue.java | 4220 | package org.acc.coherence.versioning.temporal;
import com.tangosol.dev.component.Extractor;
import com.tangosol.io.pof.annotation.Portable;
import com.tangosol.io.pof.annotation.PortableProperty;
import com.tangosol.io.pof.reflect.SimplePofPath;
import com.tangosol.util.ValueExtractor;
import com.tangosol.util.extractor.PofExtractor;
import com.tangosol.util.extractor.ReflectionExtractor;
import java.io.Serializable;
/**
* Wrapper type for versioned values. Note, for testing this supports both Pof and Java serialisation.
*/
@Portable
public class VValue<DomainValue> implements Versioned<DomainValue>, Serializable {
private static final long serialVersionUID = 7597546340408031504L;
public static final int METADATA_POF_ID = 1;
public static final PofExtractor VERSION_POF_EXTRACTOR = new PofExtractor(int.class, new SimplePofPath(new int[]{METADATA_POF_ID, MetaData.VERSION_POF_ID}));
public static final PofExtractor CREATED_POF_EXTRACTED = new PofExtractor(long.class, new SimplePofPath(new int[]{METADATA_POF_ID, MetaData.CREATED_POF_ID}));
public static final ValueExtractor CREATED_JAVA_EXTRACTED = new ReflectionExtractor("getCreated");
@PortableProperty(value = METADATA_POF_ID)
private MetaData metaData = new MetaData();
@PortableProperty(value = DOMAIN_POF_ID)
private DomainValue domainValue;
@SuppressWarnings("UnusedDeclaration") // Used by Coherence
@Deprecated // Only
public VValue() {
}
public VValue(DomainValue value) {
domainValue = value;
}
@Override
public int getVersion() {
return metaData.getVersion();
}
@Override
public void setVersion(int version) {
metaData.setVersion(version);
}
@Override
public DomainValue getDomainObject() {
return domainValue;
}
public void setCreated(long timestamp) {
metaData.setCreated(timestamp);
}
public long getCreated() {
return metaData.getCreated();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
VValue that = (VValue) o;
if (domainValue != null ? !domainValue.equals(that.domainValue) : that.domainValue != null) return false;
if (!metaData.equals(that.metaData)) return false;
return true;
}
@Override
public int hashCode() {
return 31 * metaData.hashCode() + (domainValue != null ? domainValue.hashCode() : 0);
}
@Override
public String toString() {
return "VValue{metaData=" + metaData + ", domainValue=" + domainValue + '}';
}
@Portable
public static class MetaData implements Serializable { // Todo(ac): make more generic
private static final long serialVersionUID = -1668679611641295582L;
public final static int VERSION_POF_ID = 1;
public final static int CREATED_POF_ID = 2;
private static final int NOT_SET = -1;
@PortableProperty(value = VERSION_POF_ID)
private int version = NOT_SET;
@PortableProperty(value = CREATED_POF_ID)
private long created = NOT_SET;
public MetaData() {
}
public int getVersion() {
return version;
}
public void setVersion(int version) {
this.version = version;
}
public void setCreated(long timestamp) {
created = timestamp;
}
public long getCreated() {
return created;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MetaData that = (MetaData) o;
if (created != that.created) return false;
if (version != that.version) return false;
return true;
}
@Override
public int hashCode() {
return 31 * version + (int) (created ^ (created >>> 32));
}
@Override
public String toString() {
return "MetaData{v" + version + ", created=" + created + '}';
}
}
} | apache-2.0 |
jonfhancock/MindBody | MindBodyHancock/src/com/jonfhancock/mindbody/models/SessionType.java | 2988 | package com.jonfhancock.mindbody.models;
import android.content.ContentProviderOperation.Builder;
import android.database.Cursor;
import android.os.Parcel;
import android.os.Parcelable;
import com.alexgilleran.icesoap.annotation.XMLField;
import com.alexgilleran.icesoap.annotation.XMLObject;
import com.jonfhancock.mindbody.data.provider.MindBodyContent.Appointment.Columns;
/**
 * A MindBody session type, deserialised from a SOAP {@code SessionType} element and
 * persistable via the content provider. Identity (equals/hashCode) is based solely
 * on the session type id.
 */
@XMLObject("//SessionType")
public class SessionType implements Parcelable {
    // SOAP field names used by the XML binding annotations below.
    private static final String FIELD_ID = "ID";
    private static final String FIELD_DEFAULT_TIME_LENGTH = "DefaultTimeLength";
    private static final String FIELD_PROGRAM_ID = "ProgramID";
    private static final String FIELD_NAME = "Name";

    @XMLField(FIELD_ID)
    private long mID;
    @XMLField(FIELD_DEFAULT_TIME_LENGTH)
    private int mDefaultTimeLength;
    @XMLField(FIELD_PROGRAM_ID)
    private long mProgramID;
    @XMLField(FIELD_NAME)
    private String mName;

    /** No-arg constructor required by the XML binding framework. */
    public SessionType() {
    }

    public void setID(long iD) {
        mID = iD;
    }

    public long getID() {
        return mID;
    }

    public void setDefaultTimeLength(int defaultTimeLength) {
        mDefaultTimeLength = defaultTimeLength;
    }

    public int getDefaultTimeLength() {
        return mDefaultTimeLength;
    }

    public void setProgramID(long programID) {
        mProgramID = programID;
    }

    public long getProgramID() {
        return mProgramID;
    }

    public void setName(String name) {
        mName = name;
    }

    public String getName() {
        return mName;
    }

    // Equality is id-only by design; two instances with the same id but different
    // names compare equal.
    @Override
    public boolean equals(Object obj) {
        if (obj instanceof SessionType) {
            return ((SessionType) obj).getID() == mID;
        }
        return false;
    }

    @Override
    public int hashCode() {
        return ((Long) mID).hashCode();
    }

    // NOTE: read order here must mirror the write order in writeToParcel below.
    public SessionType(Parcel in) {
        mID = in.readLong();
        mDefaultTimeLength = in.readInt();
        mProgramID = in.readLong();
        mName = in.readString();
    }

    /** Rehydrates a session type from a content-provider cursor row. */
    public SessionType(Cursor c) {
        mID = c.getLong(Columns.SESSION_TYPE_ID.getIndex());
        mDefaultTimeLength = c.getInt(Columns.SESSION_TYPE_DEFAULT_TIME
                .getIndex());
        mProgramID = c.getLong(Columns.SESSION_TYPE_PROGRAM_ID.getIndex());
        mName = c.getString(Columns.SESSION_TYPE_NAME.getIndex());
    }

    /**
     * Appends this object's column values to a content-provider operation builder.
     *
     * @param builder operation under construction
     * @return the same builder, for chaining
     */
    public Builder addValues(Builder builder) {
        builder.withValue(Columns.SESSION_TYPE_ID.getName(), mID)
                .withValue(Columns.SESSION_TYPE_DEFAULT_TIME.getName(),
                        mDefaultTimeLength)
                .withValue(Columns.SESSION_TYPE_PROGRAM_ID.getName(),
                        mProgramID)
                .withValue(Columns.SESSION_TYPE_NAME.getName(), mName);
        return builder;
    }

    @Override
    public int describeContents() {
        return 0;
    }

    /** Parcelable plumbing. */
    public static final Parcelable.Creator<SessionType> CREATOR = new Parcelable.Creator<SessionType>() {
        public SessionType createFromParcel(Parcel in) {
            return new SessionType(in);
        }

        public SessionType[] newArray(int size) {
            return new SessionType[size];
        }
    };

    // NOTE: write order here must mirror the read order in SessionType(Parcel).
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeLong(mID);
        dest.writeInt(mDefaultTimeLength);
        dest.writeLong(mProgramID);
        dest.writeString(mName);
    }
}
benhook1013/Cyber_World | src/cyber_world/Characters/Player/State/Parser/In_World_Matcher_Map_Editor.java | 2261 | package cyber_world.Characters.Player.State.Parser;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import cyber_world.Characters.Commands.Action_Command;
import cyber_world.Characters.Commands.Character_Commands.Map_Editor.Create_Exit;
import cyber_world.Characters.Commands.Character_Commands.Map_Editor.Create_Room;
import cyber_world.Characters.Commands.Character_Commands.Map_Editor.Destroy_Exit;
import cyber_world.Characters.Commands.Character_Commands.Map_Editor.Destroy_Room;
/*
* Copyright 2017 Ben Hook
* In_World_Matcher_Map_Editor.java
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Matches player input against the map-editor command grammar and produces the
 * corresponding {@link Action_Command}.
 */
public class In_World_Matcher_Map_Editor implements Input_Parser_Matcher_Interface {

    // Compiled once: Pattern instances are immutable and thread-safe, so sharing them
    // statically avoids recompiling the regexes for every matcher instance.
    private static final Pattern CREATE_EXITS_PATTERN = Pattern.compile("(?i)CREATE EXITS? (\\w+)");
    private static final Pattern CREATE_ROOM_PATTERN = Pattern.compile("(?i)CREATE ROOM (\\w+)");
    private static final Pattern DESTROY_EXITS_PATTERN = Pattern.compile("(?i)DESTROY EXITS? (\\w+)");
    private static final Pattern DESTROY_ROOM_PATTERN = Pattern.compile("(?i)DESTROY ROOM (\\w+)");

    public In_World_Matcher_Map_Editor() {
    }

    /**
     * Matches the input against the map-editor commands (case-insensitive).
     *
     * @param text raw player input
     * @return the matching command carrying the captured argument, or {@code null} if
     *         the input is not a map-editor command
     */
    @Override
    public Action_Command match(String text) {
        Matcher matcher;
        if ((matcher = CREATE_EXITS_PATTERN.matcher(text)).matches()) {
            return new Create_Exit(matcher.group(1));
        } else if ((matcher = CREATE_ROOM_PATTERN.matcher(text)).matches()) {
            return new Create_Room(matcher.group(1));
        } else if ((matcher = DESTROY_EXITS_PATTERN.matcher(text)).matches()) {
            return new Destroy_Exit(matcher.group(1));
        } else if ((matcher = DESTROY_ROOM_PATTERN.matcher(text)).matches()) {
            return new Destroy_Room(matcher.group(1));
        } else {
            return null;
        }
    }
}
| apache-2.0 |
XinyueZ/Hybrid | app/src/main/java/com/hybrid/app/SettingsActivity.java | 750 | package com.hybrid.app;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBarPreferenceActivity;
/**
 * Application settings screen, backed by the preference definitions in
 * {@code res/xml/settings.xml}.
 */
public final class SettingsActivity extends ActionBarPreferenceActivity {
    /**
     * Show an instance of SettingsActivity.
     *
     * @param context A context object.
     */
    public static void showInstance(Context context) {
        Intent intent = new Intent(context, SettingsActivity.class);
        // Reuse an existing instance at the top of the task instead of stacking duplicates.
        intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
        context.startActivity(intent);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Inflate the preference hierarchy from XML.
        addPreferencesFromResource(R.xml.settings);
    }
}
| apache-2.0 |
rostam/gradoop | gradoop-flink/src/main/java/org/gradoop/flink/model/api/epgm/LogicalGraphOperators.java | 28211 | /*
* Copyright © 2014 - 2019 Leipzig University (Database Research Group)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradoop.flink.model.api.epgm;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.java.DataSet;
import org.gradoop.common.model.impl.pojo.Edge;
import org.gradoop.common.model.impl.pojo.GraphHead;
import org.gradoop.common.model.impl.pojo.Vertex;
import org.gradoop.flink.model.api.functions.AggregateFunction;
import org.gradoop.flink.model.api.functions.EdgeAggregateFunction;
import org.gradoop.flink.model.api.functions.TransformationFunction;
import org.gradoop.flink.model.api.functions.VertexAggregateFunction;
import org.gradoop.flink.model.api.operators.BinaryGraphToGraphOperator;
import org.gradoop.flink.model.api.operators.GraphsToGraphOperator;
import org.gradoop.flink.model.api.operators.UnaryBaseGraphToBaseGraphOperator;
import org.gradoop.flink.model.api.operators.UnaryGraphToCollectionOperator;
import org.gradoop.flink.model.impl.epgm.GraphCollection;
import org.gradoop.flink.model.impl.epgm.LogicalGraph;
import org.gradoop.flink.model.impl.operators.grouping.Grouping;
import org.gradoop.flink.model.impl.operators.grouping.GroupingStrategy;
import org.gradoop.flink.model.impl.operators.matching.common.MatchStrategy;
import org.gradoop.flink.model.impl.operators.matching.common.statistics.GraphStatistics;
import org.gradoop.flink.model.impl.operators.neighborhood.Neighborhood;
import org.gradoop.flink.model.impl.operators.sampling.SamplingAlgorithm;
import org.gradoop.flink.model.impl.operators.subgraph.Subgraph;
import java.util.List;
import java.util.Objects;
/**
* Defines the operators that are available on a {@link LogicalGraph}.
*/
public interface LogicalGraphOperators extends GraphBaseOperators {
//----------------------------------------------------------------------------
// Unary Operators
//----------------------------------------------------------------------------
/**
 * Evaluates the given query using the Cypher query engine. The engine uses default morphism
 * strategies, which are vertex homomorphism and edge isomorphism. The vertex and edge data of
 * the data graph elements is attached to the resulting vertices.
 *
 * Note, that this method uses no statistics about the data graph which may result in bad
 * runtime performance. Use {@link LogicalGraphOperators#cypher(String, GraphStatistics)} to
 * provide statistics for the query planner.
 *
 * @param query Cypher query
 * @return graph collection containing matching subgraphs
 * @deprecated because of API restructuring.
 * Please use {@link LogicalGraph#query(String)} instead.
 */
@Deprecated
GraphCollection cypher(String query);
/**
 * Evaluates the given query using the Cypher query engine. The engine uses default morphism
 * strategies, which are vertex homomorphism and edge isomorphism. The vertex and edge data of
 * the data graph elements is attached to the resulting vertices.
 *
 * Note, that this method uses no statistics about the data graph which may result in bad
 * runtime performance. Use {@link LogicalGraphOperators#cypher(String, GraphStatistics)} to
 * provide statistics for the query planner.
 *
 * In addition, the operator can be supplied with a construction pattern allowing the creation
 * of new graph elements based on variable bindings of the match pattern. Consider the following
 * example:
 *
 * <pre>
 * <code>graph.cypher(
 *   "MATCH (a:Author)-[:WROTE]->(:Paper)<-[:WROTE]-(b:Author) WHERE a <> b",
 *   "(a)-[:CO_AUTHOR]->(b)")
 * </code>
 * </pre>
 *
 * The query pattern is looking for pairs of authors that worked on the same paper. The
 * construction pattern defines a new edge of type CO_AUTHOR between the two entities.
 *
 * @param query Cypher query string
 * @param constructionPattern Construction pattern
 * @return graph collection containing the output of the construct pattern
 * @deprecated because of API restructuring.
 * Please use {@link LogicalGraph#query(String, String)} instead.
 */
@Deprecated
GraphCollection cypher(String query, String constructionPattern);
/**
 * Evaluates the given query using the Cypher query engine. The engine uses default morphism
 * strategies, which are vertex homomorphism and edge isomorphism. The vertex and edge data of
 * the data graph elements is attached to the resulting vertices.
 *
 * @param query Cypher query
 * @param graphStatistics statistics about the data graph
 * @return graph collection containing matching subgraphs
 * @deprecated because of API restructuring.
 * Please use {@link LogicalGraph#query(String, GraphStatistics)} instead.
 */
@Deprecated
GraphCollection cypher(String query, GraphStatistics graphStatistics);
/**
 * Evaluates the given query using the Cypher query engine. The engine uses default morphism
 * strategies, which are vertex homomorphism and edge isomorphism. The vertex and edge data of
 * the data graph elements is attached to the resulting vertices.
 *
 * In addition, the operator can be supplied with a construction pattern allowing the creation
 * of new graph elements based on variable bindings of the match pattern. Consider the following
 * example:
 *
 * <pre>
 * <code>graph.cypher(
 * "MATCH (a:Author)-[:WROTE]->(:Paper)<-[:WROTE]-(b:Author) WHERE a <> b",
 * "(a)-[:CO_AUTHOR]->(b)")
 * </code>
 * </pre>
 *
 * The query pattern is looking for pairs of authors that worked on the same paper. The
 * construction pattern defines a new edge of type CO_AUTHOR between the two entities.
 *
 * @param query Cypher query
 * @param constructionPattern Construction pattern
 * @param graphStatistics statistics about the data graph
 * @return graph collection containing the output of the construct pattern
 * @deprecated because of API restructuring.
 * Please use {@link LogicalGraph#query(String, String, GraphStatistics)} instead.
 */
@Deprecated
GraphCollection cypher(String query, String constructionPattern, GraphStatistics graphStatistics);
/**
 * Evaluates the given query using the Cypher query engine.
 *
 * @param query Cypher query
 * @param attachData attach original vertex and edge data to the result
 * @param vertexStrategy morphism setting for vertex mapping
 * @param edgeStrategy morphism setting for edge mapping
 * @param graphStatistics statistics about the data graph
 * @return graph collection containing matching subgraphs
 * @deprecated because of API restructuring.
 * Please use {@link LogicalGraph#query(String, boolean, MatchStrategy, MatchStrategy, GraphStatistics)} instead.
 */
@Deprecated
GraphCollection cypher(String query, boolean attachData,
MatchStrategy vertexStrategy, MatchStrategy edgeStrategy, GraphStatistics graphStatistics);
/**
 * Evaluates the given query using the Cypher query engine.
 *
 * @param query Cypher query
 * @param constructionPattern Construction pattern
 * @param attachData attach original vertex and edge data to the result
 * @param vertexStrategy morphism setting for vertex mapping
 * @param edgeStrategy morphism setting for edge mapping
 * @param graphStatistics statistics about the data graph
 * @return graph collection containing the output of the construct pattern
 * @deprecated because of API restructuring.
 * Please use {@link LogicalGraph#query(String, String, boolean, MatchStrategy, MatchStrategy, GraphStatistics)} instead.
 */
@Deprecated
GraphCollection cypher(String query, String constructionPattern, boolean attachData,
MatchStrategy vertexStrategy, MatchStrategy edgeStrategy, GraphStatistics graphStatistics);
/**
 * Evaluates the given query using the Cypher query engine. The engine uses default morphism
 * strategies, which are vertex homomorphism and edge isomorphism. The vertex and edge data of
 * the data graph elements is attached to the resulting vertices.
 *
 * Note that this method uses no statistics about the data graph, which may result in bad
 * runtime performance. Use {@link LogicalGraphOperators#query(String, GraphStatistics)} to
 * provide statistics for the query planner.
 *
 * @param query Cypher query
 * @return graph collection containing matching subgraphs
 */
GraphCollection query(String query);
/**
 * Evaluates the given query using the Cypher query engine. The engine uses default morphism
 * strategies, which are vertex homomorphism and edge isomorphism. The vertex and edge data of
 * the data graph elements is attached to the resulting vertices.
 *
 * Note that this method uses no statistics about the data graph, which may result in bad
 * runtime performance. Use {@link LogicalGraphOperators#query(String, GraphStatistics)} to
 * provide statistics for the query planner.
 *
 * In addition, the operator can be supplied with a construction pattern allowing the creation
 * of new graph elements based on variable bindings of the match pattern. Consider the following
 * example:
 *
 * <pre>
 * <code>graph.query(
 * "MATCH (a:Author)-[:WROTE]->(:Paper)<-[:WROTE]-(b:Author) WHERE a <> b",
 * "(a)-[:CO_AUTHOR]->(b)")
 * </code>
 * </pre>
 *
 * The query pattern is looking for pairs of authors that worked on the same paper. The
 * construction pattern defines a new edge of type CO_AUTHOR between the two entities.
 *
 * @param query Cypher query string
 * @param constructionPattern Construction pattern
 * @return graph collection containing the output of the construct pattern
 */
GraphCollection query(String query, String constructionPattern);
/**
 * Evaluates the given query using the Cypher query engine. The engine uses default morphism
 * strategies, which are vertex homomorphism and edge isomorphism. The vertex and edge data of
 * the data graph elements is attached to the resulting vertices.
 *
 * @param query Cypher query
 * @param graphStatistics statistics about the data graph
 * @return graph collection containing matching subgraphs
 */
GraphCollection query(String query, GraphStatistics graphStatistics);
/**
 * Evaluates the given query using the Cypher query engine. The engine uses default morphism
 * strategies, which are vertex homomorphism and edge isomorphism. The vertex and edge data of
 * the data graph elements is attached to the resulting vertices.
 *
 * In addition, the operator can be supplied with a construction pattern allowing the creation
 * of new graph elements based on variable bindings of the match pattern. Consider the following
 * example:
 *
 * <pre>
 * <code>graph.query(
 * "MATCH (a:Author)-[:WROTE]->(:Paper)<-[:WROTE]-(b:Author) WHERE a <> b",
 * "(a)-[:CO_AUTHOR]->(b)")
 * </code>
 * </pre>
 *
 * The query pattern is looking for pairs of authors that worked on the same paper. The
 * construction pattern defines a new edge of type CO_AUTHOR between the two entities.
 *
 * @param query Cypher query
 * @param constructionPattern Construction pattern
 * @param graphStatistics statistics about the data graph
 * @return graph collection containing the output of the construct pattern
 */
GraphCollection query(String query, String constructionPattern, GraphStatistics graphStatistics);
/**
 * Evaluates the given query using the Cypher query engine.
 *
 * @param query Cypher query
 * @param attachData attach original vertex and edge data to the result
 * @param vertexStrategy morphism setting for vertex mapping
 * @param edgeStrategy morphism setting for edge mapping
 * @param graphStatistics statistics about the data graph
 * @return graph collection containing matching subgraphs
 */
GraphCollection query(String query, boolean attachData, MatchStrategy vertexStrategy,
MatchStrategy edgeStrategy, GraphStatistics graphStatistics);
/**
 * Evaluates the given query using the Cypher query engine.
 *
 * @param query Cypher query
 * @param constructionPattern Construction pattern
 * @param attachData attach original vertex and edge data to the result
 * @param vertexStrategy morphism setting for vertex mapping
 * @param edgeStrategy morphism setting for edge mapping
 * @param graphStatistics statistics about the data graph
 * @return graph collection containing the output of the construct pattern
 */
GraphCollection query(String query, String constructionPattern, boolean attachData,
MatchStrategy vertexStrategy, MatchStrategy edgeStrategy, GraphStatistics graphStatistics);
/**
 * Creates a copy of the logical graph.
 *
 * Note that this method creates new graph head, vertex and edge instances.
 *
 * @return copy of this logical graph
 */
LogicalGraph copy();
/**
 * Transforms the elements of the logical graph using the given transformation
 * functions. The identity of the elements is preserved.
 *
 * @param graphHeadTransformationFunction graph head transformation function
 * @param vertexTransformationFunction vertex transformation function
 * @param edgeTransformationFunction edge transformation function
 * @return transformed logical graph
 */
LogicalGraph transform(
TransformationFunction<GraphHead> graphHeadTransformationFunction,
TransformationFunction<Vertex> vertexTransformationFunction,
TransformationFunction<Edge> edgeTransformationFunction);
/**
 * Transforms the graph head of the logical graph using the given
 * transformation function. The identity of the graph is preserved.
 *
 * @param graphHeadTransformationFunction graph head transformation function
 * @return transformed logical graph
 */
LogicalGraph transformGraphHead(
TransformationFunction<GraphHead> graphHeadTransformationFunction);
/**
 * Transforms the vertices of the logical graph using the given transformation
 * function. The identity of the vertices is preserved.
 *
 * @param vertexTransformationFunction vertex transformation function
 * @return transformed logical graph
 */
LogicalGraph transformVertices(TransformationFunction<Vertex> vertexTransformationFunction);
/**
 * Transforms the edges of the logical graph using the given transformation
 * function. The identity of the edges is preserved.
 *
 * @param edgeTransformationFunction edge transformation function
 * @return transformed logical graph
 */
LogicalGraph transformEdges(TransformationFunction<Edge> edgeTransformationFunction);
/**
 * Returns the subgraph that is induced by the vertices which fulfill the
 * given filter function.
 *
 * @param vertexFilterFunction vertex filter function
 * @return vertex-induced subgraph as a new logical graph
 */
LogicalGraph vertexInducedSubgraph(FilterFunction<Vertex> vertexFilterFunction);
/**
 * Returns the subgraph that is induced by the edges which fulfill the given
 * filter function.
 *
 * @param edgeFilterFunction edge filter function
 * @return edge-induced subgraph as a new logical graph
 */
LogicalGraph edgeInducedSubgraph(FilterFunction<Edge> edgeFilterFunction);
/**
 * Returns a subgraph of the logical graph which contains only those vertices
 * and edges that fulfill the given vertex and edge filter function
 * respectively.
 *
 * Note, that the operator does not verify the consistency of the resulting
 * graph. Use {#toGellyGraph().subgraph()} for that behaviour.
 *
 * @param vertexFilterFunction vertex filter function
 * @param edgeFilterFunction edge filter function
 * @return logical graph which fulfills the given predicates and is a subgraph
 * of that graph
 */
default LogicalGraph subgraph(FilterFunction<Vertex> vertexFilterFunction,
  FilterFunction<Edge> edgeFilterFunction) {
  // Fail fast on null arguments, then delegate to the strategy-aware overload
  // using the default strategy that filters both vertices and edges.
  return subgraph(
    Objects.requireNonNull(vertexFilterFunction),
    Objects.requireNonNull(edgeFilterFunction),
    Subgraph.Strategy.BOTH);
}
/**
 * Returns a subgraph of the logical graph which contains only those vertices
 * and edges that fulfill the given vertex and edge filter function
 * respectively.
 *
 * Note, that the operator does not verify the consistency of the resulting
 * graph. Use {#toGellyGraph().subgraph()} for that behaviour.
 *
 * @param vertexFilterFunction vertex filter function
 * @param edgeFilterFunction edge filter function
 * @param strategy execution strategy for the operator
 * @return logical graph which fulfills the given predicates and is a subgraph
 * of that graph
 */
LogicalGraph subgraph(FilterFunction<Vertex> vertexFilterFunction,
FilterFunction<Edge> edgeFilterFunction, Subgraph.Strategy strategy);
/**
 * Applies the given aggregate functions to the logical graph and stores the
 * result of those functions at the resulting graph using the given property
 * keys.
 *
 * @param aggregateFunctions computes aggregates on the logical graph
 * @return logical graph with additional properties storing the aggregates
 */
LogicalGraph aggregate(AggregateFunction... aggregateFunctions);
/**
 * Creates a new graph from a randomly chosen subset of vertices and their
 * associated edges.
 *
 * @param algorithm used sampling algorithm
 * @return logical graph with random vertices and their associated edges
 */
LogicalGraph sample(SamplingAlgorithm algorithm);
/**
 * Creates a condensed version of the logical graph by grouping vertices based on the specified
 * property keys.
 *
 * Vertices are grouped by the given property keys. Edges are implicitly grouped along with their
 * incident vertices.
 *
 * Note: To group vertices by their type label, one needs to add the specific symbol
 * {@link Grouping#LABEL_SYMBOL} to the respective grouping keys.
 *
 * @param vertexGroupingKeys property keys to group vertices
 *
 * @return summary graph
 * @see Grouping
 */
LogicalGraph groupBy(List<String> vertexGroupingKeys);
/**
 * Creates a condensed version of the logical graph by grouping vertices and edges based on given
 * property keys.
 *
 * Vertices are grouped by the given property keys. Edges are implicitly grouped along with their
 * incident vertices and explicitly by the specified edge grouping keys.
 *
 * One needs to at least specify a list of vertex grouping keys. Any other argument may be
 * {@code null}.
 *
 * Note: To group vertices/edges by their type label, one needs to add the specific symbol
 * {@link Grouping#LABEL_SYMBOL} to the respective grouping keys.
 *
 * @param vertexGroupingKeys property keys to group vertices
 * @param edgeGroupingKeys property keys to group edges
 *
 * @return summary graph
 * @see Grouping
 */
LogicalGraph groupBy(List<String> vertexGroupingKeys, List<String> edgeGroupingKeys);
/**
 * Creates a condensed version of the logical graph by grouping vertices and edges based on given
 * property keys.
 *
 * Vertices are grouped by the given property keys. Edges are implicitly grouped along with their
 * incident vertices and explicitly by the specified edge grouping keys. Furthermore, one can
 * specify sets of vertex and edge aggregate functions which are applied on vertices/edges
 * represented by the same super vertex/edge.
 *
 * One needs to at least specify a list of vertex grouping keys. Any other argument may be
 * {@code null}.
 *
 * Note: To group vertices/edges by their type label, one needs to add the specific symbol
 * {@link Grouping#LABEL_SYMBOL} to the respective grouping keys.
 *
 * @param vertexGroupingKeys property keys to group vertices
 * @param vertexAggregateFunctions aggregate functions to apply on super vertices
 * @param edgeGroupingKeys property keys to group edges
 * @param edgeAggregateFunctions aggregate functions to apply on super edges
 * @param groupingStrategy execution strategy for vertex grouping
 *
 * @return summary graph
 * @see Grouping
 */
LogicalGraph groupBy(
List<String> vertexGroupingKeys, List<AggregateFunction> vertexAggregateFunctions,
List<String> edgeGroupingKeys, List<AggregateFunction> edgeAggregateFunctions,
GroupingStrategy groupingStrategy);
/**
 * Sets the aggregation result of the given function as property for each vertex. All edges where
 * the vertex is relevant get joined first and then grouped. The relevant edges are selected by
 * the given direction: incoming to the vertex, outgoing from the vertex, or both.
 *
 * @param function aggregate function
 * @param edgeDirection incoming, outgoing edges or both
 *
 * @return logical graph where vertices store aggregated information about connected edges
 */
LogicalGraph reduceOnEdges(
EdgeAggregateFunction function, Neighborhood.EdgeDirection edgeDirection);
/**
 * Sets the aggregation result of the given function as property for each vertex. All vertices
 * of relevant edges get joined first and then grouped by the vertex. The relevant edges are
 * selected by the given direction: incoming to the vertex, outgoing from the vertex, or both.
 *
 * @param function aggregate function
 * @param edgeDirection incoming, outgoing edges or both
 *
 * @return logical graph where vertices store aggregated information about connected vertices
 */
LogicalGraph reduceOnNeighbors(
VertexAggregateFunction function, Neighborhood.EdgeDirection edgeDirection);
/**
 * Checks if another logical graph contains exactly the same vertices and
 * edges (by id) as this graph.
 *
 * @param other other graph
 * @return 1-element dataset containing true, iff equal by element ids
 */
DataSet<Boolean> equalsByElementIds(LogicalGraph other);
/**
 * Checks if another logical graph contains vertices and edges with the same
 * attached data (i.e. label and properties) as this graph.
 *
 * @param other other graph
 * @return 1-element dataset containing true, iff equal by element data
 */
DataSet<Boolean> equalsByElementData(LogicalGraph other);
/**
 * Checks if another logical graph has the same attached data and contains
 * vertices and edges with the same attached data as this graph.
 *
 * @param other other graph
 * @return 1-element dataset containing true, iff equal by element data
 */
DataSet<Boolean> equalsByData(LogicalGraph other);
/**
 * Generates all combinations of the supplied vertex grouping keys according to the definition of
 * the ROLLUP operation in SQL and uses them together with all edge grouping keys for separate
 * grouping operations. For example, specifying the vertex grouping keys A, B and C leads to
 * three differently grouped graphs {A,B,C},{A,B},{A} within the resulting graph collection.
 *
 * @param vertexGroupingKeys grouping keys to group vertices
 * @param vertexAggregateFunctions aggregate functions to apply on super vertices
 * @param edgeGroupingKeys grouping keys to group edges
 * @param edgeAggregateFunctions aggregate functions to apply on super edges
 * @return graph collection containing all resulting graphs
 */
GraphCollection groupVerticesByRollUp(
List<String> vertexGroupingKeys, List<AggregateFunction> vertexAggregateFunctions,
List<String> edgeGroupingKeys, List<AggregateFunction> edgeAggregateFunctions);
/**
 * Generates all combinations of the supplied edge grouping keys according to the definition of
 * the ROLLUP operation in SQL and uses them together with all vertex grouping keys for separate
 * grouping operations. For example, specifying the edge grouping keys A, B and C leads to
 * three differently grouped graphs {A,B,C},{A,B},{A} within the resulting graph collection.
 *
 * @param vertexGroupingKeys grouping keys to group vertices
 * @param vertexAggregateFunctions aggregate functions to apply on super vertices
 * @param edgeGroupingKeys grouping keys to group edges
 * @param edgeAggregateFunctions aggregate functions to apply on super edges
 * @return graph collection containing all resulting graphs
 */
GraphCollection groupEdgesByRollUp(
List<String> vertexGroupingKeys, List<AggregateFunction> vertexAggregateFunctions,
List<String> edgeGroupingKeys, List<AggregateFunction> edgeAggregateFunctions);
//----------------------------------------------------------------------------
// Binary Operators
//----------------------------------------------------------------------------
/**
 * Creates a new logical graph by combining the vertex and edge sets of
 * this graph and the given graph. Vertex and edge equality is based on their
 * identifiers.
 *
 * @param otherGraph logical graph to combine this graph with
 * @return logical graph containing all vertices and edges of the
 * input graphs
 */
LogicalGraph combine(LogicalGraph otherGraph);
/**
 * Creates a new logical graph containing the overlapping vertex and edge
 * sets of this graph and the given graph. Vertex and edge equality is
 * based on their identifiers.
 *
 * @param otherGraph logical graph to compute overlap with
 * @return logical graph that contains all vertices and edges that exist in
 * both input graphs
 */
LogicalGraph overlap(LogicalGraph otherGraph);
/**
 * Creates a new logical graph containing only vertices and edges that
 * exist in that graph but not in the other graph. Vertex and edge equality
 * is based on their identifiers.
 *
 * @param otherGraph logical graph to exclude from that graph
 * @return logical graph that contains only vertices and edges that are not in
 * the other graph
 */
LogicalGraph exclude(LogicalGraph otherGraph);
//----------------------------------------------------------------------------
// Auxiliary Operators
//----------------------------------------------------------------------------
/**
 * Splits the graph into multiple logical graphs using the property value
 * which is assigned to the given property key. Vertices and edges that do
 * not have this property will be removed from the resulting collection.
 *
 * @param propertyKey split property key
 * @return graph collection
 */
GraphCollection splitBy(String propertyKey);
/**
 * Creates a logical graph using the given unary graph operator.
 *
 * @param operator unary graph to graph operator
 * @return result of given operator
 */
LogicalGraph callForGraph(UnaryBaseGraphToBaseGraphOperator<LogicalGraph> operator);
/**
 * Creates a logical graph from that graph and the input graph using the
 * given binary operator.
 *
 * @param operator binary graph to graph operator
 * @param otherGraph other graph
 * @return result of given operator
 */
LogicalGraph callForGraph(BinaryGraphToGraphOperator operator, LogicalGraph otherGraph);
/**
 * Creates a logical graph from that graph and other graphs using the given
 * operator.
 *
 * @param operator multi graph to graph operator
 * @param otherGraphs other graphs
 * @return result of given operator
 */
LogicalGraph callForGraph(GraphsToGraphOperator operator, LogicalGraph... otherGraphs);
/**
 * Creates a graph collection from that graph using the given unary graph
 * operator.
 *
 * @param operator unary graph to collection operator
 * @return result of given operator
 */
GraphCollection callForCollection(UnaryGraphToCollectionOperator operator);
}
| apache-2.0 |
debun8/Jokester | jokelibrary/src/test/java/com/pixby/jokelibrary/ExampleUnitTest.java | 314 | package com.pixby.jokelibrary;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() throws Exception {
    // Placeholder sanity test: verifies plain integer arithmetic.
    final int sum = 2 + 2;
    assertEquals(4, sum);
}
} | apache-2.0 |
rayrelay/devnote | devnote-example/src/main/java/jp/gr/java_conf/tame/swing/slider/MThumbSliderAdditional.java | 501 | /* (swing1.1.1) */
package jp.gr.java_conf.tame.swing.slider;
import java.awt.*;
/**
* @version 1.0 09/08/99
*/
//
// MThumbSliderAdditionalUI <--> BasicMThumbSliderUI
// <--> MetalMThumbSliderUI
// <--> MotifMThumbSliderUI
//
public interface MThumbSliderAdditional {
/**
 * Returns the bounds of the slider track.
 * NOTE(review): semantics inferred from method names and the UI delegate
 * classes listed above -- confirm against the implementations.
 */
public Rectangle getTrackRect();
/** Returns the size of a thumb. */
public Dimension getThumbSize();
/** Returns the x pixel position corresponding to the given slider value. */
public int xPositionForValue(int value);
/** Returns the y pixel position corresponding to the given slider value. */
public int yPositionForValue(int value);
}
| apache-2.0 |
LorenzReinhart/ONOSnew | incubator/net/src/main/java/org/onosproject/incubator/net/virtual/impl/VirtualNetworkFlowObjectiveManager.java | 29274 | /*
* Copyright 2017-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.incubator.net.virtual.impl;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalCause;
import com.google.common.cache.RemovalNotification;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.util.KryoNamespace;
import org.onosproject.incubator.net.virtual.AbstractVnetService;
import org.onosproject.incubator.net.virtual.NetworkId;
import org.onosproject.incubator.net.virtual.VirtualNetworkFlowObjectiveStore;
import org.onosproject.incubator.net.virtual.VirtualNetworkService;
import org.onosproject.net.DeviceId;
import org.onosproject.net.behaviour.NextGroup;
import org.onosproject.net.behaviour.Pipeliner;
import org.onosproject.net.behaviour.PipelinerContext;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.driver.AbstractHandlerBehaviour;
import org.onosproject.net.flow.DefaultFlowRule;
import org.onosproject.net.flow.DefaultTrafficSelector;
import org.onosproject.net.flow.DefaultTrafficTreatment;
import org.onosproject.net.flow.FlowRule;
import org.onosproject.net.flow.FlowRuleOperations;
import org.onosproject.net.flow.FlowRuleOperationsContext;
import org.onosproject.net.flow.FlowRuleService;
import org.onosproject.net.flow.TrafficSelector;
import org.onosproject.net.flow.TrafficTreatment;
import org.onosproject.net.flowobjective.FilteringObjective;
import org.onosproject.net.flowobjective.FlowObjectiveService;
import org.onosproject.net.flowobjective.FlowObjectiveStore;
import org.onosproject.net.flowobjective.FlowObjectiveStoreDelegate;
import org.onosproject.net.flowobjective.ForwardingObjective;
import org.onosproject.net.flowobjective.NextObjective;
import org.onosproject.net.flowobjective.Objective;
import org.onosproject.net.flowobjective.ObjectiveError;
import org.onosproject.net.flowobjective.ObjectiveEvent;
import org.onosproject.net.group.DefaultGroupKey;
import org.onosproject.net.group.GroupKey;
import org.slf4j.Logger;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.onlab.util.BoundedThreadPool.newFixedThreadPool;
import static org.onlab.util.Tools.groupedThreads;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Provides implementation of the flow objective programming service for virtual networks.
*/
// NOTE: This manager is designed to provide flow objective programming service
// for virtual networks. Actually, virtual networks don't need to consider
// the different implementation of data-path pipeline. But, the interfaces
// and usages of flow objective service are still valuable for virtual network.
// This manager is working as an interpreter from FlowObjective to FlowRules
// to provide symmetric interfaces with ONOS core services.
// The behaviours are based on DefaultSingleTablePipeline.
public class VirtualNetworkFlowObjectiveManager extends AbstractVnetService
implements FlowObjectiveService {
    /** Maximum number of attempts made to install an objective (see ObjectiveInstaller). */
    public static final int INSTALL_RETRY_ATTEMPTS = 5;
    /** Delay between installation attempts. */
    public static final long INSTALL_RETRY_INTERVAL = 1000; // ms
    private final Logger log = getLogger(getClass());
    // Device service scoped to this virtual network.
    protected DeviceService deviceService;
    // Note: The following dependencies are added on behalf of the pipeline
    // driver behaviours to assure these services are available for their
    // initialization.
    protected FlowRuleService flowRuleService;
    // Per-network store holding next-group state for this virtual network.
    protected VirtualNetworkFlowObjectiveStore virtualFlowObjectiveStore;
    // Adapter exposing the virtual store through the FlowObjectiveStore API
    // (StoreConvertor, created in the constructor).
    protected FlowObjectiveStore flowObjectiveStore;
    private final FlowObjectiveStoreDelegate delegate;
    private final PipelinerContext context = new InnerPipelineContext();
    private final Map<DeviceId, Pipeliner> pipeliners = Maps.newConcurrentMap();
    // local stores for queuing fwd and next objectives that are waiting for an
    // associated next objective execution to complete. The signal for completed
    // execution comes from a pipeline driver, in this or another controller
    // instance, via the DistributedFlowObjectiveStore.
    private final Map<Integer, Set<PendingFlowObjective>> pendingForwards =
            Maps.newConcurrentMap();
    private final Map<Integer, Set<PendingFlowObjective>> pendingNexts =
            Maps.newConcurrentMap();
    // local store to track which nextObjectives were sent to which device
    // for debugging purposes
    private Map<Integer, DeviceId> nextToDevice = Maps.newConcurrentMap();
    // Pool on which ObjectiveInstaller tasks are executed asynchronously.
    private ExecutorService executorService;
    /**
     * Creates a flow objective manager for the given virtual network.
     *
     * @param manager virtual network service used to look up per-network services
     * @param networkId identifier of the virtual network this manager serves
     */
    public VirtualNetworkFlowObjectiveManager(VirtualNetworkService manager,
                                              NetworkId networkId) {
        super(manager, networkId);
        deviceService = manager.get(networkId(), DeviceService.class);
        flowRuleService = manager.get(networkId(), FlowRuleService.class);
        // Bounded pool (see BoundedThreadPool.newFixedThreadPool) so objective
        // installation work cannot grow without limit.
        executorService = newFixedThreadPool(4, groupedThreads("onos/virtual/objective-installer", "%d", log));
        virtualFlowObjectiveStore =
                serviceDirectory.get(VirtualNetworkFlowObjectiveStore.class);
        // The delegate must be registered before the store is wrapped so that
        // next-group notifications can be delivered from the start.
        delegate = new InternalStoreDelegate();
        virtualFlowObjectiveStore.setDelegate(networkId(), delegate);
        flowObjectiveStore = new StoreConvertor();
    }
@Override
public void filter(DeviceId deviceId, FilteringObjective filteringObjective) {
executorService.execute(new ObjectiveInstaller(deviceId, filteringObjective));
}
    /**
     * Installs the given forwarding objective on the specified virtual device.
     *
     * Installs immediately when the objective references no next-id, is a
     * removal, or the referenced next group already exists. Otherwise the
     * objective is queued until the next group is created. Note that the last
     * condition, {@code !queueFwdObjective(...)}, is evaluated with the side
     * effect of queueing; it only yields true (fall through to immediate
     * installation) when the next group appeared in the meantime.
     */
    @Override
    public void forward(DeviceId deviceId, ForwardingObjective forwardingObjective) {
        if (forwardingObjective.nextId() == null ||
                forwardingObjective.op() == Objective.Operation.REMOVE ||
                flowObjectiveStore.getNextGroup(forwardingObjective.nextId()) != null ||
                !queueFwdObjective(deviceId, forwardingObjective)) {
            // fast path
            executorService.execute(new ObjectiveInstaller(deviceId, forwardingObjective));
        }
    }
    /**
     * Installs the given next objective on the specified virtual device.
     *
     * ADD operations and operations whose group already exists are executed
     * immediately; other operations are queued until the initial group
     * creation completes. As in {@link #forward}, the final condition
     * {@code !queueNextObjective(...)} queues as a side effect and only
     * falls through when the group appeared concurrently.
     */
    @Override
    public void next(DeviceId deviceId, NextObjective nextObjective) {
        // Remember which device this next objective was sent to (debugging aid).
        nextToDevice.put(nextObjective.id(), deviceId);
        if (nextObjective.op() == Objective.Operation.ADD ||
                flowObjectiveStore.getNextGroup(nextObjective.id()) != null ||
                !queueNextObjective(deviceId, nextObjective)) {
            // either group exists or we are trying to create it - let it through
            executorService.execute(new ObjectiveInstaller(deviceId, nextObjective));
        }
    }
@Override
public int allocateNextId() {
return flowObjectiveStore.allocateNextId();
}
    @Override
    public void initPolicy(String policy) {
        // Intentionally a no-op: policy initialization is not supported by
        // this virtual-network flow objective implementation.
    }
    /**
     * Returns a human-readable description of each known next-id: the device
     * it was sent to, followed by any driver-provided group mappings.
     *
     * @return list of next-id mapping descriptions
     */
    @Override
    public List<String> getNextMappings() {
        List<String> mappings = new ArrayList<>();
        Map<Integer, NextGroup> allnexts = flowObjectiveStore.getAllGroups();
        // XXX if the NextGroup after de-serialization actually stored info of the deviceId
        // then info on any nextObj could be retrieved from one controller instance.
        // Right now the drivers on one instance can only fetch for next-ids that came
        // to them.
        // Also, we still need to send the right next-id to the right driver as potentially
        // there can be different drivers for different devices. But on that account,
        // no instance should be decoding for another instance's nextIds.
        for (Map.Entry<Integer, NextGroup> e : allnexts.entrySet()) {
            // get the device this next Objective was sent to
            DeviceId deviceId = nextToDevice.get(e.getKey());
            mappings.add("NextId " + e.getKey() + ": " +
                    ((deviceId != null) ? deviceId : "nextId not in this onos instance"));
            if (deviceId != null) {
                // this instance of the controller sent the nextObj to a driver
                // NOTE(review): getDevicePipeliner may return null for an
                // unknown device -- confirm before dereferencing.
                Pipeliner pipeliner = getDevicePipeliner(deviceId);
                List<String> nextMappings = pipeliner.getNextMappings(e.getValue());
                if (nextMappings != null) {
                    mappings.addAll(nextMappings);
                }
            }
        }
        return mappings;
    }
@Override
public List<String> getPendingFlowObjectives() {
List<String> pendingFlowObjectives = new ArrayList<>();
for (Integer nextId : pendingForwards.keySet()) {
Set<PendingFlowObjective> pfwd = pendingForwards.get(nextId);
StringBuilder pend = new StringBuilder();
pend.append("NextId: ")
.append(nextId);
for (PendingFlowObjective pf : pfwd) {
pend.append("\n FwdId: ")
.append(String.format("%11s", pf.flowObjective().id()))
.append(", DeviceId: ")
.append(pf.deviceId())
.append(", Selector: ")
.append(((ForwardingObjective) pf.flowObjective())
.selector().criteria());
}
pendingFlowObjectives.add(pend.toString());
}
for (Integer nextId : pendingNexts.keySet()) {
Set<PendingFlowObjective> pnext = pendingNexts.get(nextId);
StringBuilder pend = new StringBuilder();
pend.append("NextId: ")
.append(nextId);
for (PendingFlowObjective pn : pnext) {
pend.append("\n NextOp: ")
.append(pn.flowObjective().op())
.append(", DeviceId: ")
.append(pn.deviceId())
.append(", Treatments: ")
.append(((NextObjective) pn.flowObjective())
.next());
}
pendingFlowObjectives.add(pend.toString());
}
return pendingFlowObjectives;
}
    @Override
    public List<String> getPendingNexts() {
        // Pending next objectives are rendered together with pending
        // forwarding objectives; reuse that combined dump wholesale.
        return getPendingFlowObjectives();
    }
private boolean queueFwdObjective(DeviceId deviceId, ForwardingObjective fwd) {
boolean queued = false;
synchronized (pendingForwards) {
// double check the flow objective store, because this block could run
// after a notification arrives
if (flowObjectiveStore.getNextGroup(fwd.nextId()) == null) {
pendingForwards.compute(fwd.nextId(), (id, pending) -> {
PendingFlowObjective pendfo = new PendingFlowObjective(deviceId, fwd);
if (pending == null) {
return Sets.newHashSet(pendfo);
} else {
pending.add(pendfo);
return pending;
}
});
queued = true;
}
}
if (queued) {
log.debug("Queued forwarding objective {} for nextId {} meant for device {}",
fwd.id(), fwd.nextId(), deviceId);
}
return queued;
}
private boolean queueNextObjective(DeviceId deviceId, NextObjective next) {
// we need to hold off on other operations till we get notified that the
// initial group creation has succeeded
boolean queued = false;
synchronized (pendingNexts) {
// double check the flow objective store, because this block could run
// after a notification arrives
if (flowObjectiveStore.getNextGroup(next.id()) == null) {
pendingNexts.compute(next.id(), (id, pending) -> {
PendingFlowObjective pendfo = new PendingFlowObjective(deviceId, next);
if (pending == null) {
return Sets.newHashSet(pendfo);
} else {
pending.add(pendfo);
return pending;
}
});
queued = true;
}
}
if (queued) {
log.debug("Queued next objective {} with operation {} meant for device {}",
next.id(), next.op(), deviceId);
}
return queued;
}
/**
* Task that passes the flow objective down to the driver. The task will
* make a few attempts to find the appropriate driver, then eventually give
* up and report an error if no suitable driver could be found.
*/
private class ObjectiveInstaller implements Runnable {
private final DeviceId deviceId;
private final Objective objective;
private final int numAttempts;
public ObjectiveInstaller(DeviceId deviceId, Objective objective) {
this(deviceId, objective, 1);
}
public ObjectiveInstaller(DeviceId deviceId, Objective objective, int attemps) {
this.deviceId = checkNotNull(deviceId);
this.objective = checkNotNull(objective);
this.numAttempts = checkNotNull(attemps);
}
@Override
public void run() {
try {
Pipeliner pipeliner = getDevicePipeliner(deviceId);
if (pipeliner != null) {
if (objective instanceof NextObjective) {
nextToDevice.put(objective.id(), deviceId);
pipeliner.next((NextObjective) objective);
} else if (objective instanceof ForwardingObjective) {
pipeliner.forward((ForwardingObjective) objective);
} else {
pipeliner.filter((FilteringObjective) objective);
}
//Attempts to check if pipeliner is null for retry attempts
} else if (numAttempts < INSTALL_RETRY_ATTEMPTS) {
Thread.sleep(INSTALL_RETRY_INTERVAL);
executorService.execute(new ObjectiveInstaller(deviceId, objective, numAttempts + 1));
} else {
// Otherwise we've tried a few times and failed, report an
// error back to the user.
objective.context().ifPresent(
c -> c.onError(objective, ObjectiveError.NOPIPELINER));
}
//Exception thrown
} catch (Exception e) {
log.warn("Exception while installing flow objective", e);
}
}
}
    // Store delegate: when the store reports that a next group was added,
    // replay all objectives that were queued waiting for that next-id.
    private class InternalStoreDelegate implements FlowObjectiveStoreDelegate {
        @Override
        public void notify(ObjectiveEvent event) {
            if (event.type() == ObjectiveEvent.Type.ADD) {
                log.debug("Received notification of obj event {}", event);
                Set<PendingFlowObjective> pending;
                // first send all pending flows
                synchronized (pendingForwards) {
                    // needs to be synchronized for queueObjective lookup
                    pending = pendingForwards.remove(event.subject());
                }
                if (pending == null) {
                    log.debug("No forwarding objectives pending for this "
                            + "obj event {}", event);
                } else {
                    log.debug("Processing {} pending forwarding objectives for nextId {}",
                            pending.size(), event.subject());
                    // Replay each parked forwarding objective on the pipeliner
                    // of the device it was originally destined for.
                    pending.forEach(p -> getDevicePipeliner(p.deviceId())
                            .forward((ForwardingObjective) p.flowObjective()));
                }
                // now check for pending next-objectives
                synchronized (pendingNexts) {
                    // needs to be synchronized for queueObjective lookup
                    pending = pendingNexts.remove(event.subject());
                }
                if (pending == null) {
                    log.debug("No next objectives pending for this "
                            + "obj event {}", event);
                } else {
                    log.debug("Processing {} pending next objectives for nextId {}",
                            pending.size(), event.subject());
                    pending.forEach(p -> getDevicePipeliner(p.deviceId())
                            .next((NextObjective) p.flowObjective()));
                }
            }
        }
    }
/**
* Retrieves (if it exists) the device pipeline behaviour from the cache.
* Otherwise it warms the caches and triggers the init method of the Pipeline.
* For virtual network, it returns OVS pipeliner.
*
* @param deviceId the id of the device associated to the pipeline
* @return the implementation of the Pipeliner behaviour
*/
private Pipeliner getDevicePipeliner(DeviceId deviceId) {
return pipeliners.computeIfAbsent(deviceId, this::initPipelineHandler);
}
/**
* Creates and initialize {@link Pipeliner}.
* <p>
* Note: Expected to be called under per-Device lock.
* e.g., {@code pipeliners}' Map#compute family methods
*
* @param deviceId Device to initialize pipeliner
* @return {@link Pipeliner} instance or null
*/
private Pipeliner initPipelineHandler(DeviceId deviceId) {
//FIXME: do we need a standard pipeline for virtual device?
Pipeliner pipeliner = new DefaultVirtualDevicePipeline();
pipeliner.init(deviceId, context);
return pipeliner;
}
    // Processing context for initializing pipeline driver behaviours.
    private class InnerPipelineContext implements PipelinerContext {
        // Service directory pipeliners use to resolve platform services.
        @Override
        public ServiceDirectory directory() {
            return serviceDirectory;
        }
        // Flow objective store handed to pipeliners (the network-scoped
        // wrapper held by this manager).
        @Override
        public FlowObjectiveStore store() {
            return flowObjectiveStore;
        }
    }
/**
* Data class used to hold a pending flow objective that could not
* be processed because the associated next object was not present.
* Note that this pending flow objective could be a forwarding objective
* waiting for a next objective to complete execution. Or it could a
* next objective (with a different operation - remove, addToExisting, or
* removeFromExisting) waiting for a next objective with the same id to
* complete execution.
*/
private class PendingFlowObjective {
private final DeviceId deviceId;
private final Objective flowObj;
public PendingFlowObjective(DeviceId deviceId, Objective flowObj) {
this.deviceId = deviceId;
this.flowObj = flowObj;
}
public DeviceId deviceId() {
return deviceId;
}
public Objective flowObjective() {
return flowObj;
}
@Override
public int hashCode() {
return Objects.hash(deviceId, flowObj);
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof PendingFlowObjective)) {
return false;
}
final PendingFlowObjective other = (PendingFlowObjective) obj;
if (this.deviceId.equals(other.deviceId) &&
this.flowObj.equals(other.flowObj)) {
return true;
}
return false;
}
}
    /**
     * This class is a wrapping class from VirtualNetworkFlowObjectiveStore
     * to FlowObjectiveStore for PipelinerContext. Every call delegates to
     * the virtual store, scoped by this manager's networkId().
     */
    private class StoreConvertor implements FlowObjectiveStore {
        @Override
        public void setDelegate(FlowObjectiveStoreDelegate delegate) {
            virtualFlowObjectiveStore.setDelegate(networkId(), delegate);
        }
        @Override
        public void unsetDelegate(FlowObjectiveStoreDelegate delegate) {
            virtualFlowObjectiveStore.unsetDelegate(networkId(), delegate);
        }
        @Override
        public boolean hasDelegate() {
            return virtualFlowObjectiveStore.hasDelegate(networkId());
        }
        @Override
        public void putNextGroup(Integer nextId, NextGroup group) {
            virtualFlowObjectiveStore.putNextGroup(networkId(), nextId, group);
        }
        @Override
        public NextGroup getNextGroup(Integer nextId) {
            return virtualFlowObjectiveStore.getNextGroup(networkId(), nextId);
        }
        @Override
        public NextGroup removeNextGroup(Integer nextId) {
            return virtualFlowObjectiveStore.removeNextGroup(networkId(), nextId);
        }
        @Override
        public Map<Integer, NextGroup> getAllGroups() {
            return virtualFlowObjectiveStore.getAllGroups(networkId());
        }
        @Override
        public int allocateNextId() {
            return virtualFlowObjectiveStore.allocateNextId(networkId());
        }
    }
    /**
     * Simple single table pipeline abstraction for virtual networks.
     * Filtering and forwarding objectives become rules in one flow table;
     * next objectives are buffered briefly and expanded into flow rules when
     * the matching forwarding objective arrives.
     */
    private class DefaultVirtualDevicePipeline
            extends AbstractHandlerBehaviour implements Pipeliner {
        private final Logger log = getLogger(getClass());
        // Device this pipeliner drives; assigned in init().
        private DeviceId deviceId;
        // Next objectives parked until the matching forwarding objective
        // arrives; entries time out (reporting an error) after 20 seconds.
        private Cache<Integer, NextObjective> pendingNext;
        // Serializer for the group keys placed into the objective store.
        private KryoNamespace appKryo = new KryoNamespace.Builder()
                .register(GroupKey.class)
                .register(DefaultGroupKey.class)
                .register(SingleGroup.class)
                .register(byte[].class)
                .build("DefaultVirtualDevicePipeline");
        @Override
        public void init(DeviceId deviceId, PipelinerContext context) {
            this.deviceId = deviceId;
            // Only EXPIRED removals signal failure back to the submitter;
            // explicit invalidation (normal consumption) stays silent.
            pendingNext = CacheBuilder.newBuilder()
                    .expireAfterWrite(20, TimeUnit.SECONDS)
                    .removalListener((RemovalNotification<Integer, NextObjective> notification) -> {
                        if (notification.getCause() == RemovalCause.EXPIRED) {
                            notification.getValue().context()
                                    .ifPresent(c -> c.onError(notification.getValue(),
                                            ObjectiveError.FLOWINSTALLATIONFAILED));
                        }
                    }).build();
        }
        @Override
        public void filter(FilteringObjective filter) {
            TrafficTreatment.Builder actions;
            // PERMIT without meta punts to the controller; DENY drops.
            switch (filter.type()) {
            case PERMIT:
                actions = (filter.meta() == null) ?
                        DefaultTrafficTreatment.builder().punt() :
                        DefaultTrafficTreatment.builder(filter.meta());
                break;
            case DENY:
                actions = (filter.meta() == null) ?
                        DefaultTrafficTreatment.builder() :
                        DefaultTrafficTreatment.builder(filter.meta());
                actions.drop();
                break;
            default:
                // Unknown filter types fall back to dropping the traffic.
                log.warn("Unknown filter type: {}", filter.type());
                actions = DefaultTrafficTreatment.builder().drop();
            }
            // Match on the filter's conditions plus its key criterion.
            TrafficSelector.Builder selector = DefaultTrafficSelector.builder();
            filter.conditions().forEach(selector::add);
            if (filter.key() != null) {
                selector.add(filter.key());
            }
            FlowRule.Builder ruleBuilder = DefaultFlowRule.builder()
                    .forDevice(deviceId)
                    .withSelector(selector.build())
                    .withTreatment(actions.build())
                    .fromApp(filter.appId())
                    .withPriority(filter.priority());
            if (filter.permanent()) {
                ruleBuilder.makePermanent();
            } else {
                ruleBuilder.makeTemporary(filter.timeout());
            }
            installObjective(ruleBuilder, filter);
        }
        @Override
        public void forward(ForwardingObjective fwd) {
            TrafficSelector selector = fwd.selector();
            if (fwd.treatment() != null) {
                // Deal with SPECIFIC and VERSATILE in the same manner.
                FlowRule.Builder ruleBuilder = DefaultFlowRule.builder()
                        .forDevice(deviceId)
                        .withSelector(selector)
                        .fromApp(fwd.appId())
                        .withPriority(fwd.priority())
                        .withTreatment(fwd.treatment());
                if (fwd.permanent()) {
                    ruleBuilder.makePermanent();
                } else {
                    ruleBuilder.makeTemporary(fwd.timeout());
                }
                installObjective(ruleBuilder, fwd);
            } else {
                // No inline treatment: expand the buffered next objective
                // (one rule per treatment) if it is still in the cache.
                NextObjective nextObjective = pendingNext.getIfPresent(fwd.nextId());
                if (nextObjective != null) {
                    pendingNext.invalidate(fwd.nextId());
                    nextObjective.next().forEach(treat -> {
                        FlowRule.Builder ruleBuilder = DefaultFlowRule.builder()
                                .forDevice(deviceId)
                                .withSelector(selector)
                                .fromApp(fwd.appId())
                                .withPriority(fwd.priority())
                                .withTreatment(treat);
                        if (fwd.permanent()) {
                            ruleBuilder.makePermanent();
                        } else {
                            ruleBuilder.makeTemporary(fwd.timeout());
                        }
                        installObjective(ruleBuilder, fwd);
                    });
                } else {
                    // The next objective never arrived or already expired.
                    fwd.context().ifPresent(c -> c.onError(fwd,
                            ObjectiveError.GROUPMISSING));
                }
            }
        }
        // Applies the built rule as an add/remove flow operation and relays
        // success or failure to the objective's context.
        private void installObjective(FlowRule.Builder ruleBuilder, Objective objective) {
            FlowRuleOperations.Builder flowBuilder = FlowRuleOperations.builder();
            switch (objective.op()) {
            case ADD:
                flowBuilder.add(ruleBuilder.build());
                break;
            case REMOVE:
                flowBuilder.remove(ruleBuilder.build());
                break;
            default:
                log.warn("Unknown operation {}", objective.op());
            }
            flowRuleService.apply(flowBuilder.build(new FlowRuleOperationsContext() {
                @Override
                public void onSuccess(FlowRuleOperations ops) {
                    objective.context().ifPresent(context -> context.onSuccess(objective));
                }
                @Override
                public void onError(FlowRuleOperations ops) {
                    objective.context()
                            .ifPresent(context ->
                                    context.onError(objective,
                                            ObjectiveError.FLOWINSTALLATIONFAILED));
                }
            }));
        }
        @Override
        public void next(NextObjective nextObjective) {
            // Buffer the next objective for a later forward() call and record
            // a placeholder group in the store so waiters get notified.
            pendingNext.put(nextObjective.id(), nextObjective);
            flowObjectiveStore.putNextGroup(nextObjective.id(),
                    new SingleGroup(
                            new DefaultGroupKey(
                                    appKryo.serialize(nextObjective.id()))));
            nextObjective.context().ifPresent(context -> context.onSuccess(nextObjective));
        }
        @Override
        public List<String> getNextMappings(NextGroup nextGroup) {
            // Default single table pipeline does not use nextObjectives or groups
            return null;
        }
        // Minimal NextGroup implementation wrapping a single group key.
        private class SingleGroup implements NextGroup {
            private final GroupKey key;
            public SingleGroup(GroupKey key) {
                this.key = key;
            }
            public GroupKey key() {
                return key;
            }
            @Override
            public byte[] data() {
                return appKryo.serialize(key);
            }
        }
    }
}
| apache-2.0 |
oehme/analysing-gradle-performance | my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p463/Production9264.java | 1963 | package org.gradle.test.performance.mediummonolithicjavaproject.p463;
// Synthetic bean generated for Gradle build-performance testing: three
// references to neighbouring generated classes plus seven String properties,
// each with a trivial getter/setter pair. No behaviour beyond storage.
public class Production9264 {
    private Production9261 property0;
    public Production9261 getProperty0() {
        return property0;
    }
    public void setProperty0(Production9261 value) {
        property0 = value;
    }
    private Production9262 property1;
    public Production9262 getProperty1() {
        return property1;
    }
    public void setProperty1(Production9262 value) {
        property1 = value;
    }
    private Production9263 property2;
    public Production9263 getProperty2() {
        return property2;
    }
    public void setProperty2(Production9263 value) {
        property2 = value;
    }
    private String property3;
    public String getProperty3() {
        return property3;
    }
    public void setProperty3(String value) {
        property3 = value;
    }
    private String property4;
    public String getProperty4() {
        return property4;
    }
    public void setProperty4(String value) {
        property4 = value;
    }
    private String property5;
    public String getProperty5() {
        return property5;
    }
    public void setProperty5(String value) {
        property5 = value;
    }
    private String property6;
    public String getProperty6() {
        return property6;
    }
    public void setProperty6(String value) {
        property6 = value;
    }
    private String property7;
    public String getProperty7() {
        return property7;
    }
    public void setProperty7(String value) {
        property7 = value;
    }
    private String property8;
    public String getProperty8() {
        return property8;
    }
    public void setProperty8(String value) {
        property8 = value;
    }
    private String property9;
    public String getProperty9() {
        return property9;
    }
    public void setProperty9(String value) {
        property9 = value;
    }
} | apache-2.0 |
evandor/skysail-framework | skysail.server/src/io/skysail/server/security/AuthenticatedAuthorizer.java | 442 | package io.skysail.server.security;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.subject.Subject;
import org.restlet.Request;
import org.restlet.Response;
import org.restlet.security.Authorizer;
public class AuthenticatedAuthorizer extends Authorizer {
    /**
     * Grants access only when the current Shiro subject has already
     * authenticated; the request and response are not consulted.
     */
    @Override
    protected boolean authorize(Request request, Response response) {
        return SecurityUtils.getSubject().isAuthenticated();
    }
}
| apache-2.0 |
shiyi-zhang/javaLearn | src/main/java/org/zsy/repository/model/AppInfoCompetitor.java | 1538 | package org.zsy.repository.model;
import java.util.Date;
// Persistence model joining an app to a competitor app, with a growth figure
// and created/updated audit columns. Looks like MyBatis-Generator output —
// TODO confirm; the trim-or-null pattern in String setters is its signature.
public class AppInfoCompetitor {
    private String appId;
    private String competitorId;
    private Double grow;
    private String createdBy;
    private Date createdTime;
    private String updatedBy;
    private Date updatedTime;
    public String getAppId() {
        return appId;
    }
    public void setAppId(String appId) {
        // String setters trim incoming values and pass null through unchanged.
        this.appId = appId == null ? null : appId.trim();
    }
    public String getCompetitorId() {
        return competitorId;
    }
    public void setCompetitorId(String competitorId) {
        this.competitorId = competitorId == null ? null : competitorId.trim();
    }
    public Double getGrow() {
        return grow;
    }
    public void setGrow(Double grow) {
        this.grow = grow;
    }
    public String getCreatedBy() {
        return createdBy;
    }
    public void setCreatedBy(String createdBy) {
        this.createdBy = createdBy == null ? null : createdBy.trim();
    }
    public Date getCreatedTime() {
        return createdTime;
    }
    public void setCreatedTime(Date createdTime) {
        this.createdTime = createdTime;
    }
    public String getUpdatedBy() {
        return updatedBy;
    }
    public void setUpdatedBy(String updatedBy) {
        this.updatedBy = updatedBy == null ? null : updatedBy.trim();
    }
    public Date getUpdatedTime() {
        return updatedTime;
    }
    public void setUpdatedTime(Date updatedTime) {
        this.updatedTime = updatedTime;
    }
} | apache-2.0 |
alibaba/atlas | atlas-gradle-plugin/dexpatch/src/main/java/com/taobao/android/dx/cf/iface/AttributeList.java | 2402 | /*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.taobao.android.dx.cf.iface;
/**
 * Interface for lists of class-file attributes, in the order they appear
 * in the underlying class file.
 */
public interface AttributeList {
    /**
     * Get whether this instance is mutable. Note that the
     * {@code AttributeList} interface itself doesn't provide any means
     * of mutation, but that doesn't mean that there isn't a non-interface
     * way of mutating an instance.
     *
     * @return {@code true} iff this instance is somehow mutable
     */
    public boolean isMutable();
    /**
     * Get the number of attributes in the list.
     *
     * @return the size
     */
    public int size();
    /**
     * Get the {@code n}th attribute.
     *
     * @param n {@code n >= 0, n < size();} which attribute
     * @return {@code non-null;} the attribute in question
     */
    public Attribute get(int n);
    /**
     * Get the total length of this list in bytes, when part of a
     * class file. The returned value includes the two bytes for the
     * {@code attributes_count} length indicator.
     *
     * @return {@code >= 2;} the total length, in bytes
     */
    public int byteLength();
    /**
     * Get the first attribute in the list with the given name, if any.
     *
     * @param name {@code non-null;} attribute name
     * @return {@code null-ok;} first attribute in the list with the given name,
     * or {@code null} if there is none
     */
    public Attribute findFirst(String name);
    /**
     * Get the next attribute in the list after the given one, with the same
     * name, if any. The search starts strictly after {@code attrib} and
     * compares attributes by name.
     *
     * @param attrib {@code non-null;} attribute to start looking after
     * @return {@code null-ok;} next attribute after {@code attrib} with the
     * same name as {@code attrib}, or {@code null} if there is none
     */
    public Attribute findNext(Attribute attrib);
}
| apache-2.0 |
soumabrata-chakraborty/hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/component/ComponentEvent.java | 2302 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.service.component;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.event.AbstractEvent;
import org.apache.hadoop.yarn.service.component.instance.ComponentInstance;
// Event delivered to a YARN service Component's state machine. Carries the
// component name and event type, plus optional payload fields (container,
// instance, status, desired count) populated via fluent setters.
public class ComponentEvent extends AbstractEvent<ComponentEventType> {
    // Desired number of component instances (used by flex/scale events).
    private long desired;
    private final String name;
    private final ComponentEventType type;
    private Container container;
    private ComponentInstance instance;
    private ContainerStatus status;
    public ComponentEvent(String name, ComponentEventType type) {
        super(type);
        this.name = name;
        this.type = type;
    }
    public String getName() {
        return name;
    }
    public ComponentEventType getType() {
        return type;
    }
    public long getDesired() {
        return desired;
    }
    // Fluent setters return this so callers can chain payload assignments.
    public ComponentEvent setDesired(long desired) {
        this.desired = desired;
        return this;
    }
    public Container getContainer() {
        return container;
    }
    public ComponentEvent setContainer(Container container) {
        this.container = container;
        return this;
    }
    public ComponentInstance getInstance() {
        return instance;
    }
    public ComponentEvent setInstance(ComponentInstance instance) {
        this.instance = instance;
        return this;
    }
    public ContainerStatus getStatus() {
        return status;
    }
    public ComponentEvent setStatus(ContainerStatus status) {
        this.status = status;
        return this;
    }
}
| apache-2.0 |
NikitashP/spring-cloud-microservice | price-microservice/src/main/java/com/globomart/pricing/PricingServiceApplication.java | 584 | package com.globomart.pricing;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.netflix.hystrix.EnableHystrix;
import org.springframework.cloud.netflix.zuul.EnableZuulProxy;
/**
 * Spring Boot entry point for the pricing microservice. Registers with the
 * discovery server, exposes itself as a Zuul proxy, and enables Hystrix
 * circuit breakers for downstream calls.
 */
@SpringBootApplication
@EnableDiscoveryClient
@EnableZuulProxy
@EnableHystrix
public class PricingServiceApplication {
    public static void main(String[] args) {
        SpringApplication.run(PricingServiceApplication.class, args);
    }
}
| apache-2.0 |
mksmbrtsh/LLRPexplorer | src/org/llrp/ltk/generated/parameters/SpecIndex.java | 7026 | /*
*
* This file was generated by LLRP Code Generator
* see http://llrp-toolkit.cvs.sourceforge.net/llrp-toolkit/
* for more information
* Generated on: Sun Apr 08 14:14:11 EDT 2012;
*
*/
/*
* Copyright 2007 ETH Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
*
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*
*/
package org.llrp.ltk.generated.parameters;
import maximsblog.blogspot.com.llrpexplorer.Logger;
import org.jdom2.Content;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.Namespace;
import org.llrp.ltk.exceptions.InvalidLLRPMessageException;
import org.llrp.ltk.exceptions.MissingParameterException;
import org.llrp.ltk.generated.LLRPConstants;
import org.llrp.ltk.types.LLRPBitList;
import org.llrp.ltk.types.LLRPMessage;
import org.llrp.ltk.types.SignedShort;
import org.llrp.ltk.types.TLVParameter;
import org.llrp.ltk.types.TVParameter;
import org.llrp.ltk.types.UnsignedShort;
import java.util.LinkedList;
import java.util.List;
/**
 * This parameter carries the SpecIndex information. The SpecIndex indicates
 * the item within the ROSpec that was being executed at the time the tag was
 * observed.
 * See also {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=82&view=fit">LLRP Specification Section 13.2.3.4</a>}
 * and {@link <a href="http://www.epcglobalinc.org/standards/llrp/llrp_1_0_1-standard-20070813.pdf#page=145&view=fit">LLRP Specification Section 16.2.7.3.4</a>}.
 */
public class SpecIndex extends TVParameter {
    public static final SignedShort TYPENUM = new SignedShort(14);
    private static final Logger LOGGER = Logger.getLogger(SpecIndex.class);
    // The single 16-bit field carried by this TV parameter.
    protected UnsignedShort specIndex;
    /**
     * empty constructor to create new parameter.
     */
    public SpecIndex() {
    }
    /**
     * Constructor to create parameter from binary encoded parameter
     * calls decodeBinary to decode parameter.
     * @param list to be decoded
     */
    public SpecIndex(LLRPBitList list) {
        decodeBinary(list);
    }
    /**
     * Constructor to create parameter from xml encoded parameter
     * calls decodeXML to decode parameter.
     * @param element to be decoded
     */
    public SpecIndex(Element element) throws InvalidLLRPMessageException {
        decodeXML(element);
    }
    /**
     * {@inheritDoc}
     */
    public LLRPBitList encodeBinarySpecific() {
        LLRPBitList resultBits = new LLRPBitList();
        if (specIndex == null) {
            LOGGER.warn(" specIndex not set");
            throw new MissingParameterException(
                " specIndex not set for Parameter of Type SpecIndex");
        }
        resultBits.append(specIndex.encodeBinary());
        return resultBits;
    }
    /**
     * {@inheritDoc}
     */
    public Content encodeXML(String name, Namespace ns) {
        // element in namespace defined by parent element
        Element element = new Element(name, ns);
        // child element are always in default LLRP namespace
        ns = Namespace.getNamespace("llrp", LLRPConstants.LLRPNAMESPACE);
        if (specIndex == null) {
            LOGGER.warn(" specIndex not set");
            throw new MissingParameterException(" specIndex not set");
        } else {
            element.addContent(specIndex.encodeXML("SpecIndex", ns));
        }
        //parameters
        return element;
    }
    /**
     * {@inheritDoc}
     */
    protected void decodeBinarySpecific(LLRPBitList binary) {
        // TV parameter layout is fixed: the only field is the 16-bit
        // SpecIndex. (Generator-emitted dead locals removed.)
        specIndex = new UnsignedShort(binary.subList(0, UnsignedShort.length()));
    }
    /**
     * {@inheritDoc}
     */
    public void decodeXML(Element element) throws InvalidLLRPMessageException {
        // child element are always in default LLRP namespace
        Namespace ns = Namespace.getNamespace(LLRPConstants.LLRPNAMESPACE);
        Element temp = element.getChild("SpecIndex", ns);
        if (temp != null) {
            specIndex = new UnsignedShort(temp);
        }
        element.removeChild("SpecIndex", ns);
        // Any element left over after consuming SpecIndex is unexpected.
        if (element.getChildren().size() > 0) {
            String message = "SpecIndex has unknown element " +
                ((Element) element.getChildren().get(0)).getName();
            throw new InvalidLLRPMessageException(message);
        }
    }
    //setters
    /**
     * set specIndex of type UnsignedShort .
     * @param specIndex to be set
     */
    public void setSpecIndex(final UnsignedShort specIndex) {
        this.specIndex = specIndex;
    }
    // end setter
    //getters
    /**
     * get specIndex of type UnsignedShort.
     * @return type UnsignedShort to be set
     */
    public UnsignedShort getSpecIndex() {
        return this.specIndex;
    }
    // end getters
    //add methods
    // end add
    /**
     * return length of parameter. For TV Parameter it is always length of its field plus 8 bits for type.
     * @return Integer giving length
     */
    public static Integer length() {
        int tempLength = PARAMETERTYPELENGTH;
        // the length of a TV parameter in bits is always the type
        tempLength += UnsignedShort.length();
        return tempLength;
    }
    /**
     * {@inheritDoc}
     */
    public SignedShort getTypeNum() {
        return TYPENUM;
    }
    /**
     * {@inheritDoc}
     */
    public String getName() {
        return "SpecIndex";
    }
    /**
     * return string representation. All field values but no parameters are included
     * @return String
     */
    public String toString() {
        String result = "SpecIndex: ";
        result += ", specIndex: ";
        result += specIndex;
        result = result.replaceFirst(", ", "");
        return result;
    }
}
| apache-2.0 |
yangjiandong/MobileBase.G | MobileBase/src/main/java/com/ek/mobileapp/widget/KsToggleButton.java | 2020 | package com.ek.mobileapp.widget;
import com.ek.mobilebapp.R;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.widget.ToggleButton;
public class KsToggleButton extends ToggleButton {
private Drawable enableChecked = null;
private Drawable disableChecked = null;
private Drawable enable = null;
private Drawable disable = null;
public KsToggleButton(Context paramContext) {
super(paramContext);
a(paramContext);
}
public KsToggleButton(Context paramContext, AttributeSet paramAttributeSet) {
super(paramContext, paramAttributeSet);
a(paramContext);
}
public KsToggleButton(Context paramContext, AttributeSet paramAttributeSet, int paramInt) {
super(paramContext, paramAttributeSet, paramInt);
a(paramContext);
}
private void setDr() {
if (super.isChecked()) {
if (super.isEnabled())
setBackgroundDrawable(this.enableChecked);
else {
setBackgroundDrawable(this.disableChecked);
}
} else {
setBackgroundDrawable(this.enable);
if (super.isEnabled())
setBackgroundDrawable(this.enable);
else
setBackgroundDrawable(this.disable);
}
}
protected void a(Context paramContext) {
this.disableChecked = paramContext.getResources().getDrawable(R.drawable.toggle_btn_on_disable_background);
this.enableChecked = paramContext.getResources().getDrawable(R.drawable.toggle_btn_on_background);
this.enable = paramContext.getResources().getDrawable(R.drawable.toggle_btn_off_background);
this.disable = paramContext.getResources().getDrawable(R.drawable.toggle_btn_off_disable_background);
}
protected void onFinishInflate() {
setDr();
}
public void refreshDrawableState() {
super.refreshDrawableState();
setDr();
}
}
| apache-2.0 |
hxgJG/WSNMonitor-master | app/src/main/java/com/app/hexuegang/wsnmonitor/mywidget/MyAlertDialog.java | 6404 | package com.app.hexuegang.wsnmonitor.mywidget;
import android.annotation.SuppressLint;
import android.app.Dialog;
import android.content.Context;
import android.util.DisplayMetrics;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.app.hexuegang.wsnmonitor.R;
/**
* Created by hexuegang on 2017/1/20.
*/
public class MyAlertDialog
{
    // Hosting context; used to inflate the layout and resolve string/drawable resources.
    private Context context;
    // The underlying dialog instance; created in Builder(). NOTE(review): all setters
    // dereference views created in Builder(), so Builder() must be called first or
    // they will throw a NullPointerException — TODO confirm this is the intended contract.
    private Dialog dialog;
    // Root layout of the dialog content; resized in Builder() to 85% of screen width.
    private LinearLayout lLayout_bg;
    private TextView txt_title;
    private TextView txt_msg;
    private Button btn_neg;
    private Button btn_pos;
    // Vertical divider shown only when both positive and negative buttons are visible.
    private ImageView img_line;
    // Screen metrics captured at construction time; used to size the dialog.
    private DisplayMetrics displayMetrics;
    // Flags recording which parts were configured; consumed by setLayout() at show() time.
    private boolean showTitle = false;
    private boolean showMsg = false;
    private boolean showPosBtn = false;
    private boolean showNegBtn = false;
    /**
     * Creates the helper and captures the current display metrics.
     * Does not build any views; call {@link #Builder()} before any setter.
     */
    public MyAlertDialog(Context context)
    {
        this.context = context;
        WindowManager windowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        displayMetrics=new DisplayMetrics();
        windowManager.getDefaultDisplay().getMetrics(displayMetrics);
    }
    /**
     * Inflates the dialog layout, hides every optional view, and creates the Dialog.
     * Must be invoked exactly once before the other configuration methods.
     *
     * @return this builder, for call chaining
     */
    @SuppressLint("InflateParams")
    public MyAlertDialog Builder()
    {
        View view = LayoutInflater.from(context).inflate(R.layout.dialog_alert, null);
        lLayout_bg = (LinearLayout) view.findViewById(R.id.lLayout_bg);
        txt_title = (TextView) view.findViewById(R.id.txt_title);
        txt_title.setVisibility(View.GONE);
        txt_msg = (TextView) view.findViewById(R.id.txt_msg);
        txt_msg.setVisibility(View.GONE);
        btn_neg = (Button) view.findViewById(R.id.btn_neg);
        btn_neg.setVisibility(View.GONE);
        btn_pos = (Button) view.findViewById(R.id.btn_pos);
        btn_pos.setVisibility(View.GONE);
        img_line = (ImageView) view.findViewById(R.id.img_line);
        img_line.setVisibility(View.GONE);
        // Define the dialog layout and parameters.
        dialog = new Dialog(context, R.style.MyDialogStyle);
        dialog.setContentView(view);
        // Resize the dialog background to 85% of the screen width.
        lLayout_bg.setLayoutParams(new FrameLayout.LayoutParams((int) (displayMetrics.widthPixels * 0.85), WindowManager.LayoutParams.WRAP_CONTENT));
        return this;
    }
    /**
     * Sets the title text; an empty string falls back to the default title.
     * NOTE(review): a null title is not handled and would be set verbatim — TODO confirm.
     */
    public MyAlertDialog setTitle(String title)
    {
        showTitle = true;
        if ("".equals(title))
        {
            txt_title.setText("提示");
        }
        else
        {
            txt_title.setText(title);
        }
        return this;
    }
    /** Centers the message text horizontally and vertically. */
    public MyAlertDialog setAlignCenter()
    {
        txt_msg.setGravity(Gravity.CENTER);
        return this;
    }
    /** Sets the title from a string resource id. */
    public MyAlertDialog setTitle(int resid)
    {
        return this.setTitle(context.getResources().getString(resid));
    }
    /** Sets the message text; an empty string falls back to the default message. */
    public MyAlertDialog setMsg(String msg)
    {
        showMsg = true;
        if ("".equals(msg)) {
            txt_msg.setText("内容");
        } else {
            txt_msg.setText(msg);
        }
        return this;
    }
    /** Sets the message from a string resource id. */
    public MyAlertDialog setMsg(int resid)
    {
        return this.setMsg(context.getResources().getString(resid));
    }
    /** Controls whether the dialog can be cancelled (e.g. by the back button). */
    public MyAlertDialog setCancelable(boolean cancel)
    {
        dialog.setCancelable(cancel);
        return this;
    }
    /**
     * Configures the positive button. The dialog is always dismissed on click,
     * before which the supplied listener (if any) is invoked.
     */
    public MyAlertDialog setPositiveButton(String text, final View.OnClickListener listener) {
        showPosBtn = true;
        if ("".equals(text))
        {
            btn_pos.setText("确定");
        } else {
            btn_pos.setText(text);
        }
        btn_pos.setOnClickListener(new View.OnClickListener()
        {
            @Override
            public void onClick(View v)
            {
                if(listener!=null)
                {
                    listener.onClick(v);
                }
                dialog.dismiss();
            }
        });
        return this;
    }
    /**
     * Configures the negative button. The dialog is always dismissed on click,
     * before which the supplied listener (if any) is invoked.
     */
    public MyAlertDialog setNegativeButton(String text, final View.OnClickListener listener) {
        showNegBtn = true;
        if ("".equals(text))
        {
            btn_neg.setText("取消");
        }
        else
        {
            btn_neg.setText(text);
        }
        btn_neg.setOnClickListener(new View.OnClickListener()
        {
            @Override
            public void onClick(View v) {
                if(listener!=null)
                {
                    listener.onClick(v);
                }
                dialog.dismiss();
            }
        });
        return this;
    }
    /**
     * Applies the configured visibility/background state to the views.
     * Called from {@link #show()} just before the dialog is displayed:
     * - no title and no message: show a default title;
     * - no buttons: show a single default dismiss button;
     * - both buttons: show both with left/right backgrounds and the divider;
     * - exactly one button: show it with the single-button background.
     */
    private void setLayout()
    {
        if (!showTitle && !showMsg)
        {
            txt_title.setText("提示");
            txt_title.setVisibility(View.VISIBLE);
        }
        if (showTitle)
        {
            txt_title.setVisibility(View.VISIBLE);
        }
        if (showMsg)
        {
            txt_msg.setVisibility(View.VISIBLE);
        }
        if (!showPosBtn && !showNegBtn)
        {
            btn_pos.setText("确定");
            btn_pos.setVisibility(View.VISIBLE);
            btn_pos.setBackgroundResource(R.drawable.selector_alertdialog_single);
            btn_pos.setOnClickListener(new View.OnClickListener()
            {
                @Override
                public void onClick(View v) {
                    dialog.dismiss();
                }
            });
        }
        if (showPosBtn && showNegBtn)
        {
            btn_pos.setVisibility(View.VISIBLE);
            btn_pos.setBackgroundResource(R.drawable.selector_alertdialog_right);
            btn_neg.setVisibility(View.VISIBLE);
            btn_neg.setBackgroundResource(R.drawable.selector_alertdialog_left);
            img_line.setVisibility(View.VISIBLE);
        }
        if (showPosBtn && !showNegBtn) {
            btn_pos.setVisibility(View.VISIBLE);
            btn_pos.setBackgroundResource(R.drawable.selector_alertdialog_single);
        }
        if (!showPosBtn && showNegBtn) {
            btn_neg.setVisibility(View.VISIBLE);
            btn_neg.setBackgroundResource(R.drawable.selector_alertdialog_single);
        }
    }
    /** Finalizes the layout and shows the dialog. */
    public void show()
    {
        setLayout();
        dialog.show();
    }
    /** Dismisses the dialog. */
    public void dismiss()
    {
        dialog.dismiss();
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-computeoptimizer/src/main/java/com/amazonaws/services/computeoptimizer/model/DeleteRecommendationPreferencesRequest.java | 19492 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.computeoptimizer.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/compute-optimizer-2019-11-01/DeleteRecommendationPreferences"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteRecommendationPreferencesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
    // NOTE: generated by the AWS SDK code generator — hand edits will be lost on regeneration.
    /**
     * <p>
     * The target resource type of the recommendation preference to delete.
     * </p>
     * <p>
     * The <code>Ec2Instance</code> option encompasses standalone instances and instances that are part of Auto Scaling
     * groups. The <code>AutoScalingGroup</code> option encompasses only instances that are part of an Auto Scaling
     * group.
     * </p>
     */
    private String resourceType;
    /**
     * <p>
     * An object that describes the scope of the recommendation preference to delete.
     * </p>
     * <p>
     * You can delete recommendation preferences that are created at the organization level (for management accounts of
     * an organization only), account level, and resource level. For more information, see <a
     * href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html">Activating
     * enhanced infrastructure metrics</a> in the <i>Compute Optimizer User Guide</i>.
     * </p>
     */
    private Scope scope;
    /**
     * <p>
     * The name of the recommendation preference to delete.
     * </p>
     * <p>
     * Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can be
     * activated through preferences. Therefore, it is also the only recommendation preference that can be deleted.
     * </p>
     */
    private java.util.List<String> recommendationPreferenceNames;
    /**
     * <p>
     * The target resource type of the recommendation preference to delete.
     * </p>
     * <p>
     * The <code>Ec2Instance</code> option encompasses standalone instances and instances that are part of Auto Scaling
     * groups. The <code>AutoScalingGroup</code> option encompasses only instances that are part of an Auto Scaling
     * group.
     * </p>
     *
     * @param resourceType
     *        The target resource type of the recommendation preference to delete.</p>
     *        <p>
     *        The <code>Ec2Instance</code> option encompasses standalone instances and instances that are part of Auto
     *        Scaling groups. The <code>AutoScalingGroup</code> option encompasses only instances that are part of an
     *        Auto Scaling group.
     * @see ResourceType
     */
    public void setResourceType(String resourceType) {
        this.resourceType = resourceType;
    }
    /**
     * <p>
     * The target resource type of the recommendation preference to delete.
     * </p>
     * <p>
     * The <code>Ec2Instance</code> option encompasses standalone instances and instances that are part of Auto Scaling
     * groups. The <code>AutoScalingGroup</code> option encompasses only instances that are part of an Auto Scaling
     * group.
     * </p>
     *
     * @return The target resource type of the recommendation preference to delete.</p>
     *         <p>
     *         The <code>Ec2Instance</code> option encompasses standalone instances and instances that are part of Auto
     *         Scaling groups. The <code>AutoScalingGroup</code> option encompasses only instances that are part of an
     *         Auto Scaling group.
     * @see ResourceType
     */
    public String getResourceType() {
        return this.resourceType;
    }
    /**
     * <p>
     * The target resource type of the recommendation preference to delete.
     * </p>
     * <p>
     * The <code>Ec2Instance</code> option encompasses standalone instances and instances that are part of Auto Scaling
     * groups. The <code>AutoScalingGroup</code> option encompasses only instances that are part of an Auto Scaling
     * group.
     * </p>
     *
     * @param resourceType
     *        The target resource type of the recommendation preference to delete.</p>
     *        <p>
     *        The <code>Ec2Instance</code> option encompasses standalone instances and instances that are part of Auto
     *        Scaling groups. The <code>AutoScalingGroup</code> option encompasses only instances that are part of an
     *        Auto Scaling group.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ResourceType
     */
    public DeleteRecommendationPreferencesRequest withResourceType(String resourceType) {
        setResourceType(resourceType);
        return this;
    }
    /**
     * <p>
     * The target resource type of the recommendation preference to delete.
     * </p>
     * <p>
     * The <code>Ec2Instance</code> option encompasses standalone instances and instances that are part of Auto Scaling
     * groups. The <code>AutoScalingGroup</code> option encompasses only instances that are part of an Auto Scaling
     * group.
     * </p>
     *
     * @param resourceType
     *        The target resource type of the recommendation preference to delete.</p>
     *        <p>
     *        The <code>Ec2Instance</code> option encompasses standalone instances and instances that are part of Auto
     *        Scaling groups. The <code>AutoScalingGroup</code> option encompasses only instances that are part of an
     *        Auto Scaling group.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see ResourceType
     */
    public DeleteRecommendationPreferencesRequest withResourceType(ResourceType resourceType) {
        // Stores the enum's string form directly; throws NPE if resourceType is null.
        this.resourceType = resourceType.toString();
        return this;
    }
    /**
     * <p>
     * An object that describes the scope of the recommendation preference to delete.
     * </p>
     * <p>
     * You can delete recommendation preferences that are created at the organization level (for management accounts of
     * an organization only), account level, and resource level. For more information, see <a
     * href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html">Activating
     * enhanced infrastructure metrics</a> in the <i>Compute Optimizer User Guide</i>.
     * </p>
     *
     * @param scope
     *        An object that describes the scope of the recommendation preference to delete.</p>
     *        <p>
     *        You can delete recommendation preferences that are created at the organization level (for management
     *        accounts of an organization only), account level, and resource level. For more information, see <a
     *        href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html"
     *        >Activating enhanced infrastructure metrics</a> in the <i>Compute Optimizer User Guide</i>.
     */
    public void setScope(Scope scope) {
        this.scope = scope;
    }
    /**
     * <p>
     * An object that describes the scope of the recommendation preference to delete.
     * </p>
     * <p>
     * You can delete recommendation preferences that are created at the organization level (for management accounts of
     * an organization only), account level, and resource level. For more information, see <a
     * href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html">Activating
     * enhanced infrastructure metrics</a> in the <i>Compute Optimizer User Guide</i>.
     * </p>
     *
     * @return An object that describes the scope of the recommendation preference to delete.</p>
     *         <p>
     *         You can delete recommendation preferences that are created at the organization level (for management
     *         accounts of an organization only), account level, and resource level. For more information, see <a
     *         href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html"
     *         >Activating enhanced infrastructure metrics</a> in the <i>Compute Optimizer User Guide</i>.
     */
    public Scope getScope() {
        return this.scope;
    }
    /**
     * <p>
     * An object that describes the scope of the recommendation preference to delete.
     * </p>
     * <p>
     * You can delete recommendation preferences that are created at the organization level (for management accounts of
     * an organization only), account level, and resource level. For more information, see <a
     * href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html">Activating
     * enhanced infrastructure metrics</a> in the <i>Compute Optimizer User Guide</i>.
     * </p>
     *
     * @param scope
     *        An object that describes the scope of the recommendation preference to delete.</p>
     *        <p>
     *        You can delete recommendation preferences that are created at the organization level (for management
     *        accounts of an organization only), account level, and resource level. For more information, see <a
     *        href="https://docs.aws.amazon.com/compute-optimizer/latest/ug/enhanced-infrastructure-metrics.html"
     *        >Activating enhanced infrastructure metrics</a> in the <i>Compute Optimizer User Guide</i>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DeleteRecommendationPreferencesRequest withScope(Scope scope) {
        setScope(scope);
        return this;
    }
    /**
     * <p>
     * The name of the recommendation preference to delete.
     * </p>
     * <p>
     * Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can be
     * activated through preferences. Therefore, it is also the only recommendation preference that can be deleted.
     * </p>
     *
     * @return The name of the recommendation preference to delete.</p>
     *         <p>
     *         Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can
     *         be activated through preferences. Therefore, it is also the only recommendation preference that can be
     *         deleted.
     * @see RecommendationPreferenceName
     */
    public java.util.List<String> getRecommendationPreferenceNames() {
        // Returns the live internal list (no defensive copy) — standard generated-SDK behavior.
        return recommendationPreferenceNames;
    }
    /**
     * <p>
     * The name of the recommendation preference to delete.
     * </p>
     * <p>
     * Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can be
     * activated through preferences. Therefore, it is also the only recommendation preference that can be deleted.
     * </p>
     *
     * @param recommendationPreferenceNames
     *        The name of the recommendation preference to delete.</p>
     *        <p>
     *        Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can
     *        be activated through preferences. Therefore, it is also the only recommendation preference that can be
     *        deleted.
     * @see RecommendationPreferenceName
     */
    public void setRecommendationPreferenceNames(java.util.Collection<String> recommendationPreferenceNames) {
        if (recommendationPreferenceNames == null) {
            this.recommendationPreferenceNames = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection does not affect this request.
        this.recommendationPreferenceNames = new java.util.ArrayList<String>(recommendationPreferenceNames);
    }
    /**
     * <p>
     * The name of the recommendation preference to delete.
     * </p>
     * <p>
     * Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can be
     * activated through preferences. Therefore, it is also the only recommendation preference that can be deleted.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setRecommendationPreferenceNames(java.util.Collection)} or
     * {@link #withRecommendationPreferenceNames(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param recommendationPreferenceNames
     *        The name of the recommendation preference to delete.</p>
     *        <p>
     *        Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can
     *        be activated through preferences. Therefore, it is also the only recommendation preference that can be
     *        deleted.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see RecommendationPreferenceName
     */
    public DeleteRecommendationPreferencesRequest withRecommendationPreferenceNames(String... recommendationPreferenceNames) {
        if (this.recommendationPreferenceNames == null) {
            setRecommendationPreferenceNames(new java.util.ArrayList<String>(recommendationPreferenceNames.length));
        }
        for (String ele : recommendationPreferenceNames) {
            this.recommendationPreferenceNames.add(ele);
        }
        return this;
    }
    /**
     * <p>
     * The name of the recommendation preference to delete.
     * </p>
     * <p>
     * Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can be
     * activated through preferences. Therefore, it is also the only recommendation preference that can be deleted.
     * </p>
     *
     * @param recommendationPreferenceNames
     *        The name of the recommendation preference to delete.</p>
     *        <p>
     *        Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can
     *        be activated through preferences. Therefore, it is also the only recommendation preference that can be
     *        deleted.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see RecommendationPreferenceName
     */
    public DeleteRecommendationPreferencesRequest withRecommendationPreferenceNames(java.util.Collection<String> recommendationPreferenceNames) {
        setRecommendationPreferenceNames(recommendationPreferenceNames);
        return this;
    }
    /**
     * <p>
     * The name of the recommendation preference to delete.
     * </p>
     * <p>
     * Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can be
     * activated through preferences. Therefore, it is also the only recommendation preference that can be deleted.
     * </p>
     *
     * @param recommendationPreferenceNames
     *        The name of the recommendation preference to delete.</p>
     *        <p>
     *        Enhanced infrastructure metrics (<code>EnhancedInfrastructureMetrics</code>) is the only feature that can
     *        be activated through preferences. Therefore, it is also the only recommendation preference that can be
     *        deleted.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see RecommendationPreferenceName
     */
    public DeleteRecommendationPreferencesRequest withRecommendationPreferenceNames(RecommendationPreferenceName... recommendationPreferenceNames) {
        // Converts each enum value to its string form, then appends (like the String... overload).
        java.util.ArrayList<String> recommendationPreferenceNamesCopy = new java.util.ArrayList<String>(recommendationPreferenceNames.length);
        for (RecommendationPreferenceName value : recommendationPreferenceNames) {
            recommendationPreferenceNamesCopy.add(value.toString());
        }
        if (getRecommendationPreferenceNames() == null) {
            setRecommendationPreferenceNames(recommendationPreferenceNamesCopy);
        } else {
            getRecommendationPreferenceNames().addAll(recommendationPreferenceNamesCopy);
        }
        return this;
    }
    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getResourceType() != null)
            sb.append("ResourceType: ").append(getResourceType()).append(",");
        if (getScope() != null)
            sb.append("Scope: ").append(getScope()).append(",");
        if (getRecommendationPreferenceNames() != null)
            sb.append("RecommendationPreferenceNames: ").append(getRecommendationPreferenceNames());
        sb.append("}");
        return sb.toString();
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof DeleteRecommendationPreferencesRequest == false)
            return false;
        DeleteRecommendationPreferencesRequest other = (DeleteRecommendationPreferencesRequest) obj;
        // XOR pattern: exactly one of the two references is null -> objects differ.
        if (other.getResourceType() == null ^ this.getResourceType() == null)
            return false;
        if (other.getResourceType() != null && other.getResourceType().equals(this.getResourceType()) == false)
            return false;
        if (other.getScope() == null ^ this.getScope() == null)
            return false;
        if (other.getScope() != null && other.getScope().equals(this.getScope()) == false)
            return false;
        if (other.getRecommendationPreferenceNames() == null ^ this.getRecommendationPreferenceNames() == null)
            return false;
        if (other.getRecommendationPreferenceNames() != null
                && other.getRecommendationPreferenceNames().equals(this.getRecommendationPreferenceNames()) == false)
            return false;
        return true;
    }
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getResourceType() == null) ? 0 : getResourceType().hashCode());
        hashCode = prime * hashCode + ((getScope() == null) ? 0 : getScope().hashCode());
        hashCode = prime * hashCode + ((getRecommendationPreferenceNames() == null) ? 0 : getRecommendationPreferenceNames().hashCode());
        return hashCode;
    }
    @Override
    public DeleteRecommendationPreferencesRequest clone() {
        // Shallow clone via Object.clone(); sufficient for this immutable-after-build request style.
        return (DeleteRecommendationPreferencesRequest) super.clone();
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-kinesis/src/main/java/com/amazonaws/services/kinesisfirehose/model/ContentEncoding.java | 1779 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisfirehose.model;
import javax.annotation.Generated;
/**
*
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
/**
 * Content encodings supported for delivery (e.g. by Kinesis Data Firehose HTTP endpoints).
 * Each constant carries the exact wire value used by the service API.
 */
public enum ContentEncoding {
    NONE("NONE"),
    GZIP("GZIP");

    // The service-facing string value. Made final: enum state must be immutable,
    // and the original non-final field allowed accidental mutation.
    private final String value;

    private ContentEncoding(String value) {
        this.value = value;
    }

    /** Returns the wire value of this encoding. */
    @Override
    public String toString() {
        return this.value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *        real value
     * @return ContentEncoding corresponding to the value
     *
     * @throws IllegalArgumentException
     *         If the specified value does not map to one of the known values in this enum.
     */
    public static ContentEncoding fromValue(String value) {
        if (value == null || "".equals(value)) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }
        // Linear scan is fine for a two-constant enum; matches on the wire value.
        for (ContentEncoding enumEntry : ContentEncoding.values()) {
            if (enumEntry.toString().equals(value)) {
                return enumEntry;
            }
        }
        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
| apache-2.0 |
vkolodrevskiy/spring-social-odnoklassniki | spring-social-odnoklassniki/src/main/java/org/springframework/social/odnoklassniki/config/xml/OdnoklassnikiNamespaceHandler.java | 1265 | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.social.odnoklassniki.config.xml;
import org.springframework.social.config.xml.AbstractProviderConfigBeanDefinitionParser;
import org.springframework.social.config.xml.AbstractProviderConfigNamespaceHandler;
/**
* {@link org.springframework.beans.factory.xml.NamespaceHandler} for Spring Social Odnoklassniki.
*
* @author vkolodrevskiy
*/
public class OdnoklassnikiNamespaceHandler extends AbstractProviderConfigNamespaceHandler {
    /**
     * Supplies the parser that translates the Odnoklassniki XML namespace elements
     * into Spring bean definitions.
     *
     * @return a new {@link OdnoklassnikiConfigBeanDefinitionParser} instance
     */
    @Override
    protected AbstractProviderConfigBeanDefinitionParser getProviderConfigBeanDefinitionParser() {
        return new OdnoklassnikiConfigBeanDefinitionParser();
    }
}
| apache-2.0 |
KustovAA/job4j_courses | chapter_002/src/main/java/ru/job4j/strategy/package-info.java | 171 | /**
* Шаблон стратегия.
*
* @author Andrey Kustov (mailto:kustov-duha13v@yandex.ru)
* @version $1.0$
* @since 12.04.2017
*/
package ru.job4j.strategy; | apache-2.0 |
whiteley/jetty8 | jetty-continuation/src/main/java/org/eclipse/jetty/continuation/FauxContinuation.java | 14561 | //
// ========================================================================
// Copyright (c) 1995-2013 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.continuation;
import java.util.ArrayList;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.ServletResponseWrapper;
import org.eclipse.jetty.continuation.ContinuationFilter.FilteredContinuation;
/* ------------------------------------------------------------ */
/**
* A blocking implementation of Continuation.
* This implementation of Continuation is used by the {@link ContinuationFilter}
* when there are is no native or asynchronous continuation type available.
*/
class FauxContinuation implements FilteredContinuation
{
// common exception used for all continuations.
// Turn on debug in ContinuationFilter to see real stack trace.
private final static ContinuationThrowable __exception = new ContinuationThrowable();
private static final int __HANDLING=1; // Request dispatched to filter/servlet
private static final int __SUSPENDING=2; // Suspend called, but not yet returned to container
private static final int __RESUMING=3; // resumed while suspending
private static final int __COMPLETING=4; // resumed while suspending or suspended
private static final int __SUSPENDED=5; // Suspended and parked
private static final int __UNSUSPENDING=6;
private static final int __COMPLETE=7;
private final ServletRequest _request;
private ServletResponse _response;
private int _state=__HANDLING;
private boolean _initial=true;
private boolean _resumed=false;
private boolean _timeout=false;
private boolean _responseWrapped=false;
private long _timeoutMs=30000; // TODO configure
private ArrayList<ContinuationListener> _listeners;
FauxContinuation(final ServletRequest request)
{
_request=request;
}
/* ------------------------------------------------------------ */
public void onComplete()
{
if (_listeners!=null)
for (ContinuationListener l:_listeners)
l.onComplete(this);
}
/* ------------------------------------------------------------ */
public void onTimeout()
{
if (_listeners!=null)
for (ContinuationListener l:_listeners)
l.onTimeout(this);
}
/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.continuation.Continuation#isResponseWrapped()
*/
public boolean isResponseWrapped()
{
return _responseWrapped;
}
/* ------------------------------------------------------------ */
public boolean isInitial()
{
synchronized(this)
{
return _initial;
}
}
/* ------------------------------------------------------------ */
public boolean isResumed()
{
synchronized(this)
{
return _resumed;
}
}
/* ------------------------------------------------------------ */
public boolean isSuspended()
{
synchronized(this)
{
switch(_state)
{
case __HANDLING:
return false;
case __SUSPENDING:
case __RESUMING:
case __COMPLETING:
case __SUSPENDED:
return true;
case __UNSUSPENDING:
default:
return false;
}
}
}
/* ------------------------------------------------------------ */
public boolean isExpired()
{
synchronized(this)
{
return _timeout;
}
}
/* ------------------------------------------------------------ */
public void setTimeout(long timeoutMs)
{
_timeoutMs = timeoutMs;
}
/* ------------------------------------------------------------ */
public void suspend(ServletResponse response)
{
_response=response;
_responseWrapped=response instanceof ServletResponseWrapper;
suspend();
}
/* ------------------------------------------------------------ */
public void suspend()
{
synchronized (this)
{
switch(_state)
{
case __HANDLING:
_timeout=false;
_resumed=false;
_state=__SUSPENDING;
return;
case __SUSPENDING:
case __RESUMING:
return;
case __COMPLETING:
case __SUSPENDED:
case __UNSUSPENDING:
throw new IllegalStateException(this.getStatusString());
default:
throw new IllegalStateException(""+_state);
}
}
}
/* ------------------------------------------------------------ */
/* (non-Javadoc)
* @see org.mortbay.jetty.Suspendor#resume()
*/
public void resume()
{
synchronized (this)
{
switch(_state)
{
case __HANDLING:
_resumed=true;
return;
case __SUSPENDING:
_resumed=true;
_state=__RESUMING;
return;
case __RESUMING:
case __COMPLETING:
return;
case __SUSPENDED:
fauxResume();
_resumed=true;
_state=__UNSUSPENDING;
break;
case __UNSUSPENDING:
_resumed=true;
return;
default:
throw new IllegalStateException(this.getStatusString());
}
}
}
/* ------------------------------------------------------------ */
public void complete()
{
// just like resume, except don't set _resumed=true;
synchronized (this)
{
switch(_state)
{
case __HANDLING:
throw new IllegalStateException(this.getStatusString());
case __SUSPENDING:
_state=__COMPLETING;
break;
case __RESUMING:
break;
case __COMPLETING:
return;
case __SUSPENDED:
_state=__COMPLETING;
fauxResume();
break;
case __UNSUSPENDING:
return;
default:
throw new IllegalStateException(this.getStatusString());
}
}
}
/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.continuation.Continuation#getServletResponse()
*/
public boolean enter(ServletResponse response)
{
_response=response;
return true;
}
/* ------------------------------------------------------------ */
/**
* @see org.eclipse.jetty.continuation.Continuation#getServletResponse()
*/
public ServletResponse getServletResponse()
{
return _response;
}
/* ------------------------------------------------------------ */
void handling()
{
synchronized (this)
{
_responseWrapped=false;
switch(_state)
{
case __HANDLING:
throw new IllegalStateException(this.getStatusString());
case __SUSPENDING:
case __RESUMING:
throw new IllegalStateException(this.getStatusString());
case __COMPLETING:
return;
case __SUSPENDED:
fauxResume();
case __UNSUSPENDING:
_state=__HANDLING;
return;
default:
throw new IllegalStateException(""+_state);
}
}
}
/* ------------------------------------------------------------ */
/**
* @return true if handling is complete
*/
public boolean exit()
{
synchronized (this)
{
switch(_state)
{
case __HANDLING:
_state=__COMPLETE;
onComplete();
return true;
case __SUSPENDING:
_initial=false;
_state=__SUSPENDED;
fauxSuspend(); // could block and change state.
if (_state==__SUSPENDED || _state==__COMPLETING)
{
onComplete();
return true;
}
_initial=false;
_state=__HANDLING;
return false;
case __RESUMING:
_initial=false;
_state=__HANDLING;
return false;
case __COMPLETING:
_initial=false;
_state=__COMPLETE;
onComplete();
return true;
case __SUSPENDED:
case __UNSUSPENDING:
default:
throw new IllegalStateException(this.getStatusString());
}
}
}
/* ------------------------------------------------------------ */
protected void expire()
{
// just like resume, except don't set _resumed=true;
synchronized (this)
{
_timeout=true;
}
onTimeout();
synchronized (this)
{
switch(_state)
{
case __HANDLING:
return;
case __SUSPENDING:
_timeout=true;
_state=__RESUMING;
fauxResume();
return;
case __RESUMING:
return;
case __COMPLETING:
return;
case __SUSPENDED:
_timeout=true;
_state=__UNSUSPENDING;
break;
case __UNSUSPENDING:
_timeout=true;
return;
default:
throw new IllegalStateException(this.getStatusString());
}
}
}
private void fauxSuspend()
{
long expire_at = System.currentTimeMillis()+_timeoutMs;
long wait=_timeoutMs;
while (_timeoutMs>0 && wait>0)
{
try
{
this.wait(wait);
}
catch (InterruptedException e)
{
break;
}
wait=expire_at-System.currentTimeMillis();
}
if (_timeoutMs>0 && wait<=0)
expire();
}
private void fauxResume()
{
_timeoutMs=0;
this.notifyAll();
}
@Override
public String toString()
{
return getStatusString();
}
String getStatusString()
{
synchronized (this)
{
return
((_state==__HANDLING)?"HANDLING":
(_state==__SUSPENDING)?"SUSPENDING":
(_state==__SUSPENDED)?"SUSPENDED":
(_state==__RESUMING)?"RESUMING":
(_state==__UNSUSPENDING)?"UNSUSPENDING":
(_state==__COMPLETING)?"COMPLETING":
("???"+_state))+
(_initial?",initial":"")+
(_resumed?",resumed":"")+
(_timeout?",timeout":"");
}
}
public void addContinuationListener(ContinuationListener listener)
{
if (_listeners==null)
_listeners=new ArrayList<ContinuationListener>();
_listeners.add(listener);
}
/* ------------------------------------------------------------ */
/**
 * Returns the named attribute of the wrapped request, or {@code null}
 * if no such attribute exists (straight delegation to {@code _request}).
 *
 * @param name the attribute name
 * @return the attribute value, or {@code null}
 * @see org.eclipse.jetty.continuation.Continuation#getAttribute(java.lang.String)
 */
public Object getAttribute(String name)
{
    return _request.getAttribute(name);
}
/* ------------------------------------------------------------ */
/**
 * Removes the named attribute from the wrapped request (straight
 * delegation to {@code _request}).
 *
 * @param name the attribute name to remove
 * @see org.eclipse.jetty.continuation.Continuation#removeAttribute(java.lang.String)
 */
public void removeAttribute(String name)
{
    _request.removeAttribute(name);
}
/* ------------------------------------------------------------ */
/**
 * Sets the named attribute on the wrapped request (straight delegation
 * to {@code _request}).
 *
 * @param name      the attribute name
 * @param attribute the value to store
 * @see org.eclipse.jetty.continuation.Continuation#setAttribute(java.lang.String, java.lang.Object)
 */
public void setAttribute(String name, Object attribute)
{
    _request.setAttribute(name,attribute);
}
/* ------------------------------------------------------------ */
/**
 * Aborts the current dispatch of a suspended request by throwing.
 * Throws {@code IllegalStateException} if the continuation is not
 * suspended.
 *
 * @see org.eclipse.jetty.continuation.Continuation#undispatch()
 */
public void undispatch()
{
    // Guard clause: undispatching only makes sense while suspended.
    if (!isSuspended())
        throw new IllegalStateException("!suspended");

    // In debug mode throw a fresh throwable (captures a stack trace for
    // diagnostics); otherwise rethrow the shared pre-allocated instance.
    if (ContinuationFilter.__debug)
        throw new ContinuationThrowable();
    throw __exception;
}
}
| apache-2.0 |
lgoldstein/communitychest | apps/apache/maven/pomrunner/src/main/java/net/community/apps/apache/maven/pomrunner/resources/ResourcesAnchor.java | 666 | package net.community.apps.apache.maven.pomrunner.resources;
import net.community.apps.common.resources.BaseAnchor;
/**
* <P>Copyright 2007 as per GPLv2</P>
*
* <P>Serves as access "anchor" for the various application resources</P>
*
* @author Lyor G.
* @since Aug 8, 2007 2:04:24 PM
*/
public class ResourcesAnchor extends BaseAnchor {
    /** Lazily created singleton instance. */
    private static ResourcesAnchor _instance /* =null */;

    // no instance - access only through getInstance()
    private ResourcesAnchor ()
    {
        super();
    }

    /**
     * @return the singleton resources anchor, created on first use
     */
    public static synchronized ResourcesAnchor getInstance ()
    {
        if (_instance == null)
            _instance = new ResourcesAnchor();
        return _instance;
    }
}
| apache-2.0 |
googleapis/java-dataflow | proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/Disk.java | 32878 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/dataflow/v1beta3/environment.proto
package com.google.dataflow.v1beta3;
/**
*
*
* <pre>
* Describes the data disk used by a workflow job.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.Disk}
*/
public final class Disk extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.dataflow.v1beta3.Disk)
DiskOrBuilder {
private static final long serialVersionUID = 0L;
// Use Disk.newBuilder() to construct.
private Disk(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Disk() {
diskType_ = "";
mountPoint_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Disk();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
private Disk(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
sizeGb_ = input.readInt32();
break;
}
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
diskType_ = s;
break;
}
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
mountPoint_ = s;
break;
}
default:
{
if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.EnvironmentProto
.internal_static_google_dataflow_v1beta3_Disk_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.EnvironmentProto
.internal_static_google_dataflow_v1beta3_Disk_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.Disk.class, com.google.dataflow.v1beta3.Disk.Builder.class);
}
public static final int SIZE_GB_FIELD_NUMBER = 1;
private int sizeGb_;
/**
*
*
* <pre>
* Size of disk in GB. If zero or unspecified, the service will
* attempt to choose a reasonable default.
* </pre>
*
* <code>int32 size_gb = 1;</code>
*
* @return The sizeGb.
*/
@java.lang.Override
public int getSizeGb() {
return sizeGb_;
}
public static final int DISK_TYPE_FIELD_NUMBER = 2;
private volatile java.lang.Object diskType_;
/**
*
*
* <pre>
* Disk storage type, as defined by Google Compute Engine. This
* must be a disk type appropriate to the project and zone in which
* the workers will run. If unknown or unspecified, the service
* will attempt to choose a reasonable default.
* For example, the standard persistent disk type is a resource name
* typically ending in "pd-standard". If SSD persistent disks are
* available, the resource name typically ends with "pd-ssd". The
* actual valid values are defined the Google Compute Engine API,
* not by the Cloud Dataflow API; consult the Google Compute Engine
* documentation for more information about determining the set of
* available disk types for a particular project and zone.
* Google Compute Engine Disk types are local to a particular
* project in a particular zone, and so the resource name will
* typically look something like this:
* compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard
* </pre>
*
* <code>string disk_type = 2;</code>
*
* @return The diskType.
*/
@java.lang.Override
public java.lang.String getDiskType() {
java.lang.Object ref = diskType_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
diskType_ = s;
return s;
}
}
/**
*
*
* <pre>
* Disk storage type, as defined by Google Compute Engine. This
* must be a disk type appropriate to the project and zone in which
* the workers will run. If unknown or unspecified, the service
* will attempt to choose a reasonable default.
* For example, the standard persistent disk type is a resource name
* typically ending in "pd-standard". If SSD persistent disks are
* available, the resource name typically ends with "pd-ssd". The
* actual valid values are defined the Google Compute Engine API,
* not by the Cloud Dataflow API; consult the Google Compute Engine
* documentation for more information about determining the set of
* available disk types for a particular project and zone.
* Google Compute Engine Disk types are local to a particular
* project in a particular zone, and so the resource name will
* typically look something like this:
* compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard
* </pre>
*
* <code>string disk_type = 2;</code>
*
* @return The bytes for diskType.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDiskTypeBytes() {
java.lang.Object ref = diskType_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
diskType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int MOUNT_POINT_FIELD_NUMBER = 3;
private volatile java.lang.Object mountPoint_;
/**
*
*
* <pre>
* Directory in a VM where disk is mounted.
* </pre>
*
* <code>string mount_point = 3;</code>
*
* @return The mountPoint.
*/
@java.lang.Override
public java.lang.String getMountPoint() {
java.lang.Object ref = mountPoint_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
mountPoint_ = s;
return s;
}
}
/**
*
*
* <pre>
* Directory in a VM where disk is mounted.
* </pre>
*
* <code>string mount_point = 3;</code>
*
* @return The bytes for mountPoint.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMountPointBytes() {
java.lang.Object ref = mountPoint_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
mountPoint_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Generated serializer: per proto3 wire-format rules, a field is only
  // written when set to a non-default value (0 / empty string).
  if (sizeGb_ != 0) {
    output.writeInt32(1, sizeGb_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(diskType_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, diskType_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mountPoint_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, mountPoint_);
  }
  // Preserve any fields read from a newer schema version.
  unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Memoized: -1 means "not yet computed".  Caching is safe because the
  // message is immutable once built.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  // Mirror writeTo(): only fields set to non-default values contribute.
  if (sizeGb_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, sizeGb_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(diskType_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, diskType_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(mountPoint_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, mountPoint_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Generated value-equality: two Disk messages are equal iff all three
  // fields and the unknown-field set match.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.dataflow.v1beta3.Disk)) {
    // Non-Disk types fall back to the superclass comparison.
    return super.equals(obj);
  }
  com.google.dataflow.v1beta3.Disk other = (com.google.dataflow.v1beta3.Disk) obj;
  if (getSizeGb() != other.getSizeGb()) return false;
  if (!getDiskType().equals(other.getDiskType())) return false;
  if (!getMountPoint().equals(other.getMountPoint())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized: 0 means "not yet computed" (safe: the mixing below cannot
  // be relied on to produce 0 for the sentinel check used here).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Standard generated mixing: fold in each field number and value,
  // consistent with equals() above.
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + SIZE_GB_FIELD_NUMBER;
  hash = (53 * hash) + getSizeGb();
  hash = (37 * hash) + DISK_TYPE_FIELD_NUMBER;
  hash = (53 * hash) + getDiskType().hashCode();
  hash = (37 * hash) + MOUNT_POINT_FIELD_NUMBER;
  hash = (53 * hash) + getMountPoint().hashCode();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.dataflow.v1beta3.Disk parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.Disk parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.Disk parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.Disk parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.Disk parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.dataflow.v1beta3.Disk parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.dataflow.v1beta3.Disk parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.Disk parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.Disk parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.Disk parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.dataflow.v1beta3.Disk parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.dataflow.v1beta3.Disk parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.dataflow.v1beta3.Disk prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Describes the data disk used by a workflow job.
* </pre>
*
* Protobuf type {@code google.dataflow.v1beta3.Disk}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.dataflow.v1beta3.Disk)
com.google.dataflow.v1beta3.DiskOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.dataflow.v1beta3.EnvironmentProto
.internal_static_google_dataflow_v1beta3_Disk_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.dataflow.v1beta3.EnvironmentProto
.internal_static_google_dataflow_v1beta3_Disk_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.dataflow.v1beta3.Disk.class,
com.google.dataflow.v1beta3.Disk.Builder.class);
}
// Construct using com.google.dataflow.v1beta3.Disk.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
sizeGb_ = 0;
diskType_ = "";
mountPoint_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.dataflow.v1beta3.EnvironmentProto
.internal_static_google_dataflow_v1beta3_Disk_descriptor;
}
@java.lang.Override
public com.google.dataflow.v1beta3.Disk getDefaultInstanceForType() {
return com.google.dataflow.v1beta3.Disk.getDefaultInstance();
}
@java.lang.Override
public com.google.dataflow.v1beta3.Disk build() {
com.google.dataflow.v1beta3.Disk result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.dataflow.v1beta3.Disk buildPartial() {
com.google.dataflow.v1beta3.Disk result = new com.google.dataflow.v1beta3.Disk(this);
result.sizeGb_ = sizeGb_;
result.diskType_ = diskType_;
result.mountPoint_ = mountPoint_;
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.dataflow.v1beta3.Disk) {
return mergeFrom((com.google.dataflow.v1beta3.Disk) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.dataflow.v1beta3.Disk other) {
if (other == com.google.dataflow.v1beta3.Disk.getDefaultInstance()) return this;
if (other.getSizeGb() != 0) {
setSizeGb(other.getSizeGb());
}
if (!other.getDiskType().isEmpty()) {
diskType_ = other.diskType_;
onChanged();
}
if (!other.getMountPoint().isEmpty()) {
mountPoint_ = other.mountPoint_;
onChanged();
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.dataflow.v1beta3.Disk parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.dataflow.v1beta3.Disk) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int sizeGb_;
/**
*
*
* <pre>
* Size of disk in GB. If zero or unspecified, the service will
* attempt to choose a reasonable default.
* </pre>
*
* <code>int32 size_gb = 1;</code>
*
* @return The sizeGb.
*/
@java.lang.Override
public int getSizeGb() {
return sizeGb_;
}
/**
*
*
* <pre>
* Size of disk in GB. If zero or unspecified, the service will
* attempt to choose a reasonable default.
* </pre>
*
* <code>int32 size_gb = 1;</code>
*
* @param value The sizeGb to set.
* @return This builder for chaining.
*/
public Builder setSizeGb(int value) {
sizeGb_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Size of disk in GB. If zero or unspecified, the service will
* attempt to choose a reasonable default.
* </pre>
*
* <code>int32 size_gb = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearSizeGb() {
sizeGb_ = 0;
onChanged();
return this;
}
private java.lang.Object diskType_ = "";
/**
*
*
* <pre>
* Disk storage type, as defined by Google Compute Engine. This
* must be a disk type appropriate to the project and zone in which
* the workers will run. If unknown or unspecified, the service
* will attempt to choose a reasonable default.
* For example, the standard persistent disk type is a resource name
* typically ending in "pd-standard". If SSD persistent disks are
* available, the resource name typically ends with "pd-ssd". The
* actual valid values are defined the Google Compute Engine API,
* not by the Cloud Dataflow API; consult the Google Compute Engine
* documentation for more information about determining the set of
* available disk types for a particular project and zone.
* Google Compute Engine Disk types are local to a particular
* project in a particular zone, and so the resource name will
* typically look something like this:
* compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard
* </pre>
*
* <code>string disk_type = 2;</code>
*
* @return The diskType.
*/
public java.lang.String getDiskType() {
java.lang.Object ref = diskType_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
diskType_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Disk storage type, as defined by Google Compute Engine. This
* must be a disk type appropriate to the project and zone in which
* the workers will run. If unknown or unspecified, the service
* will attempt to choose a reasonable default.
* For example, the standard persistent disk type is a resource name
* typically ending in "pd-standard". If SSD persistent disks are
* available, the resource name typically ends with "pd-ssd". The
* actual valid values are defined the Google Compute Engine API,
* not by the Cloud Dataflow API; consult the Google Compute Engine
* documentation for more information about determining the set of
* available disk types for a particular project and zone.
* Google Compute Engine Disk types are local to a particular
* project in a particular zone, and so the resource name will
* typically look something like this:
* compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard
* </pre>
*
* <code>string disk_type = 2;</code>
*
* @return The bytes for diskType.
*/
public com.google.protobuf.ByteString getDiskTypeBytes() {
java.lang.Object ref = diskType_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
diskType_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Disk storage type, as defined by Google Compute Engine. This
* must be a disk type appropriate to the project and zone in which
* the workers will run. If unknown or unspecified, the service
* will attempt to choose a reasonable default.
* For example, the standard persistent disk type is a resource name
* typically ending in "pd-standard". If SSD persistent disks are
* available, the resource name typically ends with "pd-ssd". The
* actual valid values are defined the Google Compute Engine API,
* not by the Cloud Dataflow API; consult the Google Compute Engine
* documentation for more information about determining the set of
* available disk types for a particular project and zone.
* Google Compute Engine Disk types are local to a particular
* project in a particular zone, and so the resource name will
* typically look something like this:
* compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard
* </pre>
*
* <code>string disk_type = 2;</code>
*
* @param value The diskType to set.
* @return This builder for chaining.
*/
public Builder setDiskType(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
diskType_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Disk storage type, as defined by Google Compute Engine. This
* must be a disk type appropriate to the project and zone in which
* the workers will run. If unknown or unspecified, the service
* will attempt to choose a reasonable default.
* For example, the standard persistent disk type is a resource name
* typically ending in "pd-standard". If SSD persistent disks are
* available, the resource name typically ends with "pd-ssd". The
* actual valid values are defined the Google Compute Engine API,
* not by the Cloud Dataflow API; consult the Google Compute Engine
* documentation for more information about determining the set of
* available disk types for a particular project and zone.
* Google Compute Engine Disk types are local to a particular
* project in a particular zone, and so the resource name will
* typically look something like this:
* compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard
* </pre>
*
* <code>string disk_type = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearDiskType() {
diskType_ = getDefaultInstance().getDiskType();
onChanged();
return this;
}
/**
*
*
* <pre>
* Disk storage type, as defined by Google Compute Engine. This
* must be a disk type appropriate to the project and zone in which
* the workers will run. If unknown or unspecified, the service
* will attempt to choose a reasonable default.
* For example, the standard persistent disk type is a resource name
* typically ending in "pd-standard". If SSD persistent disks are
* available, the resource name typically ends with "pd-ssd". The
* actual valid values are defined the Google Compute Engine API,
* not by the Cloud Dataflow API; consult the Google Compute Engine
* documentation for more information about determining the set of
* available disk types for a particular project and zone.
* Google Compute Engine Disk types are local to a particular
* project in a particular zone, and so the resource name will
* typically look something like this:
* compute.googleapis.com/projects/project-id/zones/zone/diskTypes/pd-standard
* </pre>
*
* <code>string disk_type = 2;</code>
*
* @param value The bytes for diskType to set.
* @return This builder for chaining.
*/
public Builder setDiskTypeBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
diskType_ = value;
onChanged();
return this;
}
private java.lang.Object mountPoint_ = "";
/**
*
*
* <pre>
* Directory in a VM where disk is mounted.
* </pre>
*
* <code>string mount_point = 3;</code>
*
* @return The mountPoint.
*/
public java.lang.String getMountPoint() {
java.lang.Object ref = mountPoint_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
mountPoint_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Directory in a VM where disk is mounted.
* </pre>
*
* <code>string mount_point = 3;</code>
*
* @return The bytes for mountPoint.
*/
public com.google.protobuf.ByteString getMountPointBytes() {
java.lang.Object ref = mountPoint_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
mountPoint_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Directory in a VM where disk is mounted.
* </pre>
*
* <code>string mount_point = 3;</code>
*
* @param value The mountPoint to set.
* @return This builder for chaining.
*/
public Builder setMountPoint(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
mountPoint_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Directory in a VM where disk is mounted.
* </pre>
*
* <code>string mount_point = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearMountPoint() {
mountPoint_ = getDefaultInstance().getMountPoint();
onChanged();
return this;
}
/**
*
*
* <pre>
* Directory in a VM where disk is mounted.
* </pre>
*
* <code>string mount_point = 3;</code>
*
* @param value The bytes for mountPoint to set.
* @return This builder for chaining.
*/
public Builder setMountPointBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
mountPoint_ = value;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.dataflow.v1beta3.Disk)
}
// @@protoc_insertion_point(class_scope:google.dataflow.v1beta3.Disk)
private static final com.google.dataflow.v1beta3.Disk DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.dataflow.v1beta3.Disk();
}
public static com.google.dataflow.v1beta3.Disk getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Disk> PARSER =
new com.google.protobuf.AbstractParser<Disk>() {
@java.lang.Override
public Disk parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Disk(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Disk> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Disk> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.dataflow.v1beta3.Disk getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| apache-2.0 |
arangodb/arangodb-java-driver | src/main/java/com/arangodb/model/OptionsBuilder.java | 4851 | /*
* DISCLAIMER
*
* Copyright 2016 ArangoDB GmbH, Cologne, Germany
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright holder is ArangoDB GmbH, Cologne, Germany
*/
package com.arangodb.model;
import com.arangodb.entity.EdgeDefinition;
import com.arangodb.entity.Permissions;
import com.arangodb.entity.ViewType;
import com.arangodb.velocypack.VPackSlice;
import java.util.Collection;
/**
* @author Mark Vollmary
* @author Michele Rastelli
*/
public class OptionsBuilder {
// Utility class: private constructor prevents instantiation.
private OptionsBuilder() {
    super();
}
/**
 * Populates the given options with the user name and password.
 *
 * @param options the options object to populate
 * @param user    the user name
 * @param passwd  the user's password
 * @return the populated options (same instance, fluently updated)
 */
public static UserCreateOptions build(final UserCreateOptions options, final String user, final String passwd) {
    final UserCreateOptions withUser = options.user(user);
    return withUser.passwd(passwd);
}
/**
* @deprecated use {@link #build(PersistentIndexOptions, Iterable)} instead. Since ArangoDB 3.7 a hash index is an
* alias for a persistent index.
*/
@Deprecated
public static HashIndexOptions build(final HashIndexOptions options, final Iterable<String> fields) {
return options.fields(fields);
}
/**
* @deprecated use {@link #build(PersistentIndexOptions, Iterable)} instead. Since ArangoDB 3.7 a skiplist index is
* an alias for a persistent index.
*/
@Deprecated
public static SkiplistIndexOptions build(final SkiplistIndexOptions options, final Iterable<String> fields) {
return options.fields(fields);
}
public static PersistentIndexOptions build(final PersistentIndexOptions options, final Iterable<String> fields) {
return options.fields(fields);
}
public static GeoIndexOptions build(final GeoIndexOptions options, final Iterable<String> fields) {
return options.fields(fields);
}
public static FulltextIndexOptions build(final FulltextIndexOptions options, final Iterable<String> fields) {
return options.fields(fields);
}
public static TtlIndexOptions build(final TtlIndexOptions options, final Iterable<String> fields) {
return options.fields(fields);
}
public static ZKDIndexOptions build(final ZKDIndexOptions options, final Iterable<String> fields) {
return options.fields(fields);
}
public static CollectionCreateOptions build(final CollectionCreateOptions options, final String name) {
return options.name(name);
}
public static AqlQueryOptions build(final AqlQueryOptions options, final String query, final VPackSlice bindVars) {
return options.query(query).bindVars(bindVars);
}
public static AqlQueryExplainOptions build(
final AqlQueryExplainOptions options,
final String query,
final VPackSlice bindVars) {
return options.query(query).bindVars(bindVars);
}
public static AqlQueryParseOptions build(final AqlQueryParseOptions options, final String query) {
return options.query(query);
}
public static GraphCreateOptions build(
final GraphCreateOptions options,
final String name,
final Collection<EdgeDefinition> edgeDefinitions) {
return options.name(name).edgeDefinitions(edgeDefinitions);
}
public static TransactionOptions build(final TransactionOptions options, final String action) {
return options.action(action);
}
public static CollectionRenameOptions build(final CollectionRenameOptions options, final String name) {
return options.name(name);
}
public static UserAccessOptions build(final UserAccessOptions options, final Permissions grant) {
return options.grant(grant);
}
public static AqlFunctionCreateOptions build(
final AqlFunctionCreateOptions options,
final String name,
final String code) {
return options.name(name).code(code);
}
public static VertexCollectionCreateOptions build(
final VertexCollectionCreateOptions options,
final String collection) {
return options.collection(collection);
}
public static ViewCreateOptions build(final ViewCreateOptions options, final String name, final ViewType type) {
return options.name(name).type(type);
}
public static ViewRenameOptions build(final ViewRenameOptions options, final String name) {
return options.name(name);
}
}
| apache-2.0 |
schnurlei/jdynameta | jdy/jdy.base/src/main/java/de/jdynameta/base/creation/ObjectReader.java | 1351 | /**
*
* Copyright 2011 (C) Rainer Schneider,Roggenburg <schnurlei@googlemail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package de.jdynameta.base.creation;
import java.io.Serializable;
import de.jdynameta.base.metainfo.filter.ClassInfoQuery;
import de.jdynameta.base.objectlist.ObjectList;
import de.jdynameta.base.value.JdyPersistentException;
/**
* @author Rainer
*
*/
public interface ObjectReader extends Serializable {

    /**
     * Builds a select statement from the given query filter and reads the
     * matching values from the database.
     *
     * @param <TCreatedObjFromValueObj> type of the objects produced by the creator
     * @param filter query describing which objects to load
     * @param aObjCreator creator that turns the raw values into result objects
     * @return list of the created objects
     * @throws JdyPersistentException if reading from the database fails
     */
    <TCreatedObjFromValueObj> ObjectList<TCreatedObjFromValueObj> loadValuesFromDb(ClassInfoQuery filter,
            ObjectCreator<TCreatedObjFromValueObj> aObjCreator) throws JdyPersistentException;
}
| apache-2.0 |
mrtamm/sqlstore | src/test/java/ws/rocket/sqlstore/test/db/derby/package-info.java | 710 | /*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Classes used for Derby database testing.
*/
package ws.rocket.sqlstore.test.db.derby;
| apache-2.0 |
mjanicek/rembulan | rembulan-runtime/src/main/java/net/sandius/rembulan/impl/DefaultStateContext.java | 932 | /*
* Copyright 2016 Miroslav Janíček
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sandius.rembulan.impl;
import net.sandius.rembulan.MetatableAccessor;
import net.sandius.rembulan.TableFactory;
// Minimal concrete state context: all behaviour is inherited from
// AbstractStateContext; this class only wires the two collaborators together.
class DefaultStateContext extends AbstractStateContext {

    /**
     * Creates a state context backed by the given collaborators.
     *
     * @param tableFactory the table factory passed through to the superclass
     * @param metatableAccessor the metatable accessor passed through to the superclass
     */
    protected DefaultStateContext(TableFactory tableFactory, MetatableAccessor metatableAccessor) {
        super(tableFactory, metatableAccessor);
    }
}
| apache-2.0 |
rapodaca/Terasology | src/org/terasology/logic/generators/ChunkGeneratorFlora.java | 7196 | /*
* Copyright 2011 Benjamin Glatzel <benjamin.glatzel@me.com>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.logic.generators;
import org.terasology.logic.manager.Config;
import org.terasology.logic.world.Chunk;
import org.terasology.model.blocks.management.BlockManager;
/**
* Generates some trees, flowers and high grass.
*
* @author Benjamin Glatzel <benjamin.glatzel@me.com>
*/
public class ChunkGeneratorFlora extends ChunkGeneratorTerrain {

    // Per-biome tall-grass densities, read once from the global configuration
    // at class-load time.
    private static final double DESERT_GRASS_DENSITY = Config.getInstance().getDesertGrassDensity();
    private static final double FOREST_GRASS_DENSITY = Config.getInstance().getForrestGrassDensity();
    private static final double PLAINS_GRASS_DENSITY = Config.getInstance().getPlainsGrassDensity();
    private static final double SNOW_GRASS_DENSITY = Config.getInstance().getSnowGrassDensity();
    private static final double MOUNTAINS_GRASS_DENSITY = Config.getInstance().getMountainGrassDensity();

    public ChunkGeneratorFlora(GeneratorManager generatorManager) {
        super(generatorManager);
    }

    /**
     * Populates the chunk with flora: first a full pass placing grass and
     * flowers per block position, then a sparser pass placing trees.
     * NOTE(review): the order of the random draws below determines the
     * generated world, so statement order must not be changed.
     */
    @Override
    public void generate(Chunk c) {
        for (int y = 0; y < Chunk.CHUNK_DIMENSION_Y; y++) {
            for (int x = 0; x < Chunk.CHUNK_DIMENSION_X; x++) {
                for (int z = 0; z < Chunk.CHUNK_DIMENSION_Z; z++) {
                    generateGrassAndFlowers(c, x, y, z);
                }
            }
        }
        generateTrees(c);
    }

    /**
     * Generates trees on the given chunk. Candidate columns are sampled on a
     * 4x4 horizontal grid starting at y=32, then jittered randomly.
     *
     * @param c The chunk
     */
    private void generateTrees(Chunk c) {
        for (int y = 32; y < Chunk.CHUNK_DIMENSION_Y; y++) {
            for (int x = 0; x < Chunk.CHUNK_DIMENSION_X; x += 4) {
                for (int z = 0; z < Chunk.CHUNK_DIMENSION_Z; z += 4) {
                    BIOME_TYPE biome = calcBiomeTypeForGlobalPosition(c.getBlockWorldPosX(x), c.getBlockWorldPosZ(z));
                    // Jitter the sample position. NOTE(review): randomInt() may be
                    // negative, so "% 12 + 6" yields offsets in (-6..17], which can
                    // fall outside this chunk — confirm Chunk.getBlock tolerates
                    // out-of-range coordinates.
                    int randX = x + c.getRandom().randomInt() % 12 + 6;
                    int randZ = z + c.getRandom().randomInt() % 12 + 6;
                    // Trees may only grow on grass, snow or sand surfaces.
                    if (c.getBlock(randX, y, randZ) == BlockManager.getInstance().getBlock("Grass").getId() || c.getBlock(randX, y, randZ) == BlockManager.getInstance().getBlock("Snow").getId() || c.getBlock(randX, y, randZ) == BlockManager.getInstance().getBlock("Sand").getId()) {
                        double rand = Math.abs(c.getRandom().randomDouble());
                        int randomGeneratorId;
                        int size = _parent.getTreeGenerators(biome).size();
                        if (size > 0) {
                            // Pick one of the biome's tree generators at random and
                            // roll against that generator's spawn probability.
                            randomGeneratorId = Math.abs(c.getRandom().randomInt()) % size;
                            TreeGenerator treeGen = _parent.getTreeGenerator(biome, randomGeneratorId);
                            if (rand < treeGen.getGenProbability()) {
                                generateTree(c, treeGen, randX, y, randZ);
                            }
                        }
                    }
                }
            }
        }
    }

    /**
     * Generates grass or a flower on the given chunk. Only fires when the
     * block at (x, y, z) is a grass/sand/snow surface with air above it.
     *
     * @param c The chunk
     * @param x Position on the x-axis
     * @param y Position on the y-axis
     * @param z Position on the z-axis
     */
    private void generateGrassAndFlowers(Chunk c, int x, int y, int z) {
        if ((c.getBlock(x, y, z) == BlockManager.getInstance().getBlock("Grass").getId() || c.getBlock(x, y, z) == BlockManager.getInstance().getBlock("Sand").getId() || c.getBlock(x, y, z) == BlockManager.getInstance().getBlock("Snow").getId()) && c.getBlock(x, y + 1, z) == 0x0) {
            // Map the random double from [-1, 1] to [0, 1] and compare against
            // the biome's grass-density threshold.
            double grassRand = (c.getRandom().randomDouble() + 1.0) / 2.0;
            double grassProb = 1.0;
            BIOME_TYPE biome = calcBiomeTypeForGlobalPosition(c.getBlockWorldPosX(x), c.getBlockWorldPosZ(z));
            switch (biome) {
                case PLAINS:
                    grassProb = 1.0 - PLAINS_GRASS_DENSITY;
                    break;
                case MOUNTAINS:
                    grassProb = 1.0 - MOUNTAINS_GRASS_DENSITY;
                    break;
                case FOREST:
                    grassProb = 1.0 - FOREST_GRASS_DENSITY;
                    break;
                case SNOW:
                    grassProb = 1.0 - SNOW_GRASS_DENSITY;
                    break;
                case DESERT:
                    grassProb = 1.0 - DESERT_GRASS_DENSITY;
                    break;
            }
            if (grassRand > grassProb) {
                /*
                 * Generate tall grass. A normally distributed draw selects one
                 * of the three grass variants (variant 1 is the most likely).
                 */
                double rand = c.getRandom().standNormalDistrDouble();
                if (rand > -0.4 && rand < 0.4) {
                    c.setBlock(x, y + 1, z, BlockManager.getInstance().getBlock("TallGrass1").getId());
                } else if (rand > -0.6 && rand < 0.6) {
                    c.setBlock(x, y + 1, z, BlockManager.getInstance().getBlock("TallGrass2").getId());
                } else {
                    c.setBlock(x, y + 1, z, BlockManager.getInstance().getBlock("TallGrass3").getId());
                }
                double flowerRand = c.getRandom().randomDouble();
                /*
                 * Generate flowers. A rare normal-distribution event (< -2, i.e.
                 * roughly the 2.3% tail) replaces the grass just placed above
                 * with a flower or mushroom chosen by flowerRand.
                 */
                if (c.getRandom().standNormalDistrDouble() < -2) {
                    if (flowerRand >= -1.0 && flowerRand < 0.2) {
                        c.setBlock(x, y + 1, z, BlockManager.getInstance().getBlock("RedFlower").getId());
                    } else if (flowerRand >= 0.2 && flowerRand < 0.6) {
                        c.setBlock(x, y + 1, z, BlockManager.getInstance().getBlock("YellowFlower").getId());
                    } else if (flowerRand >= 0.6 && flowerRand < 0.7) {
                        c.setBlock(x, y + 1, z, BlockManager.getInstance().getBlock("BrownShroom").getId());
                    } else if (flowerRand >= 0.7 && flowerRand < 0.8) {
                        c.setBlock(x, y + 1, z, BlockManager.getInstance().getBlock("RedShroom").getId());
                    }
                }
            }
        }
    }

    /**
     * Generates a tree on the given chunk. Skipped when the block above the
     * surface cannot see the sky.
     *
     * @param c The chunk
     * @param treeGen The tree generator
     * @param x Position on the x-axis
     * @param y Position on the y-axis
     * @param z Position on the z-axis
     */
    private void generateTree(Chunk c, TreeGenerator treeGen, int x, int y, int z) {
        if (!c.canBlockSeeTheSky(x, y + 1, z))
            return;
        treeGen.generate(c.getRandom(), c.getBlockWorldPosX(x), y + 1, c.getBlockWorldPosZ(z), false);
    }
}
| apache-2.0 |
xiaofengzhouxf/mriya | mriya.provider/src/main/java/com/jason/mriya/provider/invoker/RemoteInvokerFactory.java | 1987 | package com.jason.mriya.provider.invoker;
import java.util.Collection;
import java.util.HashMap;
import com.jason.mriya.client.contants.TranProtocol;
import com.jason.mriya.client.exception.MriyaRuntimeException;
import com.jason.mriya.provider.export.RemoteExporter;
import com.jason.mriya.provider.invoker.hessian.HessianInvoker;
/**
*
* <pre>
* <p>文件名称: RemoteInvokerFactory.java</p>
*
* <p>文件功能: RemoteInvoker 工厂</p>
*
* <p>编程者: xiaofeng.zhou</p>
*
* <p>初作时间: 2014年9月22日 上午8:41:12</p>
*
* <p>版本: version 1.0 </p>
* </pre>
*/
/**
 * Factory and registry for {@link RemoteInvoker} instances.
 *
 * <p>Invokers are created once per exporter name and cached; subsequent calls
 * with the same exporter name return the cached instance.
 *
 * <p>NOTE(review): both caches are plain {@link HashMap}s, so this factory is
 * not safe for concurrent use — confirm callers only invoke {@link #create}
 * from a single thread (e.g. during startup).
 */
public class RemoteInvokerFactory {

    /** Created invokers, keyed by exporter name. */
    private final static HashMap<String, RemoteInvoker> invokerCache = new HashMap<String, RemoteInvoker>(
            10);

    /** Registered exporters, keyed by group id concatenated with exporter name. */
    private final static HashMap<String, RemoteExporter> exportCache = new HashMap<String, RemoteExporter>(
            10);

    /**
     * Returns the invoker for the given exporter, creating and caching it on
     * first use.
     *
     * @param exporter the exported service description; must not be {@code null}
     * @return the (possibly cached) invoker for the exporter
     * @throws MriyaRuntimeException if the exporter is {@code null} or its
     *         protocol is not supported
     */
    public static RemoteInvoker create(RemoteExporter exporter) {
        if (exporter == null) {
            throw new MriyaRuntimeException("exporter can't be null.");
        }
        // Single map lookup instead of containsKey() followed by get().
        RemoteInvoker cached = invokerCache.get(exporter.getName());
        if (cached != null) {
            return cached;
        }
        RemoteInvoker invoker;
        // FIXME: consider a pluggable protocol mechanism later (e.g. ServiceLoader)
        // instead of hard-coding the supported protocols here.
        // NOTE(review): toUpperCase() uses the default locale; a locale such as
        // Turkish could break the enum lookup — consider toUpperCase(Locale.ROOT).
        if (TranProtocol.valueOf(exporter.getProtocol().toUpperCase()) == TranProtocol.HESSIAN) {
            // Fall back to the service's concrete class when no explicit
            // service interface was configured.
            invoker = new HessianInvoker(exporter.getName(),
                    exporter.getGroupId(), exporter.getService(),
                    exporter.getServiceInterface() != null ? exporter
                            .getServiceInterface() : exporter.getService()
                            .getClass());
            invokerCache.put(exporter.getName(), invoker);
            exportCache.put(exporter.getGroupId() + exporter.getName(),
                    exporter);
        } else {
            throw new MriyaRuntimeException("Not support this protocol: "
                    + exporter.getProtocol());
        }
        return invoker;
    }

    /**
     * Looks up a previously created invoker by exporter name.
     *
     * @param bean the exporter name used when the invoker was created
     * @return the cached invoker, or {@code null} if none exists for that name
     */
    public static RemoteInvoker lookup(String bean) {
        return invokerCache.get(bean);
    }

    /**
     * @return all exporters that have been registered through {@link #create}
     */
    public static Collection<RemoteExporter> listService() {
        return exportCache.values();
    }
}
| apache-2.0 |
ankitsinghal/phoenix | phoenix-core/src/it/java/org/apache/phoenix/end2end/QueryWithLimitIT.java | 5768 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.end2end;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.RejectedExecutionException;
import org.apache.phoenix.compile.ExplainPlan;
import org.apache.phoenix.compile.ExplainPlanAttributes;
import org.apache.phoenix.jdbc.PhoenixPreparedStatement;
import org.apache.phoenix.query.QueryServices;
import org.apache.phoenix.util.PropertiesUtil;
import org.apache.phoenix.util.ReadOnlyProps;
import org.apache.phoenix.util.TestUtil;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.phoenix.thirdparty.com.google.common.collect.Maps;
public class QueryWithLimitIT extends BaseUniqueNamesOwnClusterIT {

    /** Unique table name generated per test to avoid cross-test interference. */
    private String tableName;

    @Before
    public void generateTableName() {
        tableName = generateUniqueName();
    }

    /** Configures and starts the test driver; must run before any connection is made. */
    @BeforeClass
    public static synchronized void doSetup() throws Exception {
        Map<String, String> props = Maps.newHashMapWithExpectedSize(3);
        // Must update config before starting server
        props.put(QueryServices.STATS_GUIDEPOST_WIDTH_BYTES_ATTRIB, Long.toString(50));
        props.put(QueryServices.QUEUE_SIZE_ATTRIB, Integer.toString(1));
        // Prevents RejectedExecutionException when creating the sequence table
        props.put(QueryServices.SEQUENCE_SALT_BUCKETS_ATTRIB, Integer.toString(0));
        props.put(QueryServices.THREAD_POOL_SIZE_ATTRIB, Integer.toString(4));
        // Prevents RejectedExecutionException when creating the log table
        props.put(QueryServices.LOG_SALT_BUCKETS_ATTRIB, Integer.toString(0));
        setUpTestDriver(new ReadOnlyProps(props.entrySet().iterator()));
    }

    /**
     * A LIMIT 1 query against a small two-column table should return exactly
     * one row and produce a serial, server-limited explain plan.
     */
    @Test
    public void testQueryWithLimitAndStats() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES);
        // try-with-resources replaces the original try/finally and guarantees
        // the connection is closed on every exit path.
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            conn.createStatement().execute("create table " + tableName + "\n" +
                    " (i1 integer not null, i2 integer not null\n" +
                    " CONSTRAINT pk PRIMARY KEY (i1,i2))");
            initTableValues(conn, 100);

            String query = "SELECT i1 FROM " + tableName + " LIMIT 1";
            ResultSet rs = conn.createStatement().executeQuery(query);
            assertTrue(rs.next());
            assertEquals(0, rs.getInt(1));
            assertFalse(rs.next());

            ExplainPlan plan = conn.prepareStatement(query)
                    .unwrap(PhoenixPreparedStatement.class).optimizeQuery()
                    .getExplainPlan();
            ExplainPlanAttributes explainPlanAttributes =
                    plan.getPlanStepsAsAttributes();
            assertEquals("SERIAL 1-WAY",
                    explainPlanAttributes.getIteratorTypeAndScanSize());
            assertEquals("FULL SCAN ",
                    explainPlanAttributes.getExplainScanType());
            assertEquals(tableName,
                    explainPlanAttributes.getTableName());
            assertEquals("SERVER FILTER BY FIRST KEY ONLY",
                    explainPlanAttributes.getServerWhereFilter());
            assertEquals(1, explainPlanAttributes.getServerRowLimit().intValue());
            assertEquals(1, explainPlanAttributes.getClientRowLimit().intValue());
        }
    }

    /**
     * Without a LIMIT the scan needs more threads than the deliberately tiny
     * queue allows, so the query must fail with a RejectedExecutionException
     * as the cause.
     */
    @Test
    public void testQueryWithoutLimitFails() throws Exception {
        Properties props = PropertiesUtil.deepCopy(TestUtil.TEST_PROPERTIES);
        // Fixes a connection leak: the original closed the connection only if
        // no assertion failed before conn.close() was reached.
        try (Connection conn = DriverManager.getConnection(getUrl(), props)) {
            conn.createStatement().execute("create table " + tableName + "\n" +
                    " (i1 integer not null, i2 integer not null\n" +
                    " CONSTRAINT pk PRIMARY KEY (i1,i2))");
            initTableValues(conn, 100);
            conn.createStatement().execute("UPDATE STATISTICS " + tableName);

            String query = "SELECT i1 FROM " + tableName;
            try {
                ResultSet rs = conn.createStatement().executeQuery(query);
                rs.next();
                fail();
            } catch (SQLException e) {
                assertTrue(e.getCause() instanceof RejectedExecutionException);
            }
        }
    }

    /**
     * Upserts rows (i, i+1) for i in [0, nRows) into the test table and
     * commits once at the end.
     */
    protected void initTableValues(Connection conn, int nRows) throws Exception {
        PreparedStatement stmt = conn.prepareStatement(
                "upsert into " + tableName +
                        " VALUES (?, ?)");
        for (int i = 0; i < nRows; i++) {
            stmt.setInt(1, i);
            stmt.setInt(2, i + 1);
            stmt.execute();
        }
        conn.commit();
    }
}
| apache-2.0 |
automenta/vivisect | src/main/java/example/SimpleTree.java | 1136 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package example;
import automenta.vivisect.dimensionalize.AbegoTreeLayout;
import org.jgrapht.Graph;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.DirectedPseudograph;
/**
*
* @author me
*/
public class SimpleTree {

    /** Builds a small two-root forest and hands it to the Abego tree layout. */
    public static void main(String[] args) {
        Graph g = new DirectedPseudograph(DefaultEdge.class);

        // Register every node first, then wire the parent -> child edges;
        // the resulting graph is identical to adding them interleaved.
        String[] nodes = {"root", "a", "b", "aa", "ab", "ba", "root2", "c"};
        for (String node : nodes) {
            g.addVertex(node);
        }

        String[][] edges = {
                {"root", "a"}, {"root", "b"},
                {"a", "aa"}, {"a", "ab"},
                {"b", "ba"},
                {"root2", "c"}, {"c", "ba"},
        };
        for (String[] edge : edges) {
            g.addEdge(edge[0], edge[1]);
        }

        new SimpleDirectedGraph(g, new AbegoTreeLayout());
    }
}
| apache-2.0 |
sjaco002/incubator-asterixdb | asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalScalarFunctionEvaluatorFactory.java | 1712 | /*
* Copyright 2009-2012 by The Regents of the University of California
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* you may obtain a copy of the License from
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.uci.ics.asterix.external.library;
import edu.uci.ics.asterix.om.functions.IExternalFunctionInfo;
import edu.uci.ics.hyracks.algebricks.common.exceptions.AlgebricksException;
import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluator;
import edu.uci.ics.hyracks.algebricks.runtime.base.ICopyEvaluatorFactory;
import edu.uci.ics.hyracks.data.std.api.IDataOutputProvider;
/**
 * Factory that creates evaluators for external (user-defined) scalar
 * functions.
 */
public class ExternalScalarFunctionEvaluatorFactory implements ICopyEvaluatorFactory {

    private static final long serialVersionUID = 1L;

    /** Descriptor of the external function to evaluate. */
    private final IExternalFunctionInfo finfo;
    /** Factories producing the evaluators for the function arguments. */
    private final ICopyEvaluatorFactory[] args;

    public ExternalScalarFunctionEvaluatorFactory(IExternalFunctionInfo finfo, ICopyEvaluatorFactory[] args)
            throws AlgebricksException {
        this.finfo = finfo;
        this.args = args;
    }

    @Override
    public ICopyEvaluator createEvaluator(IDataOutputProvider output) throws AlgebricksException {
        // The cast mirrors the original behaviour: for scalar function infos the
        // provider is expected to yield an ExternalScalarFunction, and anything
        // else fails fast with a ClassCastException.
        ExternalScalarFunction evaluator =
                (ExternalScalarFunction) ExternalFunctionProvider.getExternalFunctionEvaluator(finfo, args, output);
        return evaluator;
    }
}
| apache-2.0 |
Elewei/learn-java | Users-Manager-System/src/com/elewei/view/Error.java | 1460 | package com.elewei.view;
import java.io.IOException;
import java.io.PrintWriter;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* Servlet implementation class Error
*/
@WebServlet("/Error")
public class Error extends HttpServlet {
private static final long serialVersionUID = 1L;
/**
* @see HttpServlet#HttpServlet()
*/
public Error() {
super();
// TODO Auto-generated constructor stub
}
/**
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// TODO Auto-generated method stub
//response.getWriter().append("Served at: ").append(request.getContextPath());
response.setContentType("text/html;charset=utf-8");
PrintWriter out = response.getWriter();
out.print("<h3>操作失败</h3>");
out.println("<a href='/UsersManager/ManagerUsers'>返回用户管理界面</a>");
}
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// TODO Auto-generated method stub
doGet(request, response);
}
}
| apache-2.0 |
josecalles/Tistiq | app/src/main/java/com/josecalles/tistiq/injection/app/AppComponent.java | 1189 | /*
*
* Copyright 2015, Jose Calles
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.josecalles.tistiq.injection.app;
import com.josecalles.tistiq.TistiqApplication;
import com.josecalles.tistiq.injection.storage.StorageModule;
import com.josecalles.tistiq.mvp.model.RealmInteractor;
import com.josecalles.tistiq.mvp.model.SharedPrefLogger;
import dagger.Component;
import javax.inject.Singleton;
// Application-wide Dagger component, scoped as a singleton for the lifetime
// of the process.
@Singleton
@Component(
        modules = {
                AppModule.class, StorageModule.class
        })
public interface AppComponent {

    // Injects application-level dependencies into the Application subclass.
    void inject(TistiqApplication app);

    // Bindings exposed for dependent components / manual lookups.
    RealmInteractor provideRealmInteractor();

    SharedPrefLogger provideSharedPrefLogger();
}
spinnaker/clouddriver | clouddriver-lambda/src/test/java/com/netflix/spinnaker/clouddriver/lambda/service/LambdaServiceTest.java | 7745 | package com.netflix.spinnaker.clouddriver.lambda.service;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.amazonaws.services.lambda.AWSLambda;
import com.amazonaws.services.lambda.model.FunctionConfiguration;
import com.amazonaws.services.lambda.model.GetFunctionResult;
import com.amazonaws.services.lambda.model.GetPolicyResult;
import com.amazonaws.services.lambda.model.ListFunctionsResult;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.clouddriver.aws.security.AmazonClientProvider;
import com.netflix.spinnaker.clouddriver.aws.security.NetflixAmazonCredentials;
import com.netflix.spinnaker.clouddriver.core.limits.ServiceLimitConfiguration;
import com.netflix.spinnaker.clouddriver.lambda.service.config.LambdaServiceConfig;
import java.util.List;
import java.util.Map;
import org.junit.jupiter.api.Test;
/**
 * Unit tests for {@link LambdaService#getAllFunctions()}.
 *
 * <p>The config stubbing and service construction shared by every test were
 * previously duplicated four times; they are extracted into private helpers.
 */
class LambdaServiceTest {

  private ObjectMapper objectMapper = new ObjectMapper();
  private AmazonClientProvider clientProvider = mock(AmazonClientProvider.class);
  private LambdaServiceConfig lambdaServiceConfig = mock(LambdaServiceConfig.class);
  private ServiceLimitConfiguration serviceLimitConfiguration =
      mock(ServiceLimitConfiguration.class);
  private String REGION = "us-west-2";
  private NetflixAmazonCredentials netflixAmazonCredentials = mock(NetflixAmazonCredentials.class);

  /** Stubs the config/limit mocks with the defaults shared by every test. */
  private void stubDefaultConfig() {
    when(lambdaServiceConfig.getRetry()).thenReturn(new LambdaServiceConfig.Retry());
    when(lambdaServiceConfig.getConcurrency()).thenReturn(new LambdaServiceConfig.Concurrency());
    when(serviceLimitConfiguration.getLimit(any(), any(), any(), any(), any())).thenReturn(1.0);
  }

  /** Registers the given lambda client mock and builds the service under test. */
  private LambdaService newLambdaService(AWSLambda lambda) {
    when(clientProvider.getAmazonLambda(any(), any())).thenReturn(lambda);
    return new LambdaService(
        clientProvider,
        netflixAmazonCredentials,
        REGION,
        objectMapper,
        lambdaServiceConfig,
        serviceLimitConfiguration);
  }

  @Test
  void getAllFunctionsWhenFunctionsResultIsNullExpectEmpty() throws InterruptedException {
    stubDefaultConfig();
    AWSLambda lambda = mock(AWSLambda.class); // unstubbed calls return null by default
    LambdaService lambdaService = newLambdaService(lambda);

    List<Map<String, Object>> allFunctions = lambdaService.getAllFunctions();

    assertEquals(0, allFunctions.size());
  }

  @Test
  void getAllFunctionsWhenFunctionsResultIsEmptyExpectEmpty() throws InterruptedException {
    stubDefaultConfig();
    ListFunctionsResult functionsResult = mock(ListFunctionsResult.class);
    when(functionsResult.getFunctions()).thenReturn(List.of()); // Empty list
    AWSLambda lambda = mock(AWSLambda.class);
    when(lambda.listFunctions()).thenReturn(functionsResult);
    LambdaService lambdaService = newLambdaService(lambda);

    List<Map<String, Object>> allFunctions = lambdaService.getAllFunctions();

    assertEquals(0, allFunctions.size());
  }

  @Test
  void getAllFunctionsWhenFunctionNameIsEmptyExpectEmpty() throws InterruptedException {
    stubDefaultConfig();
    ListFunctionsResult functionsResult = mock(ListFunctionsResult.class);
    // A FunctionConfiguration without a name should be skipped entirely.
    when(functionsResult.getFunctions()).thenReturn(List.of(new FunctionConfiguration()));
    AWSLambda lambda = mock(AWSLambda.class);
    when(lambda.listFunctions(any())).thenReturn(functionsResult);
    LambdaService lambdaService = newLambdaService(lambda);

    List<Map<String, Object>> allFunctions = lambdaService.getAllFunctions();

    assertEquals(0, allFunctions.size());
  }

  @Test
  void getAllFunctionsWhenFunctionNameIsNotEmptyExpectNotEmpty() throws InterruptedException {
    stubDefaultConfig();
    ListFunctionsResult functionsResult = mock(ListFunctionsResult.class);
    FunctionConfiguration functionConfiguration = new FunctionConfiguration();
    functionConfiguration.setFunctionName("testFunction");
    when(functionsResult.getFunctions()).thenReturn(List.of(functionConfiguration));
    AWSLambda lambda = mock(AWSLambda.class);
    when(lambda.listFunctions(any())).thenReturn(functionsResult);

    GetFunctionResult functionResult = new GetFunctionResult();
    functionResult.setConfiguration(functionConfiguration);
    when(lambda.getFunction(any())).thenReturn(functionResult);

    // Resource policy attached to the function; exercises the policy parsing
    // path (including principals and ELB source-arn conditions).
    GetPolicyResult getPolicyResult = new GetPolicyResult();
    getPolicyResult.setPolicy(
        "{\n"
            + "  \"Version\": \"2012-10-17\",\n"
            + "  \"Statement\": [\n"
            + "    {\n"
            + "      \"Sid\": \"FirstStatement\",\n"
            + "      \"Effect\": \"Allow\",\n"
            + "      \"Action\": [\"iam:ChangePassword\"],\n"
            + "      \"Resource\": \"*\"\n"
            + "    },\n"
            + "    {\n"
            + "      \"Sid\": \"SecondStatement\",\n"
            + "      \"Effect\": \"Allow\",\n"
            + "      \"Action\": \"s3:ListAllMyBuckets\",\n"
            + "      \"Resource\": \"*\"\n"
            + "    },\n"
            + "    {\n"
            + "      \"Sid\": \"ThirdStatement\",\n"
            + "      \"Effect\": \"Allow\",\n"
            + "      \"Principal\": {\"AWS\":[ \"elasticloadbalancing.amazonaws.com\"]},\n"
            + "      \"Action\": [\n"
            + "        \"lambda:InvokeFunction\",\n"
            + "        \"s3:List*\",\n"
            + "        \"s3:Get*\"\n"
            + "      ],\n"
            + "      \"Resource\": [\n"
            + "        \"arn:aws:s3:::confidential-data\",\n"
            + "        \"arn:aws:s3:::confidential-data/*\"\n"
            + "      ],\n"
            + "      \"Condition\": {\"ArnLike\":{ \"AWS:SourceArn\": \"arn:aws:elasticloadbalancing:something:something:targetgroup/targetGroupName/abc\"}}\n"
            + "    }\n"
            + "  ]\n"
            + "}");
    when(lambda.getPolicy(any())).thenReturn(getPolicyResult);
    LambdaService lambdaService = newLambdaService(lambda);

    List<Map<String, Object>> allFunctions = lambdaService.getAllFunctions();

    assertEquals(1, allFunctions.size());
    Map<String, Object> function = allFunctions.get(0);
    assertEquals("testFunction", function.get("functionName"));
  }
}
| apache-2.0 |
eugeneiiim/AndroidCollections | src/com/google/common/collect/EmptyImmutableSet.java | 2202 | /*
* Copyright (C) 2007 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import java.util.Collection;
import java.util.Set;
import androidcollections.annotations.Nullable;
import com.google.common.annotations.GwtCompatible;
/**
* An empty immutable set.
*
* @author Kevin Bourrillion
*/
@GwtCompatible(serializable = true, emulated = true)
final class EmptyImmutableSet extends ImmutableSet<Object> {
  // Shared singleton; readResolve() below keeps it a singleton across
  // (de)serialization.
  static final EmptyImmutableSet INSTANCE = new EmptyImmutableSet();

  private EmptyImmutableSet() {}

  // Always zero: the set holds no elements.
  public int size() {
    return 0;
  }

  @Override public boolean isEmpty() {
    return true;
  }

  // No element is ever contained.
  @Override public boolean contains(Object target) {
    return false;
  }

  @Override public UnmodifiableIterator<Object> iterator() {
    return Iterators.emptyIterator();
  }

  // Reused for every toArray() call; safe to share because a zero-length
  // array is effectively immutable.
  private static final Object[] EMPTY_ARRAY = new Object[0];

  @Override public Object[] toArray() {
    return EMPTY_ARRAY;
  }

  @Override public <T> T[] toArray(T[] a) {
    // Per the Collection.toArray(T[]) contract, the element immediately
    // following the end of the set (index 0 here) is set to null.
    if (a.length > 0) {
      a[0] = null;
    }
    return a;
  }

  // An empty set contains all elements of a collection iff that collection
  // is itself empty.
  @Override public boolean containsAll(Collection<?> targets) {
    return targets.isEmpty();
  }

  // Equal to any Set that is empty, per the Set.equals contract.
  @Override public boolean equals(@Nullable Object object) {
    if (object instanceof Set) {
      Set<?> that = (Set<?>) object;
      return that.isEmpty();
    }
    return false;
  }

  // The Set.hashCode contract defines the hash of an empty set as 0.
  @Override public final int hashCode() {
    return 0;
  }

  @Override boolean isHashCodeFast() {
    return true;
  }

  @Override public String toString() {
    return "[]";
  }

  Object readResolve() {
    return INSTANCE; // preserve singleton property
  }

  private static final long serialVersionUID = 0;
}
| apache-2.0 |
NationalSecurityAgency/ghidra | Ghidra/Processors/MIPS/src/test.processors/java/ghidra/test/processors/MIPS64_64addr_O0_EmulatorTest.java | 1354 | /* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.test.processors;
import ghidra.test.processors.support.ProcessorEmulatorTestAdapter;
import junit.framework.Test;
/**
 * Emulator regression test for big-endian 64-bit MIPS binaries built with GCC at -O0.
 * All configuration is passed to the {@link ProcessorEmulatorTestAdapter} base class.
 */
public class MIPS64_64addr_O0_EmulatorTest extends ProcessorEmulatorTestAdapter {

    // Language / compiler-spec pair under test; no register dumps are compared.
    private static final String LANGUAGE_ID = "MIPS:BE:64:default";
    private static final String COMPILER_SPEC_ID = "default";
    private static final String[] REG_DUMP_SET = {};

    /** Builds the JUnit suite for this processor configuration. */
    public static Test suite() {
        return ProcessorEmulatorTestAdapter.buildEmulatorTestSuite(
            MIPS64_64addr_O0_EmulatorTest.class);
    }

    public MIPS64_64addr_O0_EmulatorTest(String name) throws Exception {
        super(name, LANGUAGE_ID, COMPILER_SPEC_ID, REG_DUMP_SET);
    }

    /** Names the test-binary set used by the adapter. */
    @Override
    protected String getProcessorDesignator() {
        return "MIPS64_64addr_GCC_O0";
    }
}
| apache-2.0 |
gwtproject/gwt-typedarrays | gwt-typedarrays/src/main/java/org/gwtproject/typedarrays/shared/Float64Array.java | 3846 | /*
* Copyright 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.gwtproject.typedarrays.shared;
import jsinterop.annotations.JsOverlay;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
import jsinterop.base.Js;
import jsinterop.base.JsArrayLike;
/**
 * A view representing an {@link ArrayBuffer} as 64-bit (double-precision) floats.
 *
 * @see <a
 *     href="http://www.khronos.org/registry/typedarray/specs/latest/#7">http://www.khronos.org/registry/typedarray/specs/latest/#7</a>
 */
@JsType(isNative = true, name = "Float64Array", namespace = JsPackage.GLOBAL)
public interface Float64Array extends ArrayBufferView {

  /** Size in bytes of each element of this view (a 64-bit float). */
  @JsOverlay int BYTES_PER_ELEMENT = 8;

  /**
   * The length in elements of this view.
   *
   * @return non-negative length
   */
  @JsProperty(name = "length")
  int length();

  /**
   * Retrieve one element of this view.
   *
   * @param index the index within the array to read from
   * @return the requested element
   */
  @JsOverlay
  default double get(int index) {
    // Reads through the JS array-like view; boxing to Double is a jsinterop detail.
    return Js.<JsArrayLike<Double>>uncheckedCast(this).getAt(index);
  }

  /**
   * Set one element in this view.
   *
   * @param index the index within the array to write to
   * @param value the value to write
   */
  @JsOverlay
  default void set(int index, double value) {
    Js.<JsArrayLike<Double>>uncheckedCast(this).setAt(index, value);
  }

  /**
   * Set multiple elements in this view from another view, storing starting at 0.
   *
   * @param array the values to write to this array
   */
  void set(Float64Array array);

  /**
   * Set multiple elements in this view from another view, storing starting at the requested offset.
   *
   * @param array the values to write to this array
   * @param offset the offset within this array to start writing
   */
  void set(Float64Array array, int offset);

  /**
   * Set multiple elements in this view from an array, storing starting at 0.
   *
   * @param array the values to write to this array
   */
  void set(double[] array);

  /**
   * Set multiple elements in this view from an array, storing starting at the requested offset.
   *
   * @param array the values to write to this array
   * @param offset the offset within this array to start writing
   */
  void set(double[] array, int offset);

  /**
   * Create a new view from the same array, from {@code begin} to the end of this view. The
   * offset is clamped to legal indices into this view, so it is not an error to specify an
   * invalid index.
   *
   * @param begin offset into this view if non-negative; if negative, an index from the end of this
   *     view
   * @return a new {@link Float64Array} instance
   */
  Float64Array subarray(int begin);

  /**
   * Create a new view from the same array, from {@code begin} to (but not including) {@code end}
   * in this view. These indices are clamped to legal indices into this view, so it is not an error
   * to specify invalid indices.
   *
   * @param begin offset into this view if non-negative; if negative, an index from the end of this
   *     view
   * @param end offset into this view if non-negative; if negative, an index from the end of this
   *     view
   * @return a new {@link Float64Array} instance
   */
  Float64Array subarray(int begin, int end);
}
| apache-2.0 |
TU-Berlin-SNET/tresor-pdp | modules/contexthandler/src/main/java/org/snet/tresor/pdp/contexthandler/saml/xacml3/XACML3RequestType.java | 1885 |
package org.snet.tresor.pdp.contexthandler.saml.xacml3;
import java.util.List;
import javax.xml.namespace.QName;
import org.opensaml.xacml.XACMLConstants;
import org.opensaml.xacml.ctx.ActionType;
import org.opensaml.xacml.ctx.EnvironmentType;
import org.opensaml.xacml.ctx.RequestType;
import org.opensaml.xacml.ctx.ResourceType;
import org.opensaml.xacml.ctx.SubjectType;
import org.opensaml.xacml.impl.AbstractXACMLObject;
import org.opensaml.xml.XMLObject;
import org.w3c.dom.Element;
/**
 * Basic representation of a XACML 3.0 {@code <Request>} element.
 *
 * <p>This is a minimal stand-in: the accessor methods inherited from
 * {@link RequestType} are stubs that return {@code null} / do nothing; the class
 * mainly exists to carry the XACML 3.0 element/type QNames and a cached DOM element.
 */
public class XACML3RequestType extends AbstractXACMLObject implements RequestType {

    /** Local name of the Request element. */
    public static final String DEFAULT_ELEMENT_LOCAL_NAME = "Request";

    /** Fully qualified name of the Request element. */
    public static final QName DEFAULT_ELEMENT_NAME = new QName(
            "urn:oasis:names:tc:xacml:3.0:core:schema:wd-17",
            DEFAULT_ELEMENT_LOCAL_NAME,
            XACMLConstants.XACMLCONTEXT_PREFIX);

    /** Local name of the RequestType schema type. */
    public static final String TYPE_LOCAL_NAME = "RequestType";

    /**
     * Fully qualified name of the RequestType schema type.
     *
     * <p>BUG FIX: this was previously built with the two-argument
     * {@code QName(namespaceURI, localPart)} constructor, which made the namespace
     * prefix the local part and dropped {@code "RequestType"} entirely. It now
     * mirrors {@link #DEFAULT_ELEMENT_NAME} and uses the three-argument form.
     */
    public static final QName TYPE_NAME = new QName(
            "urn:oasis:names:tc:xacml:3.0:core:schema:wd-17",
            TYPE_LOCAL_NAME,
            XACMLConstants.XACMLCONTEXT_PREFIX);

    protected XACML3RequestType(String namespaceURI, String elementLocalName,
            String namespacePrefix) {
        super(namespaceURI, elementLocalName, namespacePrefix);
    }

    /**
     * Creates the request and caches the backing DOM element.
     *
     * @param element the already-parsed DOM element for this request
     */
    public XACML3RequestType(String namespaceURI, String elementLocalName,
            String namespacePrefix, Element element) {
        super(namespaceURI, elementLocalName, namespacePrefix);
        this.setDOM(element);
    }

    /** Stub: this implementation exposes no child XMLObjects. */
    public List<XMLObject> getOrderedChildren() {
        return null;
    }

    /** Stub: subjects are not modeled. */
    public List<SubjectType> getSubjects() {
        return null;
    }

    /** Stub: resources are not modeled. */
    public List<ResourceType> getResources() {
        return null;
    }

    /** Stub: the action is not modeled. */
    public ActionType getAction() {
        return null;
    }

    /** Stub: setting the action is a no-op. */
    public void setAction(ActionType newAction) { }

    /** Stub: the environment is not modeled. */
    public EnvironmentType getEnvironment() {
        return null;
    }

    /** Stub: setting the environment is a no-op. */
    public void setEnvironment(EnvironmentType environment) { }
}
| apache-2.0 |
vector-solutions/TimeToPrayer | app/src/main/java/com/algorepublic/cityhistory/prayertimings/GPSTracker.java | 6215 | package com.algorepublic.cityhistory.prayertimings;
import android.app.AlertDialog;
import android.app.Service;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.os.IBinder;
import android.provider.Settings;
import android.util.Log;
public class GPSTracker extends Service implements LocationListener {
    // NOTE(review): although this extends Service, it is constructed directly via
    // "new GPSTracker(context)" rather than started by the framework, so the Service
    // lifecycle (onBind etc.) is effectively unused — confirm intent.
    private final Context mContext;
    // true when the GPS provider is enabled in device settings
    boolean isGPSEnabled = false;
    // true when the network (cell/Wi-Fi) provider is enabled
    boolean isNetworkEnabled = false;
    // true when at least one provider was enabled during the last getLocation() call
    boolean canGetLocation = false;
    Location location; // last fix obtained, if any
    double latitude; // latitude of the last fix
    double longitude; // longitude of the last fix
    // The minimum distance between location updates, in meters
    private static final long MIN_DISTANCE_CHANGE_FOR_UPDATES = 10; // 10 meters
    // The minimum time between location updates, in milliseconds
    private static final long MIN_TIME_BW_UPDATES = 1000 * 60 * 1; // 1 minute
    // System location manager; initialized lazily inside getLocation()
    protected LocationManager locationManager;

    /**
     * Creates the tracker and immediately attempts to obtain a first fix so that
     * getLatitude()/getLongitude() can be queried right away.
     *
     * @param context context used to reach the system location service
     */
    public GPSTracker(Context context) {
        this.mContext = context;
        getLocation();
    }

    /**
     * Checks which providers are enabled (network first, then GPS), registers this
     * object for periodic updates, and caches the last known location in
     * {@link #location}, {@code latitude} and {@code longitude}.
     *
     * @return the last known location, or null when no provider produced one
     */
    public Location getLocation() {
        try {
            locationManager = (LocationManager) mContext
                    .getSystemService(LOCATION_SERVICE);
            // getting GPS status
            isGPSEnabled = locationManager
                    .isProviderEnabled(LocationManager.GPS_PROVIDER);
            // getting network status
            isNetworkEnabled = locationManager
                    .isProviderEnabled(LocationManager.NETWORK_PROVIDER);
            if (!isGPSEnabled && !isNetworkEnabled) {
                // no provider is enabled; canGetLocation stays false and null is returned
            } else {
                this.canGetLocation = true;
                // First try the network provider
                if (isNetworkEnabled) {
                    locationManager.requestLocationUpdates(
                            LocationManager.NETWORK_PROVIDER,
                            MIN_TIME_BW_UPDATES,
                            MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
                    Log.d("Network", "Network");
                    if (locationManager != null) {
                        location = locationManager
                                .getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
                        if (location != null) {
                            latitude = location.getLatitude();
                            longitude = location.getLongitude();
                        }
                    }
                }
                // Fall back to GPS only when the network provider produced nothing,
                // i.e. a network fix is preferred when both providers are available.
                if (isGPSEnabled) {
                    if (location == null) {
                        locationManager.requestLocationUpdates(
                                LocationManager.GPS_PROVIDER,
                                MIN_TIME_BW_UPDATES,
                                MIN_DISTANCE_CHANGE_FOR_UPDATES, this);
                        Log.d("GPS Enabled", "GPS Enabled");
                        if (locationManager != null) {
                            location = locationManager
                                    .getLastKnownLocation(LocationManager.GPS_PROVIDER);
                            if (location != null) {
                                latitude = location.getLatitude();
                                longitude = location.getLongitude();
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            // NOTE(review): this broad catch silently swallows SecurityException when
            // the location permission is missing — the caller only sees a null result.
            e.printStackTrace();
        }
        return location;
    }

    /**
     * Stop using GPS listener.
     * Calling this function will stop location updates delivered to this tracker.
     */
    public void stopUsingGPS(){
        if(locationManager != null){
            locationManager.removeUpdates(GPSTracker.this);
        }
    }

    /**
     * Returns the latitude of the last fix, or the previous cached value (0.0 if
     * no fix was ever obtained).
     */
    public double getLatitude(){
        if(location != null){
            latitude = location.getLatitude();
        }
        // return latitude
        return latitude;
    }

    /**
     * Returns the longitude of the last fix, or the previous cached value (0.0 if
     * no fix was ever obtained).
     */
    public double getLongitude(){
        if(location != null){
            longitude = location.getLongitude();
        }
        // return longitude
        return longitude;
    }

    /**
     * Whether any location provider was enabled at the time of the last
     * getLocation() call.
     *
     * @return boolean
     */
    public boolean canGetLocation() {
        return this.canGetLocation;
    }

    /**
     * Shows an alert dialog prompting the user to enable location providers;
     * the Settings button opens the system location settings screen.
     */
    public void showSettingsAlert(){
        AlertDialog.Builder alertDialog = new AlertDialog.Builder(mContext);
        // Setting Dialog Title
        // NOTE(review): the title text "GPS is settings" reads like a typo for
        // "GPS settings" — user-facing copy, left unchanged here.
        alertDialog.setTitle("GPS is settings");
        // Setting Dialog Message
        alertDialog.setMessage("GPS is not enabled. Do you want to go to settings menu?");
        // On pressing Settings button, open the system location settings screen
        alertDialog.setPositiveButton("Settings", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog,int which) {
                Intent intent = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS);
                mContext.startActivity(intent);
            }
        });
        // on pressing cancel button, just dismiss the dialog
        alertDialog.setNegativeButton("Cancel", new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int which) {
                dialog.cancel();
            }
        });
        // Showing Alert Message
        alertDialog.show();
    }

    // No-op: updates are not consumed incrementally; callers poll getLatitude()/getLongitude().
    public void onLocationChanged(Location location) {
    }

    // No-op: provider state is only re-checked on the next getLocation() call.
    public void onProviderDisabled(String provider) {
    }

    // No-op: provider state is only re-checked on the next getLocation() call.
    public void onProviderEnabled(String provider) {
    }

    // No-op: status changes are ignored.
    public void onStatusChanged(String provider, int status, Bundle extras) {
    }

    // This Service is never bound; see the class-level note.
    @Override
    public IBinder onBind(Intent arg0) {
        return null;
    }
} | apache-2.0 |
ihorvitruk/buddysearch | presentation/src/main/java/com/buddysearch/android/presentation/di/module/ViewModule.java | 874 | package com.buddysearch.android.presentation.di.module;
import com.buddysearch.android.domain.Messenger;
import com.buddysearch.android.library.presentation.mvp.view.View;
import com.buddysearch.android.presentation.R;
import com.buddysearch.android.presentation.di.scope.ViewScope;
import dagger.Module;
import dagger.Provides;
/**
 * Dagger module supplying view-scoped collaborators for a single bound {@link View}.
 */
@Module
public class ViewModule {

    private final View view;

    public ViewModule(View view) {
        this.view = view;
    }

    /**
     * Supplies a {@link Messenger} whose notifications are surfaced through the
     * bound view as user-visible messages.
     */
    @ViewScope
    @Provides
    Messenger providesMessenger() {
        Messenger messenger = new Messenger() {
            @Override
            public void showNoNetworkMessage() {
                view.showMessage(R.string.no_internet_connection);
            }

            @Override
            public void showFromCacheMessage() {
                view.showMessage(R.string.data_from_cache);
            }
        };
        return messenger;
    }
}
| apache-2.0 |
BVTheEpic/JavaPlusPlus | JavaPlusPlusArrays.java | 3330 | import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
 * Static helpers for membership tests and range generation over primitive arrays
 * and lists.
 *
 * <p>Fixes over the previous revision (which did not compile):
 * <ul>
 *   <li>The floating-point {@code range} overloads used non-int array sizes and
 *       indices; they now size and index with explicit casts.</li>
 *   <li>The list-returning {@code range} overloads duplicated the array-returning
 *       signatures (methods may not differ only by return type); they are exposed
 *       as {@code rangeList} instead.</li>
 *   <li>The collection {@code contains} now accepts any {@code List<?>} (the old
 *       {@code ArrayList<Object>} parameter rejected e.g. {@code ArrayList<String>})
 *       and is null-safe via {@link Objects#equals}.</li>
 * </ul>
 */
public class JavaPlusPlusArrays
{
	/** Returns true if {@code arr} contains the boolean {@code b}. */
	public static boolean contains(boolean[] arr, boolean b)
	{
		for (boolean x : arr)
		{
			if (x == b) return true;
		}
		return false;
	}

	/** Returns true if {@code arr} contains the byte {@code b}. */
	public static boolean contains(byte[] arr, byte b)
	{
		for (byte x : arr)
		{
			if (x == b) return true;
		}
		return false;
	}

	/** Returns true if {@code arr} contains the char {@code c}. */
	public static boolean contains(char[] arr, char c)
	{
		for (char x : arr)
		{
			if (x == c) return true;
		}
		return false;
	}

	/** Returns true if {@code arr} contains the double {@code d} (== comparison: NaN never matches). */
	public static boolean contains(double[] arr, double d)
	{
		for (double x : arr)
		{
			if (x == d) return true;
		}
		return false;
	}

	/** Returns true if {@code arr} contains the float {@code f} (== comparison: NaN never matches). */
	public static boolean contains(float[] arr, float f)
	{
		for (float x : arr)
		{
			if (x == f) return true;
		}
		return false;
	}

	/** Returns true if {@code arr} contains the int {@code i}. */
	public static boolean contains(int[] arr, int i)
	{
		for (int x : arr)
		{
			if (x == i) return true;
		}
		return false;
	}

	/** Returns true if {@code arr} contains the long {@code l}. */
	public static boolean contains(long[] arr, long l)
	{
		for (long x : arr)
		{
			if (x == l) return true;
		}
		return false;
	}

	/** Returns true if {@code arr} contains the short {@code s}. */
	public static boolean contains(short[] arr, short s)
	{
		for (short x : arr)
		{
			if (x == s) return true;
		}
		return false;
	}

	/**
	 * Returns true if {@code list} contains {@code o}. Comparison uses
	 * {@link Objects#equals}, so null elements and a null {@code o} are handled
	 * safely. (Generalized from {@code ArrayList<Object>} so any list works.)
	 */
	public static boolean contains(List<?> list, Object o)
	{
		for (Object x : list)
		{
			if (Objects.equals(x, o)) return true;
		}
		return false;
	}

	/**
	 * Returns an array of doubles start, start+1, ... up to and including the
	 * largest value not exceeding {@code end}. Returns an empty array when
	 * {@code end < start}.
	 */
	public static double[] range(double start, double end)
	{
		if (end < start) return new double[0];
		double[] nums = new double[(int) (end - start) + 1];
		for (int i = 0; i < nums.length; i++)
		{
			nums[i] = start + i;
		}
		return nums;
	}

	/**
	 * Returns an array of floats start, start+1, ... up to and including the
	 * largest value not exceeding {@code end}. Returns an empty array when
	 * {@code end < start}.
	 */
	public static float[] range(float start, float end)
	{
		if (end < start) return new float[0];
		float[] nums = new float[(int) (end - start) + 1];
		for (int i = 0; i < nums.length; i++)
		{
			nums[i] = start + i;
		}
		return nums;
	}

	/**
	 * Returns an array containing ints start to end, inclusive. Returns an empty
	 * array when {@code end < start}.
	 */
	public static int[] range(int start, int end)
	{
		if (end < start) return new int[0];
		int[] nums = new int[end - start + 1];
		for (int i = start; i <= end; i++)
		{
			nums[i - start] = i;
		}
		return nums;
	}

	/**
	 * Returns an ArrayList of Doubles start, start+1, ... while not exceeding
	 * {@code end}. (Renamed from {@code range}: a method may not differ from the
	 * array-returning overload by return type alone.)
	 */
	public static ArrayList<Double> rangeList(double start, double end)
	{
		ArrayList<Double> nums = new ArrayList<Double>();
		for (double i = start; i <= end; i++)
		{
			nums.add(i);
		}
		return nums;
	}

	/** Returns an ArrayList of Floats start to end, inclusive; see {@link #rangeList(double, double)}. */
	public static ArrayList<Float> rangeList(float start, float end)
	{
		ArrayList<Float> nums = new ArrayList<Float>();
		for (float i = start; i <= end; i++)
		{
			nums.add(i);
		}
		return nums;
	}

	/** Returns an ArrayList of Integers start to end, inclusive; see {@link #rangeList(double, double)}. */
	public static ArrayList<Integer> rangeList(int start, int end)
	{
		ArrayList<Integer> nums = new ArrayList<Integer>();
		for (int i = start; i <= end; i++)
		{
			nums.add(i);
		}
		return nums;
	}
}
| apache-2.0 |
davityle/ngAndroid | ng-processor/src/main/resources/attributes/NgClick.java | 1609 | /*
* Copyright 2015 Tyler Davis
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ngandroid.lib.ngattributes;
import android.view.View;
import com.ngandroid.lib.R;
import com.ngandroid.lib.ng.Scope;
import com.ngandroid.lib.utils.Tuple;
/**
 * Handles the {@code ngClick} attribute: wires a click (or long-click) listener
 * onto a view that dispatches through an {@link Executor} bound to the scope.
 */
class NgClick implements NgAttribute {

    // Eagerly initialized singleton. Previously a mutable non-final field
    // ("ourInstance"); final guarantees safe publication and immutability.
    private static final NgClick INSTANCE = new NgClick();

    static NgClick getInstance() {
        return INSTANCE;
    }

    private NgClick() {
    }

    /** Attaches a plain (short) click listener for this attribute. */
    @Override
    public void attach(Scope scope, View view, int layoutId, int viewId, Tuple<String, String>[] models) {
        attach(scope, view, layoutId, viewId, getAttribute(), false);
    }

    /**
     * Attaches either an OnClickListener or an OnLongClickListener backed by a
     * scope-bound {@link Executor}.
     *
     * @param attr        the styleable attribute index whose expression is executed
     * @param isLongClick true to bind a long-click listener instead of a click listener
     */
    public void attach(Scope scope, View view, int layoutId, int viewId, int attr, boolean isLongClick) {
        Executor executor = new Executor(scope, layoutId, viewId, attr);
        if (!isLongClick)
            view.setOnClickListener(executor);
        else
            view.setOnLongClickListener(executor);
    }

    @Override
    public int getAttribute() {
        return R.styleable.ngAndroid_ngClick;
    }
}
| apache-2.0 |
OpenFeign/feign | core/src/test/java/feign/LoggerTest.java | 18846 | /*
* Copyright 2012-2022 The Feign Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package feign;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import org.assertj.core.api.SoftAssertions;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.rules.ExpectedException;
import org.junit.rules.RuleChain;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.junit.runners.model.Statement;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.util.*;
import java.util.concurrent.TimeUnit;
import feign.Logger.Level;
import feign.Request.ProtocolVersion;
import static java.util.Objects.nonNull;
import static feign.Util.enumForName;
@RunWith(Enclosed.class)
public class LoggerTest {
  // Rules are deliberately NOT annotated individually; they are chained below so
  // their ordering is explicit.
  public final ExpectedException thrown = ExpectedException.none();
  // Embedded HTTP server each test enqueues canned responses on.
  public final MockWebServer server = new MockWebServer();
  // Captures log lines and verifies them against the expected regexes after the test body.
  public final RecordingLogger logger = new RecordingLogger();
  /** Ensure expected exception handling is done before logger rule. */
  @Rule
  public final RuleChain chain = RuleChain.outerRule(server).around(logger).around(thrown);
  /**
   * Minimal Feign client every test targets: POSTs an 80-byte JSON login payload built
   * from the templated {@code @Body}. Note the {@code X-Token} header: RecordingLogger
   * filters request headers starting with "x-", so it never appears in expected output.
   */
  interface SendsStuff {

    @RequestLine("POST /")
    @Headers({"Content-Type: application/json", "X-Token: qwerty"})
    @Body("%7B\"customer_name\": \"{customer_name}\", \"user_name\": \"{user_name}\", \"password\": \"{password}\"%7D")
    String login(
        @Param("customer_name") String customer,
        @Param("user_name") String user,
        @Param("password") String password);
  }
  /**
   * Verifies exactly which lines each {@link Level} logs for a successful request/response
   * round trip. Entries in the expected lists are regexes matched against log output in order.
   */
  @RunWith(Parameterized.class)
  public static class LogLevelEmitsTest extends LoggerTest {

    private final Level logLevel;

    public LogLevelEmitsTest(Level logLevel, List<String> expectedMessages) {
      this.logLevel = logLevel;
      // The RecordingLogger rule asserts these after the test method completes.
      logger.expectMessages(expectedMessages);
    }

    /** One row per level: NONE logs nothing; FULL adds headers and both request/response bodies. */
    @Parameters
    public static Iterable<Object[]> data() {
      return Arrays.asList(new Object[][] {
          {Level.NONE, Collections.emptyList()},
          {Level.BASIC, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- HTTP/1.1 200 OK \\([0-9]+ms\\)")},
          {Level.HEADERS, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] Content-Length: 80",
              "\\[SendsStuff#login\\] Content-Type: application/json",
              "\\[SendsStuff#login\\] ---> END HTTP \\(80-byte body\\)",
              "\\[SendsStuff#login\\] <--- HTTP/1.1 200 OK \\([0-9]+ms\\)",
              "\\[SendsStuff#login\\] content-length: 3",
              "\\[SendsStuff#login\\] <--- END HTTP \\(3-byte body\\)")},
          {Level.FULL, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] Content-Length: 80",
              "\\[SendsStuff#login\\] Content-Type: application/json",
              "\\[SendsStuff#login\\] ",
              "\\[SendsStuff#login\\] \\{\"customer_name\": \"netflix\", \"user_name\": \"denominator\", \"password\": \"password\"\\}",
              "\\[SendsStuff#login\\] ---> END HTTP \\(80-byte body\\)",
              "\\[SendsStuff#login\\] <--- HTTP/1.1 200 OK \\([0-9]+ms\\)",
              "\\[SendsStuff#login\\] content-length: 3",
              "\\[SendsStuff#login\\] ",
              "\\[SendsStuff#login\\] foo",
              "\\[SendsStuff#login\\] <--- END HTTP \\(3-byte body\\)")}
      });
    }

    @Test
    public void levelEmits() {
      // "Y-Powered-By" is deliberately filtered by RecordingLogger.shouldLogResponseHeader,
      // which is why it appears in no expected message list above.
      server.enqueue(new MockResponse().setHeader("Y-Powered-By", "Mock").setBody("foo"));
      SendsStuff api = Feign.builder()
          .logger(logger)
          .logLevel(logLevel)
          .target(SendsStuff.class, "http://localhost:" + server.getPort());
      api.login("netflix", "denominator", "password");
    }
  }
  /**
   * A status line without a reason phrase ("HTTP/1.1 200" rather than "HTTP/1.1 200 OK")
   * must still be logged cleanly — note the expected BASIC line has no phrase after 200.
   */
  @RunWith(Parameterized.class)
  public static class ReasonPhraseOptional extends LoggerTest {

    private final Level logLevel;

    public ReasonPhraseOptional(Level logLevel, List<String> expectedMessages) {
      this.logLevel = logLevel;
      logger.expectMessages(expectedMessages);
    }

    @Parameters
    public static Iterable<Object[]> data() {
      return Arrays.asList(new Object[][] {
          {Level.BASIC, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- HTTP/1.1 200 \\([0-9]+ms\\)")},
      });
    }

    @Test
    public void reasonPhraseOptional() {
      // Enqueue a bare status line with no reason phrase.
      server.enqueue(new MockResponse().setStatus("HTTP/1.1 " + 200));
      SendsStuff api = Feign.builder()
          .logger(logger)
          .logLevel(logLevel)
          .target(SendsStuff.class, "http://localhost:" + server.getPort());
      api.login("netflix", "denominator", "password");
    }
  }
  /**
   * Verifies the protocol version printed on the response log line: null leaves the
   * client's default (HTTP/1.1), known names are echoed, and an unparseable name
   * ("HTTP-XYZ") falls back to UNKNOWN via {@code enumForName}.
   */
  @RunWith(Parameterized.class)
  public static class HttpProtocolVersionTest extends LoggerTest {

    private final Level logLevel;
    private final String protocolVersionName;

    public HttpProtocolVersionTest(Level logLevel, String protocolVersionName,
        List<String> expectedMessages) {
      this.logLevel = logLevel;
      this.protocolVersionName = protocolVersionName;
      logger.expectMessages(expectedMessages);
    }

    @Parameters
    public static Iterable<Object[]> data() {
      return Arrays.asList(new Object[][] {
          {Level.BASIC, null, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- HTTP/1.1 200 \\([0-9]+ms\\)")},
          {Level.BASIC, "HTTP/1.1", Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- HTTP/1.1 200 \\([0-9]+ms\\)")},
          {Level.BASIC, "HTTP/2.0", Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- HTTP/2.0 200 \\([0-9]+ms\\)")},
          {Level.BASIC, "HTTP-XYZ", Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- UNKNOWN 200 \\([0-9]+ms\\)")}
      });
    }

    @Test
    public void testHttpProtocolVersion() {
      server.enqueue(new MockResponse().setStatus("HTTP/1.1 " + 200));
      SendsStuff api = Feign.builder()
          // TestProtocolVersionClient rewrites the response's protocol version.
          .client(new TestProtocolVersionClient(protocolVersionName))
          .logger(logger)
          .logLevel(logLevel)
          .target(SendsStuff.class, "http://localhost:" + server.getPort());
      api.login("netflix", "denominator", "password");
    }
  }
  /**
   * Verifies log output when the read times out: the response is throttled to 1 byte/s
   * while the client read timeout is 50ms, forcing a SocketTimeoutException. FULL level
   * additionally logs the stack trace between ERROR and END ERROR markers.
   */
  @RunWith(Parameterized.class)
  public static class ReadTimeoutEmitsTest extends LoggerTest {

    private final Level logLevel;

    public ReadTimeoutEmitsTest(Level logLevel, List<String> expectedMessages) {
      this.logLevel = logLevel;
      logger.expectMessages(expectedMessages);
    }

    @Parameters
    public static Iterable<Object[]> data() {
      return Arrays.asList(new Object[][] {
          {Level.NONE, Collections.emptyList()},
          {Level.BASIC, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- ERROR SocketTimeoutException: Read timed out \\([0-9]+ms\\)")},
          {Level.HEADERS, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] Content-Length: 80",
              "\\[SendsStuff#login\\] Content-Type: application/json",
              "\\[SendsStuff#login\\] ---> END HTTP \\(80-byte body\\)",
              "\\[SendsStuff#login\\] <--- ERROR SocketTimeoutException: Read timed out \\([0-9]+ms\\)")},
          {Level.FULL, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://localhost:[0-9]+/ HTTP/1.1",
              "\\[SendsStuff#login\\] Content-Length: 80",
              "\\[SendsStuff#login\\] Content-Type: application/json",
              "\\[SendsStuff#login\\] ",
              "\\[SendsStuff#login\\] \\{\"customer_name\": \"netflix\", \"user_name\": \"denominator\", \"password\": \"password\"\\}",
              "\\[SendsStuff#login\\] ---> END HTTP \\(80-byte body\\)",
              "\\[SendsStuff#login\\] <--- ERROR SocketTimeoutException: Read timed out \\([0-9]+ms\\)",
              "(?s)\\[SendsStuff#login\\] java.net.SocketTimeoutException: Read timed out.*",
              "\\[SendsStuff#login\\] <--- END ERROR")}
      });
    }

    @Test
    public void levelEmitsOnReadTimeout() {
      // 1 byte per second against a 50ms read timeout guarantees a timeout.
      server.enqueue(new MockResponse().throttleBody(1, 1, TimeUnit.SECONDS).setBody("foo"));
      thrown.expect(FeignException.class);
      SendsStuff api = Feign.builder()
          .logger(logger)
          .logLevel(logLevel)
          .options(new Request.Options(10 * 1000, TimeUnit.MILLISECONDS, 50, TimeUnit.MILLISECONDS,
              true))
          // Propagate immediately so the failure is logged exactly once (no RETRYING line).
          .retryer(new Retryer() {
            @Override
            public void continueOrPropagate(RetryableException e) {
              throw e;
            }

            @Override
            public Retryer clone() {
              return this;
            }
          })
          .target(SendsStuff.class, "http://localhost:" + server.getPort());
      api.login("netflix", "denominator", "password");
    }
  }
  /**
   * Verifies log output when the host cannot be resolved (UnknownHostException on
   * "robofu.abc"). FULL level also logs the stack trace between ERROR markers.
   */
  @RunWith(Parameterized.class)
  public static class UnknownHostEmitsTest extends LoggerTest {

    private final Level logLevel;

    public UnknownHostEmitsTest(Level logLevel, List<String> expectedMessages) {
      this.logLevel = logLevel;
      logger.expectMessages(expectedMessages);
    }

    @Parameters
    public static Iterable<Object[]> data() {
      return Arrays.asList(new Object[][] {
          {Level.NONE, Collections.emptyList()},
          {Level.BASIC, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://robofu.abc/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- ERROR UnknownHostException: robofu.abc \\([0-9]+ms\\)")},
          {Level.HEADERS, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://robofu.abc/ HTTP/1.1",
              "\\[SendsStuff#login\\] Content-Length: 80",
              "\\[SendsStuff#login\\] Content-Type: application/json",
              "\\[SendsStuff#login\\] ---> END HTTP \\(80-byte body\\)",
              "\\[SendsStuff#login\\] <--- ERROR UnknownHostException: robofu.abc \\([0-9]+ms\\)")},
          {Level.FULL, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://robofu.abc/ HTTP/1.1",
              "\\[SendsStuff#login\\] Content-Length: 80",
              "\\[SendsStuff#login\\] Content-Type: application/json",
              "\\[SendsStuff#login\\] ",
              "\\[SendsStuff#login\\] \\{\"customer_name\": \"netflix\", \"user_name\": \"denominator\", \"password\": \"password\"\\}",
              "\\[SendsStuff#login\\] ---> END HTTP \\(80-byte body\\)",
              "\\[SendsStuff#login\\] <--- ERROR UnknownHostException: robofu.abc \\([0-9]+ms\\)",
              "(?s)\\[SendsStuff#login\\] java.net.UnknownHostException: robofu.abc.*",
              "\\[SendsStuff#login\\] <--- END ERROR")}
      });
    }

    @Test
    public void unknownHostEmits() {
      SendsStuff api = Feign.builder()
          .logger(logger)
          .logLevel(logLevel)
          // Propagate immediately so the failure is logged exactly once.
          .retryer(new Retryer() {
            @Override
            public void continueOrPropagate(RetryableException e) {
              throw e;
            }

            @Override
            public Retryer clone() {
              return this;
            }
          })
          .target(SendsStuff.class, "http://robofu.abc");
      thrown.expect(FeignException.class);
      api.login("netflix", "denominator", "password");
    }
  }
  /**
   * Regression test for '%' in the target URL: "http://sna%25fu.abc" decodes to a host
   * containing a literal percent sign, which must not be misinterpreted by the logger's
   * String.format call (the host also fails DNS, producing UnknownHostException).
   */
  @RunWith(Parameterized.class)
  public static class FormatCharacterTest
      extends LoggerTest {

    private final Level logLevel;

    public FormatCharacterTest(Level logLevel, List<String> expectedMessages) {
      this.logLevel = logLevel;
      logger.expectMessages(expectedMessages);
    }

    @Parameters
    public static Iterable<Object[]> data() {
      return Arrays.asList(new Object[][] {
          {Level.NONE, Collections.emptyList()},
          {Level.BASIC, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://sna%fu.abc/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- ERROR UnknownHostException: sna%fu.abc \\([0-9]+ms\\)")},
          {Level.HEADERS, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://sna%fu.abc/ HTTP/1.1",
              "\\[SendsStuff#login\\] Content-Length: 80",
              "\\[SendsStuff#login\\] Content-Type: application/json",
              "\\[SendsStuff#login\\] ---> END HTTP \\(80-byte body\\)",
              "\\[SendsStuff#login\\] <--- ERROR UnknownHostException: sna%fu.abc \\([0-9]+ms\\)")},
          {Level.FULL, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://sna%fu.abc/ HTTP/1.1",
              "\\[SendsStuff#login\\] Content-Length: 80",
              "\\[SendsStuff#login\\] Content-Type: application/json",
              "\\[SendsStuff#login\\] ",
              "\\[SendsStuff#login\\] \\{\"customer_name\": \"netflix\", \"user_name\": \"denominator\", \"password\": \"password\"\\}",
              "\\[SendsStuff#login\\] ---> END HTTP \\(80-byte body\\)",
              "\\[SendsStuff#login\\] <--- ERROR UnknownHostException: sna%fu.abc \\([0-9]+ms\\)",
              "(?s)\\[SendsStuff#login\\] java.net.UnknownHostException: sna%fu.abc.*",
              "\\[SendsStuff#login\\] <--- END ERROR")}
      });
    }

    @Test
    public void formatCharacterEmits() {
      SendsStuff api = Feign.builder()
          .logger(logger)
          .logLevel(logLevel)
          // Propagate immediately so the failure is logged exactly once.
          .retryer(new Retryer() {
            @Override
            public void continueOrPropagate(RetryableException e) {
              throw e;
            }

            @Override
            public Retryer clone() {
              return this;
            }
          })
          .target(SendsStuff.class, "http://sna%25fu.abc");
      thrown.expect(FeignException.class);
      api.login("netflix", "denominator", "password");
    }
  }
  /**
   * Verifies the "---> RETRYING" line: the retryer below allows exactly one retry, so
   * BASIC output shows request/ERROR, RETRYING, then request/ERROR again.
   */
  @RunWith(Parameterized.class)
  public static class RetryEmitsTest extends LoggerTest {

    private final Level logLevel;

    public RetryEmitsTest(Level logLevel, List<String> expectedMessages) {
      this.logLevel = logLevel;
      logger.expectMessages(expectedMessages);
    }

    @Parameters
    public static Iterable<Object[]> data() {
      return Arrays.asList(new Object[][] {
          {Level.NONE, Collections.emptyList()},
          {Level.BASIC, Arrays.asList(
              "\\[SendsStuff#login\\] ---> POST http://robofu.abc/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- ERROR UnknownHostException: robofu.abc \\([0-9]+ms\\)",
              "\\[SendsStuff#login\\] ---> RETRYING",
              "\\[SendsStuff#login\\] ---> POST http://robofu.abc/ HTTP/1.1",
              "\\[SendsStuff#login\\] <--- ERROR UnknownHostException: robofu.abc \\([0-9]+ms\\)")}
      });
    }

    @Test
    public void retryEmits() {
      thrown.expect(FeignException.class);
      SendsStuff api = Feign.builder()
          .logger(logger)
          .logLevel(logLevel)
          // Allows a single retry, then propagates the second failure.
          .retryer(new Retryer() {
            boolean retried;

            @Override
            public void continueOrPropagate(RetryableException e) {
              if (!retried) {
                retried = true;
                return;
              }
              throw e;
            }

            @Override
            public Retryer clone() {
              return this;
            }
          })
          .target(SendsStuff.class, "http://robofu.abc");
      api.login("netflix", "denominator", "password");
    }
  }
  /**
   * Test double that records every formatted log line and, acting as a JUnit rule,
   * asserts after the statement runs that the recorded lines match the expected
   * regexes — same count, same order. Request headers beginning with "x-" and
   * response headers beginning with "y-" are filtered from the output.
   */
  private static final class RecordingLogger extends Logger implements TestRule {

    private static final String PREFIX_X = "x-";
    private static final String PREFIX_Y = "y-";

    private final List<String> messages = new ArrayList<>();
    private final List<String> expectedMessages = new ArrayList<>();

    // Suppresses request headers such as "X-Token".
    @Override
    protected boolean shouldLogRequestHeader(String header) {
      return !header.toLowerCase().startsWith(PREFIX_X);
    }

    // Suppresses response headers such as "Y-Powered-By".
    @Override
    protected boolean shouldLogResponseHeader(String header) {
      return !header.toLowerCase().startsWith(PREFIX_Y);
    }

    /** Registers the ordered regex list the recorded lines must match. */
    void expectMessages(List<String> expectedMessages) {
      this.expectedMessages.addAll(expectedMessages);
    }

    @Override
    protected void log(String configKey, String format, Object... args) {
      messages.add(methodTag(configKey) + String.format(format, args));
    }

    @Override
    public Statement apply(final Statement base, Description description) {
      return new Statement() {
        @Override
        public void evaluate() throws Throwable {
          base.evaluate();
          // Soft assertions: report every mismatching line, not just the first.
          SoftAssertions softly = new SoftAssertions();
          softly.assertThat(messages.size()).isEqualTo(expectedMessages.size());
          for (int i = 0; i < messages.size() && i < expectedMessages.size(); i++) {
            softly.assertThat(messages.get(i)).matches(expectedMessages.get(i));
          }
          softly.assertAll();
        }
      };
    }
  }
/**
 * A {@link Client.Default} that overwrites the protocol version of every
 * response with a fixed value, so tests can exercise protocol-version logging
 * without a server that actually speaks that version.
 */
private static final class TestProtocolVersionClient extends Client.Default {

    private final String protocolVersionName;

    public TestProtocolVersionClient(String protocolVersionName) {
        super(null, null); // no custom SSL socket factory or hostname verifier
        this.protocolVersionName = protocolVersionName;
    }

    @Override
    Response convertResponse(HttpURLConnection connection, Request request)
        throws IOException {
        Response converted = super.convertResponse(connection, request);
        if (protocolVersionName == null) {
            // nothing to override, hand back the real response
            return converted;
        }
        return converted.toBuilder()
            .protocolVersion(enumForName(ProtocolVersion.class, protocolVersionName))
            .build();
    }
}
}
| apache-2.0 |
crate/crate | server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java | 40104 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.settings;
import io.crate.common.collections.Tuple;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.search.spell.LevenshteinDistance;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.regex.Regex;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
/**
* A basic setting service that can be used for per-index and per-cluster settings.
* This service offers transactional application of updates settings.
*/
public abstract class AbstractScopedSettings {
/** Prefix under which unknown/invalid settings are preserved by {@code archiveUnknownOrInvalidSettings}. */
public static final String ARCHIVED_SETTINGS_PREFIX = "archived.";
// Shapes of valid setting keys: plain dotted keys, group prefixes ending in ".",
// and affix keys containing a single "*" wildcard segment.
private static final Pattern KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])*[-\\w]+$");
private static final Pattern GROUP_KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])+$");
private static final Pattern AFFIX_KEY_PATTERN = Pattern.compile("^(?:[-\\w]+[.])+[*](?:[.][-\\w]+)+$");
protected final Logger logger = LogManager.getLogger(this.getClass());
// Node settings this service was created with (fallback for resolved values).
private final Settings settings;
// Registered update listeners; copy-on-write so application can iterate safely.
private final List<SettingUpdater<?>> settingUpdaters = new CopyOnWriteArrayList<>();
// Settings with group/affix (wildcard) matchers, keyed by their raw key.
private final Map<String, Setting<?>> complexMatchers;
// Exact-key settings, keyed by their key.
private final Map<String, Setting<?>> keySettings;
// Upgrade hooks, keyed by the setting they upgrade.
private final Map<Setting<?>, SettingUpgrader<?>> settingUpgraders;
// Scope (e.g. node or index) that every registered setting must carry.
private final Setting.Property scope;
// Most recently applied scope settings; access guarded by synchronized methods.
private Settings lastSettingsApplied;
/**
 * Creates a new scoped settings service.
 *
 * @param settings the node settings this service is based on
 * @param settingsSet all settings registered for this scope
 * @param settingUpgraders upgraders applied by {@code upgradeSettings(Settings)}
 * @param scope the scope property every setting in {@code settingsSet} must carry
 * @throws IllegalArgumentException if a setting has the wrong scope, an illegal key,
 *         or a complex (group/affix) key that overlaps an already registered one
 */
protected AbstractScopedSettings(
    final Settings settings,
    final Set<Setting<?>> settingsSet,
    final Set<SettingUpgrader<?>> settingUpgraders,
    final Setting.Property scope) {
    this.settings = settings;
    this.lastSettingsApplied = Settings.EMPTY;
    // index the upgraders by the setting they apply to
    this.settingUpgraders =
        Collections.unmodifiableMap(
            settingUpgraders.stream().collect(Collectors.toMap(SettingUpgrader::getSetting, Function.identity())));
    this.scope = scope;
    Map<String, Setting<?>> complexMatchers = new HashMap<>();
    Map<String, Setting<?>> keySettings = new HashMap<>();
    for (Setting<?> setting : settingsSet) {
        if (setting.getProperties().contains(scope) == false) {
            throw new IllegalArgumentException("Setting must be a " + scope + " setting but has: " + setting.getProperties());
        }
        validateSettingKey(setting);
        if (setting.hasComplexMatcher()) {
            // group/affix settings must not have overlapping match spaces,
            // otherwise concrete-key resolution would be ambiguous
            Setting<?> overlappingSetting = findOverlappingSetting(setting, complexMatchers);
            if (overlappingSetting != null) {
                throw new IllegalArgumentException("complex setting key: [" + setting.getKey() + "] overlaps existing setting key: [" +
                    overlappingSetting.getKey() + "]");
            }
            complexMatchers.putIfAbsent(setting.getKey(), setting);
        } else {
            keySettings.putIfAbsent(setting.getKey(), setting);
        }
    }
    this.complexMatchers = Collections.unmodifiableMap(complexMatchers);
    this.keySettings = Collections.unmodifiableMap(keySettings);
}
/**
 * Rejects settings whose key is neither a plain dotted key, a valid group key
 * (for group settings), nor a valid affix key — or that ends in ".0", which is
 * reserved to avoid ambiguity with array-style keys.
 *
 * @throws IllegalArgumentException if the key is illegal for this setting
 */
protected void validateSettingKey(Setting<?> setting) {
    final String key = setting.getKey();
    final boolean plainKey = isValidKey(key);
    final boolean structuredKey = (setting.isGroupSetting() && isValidGroupKey(key)) || isValidAffixKey(key);
    if ((!plainKey && !structuredKey) || key.endsWith(".0")) {
        throw new IllegalArgumentException("illegal settings key: [" + key + "]");
    }
}
/**
 * Copy constructor used to derive a per-scope view (e.g. per-index settings) from an
 * existing service: shares the registered settings, upgraders and updaters of
 * {@code other}, but starts with {@code scopeSettings} as the last applied settings.
 */
protected AbstractScopedSettings(Settings nodeSettings, Settings scopeSettings, AbstractScopedSettings other) {
    this.settings = nodeSettings;
    this.lastSettingsApplied = scopeSettings;
    this.scope = other.scope;
    complexMatchers = other.complexMatchers;
    keySettings = other.keySettings;
    settingUpgraders = Map.copyOf(other.settingUpgraders);
    settingUpdaters.addAll(other.settingUpdaters);
}
/**
 * Returns <code>true</code> iff the given key is a valid settings key otherwise <code>false</code>
 */
public static boolean isValidKey(String key) {
    // plain dotted key, e.g. "cluster.routing.allocation.enable"
    return KEY_PATTERN.matcher(key).matches();
}
// Matches group-setting prefixes, i.e. dotted keys ending with a trailing ".".
private static boolean isValidGroupKey(String key) {
    return GROUP_KEY_PATTERN.matcher(key).matches();
}
// pkg private for tests
// Matches affix keys with a single "*" wildcard segment, e.g. "a.b.*.c".
static boolean isValidAffixKey(String key) {
    return AFFIX_KEY_PATTERN.matcher(key).matches();
}
/**
 * Validates the given settings by running it through all update listeners without applying it. This
 * method will not change any settings but will fail if any of the settings can't be applied.
 *
 * @return the given settings merged with the node settings
 */
public synchronized Settings validateUpdate(Settings settings) {
    final Settings merged = Settings.builder().put(this.settings).put(settings).build();
    final Settings baseline = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build();
    final List<RuntimeException> failures = new ArrayList<>();
    for (SettingUpdater<?> updater : settingUpdaters) {
        try {
            // run the value through the updater's parser/validator and discard the
            // result — only whether it throws matters. The has-changed check is
            // deliberately skipped so unchanged values get validated as well.
            updater.getValue(merged, baseline);
        } catch (RuntimeException e) {
            // collect every failure so the caller sees all invalid settings at once
            failures.add(e);
            logger.debug(() -> new ParameterizedMessage("failed to prepareCommit settings for [{}]", updater), e);
        }
    }
    ExceptionsHelper.rethrowAndSuppress(failures);
    return merged;
}
/**
 * Applies the given settings to all the settings consumers or to none of them. The settings
 * will be merged with the node settings before they are applied while given settings override existing node
 * settings.
 * @param newSettings the settings to apply
 * @return the unmerged applied settings
 */
public synchronized Settings applySettings(Settings newSettings) {
    if (lastSettingsApplied != null && newSettings.equals(lastSettingsApplied)) {
        // nothing changed in the settings, ignore
        return newSettings;
    }
    final Settings current = Settings.builder().put(this.settings).put(newSettings).build();
    final Settings previous = Settings.builder().put(this.settings).put(this.lastSettingsApplied).build();
    try {
        // Two-phase application keeps this transactional: first every updater
        // prepares its change (and may throw), then all prepared updates run.
        // Either all consumers observe the update or none do.
        List<Runnable> applyRunnables = new ArrayList<>();
        for (SettingUpdater<?> settingUpdater : settingUpdaters) {
            try {
                applyRunnables.add(settingUpdater.updater(current, previous));
            } catch (Exception ex) {
                logger.warn(() -> new ParameterizedMessage("failed to prepareCommit settings for [{}]", settingUpdater), ex);
                throw ex;
            }
        }
        for (Runnable settingUpdater : applyRunnables) {
            settingUpdater.run();
        }
    } catch (Exception ex) {
        logger.warn("failed to apply settings", ex);
        throw ex;
    }
    return lastSettingsApplied = newSettings;
}
/**
 * Adds a settings consumer with a predicate that is only evaluated at update time.
 * <p>
 * Note: Only settings registered in {@link SettingsModule} can be changed dynamically.
 * </p>
 * @param validator an additional validator that is only applied to updates of this setting.
 *        This is useful to add additional validation to settings at runtime compared to at startup time.
 * @throws IllegalArgumentException if {@code setting} is not the registered instance for its key
 */
public synchronized <T> void addSettingsUpdateConsumer(Setting<T> setting, Consumer<T> consumer, Consumer<T> validator) {
    // identity check: callers must pass the exact registered Setting instance
    if (get(setting.getKey()) != setting) {
        throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]");
    }
    addSettingsUpdater(setting.newUpdater(consumer, logger, validator));
}
/**
 * Adds a settings consumer for affix settings. Affix settings have a namespace associated to it that needs to be available to the
 * consumer in order to be processed correctly.
 *
 * @param setting   the affix setting to listen on; must be registered with this service
 * @param consumer  notified with the namespace and the new value on every change
 * @param validator invoked with namespace and value before the consumer is notified
 * @throws IllegalArgumentException if {@code setting} is not registered
 */
public synchronized <T> void addAffixUpdateConsumer(Setting.AffixSetting<T> setting,
                                                    BiConsumer<String, T> consumer,
                                                    BiConsumer<String, T> validator) {
    ensureSettingIsRegistered(setting);
    addSettingsUpdater(setting.newAffixUpdater(consumer, logger, validator));
}
/**
 * Verifies, by identity, that the given affix setting is one of the complex
 * settings registered with this service.
 *
 * @throws IllegalArgumentException if it is not registered
 */
private void ensureSettingIsRegistered(Setting.AffixSetting<?> setting) {
    Setting<?> registered = this.complexMatchers.get(setting.getKey());
    if (registered != setting) {
        throw new IllegalArgumentException("Setting is not registered for key [" + setting.getKey() + "]");
    }
}
/**
 * Adds a settings consumer for affix settings. Affix settings have a namespace associated to it that needs to be available to the
 * consumer in order to be processed correctly. This consumer will get a namespace to value map instead of each individual namespace
 * and value as in {@link #addAffixUpdateConsumer(Setting.AffixSetting, BiConsumer, BiConsumer)}
 *
 * @param setting   the affix setting to listen on; must be registered with this service
 * @param consumer  receives the full namespace-to-value map whenever any namespace changes
 * @param validator invoked for each namespace/value pair before the consumer is notified
 * @throws IllegalArgumentException if {@code setting} is not registered
 */
public synchronized <T> void addAffixMapUpdateConsumer(Setting.AffixSetting<T> setting, Consumer<Map<String, T>> consumer,
                                                       BiConsumer<String, T> validator) {
    // Reuse the shared registration check instead of duplicating it inline,
    // keeping behavior and error message consistent with addAffixUpdateConsumer.
    ensureSettingIsRegistered(setting);
    addSettingsUpdater(setting.newAffixMapUpdater(consumer, logger, validator));
}
// Registers a prepared updater; package-private so tests can add raw updaters directly.
synchronized void addSettingsUpdater(SettingUpdater<?> updater) {
    this.settingUpdaters.add(updater);
}
/**
 * Adds a settings consumer that accepts the values for two settings.
 * Equivalent to {@link #addSettingsUpdateConsumer(Setting, Setting, BiConsumer, BiConsumer)}
 * with a no-op validator; see that method for details.
 */
public synchronized <A, B> void addSettingsUpdateConsumer(Setting<A> a, Setting<B> b, BiConsumer<A, B> consumer) {
    addSettingsUpdateConsumer(a, b, consumer, (i, j) -> {});
}
/**
 * Adds a settings consumer that accepts the values for two settings. The consumer is only notified if one or both settings change
 * and if the provided validator succeeded.
 * <p>
 * Note: Only settings registered in {@link SettingsModule} can be changed dynamically.
 * </p>
 * This method registers a compound updater that is useful if two settings are depending on each other.
 * The consumer is always provided with both values even if only one of the two changes.
 *
 * @throws IllegalArgumentException if either setting is not the registered instance for its key
 */
public synchronized <A, B> void addSettingsUpdateConsumer(Setting<A> a, Setting<B> b,
                                                          BiConsumer<A, B> consumer, BiConsumer<A, B> validator) {
    // both settings must be the exact registered instances (identity comparison)
    if (get(a.getKey()) != a) {
        throw new IllegalArgumentException("Setting is not registered for key [" + a.getKey() + "]");
    }
    if (get(b.getKey()) != b) {
        throw new IllegalArgumentException("Setting is not registered for key [" + b.getKey() + "]");
    }
    addSettingsUpdater(Setting.compoundUpdater(consumer, validator, a, b, logger));
}
/**
 * Adds a settings consumer with no additional update-time validation.
 * <p>
 * Note: Only settings registered in {@link org.elasticsearch.cluster.ClusterModule} can be changed dynamically.
 * </p>
 */
public synchronized <T> void addSettingsUpdateConsumer(Setting<T> setting, Consumer<T> consumer) {
    addSettingsUpdateConsumer(setting, consumer, (s) -> {});
}
/**
 * Validates that all settings are registered and valid. Private and archived
 * settings are validated as well; internal/private-index checks are skipped.
 *
 * @param settings the settings to validate
 * @param validateDependencies true if dependent settings should be validated
 * @see Setting#getSettingsDependencies(String)
 */
public final void validate(final Settings settings, final boolean validateDependencies) {
    validate(settings, validateDependencies, false, false);
}
/**
 * Validates that all settings are registered and valid. Private and archived
 * settings are validated as well.
 *
 * @param settings the settings to validate
 * @param validateDependencies true if dependent settings should be validated
 * @param validateInternalOrPrivateIndex true if internal index settings should be validated
 * @see Setting#getSettingsDependencies(String)
 */
public final void validate(final Settings settings, final boolean validateDependencies, final boolean validateInternalOrPrivateIndex) {
    validate(settings, validateDependencies, false, false, validateInternalOrPrivateIndex);
}
/**
 * Validates that all settings are registered and valid. Internal/private-index
 * checks are skipped.
 *
 * @param settings the settings
 * @param validateDependencies true if dependent settings should be validated
 * @param ignorePrivateSettings true if private settings should be ignored during validation
 * @param ignoreArchivedSettings true if archived settings should be ignored during validation
 * @see Setting#getSettingsDependencies(String)
 */
public final void validate(
    final Settings settings,
    final boolean validateDependencies,
    final boolean ignorePrivateSettings,
    final boolean ignoreArchivedSettings) {
    validate(settings, validateDependencies, ignorePrivateSettings, ignoreArchivedSettings, false);
}
/**
 * Validates that all settings are registered and valid.
 *
 * @param settings the settings
 * @param validateDependencies true if dependent settings should be validated
 * @param ignorePrivateSettings true if private settings should be ignored during validation
 * @param ignoreArchivedSettings true if archived settings should be ignored during validation
 * @param validateInternalOrPrivateIndex true if index internal settings should be validated
 * @see Setting#getSettingsDependencies(String)
 */
public final void validate(
    final Settings settings,
    final boolean validateDependencies,
    final boolean ignorePrivateSettings,
    final boolean ignoreArchivedSettings,
    final boolean validateInternalOrPrivateIndex) {
    final List<RuntimeException> failures = new ArrayList<>();
    for (final String key : settings.keySet()) { // settings iterate in deterministic fashion
        final Setting<?> registered = getRaw(key);
        final boolean isPrivate = isPrivateSetting(key) || (registered != null && registered.isPrivateIndex());
        if (isPrivate && ignorePrivateSettings) {
            continue;
        }
        if (ignoreArchivedSettings && key.startsWith(ARCHIVED_SETTINGS_PREFIX)) {
            continue;
        }
        try {
            validate(key, settings, validateDependencies, validateInternalOrPrivateIndex);
        } catch (final RuntimeException e) {
            // be exhaustive: record every invalid setting instead of stopping at the first
            failures.add(e);
        }
    }
    ExceptionsHelper.rethrowAndSuppress(failures);
}
/**
 * Validates that the setting for the given key is valid. Internal/private-index
 * checks are skipped.
 *
 * @param key the key of the setting to validate
 * @param settings the settings
 * @param validateDependencies true if dependent settings should be validated
 * @throws IllegalArgumentException if the setting is invalid
 */
void validate(final String key, final Settings settings, final boolean validateDependencies) {
    validate(key, settings, validateDependencies, false);
}
/**
 * Validates that the setting for the given key is valid.
 *
 * @param key the key of the setting to validate
 * @param settings the settings
 * @param validateDependencies true if dependent settings should be validated
 * @param validateInternalOrPrivateIndex true if internal index settings should be validated
 * @throws IllegalArgumentException if the setting is unknown or invalid
 */
void validate(
    final String key, final Settings settings, final boolean validateDependencies, final boolean validateInternalOrPrivateIndex) {
    Setting<?> setting = getRaw(key);
    if (setting == null) {
        // unknown key: build a "did you mean" hint from similarly spelled
        // registered keys, ranked by Levenshtein similarity (threshold 0.7)
        LevenshteinDistance ld = new LevenshteinDistance();
        List<Tuple<Float, String>> scoredKeys = new ArrayList<>();
        for (String k : this.keySettings.keySet()) {
            float distance = ld.getDistance(key, k);
            if (distance > 0.7f) {
                scoredKeys.add(new Tuple<>(distance, k));
            }
        }
        // sort best matches first
        CollectionUtil.timSort(scoredKeys, (a,b) -> b.v1().compareTo(a.v1()));
        String msg = "unknown setting [" + key + "]";
        List<String> keys = scoredKeys.stream().map((a) -> a.v2()).collect(Collectors.toList());
        if (keys.isEmpty() == false) {
            msg += " did you mean " + (keys.size() == 1 ? "[" + keys.get(0) + "]" : "any of " + keys.toString()) + "?";
        } else {
            msg += " please check that any required plugins are installed," +
                " or check the breaking changes documentation for removed settings";
        }
        throw new IllegalArgumentException(msg);
    } else {
        Set<Setting.SettingDependency> settingsDependencies = setting.getSettingsDependencies(key);
        if (setting.hasComplexMatcher()) {
            // resolve the group/affix setting to the concrete per-key setting
            setting = setting.getConcreteSetting(key);
        }
        if (validateDependencies && settingsDependencies.isEmpty() == false) {
            for (final Setting.SettingDependency settingDependency : settingsDependencies) {
                final Setting<?> dependency = settingDependency.getSetting();
                // validate the dependent setting is set
                if (dependency.existsOrFallbackExists(settings) == false) {
                    final String message = String.format(
                        Locale.ROOT,
                        "missing required setting [%s] for setting [%s]",
                        dependency.getKey(),
                        setting.getKey());
                    throw new IllegalArgumentException(message);
                }
                // validate the dependent setting value
                settingDependency.validate(setting.getKey(), setting.get(settings), dependency.get(settings));
            }
        }
        // the only time that validateInternalOrPrivateIndex should be true is if this call is coming via the update settings API
        if (validateInternalOrPrivateIndex) {
            if (setting.isInternalIndex()) {
                throw new IllegalArgumentException(
                    "can not update internal setting [" + setting.getKey() + "]; this setting is managed via a dedicated API");
            } else if (setting.isPrivateIndex()) {
                throw new IllegalArgumentException(
                    "can not update private setting [" + setting.getKey() + "]; this setting is managed by CrateDB");
            }
        }
    }
    // if the setting declares validation dependencies, pull previously applied values
    // for any dependency missing from this update so its validator sees a full picture
    Iterator<? extends Setting<?>> validationDependencies = setting.getValidationDependencies();
    if (validationDependencies.hasNext()) {
        Settings previousSettings = this.lastSettingsApplied;
        Settings.Builder settingsInclDependencies = Settings.builder().put(settings);
        while (validationDependencies.hasNext()) {
            Setting<?> dependency = validationDependencies.next();
            if (!settings.hasValue(dependency.getKey()) && previousSettings.hasValue(dependency.getKey())) {
                settingsInclDependencies.copy(dependency.getKey(), previousSettings);
            }
        }
        // get(...) runs the setting's own parser/validator and throws if invalid
        setting.get(settingsInclDependencies.build());
    } else {
        setting.get(settings);
    }
}
/**
 * Transactional interface to update settings. Updaters are first prepared via
 * {@link #updater(Settings, Settings)} (which may throw) and only then executed,
 * so a batch of updates either fully applies or not at all.
 * @see Setting
 * @param <T> the type of the value of the setting
 */
public interface SettingUpdater<T> {

    /**
     * Returns true if this updater's setting has changed with the current update
     * @param current the current settings
     * @param previous the previous settings
     * @return true if this updater's setting has changed with the current update
     */
    boolean hasChanged(Settings current, Settings previous);

    /**
     * Returns the instance value for the current settings. This method is stateless and idempotent.
     * This method will throw an exception if the source of this value is invalid.
     */
    T getValue(Settings current, Settings previous);

    /**
     * Applies the given value to the updater. This method will actually run the update.
     */
    void apply(T value, Settings current, Settings previous);

    /**
     * Updates this updater's value if it has changed.
     * @return <code>true</code> iff the value has been updated.
     */
    default boolean apply(Settings current, Settings previous) {
        if (hasChanged(current, previous)) {
            T value = getValue(current, previous);
            apply(value, current, previous);
            return true;
        }
        return false;
    }

    /**
     * Returns a callable runnable that calls {@link #apply(Object, Settings, Settings)} if the settings
     * actually changed. This allows to defer the update to a later point in time while keeping type safety.
     * If the value didn't change the returned runnable is a noop.
     */
    default Runnable updater(Settings current, Settings previous) {
        if (hasChanged(current, previous)) {
            T value = getValue(current, previous);
            return () -> {
                apply(value, current, previous);
            };
        }
        return () -> {};
    }
}
/**
 * Returns the {@link Setting} for the given key or <code>null</code> if the setting can not be found.
 * For group/affix settings the concrete per-key setting is returned.
 */
public final Setting<?> get(String key) {
    Setting<?> setting = getRaw(key);
    if (setting == null) {
        return null;
    }
    return setting.hasComplexMatcher() ? setting.getConcreteSetting(key) : setting;
}
/**
 * Looks up the registered setting for a key: exact matches first, then the
 * group/affix matchers. Returns the raw (non-concrete) setting, or {@code null}
 * if nothing is registered for the key.
 */
private Setting<?> getRaw(String key) {
    Setting<?> exact = keySettings.get(key);
    if (exact != null) {
        return exact;
    }
    for (Setting<?> matcher : complexMatchers.values()) {
        if (matcher.match(key)) {
            // at most one complex matcher may claim a key (overlap is rejected at registration)
            assert assertMatcher(key, 1);
            assert matcher.hasComplexMatcher();
            return matcher;
        }
    }
    return null;
}
/**
 * Assertion helper: verifies that exactly {@code numComplexMatchers} complex
 * matchers claim the given key. Always returns {@code true} so it can be used
 * inside an {@code assert} statement.
 */
private boolean assertMatcher(String key, int numComplexMatchers) {
    List<Setting<?>> matched = new ArrayList<>();
    for (Setting<?> matcher : complexMatchers.values()) {
        if (matcher.match(key)) {
            matched.add(matcher.getConcreteSetting(key));
        }
    }
    assert matched.size() == numComplexMatchers
        : "Expected " + numComplexMatchers + " complex matchers to match key [" + key + "] but got: " + matched.toString();
    return true;
}
/**
 * Returns <code>true</code> if the setting for the given key is dynamically updateable. Otherwise <code>false</code>.
 * Unknown keys are treated as non-dynamic.
 */
public boolean isDynamicSetting(String key) {
    final Setting<?> setting = get(key);
    return setting != null && setting.isDynamic();
}
/**
 * Returns <code>true</code> if the setting for the given key is final. Otherwise <code>false</code>.
 * Unknown keys are treated as non-final.
 */
public boolean isFinalSetting(String key) {
    final Setting<?> setting = get(key);
    return setting != null && setting.isFinal();
}
/**
 * Returns a settings object that contains all settings that are not
 * already set in the given source. The diff contains either the default value for each
 * setting or the settings value in the given default settings.
 */
public Settings diff(Settings source, Settings defaultSettings) {
    Settings.Builder result = Settings.builder();
    // every registered setting contributes its own diff entry
    for (Setting<?> simple : keySettings.values()) {
        simple.diff(result, source, defaultSettings);
    }
    for (Setting<?> complex : complexMatchers.values()) {
        complex.diff(result, source, defaultSettings);
    }
    return result.build();
}
/**
 * Returns the value for the given setting, resolved against the last applied
 * settings with the node settings as fallback.
 *
 * @throws IllegalArgumentException if the setting has a different scope or is not registered
 */
public <T> T get(Setting<T> setting) {
    if (!setting.getProperties().contains(scope)) {
        throw new IllegalArgumentException("settings scope doesn't match the setting scope [" + this.scope + "] not in [" +
            setting.getProperties() + "]");
    }
    if (get(setting.getKey()) == null) {
        throw new IllegalArgumentException("setting " + setting.getKey() + " has not been registered");
    }
    return setting.get(this.lastSettingsApplied, settings);
}
/**
 * Updates a target settings builder with new, updated or deleted settings from a given settings builder.
 * <p>
 * Note: This method will only allow updates to dynamic settings. if a non-dynamic setting is updated an
 * {@link IllegalArgumentException} is thrown instead.
 * </p>
 *
 * @param toApply the new settings to apply
 * @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be
 *        removed from this builder
 * @param updates a settings builder that holds all updates applied to target
 * @param type a free text string to allow better exceptions messages
 * @return <code>true</code> if the target has changed otherwise <code>false</code>
 */
public boolean updateDynamicSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) {
    return updateSettings(toApply, target, updates, type, true);
}
/**
 * Updates a target settings builder with new, updated or deleted settings from a given settings builder.
 * Unlike {@link #updateDynamicSettings}, non-dynamic settings may be updated as well.
 *
 * @param toApply the new settings to apply
 * @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be
 *        removed from this builder
 * @param updates a settings builder that holds all updates applied to target
 * @param type a free text string to allow better exceptions messages
 * @return <code>true</code> if the target has changed otherwise <code>false</code>
 */
public boolean updateSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type) {
    return updateSettings(toApply, target, updates, type, false);
}
/**
 * Returns <code>true</code> if the given key is a valid delete key:
 * never for final settings; always for archived, unregistered keys; otherwise
 * dynamic settings (when onlyDynamic) or any registered setting (when not).
 */
private boolean isValidDelete(String key, boolean onlyDynamic) {
    if (isFinalSetting(key)) {
        return false; // final settings can never be removed
    }
    if (get(key) == null && key.startsWith(ARCHIVED_SETTINGS_PREFIX)) {
        return true; // archived and unregistered: always deletable
    }
    return onlyDynamic ? isDynamicSetting(key) : get(key) != null;
}
/**
 * Updates a target settings builder with new, updated or deleted settings from a given settings builder.
 *
 * @param toApply the new settings to apply
 * @param target the target settings builder that the updates are applied to. All keys that have explicit null value in toApply will be
 *        removed from this builder
 * @param updates a settings builder that holds all updates applied to target
 * @param type a free text string to allow better exceptions messages
 * @param onlyDynamic if <code>false</code> all settings are updated otherwise only dynamic settings are updated. if set to
 *        <code>true</code> and a non-dynamic setting is updated an exception is thrown.
 * @return <code>true</code> if the target has changed otherwise <code>false</code>
 */
private boolean updateSettings(Settings toApply, Settings.Builder target, Settings.Builder updates, String type, boolean onlyDynamic) {
    boolean changed = false;
    final Set<String> toRemove = new HashSet<>();
    Settings.Builder settingsBuilder = Settings.builder();
    final Predicate<String> canUpdate = (key) -> (
        isFinalSetting(key) == false && // it's not a final setting
        ((onlyDynamic == false && get(key) != null) || isDynamicSetting(key)));
    for (String key : toApply.keySet()) {
        // a key present with an explicit null value requests deletion
        boolean isDelete = toApply.hasValue(key) == false;
        if (isDelete && (isValidDelete(key, onlyDynamic) || key.endsWith("*"))) {
            // this either accepts null values that suffice the canUpdate test OR wildcard expressions (key ends with *)
            // we don't validate if there is any dynamic setting with that prefix yet we could do in the future
            toRemove.add(key);
            // we don't set changed here it's set after we apply deletes below if something actually changed
        } else if (get(key) == null) {
            throw new IllegalArgumentException(type + " setting [" + key + "], not recognized");
        } else if (isDelete == false && canUpdate.test(key)) {
            validate(key, toApply, false); // we might not have a full picture here due to dependency validation
            settingsBuilder.copy(key, toApply);
            updates.copy(key, toApply);
            changed = true;
        } else {
            if (isFinalSetting(key)) {
                throw new IllegalArgumentException("final " + type + " setting [" + key + "], not updateable");
            } else {
                throw new IllegalArgumentException(type + " setting [" + key + "], not dynamically updateable");
            }
        }
    }
    changed |= applyDeletes(toRemove, target, k -> isValidDelete(k, onlyDynamic));
    target.put(settingsBuilder.build());
    return changed;
}
/**
 * Removes all keys from {@code builder} that match one of the delete expressions
 * (which may contain simple {@code *} wildcards) and pass {@code canRemove}.
 *
 * @return <code>true</code> if at least one key was removed
 */
private static boolean applyDeletes(Set<String> deletes, Settings.Builder builder, Predicate<String> canRemove) {
    boolean removedAny = false;
    for (String expression : deletes) {
        Set<String> matches = new HashSet<>();
        for (String key : builder.keys()) {
            // we have to re-check with canRemove here since we might have a wildcard expression foo.* that matches
            // dynamic as well as static settings if that is the case we might remove static settings since we resolve the
            // wildcards late
            if (Regex.simpleMatch(expression, key) && canRemove.test(key)) {
                matches.add(key);
            }
        }
        for (String key : matches) {
            builder.remove(key);
            removedAny = true;
        }
    }
    return removedAny;
}
/**
 * Returns an already-registered complex setting whose match space overlaps the
 * new setting's, or {@code null} if there is no overlap.
 */
private static Setting<?> findOverlappingSetting(Setting<?> newSetting, Map<String, Setting<?>> complexMatchers) {
    assert newSetting.hasComplexMatcher();
    if (complexMatchers.containsKey(newSetting.getKey())) {
        // we return null here because we use a putIfAbsent call when inserting into the map, so if it exists then we already checked
        // the setting to make sure there are no overlapping settings.
        return null;
    }
    for (Setting<?> existing : complexMatchers.values()) {
        boolean overlaps = newSetting.match(existing.getKey()) || existing.match(newSetting.getKey());
        if (overlaps) {
            return existing;
        }
    }
    return null;
}
/**
 * Upgrade all settings eligible for upgrade in the specified settings instance.
 *
 * @param settings the settings instance that might contain settings to be upgraded
 * @return a new settings instance if any settings required upgrade, otherwise the same settings instance as specified
 */
public Settings upgradeSettings(final Settings settings) {
    final Settings.Builder builder = Settings.builder();
    boolean changed = false; // track if any settings were upgraded
    for (final String key : settings.keySet()) {
        final Setting<?> setting = getRaw(key);
        // getRaw returns null for unregistered keys; guard the map lookup because
        // settingUpgraders may be an immutable Map.copyOf copy (copy-constructor
        // path), whose get(null) throws NullPointerException.
        final SettingUpgrader<?> upgrader = setting == null ? null : settingUpgraders.get(setting);
        if (upgrader == null) {
            // the setting does not have an upgrader, copy the setting
            builder.copy(key, settings);
        } else {
            // the setting has an upgrader, so mark that we have changed a setting and apply the upgrade logic
            changed = true;
            // setting is non-null here, otherwise upgrader would be null
            if (setting.getConcreteSetting(key).isListSetting()) {
                final List<String> value = settings.getAsList(key);
                final String upgradedKey = upgrader.getKey(key);
                final List<String> upgradedValue = upgrader.getListValue(value);
                builder.putList(upgradedKey, upgradedValue);
            } else {
                final String value = settings.get(key);
                final String upgradedKey = upgrader.getKey(key);
                final String upgradedValue = upgrader.getValue(value);
                builder.put(upgradedKey, upgradedValue);
            }
        }
    }
    // we only return a new instance if there was an upgrade
    return changed ? builder.build() : settings;
}
/**
* Archives invalid or unknown settings. Any setting that is not recognized or fails validation
* will be archived. This means the setting is prefixed with {@value ARCHIVED_SETTINGS_PREFIX}
* and remains in the settings object. This can be used to detect invalid settings via APIs.
*
* @param settings the {@link Settings} instance to scan for unknown or invalid settings
* @param unknownConsumer callback on unknown settings (consumer receives unknown key and its
* associated value)
* @param invalidConsumer callback on invalid settings (consumer receives invalid key, its
* associated value and an exception)
* @return a {@link Settings} instance with the unknown or invalid settings archived
*/
    public Settings archiveUnknownOrInvalidSettings(
        final Settings settings,
        final Consumer<Map.Entry<String, String>> unknownConsumer,
        final BiConsumer<Map.Entry<String, String>, IllegalArgumentException> invalidConsumer) {
        Settings.Builder builder = Settings.builder();
        // tracks whether any setting was archived; if not, the input is returned unchanged
        boolean changed = false;
        for (String key : settings.keySet()) {
            try {
                Setting<?> setting = get(key);
                if (setting != null) {
                    // known setting: validate its value (may throw IllegalArgumentException),
                    // then copy it through unchanged
                    setting.get(settings);
                    builder.copy(key, settings);
                } else {
                    // already-archived and private settings are passed through as-is
                    if (key.startsWith(ARCHIVED_SETTINGS_PREFIX) || isPrivateSetting(key)) {
                        builder.copy(key, settings);
                    } else {
                        // unknown setting: notify the callback and archive it under the prefix
                        changed = true;
                        unknownConsumer.accept(new Entry(key, settings));
                        /*
                         * We put them back in here such that tools can check from the outside if there are any indices with invalid
                         * settings. The setting can remain there but we want users to be aware that some of their setting are invalid and
                         * they can research why and what they need to do to replace them.
                         */
                        builder.copy(ARCHIVED_SETTINGS_PREFIX + key, key, settings);
                    }
                }
            } catch (IllegalArgumentException ex) {
                // known setting with an invalid value: notify the callback and archive it
                changed = true;
                invalidConsumer.accept(new Entry(key, settings), ex);
                /*
                 * We put them back in here such that tools can check from the outside if there are any indices with invalid settings. The
                 * setting can remain there but we want users to be aware that some of their setting are invalid and they can research why
                 * and what they need to do to replace them.
                 */
                builder.copy(ARCHIVED_SETTINGS_PREFIX + key, key, settings);
            }
        }
        if (changed) {
            return builder.build();
        } else {
            return settings;
        }
    }
private static final class Entry implements Map.Entry<String, String> {
private final String key;
private final Settings settings;
private Entry(String key, Settings settings) {
this.key = key;
this.settings = settings;
}
@Override
public String getKey() {
return key;
}
@Override
public String getValue() {
return settings.get(key);
}
@Override
public String setValue(String value) {
throw new UnsupportedOperationException();
}
}
/**
* Returns <code>true</code> iff the setting is a private setting ie. it should be treated as valid even though it has no internal
* representation. Otherwise <code>false</code>
*/
    // TODO this should be replaced by Setting.Property.HIDDEN or something like this.
    public boolean isPrivateSetting(String key) {
        // Base implementation: no setting is private. Subclasses override this to
        // exempt internal settings from being archived as unknown.
        return false;
    }
}
| apache-2.0 |
mhenke/gerrit-topic-reviews | gerrit-sshd/src/main/java/com/google/gerrit/sshd/commands/ListProjects.java | 8716 | // Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.sshd.commands;
import com.google.gerrit.reviewdb.Project;
import com.google.gerrit.server.IdentifiedUser;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.ProjectControl;
import com.google.gerrit.server.project.ProjectState;
import com.google.gerrit.sshd.BaseCommand;
import com.google.inject.Inject;
import org.apache.sshd.server.Environment;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.Repository;
import org.kohsuke.args4j.Option;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeMap;
/**
 * SSH command that prints the projects visible to the calling user, either as a
 * flat list (optionally with the SHA-1 of a named branch, {@code --show-branch})
 * or as an inheritance tree ({@code --tree}). The two options are mutually
 * exclusive.
 */
final class ListProjects extends BaseCommand {
  // Prefix printed before a non-terminal child in tree output.
  private static final String NODE_PREFIX = "|-- ";
  // Prefix printed before the last child of a level in tree output.
  private static final String LAST_NODE_PREFIX = "`-- ";
  // Column separator drawn while the last top-level parent has not yet been printed.
  private static final String DEFAULT_TAB_SEPARATOR = "|";
  // Placeholder printed instead of the name of a project the user cannot see.
  private static final String NOT_VISIBLE_PROJECT = "(x)";
  @Inject
  private IdentifiedUser currentUser;
  @Inject
  private ProjectCache projectCache;
  @Inject
  private GitRepositoryManager repoManager;
  @Option(name = "--show-branch", aliases = {"-b"}, usage = "displays the sha of each project in the specified branch")
  private String showBranch;
  @Option(name = "--tree", aliases = {"-t"}, usage = "displays project inheritance in a tree-like format\n" +
      "this option does not work together with the show-branch option")
  private boolean showTree;
  // Mutated to " " by printElement() once the last top-level parent is reached,
  // so the vertical bar stops being drawn in front of deeper levels.
  private String currentTabSeparator = DEFAULT_TAB_SEPARATOR;
  @Override
  public void start(final Environment env) {
    startThread(new CommandRunnable() {
      @Override
      public void run() throws Exception {
        parseCommandLine();
        ListProjects.this.display();
      }
    });
  }
  // Emits the project list (flat or tree form) to the command output stream.
  private void display() throws Failure {
    if (showTree && (showBranch != null)) {
      throw new UnloggedFailure(1, "fatal: --tree and --show-branch options are not compatible.");
    }
    final PrintWriter stdout = toPrintWriter(out);
    // Only allocated in tree mode; a TreeMap keeps projects sorted by name.
    TreeMap<String, TreeNode> treeMap = null;
    if (showTree) {
      treeMap = new TreeMap<String, TreeNode>();
    }
    try {
      for (final Project.NameKey projectName : projectCache.all()) {
        final ProjectState e = projectCache.get(projectName);
        if (e == null) {
          // If we can't get it from the cache, pretend its not present.
          //
          continue;
        }
        final ProjectControl pctl = e.controlFor(currentUser);
        if (!showTree) {
          if (!pctl.isVisible()) {
            // Require the project itself to be visible to the user.
            //
            continue;
          }
          if (showBranch != null) {
            final Ref ref = getBranchRef(projectName);
            if (ref == null || ref.getObjectId() == null
                || !pctl.controlForRef(ref.getLeaf().getName()).isVisible()) {
              // No branch, or the user can't see this branch, so skip it.
              //
              continue;
            }
            stdout.print(ref.getObjectId().name());
            stdout.print(' ');
          }
          stdout.print(projectName.get() + "\n");
        } else {
          // Tree mode collects every project (including invisible ones, so the
          // hierarchy stays intact); visibility is applied when printing.
          treeMap.put(projectName.get(),
              new TreeNode(pctl.getProject(), pctl.isVisible()));
        }
      }
      if (showTree && treeMap.size() > 0) {
        final List<TreeNode> sortedNodes = new ArrayList<TreeNode>();
        // Builds the inheritance tree using a list.
        //
        for (final TreeNode key : treeMap.values()) {
          final String parentName = key.getParentName();
          if (parentName != null) {
            final TreeNode node = treeMap.get(parentName);
            if (node != null) {
              node.addChild(key);
            } else {
              // Parent is unknown to the cache; treat this node as a root.
              sortedNodes.add(key);
            }
          } else {
            sortedNodes.add(key);
          }
        }
        // Builds a fake root node, which contains the sorted projects.
        //
        final TreeNode fakeRoot = new TreeNode(null, sortedNodes, false);
        printElement(stdout, fakeRoot, -1, false, sortedNodes.get(sortedNodes.size() - 1));
        stdout.flush();
      }
    } finally {
      stdout.flush();
    }
  }
  // Resolves the --show-branch ref inside the project's repository; returns
  // null when the repository cannot be opened or the ref lookup fails with I/O.
  private Ref getBranchRef(Project.NameKey projectName) {
    try {
      final Repository r = repoManager.openRepository(projectName);
      try {
        return r.getRef(showBranch);
      } finally {
        r.close();
      }
    } catch (IOException ioe) {
      return null;
    }
  }
  /** Class created to manipulate the nodes of the project inheritance tree **/
  private static class TreeNode {
    private final List<TreeNode> children;
    private final Project project;
    private final boolean isVisible;
    /**
     * Constructor
     * @param p Project
     */
    public TreeNode(Project p, boolean visible) {
      this.children = new ArrayList<TreeNode>();
      this.project = p;
      this.isVisible = visible;
    }
    /**
     * Constructor used for creating the fake node
     * @param p Project
     * @param c List of nodes
     */
    public TreeNode(Project p, List<TreeNode> c, boolean visible) {
      this.children = c;
      this.project = p;
      this.isVisible = visible;
    }
    /**
     * Returns whether the node is a leaf (has no children)
     * @return True if it is a leaf, false otherwise
     */
    public boolean isLeaf() {
      return children.size() == 0;
    }
    /**
     * Returns the project parent name
     * @return Project parent name, or null when the project has no parent
     */
    public String getParentName() {
      if (project.getParent() != null) {
        return project.getParent().get();
      }
      return null;
    }
    /**
     * Adds a child to the list
     * @param node TreeNode child
     */
    public void addChild(TreeNode node) {
      children.add(node);
    }
    /**
     * Returns the project instance
     * @return Project instance
     */
    public Project getProject() {
      return project;
    }
    /**
     * Returns the list of children nodes
     * @return List of children nodes
     */
    public List<TreeNode> getChildren() {
      return children;
    }
    /**
     * Returns if the project is visible to the user
     * @return True if is visible, false, otherwise
     */
    public boolean isVisible() {
      return isVisible;
    }
  }
  /**
   * Used to display the project inheritance tree recursively
   * @param stdout PrintWriter used to print
   * @param node Tree node
   * @param level Current level of the tree
   * @param isLast True, if is the last node of a level, false, otherwise
   * @param lastParentNode Last "root" parent node
   */
  private void printElement(final PrintWriter stdout, TreeNode node, int level, boolean isLast,
      final TreeNode lastParentNode) {
    // Checks if is not the "fake" root project.
    //
    if (node.getProject() != null) {
      // Check if is not the last "root" parent node,
      // so the "|" separator will not longer be needed.
      //
      if (!currentTabSeparator.equals(" ")) {
        final String nodeProject = node.getProject().getName();
        final String lastParentProject = lastParentNode.getProject().getName();
        if (nodeProject.equals(lastParentProject)) {
          currentTabSeparator = " ";
        }
      }
      if (level > 0) {
        // left-pad with the separator, 4 columns per tree level
        stdout.print(String.format("%-" + 4 * level + "s", currentTabSeparator));
      }
      final String prefix = isLast ? LAST_NODE_PREFIX : NODE_PREFIX ;
      String printout;
      if (node.isVisible()) {
        printout = prefix + node.getProject().getName();
      } else {
        printout = prefix + NOT_VISIBLE_PROJECT;
      }
      stdout.print(printout + "\n");
    }
    if (node.isLeaf()) {
      return;
    } else {
      final List<TreeNode> children = node.getChildren();
      ++level;
      for(TreeNode treeNode : children) {
        final boolean isLastIndex = children.indexOf(treeNode) == children.size() - 1;
        printElement(stdout, treeNode, level, isLastIndex, lastParentNode);
      }
    }
  }
}
| apache-2.0 |
sergiodurand/Cparser | src/lcr/c2bsv/parser/ASTOR.java | 713 | /* Generated By:JJTree: Do not edit this line. ASTOR.java Version 4.3 */
/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=false,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
package lcr.c2bsv.parser;
public
class ASTOR extends SimpleNode {
  public ASTOR(int id) {
    super(id);
  }
  public ASTOR(C2BSVParser p, int id) {
    super(p, id);
  }
  /** Renders this node as the parenthesized BSV logical-OR of its two operand subtrees. */
  public StringBuilder toBSV() {
    final SimpleNode lhs = (SimpleNode) jjtGetChild(0);
    final SimpleNode rhs = (SimpleNode) jjtGetChild(1);
    final StringBuilder out = new StringBuilder("(");
    out.append(lhs.toBSV()).append(" || ").append(rhs.toBSV()).append(")");
    return out;
  }
}
| apache-2.0 |
cloudera/oozie | core/src/main/java/org/apache/oozie/command/wf/ActionEndCommand.java | 10505 | /**
* Copyright (c) 2010 Yahoo! Inc. All rights reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.oozie.command.wf;
import java.util.Date;
import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.DagELFunctions;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.WorkflowJobBean;
import org.apache.oozie.action.ActionExecutor;
import org.apache.oozie.action.ActionExecutorException;
import org.apache.oozie.client.OozieClient;
import org.apache.oozie.client.WorkflowAction;
import org.apache.oozie.client.WorkflowJob;
import org.apache.oozie.client.SLAEvent.SlaAppType;
import org.apache.oozie.client.SLAEvent.Status;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.service.ActionService;
import org.apache.oozie.service.Services;
import org.apache.oozie.service.UUIDService;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.store.WorkflowStore;
import org.apache.oozie.util.Instrumentation;
import org.apache.oozie.util.XLog;
import org.apache.oozie.util.db.SLADbOperations;
import org.apache.oozie.workflow.WorkflowInstance;
/**
 * Command that completes a workflow action whose execution has finished: it
 * invokes the executor's {@code end()} callback, persists the action/workflow,
 * records an SLA status event and queues notification/signal commands. When
 * ending fails it applies the executor's transient / non-transient / error /
 * failed recovery strategy.
 */
public class ActionEndCommand extends ActionCommand<Void> {
    public static final String COULD_NOT_END = "COULD_NOT_END";
    public static final String END_DATA_MISSING = "END_DATA_MISSING";
    private String id;
    private String jobId = null;

    public ActionEndCommand(String id, String type) {
        super("action.end", type, 0);
        this.id = id;
    }

    /**
     * Performs the end processing for the action identified by {@code id},
     * assuming the caller already holds the job lock.
     *
     * @param store the workflow store used to load and persist state
     * @return always {@code null}
     * @throws CommandException if no executor is registered for the action type
     */
    @Override
    protected Void call(WorkflowStore store) throws StoreException, CommandException {
        WorkflowJobBean workflow = store.getWorkflow(jobId, false);
        setLogInfo(workflow);
        WorkflowActionBean action = store.getAction(id, false);
        setLogInfo(action);
        if (action.isPending()
                && (action.getStatus() == WorkflowActionBean.Status.DONE
                || action.getStatus() == WorkflowActionBean.Status.END_RETRY || action.getStatus() == WorkflowActionBean.Status.END_MANUAL)) {
            if (workflow.getStatus() == WorkflowJob.Status.RUNNING) {
                ActionExecutor executor = Services.get().get(ActionService.class).getExecutor(action.getType());
                // BUG FIX: check for a missing executor BEFORE dereferencing it.
                // Previously getMaxRetries()/getRetryInterval() were called first,
                // so an unknown action type caused an NPE instead of E0802.
                if (executor == null) {
                    throw new CommandException(ErrorCode.E0802, action.getType());
                }
                Configuration conf = workflow.getWorkflowInstance().getConf();
                int maxRetries = conf.getInt(OozieClient.ACTION_MAX_RETRIES, executor.getMaxRetries());
                long retryInterval = conf.getLong(OozieClient.ACTION_RETRY_INTERVAL, executor.getRetryInterval());
                executor.setMaxRetries(maxRetries);
                executor.setRetryInterval(retryInterval);
                // END_RETRY / END_MANUAL mean a previous end attempt already failed.
                boolean isRetry = action.getStatus() == WorkflowActionBean.Status.END_RETRY
                        || action.getStatus() == WorkflowActionBean.Status.END_MANUAL;
                ActionExecutorContext context = new ActionCommand.ActionExecutorContext(workflow, action, isRetry);
                try {
                    XLog.getLog(getClass()).debug(
                            "End, name [{0}] type [{1}] status[{2}] external status [{3}] signal value [{4}]",
                            action.getName(), action.getType(), action.getStatus(), action.getExternalStatus(),
                            action.getSignalValue());
                    WorkflowInstance wfInstance = workflow.getWorkflowInstance();
                    DagELFunctions.setActionInfo(wfInstance, action);
                    workflow.setWorkflowInstance(wfInstance);
                    incrActionCounter(action.getType(), 1);
                    // time the executor's end() callback for instrumentation
                    Instrumentation.Cron cron = new Instrumentation.Cron();
                    cron.start();
                    executor.end(context, action);
                    cron.stop();
                    addActionCron(action.getType(), cron);
                    if (!context.isEnded()) {
                        // The executor returned without reporting end data; treat as fatal.
                        XLog.getLog(getClass()).warn(XLog.OPS,
                                "Action Ended, ActionExecutor [{0}] must call setEndData()", executor.getType());
                        action.setErrorInfo(END_DATA_MISSING, "Execution Ended, but End Data Missing from Action");
                        failJob(context);
                        store.updateAction(action);
                        store.updateWorkflow(workflow);
                        return null;
                    }
                    action.setRetries(0);
                    action.setEndTime(new Date());
                    store.updateAction(action);
                    store.updateWorkflow(workflow);
                    // Map the action's terminal status onto an SLA status.
                    Status slaStatus = null;
                    switch (action.getStatus()) {
                        case OK:
                            slaStatus = Status.SUCCEEDED;
                            break;
                        case KILLED:
                            slaStatus = Status.KILLED;
                            break;
                        case FAILED:
                            slaStatus = Status.FAILED;
                            break;
                        case ERROR:
                            XLog.getLog(getClass()).info("ERROR is considered as FAILED for SLA");
                            slaStatus = Status.KILLED;
                            break;
                        default: // TODO: What will happen for other Action
                            // status
                            slaStatus = Status.FAILED;
                            break;
                    }
                    SLADbOperations.writeStausEvent(action.getSlaXml(), action.getId(), store, slaStatus,
                            SlaAppType.WORKFLOW_ACTION);
                    queueCallable(new NotificationCommand(workflow, action));
                    XLog.getLog(getClass()).debug(
                            "Queuing commands for action=" + id + ", status=" + action.getStatus()
                                    + ", Set pending=" + action.getPending());
                    queueCallable(new SignalCommand(workflow.getId(), id));
                }
                catch (ActionExecutorException ex) {
                    XLog.getLog(getClass()).warn(
                            "Error ending action [{0}]. ErrorType [{1}], ErrorCode [{2}], Message [{3}]",
                            action.getName(), ex.getErrorType(), ex.getErrorCode(), ex.getMessage());
                    action.setErrorInfo(ex.getErrorCode(), ex.getMessage());
                    action.setEndTime(null);
                    switch (ex.getErrorType()) {
                        case TRANSIENT:
                            if (!handleTransient(context, executor, WorkflowAction.Status.END_RETRY)) {
                                // retries exhausted; hand the action over to manual recovery
                                handleNonTransient(store, context, executor, WorkflowAction.Status.END_MANUAL);
                                action.setPendingAge(new Date());
                                action.setRetries(0);
                            }
                            action.setEndTime(null);
                            break;
                        case NON_TRANSIENT:
                            handleNonTransient(store, context, executor, WorkflowAction.Status.END_MANUAL);
                            action.setEndTime(null);
                            break;
                        case ERROR:
                            handleError(context, executor, COULD_NOT_END, false, WorkflowAction.Status.ERROR);
                            queueCallable(new SignalCommand(workflow.getId(), id));
                            break;
                        case FAILED:
                            failJob(context);
                            break;
                    }
                    store.updateAction(action);
                    store.updateWorkflow(workflow);
                }
            }
            else {
                XLog.getLog(getClass()).warn("Job state is not {0}. Skipping ActionEnd Execution",
                        WorkflowJob.Status.RUNNING.toString());
            }
        }
        else {
            XLog.getLog(getClass()).debug("Action pending={0}, status={1}. Skipping ActionEnd Execution",
                    action.getPending(), action.getStatusStr());
        }
        return null;
    }

    /**
     * Acquires the job lock and delegates to {@link #call(WorkflowStore)};
     * requeues this command when the lock cannot be obtained.
     */
    @Override
    protected Void execute(WorkflowStore store) throws CommandException, StoreException {
        XLog.getLog(getClass()).debug("STARTED ActionEndCommand for action " + id);
        try {
            jobId = Services.get().get(UUIDService.class).getId(id);
            if (lock(jobId)) {
                call(store);
            }
            else {
                // lock not available; retry after the requeue interval
                queueCallable(new ActionEndCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL);
                XLog.getLog(getClass()).warn("ActionEnd lock was not acquired - failed {0}", id);
            }
        }
        catch (InterruptedException e) {
            queueCallable(new ActionEndCommand(id, type), LOCK_FAILURE_REQUEUE_INTERVAL);
            XLog.getLog(getClass()).warn("ActionEnd lock was not acquired - interrupted exception failed {0}", id);
        }
        finally {
            XLog.getLog(getClass()).debug("ENDED ActionEndCommand for action " + id);
        }
        return null;
    }
}
| apache-2.0 |
sculptor/sculptor | sculptor-examples/library-example/src/test/java/org/sculptor/examples/library/media/domain/BookBuilderTest.java | 925 | package org.sculptor.examples.library.media.domain;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.sculptor.examples.library.media.domain.MediaCharacterBuilder.mediaCharacter;
import java.time.LocalDateTime;
public class BookBuilderTest {

    /** Verifies that the generated builder populates every configured property. */
    @Test
    public void assertBuild() {
        final LocalDateTime creationTime = LocalDateTime.now();
        final Book book = BookBuilder.book()
                .createdBy("me")
                .createdDate(creationTime)
                .title("Ender's Game")
                .isbn("Some-ISBN")
                .addMediaCharacter(mediaCharacter().name("Ender").build())
                .build();
        assertNotNull(book);
        assertEquals("me", book.getCreatedBy());
        assertEquals(creationTime, book.getCreatedDate());
        assertEquals("Ender's Game", book.getTitle());
        assertEquals("Some-ISBN", book.getIsbn());
        assertEquals(1, book.getMediaCharacters().size());
    }
}
| apache-2.0 |
josealmeida/opereffa | OpenEHRRefImpExtensions/src/uk/ac/ucl/chime/wrappers/RMTreeConstructor.java | 4792 | /*******************************************************************************
* Copyright 2012 Sevket Seref Arikan, David Ingram
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package uk.ac.ucl.chime.wrappers;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import org.apache.commons.lang.NotImplementedException;
import org.openehr.am.archetype.Archetype;
import org.openehr.am.archetype.assertion.Assertion;
import org.openehr.am.archetype.assertion.ExpressionBinaryOperator;
import org.openehr.am.archetype.assertion.ExpressionItem;
import org.openehr.am.archetype.assertion.ExpressionLeaf;
import org.openehr.am.archetype.constraintmodel.ArchetypeInternalRef;
import org.openehr.am.archetype.constraintmodel.ArchetypeSlot;
import org.openehr.am.archetype.constraintmodel.CAttribute;
import org.openehr.am.archetype.constraintmodel.CComplexObject;
import org.openehr.am.archetype.constraintmodel.CObject;
import org.openehr.am.archetype.constraintmodel.CPrimitiveObject;
import org.openehr.am.archetype.constraintmodel.primitive.CDuration;
import org.openehr.am.archetype.constraintmodel.primitive.CString;
import org.openehr.am.archetype.ontology.ArchetypeTerm;
import org.openehr.am.archetype.ontology.OntologyDefinitions;
import org.openehr.am.openehrprofile.datatypes.quantity.CDvQuantity;
import org.openehr.am.openehrprofile.datatypes.quantity.CDvQuantityItem;
import org.openehr.build.RMObjectBuilder;
import org.openehr.build.RMObjectBuildingException;
import org.openehr.build.SystemValue;
import org.openehr.rm.RMObject;
import org.openehr.rm.datastructure.itemstructure.representation.Cluster;
import org.openehr.rm.datastructure.itemstructure.representation.Element;
import org.openehr.rm.datastructure.itemstructure.representation.Item;
import org.openehr.rm.datatypes.basic.DataValue;
import org.openehr.rm.datatypes.quantity.DvOrdinal;
import org.openehr.rm.datatypes.quantity.DvQuantity;
import org.openehr.rm.datatypes.quantity.datetime.DvDateTime;
import org.openehr.rm.datatypes.quantity.datetime.DvDuration;
import org.openehr.rm.datatypes.text.CodePhrase;
import org.openehr.rm.datatypes.text.DvCodedText;
import org.openehr.rm.datatypes.text.DvText;
import org.openehr.rm.support.basic.Interval;
import org.openehr.rm.support.measurement.MeasurementService;
import org.openehr.rm.support.measurement.SimpleMeasurementService;
import org.openehr.rm.support.terminology.TerminologyService;
import org.openehr.terminology.SimpleTerminologyService;
/**
 * Holds an openEHR archetype and a target Swing panel, and provides helpers
 * that add label / text-field rows to the panel while tracking the next
 * insertion coordinates.
 */
public class RMTreeConstructor {
	//Internal copy of the passed in Archetype
	private Archetype _archetype;
	// NOTE(review): never assigned anywhere in this class -- presumably set by
	// code elsewhere or dead; confirm before relying on it.
	private RMObjectBuilder _builder;
	// Panel that receives the generated widgets.
	private JPanel _panelToFill;
	// Layout state: vertical gap between rows and the next insertion point / row height.
	private int _verticalGap = 10;
	private int _lastY = 10;
	private int _lastX = 10;
	private int _defaultHeight = 25;
	protected static CodePhrase lang = new CodePhrase("ISO_639-1", "en");
	protected static CodePhrase charset = new CodePhrase("IANA_character-sets","UTF-8");
	protected static TerminologyService ts;
	protected static MeasurementService ms;
	// Initialize the shared terminology/measurement services once per JVM;
	// fail hard if either service cannot be started.
	static {
		try {
			ts = SimpleTerminologyService.getInstance();
			ms = SimpleMeasurementService.getInstance();
		} catch (Exception e) {
			throw new RuntimeException(
					"failed to start terminology or measure service");
		}
	}
	/*
	 * Construct a reference model based tree using the archetype passed in
	 * @param arc the archetype to render
	 * @param pnlToFill the panel the generated widgets are added to
	 */
	public RMTreeConstructor(Archetype arc, JPanel pnlToFill) throws Exception{
		_archetype = arc;
		_panelToFill = pnlToFill;
	}
	// Adds a label at the current insertion point and advances to the next row.
	private void addLabel(String lblContent){
		JLabel lblClusterName = new JLabel(lblContent);
		_panelToFill.add(lblClusterName );
		lblClusterName.setBounds(_lastX, _lastY, 250, _defaultHeight);
		_lastY += _defaultHeight + _verticalGap;
		_panelToFill.revalidate();
	}
	// Adds a text box on the same row as the label added immediately before it
	// (the subtraction undoes the row advance performed by addLabel).
	private void addTextBox(String content){
		JTextField fld = new JTextField();
		_panelToFill.add(fld);
		fld.setBounds(_lastX + 30 + 200, _lastY - ( _defaultHeight + _verticalGap), 150, _defaultHeight);
		fld.setText(content);
		_panelToFill.revalidate();
	}
}
| apache-2.0 |
remibergsma/cosmic | cosmic-core/engine/components-api/src/main/java/com/cloud/network/lb/LoadBalancingRulesManager.java | 2311 | package com.cloud.network.lb;
import com.cloud.context.CallContext;
import com.cloud.exception.NetworkRuleConflictException;
import com.cloud.exception.ResourceUnavailableException;
import com.cloud.network.lb.LoadBalancingRule.LbDestination;
import com.cloud.network.lb.LoadBalancingRule.LbHealthCheckPolicy;
import com.cloud.network.lb.LoadBalancingRule.LbSslCert;
import com.cloud.network.lb.LoadBalancingRule.LbStickinessPolicy;
import com.cloud.network.rules.LbStickinessMethod;
import com.cloud.network.rules.LoadBalancer;
import com.cloud.network.rules.LoadBalancerContainer.Scheme;
import com.cloud.user.Account;
import java.util.List;
public interface LoadBalancingRulesManager {
    /**
     * Creates a public load-balancer rule on the given source IP and returns it.
     * @throws NetworkRuleConflictException if the new rule conflicts with an existing rule
     */
    LoadBalancer createPublicLoadBalancer(String xId, String name, String description, int srcPort, int destPort, long sourceIpId, String protocol, String algorithm,
                                          boolean openFirewall, CallContext caller, String lbProtocol, Boolean forDisplay, int clientTimeout, int serverTimeout)
            throws NetworkRuleConflictException;
    /** Removes all load balancers defined on the given public IP; returns true on success. */
    boolean removeAllLoadBalanacersForIp(long ipId, Account caller, long callerUserId);
    /** Removes all load balancers defined in the given network; returns true on success. */
    boolean removeAllLoadBalanacersForNetwork(long networkId, Account caller, long callerUserId);
    /** Returns the destinations currently configured for the given LB rule. */
    List<LbDestination> getExistingDestinations(long lbId);
    /** Returns the stickiness policies attached to the given LB rule. */
    List<LbStickinessPolicy> getStickinessPolicies(long lbId);
    /** Returns the stickiness methods available in the given network. */
    List<LbStickinessMethod> getStickinessMethods(long networkid);
    /** Returns the health-check policies attached to the given LB rule. */
    List<LbHealthCheckPolicy> getHealthCheckPolicies(long lbId);
    /** Returns the SSL certificate bound to the given LB rule, if any. */
    LbSslCert getLbSslCert(long lbId);
    /**
     * Remove vm from all load balancers
     *
     * @param vmId
     * @return true if removal is successful
     */
    boolean removeVmFromLoadBalancers(long vmId);
    /** Applies all LB rules of the given scheme in the network; true on success. */
    boolean applyLoadBalancersForNetwork(long networkId, Scheme scheme) throws ResourceUnavailableException;
    // NOTE(review): presumably resolves the named capability from the network's LB
    // service provider -- confirm against the implementation.
    String getLBCapability(long networkid, String capabilityName);
    /** Configures the autoscale VM group identified by vmGroupid; true on success. */
    boolean configureLbAutoScaleVmGroup(long vmGroupid, String currentState) throws ResourceUnavailableException;
    /** Revokes all LB rules of the given scheme in the network; true on success. */
    boolean revokeLoadBalancersForNetwork(long networkId, Scheme scheme) throws ResourceUnavailableException;
    /** Validates the given LB rule; returns true when the rule is acceptable. */
    boolean validateLbRule(LoadBalancingRule lbRule);
    /** Removes the given LB rule's bookkeeping state. */
    void removeLBRule(LoadBalancer rule);
    // NOTE(review): void return suggests this throws when the LB service/scheme is
    // not supported in the network -- confirm against the implementation.
    void isLbServiceSupportedInNetwork(long networkId, Scheme scheme);
}
| apache-2.0 |
evanchooly/morphia | morphia/src/main/java/org/mongodb/morphia/mapping/codec/FloatArrayCodec.java | 1623 | package org.mongodb.morphia.mapping.codec;
import org.bson.BsonReader;
import org.bson.BsonType;
import org.bson.BsonWriter;
import org.bson.codecs.Codec;
import org.bson.codecs.DecoderContext;
import org.bson.codecs.EncoderContext;
import org.mongodb.morphia.mapping.Mapper;
import java.util.ArrayList;
import java.util.List;
/**
 * Codec for {@code float[]} fields: encodes the array as a BSON array whose
 * elements are written with the registry's {@link Float} codec, and decodes a
 * BSON array back into a primitive float array.
 * <p>
 * NOTE(review): the lazy element-codec lookup below is not synchronized --
 * assumes codecs are resolved/used from a single thread or that a duplicate
 * lookup is harmless; confirm against the driver's threading model.
 */
class FloatArrayCodec implements Codec<float[]> {

    private Codec<Float> codec;
    private final Mapper mapper; // source of the codec registry; never reassigned

    FloatArrayCodec(Mapper mapper) {
        this.mapper = mapper;
    }

    @Override
    public Class<float[]> getEncoderClass() {
        return float[].class;
    }

    /** Lazily resolves and caches the {@link Float} element codec from the registry. */
    private Codec<Float> getCodec() {
        if (codec == null) {
            codec = mapper.getCodecRegistry().get(Float.class);
        }
        return codec;
    }

    @Override
    public void encode(final BsonWriter writer, final float[] value, final EncoderContext encoderContext) {
        writer.writeStartArray();
        for (final float cur : value) {
            getCodec().encode(writer, cur, encoderContext);
        }
        writer.writeEndArray();
    }

    @Override
    public float[] decode(final BsonReader reader, final DecoderContext decoderContext) {
        reader.readStartArray();
        // The array length is not known up front, so collect into a list first.
        final List<Float> list = new ArrayList<>();
        while (reader.readBsonType() != BsonType.END_OF_DOCUMENT) {
            list.add(getCodec().decode(reader, decoderContext));
        }
        reader.readEndArray();
        // Unbox into a primitive array of the exact size read.
        final float[] array = new float[list.size()];
        for (int i = 0; i < array.length; i++) {
            array[i] = list.get(i);
        }
        return array;
    }
}
| apache-2.0 |
hixuym/sf-framework | sf-metrics-graphite/src/main/java/io/sunflower/metrics/graphite/GraphiteReporterFactory.java | 3277 | package io.sunflower.metrics.graphite;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.ScheduledReporter;
import com.codahale.metrics.graphite.Graphite;
import com.codahale.metrics.graphite.GraphiteReporter;
import com.codahale.metrics.graphite.GraphiteUDP;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonTypeName;
import io.sunflower.metrics.BaseReporterFactory;
import io.sunflower.validation.OneOf;
import io.sunflower.validation.PortRange;
import org.hibernate.validator.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
/**
* A factory for {@link GraphiteReporter} instances.
* <p/>
* <b>Configuration Parameters:</b>
* <table>
* <tr>
* <td>Name</td>
* <td>Default</td>
* <td>Description</td>
* </tr>
* <tr>
* <td>host</td>
* <td>localhost</td>
* <td>The hostname of the Graphite server to report to.</td>
* </tr>
* <tr>
* <td>port</td>
* <td>2003</td>
* <td>The port of the Graphite server to report to.</td>
* </tr>
* <tr>
* <td>prefix</td>
* <td><i>None</i></td>
* <td>The prefix for Metric key names to report to Graphite.</td>
* </tr>
* <tr>
* <td>transport</td>
* <td><i>tcp</i></td>
* <td>The transport used to report to Graphite. One of {@code tcp} or
* {@code udp}.</td>
* </tr>
* </table>
*/
@JsonTypeName("graphite")
public class GraphiteReporterFactory extends BaseReporterFactory {

    @NotEmpty
    private String host = "localhost";

    @PortRange
    private int port = 2003;

    @NotNull
    private String prefix = "";

    @NotNull
    @OneOf(value = {"tcp", "udp"}, ignoreCase = true)
    private String transport = "tcp";

    /** Hostname of the Graphite server reports are sent to. */
    @JsonProperty
    public String getHost() {
        return host;
    }

    @JsonProperty
    public void setHost(String host) {
        this.host = host;
    }

    /** Port of the Graphite server reports are sent to. */
    @JsonProperty
    public int getPort() {
        return port;
    }

    @JsonProperty
    public void setPort(int port) {
        this.port = port;
    }

    /** Prefix prepended to every metric key sent to Graphite. */
    @JsonProperty
    public String getPrefix() {
        return prefix;
    }

    @JsonProperty
    public void setPrefix(String prefix) {
        this.prefix = prefix;
    }

    /** Wire transport for reports: either {@code tcp} or {@code udp}. */
    @JsonProperty
    public String getTransport() {
        return transport;
    }

    @JsonProperty
    public void setTransport(String transport) {
        this.transport = transport;
    }

    @Override
    public ScheduledReporter build(MetricRegistry registry) {
        final GraphiteReporter.Builder reporterBuilder = builder(registry);
        // Pick the sender according to the configured transport; TCP is the default.
        if ("udp".equalsIgnoreCase(transport)) {
            return reporterBuilder.build(new GraphiteUDP(host, port));
        }
        return reporterBuilder.build(new Graphite(host, port));
    }

    /** Applies the shared reporter options (units, filter, prefix) to a fresh builder. */
    protected GraphiteReporter.Builder builder(MetricRegistry registry) {
        return GraphiteReporter.forRegistry(registry)
                .convertDurationsTo(getDurationUnit())
                .convertRatesTo(getRateUnit())
                .filter(getFilter())
                .prefixedWith(getPrefix())
                .disabledMetricAttributes(getDisabledAttributes());
    }
}
| apache-2.0 |
tateshitah/jspwiki | jspwiki-war/src/main/java/org/apache/wiki/tags/AdminBeanIteratorInfo.java | 1582 | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.wiki.tags;
import javax.servlet.jsp.tagext.TagData;
import javax.servlet.jsp.tagext.TagExtraInfo;
import javax.servlet.jsp.tagext.VariableInfo;
/**
* Just provides iteration support for AdminBeanIteratorTag
*
* @since 2.6.
*/
public class AdminBeanIteratorInfo extends TagExtraInfo {
    /**
     * {@inheritDoc}
     * <p>
     * Exposes the tag's {@code id} attribute as a nested scripting variable of
     * type {@code org.apache.wiki.ui.admin.AdminBean}.
     */
    @Override
    public VariableInfo[] getVariableInfo(TagData data) {
        return new VariableInfo[] {
            new VariableInfo(data.getAttributeString("id"),
                             "org.apache.wiki.ui.admin.AdminBean",
                             true,
                             VariableInfo.NESTED)
        };
    }
}
| apache-2.0 |
gemxd/gemfirexd-oss | tests/sql/src/main/java/sql/dmlDistTxStatements/TradeSellOrdersDMLDistTxStmt.java | 60779 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package sql.dmlDistTxStatements;
import hydra.Log;
import hydra.TestConfig;
import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import com.gemstone.gemfire.cache.query.Struct;
import com.gemstone.gemfire.internal.Assert;
import sql.SQLBB;
import sql.SQLHelper;
import sql.SQLPrms;
import sql.SQLTest;
import sql.dmlStatements.TradeSellOrdersDMLStmt;
import sql.sqlTx.RangeForeignKey;
import sql.sqlTx.SQLDistTxTest;
import sql.sqlTx.SQLTxBatchingFKBB;
import sql.sqlTx.SQLTxPartitionInfoBB;
import sql.sqlTx.SQLTxSecondBB;
import sql.sqlutil.DMLDistTxStmtsFactory;
import sql.sqlutil.ResultSetHelper;
import util.TestException;
import util.TestHelper;
public class TradeSellOrdersDMLDistTxStmt extends TradeSellOrdersDMLStmt
implements DMLDistTxStmtIF {
  // Fragments used to compose foreign-key tracking strings such as
  // "<table>_cid_<cid>_sid_<sid>" stored in the shared tx blackboards.
  public static String cidPrefix = "_cid_";
  public static String sidPrefix = "_sid_";
  // Hydra test-config flag: whether this run uses concurrent update transactions.
  static final boolean isConcUpdateTx = TestConfig.tab().booleanAt(SQLPrms.isConcUpdateTx, false);
  // "select ... for update" statement variants used by the update path.
  protected static String[] selectForUpdate = {
    "select oid, status from trade.sellorders where sid = ? and ask>? for update of status ",
    "select * from trade.sellorders where cid >= ? and cid <? for update of qty, status ",
    "select * from trade.sellorders where cid = ? and sid= ? for update ",
  };
  // Update-by-primary-key statement variants.
  protected static String[] updateByPK = {
    "update trade.sellorders set status = ? where oid = ? ",
    "update trade.sellorders set qty = ?, status = ? where oid = ? ",
    "update trade.sellorders set order_time = ? where oid = ? ",
  };
  // Delete statement variants; the runtime index chosen is "whichDelete".
  protected static String[] delete = {
    "delete from trade.sellorders where cid=? and sid=? and oid <?",
    "delete from trade.sellorders where oid=? ",
    "delete from trade.sellorders where cid>? and cid <? and oid <?",
    "delete from trade.sellorders where cid<? and sid = ? and oid < ?",
  };
  // True when trade.sellorders is configured as a replicated table.
  public static boolean isReplicate;
  static {
    // Partition info is published to the BB keyed by table name; a missing
    // entry (NPE from the lookup) is treated as "not replicated".
    try {
      String partition = (String)SQLTxPartitionInfoBB.getBB().getSharedMap().get("trade." + getTableName());
      if (partition.equalsIgnoreCase("replicate")) isReplicate = true;
      else isReplicate = false;
    } catch (NullPointerException npe) {
      isReplicate = false;
    }
  }
  static {
    Log.getLogWriter().info("isReplicate is " + isReplicate);
  }
  // Whether ops may execute on secondary copies: always for replicated tables,
  // otherwise only when batching with secondary data and redundancy is configured.
  protected boolean hasSecondary = isReplicate ||
      (batchingWithSecondaryData && (Boolean) SQLBB.getBB().getSharedMap().get(SQLTest.hasRedundancy));
  /**
   * Replays a delete previously recorded by {@code addDeleteToDerbyTx} against Derby.
   * The positional layout of {@code data} must match what addDeleteToDerbyTx wrote.
   *
   * @param dConn Derby connection
   * @param index position of the recorded op in the per-thread derbyOps list
   */
  @SuppressWarnings("unchecked")
  @Override
  public void deleteDerby(Connection dConn, int index) {
    ArrayList<Object[]> derbyOps = (ArrayList<Object[]>)SQLDistTxTest.derbyOps.get();
    Object[] data = derbyOps.get(index);
    // Slot 8 holds the SQLException (if any) gfxd raised for the same op.
    SQLException gfxdse = (SQLException) data[8];
    try {
      //Connection dConn, int cid, int cid1, int sid, int oid, int whichDelete, int updateCount
      deleteFromDerbyTable(dConn, (Integer)data[2], (Integer)data[3], (Integer)data[4],
          (Integer)data[5], (Integer)data[6], (Integer)data[7]);
    } catch (SQLException derbyse) {
      // Derby must fail the same way gfxd did, or the comparison flags a mismatch.
      SQLHelper.compareExceptions(derbyse, gfxdse);
    }
  }
@SuppressWarnings("unchecked")
protected void addDeleteToDerbyTx(int cid, int cid1, int sid, int oid, int whichDelete,
int updateCount, SQLException gfxdse){
Object[] data = new Object[9];
data[0] = DMLDistTxStmtsFactory.TRADE_SELLORDERS;
data[1] = "delete";
data[2] = cid;
data[3] = cid1;
data[4] = sid;
data[5] = oid;
data[6] = whichDelete;
data[7] = updateCount;
data[8] = gfxdse;
ArrayList<Object[]> derbyOps = (ArrayList<Object[]>)SQLDistTxTest.derbyOps.get();
if (derbyOps == null) derbyOps = new ArrayList<Object[]>();
derbyOps.add(data);
SQLDistTxTest.derbyOps.set(derbyOps);
}
  /**
   * Performs one randomly chosen delete against gfxd inside the current tx.
   * Pre-computes the keys the delete would touch (via a non-tx connection),
   * verifies any conflict exception against those keys, and records the op
   * for Derby replay on success.
   *
   * @return false if the op was rolled back by a conflict or tx node failure,
   *         true otherwise (including the HA "no op" case)
   */
  @SuppressWarnings("unchecked")
  @Override
  public boolean deleteGfxd(Connection gConn, boolean withDerby) {
    if (!withDerby) {
      return deleteGfxdOnly(gConn);
    }
    int whichDelete = rand.nextInt(delete.length);
    int cid = getExistingCid();
    int cid1 = cid + 3;
    int sid = getExistingSid();
    int oid = getExistingOid();
    int[] updateCount = new int[1];
    SQLException gfxdse = null;
    // keys are computed outside the tx so the scan itself cannot conflict
    Connection nonTxConn = (Connection)SQLDistTxTest.gfxdNoneTxConn.get();
    HashMap<String, Integer> modifiedKeysByOp = new HashMap<String, Integer>();
    HashMap<String, Integer> modifiedKeysByTx = (HashMap<String, Integer>)
        SQLDistTxTest.curTxModifiedKeys.get();
    try {
      getKeysForDelete(nonTxConn, modifiedKeysByOp, whichDelete, cid, cid1, sid, oid);
    } catch (SQLException se) {
      if (se.getSQLState().equals("X0Z01") && isHATest) { // handles HA issue for #41471
        Log.getLogWriter().warning("Not able to process the keys for this op due to HA, this insert op does not proceed");
        return true; //not able to process the keys due to HA, it is a no op
      } else if (gfxdtxHANotReady && isHATest &&
          SQLHelper.gotTXNodeFailureException(se) ) {
        SQLHelper.printSQLException(se);
        Log.getLogWriter().info("got node failure exception during Tx with HA support, continue testing");
        return false;
      } else SQLHelper.handleSQLException(se);
    }
    try {
      deleteFromGfxdTable(gConn, cid, cid1, sid, oid, whichDelete, updateCount);
      //the gfxd tx needs to handle prepareStatement failed due to node failure here
      //does not expect critical heap exception etc in current tx testing
      //once these coverage are added, similar handling of exceptions seen in getStmt()
      //need to be added here.
    } catch (SQLException se) {
      SQLHelper.printSQLException(se);
      if (se.getSQLState().equalsIgnoreCase("X0Z02") ) {
        // conflict: check it is legitimate given the keys this op/tx holds
        if (!batchingWithSecondaryData) verifyConflict(modifiedKeysByOp, modifiedKeysByTx, se, true);
        else verifyConflictWithBatching(modifiedKeysByOp, modifiedKeysByTx, se, hasSecondary, true);
        return false;
      } else if (gfxdtxHANotReady && isHATest &&
          SQLHelper.gotTXNodeFailureException(se) ) {
        SQLHelper.printSQLException(se);
        Log.getLogWriter().info("got node failure exception during Tx with HA support, continue testing");
        return false;
      } else {
        gfxdse = se; //security testing may get exception
      }
    }
    // no conflict raised: verify none was expected either
    if (!batchingWithSecondaryData) verifyConflict(modifiedKeysByOp, modifiedKeysByTx, gfxdse, false);
    else verifyConflictWithBatching(modifiedKeysByOp, modifiedKeysByTx, gfxdse, hasSecondary, false);
    //add this operation for derby
    addDeleteToDerbyTx(cid, cid1, sid, oid, whichDelete, updateCount[0], gfxdse);
    // merge this op's keys into the tx-wide modified-key set
    modifiedKeysByTx.putAll(modifiedKeysByOp);
    SQLDistTxTest.curTxModifiedKeys.set(modifiedKeysByTx);
    return true;
  }
public static int getExistingOid() {
int maxSid = (int) SQLBB.getBB().getSharedCounters().read(SQLBB.tradeSellOrdersPrimary);
int newSids = 10 * numOfThreads > 100 ? 10 * numOfThreads: 100;
if (maxSid>newSids) return rand.nextInt(maxSid-newSids)+1;
else throw new TestException("test issue, not enough sid in the tests yet");
}
private void getKeysForDelete(Connection conn, HashMap<String, Integer > keys,
int whichDelete, int cid, int cid1, int sid, int oid) throws SQLException {
String sql = null;
int txId = (Integer) SQLDistTxTest.curTxId.get();
String database = SQLHelper.isDerbyConn(conn) ? "Derby - " : "gemfirexd - TXID:" + txId + " " ;
ResultSet rs = null;
PreparedStatement stmt = null;
List<Struct>resultsList = null;
ArrayList<Integer> ids = new ArrayList<Integer>();
switch (whichDelete){
case 0:
//"delete from trade.sellorders where cid=? and sid=? and oid <?",
sql = "select oid from trade.sellorders where cid= " + cid + " and sid =" + sid
+ " and oid <" + oid;
Log.getLogWriter().info(database + sql);
rs = conn.createStatement().executeQuery(sql);
resultsList = ResultSetHelper.asList(rs, SQLHelper.isDerbyConn(conn));
if (resultsList != null) {
Iterator<Struct> it = resultsList.iterator();
while (it.hasNext()) {
Struct row = (Struct)it.next();
Integer oidKey = (Integer)row.get("OID");
ids.add(oidKey);
Log.getLogWriter().info(database + "To be deleted key is: " + oidKey);
}
}
break;
case 1:
//"delete from trade.sellorders where oid=? ",
sql = "select oid from trade.sellorders where oid= ?";
Log.getLogWriter().info(database + sql);
stmt = conn.prepareStatement(sql);
stmt.setInt(1, oid);
rs = stmt.executeQuery();
resultsList = ResultSetHelper.asList(rs, SQLHelper.isDerbyConn(conn));
if (resultsList != null) {
Iterator<Struct> it = resultsList.iterator();
while (it.hasNext()) {
Struct row = (Struct)it.next();
Integer oidKey = (Integer)row.get("OID");
ids.add(oidKey);
Log.getLogWriter().info(database + "To be deleted key is: " + oidKey);
}
}
break;
case 2:
//"delete from trade.sellorders where cid>? and cid <? and oid <?",
sql = "select oid from trade.sellorders where cid>? and cid <? and oid <?";
Log.getLogWriter().info(database + sql);
stmt = conn.prepareStatement(sql);
stmt.setInt(1, cid);
stmt.setInt(2, cid1);
stmt.setInt(3, oid);
rs = stmt.executeQuery();
resultsList = ResultSetHelper.asList(rs, SQLHelper.isDerbyConn(conn));
if (resultsList != null) {
Iterator<Struct> it = resultsList.iterator();
while (it.hasNext()) {
Struct row = (Struct)it.next();
Integer oidKey = (Integer)row.get("OID");
ids.add(oidKey);
Log.getLogWriter().info(database + "To be deleted key is: " + oidKey);
}
}
break;
case 3:
//"delete from trade.sellorders where cid<? and sid = ? and oid <?",
sql = "select oid from trade.sellorders where cid<? and sid = ? and oid <?";
Log.getLogWriter().info(database + sql);
stmt = conn.prepareStatement(sql);
stmt.setInt(1, cid);
stmt.setInt(2, sid);
stmt.setInt(3, oid);
rs = stmt.executeQuery();
resultsList = ResultSetHelper.asList(rs, SQLHelper.isDerbyConn(conn));
if (resultsList != null) {
Iterator<Struct> it = resultsList.iterator();
while (it.hasNext()) {
Struct row = (Struct)it.next();
Integer oidKey = (Integer)row.get("OID");
ids.add(oidKey);
Log.getLogWriter().info(database + "To be deleted key is: " + oidKey);
}
}
break;
default:
throw new TestException("wrong index here");
}
for (Integer key: ids) {
keys.put(getTableName() + "_" + key, txId);
//keys operated by this op will be added in verify conflict method
}
}
protected void deleteFromGfxdTable(Connection gConn, int cid, int cid1,
int sid, int oid, int whichDelete, int[]updateCount) throws SQLException{
PreparedStatement stmt = null;
if (SQLTest.isEdge && !isTicket48176Fixed && isHATest) stmt = getStmtThrowException(gConn, delete[whichDelete]);
else stmt = gConn.prepareStatement(delete[whichDelete]);
updateCount[0] = deleteFromTableTx(stmt, cid, cid1, sid, oid, whichDelete);
}
protected void deleteFromDerbyTable(Connection dConn, int cid, int cid1,
int sid, int oid, int whichDelete, int updateCount) throws SQLException{
PreparedStatement stmt = getStmt(dConn, delete[whichDelete]);
int count = deleteFromTableTx(stmt, cid, cid1, sid, oid, whichDelete);
if (count != updateCount) {
Log.getLogWriter().info("derby delete has different row count from that of gfxd " +
"gfxd deleted " + updateCount + " rows, but derby deleted " + count + " rows in "
+ getTableName());
}
}
//delete from table based on whichDelete
protected int deleteFromTableTx(PreparedStatement stmt, int cid, int cid1,
int sid, int oid, int whichDelete) throws SQLException {
int txId = (Integer) SQLDistTxTest.curTxId.get();
String database = SQLHelper.isDerbyConn(stmt.getConnection()) ? "Derby - " : "gemfirexd - TXID:" + txId + " " ;
String query = " QUERY: " + delete[whichDelete];
String successString = "";
switch (whichDelete) {
case 0:
//"delete from trade.sellorders where cid=? and sid=? and oid <?",
Log.getLogWriter().info(database + "deleting trade.sellorders with CID:" + cid
+ ",SID:" + sid + ",OID:" + oid + query);
stmt.setInt(1, cid);
stmt.setInt(2, sid);
stmt.setInt(3, oid);
successString="from trade.sellorders with CID:" + cid
+ ",SID:" + sid + ",OID:" + oid + query;
break;
case 1:
//"delete from trade.sellorders where oid=? ",
Log.getLogWriter().info(database + "deleting trade.sellorders with OID:" + oid + query);
stmt.setInt(1, oid);
successString ="trade.sellorders with OID:" + oid + query;
break;
case 2:
//"delete from trade.sellorders where cid>? and cid <? and oid <?",
Log.getLogWriter().info(database + "deleting trade.sellorders with 1_CID:" + cid
+ ",2_CID:" + cid1 + ",OID:" + oid + query);
stmt.setInt(1, cid);
stmt.setInt(2, cid1);
stmt.setInt(3, oid);
successString ="trade.sellorders with 1_CID:" + cid
+ ",2_CID:" + cid1 + ",OID:" + oid + query;
break;
case 3:
//"delete from trade.sellorders where cid<? and sid = ?",
Log.getLogWriter().info(database + "deleting trade.sellorders with CID:" + cid
+ ",SID:" + sid + ",OID:" + oid + query);
stmt.setInt(1, cid);
stmt.setInt(2, sid);
stmt.setInt(3, oid);
successString ="trade.sellorders with CID:" + cid
+ ",SID:" + sid + ",OID:" + oid + query;
break;
default:
throw new TestException("incorrect delete statement, should not happen");
}
int rowCount = stmt.executeUpdate();
Log.getLogWriter().info(database + "deleted " + rowCount + " rows from" + successString);
SQLWarning warning = stmt.getWarnings(); //test to see there is a warning
if (warning != null) {
SQLHelper.printSQLWarning(warning);
}
return rowCount;
}
protected boolean deleteGfxdOnly(Connection gConn){
try {
delete(null, gConn);
} catch (TestException te) {
if (te.getMessage().contains("X0Z02") ) {
Log.getLogWriter().info("got expected conflict exception, continuing test");
return false;
} else if (gfxdtxHANotReady && isHATest &&
SQLHelper.gotTXNodeFailureTestException(te)) {
Log.getLogWriter().info ("got expected node failure exception, continuing test");
return false;
} else throw te;
}
return true;
}
@SuppressWarnings("unchecked")
protected void addInsertToDerbyTx(int[] oid, int[] cid, int[] sid, int[] qty,
String[] status, Timestamp[] time, BigDecimal[] ask, int[] updateCount,
SQLException gfxdse){
Object[] data = new Object[11];
data[0] = DMLDistTxStmtsFactory.TRADE_SELLORDERS;
data[1] = "insert";
data[2] = oid;
data[3] = cid;
data[4] = sid;
data[5] = qty;
data[6] = status;
data[7] = time;
data[8] = ask;
data[9] = updateCount;
data[10] = gfxdse;
ArrayList<Object[]> derbyOps = (ArrayList<Object[]>)SQLDistTxTest.derbyOps.get();
if (derbyOps == null) derbyOps = new ArrayList<Object[]>();
derbyOps.add(data);
SQLDistTxTest.derbyOps.set(derbyOps);
}
  /**
   * Replays an insert previously recorded by {@code addInsertToDerbyTx} against
   * Derby, comparing any Derby exception with the recorded gfxd exception.
   *
   * @param dConn Derby connection
   * @param index position of the recorded op in the per-thread derbyOps list
   */
  @SuppressWarnings("unchecked")
  @Override
  public void insertDerby(Connection dConn, int index) {
    ArrayList<Object[]> derbyOps = (ArrayList<Object[]>)SQLDistTxTest.derbyOps.get();
    Object[] data = derbyOps.get(index);
    // Slot 10 holds the SQLException (if any) gfxd raised for the same op.
    SQLException gfxdse = (SQLException) data[10];
    try {
      //insertToDerbyTable(dConn, cid, cust_name,since, addr, count, size);
      insertToDerbyTable(dConn, (int[])data[2], (int[])data[3], (int[])data[4],
          (int[])data[5], (String[])data[6], (Timestamp[])data[7],
          (BigDecimal[])data[8], (int[])data[9], 1);
    } catch (SQLException derbyse) {
      SQLHelper.compareExceptions(derbyse, gfxdse);
      return;
    }
    // Derby succeeded; if gfxd had failed, the exception was missed on Derby.
    if (gfxdse != null) {
      SQLHelper.handleMissedSQLException(gfxdse);
    }
  }
protected boolean insertToDerbyTable(Connection conn, int[] oid, int[] cid, int[] sid,
int[] qty, String[] status, Timestamp[] time, BigDecimal[] ask, int[] updateCount,
int size) throws SQLException {
PreparedStatement stmt = getStmt(conn, insert);
int tid = getMyTid();
int count =-1;
for (int i=0 ; i<size ; i++) {
count = insertToTable(stmt, oid[i], cid[i], sid[i], qty[i], status[i], time[i], ask[i], tid);
if (count != updateCount[i]) {
Log.getLogWriter().info("derby insert has different row count from that of gfxd " +
"gfxd inserted " + updateCount[i] +
" row, but derby inserted " + count + " row in " + getTableName());
}
}
return true;
}
  /**
   * Performs one insert into trade.sellorders inside the current gfxd tx.
   * Tracks the inserted primary key and the parent (portfolio) foreign key in
   * the shared blackboards, predicts whether a conflict is expected, executes
   * the insert, verifies the conflict outcome, and records the op for Derby
   * replay on success.
   *
   * @return false if the op was rolled back by conflict or tx node failure,
   *         true otherwise (including the HA "no op" case)
   */
  @SuppressWarnings("unchecked")
  @Override
  public boolean insertGfxd(Connection gConn, boolean withDerby) {
    if (!withDerby) {
      return insertGfxdOnly(gConn);
    }
    int size = 1;
    int[] cid = new int[size];
    int[] sid = new int[size];
    int[] oid = new int[size];
    int[] qty = new int[size];
    String[] status = new String[size];
    Timestamp[] time = new Timestamp[size];
    BigDecimal[] ask = new BigDecimal[size];
    int[] updateCount = new int[size];
    boolean[] expectConflict = new boolean[1];
    // non-tx connection used for all key/data lookups so they cannot conflict
    Connection nonTxConn = (Connection)SQLDistTxTest.gfxdNoneTxConn.get();
    SQLException gfxdse = null;
    getKeysFromPortfolio(nonTxConn, cid, sid);
    getDataForInsert(nonTxConn, oid, cid, sid, qty, time, ask, size); //get the data
    for (int i = 0; i< status.length; i++) {
      status[i] = "open";
    }
    // occasionally zero out cid/sid to exercise foreign-key violation paths
    int chance = 200;
    if (rand.nextInt(chance) == 0) cid[0] = 0;
    else if (rand.nextInt(chance) == 0) sid[0] = 0;
    HashMap<String, Integer> modifiedKeysByOp = new HashMap<String, Integer>();
    modifiedKeysByOp.put(getTableName()+"_"+oid[0], (Integer)SQLDistTxTest.curTxId.get());
    HashSet<String> parentKeysHold = new HashSet<String>();
    try {
      // predicts conflicts caused by other txs holding the parent FK
      getKeysForInsert(nonTxConn, cid[0], sid[0], expectConflict, parentKeysHold);
      /* check through batching fk bb now
      if (batchingWithSecondaryData && expectConflict[0] == true) {
        SQLDistTxTest.expectForeignKeyConflictWithBatching.set(expectConflict[0]);
        //TODO need to think a better way when #43170 is fixed -- which foreign keys (range keys) are held
        //and by which threads need to be tracked and verified.
      }
      */
    } catch (SQLException se) {
      if (se.getSQLState().equals("X0Z01") && isHATest) { // handles HA issue for #41471
        Log.getLogWriter().warning("Not able to process the keys for this op due to HA, this insert op does not proceed");
        return true; //not able to process the keys due to HA, it is a no op
      } else SQLHelper.handleSQLException(se);
    }
    HashMap<String, Integer> modifiedKeysByTx = (HashMap<String, Integer>)
        SQLDistTxTest.curTxModifiedKeys.get();
    if (batchingWithSecondaryData) {
      //add to fk bb for the fk key hold due to insert into child table
      HashSet<String> holdFKsByThisTx = (HashSet<String>) SQLDistTxTest.foreignKeyHeldWithBatching.get();
      holdFKsByThisTx.addAll(parentKeysHold);
      SQLDistTxTest.foreignKeyHeldWithBatching.set(holdFKsByThisTx);
      hydra.blackboard.SharedMap holdingFKTxIds = SQLTxBatchingFKBB.getBB().getSharedMap();
      Integer myTxId = (Integer) SQLDistTxTest.curTxId.get();
      // register this txId against every parent key it now holds
      for (String key: parentKeysHold) {
        HashSet<Integer> txIds = (HashSet<Integer>) holdingFKTxIds.get(key);
        if (txIds == null) txIds = new HashSet<Integer>();
        txIds.add(myTxId);
        holdingFKTxIds.put(key, txIds);
      }
    }
    try {
      insertToGfxdTable(gConn, oid, cid, sid, qty, status, time, ask, updateCount, size);
      //the gfxd tx needs to handle prepareStatement failed due to node failure here
      //does not expect critical heap exception etc in current tx testing
      //once these coverage are added, similar handling of exceptions seen in getStmt()
      //need to be added here.
    } catch (SQLException se) {
      SQLHelper.printSQLException(se);
      if (se.getSQLState().equalsIgnoreCase("X0Z02") ) {
        if (expectConflict[0]) {
          ; //if conflict caused by foreign key
        }
        else {
          if (!batchingWithSecondaryData) verifyConflict(modifiedKeysByOp, modifiedKeysByTx, se, true);
          else verifyConflictWithBatching(modifiedKeysByOp, modifiedKeysByTx, se, hasSecondary, true);
          //check if conflict caused by multiple inserts on the same keys
        }
        if (batchingWithSecondaryData) cleanUpFKHolds(); //got the exception, ops are rolled back due to #43170
        removePartialRangeForeignKeys(cid, sid);
        return false;
      } else if (gfxdtxHANotReady && isHATest &&
          SQLHelper.gotTXNodeFailureException(se) ) {
        SQLHelper.printSQLException(se);
        Log.getLogWriter().info("got node failure exception during Tx with HA support, continue testing");
        if (batchingWithSecondaryData) cleanUpFKHolds(); //got the exception, ops are rolled back due to #43170
        removePartialRangeForeignKeys(cid, sid); //operation not successful, remove the fk constraint keys
        return false; //not able to handle node failure yet, needs to rollback ops
        // to be confirmed if select query could cause lock to be released
      } else {
        // any non-conflict failure when a conflict was predicted is a test
        // failure, except FK violation (23503) which is also acceptable
        if (expectConflict[0] && !se.getSQLState().equals("23503")) {
          if (!batchingWithSecondaryData)
            throw new TestException("expect conflict exceptions, but did not get it" +
                TestHelper.getStackTrace(se));
          else {
            //do nothing, as foreign key check may only be done on local node, conflict could be detected at commit time
            ;
          }
        }
        gfxdse = se;
        if (batchingWithSecondaryData) cleanUpFKHolds(); //got the exception, ops are rolled back due to #43170
        removePartialRangeForeignKeys(cid, sid); //operation not successful, remove the fk constraint keys
      }
    }
    if (!batchingWithSecondaryData) verifyConflict(modifiedKeysByOp, modifiedKeysByTx, gfxdse, false);
    else verifyConflictWithBatching(modifiedKeysByOp, modifiedKeysByTx, gfxdse, hasSecondary, false);
    // insert succeeded although a conflict was predicted: without batching this
    // is a test failure
    if (expectConflict[0] && gfxdse == null) {
      if (!batchingWithSecondaryData)
        throw new TestException("Did not get conflict exception for foreign key check. " +
            "Please check for logs");
      else {
        //do nothing, as foreign key check may only be done on local node, conflict could be detected at commit time
        ;
      }
    }
    //add this operation also for derby
    if (withDerby) addInsertToDerbyTx(oid, cid, sid, qty, status, time,
        ask, updateCount, gfxdse);
    modifiedKeysByTx.putAll(modifiedKeysByOp);
    SQLDistTxTest.curTxModifiedKeys.set(modifiedKeysByTx);
    return true;
  }
protected boolean insertGfxdOnly(Connection gConn){
try {
insert(null, gConn, 1);
} catch (TestException te) {
if (te.getMessage().contains("X0Z02") ) {
Log.getLogWriter().info ("got expected conflict exception, continuing test");
return false;
//delete/update a parent key in another tx could get conflict
} else if (gfxdtxHANotReady && isHATest &&
SQLHelper.gotTXNodeFailureTestException(te)) {
Log.getLogWriter().info ("got expected node failure exception, continuing test");
return false;
} else throw te;
}
return true;
}
  /**
   * Checks the parent (portfolio) row for the cid/sid being inserted, records
   * the parent key and its range-foreign-key hold in the shared blackboards,
   * and predicts via {@code expectConflict[0]} whether the insert should hit a
   * conflict because another tx holds the parent key.
   *
   * @param conn           non-tx connection used for the lookup
   * @param expectConflict out-param: [0] set true when a conflict is expected
   * @param parentKeys     out-param: parent FK keys this tx will hold
   * @throws SQLException from the parent-table select
   */
  @SuppressWarnings("unchecked")
  private void getKeysForInsert(Connection conn, int cid, int sid, boolean[] expectConflict, HashSet<String> parentKeys)
      throws SQLException {
    String sql = null;
    ResultSet rs = null;
    int txId = (Integer) SQLDistTxTest.curTxId.get();
    String database = SQLHelper.isDerbyConn(conn) ? "Derby - " : "gemfirexd - TXID:" + txId + " " ;
    //check if foreign key exists
    //test will add cid =0 or sid =0 to verify, tx could detect non existing fks
    sql = "select * from trade.portfolio where cid = " + cid + " and sid =" + sid;
    Log.getLogWriter().info(database + sql);
    rs = conn.createStatement().executeQuery(sql);
    if (!rs.next()) {
      Log.getLogWriter().info(database + "foreign key cid " + cid + ", sid " + sid + " does not exist " +
          "in the parent table, should get foreign key constraint exception");
      //this key may not be able to held, due the the delete in parent table,
      //so instead of fk constraint violation, we may see lock not held
    }
    rs.close();
    //check if foreign key has been locked, if other tx hold the key for update etc,
    //this insert should gets conflict
    boolean hasForeignKeyConflict = hasForeignKeyConflict(TradePortfolioDMLDistTxStmt.
        getTableName() + cidPrefix + cid + sidPrefix + sid, txId);
    //track the parentKey hold
    parentKeys.add(TradePortfolioDMLDistTxStmt.getTableName() + cidPrefix + cid + sidPrefix + sid);
    //we need to modify the RangeForeignKey and check if the range foreign key has been taken,
    //if not, subsequent delete in parent table could be blocked
    hydra.blackboard.SharedMap rangeForeignKeys = SQLTxSecondBB.getBB().getSharedMap();
    String key = getTableName() + cidPrefix + cid + sidPrefix + sid;
    RangeForeignKey cid_sidRangeKey = (RangeForeignKey) rangeForeignKeys.get(key);
    if (cid_sidRangeKey == null) cid_sidRangeKey = new RangeForeignKey(key);
    boolean hasRangeForeignKeyConflict = cid_sidRangeKey.hasConflictAddPartialRangeKey(txId);
    rangeForeignKeys.put(getTableName() + cidPrefix + cid + sidPrefix + sid, cid_sidRangeKey);
    // remember which range FKs this tx holds so they can be released later
    ArrayList<String> curTxRangeFK = (ArrayList<String>)SQLDistTxTest.curTxFKHeld.get();
    curTxRangeFK.add(key);
    SQLDistTxTest.curTxFKHeld.set(curTxRangeFK);
    expectConflict[0] = hasForeignKeyConflict || hasRangeForeignKeyConflict;
    if (hasForeignKeyConflict) {
      Log.getLogWriter().info(database + "should expect lock not held/conflict exception here"
          // + " but due to non update of primary key in parent table, will relax a little here." +
          //" however the insert in parent should cause conflict exception here instead of " +
          //" foreign key constraint exception here"
          );
    }
  }
protected void removePartialRangeForeignKeys(int[] cid, int[] sid) {
int txId = (Integer) SQLDistTxTest.curTxId.get();
hydra.blackboard.SharedMap rangeForeignKeys = SQLTxSecondBB.getBB().getSharedMap();
RangeForeignKey cid_sidRangeKey = (RangeForeignKey) rangeForeignKeys.get(getTableName()
+ cidPrefix + cid[0] + sidPrefix + sid[0]);
if (cid_sidRangeKey!=null) cid_sidRangeKey.removePartialRangeKey(txId);
Log.getLogWriter().info("removing the partial range foreign key for this TXID:" + txId);
}
protected void insertToGfxdTable(Connection conn, int[] oid, int[] cid, int[] sid, int[] qty,
String[] status, Timestamp[] time, BigDecimal[] ask, int[] updateCount,int size)
throws SQLException {
PreparedStatement stmt = null;
if (SQLTest.isEdge && !isTicket48176Fixed && isHATest) {
stmt = useDefaultValue? getStmtThrowException(conn, insertWithDefaultValue)
: getStmtThrowException(conn, insert);
}
else stmt = useDefaultValue? conn.prepareStatement(insertWithDefaultValue) :
conn.prepareStatement(insert);
//PreparedStatement stmt = getStmt(conn, insert);
int tid = getMyTid();
for (int i=0 ; i<size ; i++) {
updateCount[i] = insertToTable(stmt, oid[i], cid[i], sid[i], qty[i], status[i], time[i], ask[i], tid);
}
}
public static String getTableName() {
return "sellorders";
}
  /**
   * Replays a query recorded by {@code addQueryToDerbyTx} against Derby and
   * compares Derby's rows with the recorded gfxd rows.
   *
   * @param dConn Derby connection
   * @param index position of the recorded op in the per-thread derbyOps list
   */
  @SuppressWarnings("unchecked")
  @Override
  public void queryDerby(Connection dConn, int index) {
    ArrayList<Object[]> derbyOps = (ArrayList<Object[]>)SQLDistTxTest.derbyOps.get();
    Object[] data = derbyOps.get(index);
    SQLException gfxdse = (SQLException) data[10];
    List<Struct> gfxdList = (List<Struct>) data[9];
    ResultSet derbyRS = null;
    try {
      //query(dConn, whichQuery, status, ask, cid, oid, orderTime, tid);
      derbyRS = query(dConn, (Integer)data[2], (String[])data[3], (BigDecimal[])data[4],
          (int[])data[5], (int[])data[6], (Timestamp)data[7], (Integer)data[8]);
    } catch (SQLException derbyse) {
      SQLHelper.compareExceptions(derbyse, gfxdse);
    }
    // NOTE(review): when Derby threw above, derbyRS is still null here and a
    // null row list is compared against the gfxd rows -- confirm intended.
    ResultSetHelper.compareResultSets(ResultSetHelper.asList(derbyRS, true), gfxdList);
  }
  /**
   * Runs one randomly chosen select against gfxd inside the current tx,
   * handling conflict and HA node-failure cases, and records the query plus
   * its gfxd rows for later replay/comparison on Derby.
   *
   * @return false on conflict or node failure, true otherwise
   */
  @SuppressWarnings("unchecked")
  @Override
  public boolean queryGfxd(Connection gConn, boolean withDerby) {
    if (!withDerby) {
      return queryGfxdOnly(gConn);
    }
    int numOfNonUniq = select.length/2; //how many query statement is for non unique keys, non uniq query must be at the end
    int whichQuery = getWhichOne(numOfNonUniq, select.length); //randomly select one query sql based on test uniq or not
    // non-tx connection used only to pick realistic cid values
    Connection nonTxConn = (Connection)SQLDistTxTest.gfxdNoneTxConn.get();
    String[] status = new String[2];
    BigDecimal[] ask = new BigDecimal[2];
    int[] cid = new int[5]; //test In for 5
    int[] oid = new int[5];
    int tid = getMyTid();
    Timestamp orderTime = getRandTime();
    getStatus(status);
    getAsk(ask);
    getCids(nonTxConn, cid);
    getOids(oid);
    ResultSet gfxdRS = null;
    SQLException gfxdse = null;
    HashMap<String, Integer> modifiedKeysByTx = (HashMap<String, Integer>)
        SQLDistTxTest.curTxModifiedKeys.get();
    try {
      gfxdRS = query (gConn, whichQuery, status, ask, cid, oid, orderTime, tid);
      if (gfxdRS == null) {
        /* has specific node failure exception for txn being thrown by
         * select query, this is handled in catch block
        if (isHATest) {
          Log.getLogWriter().info("Testing HA and did not get GFXD result set");
          return true;
        }
        else
        */
        throw new TestException("Not able to get gfxd result set");
      }
    } catch (SQLException se) {
      SQLHelper.printSQLException(se);
      if (se.getSQLState().equalsIgnoreCase("X0Z02") ) {
        // a select can only conflict when batching with secondary data
        if (batchingWithSecondaryData) {
          verifyConflictWithBatching(new HashMap<String, Integer>(), modifiedKeysByTx, se, hasSecondary, true);
          return false;
        }
      }
      //handle node failure condition
      if (isHATest &&
          SQLHelper.gotTXNodeFailureException(se) ) {
        SQLHelper.printSQLException(se);
        Log.getLogWriter().info("got node failure exception during Tx with HA support, continue testing");
        return false; //not able to handle node failure yet, needs to rollback ops
        // to be confirmed if select query could cause lock to be released
      }
      gfxdse = se;
    }
    List<Struct> gfxdList = ResultSetHelper.asList(gfxdRS, false);
    /*
    if (gfxdList == null && isHATest && (Boolean) SQLDistTxTest.convertTxnRSGotNodeFailure.get()) {
      Log.getLogWriter().info("Testing HA and did not get GFXD result set due to node failure");
      SQLDistTxTest.convertTxnRSGotNodeFailure.set(false); //reset flag
      return false; //do not compare query results as gemfirexd does not get any due to node failure
    }
    */
    if (gfxdList == null && isHATest) {
      Log.getLogWriter().info("Testing HA and did not get GFXD result set due to node failure");
      return false; //assume txn failure occur and txn rolled back by product, otherwise return true here
    }
    // record query + gfxd rows for Derby replay and comparison
    addQueryToDerbyTx(whichQuery, status, ask, cid, oid, orderTime, tid, gfxdList, gfxdse);
    return true;
  }
@SuppressWarnings("unchecked")
protected void addQueryToDerbyTx(int whichQuery, String[] status,
BigDecimal[] ask, int[] cid, int[] oid, Timestamp orderTime, int tid,
List<Struct> gfxdList, SQLException gfxdse){
Object[] data = new Object[11];
data[0] = DMLDistTxStmtsFactory.TRADE_SELLORDERS;
data[1] = "query";
data[2] = whichQuery;
data[3] = status;
data[4] = ask;
data[5] = cid;
data[6] = oid;
data[7] = orderTime;
data[8] = tid;
data[9] = gfxdList;
data[10] = gfxdse;
ArrayList<Object[]> derbyOps = (ArrayList<Object[]>)SQLDistTxTest.derbyOps.get();
if (derbyOps == null) derbyOps = new ArrayList<Object[]>();
derbyOps.add(data);
SQLDistTxTest.derbyOps.set(derbyOps);
}
protected boolean queryGfxdOnly(Connection gConn){
try {
query(null, gConn);
} catch (TestException te) {
if (te.getMessage().contains("X0Z02") && batchingWithSecondaryData) {
Log.getLogWriter().info("got expected conflict exception, continuing test");
return false;
} else if (isHATest && SQLHelper.gotTXNodeFailureTestException(te)) {
Log.getLogWriter().info ("got expected node failure exception, continuing test");
return false;
} else throw te;
}
return true;
}
  /**
   * Replays an update previously recorded for this tx against Derby, comparing
   * any Derby exception with the recorded gfxd exception. The positional data
   * layout must match the recording side.
   *
   * @param dConn Derby connection
   * @param index position of the recorded op in the per-thread derbyOps list
   */
  @SuppressWarnings("unchecked")
  @Override
  public void updateDerby(Connection dConn, int index) {
    ArrayList<Object[]> derbyOps = (ArrayList<Object[]>)SQLDistTxTest.derbyOps.get();
    Object[] data = derbyOps.get(index);
    // Slot 12 holds the SQLException (if any) gfxd raised for the same op.
    SQLException gfxdse = (SQLException) data[12];
    try {
      //updateDerbyTable(Connection conn, int[] cid, int[] cid2,
      //int[] sid, BigDecimal[] ask, int[] qty, Timestamp[] orderTime,
      //String status, ArrayLit<Integer> oids, int[] whichUpdate, int[] updateCount,
      //int size)
      updateDerbyTable(dConn, (int[])data[2], (int[])data[3],
          (int[])data[4], (BigDecimal[])data[5], (int[])data[6],
          (Timestamp[])data[7], (String)data[8], (ArrayList<Integer>)data[9],
          (int[])data[10], (int[])data[11], 1);
    } catch (SQLException derbyse) {
      SQLHelper.compareExceptions(derbyse, gfxdse);
      return;
    }
    // Derby succeeded; if gfxd had failed, the exception was missed on Derby.
    if (gfxdse != null) {
      SQLHelper.handleMissedSQLException(gfxdse);
    }
  }
protected void updateDerbyTable(Connection conn, int[] cid, int[] cid2,
int[] sid, BigDecimal[] ask, int[] qty, Timestamp[] orderTime,
String status, ArrayList<Integer> oids, int[] whichUpdate, int[] gfxdupdateCount,
int size) throws SQLException {
PreparedStatement pstmt = null;
PreparedStatement ursstmt = null;
int tid = getMyTid();
int count = -1;
Log.getLogWriter().info("select for update in derby, myTid is " + tid);
//first oid case
for (int i=0 ; i < size ; i++) {
String sql = selectForUpdate[whichUpdate[i]];
Log.getLogWriter().info("select for update statement is " + sql);
if (SQLTest.testPartitionBy) pstmt = getCorrectTxStmt(conn, whichUpdate[i]);
else pstmt = getStmt(conn, updateByPK[whichUpdate[i]]); //use only this after bug#39913 is fixed
if (pstmt!=null) {
try {
ursstmt = conn.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY,
ResultSet.CONCUR_UPDATABLE);
} catch (SQLException se) {
SQLHelper.handleSQLException(se);
}
ResultSet rs = getSelectForUpdateRS(ursstmt, cid[i], cid2[i], sid[i],
ask[i], orderTime[i], status, whichUpdate[i], size);
Log.getLogWriter().info("oids size is " + oids.size());
int[] derbyUpdateCount = new int [oids.size()];
updateTable(null, rs, qty[i], status, orderTime[i], oids, whichUpdate[i],
derbyUpdateCount, size, false); //using updateable result set only to work around #43988
for (int j=0; j<oids.size(); j++) {
if (derbyUpdateCount[j] != gfxdupdateCount[j]){
Log.getLogWriter().info("Derby update has different row count from that of gfxd, " +
"gfxd updated " + gfxdupdateCount[j] +
" rows, but derby updated " + count + " rows");
}
}
}
}
}
/**
 * Transactional update of trade.sellorders in gfxd via select-for-update.
 * Returns true when the tx may continue (op applied or treated as a no-op),
 * false when the caller must roll back (write-write conflict or, under HA,
 * a node failure during the tx).
 */
@SuppressWarnings("unchecked")
@Override
public boolean updateGfxd(Connection gConn, boolean withDerby) {
if (!withDerby) {
return updateGfxdOnly(gConn);
}
if (!SQLDistTxTest.isTicket43188fiFixed && SQLDistTxTest.useThinClientDriverInTx)
return true; //workaround #43188 Updatable resultset is not supported yet using thin client driver
if (partitionKeys == null) setPartitionKeys();
// one operation per call; all parameter arrays are sized accordingly
int size =1;
int[] sid = new int[size];
BigDecimal[] ask = new BigDecimal[size];
Timestamp[] orderTime = new Timestamp[size];
int[] cid = new int[size];
int[] cid2 = new int[size];
int[] qty = new int[size];
ArrayList<Integer> oids = new ArrayList<Integer>();
String status = statuses[rand.nextInt(statuses.length)];
int[] whichUpdate = new int[size];
SQLException gfxdse = null;
boolean success = getDataForUpdate((Connection)SQLDistTxTest.gfxdNoneTxConn.get(), cid, cid2,
sid, qty, orderTime, ask, whichUpdate, size);
if (!success) return true; //did not get data or not commit early txs, it is a no op
HashMap<String, Integer> modifiedKeysByOp = new HashMap<String, Integer>();
HashMap<String, Integer> modifiedKeysByTx = (HashMap<String, Integer>)
SQLDistTxTest.curTxModifiedKeys.get();
try {
// pre-compute (on the non-tx connection) which oids this op will lock,
// so conflicts can be verified against other txns' key sets
getKeysForUpdate((Connection)SQLDistTxTest.gfxdNoneTxConn.get(), modifiedKeysByOp,
whichUpdate[0], cid[0], cid2[0], sid[0], ask[0], orderTime[0], oids);
} catch (SQLException se) {
if (se.getSQLState().equals("X0Z01") && isHATest) { // handles HA issue for #41471
Log.getLogWriter().warning("Not able to process the keys for this op due to HA, this update op does not proceed");
return true; //not able to process the keys due to HA, it is a no op
} else SQLHelper.handleSQLException(se); //else gfxdse = se;
}
//Log.getLogWriter().info("oids size after get keys is " + oids.size());
int[] updateCount = new int [oids.size()];
try {
success = updateGfxdTable(gConn, cid, cid2, sid,
ask, qty, orderTime, status, oids, whichUpdate, updateCount, size);
if (!success) {
/*
if (SQLTest.isEdge && isHATest && !isTicket48176Fixed &&
batchingWithSecondaryData &&(Boolean) SQLDistTxTest.failedToGetStmtNodeFailure.get()) {
SQLDistTxTest.failedToGetStmtNodeFailure.set(false);
return false; //due to node failure, need to rollback tx
}
else return true; //due to unsupported exception
*/
//handles get stmt failure conditions -- node failure or unsupported update on partition field
if (isHATest && (Boolean) SQLDistTxTest.failedToGetStmtNodeFailure.get()) {
SQLDistTxTest.failedToGetStmtNodeFailure.set(false); //reset flag
return false; //due to node failure, assume txn rolled back
}
if ((Boolean) SQLDistTxTest.updateOnPartitionCol.get()) {
SQLDistTxTest.updateOnPartitionCol.set(false); //reset flag
return true; //assume 0A000 exception does not cause txn to rollback
}
}
//partitioned on partitoned key, needs to check if using URS will rollback
//the tx, if so test needs to be modified. which may needs to separate update
//by PK and URS (no of column case) and return accordingly here
} catch (SQLException se) {
SQLHelper.printSQLException(se);
if (se.getSQLState().equalsIgnoreCase("X0Z02") ) {
// X0Z02: write-write conflict -- verify it was legitimate, then roll back
if (!batchingWithSecondaryData) verifyConflict(modifiedKeysByOp, modifiedKeysByTx, se, true);
else verifyConflictWithBatching(modifiedKeysByOp, modifiedKeysByTx, se, hasSecondary, true);
return false;
} else if (gfxdtxHANotReady && isHATest &&
SQLHelper.gotTXNodeFailureException(se) ) {
SQLHelper.printSQLException(se);
Log.getLogWriter().info("got node failure exception during Tx with HA support, continue testing");
return false;
} else {
SQLHelper.handleSQLException(se);
}
}
// no conflict observed: verify, record the op for later Derby replay, and
// merge this op's keys into the tx-wide modified-key map
if (!batchingWithSecondaryData) verifyConflict(modifiedKeysByOp, modifiedKeysByTx, gfxdse, false);
else verifyConflictWithBatching(modifiedKeysByOp, modifiedKeysByTx, gfxdse, hasSecondary, false);
//add this operation for derby
addUpdateToDerbyTx(cid, cid2, sid, ask, qty, orderTime, status, oids,
whichUpdate, updateCount, gfxdse);
modifiedKeysByTx.putAll(modifiedKeysByOp);
SQLDistTxTest.curTxModifiedKeys.set(modifiedKeysByTx);
return true;
}
/**
 * Records this update operation in the thread-local derby op queue so it can
 * later be replayed against Derby and compared with the gfxd outcome.
 */
@SuppressWarnings("unchecked")
protected void addUpdateToDerbyTx(int[] cid, int[] cid2, int[] sid,
    BigDecimal[] ask, int[] qty, Timestamp[] orderTime, String status,
    ArrayList<Integer> oids, int[] whichUpdate, int[] updateCount, SQLException gfxdse){
  // payload layout is positional; consumers unpack it by index
  Object[] data = new Object[] {
      DMLDistTxStmtsFactory.TRADE_SELLORDERS, // [0] table id
      "update",                               // [1] operation name
      cid,                                    // [2]
      cid2,                                   // [3]
      sid,                                    // [4]
      ask,                                    // [5]
      qty,                                    // [6]
      orderTime,                              // [7]
      status,                                 // [8]
      oids,                                   // [9]
      whichUpdate,                            // [10]
      updateCount,                            // [11] gfxd row counts to compare against
      gfxdse                                  // [12] gfxd exception (if any)
  };
  ArrayList<Object[]> derbyOps = (ArrayList<Object[]>) SQLDistTxTest.derbyOps.get();
  if (derbyOps == null) {
    derbyOps = new ArrayList<Object[]>();
  }
  derbyOps.add(data);
  SQLDistTxTest.derbyOps.set(derbyOps);
}
/**
 * Fills the parameter arrays for one select-for-update/update round.
 *
 * @return false when this tx did not commit early (these statements require
 *         early-commit mode) or when no existing cid/sid data was found;
 *         true when all arrays were populated.
 */
protected boolean getDataForUpdate(Connection regConn, int[] cid, int[] cid2,
    int[] sid, int[] qty, Timestamp[] orderTime, BigDecimal[] ask,
    int[] whichUpdate, int size) {
  Connection conn = getAuthConn(regConn);
  int cidRange = 3;
  if (!(Boolean)SQLDistTxTest.commitEarly.get()) {
    return false; //only committed early could update these select for update statement
  }
  int[] cids = new int[size*2];
  int[] sids = new int[size*2];
  // Fix: fetch into the staging arrays (cids/sids) that are copied from in
  // the loop below. The original fetched into cid/sid directly and then
  // overwrote them with the never-populated (all-zero) staging arrays,
  // discarding the fetched values.
  if (!getDataFromResult(conn, cids, sids)) return false; //did not get data
  for (int i=0; i<size; i++) {
    qty[i] = getQty();
    orderTime[i] = getRandTime();
    ask[i] = getPrice();
    whichUpdate[i] = rand.nextInt(selectForUpdate.length);
    cid[i] = cids[i];
    cid2[i] = cid[i] + cidRange; //upper bound for the range-scan update case
    sid[i] = sids[i];
  }
  return true;
}
/**
 * Runs the update against gfxd alone (no Derby mirror).
 *
 * @return true on success; false when an expected conflict (X0Z02) or an
 *         expected HA node-failure was observed. Any other TestException
 *         is rethrown.
 */
protected boolean updateGfxdOnly(Connection gConn){
  try {
    update(null, gConn, 1);
    return true;
  } catch (TestException testEx) {
    String message = testEx.getMessage();
    if (message.contains("X0Z02")) {
      Log.getLogWriter().info("got expected conflict exception, continuing test");
      return false;
    }
    if (gfxdtxHANotReady && isHATest
        && SQLHelper.gotTXNodeFailureTestException(testEx)) {
      Log.getLogWriter().info("got expected node failure exception, continuing test");
      return false;
    }
    throw testEx;
  }
}
//need to be changed to be similar to networth table once the update is actually implemented
//for now this is used to avoid the #43909 test issue
/**
 * Picks which update statement to run. In concurrent-update-tx mode the
 * choices that would modify the ask column are remapped to neighbours so
 * concurrent txns exercise conc update without colliding on ask.
 */
protected int getWhichUpdate(int numOfNonUniq, int total) {
  int choice = super.getWhichOne(numOfNonUniq, total);
  if (isConcUpdateTx) {
    //to avoid update ask column in concUpdateTx tx to test conc update in tx
    switch (choice) {
      case 2:
      case 6:
        choice -= 1;
        break;
      case 3:
      case 7:
        choice -= 2;
        break;
      default:
        break;
    }
  }
  return choice;
}
/**
 * Determines which sellorders rows (oids) the chosen select-for-update will
 * lock, registering each key in {@code keys} and choosing the subset of oids
 * that the test will actually modify.
 *
 * The three cases differed only in the key-query SQL; the identical
 * scan/record loop has been factored into {@link #collectLockedOids}.
 *
 * @throws SQLException propagated to the caller (e.g. X0Z01 under HA)
 */
protected void getKeysForUpdate(Connection conn, HashMap<String, Integer > keys,
    int whichUpdate, int cid, int cid2, int sid, BigDecimal ask,
    Timestamp orderTime, ArrayList<Integer> oids) throws SQLException {
  String sql = null;
  int txId = (Integer) SQLDistTxTest.curTxId.get();
  String database = SQLHelper.isDerbyConn(conn) ? "Derby - " : "gemfirexd - TXID:" + txId + " " ;
  switch (whichUpdate) {
  case 0:
    // "select oid, status from trade.sellorders where sid = ? and ask>? for update of status ",
    sql = "select oid from trade.sellorders where sid=" + sid + " and ask > " + ask;
    break;
  case 1:
    //"select * from trade.sellorders where cid >= ? and cid <? for update of qty, status ",
    sql = "select oid from trade.sellorders where cid>="+cid + " and cid <" + cid2;
    break;
  case 2:
    //"select * from trade.sellorders where cid = ? and sid= ? for update
    sql = "select oid from trade.sellorders where cid="+cid + " and sid =" + sid;
    break;
  default:
    throw new TestException ("Wrong update statement here");
  }
  collectLockedOids(conn, sql, database, txId, keys, oids);
}

/**
 * Runs the key query and records every returned oid as a locked key; the
 * first oid is always chosen for modification, later ones with probability
 * one half (so locked-but-unmodified rows are also exercised).
 */
private void collectLockedOids(Connection conn, String sql, String database,
    int txId, HashMap<String, Integer> keys, ArrayList<Integer> oids) throws SQLException {
  Log.getLogWriter().info(database + "executing " + sql);
  ResultSet rs = conn.createStatement().executeQuery(sql);
  while (rs.next()) {
    int oid = rs.getInt(1);
    Log.getLogWriter().info(database + "OID: " + oid + " exists to be locked by select for update");
    if (oids.size() == 0) oids.add(oid);
    else {
      if (rand.nextBoolean()) oids.add(oid);
      else Log.getLogWriter().info(database + "OID: " + oid + " will not be modified by update");
    }
    keys.put(getTableName()+"_"+oid, txId); //holding the keys
  }
  rs.close();
}
//this method returns false if update on partitioned key
/**
 * Opens the select-for-update result sets in gfxd and applies the updates.
 * Returns false (after setting the updateOnPartitionCol thread-local) when
 * preparing the updatable result set fails with 0A000 because the updated
 * column belongs to the partitioning key; other failures are rethrown so
 * the caller can distinguish node-failure cases.
 */
@SuppressWarnings("unchecked")
protected boolean updateGfxdTable(Connection conn, int[] cid, int[] cid2,
int[] sid, BigDecimal[] ask, int[] qty, Timestamp[] orderTime,
String status, ArrayList<Integer> oids, int[] whichUpdate, int[] updateCount,
int size) throws SQLException {
PreparedStatement stmt = null;
int tid = getMyTid();
int txId = (Integer) SQLDistTxTest.curTxId.get();
String database = SQLHelper.isDerbyConn(conn) ? "Derby - " : "gemfirexd - TXID:" + txId + " " ;
Log.getLogWriter().info(database + "select for update in gemfirexd, myTid is " + tid);
for (int i=0 ; i<size ; i++) {
boolean usePK = false; //to work around issue #43988
String sql = selectForUpdate[whichUpdate[i]];
Log.getLogWriter().info(database + "select for update statement is " + sql);
try {
stmt = conn.prepareStatement(sql, ResultSet.TYPE_FORWARD_ONLY,
ResultSet.CONCUR_UPDATABLE);
} catch (SQLException se) {
SQLHelper.printSQLException(se);
// 0A000 = feature not supported; expected only when the column being
// updated is part of the partition key
if (se.getSQLState().equals("0A000")) {
if (whichUpdate[i] == 0 && partitionKeys.contains("status")) {
SQLDistTxTest.updateOnPartitionCol.set(true);
return false;
}
else if (whichUpdate[i] == 1 &&
(partitionKeys.contains("qty") || partitionKeys.contains("status"))) {
SQLDistTxTest.updateOnPartitionCol.set(true);
return false;
}
else SQLHelper.handleSQLException(se);
} else throw se; //let caller handle node failure issue
}
ResultSet rs = getSelectForUpdateRS(stmt, cid[i], cid2[i], sid[i],
ask[i], orderTime[i], status, whichUpdate[i], size);
// track the open URS in a thread-local list so it can be closed when the
// tx finishes
ArrayList<ResultSet>selectForUpdateRSList = (ArrayList<ResultSet>)
SQLDistTxTest.selectForUpdateRS.get();
selectForUpdateRSList.add(rs);
SQLDistTxTest.selectForUpdateRS.set(selectForUpdateRSList);
if (usePK) {
// NOTE(review): dead branch while usePK stays hard-coded false above
// (workaround for #43988); kept for when that ticket is fixed
if (SQLTest.testPartitionBy) stmt = getCorrectTxStmt(conn, whichUpdate[i]);
else stmt = getStmt(conn, updateByPK[whichUpdate[i]]); //use only this after bug#39913 is fixed
if (stmt == null) return false;
else {
boolean success = updateTable(stmt, rs, qty[i], status, orderTime[i], oids,
whichUpdate[i], updateCount, size, usePK);
if (!success) return success;
}
} else {
//Log.getLogWriter().info("gfxd update table oids size is " + oids.size());
boolean success = updateTable(stmt, rs, qty[i], status, orderTime[i],
oids, whichUpdate[i], updateCount, size, usePK);
if (!success) return success;
}
}
return true;
}
/**
 * Binds the parameters for the chosen select-for-update statement and
 * executes it, returning the (updatable) result set. The statement must
 * already have been prepared from selectForUpdate[whichUpdate].
 */
protected ResultSet getSelectForUpdateRS(PreparedStatement stmt, int cid, int cid2,
int sid, BigDecimal ask, Timestamp orderTime, String status,
int whichUpdate, int size) throws SQLException {
int txId = (Integer) SQLDistTxTest.curTxId.get();
String database = SQLHelper.isDerbyConn(stmt.getConnection()) ? "Derby - " : "gemfirexd - TXID:" + txId + " " ;
switch (whichUpdate) {
case 0:
//"select oid, status from trade.sellorders where sid = ? and ask>? for update of status ",
stmt.setInt(1, sid);
stmt.setBigDecimal(2, ask);
Log.getLogWriter().info(database + " selecting for update from trade.sellorders with SID:" + sid + ",ASK:" + ask + " QUERY: " +selectForUpdate[whichUpdate]);
break;
case 1:
// "select * from trade.sellorders where cid >= ? and cid <? for update of qty, status ",
stmt.setInt(1, cid);
stmt.setInt(2, cid2);
Log.getLogWriter().info(database + "selecting for update from trade.sellorders with 1_CID:" + cid + ",2_CID:" + cid2 + " QUERY: " +selectForUpdate[whichUpdate]);
break;
case 2: //"select * from trade.sellorders where cid = ? and sid= ? for update ",
stmt.setInt(1, cid);
stmt.setInt(2, sid);
Log.getLogWriter().info(database + "selecting for update from trade.sellorders with CID:" + cid + ",SID:" + sid + " QUERY: " + selectForUpdate[whichUpdate]);
break;
default:
throw new TestException (database + "Wrong select for update sql string here");
}
return stmt.executeQuery();
}
//if updatable resultset gets unsupported exception due to update on
//partitioned key, this method return false
//otherwise it returns true as update stmt has been checked already
/**
 * Applies the update either through the PK-based prepared statement
 * (usePK == true) or by walking the updatable result set and calling
 * updateRow on each selected oid. Per-oid row counts are written into
 * {@code updateCount}.
 *
 * Fix: removed the unused local {@code txid} (computed but never read).
 *
 * @return false only when the URS path hit an unsupported update on a
 *         partitioned column; true otherwise.
 */
protected boolean updateTable (PreparedStatement stmt, ResultSet rs,
    int qty, String status, Timestamp orderTime, ArrayList<Integer> oids, int whichUpdate,
    int[] updateCount, int size, boolean usePK) throws SQLException {
  boolean[] success = new boolean[1];
  String database = "Derby - ";
  if (stmt != null ){
    int txId = (Integer) SQLDistTxTest.curTxId.get();
    database = SQLHelper.isDerbyConn(stmt.getConnection()) ? "Derby - " : "gemfirexd - TXID:" + txId + " " ;
  }
  if (usePK) { //use pk based update statement
    for (int i=0; i<oids.size(); i++) {
      updateCount[i] = updateTableUsngPKStmt(stmt, qty,
          status, orderTime, oids.get(i), whichUpdate);
    }
  } else { //use updatable statement available
    Log.getLogWriter().info(database + "update using resultset updateRow");
    while (rs.next()) {
      int oid = rs.getInt("OID");
      for (int i=0; i<oids.size(); i++) {
        if ( oid == oids.get(i)) {
          updateCount[i] = updateTableUsingURS(rs, qty,
              status, orderTime, oids.get(i), whichUpdate, success);
          if (success[0] == false) return false;
        }
      }
    }
  }
  return true;
}
/**
 * Updates one sellorders row by primary key (oid) using the prepared PK
 * update statement matching {@code whichUpdate}, logging before and after.
 *
 * @return the row count reported by executeUpdate
 */
protected int updateTableUsngPKStmt(PreparedStatement stmt, int qty,
String status, Timestamp orderTime, int oid, int whichUpdate) throws SQLException {
int rowCount = 0;
int txId = (Integer) SQLDistTxTest.curTxId.get();
String database = SQLHelper.isDerbyConn(stmt.getConnection()) ? "Derby - " : "gemfirexd - TXID:" + txId + " " ;
switch (whichUpdate) {
case 0:
//"update trade.sellorders set status = ? where oid = ? ",
Log.getLogWriter().info(database + "updating trade.sellorders with STATUS:" + status +
"where OID:" + oid + " QUERY: " + updateByPK[whichUpdate]);
stmt.setString(1, status);
stmt.setInt(2, oid);
rowCount = stmt.executeUpdate();
Log.getLogWriter().info(database + "updated " + rowCount + " rows in trade.sellorders with STATUS:" + status +
"where OID:" + oid + " QUERY: " + updateByPK[whichUpdate]);
break;
case 1:
//"update trade.sellorders set qty = ?, status = ? where where oid = ? ",
Log.getLogWriter().info(database + "updating trade.sellorders with QTY:" + qty +
",STATUS:" + status + "where OID:" + oid + " QUERY: " + updateByPK[whichUpdate]);
stmt.setInt(1, qty);
stmt.setString(2, status);
stmt.setInt(3, oid);
rowCount = stmt.executeUpdate();
Log.getLogWriter().info(database + "updated " + rowCount + " rows in trade.sellorders with QTY:" + qty +
",STATUS:" + status + "where OID:" + oid + " QUERY: " + updateByPK[whichUpdate]);
break;
case 2:
//update trade.sellorders set order_time = ? where oid = ? ,
Log.getLogWriter().info(database + "updating trade.sellorders with ORDERTIME:" + orderTime
+ " where OID:" + oid + " QUERY: " + updateByPK[whichUpdate]);
stmt.setTimestamp(1, orderTime);
stmt.setInt(2, oid);
rowCount = stmt.executeUpdate();
Log.getLogWriter().info(database + "updated " + rowCount + " rows in trade.sellorders with ORDERTIME:" + orderTime
+ " where OID:" + oid + " QUERY: " + updateByPK[whichUpdate]);
break;
default:
throw new TestException (database + "Wrong update sql string here");
}
SQLWarning warning = stmt.getWarnings(); //test to see there is a warning
if (warning != null) {
SQLHelper.printSQLWarning(warning);
}
return rowCount;
}
/**
 * Updates the current row of the updatable result set (URS) according to
 * {@code whichUpdate} and returns the row count (1 on success).
 *
 * For case 2, an unsupported update on a partitioned order_time column
 * (SQLState 0A000) is reported by returning 0 and setting
 * {@code success[0] = false} rather than throwing.
 *
 * Fix: corrected log-message typo "tarde.sellorders" -> "trade.sellorders".
 */
protected int updateTableUsingURS(ResultSet rs, int qty, String status,
    Timestamp orderTime, int oid, int whichUpdate, boolean[] success)
    throws SQLException {
  int rowCount = 1;
  String txid = "TXID:" + (Integer)SQLDistTxTest.curTxId.get() + " ";
  switch (whichUpdate) {
  case 0:
    //"update status = ?
    Log.getLogWriter().info(txid + "updating trade.sellorders table using URS with STATUS: " + status +
        "where OID:" + oid + " QUERY: " + "update status = ? where oid = ?");
    //select for update of column (status) has checked already
    //whether updating on partition column
    rs.updateString("STATUS", status);
    rs.updateRow();
    break;
  case 1:
    //"update trade.sellorders set qty = ?, status = ? where where oid = ? ",
    Log.getLogWriter().info(txid + "updating trade.sellorders table using URS with QTY:" + qty + ", " +
        "STATUS:" + status + " where OID:" + oid + " QUERY: " + "update trade.sellorders set qty = ?, status = ? where where oid = ? ");
    //select for update of column (status and qty) has checked already
    //whether updating on partition column
    rs.updateInt("QTY", qty);
    rs.updateString("STATUS", status);
    rs.updateRow();
    break;
  case 2:
    //"update trade.sellorders set order_time = ? where where oid = ? ",
    Log.getLogWriter().info(txid + "updating trade.sellorders table using URS with ORDERTIME:"
        + orderTime + " where OID:" + oid + " QUERY: " + "update trade.sellorders set order_time = ? where where oid = ? ");
    try {
      rs.updateTimestamp("ORDER_TIME", orderTime);
      rs.updateRow();
    } catch (SQLException se) {
      SQLHelper.printSQLException(se);
      if (se.getSQLState().equals("0A000") &&
          partitionKeys.contains("order_time")) {
        rowCount = 0;
        success[0] = false;
        return rowCount;
      } else throw se;
    }
    break;
  default:
    throw new TestException ("Wrong updatable resultset used here");
  }
  success[0] = true;
  return rowCount;
}
//used to parse the partitionKey and test unsupported update on partitionKey, no need after bug #39913 is fixed
/**
 * Returns the PK-based update statement for {@code whichUpdate}; when the
 * column being updated is part of the partitioning key, returns an
 * "unsupported" statement for gfxd connections and null for Derby so that
 * neither side performs the update.
 */
private PreparedStatement getCorrectTxStmt(Connection conn, int whichUpdate,
ArrayList<String> partitionKeys){
PreparedStatement stmt = null;
switch (whichUpdate) {
case 0:
// "select oid, status from trade.sellorders where sid = ? and ask>? and status = 'open' for update of status ",
// update status
if (partitionKeys.contains("status")) {
Log.getLogWriter().info("Will update gemfirexd on partition key");
if (!SQLHelper.isDerbyConn(conn))
stmt = getUnsupportedStmt(conn, updateByPK[whichUpdate]);
//if derbyConn, stmt is null so no update in derby as well
} else stmt = getStmt(conn, updateByPK[whichUpdate]);
break;
case 1:
// "select * from trade.sellorders where cid >= ? and cid <? order_time >? and status = 'open' for update of qty, status ",
// update qty, status
if (partitionKeys.contains("qty") || partitionKeys.contains("status") ) {
Log.getLogWriter().info("Will update gemfirexd on partition key");
if (!SQLHelper.isDerbyConn(conn))
stmt = getUnsupportedStmt(conn, updateByPK[whichUpdate]);
//if derbyConn, stmt is null so no update in derby as well
} else stmt = getStmt(conn, updateByPK[whichUpdate]);
break;
case 2: //update order_time (this case's PK update sets order_time; previous comment "update cid, sid" was misleading)
// "select * from trade.sellorders where cid = ? and sid= ? for update ",
//NOTE(review): unlike cases 0/1 this branch does not log "Will update gemfirexd
//on partition key" before using the unsupported stmt -- confirm whether intentional
if (partitionKeys.contains("order_time")) {
if (!SQLHelper.isDerbyConn(conn))
stmt = getUnsupportedStmt(conn, updateByPK[whichUpdate]);
//if derbyConn, stmt is null so no update in derby as well
} else stmt = getStmt(conn, updateByPK[whichUpdate]);
break;
default:
throw new TestException ("Wrong update sql string here");
}
return stmt;
}
/**
 * Convenience overload: lazily initializes the cached partition-key list
 * before delegating to the three-argument variant.
 */
private PreparedStatement getCorrectTxStmt(Connection conn, int whichUpdate){
  if (partitionKeys == null) {
    setPartitionKeys();
  }
  return getCorrectTxStmt(conn, whichUpdate, partitionKeys);
}
}
| apache-2.0 |
ldp4j/ldp4j | framework/application/api/src/test/java/org/ldp4j/application/session/SnapshotResolverTest.java | 5026 | /**
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
* This file is part of the LDP4j Project:
* http://www.ldp4j.org/
*
* Center for Open Middleware
* http://www.centeropenmiddleware.com/
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
* Copyright (C) 2014-2016 Center for Open Middleware.
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
* Artifact : org.ldp4j.framework:ldp4j-application-api:0.2.2
* Bundle : ldp4j-application-api-0.2.2.jar
* #-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=#
*/
package org.ldp4j.application.session;
import java.net.URI;
import mockit.Deencapsulation;
import mockit.Mocked;
import mockit.Verifications;
import mockit.integration.junit4.JMockit;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ldp4j.application.ApplicationContextException;
import org.ldp4j.application.spi.ResourceSnapshotResolver;
import org.ldp4j.application.spi.RuntimeDelegate;
import org.ldp4j.application.spi.ShutdownListener;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
@RunWith(JMockit.class)
public class SnapshotResolverTest {
/** Minimal RuntimeDelegate stub that always hands back the supplied (mocked) resolver. */
private final class MockedRuntimeDelegate extends RuntimeDelegate {
private final ResourceSnapshotResolver resolver;
private MockedRuntimeDelegate(ResourceSnapshotResolver resolver) {
this.resolver = resolver;
}
@Override
public boolean isOffline() {
return false;
}
@Override
public WriteSession createSession() throws ApplicationContextException {
return null;
}
@Override
public ResourceSnapshotResolver createResourceResolver(URI canonicalBase,ReadSession session) {
return this.resolver;
}
@Override
public void registerShutdownListener(ShutdownListener listener) {
}
}
private static final URI CANONICAL_BASE=URI.create("http://www.ldp4j.org/context/");
private static final URI NON_HIERARCHICAL_CANONICAL_BASE=URI.create("urn:www.ldp4j.org/context/");
private static final URI NON_ABSOLUTE_CANONICAL_BASE=URI.create("context/");
// NOTE(review): this test exercises fromURI(...) although it is named
// testToURI, and the next one exercises toURI(...) -- the names appear
// swapped with respect to the operation under test; confirm intent.
@Test
public void testToURI(@Mocked ReadSession session, final @Mocked ResourceSnapshotResolver resolver) throws Exception {
SnapshotResolver sut = buildResolver(session, resolver);
sut.fromURI(CANONICAL_BASE);
new Verifications() {{
resolver.resolve(CANONICAL_BASE);
}};
}
@Test
public void testFromURI(@Mocked ReadSession session, final @Mocked ResourceSnapshotResolver resolver, final @Mocked ResourceSnapshot snapshot) throws Exception {
SnapshotResolver sut = buildResolver(session, resolver);
sut.toURI(snapshot);
new Verifications() {{
resolver.resolve(snapshot);
}};
}
// Builder precondition tests: null or structurally invalid inputs rejected.
@Test(expected=NullPointerException.class)
public void testBuilder$nullSession() {
SnapshotResolver.
builder().
withReadSession(null).
withCanonicalBase(CANONICAL_BASE).
build();
}
@Test(expected=NullPointerException.class)
public void testBuilder$nullCanonicalBase(@Mocked ReadSession session) {
SnapshotResolver.
builder().
withReadSession(session).
withCanonicalBase(null).
build();
}
@Test(expected=IllegalArgumentException.class)
public void testBuilder$nonHierarchicalCanonicalBase(@Mocked ReadSession session) {
SnapshotResolver.
builder().
withReadSession(session).
withCanonicalBase(NON_HIERARCHICAL_CANONICAL_BASE).
build();
}
@Test(expected=IllegalArgumentException.class)
public void testBuilder$nonAbsoluteCanonicalBase(@Mocked ReadSession session) {
SnapshotResolver.
builder().
withReadSession(session).
withCanonicalBase(NON_ABSOLUTE_CANONICAL_BASE).
build();
}
@Test
public void testBuilder$validBase(@Mocked ReadSession session, final @Mocked ResourceSnapshotResolver resolver) {
buildResolver(session, resolver);
}
/**
 * Installs a MockedRuntimeDelegate and builds a SnapshotResolver, asserting
 * the built instance is non-null and wraps exactly the supplied resolver
 * (checked via reflection on the private "resolver" field).
 */
private SnapshotResolver buildResolver(ReadSession session,
final ResourceSnapshotResolver resolver) {
RuntimeDelegate.setInstance(new MockedRuntimeDelegate(resolver));
SnapshotResolver result=
SnapshotResolver.
builder().
withReadSession(session).
withCanonicalBase(CANONICAL_BASE).
build();
assertThat(result,notNullValue());
assertThat(Deencapsulation.getField(result, "resolver"),sameInstance((Object)resolver));
return result;
}
}
| apache-2.0 |
gravitee-io/gravitee-gateway | gravitee-gateway-handlers/gravitee-gateway-handlers-api/src/main/java/io/gravitee/gateway/handlers/api/policy/security/rule/EvaluableAuthenticationContext.java | 1254 | /**
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gravitee.gateway.handlers.api.policy.security.rule;
import io.gravitee.gateway.security.core.AuthenticationContext;
import java.util.Map;
/**
* @author David BRASSELY (david.brassely at graviteesource.com)
* @author GraviteeSource Team
*/
/**
 * Read-only adapter over an {@link AuthenticationContext} exposing its
 * attributes as a JavaBean-style "attributes" property, so the context can
 * be consumed from expression evaluation.
 */
public class EvaluableAuthenticationContext {
private final AuthenticationContext authenticationContext;
EvaluableAuthenticationContext(AuthenticationContext authenticationContext) {
this.authenticationContext = authenticationContext;
}
/** @return the attribute map of the wrapped authentication context */
public Map<String, Object> getAttributes() {
return authenticationContext.attributes();
}
}
| apache-2.0 |
Dietmar-Franken/mycontroller | src/main/java/org/mycontroller/standalone/db/tables/RoleGatewayMap.java | 1724 | /**
* Copyright (C) 2015-2016 Jeeva Kandasamy (jkandasa@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mycontroller.standalone.db.tables;
import org.mycontroller.standalone.db.DB_TABLES;
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.table.DatabaseTable;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.ToString;
/**
* @author Jeeva Kandasamy (jkandasa)
* @since 0.0.2
*/
/**
 * ORMLite entity mapping the role-to-gateway join table; the (role, gateway)
 * pair forms a unique combination (uniqueCombo on both columns).
 */
@DatabaseTable(tableName = DB_TABLES.ROLE_GATEWAY_MAP)
@Data
@NoArgsConstructor
@AllArgsConstructor
@Builder
@ToString(includeFieldNames = true)
public class RoleGatewayMap {
public static final String KEY_ROLE_ID = "roleId";
public static final String KEY_GATEWAY_ID = "gatewayId";
// role side of the mapping; foreign ref is not auto-refreshed beyond level 0
@DatabaseField(canBeNull = false, columnName = KEY_ROLE_ID, uniqueCombo = true, foreign = true,
foreignAutoRefresh = true, maxForeignAutoRefreshLevel = 0)
private Role role;
// gateway side of the mapping; same uniqueness/refresh settings as role
@DatabaseField(canBeNull = false, columnName = KEY_GATEWAY_ID, uniqueCombo = true, foreign = true,
foreignAutoRefresh = true, maxForeignAutoRefreshLevel = 0)
private Gateway gateway;
}
| apache-2.0 |
RedHelixOrg/RedHelix-1 | redhx-build-all/redhx-core-api/src/main/java/org/redhelix/core/computer/system/boot/RedHxComputerBootProperties.java | 947 | /*
* Copyright 2015 JBlade LLC
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License
*/
package org.redhelix.core.computer.system.boot;
/**
*
*
* @since RedHelix Version 0.1
* @author Hank Bruning
*
*/
public interface RedHxComputerBootProperties {
/** @return the computer's boot source. */
RedHxComputerBootSourceEnum getBootSource();
/** @return the boot-source override enabled state. */
RedHxComputerSystemBootSourceOverrideEnabledEnum getBootSourceOverride();
/** @return the UEFI target source override for booting. */
RedHxComputerBootUefiTargetSourceOverride getBootUefiTarget();
}
| apache-2.0 |
Orange-OpenSource/matos-profiles | matos-android/src/main/java/android/widget/ListPopupWindow.java | 5046 | package android.widget;
/*
* #%L
* Matos
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2010 - 2014 Orange SA
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
// NOTE(review): auto-generated Matos stub of android.widget.ListPopupWindow.
// Method bodies intentionally return default values; only the annotations
// (CallBackRegister/CallBack) carry analysis meaning -- do not add logic here.
@com.francetelecom.rd.stubs.annotation.ClassDone(0)
public class ListPopupWindow
{
  // Fields
  public static final int POSITION_PROMPT_ABOVE = 0;
  public static final int POSITION_PROMPT_BELOW = 1;
  public static final int MATCH_PARENT = -1;
  public static final int WRAP_CONTENT = -2;
  public static final int INPUT_METHOD_FROM_FOCUSABLE = 0;
  public static final int INPUT_METHOD_NEEDED = 1;
  public static final int INPUT_METHOD_NOT_NEEDED = 2;
  // Constructors
  @com.francetelecom.rd.stubs.annotation.CallBackRegister("onCreate")
  public ListPopupWindow(android.content.Context arg1){
  }
  @com.francetelecom.rd.stubs.annotation.CallBackRegister("onCreate")
  public ListPopupWindow(android.content.Context arg1, android.util.AttributeSet arg2){
  }
  @com.francetelecom.rd.stubs.annotation.CallBackRegister("onCreate")
  public ListPopupWindow(android.content.Context arg1, android.util.AttributeSet arg2, int arg3){
  }
  @com.francetelecom.rd.stubs.annotation.CallBackRegister("onCreate")
  public ListPopupWindow(android.content.Context arg1, android.util.AttributeSet arg2, int arg3, int arg4){
  }
  // Methods
  @com.francetelecom.rd.stubs.annotation.CallBack("onCreate")
  public boolean onKeyDown(int arg1, android.view.KeyEvent arg2){
    return false;
  }
  @com.francetelecom.rd.stubs.annotation.CallBack("onCreate")
  public boolean onKeyUp(int arg1, android.view.KeyEvent arg2){
    return false;
  }
  public void show(){
  }
  public void setBackgroundDrawable(android.graphics.drawable.Drawable arg1){
  }
  public int getWidth(){
    return 0;
  }
  public int getHeight(){
    return 0;
  }
  @com.francetelecom.rd.stubs.annotation.CallBack("onCreate")
  public boolean onKeyPreIme(int arg1, android.view.KeyEvent arg2){
    return false;
  }
  public android.graphics.drawable.Drawable getBackground(){
    return (android.graphics.drawable.Drawable) null;
  }
  public void setSelection(int arg1){
  }
  public void setHeight(int arg1){
  }
  public void setWidth(int arg1){
  }
  public void setAdapter(ListAdapter arg1){
  }
  public android.view.View getSelectedView(){
    return (android.view.View) null;
  }
  public boolean performItemClick(int arg1){
    return false;
  }
  public int getSelectedItemPosition(){
    return 0;
  }
  public long getSelectedItemId(){
    return 0l;
  }
  public void setOnItemClickListener(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onItemClick") AdapterView.OnItemClickListener arg1){
  }
  public void setOnItemSelectedListener(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onItemSelected") AdapterView.OnItemSelectedListener arg1){
  }
  public java.lang.Object getSelectedItem(){
    return (java.lang.Object) null;
  }
  public void clearListSelection(){
  }
  public boolean isDropDownAlwaysVisible(){
    return false;
  }
  public void setDropDownAlwaysVisible(boolean arg1){
  }
  public boolean isInputMethodNotNeeded(){
    return false;
  }
  public void setForceIgnoreOutsideTouch(boolean arg1){
  }
  public void dismiss(){
  }
  public ListView getListView(){
    return (ListView) null;
  }
  public boolean isShowing(){
    return false;
  }
  public void setOnDismissListener(@com.francetelecom.rd.stubs.annotation.CallBackRegister("onDismiss") PopupWindow.OnDismissListener arg1){
  }
  public void setAnchorView(android.view.View arg1){
  }
  public void setSoftInputMode(int arg1){
  }
  public void setInputMethodMode(int arg1){
  }
  public int getAnimationStyle(){
    return 0;
  }
  public void setAnimationStyle(int arg1){
  }
  public int getInputMethodMode(){
    return 0;
  }
  public int getSoftInputMode(){
    return 0;
  }
  public int getVerticalOffset(){
    return 0;
  }
  public void setPromptPosition(int arg1){
  }
  public void setListSelector(android.graphics.drawable.Drawable arg1){
  }
  public void setVerticalOffset(int arg1){
  }
  public void setHorizontalOffset(int arg1){
  }
  public void setPromptView(android.view.View arg1){
  }
  public int getHorizontalOffset(){
    return 0;
  }
  public void postShow(){
  }
  public android.view.View getAnchorView(){
    return (android.view.View) null;
  }
  public void setModal(boolean arg1){
  }
  public void setContentWidth(int arg1){
  }
  public int getPromptPosition(){
    return 0;
  }
  public boolean isModal(){
    return false;
  }
}
ThiagoGarciaAlves/intellij-community | python/testSrc/com/jetbrains/python/inspections/PyArgumentListInspectionTest.java | 6264 | // Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python.inspections;
import com.jetbrains.python.fixtures.PyInspectionTestCase;
import com.jetbrains.python.psi.LanguageLevel;
import org.jetbrains.annotations.NotNull;
/**
 * Tests for {@link PyArgumentListInspection}, which highlights call sites whose
 * argument lists do not match the callee's parameter list.
 *
 * <p>Each {@code testXxx()} method relies on {@link PyInspectionTestCase}
 * conventions: {@code doTest()} runs the inspection over a fixture resolved
 * from the test name, and {@code doMultiFileTest("b.py")} runs it over a
 * multi-file fixture. {@code runWithLanguageLevel(...)} pins the Python
 * language level for fixtures that are version-specific. Comments of the form
 * {@code // PY-NNNN} reference the YouTrack issue the test covers.
 *
 * @author yole
 */
public class PyArgumentListInspectionTest extends PyInspectionTestCase {
  public void testBadarglist() {
    doTest();
  }
  public void testKwargsMapToNothing() {
    doTest();
  }
  public void testDecorators() {
    doTest();
  }
  public void testDecoratorsPy3K() {
    runWithLanguageLevel(LanguageLevel.PYTHON30, this::doTest);
  }
  // PY-19130
  public void testClassDecoratedThroughDecorator() {
    doTest();
  }
  // PY-19130
  public void testClassDecoratedThroughCall() {
    doTest();
  }
  public void testTupleVsLiteralList() {
    doTest();
  }
  // PY-312
  public void testInheritedInit() {
    doTest();
  }
  // PY-428
  public void testBadDecorator() {
    doTest();
  }
  public void testImplicitResolveResult() {
    doTest();
  }
  public void testCallingClassDefinition() {
    doTest();
  }
  public void testPy1133() {
    doTest();
  }
  public void testPy2005() {
    doTest();
  }
  public void testPy1268() {
    runWithLanguageLevel(LanguageLevel.PYTHON30, this::doTest);
  }
  public void testInstanceMethodAsLambda() {
    doTest();
  }
  public void testClassMethodMultipleDecorators() {
    doTest();
  }
  // PY-19412
  public void testReassignedViaClassMethod() {
    doTest();
  }
  // PY-19412
  public void testReassignedViaClassMethodInAnotherModule() {
    doMultiFileTest("b.py");
  }
  // PY-2294
  public void testTuples() {
    doTest();
  }
  // PY-2460
  public void testNestedClass() {
    doTest();
  }
  // PY-2622
  public void testReassignedMethod() {
    doTest();
  }
  public void testConstructorQualifiedByModule() {
    doTest();
  }
  // PY-3623
  public void testFunctionStoredInInstance() {
    doTest();
  }
  // PY-4419
  public void testUnresolvedSuperclass() {
    doTest();
  }
  // PY-4897
  public void testMultipleInheritedConstructors() {
    doTest();
  }
  public void testArgs() {
    doTest();
  }
  // PY-9080
  public void testMultipleInheritedConstructorsMRO() {
    doTest();
  }
  // PY-9978
  public void testXRange() {
    doTest();
  }
  // PY-9978
  public void testSlice() {
    doTest();
  }
  public void testPy3k() {
    runWithLanguageLevel(LanguageLevel.PYTHON30, this::doTest);
  }
  // Tells the base class which inspection to run for every fixture above.
  @NotNull
  @Override
  protected Class<? extends PyInspection> getInspectionClass() {
    return PyArgumentListInspection.class;
  }
  // PY-9664
  public void testFloatConstructor() {
    doTest();
  }
  // PY-10601
  public void testDecoratedChangedParameters() {
    doTest();
  }
  // PY-9605
  public void testPropertyReturnsCallable() {
    doTest();
  }
  // PY-11162
  public void testUnicodeConstructor() {
    doTest();
  }
  // PY-11169
  public void testDictFromKeys() {
    doTest();
  }
  // PY-9934
  public void testParameterWithDefaultAfterKeywordContainer() {
    doTest();
  }
  // PY-10351
  public void testParameterWithDefaultAfterKeywordContainer2() {
    doTest();
  }
  // PY-18275
  public void testStrFormat() {
    doTest();
  }
  // PY-19716
  public void testMethodsForLoggingExceptions() {
    doMultiFileTest("b.py");
  }
  // PY-19522
  public void testCsvRegisterDialect() {
    doMultiFileTest("b.py");
  }
  // PY-21083
  public void testFloatFromhex() {
    doTest();
  }
  public void testMultiResolveWhenOneResultIsDecoratedFunction() {
    doTest();
  }
  // Intentionally empty placeholder: the scenario cannot be tested until the
  // referenced issue is fixed.
  public void testMultiResolveWhenOneResultIsDunderInitInDecoratedClass() {
    // Implement after fixing PY-20057
  }
  public void testMultiResolveWhenOneResultDoesNotHaveUnmappedArguments() {
    doTest();
  }
  public void testMultiResolveWhenOneResultDoesNotHaveUnmappedParameters() {
    doTest();
  }
  public void testMultiResolveWhenAllResultsHaveUnmappedArguments() {
    doTest();
  }
  public void testMultiResolveWhenAllResultsHaveUnmappedParameters() {
    doTest();
  }
  public void testUnfilledSentinelInBuiltinIter() {
    doTest();
  }
  public void testUnfilledDefaultInBuiltinNext() {
    doTest();
  }
  public void testUnfilledIter4InBuiltinZip() {
    doTest();
  }
  public void testUnfilledIter2InBuiltinMap() {
    doTest();
  }
  // PY-22507
  public void testTimetupleOnAssertedDate() {
    doMultiFileTest("b.py");
  }
  // PY-23069
  public void testDunderNewCallInDictInheritor() {
    doTest();
  }
  // PY-22767
  public void testBuiltinZip() {
    doTest();
  }
  // PY-19293, PY-22102
  public void testInitializingTypingNamedTuple() {
    runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
  }
  // PY-24099
  public void testInitializingTypingNamedTupleWithDefaultValues() {
    runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
  }
  // PY-4344, PY-8422, PY-22269, PY-22740
  public void testInitializingCollectionsNamedTuple() {
    doTest();
  }
  // PY-22971
  public void testOverloadsAndImplementationInClass() {
    runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
  }
  // PY-22971
  public void testTopLevelOverloadsAndImplementation() {
    runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
  }
  // PY-22971
  public void testOverloadsAndImplementationInImportedClass() {
    runWithLanguageLevel(LanguageLevel.PYTHON35, () -> doMultiFileTest("b.py"));
  }
  // PY-22971
  public void testOverloadsAndImplementationInImportedModule() {
    runWithLanguageLevel(LanguageLevel.PYTHON35, () -> doMultiFileTest("b.py"));
  }
  public void testTypingCallableCall() {
    runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
  }
  // PY-24286
  public void testBuiltinLong() {
    doTest();
  }
  // PY-24930
  public void testCallOperator() {
    runWithLanguageLevel(LanguageLevel.PYTHON35, this::doTest);
  }
  // PY-16968
  public void testKwargsAgainstKeywordOnly() {
    runWithLanguageLevel(LanguageLevel.PYTHON36, this::doTest);
  }
  // PY-26023
  public void testAbstractMethod() {
    runWithLanguageLevel(LanguageLevel.PYTHON30, this::doTest);
  }
}
| apache-2.0 |
jasongardnerlv/alakazam | alakazam-jackson/src/test/java/io/alakazam/jackson/AnnotationSensitivePropertyNamingStrategyTest.java | 2116 | package io.alakazam.jackson;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import org.junit.Before;
import org.junit.Test;
import static org.fest.assertions.api.Assertions.assertThat;
/**
 * Verifies {@link AnnotationSensitivePropertyNamingStrategy}: beans keep
 * Jackson's regular camelCase property naming unless the bean class is
 * annotated with {@link JsonSnakeCase}, in which case properties are
 * (de)serialized in snake_case. Covered in both directions (serialize and
 * deserialize).
 */
public class AnnotationSensitivePropertyNamingStrategyTest {
    /** Plain bean without {@code @JsonSnakeCase} -- expects camelCase JSON. */
    public static class RegularExample {
        @JsonProperty
        String firstName;
        // Private no-arg constructor: required by Jackson for deserialization.
        @SuppressWarnings("UnusedDeclaration") // Jackson
        private RegularExample() {}
        public RegularExample(String firstName) {
            this.firstName = firstName;
        }
    }
    /** Same bean shape, but annotated -- expects snake_case JSON. */
    @JsonSnakeCase
    public static class SnakeCaseExample {
        @JsonProperty
        String firstName;
        // Private no-arg constructor: required by Jackson for deserialization.
        @SuppressWarnings("UnusedDeclaration") // Jackson
        private SnakeCaseExample() {}
        public SnakeCaseExample(String firstName) {
            this.firstName = firstName;
        }
    }
    private final PropertyNamingStrategy strategy = new AnnotationSensitivePropertyNamingStrategy();
    private final ObjectMapper mapper = new ObjectMapper();
    @Before
    public void setUp() throws Exception {
        // Install the strategy under test on a fresh mapper for each test.
        mapper.setPropertyNamingStrategy(strategy);
    }
    @Test
    public void serializesRegularProperties() throws Exception {
        assertThat(mapper.writeValueAsString(new RegularExample("woo")))
                .isEqualTo("{\"firstName\":\"woo\"}");
    }
    @Test
    public void serializesSnakeCaseProperties() throws Exception {
        assertThat(mapper.writeValueAsString(new SnakeCaseExample("woo")))
                .isEqualTo("{\"first_name\":\"woo\"}");
    }
    @Test
    public void deserializesRegularProperties() throws Exception {
        assertThat(mapper.readValue("{\"firstName\":\"woo\"}", RegularExample.class).firstName)
                .isEqualTo("woo");
    }
    @Test
    public void deserializesSnakeCaseProperties() throws Exception {
        assertThat(mapper.readValue("{\"first_name\":\"woo\"}", SnakeCaseExample.class).firstName)
                .isEqualTo("woo");
    }
}
| apache-2.0 |
dante-mx/openbravopos | src-pos/com/openbravo/pos/config/PanelConfig.java | 1277 | // Openbravo POS is a point of sales application designed for touch screens.
// Copyright (C) 2007-2009 Openbravo, S.L.
// http://www.openbravo.com/product/pos
//
// This file is part of Openbravo POS.
//
// Openbravo POS is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Openbravo POS is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Openbravo POS. If not, see <http://www.gnu.org/licenses/>.
package com.openbravo.pos.config;
import java.awt.Component;
import com.openbravo.pos.forms.AppConfig;
/**
*
* @author adrianromero
*/
public interface PanelConfig {

    /**
     * Populates this panel's UI widgets from the given application
     * configuration.
     *
     * @param config source of the persisted settings
     */
    void loadProperties(AppConfig config);

    /**
     * Writes the panel's current widget values back into the given
     * application configuration.
     *
     * @param config destination for the settings
     */
    void saveProperties(AppConfig config);

    /**
     * Reports whether the panel's settings have been modified -- presumably
     * since the last load/save; confirm the exact contract with implementors.
     *
     * @return {@code true} if there are unsaved changes
     */
    boolean hasChanged();

    /**
     * @return the AWT/Swing component that renders this configuration panel
     */
    Component getConfigComponent();
}
| apache-2.0 |
OSEHRA/vista-soa-ri | examples/identity-proxy/src/test/java/org/osehra/vista/examples/proxy/identity/IntegrationTest.java | 997 | /*
* Copyright 2012-2014 The Open Source Electronic Health Record Alliance
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.osehra.vista.examples.proxy.identity;
import junit.framework.TestCase;
import org.apache.camel.spring.Main;
/**
 * Smoke test: boots the Camel/Spring application context briefly and passes if
 * it starts and shuts down without throwing (no functional assertions).
 */
public class IntegrationTest extends TestCase {
    public void testSpringDeployment() throws Exception {
        // boot up the Spring application context for 2 seconds
        // "-duration 2s" makes camel-spring Main exit after 2 seconds;
        // "-o target/vista" is presumably an output directory option --
        // confirm against org.apache.camel.spring.Main's CLI options.
        Main.main("-duration", "2s", "-o", "target/vista");
    }
}
| apache-2.0 |
yukuai518/gobblin | gobblin-core/src/main/java/gobblin/writer/http/RestJsonWriterBuilder.java | 998 | /*
* Copyright (C) 2014-2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the
* License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied.
*/
package gobblin.writer.http;
import java.io.IOException;
import com.google.gson.JsonObject;
import gobblin.converter.http.RestEntry;
import gobblin.writer.DataWriter;
/**
* Builder that builds RestJsonWriter
*/
public class RestJsonWriterBuilder extends AbstractHttpWriterBuilder<Void, RestEntry<JsonObject>, RestJsonWriterBuilder> {

  /**
   * Validates the builder state and constructs the writer.
   *
   * @return a new {@link RestJsonWriter} configured from this builder
   * @throws IOException declared by the {@link DataWriter} builder contract
   */
  @Override
  public DataWriter<RestEntry<JsonObject>> build() throws IOException {
    validate();
    return new RestJsonWriter(this);
  }
}
| apache-2.0 |
fishercoder1534/Leetcode | src/main/java/com/fishercoder/solutions/_951.java | 738 | package com.fishercoder.solutions;
import com.fishercoder.common.classes.TreeNode;
public class _951 {
    public static class Solution1 {
        /**
         * Decides whether two binary trees are "flip equivalent": equal after
         * flipping the left/right children of any number of nodes.
         * Pure recursion with no shared state; tries the unflipped pairing
         * first and only then the flipped one.
         */
        public boolean flipEquiv(TreeNode root1, TreeNode root2) {
            if (root1 == null || root2 == null) {
                // Equivalent only when both subtrees are absent.
                return root1 == root2;
            }
            if (root1.val != root2.val) {
                return false;
            }
            if (flipEquiv(root1.left, root2.left) && flipEquiv(root1.right, root2.right)) {
                return true; // children match without flipping
            }
            // Otherwise the children may still match after a flip at this node.
            return flipEquiv(root1.left, root2.right) && flipEquiv(root1.right, root2.left);
        }
    }
}
| apache-2.0 |
talsma-ict/umldoclet | src/plantuml-asl/src/net/sourceforge/plantuml/activitydiagram3/ftile/vcompact/UGraphicInterceptorGoto.java | 1928 | /* ========================================================================
* PlantUML : a free UML diagram generator
* ========================================================================
*
* (C) Copyright 2009-2020, Arnaud Roques
*
* Project Info: https://plantuml.com
*
* If you like this project or if you find it useful, you can support us at:
*
* https://plantuml.com/patreon (only 1$ per month!)
* https://plantuml.com/paypal
*
* This file is part of PlantUML.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* Original Author: Arnaud Roques
*/
package net.sourceforge.plantuml.activitydiagram3.ftile.vcompact;
import net.sourceforge.plantuml.activitydiagram3.ftile.Ftile;
import net.sourceforge.plantuml.graphic.UGraphicDelegator;
import net.sourceforge.plantuml.ugraphic.UChange;
import net.sourceforge.plantuml.ugraphic.UGraphic;
import net.sourceforge.plantuml.ugraphic.UShape;
public class UGraphicInterceptorGoto extends UGraphicDelegator {
public UGraphicInterceptorGoto(UGraphic ug) {
super(ug);
}
public void draw(UShape shape) {
System.err.println("inter=" + shape.getClass());
if (shape instanceof Ftile) {
final Ftile foo = (Ftile) shape;
foo.drawU(this);
} else {
getUg().draw(shape);
System.err.println("Drawing " + shape);
}
}
public UGraphic apply(UChange change) {
return new UGraphicInterceptorGoto(getUg().apply(change));
}
}
| apache-2.0 |
wso2/carbon-identity-framework | components/authentication-framework/org.wso2.carbon.identity.application.authentication.framework/src/test/java/org/wso2/carbon/identity/application/authentication/framework/handler/request/impl/JITProvisioningPostAuthenticationHandlerTest.java | 12754 | /*
* Copyright (c) 2018, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.application.authentication.framework.handler.request.impl;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.wso2.carbon.identity.application.authentication.framework.AbstractFrameworkTest;
import org.wso2.carbon.identity.application.authentication.framework.ApplicationAuthenticator;
import org.wso2.carbon.identity.application.authentication.framework.FederatedApplicationAuthenticator;
import org.wso2.carbon.identity.application.authentication.framework.config.ConfigurationFacade;
import org.wso2.carbon.identity.application.authentication.framework.config.loader.UIBasedConfigurationLoader;
import org.wso2.carbon.identity.application.authentication.framework.config.model.AuthenticatorConfig;
import org.wso2.carbon.identity.application.authentication.framework.config.model.ExternalIdPConfig;
import org.wso2.carbon.identity.application.authentication.framework.config.model.SequenceConfig;
import org.wso2.carbon.identity.application.authentication.framework.config.model.StepConfig;
import org.wso2.carbon.identity.application.authentication.framework.context.AuthenticationContext;
import org.wso2.carbon.identity.application.authentication.framework.exception.FrameworkException;
import org.wso2.carbon.identity.application.authentication.framework.handler.request.PostAuthnHandlerFlowStatus;
import org.wso2.carbon.identity.application.authentication.framework.handler.sequence.StepBasedSequenceHandler;
import org.wso2.carbon.identity.application.authentication.framework.internal.FrameworkServiceDataHolder;
import org.wso2.carbon.identity.application.authentication.framework.model.AuthenticatedUser;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkConstants;
import org.wso2.carbon.identity.application.authentication.framework.util.FrameworkUtils;
import org.wso2.carbon.identity.application.common.model.IdentityProvider;
import org.wso2.carbon.identity.application.common.model.ServiceProvider;
import org.wso2.carbon.identity.core.util.IdentityTenantUtil;
import org.wso2.carbon.identity.handler.event.account.lock.constants.AccountConstants;
import org.wso2.carbon.identity.handler.event.account.lock.exception.AccountLockServiceException;
import org.wso2.carbon.identity.handler.event.account.lock.service.AccountLockService;
import org.wso2.carbon.identity.user.profile.mgt.association.federation.FederatedAssociationManager;
import org.wso2.carbon.identity.user.profile.mgt.association.federation.FederatedAssociationManagerImpl;
import org.wso2.carbon.identity.user.profile.mgt.association.federation.exception.FederatedAssociationManagerException;
import org.wso2.carbon.idp.mgt.IdentityProviderManagementException;
import org.wso2.carbon.user.api.UserStoreException;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.user.core.UserRealm;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.service.RealmService;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantConstants;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.xml.stream.XMLStreamException;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.powermock.api.mockito.PowerMockito.mockStatic;
/**
 * Tests for {@link JITProvisioningPostAuthenticationHandler}: verifies that the
 * post-authentication JIT-provisioning step reports SUCCESS_COMPLETED with no
 * authenticated user, with a non-federated authenticated user, and with a
 * federated authenticator whose user is associated, unlocked and enabled.
 * Relies on PowerMock static mocking of the classes listed in
 * {@code @PrepareForTest}; the setup order in {@code setupSuite} matters.
 */
@PrepareForTest({FrameworkUtils.class, ConfigurationFacade.class, AccountLockService.class,
        FrameworkServiceDataHolder.class, IdentityTenantUtil.class})
@PowerMockIgnore({"javax.xml.*"})
public class JITProvisioningPostAuthenticationHandlerTest extends AbstractFrameworkTest {
    private UIBasedConfigurationLoader configurationLoader;
    private HttpServletRequest request;
    private HttpServletResponse response;
    private JITProvisioningPostAuthenticationHandler postJITProvisioningHandler;
    private ServiceProvider sp;
    @Mock
    private FrameworkServiceDataHolder frameworkServiceDataHolder;
    @Mock
    private AccountLockService accountLockService;
    @BeforeClass
    protected void setupSuite() throws XMLStreamException, IdentityProviderManagementException {
        configurationLoader = new UIBasedConfigurationLoader();
        // Static mocks must be installed before any test builds a context.
        mockStatic(FrameworkUtils.class);
        mockStatic(ConfigurationFacade.class);
        ConfigurationFacade configurationFacade = mock(ConfigurationFacade.class);
        PowerMockito.when(ConfigurationFacade.getInstance()).thenReturn(configurationFacade);
        // Any IdP lookup resolves to the test identity provider fixture.
        IdentityProvider identityProvider = getTestIdentityProvider("default-tp-1.xml");
        ExternalIdPConfig externalIdPConfig = new ExternalIdPConfig(identityProvider);
        Mockito.doReturn(externalIdPConfig).when(configurationFacade).getIdPConfigByName(Mockito.anyString(), Mockito
                .anyString());
        // Keep the real implementation for this one static helper.
        when(FrameworkUtils.isStepBasedSequenceHandlerExecuted(Mockito.any(AuthenticationContext.class)))
                .thenCallRealMethod();
        request = mock(HttpServletRequest.class);
        response = mock(HttpServletResponse.class);
        postJITProvisioningHandler = JITProvisioningPostAuthenticationHandler.getInstance();
        sp = getTestServiceProvider("default-sp-1.xml");
    }
    @Test(description = "This test case tests the Post JIT provisioning handling flow without an authenticated user")
    public void testHandleWithoutAuthenticatedUser() throws FrameworkException {
        AuthenticationContext context = processAndGetAuthenticationContext(sp, false, false);
        PostAuthnHandlerFlowStatus postAuthnHandlerFlowStatus = postJITProvisioningHandler.handle(request, response,
                context);
        Assert.assertEquals(postAuthnHandlerFlowStatus, PostAuthnHandlerFlowStatus.SUCCESS_COMPLETED,
                "Post JIT provisioning handler executed without having a authenticated user");
    }
    @Test(description = "This test case tests the Post JIT provisioning handling flow with an authenticated user")
    public void testHandleWithAuthenticatedUserWithoutFederatedIdp() throws FrameworkException {
        AuthenticationContext context = processAndGetAuthenticationContext(sp, true, false);
        PostAuthnHandlerFlowStatus postAuthnHandlerFlowStatus = postJITProvisioningHandler
                .handle(request, response, context);
        Assert.assertEquals(postAuthnHandlerFlowStatus, PostAuthnHandlerFlowStatus.SUCCESS_COMPLETED,
                "Post JIT provisioning handler executed while having a authenticated user without federated "
                        + "authenticator");
    }
    @Test(description = "This test case tests the Post JIT provisioning handling flow with an authenticated user")
    public void testHandleWithAuthenticatedUserWithFederatedIdp() throws FrameworkException,
            FederatedAssociationManagerException, AccountLockServiceException, UserStoreException {
        AuthenticationContext context = processAndGetAuthenticationContext(sp, true, true);
        // Federated association resolves to local user "test".
        FederatedAssociationManager federatedAssociationManager = mock(FederatedAssociationManagerImpl.class);
        when(FrameworkUtils.getFederatedAssociationManager()).thenReturn(federatedAssociationManager);
        doReturn("test").when(federatedAssociationManager).getUserForFederatedAssociation
                (Mockito.anyString(), Mockito.anyString(), Mockito.anyString());
        when(FrameworkUtils.getStepBasedSequenceHandler()).thenReturn(Mockito.mock(StepBasedSequenceHandler.class));
        mockStatic(FrameworkServiceDataHolder.class);
        PowerMockito.when(FrameworkServiceDataHolder.getInstance()).thenReturn(frameworkServiceDataHolder);
        // Account is neither locked ...
        mockStatic(AccountLockService.class);
        when(frameworkServiceDataHolder.getAccountLockService()).thenReturn(accountLockService);
        when(accountLockService.isAccountLocked(anyString(), anyString())).thenReturn(false);
        // ... nor disabled: wire a user-store whose claims say accountDisabled=false.
        RealmService mockRealmService = mock(RealmService.class);
        PowerMockito.when(FrameworkServiceDataHolder.getInstance().getRealmService()).thenReturn(mockRealmService);
        UserRealm mockUserRealm = mock(UserRealm.class);
        UserStoreManager mockUserStoreManager = mock(UserStoreManager.class);
        Map<String, String> mockClaimValues = mock(HashMap.class);
        mockStatic(IdentityTenantUtil.class);
        when(IdentityTenantUtil.getTenantId(anyString())).thenReturn(1);
        when(mockRealmService.getTenantUserRealm(anyInt())).thenReturn(mockUserRealm);
        when(mockUserRealm.getUserStoreManager()).thenReturn(mockUserStoreManager);
        when(mockUserStoreManager.getUserClaimValues(anyString(),
                eq(new String[]{AccountConstants.ACCOUNT_DISABLED_CLAIM}),
                eq(UserCoreConstants.DEFAULT_PROFILE))).thenReturn(mockClaimValues);
        when(mockClaimValues.get(AccountConstants.ACCOUNT_DISABLED_CLAIM)).thenReturn("false");
        PostAuthnHandlerFlowStatus postAuthnHandlerFlowStatus = postJITProvisioningHandler
                .handle(request, response, context);
        Assert.assertEquals(postAuthnHandlerFlowStatus, PostAuthnHandlerFlowStatus.SUCCESS_COMPLETED,
                "Post JIT provisioning handler executed while having a authenticated user without federated "
                        + "authenticator");
    }
    /**
     * To get the authentication context and to call the handle method of the PostJitProvisioningHandler.
     *
     * @param sp1 Service Provider
     * @param withAuthenticatedUser whether to attach an authenticated user ("test") to every step
     * @param isFederated whether the step authenticator should be a FederatedApplicationAuthenticator
     * @return relevant authentication context.
     * @throws FrameworkException Framwork Exception.
     */
    private AuthenticationContext processAndGetAuthenticationContext(ServiceProvider sp1, boolean
            withAuthenticatedUser, boolean isFederated) throws FrameworkException {
        AuthenticationContext context = getAuthenticationContext(sp1);
        SequenceConfig sequenceConfig = configurationLoader
                .getSequenceConfig(context, Collections.emptyMap(), sp1);
        context.setSequenceConfig(sequenceConfig);
        context.setProperty(FrameworkConstants.STEP_BASED_SEQUENCE_HANDLER_TRIGGERED, true);
        ApplicationAuthenticator applicationAuthenticator = mock(ApplicationAuthenticator.class);
        if (isFederated) {
            applicationAuthenticator = mock(FederatedApplicationAuthenticator.class);
        }
        when(applicationAuthenticator.getName()).thenReturn("Authenticator1");
        if (withAuthenticatedUser) {
            AuthenticatedUser authenticatedUser = new AuthenticatedUser();
            authenticatedUser.setUserName("test");
            authenticatedUser.setTenantDomain(MultitenantConstants.SUPER_TENANT_DOMAIN_NAME);
            authenticatedUser.setAuthenticatedSubjectIdentifier("test");
            sequenceConfig.setAuthenticatedUser(authenticatedUser);
            AuthenticatorConfig authenticatorConfig = new AuthenticatorConfig();
            authenticatorConfig.setApplicationAuthenticator(applicationAuthenticator);
            for (Map.Entry<Integer, StepConfig> entry : sequenceConfig.getStepMap().entrySet()) {
                StepConfig stepConfig = entry.getValue();
                stepConfig.setAuthenticatedAutenticator(authenticatorConfig);
                stepConfig.setAuthenticatedUser(authenticatedUser);
            }
            context.setSequenceConfig(sequenceConfig);
        }
        UserCoreUtil.setDomainInThreadLocal("test_domain");
        return context;
    }
}
| apache-2.0 |
ePages-de/restdocs-wiremock | wiremock/src/main/java/com/epages/restdocs/ResponseTemplateProcessor.java | 3073 | package com.epages.restdocs;
import com.jayway.jsonpath.DocumentContext;
import com.jayway.jsonpath.JsonPath;
import org.springframework.web.util.UriComponentsBuilder;
import org.springframework.web.util.UriTemplate;
import java.util.List;
/**
 * Post-processes a documented JSON response body, replacing selected fields
 * with WireMock response-template expressions so the recorded stub produces
 * dynamic values.
 */
class ResponseTemplateProcessor {

    // All state is set once in the constructor; fields are final to make the
    // processor immutable (improvement over the original mutable fields).
    private final List<ResponseFieldTemplateDescriptor> templateDescriptors;
    private final UriTemplate uriTemplate;
    private final String responseBody;

    ResponseTemplateProcessor(List<ResponseFieldTemplateDescriptor> templateDescriptors, UriTemplate uriTemplate, String responseBody) {
        this.templateDescriptors = templateDescriptors;
        this.uriTemplate = uriTemplate;
        this.responseBody = responseBody;
    }

    /**
     * Replaces the value at each descriptor's JSON path with a template
     * expression and returns the resulting JSON string.
     *
     * <p>For each descriptor: if the request carried a URI template with
     * variables, a {@code request.requestLine.pathSegments} expression is
     * derived from the descriptor's URI-template variable name (when set);
     * otherwise the descriptor must supply an explicit WireMock template
     * expression, which is wrapped in {@code {{...}}}.
     *
     * @return the response body unchanged when there are no descriptors,
     *         otherwise the rewritten JSON
     * @throws IllegalArgumentException if a descriptor references a URI
     *         template variable while no URI template is available, or
     *         supplies neither replacement kind
     */
    String replaceTemplateFields() {
        if (templateDescriptors.isEmpty()) {
            return responseBody;
        }
        DocumentContext documentContext = JsonPath.parse(responseBody);
        for (ResponseFieldTemplateDescriptor descriptor : templateDescriptors) {
            String expression = null;
            if (uriTemplate != null && !uriTemplate.getVariableNames().isEmpty()) {
                expression = preProcessUriTemplateVariableNameExpression(descriptor);
            } else if (descriptor.getUriTemplateVariableName() != null) {
                throw new IllegalArgumentException("Descriptor for field '" + descriptor.getPath() + "' specifies a 'replacedWithUriTemplateVariableValue' but no URI Template could be found in. " +
                        "Make sure to construct your request with the methods in org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders that use URI templates");
            }
            if (expression == null) {
                if (descriptor.getWireMockTemplateExpression() == null) {
                    throw new IllegalArgumentException("Descriptor for field '" + descriptor.getPath() + "' contains no replacedWithWireMockTemplateExpression");
                }
                expression = "{{" + descriptor.getWireMockTemplateExpression() + "}}";
            }
            documentContext.set(descriptor.getPath(), expression);
        }
        return documentContext.jsonString();
    }

    /**
     * Builds a WireMock path-segment expression for the descriptor's URI
     * template variable, or returns null when the descriptor names none.
     */
    private String preProcessUriTemplateVariableNameExpression(ResponseFieldTemplateDescriptor descriptor) {
        if (descriptor.getUriTemplateVariableName() != null) {
            return "{{request.requestLine.pathSegments.[" + getIndexOfUriVariableInPath(descriptor.getUriTemplateVariableName()) + "]}}";
        }
        return null;
    }

    /**
     * Finds the index of the path segment containing the given URI template
     * variable name (substring match against each segment).
     *
     * @throws IllegalArgumentException when no segment contains the variable
     */
    private int getIndexOfUriVariableInPath(String variableName) {
        List<String> pathSegments = UriComponentsBuilder.fromUriString(uriTemplate.toString()).build().getPathSegments();
        for (int i = 0; i < pathSegments.size(); i++) {
            if (pathSegments.get(i).contains(variableName)) {
                return i;
            }
        }
        throw new IllegalArgumentException("Could not find variableName '" + variableName + "' in URL Template - present variables are '" + uriTemplate.getVariableNames() + "'");
    }
}
| apache-2.0 |
liuyanggithub/Hi | src/com/ly/hi/lbs/response/CreatePoiRes.java | 482 | package com.ly.hi.lbs.response;
/**
 * <Pre>
 * Response model for the LBS "create POI" API call.
 * (The original header comment was GBK text mis-decoded as Latin-1; it read
 * roughly: "TODO: describe what this file does".)
 * </Pre>
 *
 * @author Liu Yang
 * @version 1.0
 * <p/>
 * Create by 2015/4/1 0001 5:50 PM
 */
public class CreatePoiRes {

	// Status code of the create-POI call, as passed by the caller.
	private int status;
	// Result message of the create-POI call, as passed by the caller.
	private String message;
	// Id of the newly created data record (required). The original field
	// comment was GBK mis-decoded as Latin-1: "新增的数据的id String 必须".
	private String id;

	/**
	 * Bug fix: {@code status} and {@code message} were previously accepted
	 * but silently discarded; they are now stored so callers can inspect the
	 * outcome of the call. The constructor signature is unchanged.
	 */
	public CreatePoiRes(int status, String message, String id) {
		this.status = status;
		this.message = message;
		this.id = id;
	}

	public int getStatus() {
		return status;
	}

	public void setStatus(int status) {
		this.status = status;
	}

	public String getMessage() {
		return message;
	}

	public void setMessage(String message) {
		this.message = message;
	}

	public String getId() {
		return id;
	}

	public void setId(String id) {
		this.id = id;
	}
}
| apache-2.0 |
steelkiwi/libGDX-Path-Editor | src/com/steelkiwi/patheditor/gdx/BGDrawer.java | 2988 | /*
* Copyright (C) 2013 Steelkiwi Development, Julia Zudikova
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.steelkiwi.patheditor.gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.Pixmap;
import com.badlogic.gdx.graphics.Pixmap.Format;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.SpriteBatch;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer;
import com.badlogic.gdx.graphics.glutils.ShapeRenderer.ShapeType;
import com.badlogic.gdx.math.Matrix4;
/**
 * Draws background layers for the path editor: a plain "fake" background with
 * a yellow screen border, and a semi-transparent grey overlay covering the
 * camera-visible area outside the screen rectangle. Owns GL resources
 * (ShapeRenderer, overlay Texture) -- callers must invoke {@link #dispose()}.
 */
public class BGDrawer {
	private ShapeRenderer renderer;
	private Texture overlay;
	// Four sprites sharing the same 4x4 grey texture, stretched to cover the
	// left/right/top/bottom regions around the screen rectangle.
	private Sprite l, r, t, b;
	// Alpha used when drawing the overlay sprites; adjustable via setter.
	private float overlayBGAlpha = 0.7f;
	public BGDrawer() {
		renderer = new ShapeRenderer();
		// Build a tiny solid grey texture; the Pixmap can be disposed once
		// uploaded to the Texture.
		Pixmap p = new Pixmap(4, 4, Format.RGBA4444);
		p.setColor(0.698f, 0.698f, 0.698f, 1f);
		p.fill();
		overlay = new Texture(p);
		p.dispose();
		t = new Sprite(overlay);
		b = new Sprite(overlay);
		l = new Sprite(overlay);
		r = new Sprite(overlay);
	}
	/**
	 * Fills the screen rectangle (0,0 .. scrW,scrH) with black and outlines
	 * it in yellow, using the given camera projection.
	 */
	public void presentFakeBG(int scrW, int scrH, Matrix4 cameraCombined) {
		renderer.setProjectionMatrix(cameraCombined);
		renderer.begin(ShapeType.FilledRectangle);
		renderer.setColor(Color.BLACK);
		renderer.filledRect(0, 0, scrW, scrH);
		renderer.end();
		renderer.begin(ShapeType.Rectangle);
		renderer.setColor(Color.YELLOW);
		renderer.rect(0, 0, scrW, scrH);
		renderer.end();
	}
	/**
	 * Draws the four grey overlay sprites around the screen rectangle.
	 * (camX, camY) is the camera center and (camW, camH) its viewport size,
	 * so the sprites cover the camera-visible margin outside 0..scrW/0..scrH.
	 * Assumes the camera view fully contains the screen rectangle -- the
	 * computed sizes go negative otherwise; TODO confirm callers guarantee it.
	 */
	public void presentOverlayBG(int scrW, int scrH, int camX, int camY, int camW, int camH, SpriteBatch batch) {
		batch.begin();
		t.setSize(camW, (int)(camY + camH/2 - scrH));
		t.setPosition((int)(camX - camW/2), scrH);
		t.draw(batch, overlayBGAlpha);
		b.setSize(camW, (int)(-camY + camH/2));
		b.setPosition((int)(camX - camW/2), (int)(camY - camH/2));
		b.draw(batch, overlayBGAlpha);
		l.setSize((int)(-camX + camW/2), scrH);
		l.setPosition((int)(camX - camW/2), 0);
		l.draw(batch, overlayBGAlpha);
		r.setSize((int)(camX + camW/2 - scrW), scrH);
		r.setPosition(scrW, 0);
		r.draw(batch, overlayBGAlpha);
		batch.end();
	}
	public void setOverlayBGAlpha(float overlayBGAlpha) {
		this.overlayBGAlpha = overlayBGAlpha;
	}
	/**
	 * Releases GL resources and drops sprite references. Safe to call once;
	 * fields are nulled so a second call is a no-op for the owned resources.
	 */
	public void dispose() {
		if (renderer != null) { renderer.dispose(); renderer = null; }
		if (overlay != null) { overlay.dispose(); overlay = null; }
		l = null;
		r = null;
		t = null;
		b = null;
	}
}
| apache-2.0 |
HubSpot/twitter4j | twitter4j-core/src/internal-http/java/twitter4j/HttpResponse.java | 7849 | /*
* Copyright 2007 Yusuke Yamamoto
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package twitter4j;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import twitter4j.conf.ConfigurationContext;
/**
* A data class representing HTTP Response
*
* @author Yusuke Yamamoto - yusuke at mac.com
*/
public abstract class HttpResponse {
    private static final Logger logger = Logger.getLogger(HttpResponseImpl.class);

    /** HTTP client configuration; controls pretty-printed debug logging. */
    protected final HttpClientConfiguration CONF;

    HttpResponse() {
        this.CONF = ConfigurationContext.getInstance().getHttpClientConfiguration();
    }

    public HttpResponse(HttpClientConfiguration conf) {
        this.CONF = conf;
    }

    protected int statusCode;
    // Lazily populated representations of the response body. The underlying
    // stream can be consumed only once, either directly or via asString().
    protected String responseAsString = null;
    protected InputStream is;
    private boolean streamConsumed = false;

    /**
     * Returns the HTTP status code of this response.
     *
     * @return HTTP status code
     */
    public int getStatusCode() {
        return statusCode;
    }

    public abstract String getResponseHeader(String name);

    public abstract Map<String, List<String>> getResponseHeaderFields();

    /**
     * Returns the response stream.<br>
     * This method cannot be called after calling asString() or asDocument()<br>
     * It is suggested to call disconnect() after consuming the stream.
     * <p>
     * Disconnects the internal HttpsURLConnection silently.
     *
     * @return response body stream
     * @throws IllegalStateException if the stream was already consumed
     * @see #disconnect()
     */
    public InputStream asStream() {
        if (streamConsumed) {
            throw new IllegalStateException("Stream has already been consumed.");
        }
        return is;
    }

    /**
     * Returns the response body as string, caching the result.<br>
     * Disconnects the internal HttpsURLConnection silently.
     *
     * @return response body, or null if there is no response stream
     * @throws TwitterException when there is any network issue upon response body consumption
     */
    public String asString() throws TwitterException {
        if (null == responseAsString) {
            BufferedReader br = null;
            InputStream stream = null;
            try {
                stream = asStream();
                if (null == stream) {
                    return null;
                }
                br = new BufferedReader(new InputStreamReader(stream, StandardCharsets.UTF_8));
                StringBuilder buf = new StringBuilder();
                String line;
                while ((line = br.readLine()) != null) {
                    buf.append(line).append("\n");
                }
                this.responseAsString = buf.toString();
                logger.debug(responseAsString);
                // Mark the stream as consumed only after a fully successful
                // read; closing is handled once, in the finally block below
                // (the previous implementation closed the stream twice).
                streamConsumed = true;
            } catch (IOException ioe) {
                throw new TwitterException(ioe.getMessage(), ioe);
            } finally {
                if (stream != null) {
                    try {
                        stream.close();
                    } catch (IOException ignore) {
                    }
                }
                if (br != null) {
                    try {
                        br.close();
                    } catch (IOException ignore) {
                    }
                }
                disconnectForcibly();
            }
        }
        return responseAsString;
    }

    private JSONObject json = null;

    /**
     * Returns the response body as twitter4j.JSONObject, caching the result.<br>
     * Disconnects the internal HttpsURLConnection silently.
     *
     * @return response body as twitter4j.JSONObject
     * @throws TwitterException when the response body is not in JSON Object format
     */
    public JSONObject asJSONObject() throws TwitterException {
        if (json == null) {
            Reader reader = null;
            try {
                // Parse from the raw stream unless the body has already been
                // materialised as a string by asString().
                if (responseAsString == null) {
                    reader = asReader();
                    json = new JSONObject(new JSONTokener(reader));
                } else {
                    json = new JSONObject(responseAsString);
                }
                if (CONF.isPrettyDebugEnabled()) {
                    logger.debug(json.toString(1));
                } else {
                    logger.debug(responseAsString != null ? responseAsString :
                            json.toString());
                }
            } catch (JSONException jsone) {
                if (responseAsString == null) {
                    throw new TwitterException(jsone.getMessage(), jsone);
                } else {
                    throw new TwitterException(jsone.getMessage() + ":" + this.responseAsString, jsone);
                }
            } finally {
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (IOException ignore) {
                    }
                }
                disconnectForcibly();
            }
        }
        return json;
    }

    private JSONArray jsonArray = null;

    /**
     * Returns the response body as twitter4j.JSONArray, caching the result.<br>
     * Disconnects the internal HttpsURLConnection silently.
     *
     * @return response body as twitter4j.JSONArray
     * @throws TwitterException when the response body is not in JSON Array format
     */
    public JSONArray asJSONArray() throws TwitterException {
        if (jsonArray == null) {
            Reader reader = null;
            try {
                if (responseAsString == null) {
                    reader = asReader();
                    jsonArray = new JSONArray(new JSONTokener(reader));
                } else {
                    jsonArray = new JSONArray(responseAsString);
                }
                if (CONF.isPrettyDebugEnabled()) {
                    logger.debug(jsonArray.toString(1));
                } else {
                    logger.debug(responseAsString != null ? responseAsString :
                            jsonArray.toString());
                }
            } catch (JSONException jsone) {
                // Mirror asJSONObject(): include the raw body in the message
                // only when it is actually available. The previous check on
                // logger.isDebugEnabled() could embed the literal "null".
                if (responseAsString == null) {
                    throw new TwitterException(jsone.getMessage(), jsone);
                } else {
                    throw new TwitterException(jsone.getMessage() + ":" + this.responseAsString, jsone);
                }
            } finally {
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (IOException ignore) {
                    }
                }
                disconnectForcibly();
            }
        }
        return jsonArray;
    }

    /**
     * Returns a buffered UTF-8 reader over the raw response stream.
     *
     * @return reader over the response body
     */
    public Reader asReader() {
        // UTF-8 is guaranteed to be supported on every JVM, so the previous
        // UnsupportedEncodingException fallback path was unreachable.
        return new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
    }

    /** Calls disconnect(), swallowing any exception it throws. */
    private void disconnectForcibly() {
        try {
            disconnect();
        } catch (Exception ignore) {
        }
    }

    public abstract void disconnect() throws IOException;

    @Override
    public String toString() {
        return "HttpResponse{" +
                "statusCode=" + statusCode +
                ", responseAsString='" + responseAsString + '\'' +
                ", is=" + is +
                ", streamConsumed=" + streamConsumed +
                '}';
    }
}
| apache-2.0 |
KishorAndroid/PocketHub | app/src/main/java/com/github/pockethub/android/ui/search/SearchRepositoryListFragment.java | 6380 | /*
* Copyright (c) 2015 PocketHub
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.pockethub.android.ui.search;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.widget.ListView;
import com.meisolsson.githubsdk.core.ServiceGenerator;
import com.meisolsson.githubsdk.model.Page;
import com.meisolsson.githubsdk.model.Repository;
import com.github.kevinsawicki.wishlist.SingleTypeAdapter;
import com.github.pockethub.android.R;
import com.github.pockethub.android.core.PageIterator;
import com.github.pockethub.android.core.ResourcePager;
import com.github.pockethub.android.rx.ProgressObserverAdapter;
import com.github.pockethub.android.ui.PagedItemFragment;
import com.github.pockethub.android.ui.repo.RepositoryViewActivity;
import com.github.pockethub.android.util.InfoUtils;
import com.meisolsson.githubsdk.model.SearchPage;
import com.meisolsson.githubsdk.service.repositories.RepositoryService;
import com.meisolsson.githubsdk.service.search.SearchService;
import java.text.MessageFormat;
import java.util.List;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.schedulers.Schedulers;
import retrofit2.Response;
import static android.app.SearchManager.QUERY;
/**
* Fragment to display a list of {@link Repository} instances
*/
public class SearchRepositoryListFragment extends PagedItemFragment<Repository> {

    // Search query, read from the SearchManager QUERY intent extra in start().
    private String query;

    @Override
    protected ResourcePager<Repository> createPager() {
        return new ResourcePager<Repository>() {
            @Override
            protected Object getId(Repository resource) {
                return resource.id();
            }

            @Override
            public PageIterator<Repository> createIterator(int page, int size) {
                // Adapts the SearchPage returned by the search endpoint into
                // the generic Page<Repository> expected by PageIterator.
                return new PageIterator<>(page1 ->
                        ServiceGenerator.createService(getContext(), SearchService.class)
                                .searchRepositories(query, null, null, page1)
                                .map(response -> {
                                    SearchPage<Repository> repositorySearchPage = response.body();
                                    return Response.success(Page.<Repository>builder()
                                            .first(repositorySearchPage.first())
                                            .last(repositorySearchPage.last())
                                            .next(repositorySearchPage.next())
                                            .prev(repositorySearchPage.prev())
                                            .items(repositorySearchPage.items())
                                            .build());
                                }), page);
            }
        };
    }

    @Override
    protected int getLoadingMessage() {
        return R.string.loading_repositories;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);

        setEmptyText(R.string.no_repositories);
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        // Pick up the query (and possibly short-circuit to a repository view)
        // as soon as the fragment is attached.
        start();
    }

    @Override
    public void refresh() {
        start();
        super.refresh();
    }

    // Reads the query from the intent extras and, if it is an exact repo
    // owner/name match, opens that repository directly.
    private void start(){
        query = getStringExtra(QUERY);
        openRepositoryMatch(query);
    }

    @Override
    public void onListItemClick(ListView l, View v, int position, long id) {
        final Repository result = (Repository) l.getItemAtPosition(position);
        // Fetch the full repository model before opening the detail view;
        // progress is shown while the request is in flight.
        ServiceGenerator.createService(getContext(), RepositoryService.class)
                .getRepository(result.owner().login(), result.name())
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .compose(this.bindToLifecycle())
                .subscribe(new ProgressObserverAdapter<Response<Repository>>(getActivity(),
                        MessageFormat.format(getString(R.string.opening_repository),
                                InfoUtils.createRepoId(result))) {

                    @Override
                    public void onSuccess(Response<Repository> response) {
                        super.onSuccess(response);
                        startActivity(RepositoryViewActivity.createIntent(response.body()));
                    }
                });
    }

    /**
     * Check if the search query is an exact repository name/owner match and
     * open the repository activity and finish the current activity when it is
     *
     * @param query
     * @return true if query opened as repository, false otherwise
     */
    private boolean openRepositoryMatch(final String query) {
        if (TextUtils.isEmpty(query)) {
            return false;
        }

        Repository repoId = InfoUtils.createRepoFromUrl(query.trim());
        if (repoId == null) {
            return false;
        }

        Repository repo;
        // NOTE(review): blockingGet() performs the network request on the
        // calling thread — confirm this method is never invoked on the UI
        // thread, or move the call onto a background scheduler.
        repo = ServiceGenerator.createService(getContext(), RepositoryService.class)
                .getRepository(repoId.owner().login(), repoId.name())
                .blockingGet()
                .body();

        startActivity(RepositoryViewActivity.createIntent(repo));

        final Activity activity = getActivity();
        if (activity != null) {
            activity.finish();
        }
        return true;
    }

    @Override
    protected int getErrorMessage(Exception exception) {
        return R.string.error_repos_load;
    }

    @Override
    protected SingleTypeAdapter<Repository> createAdapter(
            List<Repository> items) {
        return new SearchRepositoryListAdapter(getActivity()
                .getLayoutInflater(), items.toArray(new Repository[items
                .size()]));
    }
}
| apache-2.0 |
jbosstm/microprofile-sandbox | proposals/0009-LRA/sra-annotations/src/main/java/org/eclipse/microprofile/sra/client/txstatusext/TransactionStatusElement.java | 3169 | /*
* Copyright (c) 2007, Eclipse Foundation, Inc. and its licensors.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the Eclipse Foundation, Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*/
package org.eclipse.microprofile.sra.client.txstatusext;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlType;
/**
 * Extended transaction status values exposed by the REST transaction status
 * resource. Each constant is serialised by JAXB as its literal name (see
 * {@code @XmlEnum(String.class)}), so constant names and order are part of
 * the wire format and must not change.
 */
@XmlType
@XmlEnum(String.class)
public enum TransactionStatusElement {
    TransactionRollbackOnly,
    TransactionRollingBack,
    TransactionRolledBack,
    TransactionCommitting,
    TransactionCommitted,
    TransactionHeuristicRollback,
    TransactionHeuristicCommit,
    TransactionHeuristicHazard,
    TransactionHeuristicMixed,
    TransactionPreparing,
    TransactionPrepared,
    TransactionActive,
    TransactionCommittedOnePhase,
    TransactionReadOnly,
    TransactionStatusNone
}
| apache-2.0 |
ThiagoRobert19/Ecommerce | Ecommerce-master/src/main/java/br/com/caelum/vraptor/util/JPAUtil.java | 839 | /**
*
*/
package br.com.caelum.vraptor.util;
/**
* CIAware :: Centro de Informatizações e Análises
* -----------------------------------------------
*
* @author Thiago Robert Prado Souza (Thiago Robert 19, 04/05/2016)
* Responsabilidade da classe:
*
*
*/
/**
* CIAware :: Center of Informatization and Analysis
* -----------------------------------------------------
*
* @author Thiago Robert Prado Souza (Thiago Robert 19, 04/05/2016)
* Class responsibility:
*
*
*/
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
public class JPAUtil {

    // Shared factory for the "default" persistence unit. Building an
    // EntityManagerFactory is expensive, and the previous implementation
    // created (and leaked) a brand new factory on every call. The factory is
    // created lazily on first use so that any configuration failure still
    // surfaces at the same point as before.
    private static EntityManagerFactory factory;

    /**
     * Returns a new {@link EntityManager} backed by the shared "default"
     * persistence unit factory.
     *
     * @return a fresh EntityManager; the caller is responsible for closing it
     */
    public static synchronized EntityManager criaEntityManager()
    {
        if (factory == null) {
            factory = Persistence.createEntityManagerFactory("default");
        }
        return factory.createEntityManager();
    }
}
| apache-2.0 |
liulei-0911/LLApp | cameralibrary/src/main/java/com/umeng/cameraliarary/aibum/ThumbnaiImageView.java | 3098 | package com.umeng.cameraliarary.aibum;
import android.content.Context;
import android.view.View;
import android.widget.CheckBox;
import android.widget.CompoundButton.OnCheckedChangeListener;
import android.widget.FrameLayout;
import android.widget.ImageView;
import com.umeng.cameraliarary.imageloader.DisplayImageOptions;
import com.umeng.cameraliarary.imageloader.ImageLoader;
import com.umeng.cameralibrary.R;
/**
* @ClassName: AlbumItemView
* @Description: 相册Item项 提取出来主要是为了实现点击ImageView变暗效果
* @author LinJ
* @date 2015-1-5 下午5:39:35
*
*/
public class ThumbnaiImageView extends FrameLayout {
    public static final String TAG="AlbumItemView";
    // Holds the thumbnail, its checkbox and the video-play icon.
    private final ViewHolder mViewHolder;
    // Loader and options used to asynchronously display the thumbnail.
    private final ImageLoader mImageLoader;
    private final DisplayImageOptions mOptions;
    // Path of the media file currently displayed by this item.
    private String mPath;
    // Adapter position of this item.
    private int mPosition;

    public ThumbnaiImageView(Context context,ImageLoader imageLoader,DisplayImageOptions options) {
        super(context);
        inflate(context, R.layout.item_album_grid, this);
        FilterImageView imageView=(FilterImageView) findViewById(R.id.imgThumbnail);
        CheckBox checkBox=(CheckBox) findViewById(R.id.checkbox);
        ImageView icon=(ImageView)findViewById(R.id.videoicon);
        mViewHolder=new ViewHolder(imageView,checkBox,icon);
        this.mImageLoader=imageLoader;
        this.mOptions=options;
    }

    /**
     * Binds this item to a media file.
     *
     * @param path     file path the item points to; also used as the checkbox
     *                 tag and this view's tag
     * @param position adapter position of the item
     * @param editable whether the album is in edit mode (checkbox visible)
     * @param checked  whether the checkbox is checked
     */
    public void setTags(String path,int position,boolean editable,boolean checked){
        // Edit mode: show the checkbox and restore its checked state.
        if (editable) {
            mViewHolder.checkBox.setVisibility(View.VISIBLE);
            mViewHolder.checkBox.setChecked(checked);
        }else {
            mViewHolder.checkBox.setVisibility(View.GONE);
        }
        // Path changed (or first bind): reload the thumbnail.
        if (mPath==null||!mPath.equals(path)) {
            mImageLoader.loadImage(path, mViewHolder.imageView, mOptions);
            mPath=path;
            // Tag the checkbox (and this view) with the path so the current
            // selection can be identified later.
            mViewHolder.checkBox.setTag(path);
            setTag(path);
            // NOTE(review): a path containing "video" anywhere toggles the
            // play icon — confirm video files are reliably identified this way.
            if(mPath.contains("video")){
                mViewHolder.videoIconView.setVisibility(View.VISIBLE);
            }else {
                mViewHolder.videoIconView.setVisibility(View.GONE);
            }
            mPosition=position;
        }
    }

    public int getPosition(){
        return mPosition;
    }

    /**
     * Registers a listener for checkbox state changes.
     * @param listener
     */
    public void setOnCheckedChangeListener(OnCheckedChangeListener listener){
        mViewHolder.checkBox.setOnCheckedChangeListener(listener);
    }

    @Override
    public void setOnClickListener(OnClickListener l) {
        // Redirect clicks on this view so they are triggered by the ImageView.
        mViewHolder.imageView.setOnClickListener(l);
    }

    public class ViewHolder {
        public ViewHolder(ImageView imageView,CheckBox checkBox,ImageView icon){
            this.imageView=imageView;
            this.checkBox=checkBox;
            this.videoIconView=icon;
        }
        ImageView imageView;   // thumbnail
        ImageView videoIconView; // video play icon
        CheckBox checkBox;     // selection checkbox
    }
}
| apache-2.0 |
ksoichiro/ability | library/src/test/java/com/github/ksoichiro/ability/test/AbilityTest.java | 2944 | package com.github.ksoichiro.ability.test;
import com.github.ksoichiro.ability.Ability;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import static org.junit.Assert.*;
public class AbilityTest {

    /** An Ability with no rules denies every action. */
    @Test
    public void empty() {
        Ability ability = new Ability();
        assertFalse(ability.allowed(new User(), Book.Actions.READ.name(), new Book()));
    }

    /** A single Book rule: published books readable by anyone, editable by author only. */
    @Test
    public void oneRule() {
        Ability ability = new Ability();
        ability.addRule(Book.class);
        User user1 = new User(1);
        User user2 = new User(2);
        Book book1 = new Book();
        book1.setAuthor(user1);
        book1.setPublished(true);
        Book book2 = new Book();
        assertEquals(1, user1.getId());
        assertTrue(ability.allowed(user1, Book.Actions.READ.name(), book1));
        assertTrue(ability.allowed(user2, Book.Actions.READ.name(), book1));
        assertTrue(ability.allowed(user1, Book.Actions.EDIT.name(), book1));
        assertFalse(ability.allowed(user2, Book.Actions.EDIT.name(), book1));
        assertFalse(ability.allowed(user1, Book.Actions.READ.name(), book2));
        assertFalse(ability.allowed(user2, Book.Actions.READ.name(), book2));
    }

    /** Rules survive a Java serialization round trip. */
    @Test
    public void abilityIsSerializable() throws Exception {
        Ability ability = new Ability();
        ability.addRule(Book.class);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        ObjectOutputStream out = new ObjectOutputStream(baos);
        out.writeObject(ability);
        out.close();
        baos.close();
        ObjectInputStream input = new ObjectInputStream(new ByteArrayInputStream(baos.toByteArray()));
        Ability deserialized = (Ability) input.readObject();
        User user1 = new User(1);
        Book book1 = new Book();
        book1.setPublished(true);
        assertTrue(ability.hasRule(Book.class));
        assertTrue(ability.allowed(user1, Book.Actions.READ.name(), book1));
        assertTrue(deserialized.hasRule(Book.class));
        assertTrue(deserialized.allowed(user1, Book.Actions.READ.name(), book1));
    }

    /** Same checks as oneRule(), but via the User.can(...) convenience API. */
    @Test
    public void userDefinedAbility() {
        User user1 = new User(1);
        User user2 = new User(2);
        Book book1 = new Book();
        book1.setAuthor(user1);
        book1.setPublished(true);
        Book book2 = new Book();
        assertEquals(1, user1.getId());
        assertTrue(user1.can(Book.Actions.READ.name(), book1));
        assertTrue(user2.can(Book.Actions.READ.name(), book1));
        assertTrue(user1.can(Book.Actions.EDIT.name(), book1));
        assertFalse(user2.can(Book.Actions.EDIT.name(), book1));
        // Fixed copy-paste duplicate: mirror oneRule() and check user1 first.
        assertFalse(user1.can(Book.Actions.READ.name(), book2));
        assertFalse(user2.can(Book.Actions.READ.name(), book2));
        assertTrue(user1.canReadBook(book1));
    }
}
| apache-2.0 |
opencb/opencga | opencga-app/src/main/java/org/opencb/opencga/app/cli/main/executors/OpencgaCommandExecutor.java | 11280 | /*
* Copyright 2015-2017 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.app.cli.main.executors;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.opencb.commons.datastore.core.ObjectMap;
import org.opencb.commons.datastore.core.QueryOptions;
import org.opencb.opencga.app.cli.CliSession;
import org.opencb.opencga.app.cli.CommandExecutor;
import org.opencb.opencga.app.cli.GeneralCliOptions;
import org.opencb.opencga.app.cli.main.io.*;
import org.opencb.opencga.catalog.exceptions.CatalogException;
import org.opencb.opencga.client.exceptions.ClientException;
import org.opencb.opencga.client.rest.OpenCGAClient;
import org.opencb.opencga.core.api.ParamConstants;
import org.opencb.opencga.core.common.JacksonUtils;
import org.opencb.opencga.core.models.user.AuthenticationResponse;
import org.opencb.opencga.core.response.RestResponse;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Paths;
import java.util.Base64;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
/**
* Created on 27/05/16.
*
* @author imedina
*/
public abstract class OpencgaCommandExecutor extends CommandExecutor {

    // REST client used by all subcommands; authenticated in init().
    protected OpenCGAClient openCGAClient;
    // Writer that serialises REST responses in the requested output format.
    protected AbstractOutputWriter writer;

    // ANSI escape codes used to highlight error messages on the console.
    protected static final String ANSI_RESET = "\033[0m";
    protected static final String ANSI_RED = "\033[31m";

    public OpencgaCommandExecutor(GeneralCliOptions.CommonCommandOptions options) {
        this(options, false);
    }

    public OpencgaCommandExecutor(GeneralCliOptions.CommonCommandOptions options, boolean skipDuration) {
        super(options, true);
        init(options, skipDuration);
    }

    /**
     * Configures the output writer from the requested format and builds an
     * authenticated OpenCGAClient, preferring an explicit command-line token
     * over the stored CLI session.
     *
     * @param options      common command-line options (output format, token, ...)
     * @param skipDuration when true, the stored token is used without checking
     *                     or refreshing its expiration time
     */
    private void init(GeneralCliOptions.CommonCommandOptions options, boolean skipDuration) {
        try {
            WriterConfiguration writerConfiguration = new WriterConfiguration();
            writerConfiguration.setMetadata(options.metadata);
            writerConfiguration.setHeader(!options.noHeader);

            switch (options.outputFormat.toLowerCase()) {
                case "json_pretty":
                    writerConfiguration.setPretty(true);
                    // intentional fall-through: pretty JSON is still JSON
                case "json":
                    this.writer = new JsonOutputWriter(writerConfiguration);
                    break;
                case "yaml":
                    this.writer = new YamlOutputWriter(writerConfiguration);
                    break;
                case "table":
                    this.writer = new TextOutputWriter(writerConfiguration, Table.PrinterType.JANSI);
                    break;
                case "text":
                default:
                    this.writer = new TextOutputWriter(writerConfiguration);
                    break;
            }

            // CliSession cliSession = loadCliSessionFile();
            logger.debug("sessionFile = " + cliSession);

            if (StringUtils.isNotEmpty(options.token)) {
                // Ignore session file. Overwrite with command line information (just sessionId)
                cliSession = new CliSession(clientConfiguration.getRest().getHost(), null, options.token, null);
                token = options.token;
                userId = null;
                openCGAClient = new OpenCGAClient(new AuthenticationResponse(options.token), clientConfiguration);
            } else if (cliSession != null) {
                // 'logout' field is only null or empty while no logout is executed
                if (StringUtils.isNotEmpty(cliSession.getToken())) {
                    // no timeout checks
                    if (skipDuration) {
                        openCGAClient = new OpenCGAClient(new AuthenticationResponse(cliSession.getToken(), cliSession.getRefreshToken()),
                                clientConfiguration);
                        openCGAClient.setUserId(cliSession.getUser());
                        if (options.token == null) {
                            options.token = cliSession.getToken();
                        }
                    } else {
                        // Get the expiration of the token stored in the session file:
                        // decode the JWT payload (second dot-separated segment) and
                        // read its "exp" claim.
                        String myClaims = StringUtils.split(cliSession.getToken(), ".")[1];
                        String decodedClaimsString = new String(Base64.getDecoder().decode(myClaims), StandardCharsets.UTF_8);
                        ObjectMap claimsMap = new ObjectMapper().readValue(decodedClaimsString, ObjectMap.class);

                        Date expirationDate = new Date(claimsMap.getLong("exp") * 1000L);
                        Date currentDate = new Date();

                        // Tokens without an "exp" claim are treated as non-expiring.
                        if (currentDate.before(expirationDate) || !claimsMap.containsKey("exp")) {
                            logger.debug("Session ok!!");
                            // this.sessionId = cliSession.getSessionId();
                            openCGAClient = new OpenCGAClient(new AuthenticationResponse(cliSession.getToken(),
                                    cliSession.getRefreshToken()), clientConfiguration);
                            openCGAClient.setUserId(cliSession.getUser());

                            // Update token: refresh it and persist the new pair in
                            // the session file when auto-refresh is enabled.
                            if (clientConfiguration.getRest().isTokenAutoRefresh() && claimsMap.containsKey("exp")) {
                                AuthenticationResponse refreshResponse = openCGAClient.refresh();
                                cliSession.setToken(refreshResponse.getToken());
                                cliSession.setRefreshToken(refreshResponse.getRefreshToken());
                                updateCliSessionFile();
                            }

                            if (options.token == null) {
                                options.token = cliSession.getToken();
                            }
                        } else {
                            // Expired token: abort the whole CLI invocation.
                            String message = "ERROR: Your session has expired. Please, either login again or logout to work as "
                                    + "anonymous.";
                            System.err.println(ANSI_RED + message + ANSI_RESET);
                            System.exit(1);
                        }
                    }
                } else {
                    // Session file exists but the user logged out: anonymous client.
                    logger.debug("Session already closed");
                    openCGAClient = new OpenCGAClient(clientConfiguration);
                }
            } else {
                // No token and no session file: anonymous client.
                logger.debug("No Session file");
                openCGAClient = new OpenCGAClient(clientConfiguration);
            }
        } catch (ClientException | IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Parses the JSON file at the given path into an ObjectMap.
     *
     * @param filePath path of the JSON file to read
     * @return parsed content
     * @throws CatalogException if the file is missing or not valid JSON
     */
    protected ObjectMap loadFile(String filePath) throws CatalogException {
        return loadFile(filePath, ObjectMap.class);
    }

    /**
     * Parses the JSON file at the given path into an instance of {@code clazz}.
     *
     * @param filePath path of the JSON file to read
     * @param clazz    target type for deserialisation
     * @return parsed content
     * @throws CatalogException if the file is missing, unreadable or not valid JSON
     */
    protected <T> T loadFile(String filePath, Class<T> clazz) throws CatalogException {
        File file = Paths.get(filePath).toFile();
        if (!file.exists() || file.isDirectory()) {
            throw new CatalogException("File " + filePath + " not found");
        }
        FileInputStream fileInputStream;
        try {
            fileInputStream = FileUtils.openInputStream(file);
        } catch (IOException e) {
            throw new CatalogException("Could not open file " + filePath + ". " + e.getMessage(), e);
        }
        ObjectMapper objectMapper = JacksonUtils.getUpdateObjectMapper();
        try {
            return objectMapper.readValue(fileInputStream, clazz);
        } catch (IOException e) {
            throw new CatalogException("Could not parse file " + filePath + ". Is it a valid JSON file?. "
                    + e.getMessage(), e);
        }
    }

    /**
     * Normalises an id argument: if {@code ids} is the path of an existing
     * file, its non-empty lines (one id per line) are joined with commas;
     * otherwise {@code ids} is returned unchanged.
     *
     * @param ids comma-separated ids, or path to a file with one id per line
     * @return comma-separated ids, or null when {@code ids} is empty
     * @throws CatalogException if the file cannot be read
     */
    protected String extractIdsFromListOrFile(String ids) throws CatalogException {
        if (StringUtils.isEmpty(ids)) {
            return null;
        }

        File file = new File(ids);
        if (file.exists() && file.isFile()) {
            // Read the file
            try(BufferedReader br = new BufferedReader(new FileReader(ids))) {
                StringBuilder sb = new StringBuilder();
                String line = br.readLine();
                boolean isNotFirstLine = false;
                while (line != null) {
                    if (StringUtils.isNotEmpty(line)) {
                        if (isNotFirstLine) {
                            sb.append(",");
                        } else {
                            isNotFirstLine = true;
                        }
                        sb.append(line);
                    }
                    line = br.readLine();
                }
                return sb.toString();
            } catch (IOException e) {
                throw new CatalogException("File could not be parsed. Does it contain a line per id?");
            }
        } else {
            return ids;
        }
    }

    /** Writes the query response using the configured output writer. */
    public void createOutput(RestResponse queryResponse) {
        if (queryResponse != null) {
            writer.print(queryResponse);
        }
    }

    public ObjectMap getCommonParams(String study) {
        return getCommonParams(null, study, new HashMap<>());
    }

    public ObjectMap getCommonParams(String study, Map<String, String> initialParams) {
        return getCommonParams(null, study, initialParams);
    }

    /**
     * Builds the common REST parameter map, adding project/study only when
     * they are non-empty.
     */
    public ObjectMap getCommonParams(String project, String study, Map<String, String> initialParams) {
        ObjectMap params = new ObjectMap(initialParams);
        params.putIfNotEmpty(ParamConstants.PROJECT_PARAM, project);
        params.putIfNotEmpty(ParamConstants.STUDY_PARAM, study);
        return params;
    }

    /** Adds job id/description/dependencies/tags from the CLI job options. */
    public ObjectMap addJobParams(GeneralCliOptions.JobOptions jobOptions, ObjectMap params) {
        params.putIfNotEmpty(ParamConstants.JOB_ID, jobOptions.jobId);
        params.putIfNotEmpty(ParamConstants.JOB_DESCRIPTION, jobOptions.jobDescription);
        if (jobOptions.jobDependsOn != null) {
            params.put(ParamConstants.JOB_DEPENDS_ON, String.join(",", jobOptions.jobDependsOn));
        }
        if (jobOptions.jobTags != null) {
            params.put(ParamConstants.JOB_TAGS, String.join(",", jobOptions.jobTags));
        }
        return params;
    }

    /** Adds limit/skip/count pagination options when they were provided. */
    public ObjectMap addNumericParams(GeneralCliOptions.NumericOptions numericOptions, ObjectMap params) {
        if (numericOptions.limit > 0) {
            params.put(QueryOptions.LIMIT, numericOptions.limit);
        }
        if (numericOptions.skip > 0) {
            params.put(QueryOptions.SKIP, numericOptions.skip);
        }
        if (numericOptions.count) {
            params.put(QueryOptions.COUNT, numericOptions.count);
        }
        return params;
    }
}
| apache-2.0 |
tcurdt/drift | src/test/java/org/vafer/drift/ant/GenerateAntTestCase.java | 2223 | /*
* Copyright 2008 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vafer.drift.ant;
import java.io.File;
import java.net.URL;
import junit.framework.TestCase;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.ProjectHelper;
public final class GenerateAntTestCase extends TestCase {

    private Project project;

    /**
     * Loads and parses the build.xml shipped next to this test class so the
     * individual tests can execute its targets.
     */
    protected void setUp() throws Exception {
        project = new Project();
        project.setCoreLoader(getClass().getClassLoader());
        project.init();
        final URL url = getClass().getResource("build.xml");
        assertNotNull(url);
        project.setBaseDir(new File(""));
        final ProjectHelper helper = ProjectHelper.getProjectHelper();
        final File buildFile = new File(url.getFile());
        helper.parse(project, buildFile);
    }

    // Asserts that the generator produced a source file at the given path.
    private static void assertGenerated(final String path) {
        assertTrue("Source code not generated", new File(path).exists());
    }

    public void testSimple() {
        project.executeTarget("simple");
        assertGenerated("target/tests/simple/org/vafer/drift/generator/generated/SimpleTest.java");
    }

    public void testMigration() {
        project.executeTarget("migration");
        assertGenerated("target/tests/migration/org/vafer/drift/generator/generated/MigrationTest.java");
    }

    public void testComplex() {
        project.executeTarget("complex");
        assertGenerated("target/tests/complex/org/vafer/drift/generator/generated/CommonEvent.java");
        assertGenerated("target/tests/complex/org/vafer/drift/generator/generated/Person.java");
        assertGenerated("target/tests/complex/org/vafer/drift/generator/generated/ComplexEvent.java");
    }
}
| apache-2.0 |
ProgettoRadis/ArasuiteIta | TICO/src/tico/editor/dialogs/TControllerCellDialog.java | 16171 | /*
* File: TControllerCellDialog.java
* This file is part of Tico, an application to create and perform
* interactive communication boards to be used by people with
* severe motor disabilities.
*
* Authors: Pablo Muñoz
*
* Date: Mar 6, 2006
*
* Company: Universidad de Zaragoza, CPS, DIIS
*
* License:
* This program is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package tico.editor.dialogs;
import java.awt.Color;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Map;
import java.util.Vector;
import javax.swing.BorderFactory;
import javax.swing.ImageIcon;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTabbedPane;
import javax.swing.SwingConstants;
import javax.swing.border.TitledBorder;
import tico.board.TBoardConstants;
import tico.board.components.TComponent;
import tico.components.TAlternativeSoundChooser;
import tico.components.TClickControllerCellActionsPanel;
import tico.components.TFontModelChooser;
import tico.components.TIdTextField;
import tico.components.TTextField;
import tico.configuration.TLanguage;
import tico.editor.TBoardContainer;
import tico.editor.TEditor;
import tico.editor.TFileHandler;
/**
* Dialog to change <code>TControllerCellDialog</code> attributes.
*
 * @author Pablo Muñoz
* @version 1.0 Nov 20, 2006
*/
public class TControllerCellDialog extends TComponentDialog {
private static String DEFAULT_TITLE = TLanguage.getString("TControllerCellDialog.TITLE");
// Tabbed pane which contains all the other cell properties panes
protected JTabbedPane tabbedPane;
// Text properties panel
private JPanel textPropertiesPanel;
private JPanel textFieldPanel;
private TTextField textField;
protected JPanel idFieldPanel;
protected TIdTextField idTextField;
private TFontModelChooser fontModel;
// Actions panel
private JPanel componentActionsPanel;
private TClickControllerCellActionsPanel clickControllerCellActionPanel;
private TAlternativeSoundChooser alternativeSoundChooser;
private TEditor myEditor;
// Action icons
private ImageIcon exitIcon;
private ImageIcon undoIcon;
private ImageIcon undoAllIcon;
private ImageIcon readIcon;
private ImageIcon returnIcon;
private ImageIcon homeIcon;
private ImageIcon stopIcon;
private ImageIcon copyIcon;
String exitFilePath;
String undoFilePath;
String undoAllFilePath;
String readFilePath;
String returnFilePath;
String homeFilePath;
String stopFilePath;
String copyFilePath;
/**
* Creates a new <code>TCellDialog</code> to edit the <code>cell</code>
* properties.
*
* @param boardContainer The <code>boardContainer</code> which contains the
* cell to be edited
* @param cell The <code>cell</code> to be edited
*/
public TControllerCellDialog(TBoardContainer boardContainer, TComponent cell) {
this(boardContainer, DEFAULT_TITLE, cell);
myEditor = boardContainer.getEditor();
}
/**
* Creates a new <code>TCellDialog</code> to edit the <code>cell</code>
* properties.
*
* @param boardContainer The <code>boardContainer</code> which contains the
* cell to be edited
* @param title The <code>title</code> of the dialog
* @param cell The <code>cell</code> to be edited
*/
public TControllerCellDialog(TBoardContainer boardContainer, String title,
TComponent cell) {
super(boardContainer, title, cell);
myEditor = boardContainer.getEditor();
}
// Creates the main dialog pane
protected JPanel setComponentPane(TEditor editor) {
JPanel componentPane = new JPanel();
myEditor = editor;
GridBagConstraints c = new GridBagConstraints();
componentPane.setLayout(new GridBagLayout());
createTabbedPane();
createIdField();
c.fill = GridBagConstraints.HORIZONTAL;
c.insets = new Insets(5, 10, 0, 10);
c.gridx = 0;
c.gridy = 0;
componentPane.add(idFieldPanel, c);
c.fill = GridBagConstraints.BOTH;
c.insets = new Insets(0, 5, 5, 5);
c.gridx = 0;
c.gridy = 1;
componentPane.add(tabbedPane, c);
return componentPane;
}
// Creates the cell id field
protected void createIdField() {
idFieldPanel = new JPanel();
idFieldPanel.setLayout(new FlowLayout(FlowLayout.RIGHT));
idFieldPanel.add(new JLabel(TLanguage.getString("TControllerCellDialog.ID")));
idTextField = new TIdTextField();
idFieldPanel.add(idTextField);
idTextField.setText(TBoardConstants.getId(getAttributeMap()));
}
// Creates the main dialog tabbed pane
protected void createTabbedPane() {
tabbedPane = new JTabbedPane();
// Create properties panels
createTextPropertiesPanel();
createActionsPanel();
// Add properties panels to the tabbed pane
tabbedPane.addTab(TLanguage.getString("TControllerCellDialog.TAB_TEXT"), textPropertiesPanel);
tabbedPane.addTab(TLanguage.getString("TControllerCellDialog.TAB_ACTIONS"),
componentActionsPanel);
}
// Creates the text properties panel for the tabbed pane
private void createTextPropertiesPanel() {
textPropertiesPanel = new JPanel();
GridBagConstraints c = new GridBagConstraints();
textPropertiesPanel.setLayout(new GridBagLayout());
createTextField();
createFontModel();
c.fill = GridBagConstraints.HORIZONTAL;
c.insets = new Insets(5, 10, 0, 10);
c.gridx = 0;
c.gridy = 0;
textPropertiesPanel.add(textFieldPanel, c);
c.fill = GridBagConstraints.HORIZONTAL;
c.insets = new Insets(5, 10, 10, 10);
c.gridx = 0;
c.gridy = 1;
textPropertiesPanel.add(fontModel, c);
}
// Creates the actions panel for the tabbed pane
private void createActionsPanel() {
componentActionsPanel = new JPanel();
GridBagConstraints c = new GridBagConstraints();
componentActionsPanel.setLayout(new GridBagLayout());
createAlternativeSoundChooser();//alternative sound
ArrayList controllerActionsList = new ArrayList();
controllerActionsList.add(TLanguage.getString("TInterpreterExitAction.NAME"));
controllerActionsList.add(TLanguage.getString("TInterpreterUndoAction.NAME"));
controllerActionsList.add(TLanguage.getString("TInterpreterUndoAllAction.NAME"));
controllerActionsList.add(TLanguage.getString("TInterpreterReadAction.NAME"));
controllerActionsList.add(TLanguage.getString("TInterpreterReturnAction.NAME"));
controllerActionsList.add(TLanguage.getString("TInterpreterHomeAction.NAME"));
controllerActionsList.add(TLanguage.getString("TInterpreterStopAction.NAME"));
controllerActionsList.add(TLanguage.getString("TInterpreterCopyAction.NAME"));
//Create the icon paths
String currentDirectory = System.getProperty("user.dir");
exitFilePath = currentDirectory + File.separator + "controller-icons"+ File.separator + "controller-exit.png";
undoFilePath = currentDirectory + File.separator + "controller-icons"+ File.separator + "controller-undo.png";
undoAllFilePath = currentDirectory + File.separator + "controller-icons"+ File.separator + "controller-undo-all.png";
readFilePath = currentDirectory + File.separator + "controller-icons"+ File.separator + "controller-read.png";
returnFilePath = currentDirectory + File.separator + "controller-icons"+ File.separator + "controller-return.png";
homeFilePath = currentDirectory + File.separator + "controller-icons"+ File.separator + "controller-home.png";
stopFilePath = currentDirectory + File.separator + "controller-icons"+ File.separator + "controller-stop.png";
copyFilePath = currentDirectory + File.separator + "controller-icons"+ File.separator + "controller-copy.png";
clickControllerCellActionPanel = new TClickControllerCellActionsPanel(controllerActionsList, textField);
c.fill = GridBagConstraints.HORIZONTAL;
c.insets = new Insets(5, 10, 0, 10);
c.gridx = 0;
c.gridy = 0;
componentActionsPanel.add(clickControllerCellActionPanel, c);
//anyadido alternativesound
c.fill = GridBagConstraints.HORIZONTAL;
c.insets = new Insets(5, 10, 0, 10);
c.gridx = 0;
c.gridy = 1;
componentActionsPanel.add(alternativeSoundChooser, c);
}
// Creates the cell text field
private void createTextField() {
Map map = getAttributeMap();
textFieldPanel = new JPanel();
textFieldPanel.setBorder(new TitledBorder(BorderFactory
.createEtchedBorder(Color.WHITE, new Color(165, 163, 151)),
TLanguage.getString("TControllerCellDialog.TEXT_FILED")));
int actionCode = TBoardConstants.getActionCode(map);
textField = new TTextField(TBoardConstants.getText(map));
if (textField.getText().equals(""))
textField = new TTextField(TLanguage.getString("TControllerCellDialog.TEXT_DEFAULT"));
else{
if (actionCode == TBoardConstants.EXIT_ACTION_CODE){
textField = new TTextField(TLanguage.getString("TInterpreterExitAction.NAME"));
}else if (actionCode == TBoardConstants.UNDO_ACTION_CODE){
textField = new TTextField(TLanguage.getString("TInterpreterUndoAction.NAME"));
}else if (actionCode == TBoardConstants.UNDO_ALL_ACTION_CODE){
textField = new TTextField(TLanguage.getString("TInterpreterUndoAllAction.NAME"));
}else if (actionCode == TBoardConstants.READ_ACTION_CODE){
textField = new TTextField(TLanguage.getString("TInterpreterReadAction.NAME"));
}else if (actionCode == TBoardConstants.RETURN_ACTION_CODE){
textField = new TTextField(TLanguage.getString("TInterpreterReturnAction.NAME"));
}else if (actionCode == TBoardConstants.HOME_ACTION_CODE){
textField = new TTextField(TLanguage.getString("TInterpreterHomeAction.NAME"));
}else if (actionCode == TBoardConstants.STOP_ACTION_CODE){
textField = new TTextField(TLanguage.getString("TInterpreterStopAction.NAME"));
}else if(actionCode == TBoardConstants.COPY_ACTION_CODE){
textField = new TTextField(TLanguage.getString("TInterpreterCopyAction.NAME"));
}
}
textField.setColumns(30);
textField.setEditable(false);
textFieldPanel.add(textField);
}
// Creates the font model selection panel
private void createFontModel() {
Map map = getAttributeMap();
fontModel = new TFontModelChooser(TBoardConstants.getFont(map)
.getName(), TBoardConstants.getForeground(map), TBoardConstants
.getFont(map).getSize(), TBoardConstants.getFont(map)
.getStyle());
}
/* (non-Javadoc)
* @see tico.editor.dialogs.TComponentDialog#newComponentsAttributeMap()
*/
protected Map newComponentsAttributeMap() {
// Create used variables
Map nested = new Hashtable();
Map attributeMap = new Hashtable();
Vector removalAttributes = new Vector();
File f = null;
// Set cell text and format
TBoardConstants.setText(attributeMap, textField.getText());
TBoardConstants.setForeground(attributeMap, fontModel.getFontColor());
TBoardConstants.setFont(attributeMap, new Font(fontModel.getFontFace(),
fontModel.getFontStyle(), fontModel.getFontSize()));
TBoardConstants.setBackground(attributeMap, TBoardConstants.DEFAULT_BACKGROUND);
// Set cell static image and other image properties
if (textField.getText().equals(TLanguage.getString("TInterpreterExitAction.NAME"))){
try {
f = TFileHandler.importFile(exitFilePath);
exitIcon = new ImageIcon(f.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
TBoardConstants.setIcon(attributeMap, exitIcon);
TBoardConstants.setActionCode(attributeMap, TBoardConstants.EXIT_ACTION_CODE);
}
else if (textField.getText().equals(TLanguage.getString("TInterpreterUndoAction.NAME"))){
try {
f = TFileHandler.importFile(undoFilePath);
undoIcon = new ImageIcon(f.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
TBoardConstants.setIcon(attributeMap, undoIcon);
TBoardConstants.setActionCode(attributeMap, TBoardConstants.UNDO_ACTION_CODE);
}
else if (textField.getText().equals(TLanguage.getString("TInterpreterUndoAllAction.NAME"))){
try {
f = TFileHandler.importFile(undoAllFilePath);
undoAllIcon = new ImageIcon(f.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
TBoardConstants.setIcon(attributeMap, undoAllIcon);
TBoardConstants.setActionCode(attributeMap, TBoardConstants.UNDO_ALL_ACTION_CODE);
}
else if (textField.getText().equals(TLanguage.getString("TInterpreterReadAction.NAME"))){
try {
f = TFileHandler.importFile(readFilePath);
readIcon = new ImageIcon(f.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
TBoardConstants.setIcon(attributeMap, readIcon);
TBoardConstants.setActionCode(attributeMap, TBoardConstants.READ_ACTION_CODE);
}
else if (textField.getText().equals(TLanguage.getString("TInterpreterReturnAction.NAME"))){
try {
f = TFileHandler.importFile(returnFilePath);
returnIcon = new ImageIcon(f.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
TBoardConstants.setIcon(attributeMap, returnIcon);
TBoardConstants.setActionCode(attributeMap, TBoardConstants.RETURN_ACTION_CODE);
}
else if (textField.getText().equals(TLanguage.getString("TInterpreterHomeAction.NAME"))){
try {
f = TFileHandler.importFile(homeFilePath);
homeIcon = new ImageIcon(f.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
TBoardConstants.setIcon(attributeMap, homeIcon);
TBoardConstants.setActionCode(attributeMap, TBoardConstants.HOME_ACTION_CODE);
}
else if (textField.getText().equals(TLanguage.getString("TInterpreterStopAction.NAME"))){
try {
f = TFileHandler.importFile(stopFilePath);
stopIcon = new ImageIcon(f.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
TBoardConstants.setIcon(attributeMap, stopIcon);
TBoardConstants.setActionCode(attributeMap, TBoardConstants.STOP_ACTION_CODE);
}
else if (textField.getText().equals(TLanguage.getString("TInterpreterCopyAction.NAME"))){
try {
f = TFileHandler.importFile(copyFilePath);
copyIcon = new ImageIcon(f.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
TBoardConstants.setIcon(attributeMap, copyIcon);
TBoardConstants.setActionCode(attributeMap, TBoardConstants.COPY_ACTION_CODE);
}
else {
try {
f = TFileHandler.importFile(exitFilePath);
exitIcon = new ImageIcon(f.getAbsolutePath());
} catch (IOException e) {
e.printStackTrace();
}
TBoardConstants.setIcon(attributeMap, exitIcon);
}
TBoardConstants.setVerticalTextPosition(attributeMap, SwingConstants.BOTTOM);
// Apply removal attributes
TBoardConstants.setRemoveAttributes(attributeMap, removalAttributes.toArray());
// Set cell id
TBoardConstants.setId(attributeMap, idTextField.getText());
// Set cell alternative sound file
String alternativeSoundFile = alternativeSoundChooser.getSoundFilePath();
if (alternativeSoundFile != null)
TBoardConstants.setAlternativeSoundFile(attributeMap, alternativeSoundFile);
else
removalAttributes.add(TBoardConstants.SOUND_FILE);
nested.put(getComponent(), attributeMap);
return nested;
}
// Creates the alternative sound chooser panel
private void createAlternativeSoundChooser() {
Map map = getAttributeMap();
alternativeSoundChooser = new TAlternativeSoundChooser();
alternativeSoundChooser.setSoundFilePath(TBoardConstants.getAlternativeSoundFile(map));
}
} | apache-2.0 |
IHTSDO/snow-owl | commons/com.b2international.commons.base/src/com/b2international/commons/collections/UuidLongMap.java | 6056 | /*
* Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.commons.collections;
import static com.google.common.base.Preconditions.checkNotNull;
import java.text.MessageFormat;
import java.util.UUID;
import com.google.common.primitives.Longs;
/**
* Represents a map with {@link UUID}s as keys and values of primitive,
* non-negative longs. UUIDs are represented internally as a pair of primitive
* longs as well. Both the MSB and the LSB long parts of the UUID are assumed to
* be non-zero for all values.
*
* @see UUID#variant()
* @see UUID#version()
*
*/
public class UuidLongMap implements Cloneable {
private static final float GROWTH_FACTOR = 2.0f;
private static final float OVERSIZE_FACTOR = 1.3f;
private static final int DEFAULT_EXPECTED_SIZE = 32;
private static final long EMPTY = 0L;
private static int getCapacity(final int expectedSize) {
return (int) (expectedSize * OVERSIZE_FACTOR);
}
private int size;
private long[] lsbKeys;
private long[] msbKeys;
private long[] values;
public UuidLongMap() {
this(DEFAULT_EXPECTED_SIZE);
}
public UuidLongMap(final int expectedSize) {
init(expectedSize);
}
public UuidLongMap(final UuidLongMap oldMap) {
this(oldMap, DEFAULT_EXPECTED_SIZE);
}
public UuidLongMap(final UuidLongMap oldMap, final int expectedSize) {
init(Math.max(oldMap.size(), expectedSize));
for (int i = 0; i < oldMap.lsbKeys.length; i++) {
if (oldMap.lsbKeys[i] != EMPTY) {
put(oldMap.lsbKeys[i], oldMap.msbKeys[i], oldMap.values[i]);
}
}
}
private void init(final int expectedSize) {
size = 0;
lsbKeys = new long[getCapacity(expectedSize)];
msbKeys = new long[getCapacity(expectedSize)];
values = new long[getCapacity(expectedSize)];
}
private void resize() {
// Clone this map and get all fields
final UuidLongMap resized = new UuidLongMap(this, (int) (this.size * GROWTH_FACTOR));
lsbKeys = resized.lsbKeys;
msbKeys = resized.msbKeys;
values = resized.values;
size = resized.size;
}
public long put(final UUID key, final long value) {
return put(checkNotNull(key, "key").getLeastSignificantBits(), key.getMostSignificantBits(), value);
}
private long put(final long lsbKey, final long msbKey, final long value) {
if (lsbKey == 0) {
throw new IllegalArgumentException("lsbKey must be != 0");
}
if (msbKey == 0) {
throw new IllegalArgumentException("msbKey must be != 0");
}
if (value < 0) {
throw new IllegalArgumentException(MessageFormat.format("Value must be >= 0, was ''{0}''", value));
}
// ensure table is big enough. this will guarantee empty slots
if (size > lsbKeys.length / OVERSIZE_FACTOR) {
resize();
}
final int hash = hash(lsbKey, msbKey);
final int bucketIndex = indexFor(hash, lsbKeys.length);
int i = 0;
for (i = bucketIndex; lsbKeys[i] != EMPTY && msbKeys[i] != EMPTY; i = (i + 1) % lsbKeys.length) {
if (lsbKey == lsbKeys[i] && msbKey == msbKeys[i]) {
final long oldValue = values[i];
values[i] = value;
return oldValue;
}
}
lsbKeys[i] = lsbKey;
msbKeys[i] = msbKey;
values[i] = value;
size++;
return -1;
}
public long get(final UUID key) {
return get(checkNotNull(key, "key").getLeastSignificantBits(), key.getMostSignificantBits());
}
private long get(final long lsbKey, long msbKey) {
final int hash = hash(lsbKey, msbKey);
final int bucketIndex = indexFor(hash, lsbKeys.length);
for (int i = bucketIndex; lsbKeys[i] != EMPTY && msbKeys[i] != EMPTY; i = (i + 1) % lsbKeys.length) {
if (lsbKey == lsbKeys[i] && msbKey == msbKeys[i]) {
return values[i];
}
}
return -1;
}
public long remove(final UUID key) {
return remove(checkNotNull(key, "key").getLeastSignificantBits(), key.getMostSignificantBits());
}
private long remove(final long lsbKey, long msbKey) {
if (get(lsbKey, msbKey) < 0) {
return -1;
}
final int hash = hash(lsbKey, msbKey);
int i = indexFor(hash, lsbKeys.length);
while (lsbKey != lsbKeys[i] && msbKey != msbKeys[i]) {
i = (i + 1) % lsbKeys.length;
}
lsbKeys[i] = EMPTY;
msbKeys[i] = EMPTY;
final long result = values[i];
for (i = (i + 1) % lsbKeys.length; lsbKeys[i] != EMPTY && msbKeys[i] != EMPTY; i = (i + 1) % lsbKeys.length) {
long tmpLsbKey = lsbKeys[i];
long tmpMsbKey = msbKeys[i];
lsbKeys[i] = EMPTY;
msbKeys[i] = EMPTY;
size--;
put(tmpLsbKey, tmpMsbKey, values[i]);
}
size--;
return result;
}
private int hash(final long lsbKey, final long msbKey) {
int h = 31 * Longs.hashCode(lsbKey) + Longs.hashCode(msbKey);
h ^= (h >>> 20) ^ (h >>> 12);
return h ^ (h >>> 7) ^ (h >>> 4);
}
private int indexFor(final int h, final int length) {
return Math.abs(h % length);
}
public int size() {
return size;
}
public String toString() {
final StringBuilder buf = new StringBuilder();
buf.append("{");
boolean first = true;
for (int i = 0; i < lsbKeys.length; i++) {
if (lsbKeys[i] != EMPTY && msbKeys[i] != EMPTY) {
if (first) {
first = false;
} else {
buf.append(", ");
}
buf.append(new UUID(msbKeys[i], lsbKeys[i]));
buf.append("=");
buf.append(values[i]);
}
}
buf.append("}");
return buf.toString();
}
} | apache-2.0 |
punkhorn/camel-upstream | components/camel-spring/src/test/java/org/apache/camel/spring/processor/SpringComplexBlockWithEndTest.java | 3082 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.spring.processor;
import org.apache.camel.AggregationStrategy;
import org.apache.camel.CamelContext;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.junit.Test;
import static org.apache.camel.spring.processor.SpringTestHelper.createSpringCamelContext;
public class SpringComplexBlockWithEndTest extends ContextTestSupport {
@Test
public void testHello() throws Exception {
getMockEndpoint("mock:hello").expectedMessageCount(1);
getMockEndpoint("mock:bye").expectedMessageCount(0);
getMockEndpoint("mock:otherwise").expectedMessageCount(0);
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Hello World");
assertMockEndpointsSatisfied();
}
@Test
public void testBye() throws Exception {
getMockEndpoint("mock:hello").expectedMessageCount(0);
getMockEndpoint("mock:bye").expectedMessageCount(1);
getMockEndpoint("mock:otherwise").expectedMessageCount(0);
getMockEndpoint("mock:result").expectedMessageCount(1);
template.sendBody("direct:start", "Bye World");
assertMockEndpointsSatisfied();
}
@Test
public void testOther() throws Exception {
getMockEndpoint("mock:hello").expectedMessageCount(0);
getMockEndpoint("mock:bye").expectedMessageCount(0);
getMockEndpoint("mock:otherwise").expectedMessageCount(1);
getMockEndpoint("mock:trapped").expectedMessageCount(1);
getMockEndpoint("mock:result").expectedBodiesReceived("Cowboys");
getMockEndpoint("mock:split").expectedBodiesReceived("Hi The good", "Hi The ugly");
template.sendBody("direct:start", "The good,The bad,The ugly");
assertMockEndpointsSatisfied();
}
protected CamelContext createCamelContext() throws Exception {
return createSpringCamelContext(this, "org/apache/camel/spring/processor/SpringComplexBlockWithEndTest.xml");
}
public static class SplitAggregate implements AggregationStrategy {
public Exchange aggregate(Exchange oldExchange, Exchange newExchange) {
newExchange.getOut().setBody("Cowboys");
return newExchange;
}
}
}
| apache-2.0 |
yandong3389/money2 | src/main/java/d/money/pojo/base/User.java | 8443 | package d.money.pojo.base;
import java.util.Date;
import java.util.Objects;
public class User {
private Integer id;
private String password;
private String name;
private String identityCard;
private String jsrId;
private String jdrId;
private String nhCard;
private String tel;
private String address;
private String code;
private String sex;
private Date hkTime;
private String adminCode;
private String comment;
private Date zcTime;
private String approveFlag;
private Date approveDate;
private Integer userMoney;
private String proxyFlag;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password == null ? null : password.trim();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name == null ? null : name.trim();
}
public String getIdentityCard() {
return identityCard;
}
public void setIdentityCard(String identityCard) {
this.identityCard = identityCard == null ? null : identityCard.trim();
}
public String getJsrId() {
return jsrId;
}
public void setJsrId(String jsrId) {
this.jsrId = jsrId == null ? null : jsrId.trim();
}
public String getJdrId() {
return jdrId;
}
public void setJdrId(String jdrId) {
this.jdrId = jdrId == null ? null : jdrId.trim();
}
public String getNhCard() {
return nhCard;
}
public void setNhCard(String nhCard) {
this.nhCard = nhCard == null ? null : nhCard.trim();
}
public String getTel() {
return tel;
}
public void setTel(String tel) {
this.tel = tel == null ? null : tel.trim();
}
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address == null ? null : address.trim();
}
public String getCode() {
return code;
}
public void setCode(String code) {
this.code = code == null ? null : code.trim();
}
public String getSex() {
return sex;
}
public void setSex(String sex) {
this.sex = sex == null ? null : sex.trim();
}
public Date getHkTime() {
return hkTime;
}
public void setHkTime(Date hkTime) {
this.hkTime = hkTime;
}
public String getAdminCode() {
return adminCode;
}
public void setAdminCode(String adminCode) {
this.adminCode = adminCode == null ? null : adminCode.trim();
}
public String getComment() {
return comment;
}
public void setComment(String comment) {
this.comment = comment == null ? null : comment.trim();
}
public Date getZcTime() {
return zcTime;
}
public void setZcTime(Date zcTime) {
this.zcTime = zcTime;
}
public String getApproveFlag() {
return approveFlag;
}
public void setApproveFlag(String approveFlag) {
this.approveFlag = approveFlag == null ? null : approveFlag.trim();
}
public Date getApproveDate() {
return approveDate;
}
public void setApproveDate(Date approveDate) {
this.approveDate = approveDate;
}
public Integer getUserMoney() {
return userMoney;
}
public void setUserMoney(Integer userMoney) {
this.userMoney = userMoney;
}
public String getProxyFlag() {
return proxyFlag;
}
public void setProxyFlag(String proxyFlag) {
this.proxyFlag = proxyFlag == null ? null : proxyFlag.trim();
}
@Override
public boolean equals(Object that) {
if (this == that) {
return true;
}
if (that == null) {
return false;
}
if (getClass() != that.getClass()) {
return false;
}
User other = (User) that;
return (this.getId() == null ? other.getId() == null : this.getId().equals(other.getId()))
&& (this.getPassword() == null ? other.getPassword() == null : this.getPassword().equals(other.getPassword()))
&& (this.getName() == null ? other.getName() == null : this.getName().equals(other.getName()))
&& (this.getIdentityCard() == null ? other.getIdentityCard() == null : this.getIdentityCard().equals(other.getIdentityCard()))
&& (this.getJsrId() == null ? other.getJsrId() == null : this.getJsrId().equals(other.getJsrId()))
&& (this.getJdrId() == null ? other.getJdrId() == null : this.getJdrId().equals(other.getJdrId()))
&& (this.getNhCard() == null ? other.getNhCard() == null : this.getNhCard().equals(other.getNhCard()))
&& (this.getTel() == null ? other.getTel() == null : this.getTel().equals(other.getTel()))
&& (this.getAddress() == null ? other.getAddress() == null : this.getAddress().equals(other.getAddress()))
&& (this.getCode() == null ? other.getCode() == null : this.getCode().equals(other.getCode()))
&& (this.getSex() == null ? other.getSex() == null : this.getSex().equals(other.getSex()))
&& (this.getHkTime() == null ? other.getHkTime() == null : this.getHkTime().equals(other.getHkTime()))
&& (this.getAdminCode() == null ? other.getAdminCode() == null : this.getAdminCode().equals(other.getAdminCode()))
&& (this.getComment() == null ? other.getComment() == null : this.getComment().equals(other.getComment()))
&& (this.getZcTime() == null ? other.getZcTime() == null : this.getZcTime().equals(other.getZcTime()))
&& (this.getApproveFlag() == null ? other.getApproveFlag() == null : this.getApproveFlag().equals(other.getApproveFlag()))
&& (this.getApproveDate() == null ? other.getApproveDate() == null : this.getApproveDate().equals(other.getApproveDate()))
&& (this.getUserMoney() == null ? other.getUserMoney() == null : this.getUserMoney().equals(other.getUserMoney()))
&& (this.getProxyFlag() == null ? other.getProxyFlag() == null : this.getProxyFlag().equals(other.getProxyFlag()));
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((getId() == null) ? 0 : getId().hashCode());
result = prime * result + ((getPassword() == null) ? 0 : getPassword().hashCode());
result = prime * result + ((getName() == null) ? 0 : getName().hashCode());
result = prime * result + ((getIdentityCard() == null) ? 0 : getIdentityCard().hashCode());
result = prime * result + ((getJsrId() == null) ? 0 : getJsrId().hashCode());
result = prime * result + ((getJdrId() == null) ? 0 : getJdrId().hashCode());
result = prime * result + ((getNhCard() == null) ? 0 : getNhCard().hashCode());
result = prime * result + ((getTel() == null) ? 0 : getTel().hashCode());
result = prime * result + ((getAddress() == null) ? 0 : getAddress().hashCode());
result = prime * result + ((getCode() == null) ? 0 : getCode().hashCode());
result = prime * result + ((getSex() == null) ? 0 : getSex().hashCode());
result = prime * result + ((getHkTime() == null) ? 0 : getHkTime().hashCode());
result = prime * result + ((getAdminCode() == null) ? 0 : getAdminCode().hashCode());
result = prime * result + ((getComment() == null) ? 0 : getComment().hashCode());
result = prime * result + ((getZcTime() == null) ? 0 : getZcTime().hashCode());
result = prime * result + ((getApproveFlag() == null) ? 0 : getApproveFlag().hashCode());
result = prime * result + ((getApproveDate() == null) ? 0 : getApproveDate().hashCode());
result = prime * result + ((getUserMoney() == null) ? 0 : getUserMoney().hashCode());
result = prime * result + ((getProxyFlag() == null) ? 0 : getProxyFlag().hashCode());
return result;
}
} | apache-2.0 |
hazendaz/assertj-core | src/test/java/org/assertj/core/api/character/CharacterAssert_isEqualTo_char_Test.java | 1221 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2021 the original author or authors.
*/
package org.assertj.core.api.character;
import org.assertj.core.api.CharacterAssert;
import org.assertj.core.api.CharacterAssertBaseTest;
import static org.mockito.Mockito.verify;
/**
* Tests for <code>{@link CharacterAssert#isEqualTo(char)}</code>.
*
* @author Alex Ruiz
*/
class CharacterAssert_isEqualTo_char_Test extends CharacterAssertBaseTest {

  // Character used both when invoking the API and when checking the delegated call.
  private static final char EXPECTED = 'b';

  @Override
  protected CharacterAssert invoke_api_method() {
    // Exercise the fluent assertion under test.
    CharacterAssert returned = assertions.isEqualTo(EXPECTED);
    return returned;
  }

  @Override
  protected void verify_internal_effects() {
    // The call above must have been forwarded to the internal Characters object.
    verify(characters).assertEqual(getInfo(assertions), getActual(assertions), EXPECTED);
  }
}
| apache-2.0 |
fishercoder1534/Leetcode | src/main/java/com/fishercoder/solutions/_717.java | 445 | package com.fishercoder.solutions;
public class _717 {
    public static class Solution1 {
        /**
         * Decides whether the final bit of the array must be decoded as a
         * one-bit character ("0"). The encoding uses a single "0" for the
         * one-bit character and the pairs "10"/"11" for two-bit characters,
         * so a set bit always consumes its successor as well.
         *
         * @param bits the encoded bit sequence (per problem statement it ends with 0)
         * @return true if the last bit stands alone as a one-bit character
         */
        public boolean isOneBitCharacter(int[] bits) {
            int n = bits.length;
            int i = 0;
            // Walk the encoding greedily, stopping before the last bit:
            // a 0 is one character, a leading 1 starts a two-bit character.
            while (i < n - 1) {
                if (bits[i] == 0) {
                    i++;
                } else {
                    i += 2;
                }
            }
            // We land exactly on the last index iff that bit is a standalone "0".
            return i == n - 1;
        }
    }
}
| apache-2.0 |
bingyuf2012/textAnalysis | src/main/java/text/thu/keg/smartsearch/jgibbetm/ETM.java | 2314 | /*
* Copyright (C) 2007 by
*
* Xuan-Hieu Phan
* hieuxuan@ecei.tohoku.ac.jp or pxhieu@gmail.com
* Graduate School of Information Sciences
* Tohoku University
*
* Cam-Tu Nguyen
* ncamtu@gmail.com
* College of Technology
* Vietnam National University, Hanoi
*
* JGibbsETM is a free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published
* by the Free Software Foundation; either version 2 of the License,
* or (at your option) any later version.
*
* JGibbsETM is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JGibbsETM; if not, write to the Free Software Foundation,
* Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
*/
package text.thu.keg.smartsearch.jgibbetm;
import org.kohsuke.args4j.CmdLineException;
import org.kohsuke.args4j.CmdLineParser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import text.analysis.utils.CommonUtil;
import text.analysis.utils.ConstantUtil;
@Component
public class ETM {

	@Autowired
	CommonUtil commonUtil;

	/**
	 * Runs ETM (entity topic model) parameter estimation over a tokenized corpus.
	 *
	 * @param filetokename name of the tokenized input file inside {@code outputdir}
	 * @param outputdir working directory holding model input and output
	 * @param docNum number of documents; used to size topic counts via {@code commonUtil}
	 * @return the estimated {@link Model}
	 * @throws IllegalStateException if the internally built argument list cannot be
	 *         parsed — this indicates a programming error, not bad user input
	 */
	public Model est(String filetokename, String outputdir, int docNum) {
		Timer timer = new Timer("ETM");
		ETMCmdOption option = new ETMCmdOption();
		CmdLineParser parser = new CmdLineParser(option);
		try {
			parser.parseArgument(new String[] { "-est", "-beta1", "0.1", "-beta2", "0.01", "-ntopics",
					"" + commonUtil.getTopicNum(docNum), "-netopics", "" + commonUtil.getTopicNum(docNum), "-twords",
					"" + commonUtil.getTopwords(), "-tentities", "" + commonUtil.getTop100Entity(), "-dir", outputdir,
					"-dfile", filetokename });
		} catch (CmdLineException e) {
			// Previously the exception was only printed and estimation continued
			// with a half-parsed option object; fail fast instead, keeping the cause.
			throw new IllegalStateException("Unable to parse ETM estimation arguments", e);
		}
		/**
		 * Example inference invocation for reference:
		 * -inf -alpha 10 -beta1 0.1 -beta2 0.1 -gamma 10 -ntopics 5 -netopics
		 * 10 -twords 20 -tentities 20 -dir .\test2\ -dfile t2.txt
		 */
		option.niters = ConstantUtil.ITERATOR_NUM;
		timer.start();
		Estimator estimator = new Estimator();
		estimator.init(option);
		timer.getTime();
		return estimator.estimate();
	}
}
| apache-2.0 |
mpollmeier/tinkerpop3 | gremlin-driver/src/main/java/com/tinkerpop/gremlin/driver/ser/KryoMessageSerializerV1d0.java | 12830 | package com.tinkerpop.gremlin.driver.ser;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import com.tinkerpop.gremlin.driver.MessageSerializer;
import com.tinkerpop.gremlin.driver.message.RequestMessage;
import com.tinkerpop.gremlin.driver.message.ResponseMessage;
import com.tinkerpop.gremlin.driver.message.ResponseStatusCode;
import com.tinkerpop.gremlin.structure.io.kryo.GremlinKryo;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.util.ReferenceCountUtil;
import org.javatuples.Pair;
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
public class KryoMessageSerializerV1d0 implements MessageSerializer {
    private GremlinKryo gremlinKryo;

    // Kryo instances are not thread-safe; each thread lazily builds its own
    // from the currently configured GremlinKryo.
    private ThreadLocal<Kryo> kryoThreadLocal = new ThreadLocal<Kryo>() {
        @Override
        protected Kryo initialValue() {
            return gremlinKryo.createKryo();
        }
    };

    private static final Charset UTF8 = Charset.forName("UTF-8");

    private static final String MIME_TYPE = SerTokens.MIME_KRYO_V1D0;
    private static final String MIME_TYPE_STRINGD = SerTokens.MIME_KRYO_V1D0 + "-stringd";

    // Keys recognized in the configuration map passed to configure(Map).
    private static final String TOKEN_EXTENDED_VERSION = "extendedVersion";
    private static final String TOKEN_CUSTOM = "custom";
    private static final String TOKEN_SERIALIZE_RESULT_TO_STRING = "serializeResultToString";

    // When true, result data is converted via toString() before serialization.
    private boolean serializeToString;

    /**
     * Creates an instance with a standard {@link GremlinKryo} instance. Note that this instance
     * will be overridden if {@link #configure} is called.
     */
    public KryoMessageSerializerV1d0() {
        gremlinKryo = GremlinKryo.build(GremlinKryo.Version.V_1_0_0).create();
    }

    /**
     * Creates an instance with a provided custom configured {@link GremlinKryo} instance. Note that this instance
     * will be overridden if {@link #configure} is called.
     */
    public KryoMessageSerializerV1d0(final GremlinKryo kryo) {
        this.gremlinKryo = kryo;
    }

    /**
     * Rebuilds the internal {@link GremlinKryo} from the supplied configuration. Recognized keys:
     * "extendedVersion" (byte), "serializeResultToString" (boolean) and "custom" — a list of
     * entries of the form {@code <class>} or {@code <class>;<serializer-class>}.
     */
    @Override
    public void configure(final Map<String, Object> config) {
        final byte extendedVersion;
        try {
            extendedVersion = Byte.parseByte(config.getOrDefault(TOKEN_EXTENDED_VERSION, GremlinKryo.DEFAULT_EXTENDED_VERSION).toString());
        } catch (Exception ex) {
            throw new IllegalStateException(String.format("Invalid configuration value of [%s] for [%s] setting on %s serialization configuration",
                    config.getOrDefault(TOKEN_EXTENDED_VERSION, ""), TOKEN_EXTENDED_VERSION, this.getClass().getName()), ex);
        }

        final GremlinKryo.Builder builder = GremlinKryo.build(GremlinKryo.Version.V_1_0_0).extendedVersion(extendedVersion);

        final List<String> classNameList;
        try {
            classNameList = (List<String>) config.getOrDefault(TOKEN_CUSTOM, new ArrayList<String>());
        } catch (Exception ex) {
            throw new IllegalStateException(String.format("Invalid configuration value of [%s] for [%s] setting on %s serialization configuration",
                    config.getOrDefault(TOKEN_CUSTOM, ""), TOKEN_CUSTOM, this.getClass().getName()), ex);
        }

        if (!classNameList.isEmpty()) {
            final List<Pair<Class, Function<Kryo,Serializer>>> classList = classNameList.stream().map(serializerDefinition -> {
                String className;
                Optional<String> serializerName;
                if (serializerDefinition.contains(";")) {
                    final String[] split = serializerDefinition.split(";");
                    if (split.length != 2)
                        // the separator is ";" — keep the message consistent with the parsing above
                        throw new IllegalStateException(String.format("Invalid format for serializer definition [%s] - expected <class>;<serializer-class>", serializerDefinition));

                    className = split[0];
                    serializerName = Optional.of(split[1]);
                } else {
                    serializerName = Optional.empty();
                    className = serializerDefinition;
                }

                try {
                    final Class clazz = Class.forName(className);
                    final Serializer serializer;
                    if (serializerName.isPresent()) {
                        final Class serializerClazz = Class.forName(serializerName.get());
                        serializer = (Serializer) serializerClazz.newInstance();
                    } else
                        serializer = null;  // null serializer means "let Kryo pick a default"

                    return Pair.<Class, Function<Kryo,Serializer>>with(clazz, kryo -> serializer);
                } catch (Exception ex) {
                    throw new IllegalStateException("Class could not be found", ex);
                }
            }).collect(Collectors.toList());

            classList.forEach(c -> builder.addCustom(c.getValue0(), c.getValue1()));
        }

        this.serializeToString = Boolean.parseBoolean(config.getOrDefault(TOKEN_SERIALIZE_RESULT_TO_STRING, "false").toString());
        this.gremlinKryo = builder.create();
    }

    @Override
    public String[] mimeTypesSupported() {
        return new String[]{this.serializeToString ? MIME_TYPE_STRINGD : MIME_TYPE};
    }

    /**
     * Deserializes a Kryo-encoded {@link ResponseMessage} from the given buffer.
     * NOTE(review): assumes any mime-type framing has already been stripped by the
     * transport layer — confirm against the driver's channelizer.
     */
    @Override
    public ResponseMessage deserializeResponse(final ByteBuf msg) throws SerializationException {
        try {
            final Kryo kryo = kryoThreadLocal.get();
            final byte[] payload = new byte[msg.readableBytes()];
            msg.readBytes(payload);
            try (final Input input = new Input(payload)) {
                final Map<String, Object> responseData = (Map<String, Object>) kryo.readClassAndObject(input);
                final Map<String, Object> status = (Map<String,Object>) responseData.get(SerTokens.TOKEN_STATUS);
                final Map<String, Object> result = (Map<String,Object>) responseData.get(SerTokens.TOKEN_RESULT);
                return ResponseMessage.build(UUID.fromString(responseData.get(SerTokens.TOKEN_REQUEST).toString()))
                        .code(ResponseStatusCode.getFromValue((Integer) status.get(SerTokens.TOKEN_CODE)))
                        .statusMessage(Optional.ofNullable((String) status.get(SerTokens.TOKEN_MESSAGE)).orElse(""))
                        .statusAttributes((Map<String,Object>) status.get(SerTokens.TOKEN_ATTRIBUTES))
                        .result(result.get(SerTokens.TOKEN_DATA))
                        .responseMetaData((Map<String, Object>) result.get(SerTokens.TOKEN_META))
                        .create();
            }
        } catch (Exception ex) {
            logger.warn("Response [{}] could not be deserialized by {}.", msg, KryoMessageSerializerV1d0.class.getName());
            throw new SerializationException(ex);
        }
    }

    /**
     * Serializes a {@link ResponseMessage} to a Kryo-encoded buffer. Unlike the request
     * path, no mime-type header is prepended. The allocated buffer is released if
     * serialization fails.
     */
    @Override
    public ByteBuf serializeResponseAsBinary(final ResponseMessage responseMessage, final ByteBufAllocator allocator) throws SerializationException {
        ByteBuf encodedMessage = null;
        try {
            final Map<String, Object> result = new HashMap<>();
            result.put(SerTokens.TOKEN_DATA, serializeToString ? serializeResultToString(responseMessage) : responseMessage.getResult().getData());
            result.put(SerTokens.TOKEN_META, responseMessage.getResult().getMeta());

            final Map<String, Object> status = new HashMap<>();
            status.put(SerTokens.TOKEN_MESSAGE, responseMessage.getStatus().getMessage());
            status.put(SerTokens.TOKEN_CODE, responseMessage.getStatus().getCode().getValue());
            status.put(SerTokens.TOKEN_ATTRIBUTES, responseMessage.getStatus().getAttributes());

            final Map<String, Object> message = new HashMap<>();
            message.put(SerTokens.TOKEN_STATUS, status);
            message.put(SerTokens.TOKEN_RESULT, result);
            message.put(SerTokens.TOKEN_REQUEST, responseMessage.getRequestId());

            final Kryo kryo = kryoThreadLocal.get();
            try (final OutputStream baos = new ByteArrayOutputStream()) {
                final Output output = new Output(baos);
                kryo.writeClassAndObject(output, message);

                final long size = output.total();
                if (size > Integer.MAX_VALUE)
                    throw new SerializationException(String.format("Message size of %s exceeds allocatable space", size));

                encodedMessage = allocator.buffer((int) size);
                encodedMessage.writeBytes(output.toBytes());
            }

            return encodedMessage;
        } catch (Exception ex) {
            if (encodedMessage != null) ReferenceCountUtil.release(encodedMessage);

            logger.warn("Response [{}] could not be serialized by {}.", responseMessage.toString(), KryoMessageSerializerV1d0.class.getName());
            throw new SerializationException(ex);
        }
    }

    /**
     * Deserializes a Kryo-encoded {@link RequestMessage} from the given buffer.
     * NOTE(review): assumes the mime-type prefix written by
     * {@link #serializeRequestAsBinary} has already been consumed upstream.
     */
    @Override
    public RequestMessage deserializeRequest(final ByteBuf msg) throws SerializationException {
        try {
            final Kryo kryo = kryoThreadLocal.get();
            final byte[] payload = new byte[msg.readableBytes()];
            msg.readBytes(payload);
            try (final Input input = new Input(payload)) {
                final Map<String, Object> requestData = (Map<String, Object>) kryo.readClassAndObject(input);
                final RequestMessage.Builder builder = RequestMessage.build((String) requestData.get(SerTokens.TOKEN_OP))
                        .overrideRequestId((UUID) requestData.get(SerTokens.TOKEN_REQUEST))
                        .processor((String) requestData.get(SerTokens.TOKEN_PROCESSOR));
                final Map<String, Object> args = (Map<String, Object>) requestData.get(SerTokens.TOKEN_ARGS);
                args.forEach(builder::addArg);
                return builder.create();
            }
        } catch (Exception ex) {
            logger.warn("Request [{}] could not be deserialized by {}.", msg, KryoMessageSerializerV1d0.class.getName());
            throw new SerializationException(ex);
        }
    }

    /**
     * Serializes a {@link RequestMessage}, prefixed with a one-byte length and the
     * mime-type string so the receiver can pick the matching deserializer.
     */
    @Override
    public ByteBuf serializeRequestAsBinary(final RequestMessage requestMessage, final ByteBufAllocator allocator) throws SerializationException {
        ByteBuf encodedMessage = null;
        try {
            final Kryo kryo = kryoThreadLocal.get();
            try (final OutputStream baos = new ByteArrayOutputStream()) {
                final Output output = new Output(baos);
                final String mimeType = serializeToString ? MIME_TYPE_STRINGD : MIME_TYPE;
                output.writeByte(mimeType.length());
                output.write(mimeType.getBytes(UTF8));

                final Map<String, Object> request = new HashMap<>();
                request.put(SerTokens.TOKEN_REQUEST, requestMessage.getRequestId());
                request.put(SerTokens.TOKEN_PROCESSOR, requestMessage.getProcessor());
                request.put(SerTokens.TOKEN_OP, requestMessage.getOp());
                request.put(SerTokens.TOKEN_ARGS, requestMessage.getArgs());

                kryo.writeClassAndObject(output, request);

                final long size = output.total();
                if (size > Integer.MAX_VALUE)
                    throw new SerializationException(String.format("Message size of %s exceeds allocatable space", size));

                encodedMessage = allocator.buffer((int) size);
                encodedMessage.writeBytes(output.toBytes());
            }

            return encodedMessage;
        } catch (Exception ex) {
            if (encodedMessage != null) ReferenceCountUtil.release(encodedMessage);

            logger.warn("Request [{}] could not be serialized by {}.", requestMessage.toString(), KryoMessageSerializerV1d0.class.getName());
            throw new SerializationException(ex);
        }
    }

    /**
     * Converts the response's result data to its string form. Collections are
     * mapped element-by-element so the receiver still gets a list.
     */
    private Object serializeResultToString(final ResponseMessage msg) {
        if (msg.getResult() == null) return "null";
        if (msg.getResult().getData() == null) return "null";

        // the IteratorHandler should return a collection so keep it as such
        final Object o = msg.getResult().getData();
        if (o instanceof Collection) {
            return ((Collection) o).stream().map(Object::toString).collect(Collectors.toList());
        } else {
            return o.toString();
        }
    }
}
| apache-2.0 |
terrancesnyder/solr-analytics | lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextDocValuesConsumer.java | 9362 | package org.apache.lucene.codecs.simpletext;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
import java.io.IOException;
import org.apache.lucene.codecs.DocValuesArraySource;
import org.apache.lucene.codecs.DocValuesConsumer;
import org.apache.lucene.index.DocValues.Type;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefHash;
import org.apache.lucene.util.IOUtils;
/**
* Writes plain-text DocValues.
* <p>
* <b><font color="red">FOR RECREATIONAL USE ONLY</font></B>
*
* @lucene.experimental
*/
public class SimpleTextDocValuesConsumer extends DocValuesConsumer {
  // Literal tokens emitted into the plain-text file.
  static final BytesRef ZERO_DOUBLE = new BytesRef(Double.toString(0d));
  static final BytesRef ZERO_INT = new BytesRef(Integer.toString(0));
  static final BytesRef HEADER = new BytesRef("SimpleTextDocValues");
  static final BytesRef END = new BytesRef("END");
  static final BytesRef VALUE_SIZE = new BytesRef("valuesize ");
  static final BytesRef DOC = new BytesRef(" doc ");
  static final BytesRef VALUE = new BytesRef(" value ");
  // Reusable buffer for converting values to bytes/text.
  protected BytesRef scratch = new BytesRef();
  // Highest docID passed to add() so far; -1 until the first value arrives.
  protected int maxDocId = -1;
  protected final String segment;
  protected final Directory dir;
  protected final IOContext ctx;
  protected final Type type;
  // De-duplicating store of every value added; per-doc ords index into it.
  protected final BytesRefHash hash;
  // Per-document (ord + 1) into the hash; 0 means "no value for this doc".
  private int[] ords;
  // Byte width of each value: Integer.MIN_VALUE until the first add(),
  // -1 for variable-length types, otherwise the fixed width.
  private int valueSize = Integer.MIN_VALUE;
  // Lazily created all-zero padding value for fixed-width byte types.
  private BytesRef zeroBytes;
  private final String segmentSuffix;

  /**
   * Sole constructor.
   *
   * @param segment segment name used to build the output file name
   * @param dir directory the doc values file is written to
   * @param ctx IO context used when creating the output
   * @param type the doc values type this consumer accepts
   * @param segmentSuffix suffix appended when building the file name
   */
  public SimpleTextDocValuesConsumer(String segment, Directory dir,
      IOContext ctx, Type type, String segmentSuffix) {
    this.ctx = ctx;
    this.dir = dir;
    this.segment = segment;
    this.type = type;
    hash = new BytesRefHash();
    ords = new int[0];
    this.segmentSuffix = segmentSuffix;
  }

  /**
   * Buffers one value for the given document. The value is normalized into
   * bytes, interned in {@link #hash}, and only its ord is remembered; nothing
   * is written until {@link #finish(int)}.
   */
  @Override
  public void add(int docID, IndexableField value) throws IOException {
    assert docID >= 0;
    final int ord, vSize;
    // Convert the field value to bytes appropriate for this type; vSize is the
    // fixed byte width, or -1 for variable-length types.
    switch (type) {
    case BYTES_FIXED_DEREF:
    case BYTES_FIXED_SORTED:
    case BYTES_FIXED_STRAIGHT:
      vSize = value.binaryValue().length;
      ord = hash.add(value.binaryValue());
      break;
    case BYTES_VAR_DEREF:
    case BYTES_VAR_SORTED:
    case BYTES_VAR_STRAIGHT:
      vSize = -1;
      ord = hash.add(value.binaryValue());
      break;
    case FIXED_INTS_16:
      vSize = 2;
      scratch.grow(2);
      DocValuesArraySource.copyShort(scratch, value.numericValue().shortValue());
      ord = hash.add(scratch);
      break;
    case FIXED_INTS_32:
      vSize = 4;
      scratch.grow(4);
      DocValuesArraySource.copyInt(scratch, value.numericValue().intValue());
      ord = hash.add(scratch);
      break;
    case FIXED_INTS_8:
      vSize = 1;
      scratch.grow(1);
      scratch.bytes[scratch.offset] = value.numericValue().byteValue();
      scratch.length = 1;
      ord = hash.add(scratch);
      break;
    case FIXED_INTS_64:
      vSize = 8;
      scratch.grow(8);
      DocValuesArraySource.copyLong(scratch, value.numericValue().longValue());
      ord = hash.add(scratch);
      break;
    case VAR_INTS:
      // Variable-width ints are still stored as 8 bytes in the hash.
      vSize = -1;
      scratch.grow(8);
      DocValuesArraySource.copyLong(scratch, value.numericValue().longValue());
      ord = hash.add(scratch);
      break;
    case FLOAT_32:
      vSize = 4;
      scratch.grow(4);
      DocValuesArraySource.copyInt(scratch,
          Float.floatToRawIntBits(value.numericValue().floatValue()));
      ord = hash.add(scratch);
      break;
    case FLOAT_64:
      vSize = 8;
      scratch.grow(8);
      DocValuesArraySource.copyLong(scratch,
          Double.doubleToRawLongBits(value.numericValue().doubleValue()));
      ord = hash.add(scratch);
      break;

    default:
      throw new RuntimeException("should not reach this line");
    }

    // First value fixes the expected width; later values must agree.
    if (valueSize == Integer.MIN_VALUE) {
      assert maxDocId == -1;
      valueSize = vSize;
    } else {
      if (valueSize != vSize) {
        throw new IllegalArgumentException("value size must be " + valueSize + " but was: " + vSize);
      }
    }
    maxDocId = Math.max(docID, maxDocId);
    ords = grow(ords, docID);
    // hash.add returns a negative value when the bytes were already present;
    // unwrap it and store ord+1 so that 0 marks "no value" in writeDoc.
    ords[docID] = (ord < 0 ? (-ord)-1 : ord) + 1;
  }

  // Header token written by finish(); subclasses may override to change it.
  protected BytesRef getHeader() {
    return HEADER;
  }

  // Grows the array so that index `upto` is addressable.
  private int[] grow(int[] array, int upto) {
    if (array.length <= upto) {
      return ArrayUtil.grow(array, 1 + upto);
    }
    return array;
  }

  // Ensures the ord array covers every document, padding missing docs with 0.
  private void prepareFlush(int docCount) {
    assert ords != null;
    ords = grow(ords, docCount);
  }

  /**
   * Writes the buffered values: a header, the value size, then one
   * doc/value line pair per document, terminated by an END marker.
   * On failure the partially written file is deleted.
   */
  @Override
  public void finish(int docCount) throws IOException {
    final String fileName = IndexFileNames.segmentFileName(segment, "",
        segmentSuffix);
    IndexOutput output = dir.createOutput(fileName, ctx);
    boolean success = false;
    BytesRef spare = new BytesRef();
    try {
      SimpleTextUtil.write(output, getHeader());
      SimpleTextUtil.writeNewline(output);
      SimpleTextUtil.write(output, VALUE_SIZE);
      SimpleTextUtil.write(output, Integer.toString(this.valueSize), scratch);
      SimpleTextUtil.writeNewline(output);
      prepareFlush(docCount);
      for (int i = 0; i < docCount; i++) {
        SimpleTextUtil.write(output, DOC);
        SimpleTextUtil.write(output, Integer.toString(i), scratch);
        SimpleTextUtil.writeNewline(output);
        SimpleTextUtil.write(output, VALUE);
        writeDoc(output, i, spare);
        SimpleTextUtil.writeNewline(output);
      }
      SimpleTextUtil.write(output, END);
      SimpleTextUtil.writeNewline(output);
      success = true;
    } finally {
      hash.close();
      if (success) {
        IOUtils.close(output);
      } else {
        // Close quietly and remove the incomplete file.
        IOUtils.closeWhileHandlingException(output);
        dir.deleteFile(fileName);
      }
    }
  }

  // Writes the value for one document; documents that never received a value
  // (stored ord 0) get a type-appropriate zero/empty placeholder.
  protected void writeDoc(IndexOutput output, int docId, BytesRef spare) throws IOException {
    int ord = ords[docId] - 1;
    if (ord != -1) {
      assert ord >= 0;
      hash.get(ord, spare);

      switch (type) {
      case BYTES_FIXED_DEREF:
      case BYTES_FIXED_SORTED:
      case BYTES_FIXED_STRAIGHT:
      case BYTES_VAR_DEREF:
      case BYTES_VAR_SORTED:
      case BYTES_VAR_STRAIGHT:
        SimpleTextUtil.write(output, spare);
        break;
      case FIXED_INTS_16:
        SimpleTextUtil.write(output,
            Short.toString(DocValuesArraySource.asShort(spare)), scratch);
        break;
      case FIXED_INTS_32:
        SimpleTextUtil.write(output,
            Integer.toString(DocValuesArraySource.asInt(spare)), scratch);
        break;
      case VAR_INTS:
      case FIXED_INTS_64:
        SimpleTextUtil.write(output,
            Long.toString(DocValuesArraySource.asLong(spare)), scratch);
        break;
      case FIXED_INTS_8:
        assert spare.length == 1 : spare.length;
        SimpleTextUtil.write(output,
            Integer.toString(spare.bytes[spare.offset]), scratch);
        break;
      case FLOAT_32:
        float valueFloat = Float.intBitsToFloat(DocValuesArraySource.asInt(spare));
        SimpleTextUtil.write(output, Float.toString(valueFloat), scratch);
        break;
      case FLOAT_64:
        double valueDouble = Double.longBitsToDouble(DocValuesArraySource
            .asLong(spare));
        SimpleTextUtil.write(output, Double.toString(valueDouble), scratch);
        break;
      default:
        throw new IllegalArgumentException("unsupported type: " + type);
      }
    } else {
      // Missing value: emit a fixed-width zero block, empty bytes, or "0".
      switch (type) {
      case BYTES_FIXED_DEREF:
      case BYTES_FIXED_SORTED:
      case BYTES_FIXED_STRAIGHT:
        if(zeroBytes == null) {
          assert valueSize > 0;
          zeroBytes = new BytesRef(new byte[valueSize]);
        }
        SimpleTextUtil.write(output, zeroBytes);
        break;
      case BYTES_VAR_DEREF:
      case BYTES_VAR_SORTED:
      case BYTES_VAR_STRAIGHT:
        scratch.length = 0;
        SimpleTextUtil.write(output, scratch);
        break;
      case FIXED_INTS_16:
      case FIXED_INTS_32:
      case FIXED_INTS_64:
      case FIXED_INTS_8:
      case VAR_INTS:
        SimpleTextUtil.write(output, ZERO_INT);
        break;
      case FLOAT_32:
      case FLOAT_64:
        SimpleTextUtil.write(output, ZERO_DOUBLE);
        break;
      default:
        throw new IllegalArgumentException("unsupported type: " + type);
      }
    }
  }

  @Override
  protected Type getType() {
    return type;
  }

  // Fixed byte width per value, or -1 for variable-length types.
  @Override
  public int getValueSize() {
    return valueSize;
  }
}
| apache-2.0 |
googleads/google-ads-java | google-ads-stubs-v9/src/main/java/com/google/ads/googleads/v9/services/stub/GrpcSmartCampaignSettingServiceStub.java | 8989 | /*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.ads.googleads.v9.services.stub;
import com.google.ads.googleads.v9.resources.SmartCampaignSetting;
import com.google.ads.googleads.v9.services.GetSmartCampaignSettingRequest;
import com.google.ads.googleads.v9.services.MutateSmartCampaignSettingsRequest;
import com.google.ads.googleads.v9.services.MutateSmartCampaignSettingsResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.stub.GrpcOperationsStub;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* gRPC stub implementation for the SmartCampaignSettingService service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@Generated("by gapic-generator-java")
public class GrpcSmartCampaignSettingServiceStub extends SmartCampaignSettingServiceStub {
  // Static descriptor for the unary GetSmartCampaignSetting RPC.
  private static final MethodDescriptor<GetSmartCampaignSettingRequest, SmartCampaignSetting>
      getSmartCampaignSettingMethodDescriptor =
          MethodDescriptor.<GetSmartCampaignSettingRequest, SmartCampaignSetting>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.ads.googleads.v9.services.SmartCampaignSettingService/GetSmartCampaignSetting")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetSmartCampaignSettingRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(SmartCampaignSetting.getDefaultInstance()))
              .build();

  // Static descriptor for the unary MutateSmartCampaignSettings RPC.
  private static final MethodDescriptor<
          MutateSmartCampaignSettingsRequest, MutateSmartCampaignSettingsResponse>
      mutateSmartCampaignSettingsMethodDescriptor =
          MethodDescriptor
              .<MutateSmartCampaignSettingsRequest, MutateSmartCampaignSettingsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName(
                  "google.ads.googleads.v9.services.SmartCampaignSettingService/MutateSmartCampaignSettings")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(MutateSmartCampaignSettingsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(MutateSmartCampaignSettingsResponse.getDefaultInstance()))
              .build();

  private final UnaryCallable<GetSmartCampaignSettingRequest, SmartCampaignSetting>
      getSmartCampaignSettingCallable;
  private final UnaryCallable<
          MutateSmartCampaignSettingsRequest, MutateSmartCampaignSettingsResponse>
      mutateSmartCampaignSettingsCallable;

  // Aggregates everything that must be shut down when this stub closes.
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;

  /** Creates a stub from the given settings with a freshly built client context. */
  public static final GrpcSmartCampaignSettingServiceStub create(
      SmartCampaignSettingServiceStubSettings settings) throws IOException {
    return new GrpcSmartCampaignSettingServiceStub(settings, ClientContext.create(settings));
  }

  /** Creates a stub with default settings over an existing client context. */
  public static final GrpcSmartCampaignSettingServiceStub create(ClientContext clientContext)
      throws IOException {
    return new GrpcSmartCampaignSettingServiceStub(
        SmartCampaignSettingServiceStubSettings.newBuilder().build(), clientContext);
  }

  /** Creates a stub with default settings, an existing context, and a custom callable factory. */
  public static final GrpcSmartCampaignSettingServiceStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcSmartCampaignSettingServiceStub(
        SmartCampaignSettingServiceStubSettings.newBuilder().build(),
        clientContext,
        callableFactory);
  }

  /**
   * Constructs an instance of GrpcSmartCampaignSettingServiceStub, using the given settings. This
   * is protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcSmartCampaignSettingServiceStub(
      SmartCampaignSettingServiceStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcSmartCampaignSettingServiceCallableFactory());
  }

  /**
   * Constructs an instance of GrpcSmartCampaignSettingServiceStub, using the given settings. This
   * is protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected GrpcSmartCampaignSettingServiceStub(
      SmartCampaignSettingServiceStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);

    // The params extractors below populate implicit routing headers so requests
    // are dispatched based on resource_name / customer_id.
    GrpcCallSettings<GetSmartCampaignSettingRequest, SmartCampaignSetting>
        getSmartCampaignSettingTransportSettings =
            GrpcCallSettings.<GetSmartCampaignSettingRequest, SmartCampaignSetting>newBuilder()
                .setMethodDescriptor(getSmartCampaignSettingMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("resource_name", String.valueOf(request.getResourceName()));
                      return params.build();
                    })
                .build();
    GrpcCallSettings<MutateSmartCampaignSettingsRequest, MutateSmartCampaignSettingsResponse>
        mutateSmartCampaignSettingsTransportSettings =
            GrpcCallSettings
                .<MutateSmartCampaignSettingsRequest, MutateSmartCampaignSettingsResponse>
                    newBuilder()
                .setMethodDescriptor(mutateSmartCampaignSettingsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      ImmutableMap.Builder<String, String> params = ImmutableMap.builder();
                      params.put("customer_id", String.valueOf(request.getCustomerId()));
                      return params.build();
                    })
                .build();

    this.getSmartCampaignSettingCallable =
        callableFactory.createUnaryCallable(
            getSmartCampaignSettingTransportSettings,
            settings.getSmartCampaignSettingSettings(),
            clientContext);
    this.mutateSmartCampaignSettingsCallable =
        callableFactory.createUnaryCallable(
            mutateSmartCampaignSettingsTransportSettings,
            settings.mutateSmartCampaignSettingsSettings(),
            clientContext);

    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }

  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }

  @Override
  public UnaryCallable<GetSmartCampaignSettingRequest, SmartCampaignSetting>
      getSmartCampaignSettingCallable() {
    return getSmartCampaignSettingCallable;
  }

  @Override
  public UnaryCallable<MutateSmartCampaignSettingsRequest, MutateSmartCampaignSettingsResponse>
      mutateSmartCampaignSettingsCallable() {
    return mutateSmartCampaignSettingsCallable;
  }

  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }

  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }

  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }

  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }

  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }

  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
| apache-2.0 |
dorzey/assertj-core | src/test/java/org/assertj/core/internal/iterables/Iterables_assertStartsWith_Test.java | 6869 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2016 the original author or authors.
*/
package org.assertj.core.internal.iterables;
import static org.assertj.core.error.ShouldStartWith.shouldStartWith;
import static org.assertj.core.test.ErrorMessages.valuesToLookForIsNull;
import static org.assertj.core.test.ObjectArrays.emptyArray;
import static org.assertj.core.test.TestData.someInfo;
import static org.assertj.core.test.TestFailures.failBecauseExpectedAssertionErrorWasNotThrown;
import static org.assertj.core.util.Arrays.array;
import static org.assertj.core.util.FailureMessages.actualIsNull;
import static org.assertj.core.util.Lists.newArrayList;
import static org.mockito.Mockito.verify;
import java.util.Collection;
import java.util.Iterator;
import org.assertj.core.api.AssertionInfo;
import org.assertj.core.internal.Iterables;
import org.assertj.core.internal.IterablesBaseTest;
import org.junit.Before;
import org.junit.Test;
/**
* Tests for <code>{@link Iterables#assertStartsWith(AssertionInfo, Collection, Object[])}</code>.
*
* @author Alex Ruiz
* @author Joel Costigliola
*/
public class Iterables_assertStartsWith_Test extends IterablesBaseTest {
  @Override
  @Before
  public void setUp() {
    // Shared fixture: every test starts from this four-element list.
    super.setUp();
    actual = newArrayList("Yoda", "Luke", "Leia", "Obi-Wan");
  }
  // A null sequence argument is a caller error (NPE), not an assertion failure.
  @Test
  public void should_throw_error_if_sequence_is_null() {
    thrown.expectNullPointerException(valuesToLookForIsNull());
    iterables.assertStartsWith(someInfo(), actual, null);
  }
@Test
public void should_pass_if_actual_and_sequence_are_empty() {
actual.clear();
iterables.assertStartsWith(someInfo(), actual, emptyArray());
}
@Test
public void should_fail_if_sequence_to_look_for_is_empty_and_actual_is_not() {
thrown.expect(AssertionError.class);
iterables.assertStartsWith(someInfo(), actual, emptyArray());
}
@Test
public void should_fail_if_actual_is_null() {
thrown.expectAssertionError(actualIsNull());
iterables.assertStartsWith(someInfo(), null, array("Yoda"));
}
@Test
public void should_fail_if_sequence_is_bigger_than_actual() {
AssertionInfo info = someInfo();
Object[] sequence = { "Yoda", "Luke", "Leia", "Obi-Wan", "Han", "C-3PO", "R2-D2", "Anakin" };
try {
iterables.assertStartsWith(info, actual, sequence);
} catch (AssertionError e) {
verifyFailureThrownWhenSequenceNotFound(info, sequence);
return;
}
failBecauseExpectedAssertionErrorWasNotThrown();
}
@Test
public void should_fail_if_actual_does_not_start_with_sequence() {
AssertionInfo info = someInfo();
Object[] sequence = { "Han", "C-3PO" };
try {
iterables.assertStartsWith(info, actual, sequence);
} catch (AssertionError e) {
verifyFailureThrownWhenSequenceNotFound(info, sequence);
return;
}
failBecauseExpectedAssertionErrorWasNotThrown();
}
@Test
public void should_fail_if_actual_starts_with_first_elements_of_sequence_only() {
AssertionInfo info = someInfo();
Object[] sequence = { "Leia", "Obi-Wan", "Han" };
try {
iterables.assertStartsWith(info, actual, sequence);
} catch (AssertionError e) {
verifyFailureThrownWhenSequenceNotFound(info, sequence);
return;
}
failBecauseExpectedAssertionErrorWasNotThrown();
}
private void verifyFailureThrownWhenSequenceNotFound(AssertionInfo info, Object[] sequence) {
verify(failures).failure(info, shouldStartWith(actual, sequence));
}
@Test
public void should_pass_if_actual_starts_with_sequence() {
iterables.assertStartsWith(someInfo(), actual, array("Yoda", "Luke", "Leia"));
}
@Test
public void should_pass_if_actual_and_sequence_are_equal() {
iterables.assertStartsWith(someInfo(), actual, array("Yoda", "Luke", "Leia", "Obi-Wan"));
}
@Test
public void should_pass_if_infinite_iterable_starts_with_given_sequence() throws Exception {
iterables.assertStartsWith(someInfo(), infiniteListOfNumbers(), array(1, 2, 3, 4, 5));
}
private Iterable<Integer> infiniteListOfNumbers() {
return new Iterable<Integer>() {
int number = 1;
@Override
public Iterator<Integer> iterator() {
return new Iterator<Integer>() {
@Override
public boolean hasNext() {
return true;
}
@Override
public Integer next() {
return number++;
}
@Override
public void remove() {
}
};
}
};
}
// ------------------------------------------------------------------------------------------------------------------
// tests using a custom comparison strategy
// ------------------------------------------------------------------------------------------------------------------
@Test
public void should_fail_if_actual_does_not_start_with_sequence_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Object[] sequence = { "Han", "C-3PO" };
try {
iterablesWithCaseInsensitiveComparisonStrategy.assertStartsWith(info, actual, sequence);
} catch (AssertionError e) {
verify(failures).failure(info, shouldStartWith(actual, sequence, comparisonStrategy));
return;
}
failBecauseExpectedAssertionErrorWasNotThrown();
}
@Test
public void should_fail_if_actual_starts_with_first_elements_of_sequence_only_according_to_custom_comparison_strategy() {
AssertionInfo info = someInfo();
Object[] sequence = { "YODA", "luke", "Leia", "Obi-Wan", "Han" };
try {
iterablesWithCaseInsensitiveComparisonStrategy.assertStartsWith(info, actual, sequence);
} catch (AssertionError e) {
verify(failures).failure(info, shouldStartWith(actual, sequence, comparisonStrategy));
return;
}
failBecauseExpectedAssertionErrorWasNotThrown();
}
@Test
public void should_pass_if_actual_starts_with_sequence_according_to_custom_comparison_strategy() {
iterablesWithCaseInsensitiveComparisonStrategy.assertStartsWith(someInfo(), actual, array("YODA", "luke", "Leia"));
}
@Test
public void should_pass_if_actual_and_sequence_are_equal_according_to_custom_comparison_strategy() {
iterablesWithCaseInsensitiveComparisonStrategy.assertStartsWith(someInfo(), actual, array("Yoda", "LUke", "LeIA", "oBi-WaN"));
}
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-backup/src/main/java/com/amazonaws/services/backup/model/transform/CreateBackupPlanResultJsonUnmarshaller.java | 3618 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.backup.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.backup.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* CreateBackupPlanResult JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CreateBackupPlanResultJsonUnmarshaller implements Unmarshaller<CreateBackupPlanResult, JsonUnmarshallerContext> {

    /**
     * Unmarshalls a {@link CreateBackupPlanResult} from the JSON stream held by the given context.
     *
     * <p>Walks tokens starting at the current depth and populates the result's fields
     * (BackupPlanId, BackupPlanArn, CreationDate as a unix timestamp, VersionId) when they appear
     * one level below the object start. Stops when the enclosing object/array closes back to the
     * original depth.
     *
     * @param context the JSON unmarshalling context positioned at (or just before) the result object
     * @return the populated result; empty result if the current token is an explicit JSON null
     * @throws Exception if the underlying JSON parser fails
     */
    public CreateBackupPlanResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        CreateBackupPlanResult createBackupPlanResult = new CreateBackupPlanResult();
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this result live exactly one level inside the enclosing object.
        int targetDepth = originalDepth + 1;
        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // JSON null payload: return an empty (all-fields-null) result rather than failing.
            return createBackupPlanResult;
        }
        while (true) {
            if (token == null)
                break;
            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("BackupPlanId", targetDepth)) {
                    context.nextToken();
                    createBackupPlanResult.setBackupPlanId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("BackupPlanArn", targetDepth)) {
                    context.nextToken();
                    createBackupPlanResult.setBackupPlanArn(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("CreationDate", targetDepth)) {
                    context.nextToken();
                    // CreationDate is wire-encoded as a unix timestamp.
                    createBackupPlanResult.setCreationDate(DateJsonUnmarshallerFactory.getInstance("unixTimestamp").unmarshall(context));
                }
                if (context.testExpression("VersionId", targetDepth)) {
                    context.nextToken();
                    createBackupPlanResult.setVersionId(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Leaving the object we started in means we are done.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }
        return createBackupPlanResult;
    }

    // Eagerly initialized singleton: class initialization is guaranteed thread-safe by the JLS,
    // unlike the previous unsynchronized lazy check which could race (benign but non-idiomatic).
    // The unmarshaller is stateless, so a single shared instance is safe.
    private static final CreateBackupPlanResultJsonUnmarshaller instance = new CreateBackupPlanResultJsonUnmarshaller();

    /**
     * Returns the shared, stateless unmarshaller instance.
     */
    public static CreateBackupPlanResultJsonUnmarshaller getInstance() {
        return instance;
    }
}
| apache-2.0 |