repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
siosio/intellij-community | platform/platform-impl/src/com/intellij/openapi/fileEditor/impl/FileEditorProviderManagerImpl.java | 6610 | // Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.fileEditor.impl;
import com.intellij.diagnostic.PluginException;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.RoamingType;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileEditor.FileEditorPolicy;
import com.intellij.openapi.fileEditor.FileEditorProvider;
import com.intellij.openapi.fileEditor.WeighedFileEditorProvider;
import com.intellij.openapi.fileEditor.ex.FileEditorProviderManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.SlowOperations;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xmlb.annotations.MapAnnotation;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.*;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
@State(
name = "FileEditorProviderManager",
storages = @Storage(value = "fileEditorProviderManager.xml", roamingType = RoamingType.DISABLED)
)
/**
 * Default implementation of {@link FileEditorProviderManager}. Resolves which
 * {@link FileEditorProvider}s can open a given file, and remembers (as persistent
 * component state) which provider the user last selected when several providers
 * compete for the same file.
 *
 * <p>Note: this class is its own {@link PersistentStateComponent} state; only the
 * {@code mySelectedProviders} map is serialized, via the {@code @MapAnnotation}-annotated
 * accessor pair below.
 */
public final class FileEditorProviderManagerImpl extends FileEditorProviderManager
  implements PersistentStateComponent<FileEditorProviderManagerImpl> {
  private static final @NotNull Logger LOG = Logger.getInstance(FileEditorProviderManagerImpl.class);

  /**
   * Returns all providers that accept {@code file}, filtered and ordered:
   * <ul>
   *   <li>providers that are not dumb-aware are skipped while the project is in dumb mode;</li>
   *   <li>providers vetoed by any {@link FileEditorProviderSuppressor} are skipped (and logged);</li>
   *   <li>if any accepted provider declares {@link FileEditorPolicy#HIDE_DEFAULT_EDITOR},
   *       the {@link DefaultPlatformFileEditorProvider} is removed from the result;</li>
   *   <li>the result is sorted by policy, then by weight (see {@link MyComparator}).</li>
   * </ul>
   */
  @Override
  public FileEditorProvider @NotNull [] getProviders(@NotNull final Project project, @NotNull final VirtualFile file) {
    // Collect all possible editors
    List<FileEditorProvider> sharedProviders = new ArrayList<>();
    boolean hideDefaultEditor = false;
    for (final FileEditorProvider provider : FileEditorProvider.EP_FILE_EDITOR_PROVIDER.getExtensionList()) {
      // accept() may be slow and must run under a read action; SlowOperations
      // explicitly allows the slow call on this thread.
      if (SlowOperations.allowSlowOperations(() -> ReadAction.compute(() -> {
        if (DumbService.isDumb(project) && !DumbService.isDumbAware(provider)) {
          return false;
        }
        if (!provider.accept(project, file)) {
          return false;
        }
        for (FileEditorProviderSuppressor suppressor : FileEditorProviderSuppressor.EP_NAME.getExtensionList()) {
          if (suppressor.isSuppressed(project, file, provider)) {
            LOG.info(String.format("FileEditorProvider %s for VirtualFile %s was suppressed by FileEditorProviderSuppressor %s",
              provider.getClass(), file, suppressor.getClass()));
            return false;
          }
        }
        return true;
      }))) {
        sharedProviders.add(provider);
        hideDefaultEditor |= provider.getPolicy() == FileEditorPolicy.HIDE_DEFAULT_EDITOR;
        // HIDE_DEFAULT_EDITOR implies the provider must always be available,
        // including during indexing — report a plugin error otherwise.
        if (provider.getPolicy() == FileEditorPolicy.HIDE_DEFAULT_EDITOR && !DumbService.isDumbAware(provider)) {
          String message = "HIDE_DEFAULT_EDITOR is supported only for DumbAware providers; " + provider.getClass() + " is not DumbAware.";
          LOG.error(PluginException.createByClass(message, null, provider.getClass()));
        }
      }
    }
    // Throw out default editors provider if necessary
    if (hideDefaultEditor) {
      ContainerUtil.retainAll(sharedProviders, provider -> !(provider instanceof DefaultPlatformFileEditorProvider));
    }
    // Sort editors according policies
    sharedProviders.sort(MyComparator.ourInstance);
    return sharedProviders.toArray(new FileEditorProvider[0]);
  }

  /**
   * Linear lookup of a provider by its editor type id over the extension point list;
   * returns {@code null} when no registered provider matches.
   */
  @Override
  @Nullable
  public FileEditorProvider getProvider(@NotNull String editorTypeId) {
    for (FileEditorProvider provider : FileEditorProvider.EP_FILE_EDITOR_PROVIDER.getExtensionList()) {
      if (provider.getEditorTypeId().equals(editorTypeId)) {
        return provider;
      }
    }
    return null;
  }

  @NotNull
  @Override
  public FileEditorProviderManagerImpl getState() {
    // The component serializes itself; see getSelectedProviders()/setSelectedProviders().
    return this;
  }

  @Override
  public void loadState(@NotNull FileEditorProviderManagerImpl state) {
    mySelectedProviders.clear();
    mySelectedProviders.putAll(state.mySelectedProviders);
  }

  // Maps a comma-joined list of competing provider ids (see computeKey) to the
  // editor type id the user last selected for that combination.
  private final Map<String, String> mySelectedProviders = new HashMap<>();

  /**
   * Records which provider the user selected in {@code composite}. Only meaningful
   * when at least two providers compete for the file, hence the early return.
   */
  void providerSelected(@NotNull EditorComposite composite) {
    FileEditorProvider[] providers = composite.getProviders();
    if (providers.length < 2) return;
    mySelectedProviders.put(computeKey(providers),
      composite.getSelectedWithProvider().getProvider().getEditorTypeId());
  }

  /** Key for mySelectedProviders: editor type ids of all competing providers, joined with ",". */
  private static @NotNull String computeKey(FileEditorProvider[] providers) {
    return StringUtil.join(ContainerUtil.map(providers, FileEditorProvider::getEditorTypeId), ",");
  }

  /**
   * Picks the provider to pre-select for {@code file}: first the per-file history
   * (EditorHistoryManager), then the remembered choice for this provider combination;
   * {@code null} when there is nothing to prefer (or fewer than two providers).
   */
  @Nullable
  FileEditorProvider getSelectedFileEditorProvider(@NotNull EditorHistoryManager editorHistoryManager,
                                                  @NotNull VirtualFile file,
                                                  FileEditorProvider @NotNull [] providers) {
    FileEditorProvider provider = editorHistoryManager.getSelectedProvider(file);
    if (provider != null || providers.length < 2) {
      return provider;
    }
    String id = mySelectedProviders.get(computeKey(providers));
    return id == null ? null : getProvider(id);
  }

  // Serialization accessor for the persisted map.
  @MapAnnotation
  public Map<String, String> getSelectedProviders() {
    return mySelectedProviders;
  }

  // Called reflectively by the serializer when state is loaded.
  @SuppressWarnings("unused")
  public void setSelectedProviders(Map<String, String> selectedProviders) {
    mySelectedProviders.clear();
    mySelectedProviders.putAll(selectedProviders);
  }

  @TestOnly
  public void clearSelectedProviders() {
    mySelectedProviders.clear();
  }

  /**
   * Orders providers by {@link FileEditorPolicy} first, then by weight; providers
   * that are not {@link WeighedFileEditorProvider}s sort last (Double.MAX_VALUE).
   */
  private static final class MyComparator implements Comparator<FileEditorProvider> {
    public static final MyComparator ourInstance = new MyComparator();

    private static double getWeight(FileEditorProvider provider) {
      return provider instanceof WeighedFileEditorProvider
        ? ((WeighedFileEditorProvider)provider).getWeight()
        : Double.MAX_VALUE;
    }

    @Override
    public int compare(FileEditorProvider provider1, FileEditorProvider provider2) {
      int c = provider1.getPolicy().compareTo(provider2.getPolicy());
      if (c != 0) return c;
      // NOTE(review): subtraction-based comparison; Double.compare would be the
      // conventional form, but behavior is kept as-is here.
      final double value = getWeight(provider1) - getWeight(provider2);
      return value > 0 ? 1 : value < 0 ? -1 : 0;
    }
  }
}
| apache-2.0 |
apache/geronimo-javamail | geronimo-javamail_1.6/geronimo-javamail_1.6_provider/src/main/java/org/apache/geronimo/javamail/store/imap/connection/IMAPSizeResponse.java | 1595 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.javamail.store.imap.connection;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.mail.MessagingException;
/**
* Util class to represent a server size response.
*
* @version $Rev$ $Date$
*/
/**
 * Util class to represent a server size response.
 *
 * @version $Rev$ $Date$
 */
public class IMAPSizeResponse extends IMAPUntaggedResponse {
    /** The size value carried by this untagged response. */
    protected int size;

    /**
     * Construct a size-valued response item.
     *
     * @param keyword  The KEYWORD item associated with the size.
     * @param size     The size value.
     * @param response The raw response data.
     */
    public IMAPSizeResponse(String keyword, int size, byte [] response) {
        super(keyword, response);
        this.size = size;
    }

    /**
     * Retrieve the size value associated with this response.
     *
     * @return The size value.
     */
    public int getSize() {
        return this.size;
    }
}
| apache-2.0 |
zqian/sakai | edu-services/cm-service/cm-impl/hibernate-impl/impl/src/java/org/sakaiproject/coursemanagement/impl/CourseManagementAdministrationHibernateImpl.java | 27734 | /**********************************************************************************
* $URL$
* $Id$
***********************************************************************************
*
* Copyright (c) 2006, 2007, 2008 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.coursemanagement.impl;
import java.sql.Time;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.hibernate.Hibernate;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.sakaiproject.coursemanagement.api.AcademicSession;
import org.sakaiproject.coursemanagement.api.CanonicalCourse;
import org.sakaiproject.coursemanagement.api.CourseManagementAdministration;
import org.sakaiproject.coursemanagement.api.CourseOffering;
import org.sakaiproject.coursemanagement.api.CourseSet;
import org.sakaiproject.coursemanagement.api.Enrollment;
import org.sakaiproject.coursemanagement.api.EnrollmentSet;
import org.sakaiproject.coursemanagement.api.Meeting;
import org.sakaiproject.coursemanagement.api.Membership;
import org.sakaiproject.coursemanagement.api.Section;
import org.sakaiproject.coursemanagement.api.SectionCategory;
import org.sakaiproject.coursemanagement.api.exception.IdExistsException;
import org.sakaiproject.coursemanagement.api.exception.IdNotFoundException;
import org.sakaiproject.coursemanagement.impl.facade.Authentication;
import org.springframework.dao.DataIntegrityViolationException;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
* Manipulates course and enrollment data stored in sakai's local hibernate tables.
*
* @author <a href="mailto:jholtzman@berkeley.edu">Josh Holtzman</a>
*
*/
public class CourseManagementAdministrationHibernateImpl extends
HibernateDaoSupport implements CourseManagementAdministration {
    private static final Logger log = LoggerFactory.getLogger(CourseManagementAdministrationHibernateImpl.class);

    // Authentication facade used to stamp createdBy/lastModifiedBy audit fields.
    protected Authentication authn;

    /** Dependency injection of the authentication facade. */
    public void setAuthn(Authentication authn) {
        this.authn = authn;
    }

    /** Container lifecycle callback: logs service startup. */
    public void init() {
        log.info("Initializing " + getClass().getName());
    }

    /** Container lifecycle callback: logs service shutdown. */
    public void destroy() {
        log.info("Destroying " + getClass().getName());
    }
public AcademicSession createAcademicSession(String eid, String title,
String description, Date startDate, Date endDate) throws IdExistsException {
AcademicSessionCmImpl academicSession = new AcademicSessionCmImpl(eid, title, description, startDate, endDate);
academicSession.setCreatedBy(authn.getUserEid());
academicSession.setCreatedDate(new Date());
try {
getHibernateTemplate().save(academicSession);
return academicSession;
} catch (DataIntegrityViolationException dive) {
throw new IdExistsException(eid, AcademicSession.class.getName());
}
}
public void updateAcademicSession(AcademicSession academicSession) {
AcademicSessionCmImpl as = (AcademicSessionCmImpl)academicSession;
as.setLastModifiedBy(authn.getUserEid());
as.setLastModifiedDate(new Date());
getHibernateTemplate().update(as);
}
    /**
     * Creates and persists a new course set, optionally attached to a parent set.
     *
     * @param parentCourseSetEid eid of the parent course set, or null for a top-level set
     * @throws IdExistsException if a course set with this eid already exists
     * @throws IdNotFoundException if parentCourseSetEid is non-null but unknown
     */
    public CourseSet createCourseSet(String eid, String title, String description, String category,
            String parentCourseSetEid) throws IdExistsException {
        CourseSet parent = null;
        if(parentCourseSetEid != null) {
            parent = (CourseSet)getObjectByEid(parentCourseSetEid, CourseSetCmImpl.class.getName());
        }
        CourseSetCmImpl courseSet = new CourseSetCmImpl(eid, title, description, category, parent);
        courseSet.setCreatedBy(authn.getUserEid());
        courseSet.setCreatedDate(new Date());
        try {
            getHibernateTemplate().save(courseSet);
            return courseSet;
        } catch (DataIntegrityViolationException dive) {
            // Unique-constraint violation on eid.
            throw new IdExistsException(eid, CourseSet.class.getName());
        }
    }
public void updateCourseSet(CourseSet courseSet) {
CourseSetCmImpl cs = (CourseSetCmImpl)courseSet;
cs.setLastModifiedBy(authn.getUserEid());
cs.setLastModifiedDate(new Date());
getHibernateTemplate().update(cs);
}
public CanonicalCourse createCanonicalCourse(String eid, String title, String description) throws IdExistsException {
CanonicalCourseCmImpl canonCourse = new CanonicalCourseCmImpl(eid, title, description);
canonCourse.setCreatedBy(authn.getUserEid());
canonCourse.setCreatedDate(new Date());
try {
getHibernateTemplate().save(canonCourse);
return canonCourse;
} catch (DataIntegrityViolationException dive) {
throw new IdExistsException(eid, CanonicalCourse.class.getName());
}
}
public void updateCanonicalCourse(CanonicalCourse canonicalCourse) {
CanonicalCourseCmImpl cc = (CanonicalCourseCmImpl)canonicalCourse;
cc.setLastModifiedBy(authn.getUserEid());
cc.setLastModifiedDate(new Date());
getHibernateTemplate().update(cc);
}
    /**
     * Adds a canonical course to a course set, creating the set's collection lazily
     * and updating the set's audit fields.
     *
     * @throws IdNotFoundException if either eid is unknown
     */
    public void addCanonicalCourseToCourseSet(String courseSetEid, String canonicalCourseEid) throws IdNotFoundException {
        CourseSetCmImpl courseSet = (CourseSetCmImpl)getObjectByEid(courseSetEid, CourseSetCmImpl.class.getName());
        CanonicalCourseCmImpl canonCourse = (CanonicalCourseCmImpl)getObjectByEid(canonicalCourseEid, CanonicalCourseCmImpl.class.getName());
        Set<CanonicalCourse> canonCourses = courseSet.getCanonicalCourses();
        if(canonCourses == null) {
            // Lazily initialize the collection on first association.
            canonCourses = new HashSet<CanonicalCourse>();
            courseSet.setCanonicalCourses(canonCourses);
        }
        canonCourses.add(canonCourse);
        courseSet.setLastModifiedBy(authn.getUserEid());
        courseSet.setLastModifiedDate(new Date());
        getHibernateTemplate().update(courseSet);
    }
    /**
     * Removes a canonical course from a course set.
     *
     * @return true if the course was a member and was removed; false if the set had
     *         no such member (no update is performed in that case)
     */
    public boolean removeCanonicalCourseFromCourseSet(String courseSetEid, String canonicalCourseEid) {
        CourseSetCmImpl courseSet = (CourseSetCmImpl)getObjectByEid(courseSetEid, CourseSetCmImpl.class.getName());
        CanonicalCourseCmImpl canonCourse = (CanonicalCourseCmImpl)getObjectByEid(canonicalCourseEid, CanonicalCourseCmImpl.class.getName());
        Set courses = courseSet.getCanonicalCourses();
        if(courses == null || ! courses.contains(canonCourse)) {
            return false;
        }
        courses.remove(canonCourse);
        courseSet.setLastModifiedBy(authn.getUserEid());
        courseSet.setLastModifiedDate(new Date());
        getHibernateTemplate().update(courseSet);
        return true;
    }
    /**
     * Marks all of the given cross-listables as equivalent by creating a fresh
     * CrossListing and pointing each of them at it.
     *
     * <p>NOTE(review): the previous cross listings are collected into
     * {@code oldCrossListings} but never used — orphaned cross listings are not
     * cleaned up (see the TODO below).
     */
    private void setEquivalents(Set crossListables) {
        CrossListingCmImpl newCrossListing = new CrossListingCmImpl();
        newCrossListing.setCreatedBy(authn.getUserEid());
        newCrossListing.setCreatedDate(new Date());
        getHibernateTemplate().save(newCrossListing);
        Set<CrossListingCmImpl> oldCrossListings = new HashSet<CrossListingCmImpl>();
        for(Iterator iter = crossListables.iterator(); iter.hasNext();) {
            CrossListableCmImpl clable = (CrossListableCmImpl)iter.next();
            CrossListingCmImpl oldCrossListing = clable.getCrossListing();
            if(oldCrossListing != null) {
                oldCrossListings.add(oldCrossListing);
            }
            if(log.isDebugEnabled()) log.debug("Setting crosslisting for crosslistable " +
                clable.getEid() + " to " + newCrossListing.getKey());
            clable.setCrossListing(newCrossListing);
            clable.setLastModifiedBy(authn.getUserEid());
            clable.setLastModifiedDate(new Date());
            getHibernateTemplate().update(clable);
        }
        // TODO Clean up orphaned cross listings
    }
    /** Marks the given canonical courses as cross-listed equivalents of each other. */
    public void setEquivalentCanonicalCourses(Set canonicalCourses) {
        setEquivalents(canonicalCourses);
    }
    /**
     * Detaches a cross-listable from its cross listing (if any) and updates it.
     *
     * @return true if the object previously had a cross listing
     */
    private boolean removeEquiv(CrossListableCmImpl impl) {
        boolean hadCrossListing = impl.getCrossListing() != null;
        impl.setCrossListing(null);
        impl.setLastModifiedBy(authn.getUserEid());
        impl.setLastModifiedDate(new Date());
        getHibernateTemplate().update(impl);
        return hadCrossListing;
    }
    /** Removes a canonical course from its cross-listing equivalency, if it has one. */
    public boolean removeEquivalency(CanonicalCourse canonicalCourse) {
        return removeEquiv((CanonicalCourseCmImpl)canonicalCourse);
    }
    /**
     * Creates and persists a course offering bound to an academic session and a
     * canonical course.
     *
     * @throws IdExistsException if a course offering with this eid already exists
     * @throws IdNotFoundException if the session or canonical course eid is unknown
     */
    public CourseOffering createCourseOffering(String eid, String title, String description,
            String status, String academicSessionEid, String canonicalCourseEid, Date startDate, Date endDate) throws IdExistsException {
        AcademicSession as = (AcademicSession)getObjectByEid(academicSessionEid, AcademicSessionCmImpl.class.getName());
        CanonicalCourse cc = (CanonicalCourse)getObjectByEid(canonicalCourseEid, CanonicalCourseCmImpl.class.getName());
        CourseOfferingCmImpl co = new CourseOfferingCmImpl(eid, title, description, status, as, cc, startDate, endDate);
        co.setCreatedBy(authn.getUserEid());
        co.setCreatedDate(new Date());
        try {
            getHibernateTemplate().save(co);
            return co;
        } catch (DataIntegrityViolationException dive) {
            // Unique-constraint violation on eid.
            throw new IdExistsException(eid, CourseOffering.class.getName());
        }
    }
public void updateCourseOffering(CourseOffering courseOffering) {
CourseOfferingCmImpl co = (CourseOfferingCmImpl)courseOffering;
co.setLastModifiedBy(authn.getUserEid());
co.setLastModifiedDate(new Date());
getHibernateTemplate().update(co);
}
    /** Marks the given course offerings as cross-listed equivalents of each other. */
    public void setEquivalentCourseOfferings(Set courseOfferings) {
        setEquivalents(courseOfferings);
    }

    /** Removes a course offering from its cross-listing equivalency, if it has one. */
    public boolean removeEquivalency(CourseOffering courseOffering) {
        return removeEquiv((CrossListableCmImpl)courseOffering);
    }
    /**
     * Adds a course offering to a course set, creating the set's collection lazily
     * and updating the set's audit fields.
     */
    public void addCourseOfferingToCourseSet(String courseSetEid, String courseOfferingEid) {
        // CourseSet's set of courses are controlled on the CourseSet side of the bi-directional relationship
        CourseSetCmImpl courseSet = (CourseSetCmImpl)getObjectByEid(courseSetEid, CourseSetCmImpl.class.getName());
        CourseOfferingCmImpl courseOffering = (CourseOfferingCmImpl)getObjectByEid(courseOfferingEid, CourseOfferingCmImpl.class.getName());
        Set<CourseOffering> offerings = courseSet.getCourseOfferings();
        if(offerings == null) {
            // Lazily initialize the collection on first association.
            offerings = new HashSet<CourseOffering>();
        }
        offerings.add(courseOffering);
        courseSet.setCourseOfferings(offerings);
        courseSet.setLastModifiedBy(authn.getUserEid());
        courseSet.setLastModifiedDate(new Date());
        getHibernateTemplate().update(courseSet);
    }
    /**
     * Removes a course offering from a course set.
     *
     * @return true if the offering was a member and was removed; false otherwise
     */
    public boolean removeCourseOfferingFromCourseSet(String courseSetEid, String courseOfferingEid) {
        CourseSetCmImpl courseSet = (CourseSetCmImpl)getObjectByEid(courseSetEid, CourseSetCmImpl.class.getName());
        CourseOffering courseOffering = (CourseOffering)getObjectByEid(courseOfferingEid, CourseOfferingCmImpl.class.getName());
        Set offerings = courseSet.getCourseOfferings();
        if(offerings == null || ! offerings.contains(courseOffering)) {
            return false;
        }
        offerings.remove(courseOffering);
        courseSet.setLastModifiedBy(authn.getUserEid());
        courseSet.setLastModifiedDate(new Date());
        getHibernateTemplate().update(courseSet);
        return true;
    }
    /**
     * Creates and persists an enrollment set attached to a course offering.
     *
     * @throws IllegalArgumentException if courseOfferingEid is null — an enrollment
     *         set cannot exist without its offering
     * @throws IdExistsException if an enrollment set with this eid already exists
     */
    public EnrollmentSet createEnrollmentSet(String eid, String title, String description, String category,
            String defaultEnrollmentCredits, String courseOfferingEid, Set officialGraders)
            throws IdExistsException {
        if(courseOfferingEid == null) {
            throw new IllegalArgumentException("You can not create an EnrollmentSet without specifying a courseOffering");
        }
        CourseOffering co = (CourseOffering)getObjectByEid(courseOfferingEid, CourseOfferingCmImpl.class.getName());
        EnrollmentSetCmImpl enrollmentSet = new EnrollmentSetCmImpl(eid, title, description, category, defaultEnrollmentCredits, co, officialGraders);
        enrollmentSet.setCreatedBy(authn.getUserEid());
        enrollmentSet.setCreatedDate(new Date());
        try {
            getHibernateTemplate().save(enrollmentSet);
            return enrollmentSet;
        } catch (DataIntegrityViolationException dive) {
            // Unique-constraint violation on eid.
            throw new IdExistsException(eid, EnrollmentSet.class.getName());
        }
    }
public void updateEnrollmentSet(EnrollmentSet enrollmentSet) {
EnrollmentSetCmImpl es = (EnrollmentSetCmImpl)enrollmentSet;
es.setLastModifiedBy(authn.getUserEid());
es.setLastModifiedDate(new Date());
getHibernateTemplate().update(es);
}
    /** Convenience overload: adds or updates an enrollment with no drop date. */
    public Enrollment addOrUpdateEnrollment(String userId, String enrollmentSetEid, String enrollmentStatus, String credits, String gradingScheme) {
        return addOrUpdateEnrollment(userId, enrollmentSetEid, enrollmentStatus, credits, gradingScheme, null);
    }
    /**
     * Adds a new enrollment, or updates the existing one for this (user, enrollment
     * set) pair. Updating an existing enrollment also reactivates it
     * (setDropped(false)) — see removeEnrollment, which soft-deletes via the
     * dropped flag.
     */
    public Enrollment addOrUpdateEnrollment(String userId, String enrollmentSetEid, String enrollmentStatus, String credits, String gradingScheme, Date dropDate) {
        EnrollmentCmImpl enrollment = null;
        // Look for an existing enrollment (dropped or not) for this user/set.
        List enrollments = getHibernateTemplate().findByNamedQueryAndNamedParam("findEnrollment",
            new String[] {"enrollmentSetEid", "userId"},
            new Object[] {enrollmentSetEid, userId});
        if(enrollments.isEmpty()) {
            EnrollmentSet enrollmentSet = (EnrollmentSet)getObjectByEid(enrollmentSetEid, EnrollmentSetCmImpl.class.getName());
            enrollment = new EnrollmentCmImpl(userId, enrollmentSet, enrollmentStatus, credits, gradingScheme, dropDate);
            enrollment.setCreatedBy(authn.getUserEid());
            enrollment.setCreatedDate(new Date());
            getHibernateTemplate().save(enrollment);
        } else {
            enrollment = (EnrollmentCmImpl)enrollments.get(0);
            enrollment.setEnrollmentStatus(enrollmentStatus);
            enrollment.setCredits(credits);
            enrollment.setGradingScheme(gradingScheme);
            // Re-enrolling clears any previous soft delete.
            enrollment.setDropped(false);
            enrollment.setDropDate(dropDate);
            enrollment.setLastModifiedBy(authn.getUserEid());
            enrollment.setLastModifiedDate(new Date());
            getHibernateTemplate().update(enrollment);
        }
        return enrollment;
    }
    /**
     * Soft-deletes an enrollment by flagging it as dropped (the row is kept, not
     * deleted, so history and re-enrollment are preserved).
     *
     * @return true if an enrollment existed and was flagged; false otherwise
     */
    public boolean removeEnrollment(String userId, String enrollmentSetEid) {
        List enrollments = getHibernateTemplate().findByNamedQueryAndNamedParam("findEnrollment",
            new String[] {"enrollmentSetEid", "userId"},
            new Object[] {enrollmentSetEid, userId});
        if(enrollments.isEmpty()) {
            return false;
        } else {
            EnrollmentCmImpl enr = (EnrollmentCmImpl)enrollments.get(0);
            enr.setDropped(true);
            enr.setLastModifiedBy(authn.getUserEid());
            enr.setLastModifiedDate(new Date());
            getHibernateTemplate().update(enr);
            return true;
        }
    }
    /**
     * Creates and persists a section, optionally bound to a parent section, a course
     * offering, and an enrollment set (each eid may be null).
     *
     * <p>NOTE(review): this factory never sets a max size — {@code maxSize} is always
     * passed to the constructor as null.
     *
     * @throws IdExistsException if a section with this eid already exists
     */
    public Section createSection(String eid, String title, String description, String category,
            String parentSectionEid, String courseOfferingEid, String enrollmentSetEid) throws IdExistsException {
        // The objects related to this section
        Section parent = null;
        CourseOffering co = null;
        EnrollmentSet es = null;
        Integer maxSize = null;
        // Get the course offering, if needed
        if(courseOfferingEid != null) {
            co = (CourseOffering)getObjectByEid(courseOfferingEid, CourseOfferingCmImpl.class.getName());
        }
        // Get the parent section, if needed
        if(parentSectionEid != null) {
            parent = (Section)getObjectByEid(parentSectionEid, SectionCmImpl.class.getName());
        }
        // Get the enrollment set, if needed
        if(enrollmentSetEid != null) {
            es = (EnrollmentSet)getObjectByEid(enrollmentSetEid, EnrollmentSetCmImpl.class.getName());
        }
        SectionCmImpl section = new SectionCmImpl(eid, title, description, category, parent, co, es, maxSize);
        section.setCreatedBy(authn.getUserEid());
        section.setCreatedDate(new Date());
        try {
            getHibernateTemplate().save(section);
            return section;
        } catch (DataIntegrityViolationException dive) {
            // Unique-constraint violation on eid.
            throw new IdExistsException(eid, Section.class.getName());
        }
    }
public void updateSection(Section section) {
SectionCmImpl sec = (SectionCmImpl)section;
sec.setLastModifiedBy(authn.getUserEid());
sec.setLastModifiedDate(new Date());
getHibernateTemplate().update(sec);
}
    /**
     * Adds a user to a course set, or updates the role/status of an existing
     * membership.
     *
     * @throws IdNotFoundException if the course set eid is unknown
     */
    public Membership addOrUpdateCourseSetMembership(final String userId, String role, final String courseSetEid, final String status) throws IdNotFoundException {
        CourseSetCmImpl cs = (CourseSetCmImpl)getObjectByEid(courseSetEid, CourseSetCmImpl.class.getName());
        MembershipCmImpl member =getMembership(userId, cs);
        if(member == null) {
            // Add the new member
            member = new MembershipCmImpl(userId, role, cs, status);
            member.setCreatedBy(authn.getUserEid());
            member.setCreatedDate(new Date());
            getHibernateTemplate().save(member);
        } else {
            // Update the existing member
            member.setRole(role);
            member.setStatus(status);
            member.setLastModifiedBy(authn.getUserEid());
            member.setLastModifiedDate(new Date());
            getHibernateTemplate().update(member);
        }
        return member;
    }
    /**
     * Deletes a user's membership in a course set (hard delete).
     *
     * @return true if a membership existed and was deleted; false otherwise
     */
    public boolean removeCourseSetMembership(String userId, String courseSetEid) {
        MembershipCmImpl member = getMembership(userId, (CourseSetCmImpl)getObjectByEid(courseSetEid, CourseSetCmImpl.class.getName()));
        if(member == null) {
            return false;
        } else {
            getHibernateTemplate().delete(member);
            return true;
        }
    }
    /**
     * Adds a user to a course offering, or updates the role/status of an existing
     * membership.
     */
    public Membership addOrUpdateCourseOfferingMembership(String userId, String role, String courseOfferingEid, String status) {
        CourseOfferingCmImpl co = (CourseOfferingCmImpl)getObjectByEid(courseOfferingEid, CourseOfferingCmImpl.class.getName());
        MembershipCmImpl member =getMembership(userId, co);
        if(member == null) {
            // Add the new member
            member = new MembershipCmImpl(userId, role, co, status);
            member.setCreatedBy(authn.getUserEid());
            member.setCreatedDate(new Date());
            getHibernateTemplate().save(member);
        } else {
            // Update the existing member
            member.setRole(role);
            member.setStatus(status);
            member.setLastModifiedBy(authn.getUserEid());
            member.setLastModifiedDate(new Date());
            getHibernateTemplate().update(member);
        }
        return member;
    }
    /**
     * Deletes a user's membership in a course offering (hard delete).
     *
     * @return true if a membership existed and was deleted; false otherwise
     */
    public boolean removeCourseOfferingMembership(String userId, String courseOfferingEid) {
        CourseOfferingCmImpl courseOffering = (CourseOfferingCmImpl)getObjectByEid(courseOfferingEid, CourseOfferingCmImpl.class.getName());
        MembershipCmImpl member = getMembership(userId, courseOffering);
        if(member == null) {
            return false;
        } else {
            getHibernateTemplate().delete(member);
            return true;
        }
    }
    /**
     * Adds a user to a section, or updates the role/status of an existing
     * membership.
     */
    public Membership addOrUpdateSectionMembership(String userId, String role, String sectionEid, String status) {
        SectionCmImpl sec = (SectionCmImpl)getObjectByEid(sectionEid, SectionCmImpl.class.getName());
        MembershipCmImpl member =getMembership(userId, sec);
        if(member == null) {
            // Add the new member
            member = new MembershipCmImpl(userId, role, sec, status);
            member.setCreatedBy(authn.getUserEid());
            member.setCreatedDate(new Date());
            getHibernateTemplate().save(member);
        } else {
            // Update the existing member
            member.setRole(role);
            member.setStatus(status);
            member.setLastModifiedBy(authn.getUserEid());
            member.setLastModifiedDate(new Date());
            getHibernateTemplate().update(member);
        }
        return member;
    }
    /**
     * Deletes a user's membership in a section (hard delete).
     *
     * @return true if a membership existed and was deleted; false otherwise
     */
    public boolean removeSectionMembership(String userId, String sectionEid) {
        SectionCmImpl sec = (SectionCmImpl)getObjectByEid(sectionEid, SectionCmImpl.class.getName());
        MembershipCmImpl member = getMembership(userId, sec);
        if(member == null) {
            return false;
        } else {
            getHibernateTemplate().delete(member);
            return true;
        }
    }
    /**
     * Finds the single membership of a user within a membership container, or null.
     * The query is built with the container's concrete class name so that
     * memberships of different container types with the same eid don't collide.
     */
    private MembershipCmImpl getMembership(final String userId, final AbstractMembershipContainerCmImpl container) {
        // This may be a dynamic proxy. In that case, make sure we're using the class
        // that hibernate understands.
        final String className = Hibernate.getClass(container).getName();
        // className comes from Hibernate itself (not user input), so concatenating
        // it into the HQL is safe; eid and userId are bound as named parameters.
        final StringBuilder sb = new StringBuilder("select mbr from MembershipCmImpl as mbr, ");
        sb.append(className);
        sb.append(" as container where mbr.memberContainer=container ");
        sb.append("and container.eid=:eid ");
        sb.append("and mbr.userId=:userId");
        HibernateCallback hc = new HibernateCallback() {
            public Object doInHibernate(Session session) throws HibernateException {
                Query q = session.createQuery(sb.toString());
                q.setParameter("eid", container.getEid());
                q.setParameter("userId", userId);
                return q.uniqueResult();
            }
        };
        return (MembershipCmImpl)getHibernateTemplate().execute(hc);
    }
public Meeting newSectionMeeting(String sectionEid, String location, Time startTime, Time finishTime, String notes) {
Section section = (Section)getObjectByEid(sectionEid, SectionCmImpl.class.getName());
MeetingCmImpl meeting = new MeetingCmImpl(section, location, startTime, finishTime, notes);
meeting.setCreatedBy(authn.getUserEid());
meeting.setCreatedDate(new Date());
Set<Meeting> meetings = section.getMeetings();
if(meetings == null) {
meetings = new HashSet<Meeting>();
section.setMeetings(meetings);
}
return meeting;
}
    /**
     * Deletes an academic session and cascades the removal to every course offering
     * in that session (each offering removal cascades further; see
     * removeCourseOffering).
     */
    public void removeAcademicSession(String eid) {
        AcademicSessionCmImpl as = (AcademicSessionCmImpl)getObjectByEid(eid, AcademicSessionCmImpl.class.getName());
        // Remove the course offerings in this academic session
        List<CourseOffering> courseOfferings = (List<CourseOffering>) getHibernateTemplate().find("select co from CourseOfferingCmImpl as co where co.academicSession.eid = ?", eid);
        for(Iterator<CourseOffering> iter = courseOfferings.iterator(); iter.hasNext();) {
            removeCourseOffering(iter.next().getEid());
        }
        // Remove the academic session itself
        getHibernateTemplate().delete(as);
    }
    /**
     * Deletes a canonical course: detaches it from any cross-listing equivalency,
     * removes all of its course offerings (which cascade further), then deletes the
     * canonical course itself.
     */
    public void removeCanonicalCourse(String eid) {
        CanonicalCourseCmImpl cc = (CanonicalCourseCmImpl)getObjectByEid(eid, CanonicalCourseCmImpl.class.getName());
        // Remove any equivalents
        removeEquiv(cc);
        // Remove the associated course offerings (see removeCourseOffering for further cascades)
        Set<CourseOffering> coSet = new HashSet<CourseOffering>((List<CourseOffering>) getHibernateTemplate().findByNamedQueryAndNamedParam("findCourseOfferingsByCanonicalCourse", "canonicalCourseEid",eid));
        for(Iterator<CourseOffering> iter = coSet.iterator(); iter.hasNext();) {
            CourseOffering co = iter.next();
            removeCourseOffering(co.getEid());
        }
        getHibernateTemplate().delete(cc);
    }
    /**
     * Deletes a course offering and everything that hangs off it: its memberships,
     * its top-level sections (removeSection), and its enrollment sets
     * (removeEnrollmentSet), then the offering itself.
     */
    public void removeCourseOffering(String eid) {
        CourseOfferingCmImpl co = (CourseOfferingCmImpl)getObjectByEid(eid, CourseOfferingCmImpl.class.getName());
        // Remove the memberships
        for(Iterator iter = getMemberships(co).iterator(); iter.hasNext();) {
            getHibernateTemplate().delete(iter.next());
        }
        // Remove the sections
        List sections = getHibernateTemplate().findByNamedQueryAndNamedParam(
            "findTopLevelSectionsInCourseOffering", "courseOffering",co);
        for(Iterator iter = sections.iterator(); iter.hasNext();) {
            Section sec = (Section)iter.next();
            removeSection(sec.getEid());
        }
        List enrollmentSets = getHibernateTemplate().findByNamedQueryAndNamedParam(
            "findEnrollmentSetsByCourseOffering", "courseOfferingEid",eid);
        // Remove the enrollment sets
        for(Iterator iter = enrollmentSets.iterator(); iter.hasNext();) {
            EnrollmentSet enr = (EnrollmentSet)iter.next();
            removeEnrollmentSet(enr.getEid());
        }
        // Remove the course offering itself
        getHibernateTemplate().delete(co);
    }
    /** Deletes a course set after deleting all of its memberships. */
    public void removeCourseSet(String eid) {
        CourseSetCmImpl cs = (CourseSetCmImpl)getObjectByEid(eid, CourseSetCmImpl.class.getName());
        // Remove the memberships
        for(Iterator iter = getMemberships(cs).iterator(); iter.hasNext();) {
            getHibernateTemplate().delete(iter.next());
        }
        // Remove the course set itself
        getHibernateTemplate().delete(cs);
    }
    /** Deletes an enrollment set after hard-deleting all of its enrollments. */
    public void removeEnrollmentSet(String eid) {
        EnrollmentSetCmImpl es = (EnrollmentSetCmImpl)getObjectByEid(eid, EnrollmentSetCmImpl.class.getName());
        List enrollments = getHibernateTemplate().findByNamedQueryAndNamedParam(
            "findEnrollments", "enrollmentSetEid", eid);
        for(Iterator iter = enrollments.iterator(); iter.hasNext();) {
            getHibernateTemplate().delete(iter.next());
        }
        // Remove the enrollment set
        getHibernateTemplate().delete(es);
    }
    /** Deletes a section after deleting all of its memberships. */
    public void removeSection(String eid) {
        SectionCmImpl sec = (SectionCmImpl)getObjectByEid(eid, SectionCmImpl.class.getName());
        // Remove the memberships
        for(Iterator iter = getMemberships(sec).iterator(); iter.hasNext();) {
            getHibernateTemplate().delete(iter.next());
        }
        // Remove the section itself
        getHibernateTemplate().delete(sec);
    }
public SectionCategory addSectionCategory(String categoryCode, String categoryDescription) {
SectionCategoryCmImpl cat = new SectionCategoryCmImpl(categoryCode, categoryDescription);
getHibernateTemplate().save(cat);
return cat;
}
// TODO: The following two methods were copied from CM Service. Consolidate them.
/**
 * Generic eid-based lookup. This is "coding by convention": the generated HQL
 * always binds a single named parameter called "eid".
 *
 * @param eid The eid of the object we're trying to load
 * @param className The name of the class / interface we're looking for
 * @return The object, if found
 * @throws IdNotFoundException if no object with the given eid exists
 */
private Object getObjectByEid(final String eid, final String className) throws IdNotFoundException {
    HibernateCallback callback = new HibernateCallback() {
        public Object doInHibernate(Session session) throws HibernateException {
            String hql = "from " + className + " as obj where obj.eid=:eid";
            Query query = session.createQuery(hql);
            query.setParameter("eid", eid);
            Object match = query.uniqueResult();
            if (match == null) {
                // No row for this eid; surface it as a domain-level exception.
                throw new IdNotFoundException(eid, className);
            }
            return match;
        }
    };
    return getHibernateTemplate().execute(callback);
}
/**
 * Loads the memberships of a membership container. The query joins on the
 * container entity itself (not just its eid), since an eid alone may collide
 * with other kinds of objects that share the same eid.
 *
 * @param container the membership container whose members to load
 * @return the set of memberships, possibly empty
 */
private Set<Membership> getMemberships(final AbstractMembershipContainerCmImpl container) {
    // The container may be a dynamic proxy; resolve the class hibernate understands.
    final String className = Hibernate.getClass(container).getName();
    HibernateCallback callback = new HibernateCallback() {
        public Object doInHibernate(Session session) throws HibernateException {
            String hql = "select mbr from MembershipCmImpl as mbr, "
                + className
                + " as container where mbr.memberContainer=container "
                + "and container.eid=:eid";
            Query query = session.createQuery(hql);
            query.setParameter("eid", container.getEid());
            return query.list();
        }
    };
    return new HashSet<Membership>((List<Membership>) getHibernateTemplate().executeFind(callback));
}
/**
 * Marks exactly the given academic sessions as "current": sessions whose eid
 * appears in the list are flagged current, all others are un-flagged. The
 * flag is only written when it actually changes, so unchanged sessions are
 * not dirtied and needlessly re-persisted by hibernate.
 *
 * @param academicSessionEids eids of the sessions that should be current
 */
public void setCurrentAcademicSessions(final List<String> academicSessionEids) {
    HibernateCallback hc = new HibernateCallback() {
        public Object doInHibernate(Session session) throws HibernateException {
            List<AcademicSessionCmImpl> academicSessions = session.createQuery(
                "from AcademicSessionCmImpl")
                .list();
            for (AcademicSessionCmImpl academicSession : academicSessions) {
                boolean shouldBeCurrent = academicSessionEids.contains(academicSession.getEid());
                // Only touch the flag when it differs, to avoid needless updates.
                if (academicSession.isCurrent() != shouldBeCurrent) {
                    academicSession.setCurrent(shouldBeCurrent);
                }
            }
            return null;
        }
    };
    // This callback mutates entities and returns null, so use execute()
    // rather than executeFind(), which is intended for list-returning finds.
    getHibernateTemplate().execute(hc);
}
}
| apache-2.0 |
massakam/pulsar | pulsar-io/hdfs2/src/test/java/org/apache/pulsar/io/hdfs2/sink/seq/HdfsSequentialSinkTests.java | 2936 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.io.hdfs2.sink.seq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.testng.Assert.assertNotNull;
import org.apache.pulsar.io.hdfs2.sink.AbstractHdfsSinkTest;
import org.apache.pulsar.io.hdfs2.sink.seq.HdfsSequentialTextSink;
import org.testng.annotations.Test;
public class HdfsSequentialSinkTests extends AbstractHdfsSinkTest<Long, String> {
@Override
protected void createSink() {
sink = new HdfsSequentialTextSink();
}
@Test(enabled = false)
public final void write100Test() throws Exception {
map.put("filenamePrefix", "write100Test-seq");
map.put("fileExtension", ".seq");
map.put("syncInterval", 1000);
sink.open(map, mockSinkContext);
assertNotNull(sink);
send(100);
Thread.sleep(2000);
verify(mockRecord, times(100)).ack();
sink.close();
}
@Test(enabled = false)
public final void write5000Test() throws Exception {
map.put("filenamePrefix", "write5000Test-seq");
map.put("fileExtension", ".seq");
map.put("syncInterval", 1000);
sink.open(map, mockSinkContext);
assertNotNull(sink);
send(5000);
Thread.sleep(2000);
verify(mockRecord, times(5000)).ack();
sink.close();
}
@Test(enabled = false)
public final void tenSecondTest() throws Exception {
map.put("filenamePrefix", "tenSecondTest-seq");
map.put("fileExtension", ".seq");
map.put("syncInterval", 1000);
sink.open(map, mockSinkContext);
runFor(10);
sink.close();
}
@Test(enabled = false)
public final void bzip2CompressionTest() throws Exception {
map.put("filenamePrefix", "bzip2CompressionTest-seq");
map.put("compression", "BZIP2");
map.remove("fileExtension");
sink.open(map, mockSinkContext);
send(5000);
verify(mockRecord, times(5000)).ack();
}
@Test(enabled = false)
public final void deflateCompressionTest() throws Exception {
map.put("filenamePrefix", "deflateCompressionTest-seq");
map.put("compression", "DEFLATE");
map.remove("fileExtension");
sink.open(map, mockSinkContext);
send(5000);
verify(mockRecord, times(5000)).ack();
}
}
| apache-2.0 |
andrewvc/elasticsearch | src/main/java/org/elasticsearch/cluster/routing/RoutingException.java | 1237 | /*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing;
import org.elasticsearch.ElasticSearchException;
/**
 * A base class for all exceptions thrown by routing related operations.
 */
public class RoutingException extends ElasticSearchException {

    /**
     * @param message description of the routing failure
     */
    public RoutingException(String message) {
        super(message);
    }

    /**
     * @param message description of the routing failure
     * @param cause the underlying exception, preserved as the cause
     */
    public RoutingException(String message, Throwable cause) {
        super(message, cause);
    }
}
xingguang2013/gs-collections | collections-api/src/main/java/com/gs/collections/api/bag/package-info.java | 1415 | /*
* Copyright 2014 Goldman Sachs.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * This package contains the interfaces of the Bag API.
* <p>
* A Bag is a {@link java.util.Collection} which contains elements that are unordered, and may contain duplicate entries. It adds a protocol for
* adding, removing, and determining the number of occurrences for an item.
* <p>
 * This package contains three interfaces:
* <ul>
* <li>
* {@link com.gs.collections.api.bag.Bag} - contains the common API for Mutable and Immutable Bag.
* </li>
* <li>
* {@link com.gs.collections.api.bag.MutableBag} - a Bag whose contents can be altered after initialization.
* </li>
* <li>
* {@link com.gs.collections.api.bag.ImmutableBag} - a Bag whose contents cannot be altered after initialization.
* </li>
* </ul>
*/
package com.gs.collections.api.bag;
| apache-2.0 |
adufilie/flex-sdk | modules/fxgutils/src/java/com/adobe/internal/fxg/dom/fills/SolidColorFillNode.java | 3271 | /*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.adobe.internal.fxg.dom.fills;
import static com.adobe.fxg.FXGConstants.*;
import com.adobe.fxg.FXGException;
import com.adobe.internal.fxg.dom.DOMParserHelper;
public class SolidColorFillNode extends AbstractFillNode
{
    //--------------------------------------------------------------------------
    //
    // Attributes
    //
    //--------------------------------------------------------------------------

    /**
     * The fill color: an RGB value written as #RRGGBB in the source document.
     * Defaults to black ("#000000").
     */
    public int color = COLOR_BLACK;

    /**
     * The fill opacity: a real number from 0 (transparent) to 1 (opaque).
     * Defaults to 1.
     */
    public double alpha = 1.0;

    //--------------------------------------------------------------------------
    //
    // FXGNode Implementation
    //
    //--------------------------------------------------------------------------

    /**
     * @return The unqualified name of a SolidColor node, without tag markup.
     */
    public String getNodeName()
    {
        return FXG_SOLIDCOLOR_ELEMENT;
    }

    /**
     * Handles the 'color' and 'alpha' attributes of a SolidColor node and
     * defers every other attribute to the superclass.
     * <p>Attributes include:
     * <ul>
     * <li><b>color </b> (String): An RGB value (in the form #RRGGBB) that
     * defines the single color value to fill the shape with.
     * Defaults to "#000000". </li>
     * <li><b>alpha </b> (Number): A real number value ranging from 0 to 1
     * specifying the opacity of the fill, with 1 being opaque.
     * Defaults to 1. </li>
     * </ul>
     * </p>
     *
     * @param name - the unqualified attribute name.
     * @param value - the attribute value.
     *
     * @throws FXGException if a value is out of the valid range.
     * @see com.adobe.internal.fxg.dom.fills.AbstractFillNode#setAttribute(java.lang.String, java.lang.String)
     */
    @Override
    public void setAttribute(String name, String value)
    {
        if (FXG_COLOR_ATTRIBUTE.equals(name))
        {
            color = DOMParserHelper.parseRGB(this, value, name);
        }
        else if (FXG_ALPHA_ATTRIBUTE.equals(name))
        {
            alpha = DOMParserHelper.parseDouble(this, value, name, ALPHA_MIN_INCLUSIVE, ALPHA_MAX_INCLUSIVE, alpha);
        }
        else
        {
            super.setAttribute(name, value);
        }
    }
}
| apache-2.0 |
jwren/intellij-community | plugins/kotlin/refIndex/tests/testData/compilerIndex/functions/members/extension/JavaClass.java | 118 | public class JavaClass {
public static void main(String[] args) {
new Main().memberExtension(42);
}
}
| apache-2.0 |
guiling/msopentech-tools-for-intellij | src/azure/com/microsoftopentechnologies/intellij/ui/KeyFeaturesStep.java | 2462 | /**
* Copyright 2014 Microsoft Open Technologies Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.microsoftopentechnologies.intellij.ui;
import com.intellij.openapi.ui.ValidationInfo;
import com.intellij.ui.wizard.WizardNavigationState;
import com.intellij.ui.wizard.WizardStep;
import com.microsoftopentechnologies.intellij.ui.components.AzureWizardStep;
import org.jdesktop.swingx.JXHyperlink;
import javax.swing.*;
import java.net.URI;
import java.util.HashMap;
import java.util.Map;
import static com.microsoftopentechnologies.intellij.ui.messages.AzureBundle.message;
/**
 * Wizard step that lets the user toggle key Azure features (session affinity,
 * caching, remote debugging), each with a "learn more" hyperlink.
 */
public class KeyFeaturesStep extends AzureWizardStep {
    // Bound by the GUI designer form; do not rename.
    private JPanel rootPanel;
    private JCheckBox sessionAffinityCheckBox;
    private JCheckBox cachingCheckBox;
    private JCheckBox debuggingCheckBox;
    private JXHyperlink ssnAffLnk;
    private JXHyperlink cachLnk;
    private JXHyperlink debugLnk;

    public KeyFeaturesStep(final String title) {
        super(title, message("keyFtrPgMsg"));
        init();
    }

    /** Wires each feature's "learn more" hyperlink to its documentation URL. */
    public void init() {
        initLink(ssnAffLnk, message("ssnAffLnk"));
        initLink(cachLnk, message("cachLnk"));
        initLink(debugLnk, message("debugLnk"));
    }

    @Override
    public JComponent prepare(WizardNavigationState state) {
        rootPanel.revalidate();
        return rootPanel;
    }

    private void initLink(JXHyperlink link, String linkText) {
        link.setURI(URI.create(linkText));
        link.setText(message("lblLearnMore"));
    }

    /** @return the current checkbox selections keyed by feature flag name. */
    public Map<String, Boolean> getValues() {
        Map<String, Boolean> selections = new HashMap<String, Boolean>();
        selections.put("ssnAffChecked", sessionAffinityCheckBox.isSelected());
        selections.put("cacheChecked", cachingCheckBox.isSelected());
        selections.put("debugChecked", debuggingCheckBox.isSelected());
        return selections;
    }

    @Override
    public ValidationInfo doValidate() {
        // No per-step validation needed; all inputs are optional toggles.
        return null;
    }
}
| apache-2.0 |
KirillMakarov/edx-app-android | VideoLocker/src/main/java/org/edx/mobile/loader/CoursesVisibleLoader.java | 3110 | package org.edx.mobile.loader;
import android.content.Context;
import android.os.Bundle;
import android.support.v4.content.AsyncTaskLoader;
import org.edx.mobile.core.IEdxEnvironment;
import org.edx.mobile.logger.Logger;
/**
 * Loader that reads or writes the user's "course share consent" flag via the
 * edX service manager, off the UI thread.
 *
 * Created by yervant on 1/19/15.
 */
public class CoursesVisibleLoader extends AsyncTaskLoader<AsyncTaskResult<Boolean>>{

    // Last delivered result, cached so a restarted loader can re-deliver it.
    AsyncTaskResult<Boolean> mData;

    // Non-null when the loader should SET the consent flag to this value.
    private Boolean setToValue = null;
    // True when the loader should GET the current consent value
    // (only honored while setToValue is null).
    private boolean fetchValue = false;

    // Bundle keys selecting the loader's mode (set vs. get).
    public static final String KEY_SET_TO_VALUE = "key_set_to_value";
    public static final String KEY_GET_VALUE = "key_get_value";

    private static final Logger logger = new Logger(CoursesVisibleLoader.class);

    IEdxEnvironment environment;

    public CoursesVisibleLoader(Context context, Bundle bundle, IEdxEnvironment environment){
        super(context);
        this.environment = environment;
        // The bundle decides whether this instance reads or writes the flag.
        if(bundle != null){
            if(bundle.containsKey(KEY_SET_TO_VALUE))
                setToValue = bundle.getBoolean(KEY_SET_TO_VALUE);
            else if(bundle.containsKey(KEY_GET_VALUE))
                fetchValue = true;
        }
    }

    @Override
    public AsyncTaskResult<Boolean> loadInBackground() {
        AsyncTaskResult<Boolean> result = new AsyncTaskResult<>();
        try {
            if(fetchValue && setToValue == null){
                // Read mode: fetch whether consent is currently granted.
                Boolean isSet = environment.getServiceManager().getUserCourseShareConsent();
                result.setResult(isSet);
            }
            else if(setToValue != null){
                // Write mode: persist the requested consent value.
                Boolean wasSet = environment.getServiceManager().setUserCourseShareConsent(setToValue);
                result.setResult(wasSet);
            }
            else{
                // Neither mode was requested via the bundle; surface a usage error.
                result.setEx(new IllegalArgumentException("Course sharing: either KEY_GET_VALUE or KEY_SET_TO_VALUE must be set."));
            }
        } catch (Exception e) {
            logger.error(e);
            result.setEx(e);
        }
        return result;
    }

    @Override
    protected void onReset() {
        onStopLoading();
        if (mData != null) {
            releaseResources(mData);
            mData = null;
        }
    }

    @Override
    protected void onStartLoading() {
        // Re-deliver a cached result immediately, then reload if the content
        // changed or nothing has been loaded yet.
        if (mData != null) {
            deliverResult(mData);
        }
        if (takeContentChanged() || mData == null) {
            forceLoad();
        }
    }

    @Override
    protected void onStopLoading() {
        cancelLoad();
    }

    @Override
    public void onCanceled(AsyncTaskResult<Boolean> data) {
        super.onCanceled(data);
        releaseResources(data);
    }

    @Override
    public void deliverResult(AsyncTaskResult<Boolean> data) {
        // Drop results that arrive after a reset.
        if (isReset()) {
            releaseResources(data);
            return;
        }
        AsyncTaskResult<Boolean> oldData = mData;
        mData = data;
        if (isStarted()) {
            super.deliverResult(data);
        }
        // Release the previously cached result, if it was replaced.
        if (oldData != null && oldData != data) {
            releaseResources(oldData);
        }
    }

    private void releaseResources(AsyncTaskResult<Boolean> data) {
        // Intentionally empty: the result holds no resources needing release.
    }
}
| apache-2.0 |
chanakaudaya/developer-studio | esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/keyhandlers/ESBKeyHandler.java | 1219 | package org.wso2.developerstudio.eclipse.gmf.keyhandlers;
import org.eclipse.core.commands.AbstractHandler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.PlatformUI;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbMultiPageEditor;
/**
 * Keyboard shortcut handler that moves focus to the toolbar of every open
 * ESB multi-page editor. Extends AbstractHandler, an IHandler base class.
 *
 * @see org.eclipse.core.commands.IHandler
 * @see org.eclipse.core.commands.AbstractHandler
 */
public class ESBKeyHandler extends AbstractHandler {

    /**
     * The constructor.
     */
    public ESBKeyHandler() {
    }

    /**
     * Executes the command: walks all editors of the active workbench page
     * and asks each ESB editor to focus its toolbar.
     */
    public Object execute(ExecutionEvent event) throws ExecutionException {
        IEditorPart[] openEditors = PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage().getEditors();
        for (IEditorPart editor : openEditors) {
            if (editor instanceof EsbMultiPageEditor) {
                ((EsbMultiPageEditor) editor).focusToolbar();
            }
        }
        return null;
    }
}
| apache-2.0 |
salyh/javamailspec | geronimo-jaxws_2.1_spec/src/main/java/javax/xml/ws/wsaddressing/package-info.java | 1027 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
@javax.xml.bind.annotation.XmlSchema(namespace = "http://www.w3.org/2005/08/addressing",
location = "http://www.w3.org/2006/03/addressing/ws-addr.xsd")
package javax.xml.ws.wsaddressing;
| apache-2.0 |
apereo/cas | support/cas-server-support-gauth-core-mfa/src/main/java/org/apereo/cas/gauth/token/GoogleAuthenticatorToken.java | 491 | package org.apereo.cas.gauth.token;
import org.apereo.cas.authentication.OneTimeToken;
import lombok.NoArgsConstructor;
/**
 * This is {@link GoogleAuthenticatorToken}: a one-time token issued for
 * Google Authenticator multifactor authentication.
 *
 * @author Misagh Moayyed
 * @since 5.1.0
 */
@NoArgsConstructor
public class GoogleAuthenticatorToken extends OneTimeToken {

    private static final long serialVersionUID = 8494781829798273770L;

    /**
     * Creates a token bound to a user.
     *
     * @param token the one-time token value
     * @param userId the id of the user the token belongs to
     */
    public GoogleAuthenticatorToken(final Integer token, final String userId) {
        super(token, userId);
    }
}
| apache-2.0 |
ebyhr/presto | core/trino-main/src/test/java/io/trino/operator/TestGroupedTopNRankBuilder.java | 10850 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.operator;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import io.trino.spi.Page;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeOperators;
import io.trino.sql.gen.JoinCompiler;
import io.trino.type.BlockTypeOperators;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.IntStream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.trino.RowPageBuilder.rowPageBuilder;
import static io.trino.RowPagesBuilder.rowPagesBuilder;
import static io.trino.operator.PageAssertions.assertPageEquals;
import static io.trino.operator.UpdateMemory.NOOP;
import static io.trino.spi.connector.SortOrder.ASC_NULLS_LAST;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
/**
 * Tests for {@code GroupedTopNRankBuilder}: empty input, single- and
 * multi-group top-N with RANK() semantics (ties share a rank), and yielding
 * when the group-by hash cannot reserve memory.
 */
public class TestGroupedTopNRankBuilder
{
    @DataProvider
    public static Object[][] produceRanking()
    {
        // Each ranking test runs both with and without emitting the rank channel.
        return new Object[][] {{true}, {false}};
    }

    @Test
    public void testEmptyInput()
    {
        // Comparator, equality, and hashing must never be invoked when no
        // pages are processed, hence the throwing stubs.
        GroupedTopNBuilder groupedTopNBuilder = new GroupedTopNRankBuilder(
                ImmutableList.of(BIGINT),
                (left, leftPosition, right, rightPosition) -> {
                    throw new UnsupportedOperationException();
                },
                new PageWithPositionEqualsAndHash()
                {
                    @Override
                    public boolean equals(Page left, int leftPosition, Page right, int rightPosition)
                    {
                        throw new UnsupportedOperationException();
                    }

                    @Override
                    public long hashCode(Page page, int position)
                    {
                        throw new UnsupportedOperationException();
                    }
                },
                5,
                false,
                new NoChannelGroupByHash());
        assertFalse(groupedTopNBuilder.buildResult().hasNext());
    }

    @Test(dataProvider = "produceRanking")
    public void testSingleGroupTopN(boolean produceRanking)
    {
        TypeOperators typeOperators = new TypeOperators();
        BlockTypeOperators blockTypeOperators = new BlockTypeOperators(typeOperators);
        List<Type> types = ImmutableList.of(DOUBLE);
        GroupedTopNBuilder groupedTopNBuilder = new GroupedTopNRankBuilder(
                types,
                new SimplePageWithPositionComparator(types, ImmutableList.of(0), ImmutableList.of(ASC_NULLS_LAST), typeOperators),
                new SimplePageWithPositionEqualsAndHash(types, ImmutableList.of(0), blockTypeOperators),
                3,
                produceRanking,
                new NoChannelGroupByHash());
        // Expected effect: [0.2 x 1 => rank=1, 0.3 x 2 => rank=2]
        assertTrue(groupedTopNBuilder.processPage(
                rowPageBuilder(types)
                        .row(0.3)
                        .row(0.3)
                        .row(0.2)
                        .build()).process());
        // Page should be dropped, because single value 0.4 is too large to be considered
        assertTrue(groupedTopNBuilder.processPage(
                rowPageBuilder(types)
                        .row(0.4)
                        .build()).process());
        // Next page should cause 0.3 values to be evicted (first page will be compacted)
        // Expected effect: [0.1 x 2 => rank 1, 0.2 x 3 => rank 3]
        assertTrue(groupedTopNBuilder.processPage(
                rowPageBuilder(types)
                        .row(0.1)
                        .row(0.2)
                        .row(0.3)
                        .row(0.2)
                        .row(0.1)
                        .build()).process());
        List<Page> output = ImmutableList.copyOf(groupedTopNBuilder.buildResult());
        assertEquals(output.size(), 1);

        // Output carries an extra BIGINT rank column unless ranking is disabled.
        List<Type> outputTypes = ImmutableList.of(DOUBLE, BIGINT);
        Page expected = rowPageBuilder(outputTypes)
                .row(0.1, 1)
                .row(0.1, 1)
                .row(0.2, 3)
                .row(0.2, 3)
                .row(0.2, 3)
                .build();
        if (!produceRanking) {
            outputTypes = outputTypes.subList(0, outputTypes.size() - 1);
            expected = dropLastColumn(expected);
        }
        assertPageEquals(outputTypes, getOnlyElement(output), expected);
    }

    @Test(dataProvider = "produceRanking")
    public void testMultiGroupTopN(boolean produceRanking)
    {
        TypeOperators typeOperators = new TypeOperators();
        BlockTypeOperators blockTypeOperators = new BlockTypeOperators(typeOperators);
        List<Type> types = ImmutableList.of(BIGINT, DOUBLE);
        // Channel 0 is the grouping key; channel 1 is the ordering value.
        GroupByHash groupByHash = createGroupByHash(ImmutableList.of(types.get(0)), ImmutableList.of(0), NOOP, typeOperators, blockTypeOperators);
        GroupedTopNBuilder groupedTopNBuilder = new GroupedTopNRankBuilder(
                types,
                new SimplePageWithPositionComparator(types, ImmutableList.of(1), ImmutableList.of(ASC_NULLS_LAST), typeOperators),
                new SimplePageWithPositionEqualsAndHash(types, ImmutableList.of(1), blockTypeOperators),
                3,
                produceRanking,
                groupByHash);
        // Expected effect:
        // Group 0 [0.2 x 1 => rank=1, 0.3 x 3 => rank=2]
        // Group 1 [0.2 x 1 => rank=1]
        assertTrue(groupedTopNBuilder.processPage(
                rowPageBuilder(types)
                        .row(0L, 0.3)
                        .row(0L, 0.3)
                        .row(0L, 0.3)
                        .row(0L, 0.2)
                        .row(1L, 0.2)
                        .build()).process());
        // Page should be dropped, because all values too large to be considered
        assertTrue(groupedTopNBuilder.processPage(
                rowPageBuilder(types)
                        .row(0L, 0.4)
                        .row(1L, 0.4)
                        .build()).process());
        // Next page should cause evict 0.3 from group 0, which should cause the first page to be compacted
        // Expected effect:
        // Group 0 [0.1 x 1 => rank=1, 0.2 x 2 => rank=2]
        // Group 1 [0.2 x 2 => rank=1, 0.3 x 2 => rank=3]
        assertTrue(groupedTopNBuilder.processPage(
                rowPageBuilder(types)
                        .row(0L, 0.1)
                        .row(1L, 0.2)
                        .row(0L, 0.3)
                        .row(0L, 0.2)
                        .row(1L, 0.5)
                        .row(1L, 0.4)
                        .row(1L, 0.3)
                        .row(1L, 0.3)
                        .build()).process());
        List<Page> output = ImmutableList.copyOf(groupedTopNBuilder.buildResult());
        assertEquals(output.size(), 1);

        List<Type> outputTypes = ImmutableList.of(BIGINT, DOUBLE, BIGINT);
        Page expected = rowPageBuilder(outputTypes)
                .row(0, 0.1, 1)
                .row(0, 0.2, 2)
                .row(0, 0.2, 2)
                .row(1, 0.2, 1)
                .row(1, 0.2, 1)
                .row(1, 0.3, 3)
                .row(1, 0.3, 3)
                .build();
        if (!produceRanking) {
            outputTypes = outputTypes.subList(0, outputTypes.size() - 1);
            expected = dropLastColumn(expected);
        }
        assertPageEquals(outputTypes, getOnlyElement(output), expected);
    }

    @Test
    public void testYield()
    {
        TypeOperators typeOperators = new TypeOperators();
        BlockTypeOperators blockTypeOperators = new BlockTypeOperators(typeOperators);
        List<Type> types = ImmutableList.of(BIGINT, DOUBLE);
        Page input = rowPagesBuilder(types)
                .row(1L, 0.3)
                .row(1L, 0.2)
                .row(1L, 0.9)
                .row(1L, 0.1)
                .build()
                .get(0);
        input.compact();

        // The group-by hash reports "no memory" until unblock is set, which
        // forces processPage() to yield (return false) on the first attempts.
        AtomicBoolean unblock = new AtomicBoolean();
        GroupByHash groupByHash = createGroupByHash(ImmutableList.of(types.get(0)), ImmutableList.of(0), unblock::get, typeOperators, blockTypeOperators);
        GroupedTopNBuilder groupedTopNBuilder = new GroupedTopNRankBuilder(
                types,
                new SimplePageWithPositionComparator(types, ImmutableList.of(1), ImmutableList.of(ASC_NULLS_LAST), typeOperators),
                new SimplePageWithPositionEqualsAndHash(types, ImmutableList.of(1), blockTypeOperators),
                5,
                false,
                groupByHash);
        Work<?> work = groupedTopNBuilder.processPage(input);
        assertFalse(work.process());
        assertFalse(work.process());
        unblock.set(true);
        assertTrue(work.process());
        List<Page> output = ImmutableList.copyOf(groupedTopNBuilder.buildResult());
        assertEquals(output.size(), 1);

        Page expected = rowPagesBuilder(types)
                .row(1L, 0.1)
                .row(1L, 0.2)
                .row(1L, 0.3)
                .row(1L, 0.9)
                .build()
                .get(0);
        assertPageEquals(types, output.get(0), expected);
    }

    // Helper building a single-group-per-partition hash over the given key channels.
    private GroupByHash createGroupByHash(List<Type> partitionTypes, List<Integer> partitionChannels, UpdateMemory updateMemory, TypeOperators typeOperators, BlockTypeOperators blockTypeOperators)
    {
        return GroupByHash.createGroupByHash(
                partitionTypes,
                Ints.toArray(partitionChannels),
                Optional.empty(),
                1,
                false,
                new JoinCompiler(typeOperators),
                blockTypeOperators,
                updateMemory);
    }

    // Strips the trailing (rank) column, used when ranking output is disabled.
    private static Page dropLastColumn(Page page)
    {
        checkArgument(page.getChannelCount() > 0);
        return page.getColumns(IntStream.range(0, page.getChannelCount() - 1).toArray());
    }
}
| apache-2.0 |
ddiroma/pentaho-kettle | plugins/meta-inject/src/test/java/org/pentaho/di/trans/steps/metainject/MetaInjectMigrationTest.java | 3447 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2020 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.metainject;
import static org.junit.Assert.assertEquals;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import org.junit.Test;
/**
 * Verifies that {@code MetaInjectMigration.migrate} renames legacy target
 * attribute keys while leaving step names, the detail flag, and already-valid
 * keys untouched.
 */
public class MetaInjectMigrationTest {

  @Test
  public void testMigration() {
    //First 3 old mappings are the mappings that need migration. The last one is an example of a mapping that does not need migration
    String[] oldMappingNames = { "SCHENAMENAMEFIELD", "DATABASE_FIELDNAME", "STREAM_FIELDNAME", "DATE_RANGE_START_FIELD" };
    //The expected new mappings. Since the migration process removes and adds again the new mapping, the one that does
    // not need change will become the first one.
    String[] newMappingNames = { "DATE_RANGE_START_FIELD", "SCHEMANAMEFIELD", "DATABASE_FIELD_NAME", "DATABASE_STREAM_NAME" };
    //Expected Step Names. After the migration the Step names should not change.
    String[] expectedStepName = { "step4", "step1", "step2", "step3" };

    // Build the pre-migration mapping, one entry per old name, step1..step4.
    Map<TargetStepAttribute, SourceStepField> targetSourceMapping = new LinkedHashMap<>();
    for ( int i = 0; i < oldMappingNames.length; i++ ) {
      String stepName = "step" + ( i + 1 );
      targetSourceMapping.put(
        new TargetStepAttribute( stepName, oldMappingNames[i], true ),
        new SourceStepField( stepName, "field" ) );
    }

    //Migrate
    MetaInjectMigration.migrate( targetSourceMapping );

    /* Assert that after the migration the same number of mappings exist and that names that need migration are changed
       and all the others are kept the same */
    assertEquals( "After the migration the same number of mapping should exist", 4, targetSourceMapping.size() );
    int newMappingIndex = 0;
    for ( Map.Entry<TargetStepAttribute, SourceStepField> entry : targetSourceMapping.entrySet() ) {
      assertEquals( "after the migration names that need migration should change to the new ones and all other should be kept the same",
        newMappingNames[newMappingIndex], entry.getKey().getAttributeKey() );
      assertEquals( "after the migration the step names should not have changed", expectedStepName[newMappingIndex], entry.getKey().getStepname() );
      assertEquals( "after the migration the detail option should not have changed", true, entry.getKey().isDetail() );
      ++newMappingIndex;
    }
  }
}
| apache-2.0 |
shun634501730/java_source_cn | src_en/com/sun/corba/se/pept/transport/EventHandler.java | 1296 | /*
* Copyright (c) 2003, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package com.sun.corba.se.pept.transport;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import com.sun.corba.se.spi.orbutil.threadpool.Work;
/**
 * @author Harold Carr
 *
 * This should only be registered with ONE selector.
 */
public interface EventHandler
{
    /** Controls whether the select thread is used to wait for events on this handler. */
    public void setUseSelectThreadToWait(boolean x);
    public boolean shouldUseSelectThreadToWait();

    /** @return the channel this handler watches for events. */
    public SelectableChannel getChannel();

    /** @return the selector interest ops to register this handler with. */
    public int getInterestOps();

    public void setSelectionKey(SelectionKey selectionKey);
    public SelectionKey getSelectionKey();

    /** Reacts to an event signaled on the channel. */
    public void handleEvent();

    // NOTE: if there is more than one interest op this does not
    // allow discrimination between different ops and how threading
    // is handled.
    public void setUseWorkerThreadForEvent(boolean x);
    public boolean shouldUseWorkerThreadForEvent();

    /** Work item dispatched to a worker thread when an event fires. */
    public void setWork(Work work);
    public Work getWork();

    // REVISIT: need base class with two derived.
    public Acceptor getAcceptor();
    public Connection getConnection();
}
// End of file.
| apache-2.0 |
android-ia/platform_tools_idea | plugins/InspectionGadgets/testsrc/com/siyeh/ig/j2me/SimplifiableIfStatementInspectionTest.java | 314 | package com.siyeh.ig.j2me;
import com.siyeh.ig.IGInspectionTestCase;
/**
 * Runs the InspectionGadgets test harness for
 * {@code SimplifiableIfStatementInspection} against the fixture files under
 * {@code com/siyeh/igtest/j2me/simplifiable_if_statement}.
 */
public class SimplifiableIfStatementInspectionTest extends IGInspectionTestCase {

    public void test() throws Exception {
        // Fixture directory holding the inspection's before/after test data.
        final String fixturePath = "com/siyeh/igtest/j2me/simplifiable_if_statement";
        doTest(fixturePath, new SimplifiableIfStatementInspection());
    }
} | apache-2.0 |
apache/aurora | src/main/java/org/apache/aurora/scheduler/resources/ResourceSettings.java | 1551 | /**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aurora.scheduler.resources;
import java.util.function.Supplier;
import com.beust.jcommander.Parameter;
import com.beust.jcommander.Parameters;
/**
* Control knobs for how Aurora treats different resource types.
*
* The command line handling seen here is non-standard. Normally we declare them in modules
* and then inject them via 'settings' classes. Unfortunately, this does not work here as we
* would need to perform the injection into the ResourceType enum. Enums are picky in that regard.
*/
@Parameters(separators = "=")
public class ResourceSettings {

    // Command-line flag (-enable_revocable_cpus): when true, CPUs are treated
    // as a revocable resource (see the @Parameter description). Defaults to true.
    @Parameter(names = "-enable_revocable_cpus",
        description = "Treat CPUs as a revocable resource.",
        arity = 1)
    public boolean enableRevocableCpus = true;

    // Command-line flag (-enable_revocable_ram): when true, RAM is treated
    // as a revocable resource. Defaults to false.
    @Parameter(names = "-enable_revocable_ram",
        description = "Treat RAM as a revocable resource.",
        arity = 1)
    public boolean enableRevocableRam = false;

    // Constant supplier for resource types that are never revocable.
    static final Supplier<Boolean> NOT_REVOCABLE = () -> false;
}
| apache-2.0 |
vdewillem/dynamic-extensions-for-alfresco | annotations-runtime/src/main/java/com/github/dynamicextensionsalfresco/resources/StaticUpdateStrategy.java | 504 | package com.github.dynamicextensionsalfresco.resources;
import org.alfresco.service.cmr.repository.NodeRef;
import org.springframework.core.io.Resource;
/**
* @author Laurent Van der Linden
*/
public class StaticUpdateStrategy implements UpdateStrategy {

    // The constant answer returned for every update query, regardless of
    // the resource or node being asked about.
    private final boolean fixedResult;

    /**
     * @param fixedResult the boolean this strategy will always return
     */
    public StaticUpdateStrategy(boolean fixedResult) {
        this.fixedResult = fixedResult;
    }

    /**
     * Ignores both arguments and returns the preconfigured answer.
     */
    @Override
    public boolean updateNode(Resource resource, NodeRef nodeRef) {
        return fixedResult;
    }
}
| apache-2.0 |
arvindn05/osc-core | osc-server/src/main/java/org/osc/core/broker/service/tasks/conformance/openstack/deploymentspec/OnboardDAITask.java | 5293 | /*******************************************************************************
* Copyright (c) Intel Corporation
* Copyright (c) 2017
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.osc.core.broker.service.tasks.conformance.openstack.deploymentspec;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Set;
import javax.persistence.EntityManager;
import org.osc.core.broker.job.lock.LockObjectReference;
import org.osc.core.broker.model.entities.appliance.DistributedApplianceInstance;
import org.osc.core.broker.model.entities.virtualization.openstack.DeploymentSpec;
import org.osc.core.broker.model.plugin.ApiFactoryService;
import org.osc.core.broker.service.persistence.OSCEntityManager;
import org.osc.core.broker.service.tasks.TransactionalTask;
import org.slf4j.LoggerFactory;
import org.osc.sdk.controller.DefaultInspectionPort;
import org.osc.sdk.controller.DefaultNetworkPort;
import org.osc.sdk.controller.api.SdnRedirectionApi;
import org.osc.sdk.controller.element.Element;
import org.osgi.service.component.annotations.Component;
import org.osgi.service.component.annotations.Reference;
import org.slf4j.Logger;
@Component(service=OnboardDAITask.class)
public class OnboardDAITask extends TransactionalTask {
    private static final Logger log = LoggerFactory.getLogger(OnboardDAITask.class);

    // The distributed appliance instance whose inspection ports this task
    // registers with the SDN controller.
    private DistributedApplianceInstance dai;

    @Reference
    private ApiFactoryService apiFactoryService;

    /**
     * Prototype-style factory: builds a new task bound to the given DAI,
     * copying the injected collaborators from this (component-managed) instance.
     *
     * @param dai the appliance instance to onboard
     * @return a new, fully wired task instance
     */
    public OnboardDAITask create(DistributedApplianceInstance dai) {
        OnboardDAITask task = new OnboardDAITask();
        task.dai = dai;
        task.name = task.getName();
        task.apiFactoryService = this.apiFactoryService;
        task.dbConnectionManager = this.dbConnectionManager;
        task.txBroadcastUtil = this.txBroadcastUtil;
        return task;
    }

    @Override
    public String getName() {
        return "Onboarding Distributed Appliance Instance '" + this.dai.getName() + "'";
    }

    /**
     * Registers the DAI's ingress/egress inspection ports with the SDN
     * redirection API, choosing the registration style supported by the
     * virtual system (Neutron SFC, port groups, or plain).
     */
    @Override
    public void executeTransaction(EntityManager em) throws Exception {
        // Re-load the DAI into this transaction's persistence context.
        this.dai = em.find(DistributedApplianceInstance.class, this.dai.getId());

        try (SdnRedirectionApi controller = this.apiFactoryService.createNetworkRedirectionApi(this.dai)) {
            // Ports are identified by their OpenStack port id plus MAC address.
            DefaultNetworkPort ingressPort = new DefaultNetworkPort(this.dai.getInspectionOsIngressPortId(),
                    this.dai.getInspectionIngressMacAddress());
            DefaultNetworkPort egressPort = new DefaultNetworkPort(this.dai.getInspectionOsEgressPortId(),
                    this.dai.getInspectionEgressMacAddress());
            DeploymentSpec ds = this.dai.getDeploymentSpec();
            if (this.apiFactoryService.supportsNeutronSFC(this.dai.getVirtualSystem())) {
                // Neutron SFC: the controller returns (or reuses) a port group;
                // persist its id on the DeploymentSpec only if we created it here.
                String portGroupId = ds.getPortGroupId();
                boolean pgAlreadyCreatedByOther = (portGroupId != null);
                Element element = controller
                        .registerInspectionPort(new DefaultInspectionPort(ingressPort, egressPort, null, portGroupId));
                portGroupId = element.getParentId();
                log.info(String.format("Setting port_group_id to %s on DAI %s (id %d) for Deployment Spec %s (id: %d)",
                        portGroupId, this.dai.getName(), this.dai.getId(), ds.getName(), ds.getId()));
                if (!pgAlreadyCreatedByOther) {
                    // Re-read the spec in this persistence context before updating it.
                    ds = em.find(DeploymentSpec.class, ds.getId());
                    ds.setPortGroupId(portGroupId);
                    OSCEntityManager.update(em, ds, this.txBroadcastUtil);
                }
            } else if (this.apiFactoryService.supportsPortGroup(this.dai.getVirtualSystem())) {
                // Port-group style: both ports must be parented to the project's
                // domain before registration; skip (with a warning) if unresolvable.
                String domainId = OpenstackUtil.extractDomainId(ds.getProjectId(), ds.getProjectName(),
                        ds.getVirtualSystem().getVirtualizationConnector(),
                        new ArrayList<>(Arrays.asList(ingressPort)));
                ingressPort.setParentId(domainId);
                egressPort.setParentId(domainId);
                if (domainId != null) {
                    //Element Object is not used in DefaultInspectionPort for now, hence null
                    controller.registerInspectionPort(new DefaultInspectionPort(ingressPort, egressPort, null));
                } else {
                    log.warn("DomainId is missing, cannot be null");
                }
            } else {
                // Plain registration: no port group and no domain parenting needed.
                controller.registerInspectionPort(new DefaultInspectionPort(ingressPort, egressPort, null));
            }
        }
    }

    @Override
    public Set<LockObjectReference> getObjects() {
        return LockObjectReference.getObjectReferences(this.dai);
    }
}
| apache-2.0 |
gazarenkov/che-sketch | plugins/plugin-svn/che-plugin-svn-ext-server/src/main/java/org/eclipse/che/plugin/svn/server/upstream/UpstreamUtils.java | 6648 | /*******************************************************************************
* Copyright (c) 2012-2017 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.plugin.svn.server.upstream;
import org.eclipse.che.api.core.util.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.eclipse.che.commons.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
* Utilities class containing logic copied/pasted from Git extension that could/should be put into a core VCS API.
*/
public class UpstreamUtils {

    private static final Logger LOG = LoggerFactory.getLogger(UpstreamUtils.class);

    /**
     * Private constructor: static utility class, not meant to be instantiated.
     */
    private UpstreamUtils() { }

    /**
     * Executes a command line executable based on the arguments specified.
     *
     * @param env the optional environment variables
     * @param cmd the command to run
     * @param args the optional command arguments
     * @param timeout the optional timeout in milliseconds
     * @param workingDirectory the optional working directory
     *
     * @return the command line result
     *
     * @throws IOException if something goes wrong
     */
    public static CommandLineResult executeCommandLine(@Nullable final Map<String, String> env,
                                                       final String cmd,
                                                       @Nullable final String[] args,
                                                       final long timeout,
                                                       @Nullable final File workingDirectory) throws IOException {
        return executeCommandLine(env, cmd, args, null, timeout, workingDirectory);
    }

    /**
     * Executes a command line executable based on the arguments specified.
     *
     * @param env the optional environment variables
     * @param cmd the command to run
     * @param args the optional command arguments
     * @param redactedArgs additional command arguments that are passed to the process
     *                     but omitted from logging and from the returned result
     * @param timeout the optional timeout in milliseconds
     * @param workingDirectory the optional working directory
     *
     * @return the command line result
     *
     * @throws IOException if something goes wrong
     */
    public static CommandLineResult executeCommandLine(@Nullable final Map<String, String> env,
                                                       final String cmd,
                                                       @Nullable final String[] args,
                                                       @Nullable final String[] redactedArgs,
                                                       final long timeout,
                                                       @Nullable final File workingDirectory) throws IOException {
        // BUG FIX: this overload previously forwarded null in place of redactedArgs,
        // silently discarding the redacted arguments the caller supplied.
        return executeCommandLine(env, cmd, args, redactedArgs, timeout, workingDirectory, null);
    }

    /**
     * Executes a command line executable based on the arguments specified.
     *
     * @param env the optional environment variables
     * @param cmd the command to run
     * @param args the optional command arguments
     * @param redactedArgs additional command arguments that are passed to the process
     *                     but omitted from logging and from the returned result
     * @param timeout the optional timeout in milliseconds; no watchdog is installed
     *                when the value is not positive
     * @param workingDirectory the optional working directory
     * @param lineConsumerFactory the optional std output line consumer factory
     *
     * @return the command line result
     *
     * @throws IOException if something goes wrong, including when the waiting
     *         thread is interrupted (the cause is preserved)
     */
    public static CommandLineResult executeCommandLine(@Nullable final Map<String, String> env,
                                                       final String cmd,
                                                       @Nullable final String[] args,
                                                       @Nullable final String[] redactedArgs,
                                                       final long timeout,
                                                       @Nullable final File workingDirectory,
                                                       @Nullable LineConsumerFactory lineConsumerFactory)
            throws IOException {
        // "command" holds only the loggable/reportable portion: cmd + args.
        CommandLine command = new CommandLine(cmd);
        if (args != null) {
            for (String arg : args) {
                command.add(arg);
            }
        }
        // The actually-executed command additionally carries the redacted arguments.
        // (Despite its name, "redactedCommand" is the FULL command line; "command"
        // is the one safe to show.)
        CommandLine redactedCommand = new CommandLine(command);
        if (redactedArgs != null) {
            for (String arg : redactedArgs) {
                redactedCommand.add(arg);
            }
        }

        // Only the non-redacted form is ever logged.
        LOG.debug("Running command: " + command.toString());

        final ProcessBuilder processBuilder = new ProcessBuilder(redactedCommand.toShellCommand());
        Map<String, String> environment = processBuilder.environment();
        if (env != null) {
            environment.putAll(env);
        }
        // Force a predictable locale so tool output can be parsed reliably.
        environment.put("LANG", "en_US.UTF-8");
        environment.put("GDM_LANG", "en_US.UTF-8");
        environment.put("LANGUAGE", "us");

        processBuilder.directory(workingDirectory);

        LineConsumer lineConsumer = LineConsumer.DEV_NULL;
        if (lineConsumerFactory != null) {
            lineConsumer = lineConsumerFactory.newLineConsumer();
        }

        final CommandLineOutputProcessor stdOutConsumer = new CommandLineOutputProcessor(new ArrayList<String>());
        final CommandLineOutputProcessor stdErrConsumer = new CommandLineOutputProcessor(new ArrayList<String>());

        final Process process = processBuilder.start();

        // Kill the process if it runs longer than the requested timeout.
        final Watchdog watcher;
        if (timeout > 0) {
            watcher = new Watchdog(timeout, TimeUnit.MILLISECONDS);
            watcher.start(new CancellableProcessWrapper(process));
        }

        try (LineConsumer consumer = new CompositeLineConsumer(lineConsumer, stdOutConsumer)) {
            ProcessUtil.process(process, consumer, stdErrConsumer);
            process.waitFor();
        } catch (InterruptedException e) {
            // Restore the interrupt status before surfacing the failure as IO.
            Thread.currentThread().interrupt();
            throw new IOException(e);
        }

        // The result deliberately reports only the non-redacted command.
        return new CommandLineResult(command, process.exitValue(), stdOutConsumer.getOutput(), stdErrConsumer.getOutput());
    }
}
| epl-1.0 |
geosolutions-it/geoserver-exts | importer/core/src/main/java/org/geoserver/importer/ImportData.java | 2705 | package org.geoserver.importer;
import java.io.IOException;
import java.io.Serializable;
import org.geoserver.importer.job.ProgressMonitor;
/**
* Base class for all types of data sources that can be imported.
*
* @author Justin Deoliveira, OpenGeo
*
*/
public abstract class ImportData implements Serializable{

    /** serialVersionUID */
    private static final long serialVersionUID = 1L;

    /**
     * the format for this data
     */
    DataFormat format;

    // Character set used when reading the data; may be null (unspecified).
    String charsetEncoding;

    /**
     * message associated with the data, usually for error reporting.
     */
    String message;

    public ImportData() {
    }

    // Copy constructor: copies format and charset encoding, but deliberately
    // not the (error) message.
    public ImportData(ImportData data) {
        this.format = data.getFormat();
        this.charsetEncoding = data.getCharsetEncoding();
    }

    public String getCharsetEncoding() {
        return charsetEncoding;
    }

    public void setCharsetEncoding(String charsetEncoding) {
        this.charsetEncoding = charsetEncoding;
    }

    public DataFormat getFormat() {
        return format;
    }

    public void setFormat(DataFormat format) {
        this.format = format;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    /**
     * Generates a name for this data.
     */
    public abstract String getName();

    /**
     * Runs any initial checks against the data preparing for import.
     * Convenience overload that supplies a new ProgressMonitor.
     */
    public final void prepare() throws IOException {
        prepare(new ProgressMonitor());
    }

    /**
     * Runs any initial checks against the data preparing for import.
     * Default implementation does nothing; subclasses override as needed.
     */
    public void prepare(ProgressMonitor monitor) throws IOException {
    }

    public void cleanup() throws IOException {
        // do nothing
    }

    /**
     * For data types that contain multiple other types, like directories, databases, etc...
     * this method should return the sub part, or simply return itself for data types (like files)
     * that can't be broken up. Default implementation returns {@code this}.
     */
    public ImportData part(String name) {
        return this;
    }

    /**
     * A dummy transfer object to hold properties but has no functionality.
     * getName() yields null and the operational methods are unsupported.
     */
    public static class TransferObject extends ImportData {

        @Override
        public String getName() {
            return null;
        }

        @Override
        public void cleanup() throws IOException {
            throw new UnsupportedOperationException();
        }

        @Override
        public void prepare(ProgressMonitor m) throws IOException {
            throw new UnsupportedOperationException();
        }
    }

    // No-op hook; subclasses holding detachable resources can override.
    public void reattach() {
    }
}
| gpl-2.0 |
cams7/gradle-samples | plugin/messaging/src/main/java/org/gradle/messaging/remote/internal/Protocol.java | 2268 | /*
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.messaging.remote.internal;
/**
* <p>A protocol implementation. A protocol is a stage in a bi-directional messaging pipeline. It can receive incoming and outgoing messages.
* In response to these messages, it can dispatch incoming and outgoing messages. It can also register callbacks to be executed later, to allow
* timeouts and periodic behaviour to be implemented.
*
* <p>All methods on protocol are called from a single thread at a time, so implementations do not have to use any locking for their state. The method
* implementations should not block.
*
* @param <T> The message type.
*/
public interface Protocol<T> {
    /**
     * Called to initialise the protocol. The supplied context can be later used to dispatch incoming and outgoing messages.
     *
     * @param context The context. Implementations typically retain it for use
     *                by the handle/stop methods below.
     */
    void start(ProtocolContext<T> context);

    /**
     * Handles an outgoing message. The context can be used to dispatch incoming and outgoing messages, as required.
     * Must not block (see the class contract above).
     */
    void handleOutgoing(T message);

    /**
     * Handles an incoming message. The context can be used to dispatch incoming and outgoing messages, as required.
     * Must not block (see the class contract above).
     */
    void handleIncoming(T message);

    /**
     * Requests that this protocol initiate its stop messages. The protocol can call {@link ProtocolContext#stopLater()} to defer stop until some
     * messages are received. In which case, it should later call {@link ProtocolContext#stopped()} to indicate it has finished.
     *
     * If the protocol does not call stopLater(), it is assumed to have stopped when this method returns.
     */
    void stopRequested();
}
| gpl-2.0 |
sbandur84/micro-Blagajna | src-pos/com/openbravo/pos/dbupdate/JdbUpdate.java | 4783 | // uniCenta oPOS - Touch Friendly Point Of Sale
// Copyright (c) 2009-2014 uniCenta & previous Openbravo POS works
// http://www.unicenta.com
// Portions Contributed by: John D L 2013
// This file is part of uniCenta oPOS
//
// uniCenta oPOS is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// uniCenta oPOS is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with uniCenta oPOS. If not, see <http://www.gnu.org/licenses/>.
package com.openbravo.pos.dbupdate;
import com.openbravo.basic.BasicException;
import com.openbravo.pos.forms.AppConfig;
import com.openbravo.pos.forms.AppLocal;
import com.openbravo.pos.forms.AppProperties;
import com.openbravo.pos.forms.JRootFrame;
import java.awt.BorderLayout;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import java.io.IOException;
import javax.imageio.ImageIO;
import javax.swing.LookAndFeel;
import javax.swing.UIManager;
import javax.swing.UnsupportedLookAndFeelException;
import org.pushingpixels.substance.api.SubstanceLookAndFeel;
import org.pushingpixels.substance.api.SubstanceSkin;
/**
*
* @author adrianromero
*/
public class JdbUpdate extends javax.swing.JFrame {

    // Panel that performs and controls the actual database update.
    private JPaneldbUpdate config;

    /** Creates new form JdbUpdate
     * @param props application properties passed through to the update panel */
    public JdbUpdate(AppProperties props) {
        initComponents();

        try {
            this.setIconImage(ImageIO.read(JRootFrame.class.getResourceAsStream("/com/openbravo/images/favicon.png")));
        } catch (IOException e) {
            // Best-effort: a missing icon resource is non-fatal; keep the default frame icon.
        }

        setTitle(AppLocal.APP_NAME + " - " + AppLocal.APP_VERSION + " - " + AppLocal.getIntString("Menu.Update"));
        addWindowListener(new MyFrameListener());

        config = new JPaneldbUpdate(props);
        getContentPane().add(config, BorderLayout.CENTER);
        try {
            config.activate();
        } catch (BasicException e) { // never thrown ;-)
        }
    }

    // Window callbacks: close only if the panel agrees to deactivate, and
    // terminate the JVM once the window has actually closed.
    private class MyFrameListener extends WindowAdapter{
        @Override
        public void windowClosing(WindowEvent evt) {
            if (config.deactivate()) {
                dispose();
            }
        }
        @Override
        public void windowClosed(WindowEvent evt) {
            System.exit(0);
        }
    }

    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        setDefaultCloseOperation(javax.swing.WindowConstants.DO_NOTHING_ON_CLOSE);

        java.awt.Dimension screenSize = java.awt.Toolkit.getDefaultToolkit().getScreenSize();
        setBounds((screenSize.width-507)/2, (screenSize.height-304)/2, 507, 304);
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Entry point: loads configuration, installs the configured look and feel,
     * and shows the update frame on the event-dispatch thread.
     *
     * @param args the command line arguments
     */
    public static void main(final String args[]) {
        java.awt.EventQueue.invokeLater(new Runnable() {
            @Override
            public void run() {
                AppConfig config = new AppConfig(args);
                config.load();

                // Set the look and feel.
                // JG 6 May 2013 to Multicatch
                try {
                    Object laf = Class.forName(config.getProperty("swing.defaultlaf")).newInstance();
                    if (laf instanceof LookAndFeel){
                        UIManager.setLookAndFeel((LookAndFeel) laf);
                    } else if (laf instanceof SubstanceSkin) {
                        SubstanceLookAndFeel.setSkin((SubstanceSkin) laf);
                    }
                // JG 6 May 2013 to multicatch
                } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException e) {
                    // Best-effort: any failure falls back to the default look and feel.
                    // NOTE(review): failure is silently swallowed - consider logging it.
                }

                new JdbUpdate(config).setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    // End of variables declaration//GEN-END:variables
}
| gpl-3.0 |
eethomas/eucalyptus | clc/modules/msgs/src/main/java/com/eucalyptus/util/async/ConnectionException.java | 3797 | /*************************************************************************
* Copyright 2009-2012 Eucalyptus Systems, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*
* Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta
* CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you need
* additional information or have any questions.
*
* This file may incorporate work covered under the following copyright
* and permission notice:
*
* Software License Agreement (BSD License)
*
* Copyright (c) 2008, Regents of the University of California
* All rights reserved.
*
* Redistribution and use of this software in source and binary forms,
* with or without modification, are permitted provided that the
* following conditions are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE. USERS OF THIS SOFTWARE ACKNOWLEDGE
* THE POSSIBLE PRESENCE OF OTHER OPEN SOURCE LICENSED MATERIAL,
* COPYRIGHTED MATERIAL OR PATENTED MATERIAL IN THIS SOFTWARE,
* AND IF ANY SUCH MATERIAL IS DISCOVERED THE PARTY DISCOVERING
* IT MAY INFORM DR. RICH WOLSKI AT THE UNIVERSITY OF CALIFORNIA,
* SANTA BARBARA WHO WILL THEN ASCERTAIN THE MOST APPROPRIATE REMEDY,
* WHICH IN THE REGENTS' DISCRETION MAY INCLUDE, WITHOUT LIMITATION,
* REPLACEMENT OF THE CODE SO IDENTIFIED, LICENSING OF THE CODE SO
* IDENTIFIED, OR WITHDRAWAL OF THE CODE CAPABILITY TO THE EXTENT
* NEEDED TO COMPLY WITH ANY SUCH LICENSES OR RIGHTS.
************************************************************************/
package com.eucalyptus.util.async;
import edu.ucsb.eucalyptus.msgs.BaseMessage;
/**
 * A {@link RequestException} carrying the request message involved in the
 * failure. By its name it signals connection-level failures of asynchronous
 * requests; the actual throwing sites live elsewhere (NOTE(review): semantics
 * inferred from the class name - confirm against callers).
 */
public class ConnectionException extends RequestException {

  /** @param msg the request message associated with the failure */
  public ConnectionException( BaseMessage msg ) {
    super( msg );
  }

  /**
   * @param message human-readable description of the failure
   * @param msg the request message associated with the failure
   */
  public ConnectionException( String message, BaseMessage msg ) {
    super( message, msg );
  }

  /**
   * @param message human-readable description of the failure
   * @param ex the underlying cause (preserved)
   * @param msg the request message associated with the failure
   */
  public ConnectionException( String message, Throwable ex, BaseMessage msg ) {
    super( message, ex, msg );
  }

  /**
   * @param ex the underlying cause (preserved)
   * @param msg the request message associated with the failure
   */
  public ConnectionException( Throwable ex, BaseMessage msg ) {
    super( ex, msg );
  }
}
| gpl-3.0 |
eethomas/eucalyptus | clc/modules/msgs/src/main/java/com/eucalyptus/util/async/NOOP.java | 4074 | /*************************************************************************
* Copyright 2009-2012 Eucalyptus Systems, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*
* Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta
* CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you need
* additional information or have any questions.
*
* This file may incorporate work covered under the following copyright
* and permission notice:
*
* Software License Agreement (BSD License)
*
* Copyright (c) 2008, Regents of the University of California
* All rights reserved.
*
* Redistribution and use of this software in source and binary forms,
* with or without modification, are permitted provided that the
* following conditions are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE. USERS OF THIS SOFTWARE ACKNOWLEDGE
* THE POSSIBLE PRESENCE OF OTHER OPEN SOURCE LICENSED MATERIAL,
* COPYRIGHTED MATERIAL OR PATENTED MATERIAL IN THIS SOFTWARE,
* AND IF ANY SUCH MATERIAL IS DISCOVERED THE PARTY DISCOVERING
* IT MAY INFORM DR. RICH WOLSKI AT THE UNIVERSITY OF CALIFORNIA,
* SANTA BARBARA WHO WILL THEN ASCERTAIN THE MOST APPROPRIATE REMEDY,
* WHICH IN THE REGENTS' DISCRETION MAY INCLUDE, WITHOUT LIMITATION,
* REPLACEMENT OF THE CODE SO IDENTIFIED, LICENSING OF THE CODE SO
* IDENTIFIED, OR WITHDRAWAL OF THE CODE CAPABILITY TO THE EXTENT
* NEEDED TO COMPLY WITH ANY SUCH LICENSES OR RIGHTS.
************************************************************************/
package com.eucalyptus.util.async;
import edu.ucsb.eucalyptus.msgs.BaseMessage;
import edu.ucsb.eucalyptus.msgs.EucalyptusMessage;
/**
 * A do-nothing {@code RemoteCallback}: every lifecycle method is an
 * intentional no-op, and {@link #getRequest()} supplies a fresh, empty
 * {@link EucalyptusMessage}.
 */
public class NOOP implements RemoteCallback<BaseMessage,BaseMessage> {

  /**
   * Intentional no-op.
   *
   * @see com.eucalyptus.util.Callback.TwiceChecked#initialize(java.lang.Object)
   */
  @Override
  public void initialize( BaseMessage request ) throws Exception {}

  /**
   * Intentional no-op: the response is discarded.
   *
   * @see com.eucalyptus.util.Callback#fire(java.lang.Object)
   */
  @Override
  public void fire( BaseMessage t ) {}

  /**
   * Intentional no-op: failures are silently ignored.
   *
   * @see com.eucalyptus.util.Callback.Checked#fireException(Throwable)
   */
  @Override
  public void fireException( Throwable throwable ) {}

  /**
   * @return a new, empty EucalyptusMessage on every call
   *
   * @see com.eucalyptus.util.async.RemoteCallback#getRequest()
   */
  @Override
  public BaseMessage getRequest( ) {
    return new EucalyptusMessage( );
  }
}
| gpl-3.0 |
AydinSakar/sql-layer | src/main/java/com/foundationdb/server/types/mcompat/aggr/MGroupConcat.java | 1939 | /**
* Copyright (C) 2009-2013 FoundationDB, LLC
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.foundationdb.server.types.mcompat.aggr;
import com.foundationdb.server.types.TAggregator;
import com.foundationdb.server.types.TFixedTypeAggregator;
import com.foundationdb.server.types.TInstance;
import com.foundationdb.server.types.mcompat.mtypes.MString;
import com.foundationdb.server.types.value.Value;
import com.foundationdb.server.types.value.ValueSource;
import com.foundationdb.server.types.value.ValueTarget;
/**
 * MySQL-compatible GROUP_CONCAT aggregator: concatenates non-NULL string
 * values of a group, separated by the supplied delimiter.
 */
public class MGroupConcat extends TFixedTypeAggregator
{
    public static final TAggregator INSTANCE = new MGroupConcat();

    // Singleton: registers as "group_concat" over the TEXT type.
    private MGroupConcat() {
        super("group_concat", MString.TEXT);
    }

    /**
     * Folds one input row into the running concatenation held in {@code state}.
     * NULL rows are skipped entirely (they contribute neither a value nor a
     * delimiter); the delimiter {@code del} is inserted only between values,
     * never before the first one.
     *
     * NOTE(review): building the result with String '+' per row is quadratic
     * in total output length - the inline "cache a StringBuilder" comment
     * below points at the same issue.
     */
    @Override
    public void input(TInstance type, ValueSource source, TInstance stateType, Value state, Object del)
    {
        // skip all NULL rows
        if (source.isNull())
            return;
        // cache a StringBuilder instead?
        state.putString((state.hasAnyValue()
                ? state.getString() + (String)del
                : "")
                + source.getString(),
                null);
    }

    /**
     * An empty group (no non-NULL inputs) aggregates to SQL NULL.
     */
    @Override
    public void emptyValue(ValueTarget state)
    {
        state.putNull();
    }
}
| agpl-3.0 |
AydinSakar/sql-layer | src/test/java/com/foundationdb/server/test/it/dxl/StableUuidsIT.java | 1677 | /**
* Copyright (C) 2009-2013 FoundationDB, LLC
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.foundationdb.server.test.it.dxl;
import com.foundationdb.server.test.it.ITBase;
import org.junit.Test;
import java.util.UUID;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
/**
 * Verifies that a table's UUID is stable across a server restart: the value
 * read after restart must equal the original, while being a distinct object
 * (i.e. genuinely re-read rather than cached).
 */
public final class StableUuidsIT extends ITBase {

    @Test
    public void uuidsSurviveRestart() throws Exception {
        final int tableId = createTable("testSchema", "customer", "id int not null primary key, name varchar(32)");
        UUID origUuid = ais().getTable(tableId).getUuid();
        assertNotNull("original UUID is null", origUuid);

        safeRestartTestServices();

        UUID afterRestartUuid = ais().getTable(tableId).getUuid();
        // BUG FIX: this message previously read "original UUID is null",
        // which would mislead anyone diagnosing a failure of THIS assertion.
        assertNotNull("after-restart UUID is null", afterRestartUuid);
        assertEquals("UUIDs for customer", origUuid, afterRestartUuid);
        // Equal but not identical: proves the UUID was re-read, not cached.
        assertNotSame("UUIDs are same object", origUuid, afterRestartUuid);
    }
}
| agpl-3.0 |
kumarrus/voltdb | tests/test_apps/genqa/src/genqa/procedures/SampleRecord.java | 7122 | /* This file is part of VoltDB.
* Copyright (C) 2008-2015 VoltDB Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
package genqa.procedures;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.Random;
import org.voltdb.VoltType;
import org.voltdb.types.TimestampType;
/**
 * A row of randomly generated sample data covering every supported column
 * type, in both nullable and NOT NULL flavors. Nullable columns come back
 * null roughly half the time; generated values never collide with VoltDB's
 * per-type NULL sentinel constants.
 */
public class SampleRecord
{
    public final long rowid;
    // Partitioning/grouping byte derived from rowid: cycles through -127..127.
    public final Object rowid_group;
    public final Object type_null_tinyint;
    public final Object type_not_null_tinyint;
    public final Object type_null_smallint;
    public final Object type_not_null_smallint;
    public final Object type_null_integer;
    public final Object type_not_null_integer;
    public final Object type_null_bigint;
    public final Object type_not_null_bigint;
    public final Object type_null_timestamp;
    public final Object type_not_null_timestamp;
    public final Object type_null_float;
    public final Object type_not_null_float;
    public final Object type_null_decimal;
    public final Object type_not_null_decimal;
    public final Object type_null_varchar25;
    public final Object type_not_null_varchar25;
    public final Object type_null_varchar128;
    public final Object type_not_null_varchar128;
    public final Object type_null_varchar1024;
    public final Object type_not_null_varchar1024;

    /**
     * Fills every column with a fresh random value drawn from {@code rand}.
     *
     * @param rowid primary identifier copied into the record
     * @param rand  source of randomness for all generated columns
     */
    public SampleRecord(long rowid, Random rand)
    {
        this.rowid = rowid;
        this.rowid_group = (byte)((rowid % 255) - 127);
        this.type_null_tinyint = nextTinyint(rand, true);
        this.type_not_null_tinyint = nextTinyint(rand);
        this.type_null_smallint = nextSmallint(rand, true);
        this.type_not_null_smallint = nextSmallint(rand);
        this.type_null_integer = nextInteger(rand, true);
        this.type_not_null_integer = nextInteger(rand);
        this.type_null_bigint = nextBigint(rand, true);
        this.type_not_null_bigint = nextBigint(rand);
        this.type_null_timestamp = nextTimestamp(rand, true);
        this.type_not_null_timestamp = nextTimestamp(rand);
        this.type_null_float = nextFloat(rand, true);
        this.type_not_null_float = nextFloat(rand);
        this.type_null_decimal = nextDecimal(rand, true);
        this.type_not_null_decimal = nextDecimal(rand);
        this.type_null_varchar25 = nextVarchar(rand, true, 1, 25);
        this.type_not_null_varchar25 = nextVarchar(rand, 1, 25);
        this.type_null_varchar128 = nextVarchar(rand, true, 25, 128);
        this.type_not_null_varchar128 = nextVarchar(rand, 25, 128);
        this.type_null_varchar1024 = nextVarchar(rand, true, 128, 1024);
        this.type_not_null_varchar1024 = nextVarchar(rand, 128, 1024);
    }

    private static Object nextTinyint(Random rand)
    {
        return nextTinyint(rand, false);
    }

    /**
     * Random TINYINT, or null about half the time when {@code isNullable}.
     * The NULL_TINYINT sentinel is excluded so the value is never mistaken
     * for SQL NULL. (Replaced the deprecated {@code new Integer(...).byteValue()}
     * boxing dance with a plain narrowing cast.)
     */
    private static Object nextTinyint(Random rand, boolean isNullable)
    {
        if (isNullable && rand.nextBoolean()) return null;
        byte result;
        do { result = (byte) rand.nextInt(); } while(result == VoltType.NULL_TINYINT);
        return result;
    }

    private static Object nextSmallint(Random rand)
    {
        return nextSmallint(rand, false);
    }

    /** Random SMALLINT, excluding the NULL_SMALLINT sentinel. */
    private static Object nextSmallint(Random rand, boolean isNullable)
    {
        if (isNullable && rand.nextBoolean()) return null;
        short result;
        do { result = (short) rand.nextInt(); } while(result == VoltType.NULL_SMALLINT);
        return result;
    }

    private static Object nextInteger(Random rand)
    {
        return nextInteger(rand, false);
    }

    /** Random INTEGER, excluding the NULL_INTEGER sentinel. */
    private static Object nextInteger(Random rand, boolean isNullable)
    {
        if (isNullable && rand.nextBoolean()) return null;
        int result;
        do { result = rand.nextInt(); } while(result == VoltType.NULL_INTEGER);
        return result;
    }

    private static Object nextBigint(Random rand)
    {
        return nextBigint(rand, false);
    }

    /** Random BIGINT, excluding the NULL_BIGINT sentinel. */
    private static Object nextBigint(Random rand, boolean isNullable)
    {
        if (isNullable && rand.nextBoolean()) return null;
        long result;
        do { result = rand.nextLong(); } while(result == VoltType.NULL_BIGINT);
        return result;
    }

    private static Object nextTimestamp(Random rand)
    {
        return nextTimestamp(rand, false);
    }

    /** Random non-negative timestamp (microseconds built from a whole-second value). */
    private static Object nextTimestamp(Random rand, boolean isNullable)
    {
        if (isNullable && rand.nextBoolean()) return null;
        // Mask to a non-negative int. The previous Math.abs(rand.nextInt())
        // returned a negative value for Integer.MIN_VALUE, producing an
        // invalid negative timestamp once in 2^32 calls.
        return new TimestampType((rand.nextInt() & Integer.MAX_VALUE) * 1000L);
    }

    private static Object nextFloat(Random rand)
    {
        return nextFloat(rand, false);
    }

    /** Random FLOAT, excluding the NULL_FLOAT sentinel. */
    private static Object nextFloat(Random rand, boolean isNullable)
    {
        if (isNullable && rand.nextBoolean()) return null;
        double result; // Inconsistent naming (!) Underlying database type is Double
        do { result = rand.nextDouble(); } while(result == VoltType.NULL_FLOAT);
        return result;
    }

    private static Object nextDecimal(Random rand)
    {
        return nextDecimal(rand, false);
    }

    /** Random DECIMAL scaled to VoltDB's fixed 12 fractional digits. */
    private static Object nextDecimal(Random rand, boolean isNullable)
    {
        if (isNullable && rand.nextBoolean()) return null;
        return (new BigDecimal(rand.nextDouble()*rand.nextLong())).setScale(12, RoundingMode.HALF_EVEN);
    }

    private static Object nextVarchar(Random rand, int minLength, int maxLength)
    {
        return nextVarchar(rand, false, minLength, maxLength);
    }

    /**
     * Random string of binary digits whose length is uniform in
     * [minLength, maxLength).
     */
    private static Object nextVarchar(Random rand, boolean isNullable, int minLength, int maxLength)
    {
        if (isNullable && rand.nextBoolean()) return null;
        int length = (maxLength==minLength)?maxLength:rand.nextInt(maxLength-minLength)+minLength;
        StringBuilder result = new StringBuilder(length);
        while(result.length() < length)
            result.append(Long.toBinaryString(rand.nextLong()));
        // Truncate to exactly the chosen length. The previous code subtracted
        // one extra character (substring(0, min(...) - 1)), so every string
        // came back one short and a minLength of 1 could yield "".
        return result.substring(0, length);
    }
}
| agpl-3.0 |
kumarrus/voltdb | src/frontend/org/voltdb/utils/SerializationHelper.java | 11780 | /* This file is part of VoltDB.
* Copyright (C) 2008-2015 VoltDB Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
*/
package org.voltdb.utils;
import java.io.IOException;
import java.math.BigDecimal;
import java.nio.ByteBuffer;
import org.voltdb.PrivateVoltTableFactory;
import org.voltdb.VoltTable;
import org.voltdb.VoltType;
import org.voltdb.common.Constants;
import org.voltdb.types.TimestampType;
import org.voltdb.types.VoltDecimalHelper;
/**
 * Helpers for reading and writing VoltDB's wire format against
 * {@link ByteBuffer}s: length-prefixed UTF-8 strings and varbinary, typed
 * arrays, decimals and nested tables.
 *
 * <p>The static helpers are stateless and safe to share. The memoized-string
 * instance members make instances NOT thread-safe — confine each instance to
 * one thread.
 */
public class SerializationHelper {
    // Cache of the UTF-8 encoding of the last string seen by
    // getSerializedSize()/flattenToBuffer(), matched by reference identity so
    // the common size-then-write pattern encodes only once.
    byte[] memoizedStringBytes;
    String memoizedString;

    /** Returns the serialized size of {@code value}: 4-byte length prefix plus UTF-8 bytes. */
    int getSerializedSize(String value) {
        if (memoizedString == value) {
            assert(memoizedStringBytes != null);
        }
        else {
            memoizedString = value;
            memoizedStringBytes = value.getBytes(Constants.UTF8ENCODING);
        }
        return memoizedStringBytes.length + 4; // int length prefix
    }

    /** Writes {@code value} as a length-prefixed UTF-8 string, reusing the memoized encoding. */
    void flattenToBuffer(ByteBuffer buf, String value) {
        if (memoizedString == value) {
            assert(memoizedStringBytes != null);
        }
        else {
            memoizedString = value;
            memoizedStringBytes = value.getBytes(Constants.UTF8ENCODING);
        }
        buf.putInt(memoizedStringBytes.length);
        buf.put(memoizedStringBytes);
    }

    /**
     * Reads a length-prefixed UTF-8 string.
     *
     * @return the string, or null when the NULL sentinel length was written
     * @throws IOException if the length prefix is negative (and not the NULL sentinel)
     */
    public static String getString(ByteBuffer buf) throws IOException {
        final int len = buf.getInt();
        // check for null string
        if (len == VoltType.NULL_STRING_LENGTH) {
            return null;
        }
        // Validate before allocating. (An `assert len >= 0` previously sat
        // here; with -ea it fired before this check, turning a corrupt
        // negative length into an AssertionError instead of the declared
        // IOException.)
        if (len < 0) {
            throw new IOException("String length is negative " + len);
        }
        // now assume not null
        final byte[] strbytes = new byte[len];
        buf.get(strbytes);
        return new String(strbytes, Constants.UTF8ENCODING);
    }

    /**
     * Reads a length-prefixed byte array.
     *
     * @return the bytes, or null when the NULL sentinel length was written
     * @throws IOException if the length prefix is negative (and not the NULL sentinel)
     */
    public static byte[] getVarbinary(ByteBuffer buf) throws IOException {
        final int len = buf.getInt();
        // check for null string
        if (len == VoltType.NULL_STRING_LENGTH) {
            return null;
        }
        // Same assertion-vs-exception fix as getString() above.
        if (len < 0) {
            throw new IOException("Varbinary length is negative " + len);
        }
        // now assume not null
        final byte[] retval = new byte[len];
        buf.get(retval);
        return retval;
    }

    /** Reads a VoltDB fixed-scale decimal. */
    public static BigDecimal getBigDecimal(ByteBuffer buf) {
        return VoltDecimalHelper.deserializeBigDecimal(buf);
    }

    /**
     * Reads a typed array from {@code buf}. Byte arrays carry a 4-byte count;
     * every other element type carries a 2-byte count (mirroring the
     * writeArray() overloads below).
     *
     * @throws IOException if the count is negative or exceeds the per-type limit
     * @throws RuntimeException if {@code type} is not a supported element type
     */
    public static Object readArray(final Class<?> type, ByteBuffer buf) throws IOException {
        final int count = type == byte.class ? buf.getInt() : buf.getShort();
        if (count < 0) {
            throw new IOException("Array length is negative " + count);
        }
        else if (type == byte.class) {
            if (count > (VoltType.MAX_VALUE_LENGTH)) {
                throw new IOException("Array length is greater then the max of 1 megabyte " + count);
            }
            final byte[] retval = new byte[count];
            buf.get(retval);
            return retval;
        }
        else if (type == byte[].class) {
            final byte[][] retval = new byte[count][];
            for (int i = 0; i < count; i++) {
                int size = buf.getInt();
                if (size == -1) { // null length prefix
                    retval[i] = null;
                }
                else {
                    retval[i] = new byte[size];
                    buf.get(retval[i]);
                }
            }
            return retval;
        }
        else if (type == short.class) {
            final short[] retval = new short[count];
            for (int i = 0; i < count; i++) {
                retval[i] = buf.getShort();
            }
            return retval;
        }
        else if (type == int.class) {
            final int[] retval = new int[count];
            for (int i = 0; i < count; i++) {
                retval[i] = buf.getInt();
            }
            return retval;
        }
        else if (type == long.class) {
            final long[] retval = new long[count];
            for (int i = 0; i < count; i++) {
                retval[i] = buf.getLong();
            }
            return retval;
        }
        else if (type == Long.class) {
            final Long[] retval = new Long[count];
            for (int i = 0; i < count; i++) {
                retval[i] = buf.getLong();
            }
            return retval;
        }
        else if (type == String.class) {
            final String[] retval = new String[count];
            for (int i = 0; i < count; i++) {
                retval[i] = getString(buf);
            }
            return retval;
        }
        else if (type == double.class) {
            final double[] retval = new double[count];
            for (int i = 0; i < count; i++) {
                retval[i] = buf.getDouble();
            }
            return retval;
        }
        else if (type == Double.class) {
            final Double[] retval = new Double[count];
            for (int i = 0; i < count; i++) {
                retval[i] = buf.getDouble();
            }
            return retval;
        }
        else if (type == TimestampType.class) {
            final TimestampType[] retval = new TimestampType[count];
            for (int i = 0; i < count; i++) {
                retval[i] = new TimestampType(buf.getLong());
            }
            return retval;
        }
        else if (type == BigDecimal.class) {
            final BigDecimal[] retval = new BigDecimal[count];
            for (int i = 0; i < count; ++i) {
                retval[i] = getBigDecimal(buf);
            }
            return retval;
        }
        else if (type == VoltTable.class) {
            final VoltTable[] retval = new VoltTable[count];
            for (int i = 0; i < count; ++i) {
                // Each table shares the buffer's backing storage; advance past
                // the bytes it consumed.
                retval[i] = PrivateVoltTableFactory.createVoltTableFromSharedBuffer(buf.slice());
                buf.position(buf.position() + retval[i].getSerializedSize());
            }
            return retval;
        }
        else {
            throw new RuntimeException("SerializationHelper.readArray called with unhandled type: " + type.getName());
        }
    }

    /**
     * Write a string in the standard VoltDB way
     */
    public static void writeString(String value, ByteBuffer buf) {
        if (value == null) {
            buf.putInt(VoltType.NULL_STRING_LENGTH);
            return;
        }
        byte[] strbytes = value.getBytes(Constants.UTF8ENCODING);
        int len = strbytes.length;
        buf.putInt(len);
        buf.put(strbytes);
    }

    /**
     * Write a set of bytes in the standard VoltDB way
     */
    public static void writeVarbinary(byte[] bytes, ByteBuffer buf) throws IOException {
        if (bytes == null) {
            buf.putInt(VoltType.NULL_STRING_LENGTH);
            return;
        }
        buf.putInt(bytes.length);
        buf.put(bytes);
    }

    /** Writes a byte array with a 4-byte count prefix. */
    public static void writeArray(byte[] values, ByteBuffer buf) throws IOException {
        buf.putInt(values.length);
        buf.put(values);
    }

    /** Writes a short array with a 2-byte count prefix. */
    public static void writeArray(short[] values, ByteBuffer buf) throws IOException {
        if (values.length > Short.MAX_VALUE) {
            throw new IOException("Array exceeds maximum length of "
                    + Short.MAX_VALUE + " bytes");
        }
        buf.putShort((short)values.length);
        for (int i = 0; i < values.length; ++i) {
            buf.putShort(values[i]);
        }
    }

    /** Writes an int array with a 2-byte count prefix. */
    public static void writeArray(int[] values, ByteBuffer buf) throws IOException {
        if (values.length > Short.MAX_VALUE) {
            throw new IOException("Array exceeds maximum length of "
                    + Short.MAX_VALUE + " bytes");
        }
        buf.putShort((short)values.length);
        for (int i = 0; i < values.length; ++i) {
            buf.putInt(values[i]);
        }
    }

    /** Writes a long array with a 2-byte count prefix. */
    public static void writeArray(long[] values, ByteBuffer buf) throws IOException {
        if (values.length > Short.MAX_VALUE) {
            throw new IOException("Array exceeds maximum length of "
                    + Short.MAX_VALUE + " bytes");
        }
        buf.putShort((short)values.length);
        for (int i = 0; i < values.length; ++i) {
            buf.putLong(values[i]);
        }
    }

    /** Writes a double array with a 2-byte count prefix. */
    public static void writeArray(double[] values, ByteBuffer buf) throws IOException {
        if (values.length > Short.MAX_VALUE) {
            throw new IOException("Array exceeds maximum length of "
                    + Short.MAX_VALUE + " bytes");
        }
        buf.putShort((short)values.length);
        for (int i = 0; i < values.length; ++i) {
            buf.putDouble(values[i]);
        }
    }

    /** Writes a timestamp array; null elements become Long.MIN_VALUE sentinels. */
    public static void writeArray(TimestampType[] values, ByteBuffer buf) throws IOException {
        if (values.length > Short.MAX_VALUE) {
            throw new IOException("Array exceeds maximum length of "
                    + Short.MAX_VALUE + " bytes");
        }
        buf.putShort((short)values.length);
        for (int i = 0; i < values.length; ++i) {
            if (values[i] == null) buf.putLong(Long.MIN_VALUE);
            else buf.putLong(values[i].getTime());
        }
    }

    /** Writes a decimal array; null elements are serialized as decimal NULL. */
    public static void writeArray(BigDecimal[] values, ByteBuffer buf) throws IOException {
        if (values.length > Short.MAX_VALUE) {
            throw new IOException("Array exceeds maximum length of "
                    + Short.MAX_VALUE + " bytes");
        }
        buf.putShort((short)values.length);
        for (int i = 0; i < values.length; ++i) {
            if (values[i] == null) {
                VoltDecimalHelper.serializeNull(buf);
            }
            else {
                VoltDecimalHelper.serializeBigDecimal(values[i], buf);
            }
        }
    }

    /** Writes a table array; null elements are not representable and raise IOException. */
    public static void writeArray(VoltTable[] values, ByteBuffer buf) throws IOException {
        if (values.length > Short.MAX_VALUE) {
            throw new IOException("Array exceeds maximum length of "
                    + Short.MAX_VALUE + " bytes");
        }
        buf.putShort((short)values.length);
        for (int i = 0; i < values.length; ++i) {
            if (values[i] == null)
                throw new IOException("Array being fastserialized can't contain null values (position " + i + ")");
            values[i].flattenToBuffer(buf);
        }
    }

    /** Writes an array of byte arrays; null elements get the NULL length prefix. */
    public static void writeArray(byte[][] values, ByteBuffer buf) throws IOException {
        // The element count is written with putShort below, so validate
        // against Short.MAX_VALUE like every other overload. The old check
        // against VoltType.MAX_VALUE_LENGTH (1 MB) accepted larger arrays and
        // then silently truncated the count in the (short) cast, corrupting
        // the stream.
        if (values.length > Short.MAX_VALUE) {
            throw new IOException("Array exceeds maximum length of "
                    + Short.MAX_VALUE + " bytes");
        }
        buf.putShort((short) values.length);
        for (int i = 0; i < values.length; ++i) {
            if (values[i] == null) {
                buf.putInt(VoltType.NULL_STRING_LENGTH);
            }
            else {
                writeArray(values[i], buf);
            }
        }
    }
}
| agpl-3.0 |
Metaswitch/jitsi | src/net/java/sip/communicator/util/Base64.java | 14234 | /*
* Jitsi, the OpenSource Java VoIP and Instant Messaging client.
*
* Distributable under LGPL license.
* See terms of license at gnu.org.
*
* The contents of this file has been copied from the Base64 and Base64Encoder
* classes of the Bouncy Castle libraries and included the following license.
*
* Copyright (c) 2000 - 2006 The Legion Of The Bouncy Castle
* (http://www.bouncycastle.org)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.java.sip.communicator.util;
import java.io.*;
/**
 * Base 64 encoding/decoding utilities (copied from Bouncy Castle's Base64 and
 * Base64Encoder; see the license header above). Decoding skips ASCII
 * whitespace. Fix over the original copy: decoding an empty or
 * whitespace-only input now returns zero bytes instead of throwing
 * ArrayIndexOutOfBoundsException from the last-block handling.
 */
public class Base64
{
    private static final Base64Encoder encoder = new Base64Encoder();

    /**
     * encode the input data producing a base 64 encoded byte array.
     *
     * @return a byte array containing the base 64 encoded data.
     * @param data the byte array to encode
     */
    public static byte[] encode(
        byte[]    data)
    {
        ByteArrayOutputStream bOut = new ByteArrayOutputStream();

        try
        {
            encoder.encode(data, 0, data.length, bOut);
        }
        catch (IOException e)
        {
            // ByteArrayOutputStream never throws; wrap defensively.
            throw new RuntimeException("exception encoding base64 string: " + e);
        }

        return bOut.toByteArray();
    }

    /**
     * Encode the byte data to base 64 writing it to the given output stream.
     *
     * @param data the byte array to encode
     * @param out the output stream where the result is to be written.
     *
     * @return the number of bytes produced.
     *
     * @throws IOException if the output stream throws one
     */
    public static int encode(
        byte[]         data,
        OutputStream   out)
        throws IOException
    {
        return encoder.encode(data, 0, data.length, out);
    }

    /**
     * Encode the byte data to base 64 writing it to the given output stream.
     *
     * @return the number of bytes produced.
     * @param data the byte array to encode
     * @param off offset
     * @param length length
     * @param out OutputStream
     * @throws IOException
     */
    public static int encode(
        byte[]         data,
        int            off,
        int            length,
        OutputStream   out)
        throws IOException
    {
        return encoder.encode(data, off, length, out);
    }

    /**
     * decode the base 64 encoded input data. It is assumed the input data is
     * valid.
     *
     * @return a byte array representing the decoded data.
     * @param data the byte array to encode
     */
    public static byte[] decode(
        byte[]    data)
    {
        ByteArrayOutputStream bOut = new ByteArrayOutputStream();

        try
        {
            encoder.decode(data, 0, data.length, bOut);
        }
        catch (IOException e)
        {
            throw new RuntimeException("exception decoding base64 string: " + e);
        }

        return bOut.toByteArray();
    }

    /**
     * decode the base 64 encoded String data - whitespace will be ignored.
     *
     * @param data the byte array to encode
     * @return a byte array representing the decoded data.
     */
    public static byte[] decode(
        String    data)
    {
        ByteArrayOutputStream bOut = new ByteArrayOutputStream();

        try
        {
            encoder.decode(data, bOut);
        }
        catch (IOException e)
        {
            throw new RuntimeException("exception decoding base64 string: " + e);
        }

        return bOut.toByteArray();
    }

    /**
     * decode the base 64 encoded String data writing it to the given output stream,
     * whitespace characters will be ignored.
     *
     * @param data the data to decode
     * @param out OutputStream
     * @return the number of bytes produced.
     * @throws IOException if an exception occurs while writing to the specified
     * output stream
     */
    public static int decode(
        String          data,
        OutputStream    out)
        throws IOException
    {
        return encoder.decode(data, out);
    }

    /** Prevents the initialization of <tt>Base64</tt> instances. */
    private Base64() {}

    public static class Base64Encoder
    {
        protected final byte[] encodingTable =
        {
            (byte)'A', (byte)'B', (byte)'C', (byte)'D', (byte)'E', (byte)'F', (byte)'G',
            (byte)'H', (byte)'I', (byte)'J', (byte)'K', (byte)'L', (byte)'M', (byte)'N',
            (byte)'O', (byte)'P', (byte)'Q', (byte)'R', (byte)'S', (byte)'T', (byte)'U',
            (byte)'V', (byte)'W', (byte)'X', (byte)'Y', (byte)'Z',
            (byte)'a', (byte)'b', (byte)'c', (byte)'d', (byte)'e', (byte)'f', (byte)'g',
            (byte)'h', (byte)'i', (byte)'j', (byte)'k', (byte)'l', (byte)'m', (byte)'n',
            (byte)'o', (byte)'p', (byte)'q', (byte)'r', (byte)'s', (byte)'t', (byte)'u',
            (byte)'v',
            (byte)'w', (byte)'x', (byte)'y', (byte)'z',
            (byte)'0', (byte)'1', (byte)'2', (byte)'3', (byte)'4', (byte)'5', (byte)'6',
            (byte)'7', (byte)'8', (byte)'9',
            (byte)'+', (byte)'/'
        };

        protected byte padding = (byte) '=';

        /*
         * set up the decoding table.
         */
        protected final byte[] decodingTable = new byte[128];

        protected void initialiseDecodingTable()
        {
            for (int i = 0; i < encodingTable.length; i++)
            {
                decodingTable[encodingTable[i]] = (byte) i;
            }
        }

        public Base64Encoder()
        {
            initialiseDecodingTable();
        }

        /**
         * encode the input data producing a base 64 output stream.
         *
         * @return the number of bytes produced.
         * @param data the byte array to encode
         * @param off offset
         * @param length length
         * @param out OutputStream
         * @throws IOException if an exception occurs while writing to the
         * stream.
         */
        public int encode(
            byte[]          data,
            int             off,
            int             length,
            OutputStream    out) throws IOException
        {
            int modulus = length % 3;
            int dataLength = (length - modulus);
            int a1, a2, a3;

            // Encode complete 3-byte groups as 4 output characters.
            for (int i = off; i < off + dataLength; i += 3)
            {
                a1 = data[i] & 0xff;
                a2 = data[i + 1] & 0xff;
                a3 = data[i + 2] & 0xff;

                out.write(encodingTable[ (a1 >>> 2) & 0x3f]);
                out.write(encodingTable[ ( (a1 << 4) | (a2 >>> 4)) & 0x3f]);
                out.write(encodingTable[ ( (a2 << 2) | (a3 >>> 6)) & 0x3f]);
                out.write(encodingTable[a3 & 0x3f]);
            }

            /*
             * process the tail end: 1 or 2 leftover bytes are padded with '='.
             */
            int b1, b2, b3;
            int d1, d2;

            switch (modulus)
            {
                case 0: /* nothing left to do */
                    break;
                case 1:
                    d1 = data[off + dataLength] & 0xff;
                    b1 = (d1 >>> 2) & 0x3f;
                    b2 = (d1 << 4) & 0x3f;

                    out.write(encodingTable[b1]);
                    out.write(encodingTable[b2]);
                    out.write(padding);
                    out.write(padding);
                    break;
                case 2:
                    d1 = data[off + dataLength] & 0xff;
                    d2 = data[off + dataLength + 1] & 0xff;

                    b1 = (d1 >>> 2) & 0x3f;
                    b2 = ( (d1 << 4) | (d2 >>> 4)) & 0x3f;
                    b3 = (d2 << 2) & 0x3f;

                    out.write(encodingTable[b1]);
                    out.write(encodingTable[b2]);
                    out.write(encodingTable[b3]);
                    out.write(padding);
                    break;
            }

            return (dataLength / 3) * 4 + ( (modulus == 0) ? 0 : 4);
        }

        /** True for the whitespace characters decoding silently skips. */
        private boolean ignore(
            char c)
        {
            return (c == '\n' || c == '\r' || c == '\t' || c == ' ');
        }

        /**
         * decode the base 64 encoded byte data writing it to the given output
         * stream, whitespace characters will be ignored.
         *
         * @return the number of bytes produced.
         * @param data the byte array to encode
         * @param off offset
         * @param length length
         * @param out OutputStream
         * @throws IOException if an exception occurs while wrinting to the
         * stream.
         */
        public int decode(
            byte[]          data,
            int             off,
            int             length,
            OutputStream    out) throws IOException
        {
            byte b1, b2, b3, b4;
            int outLen = 0;

            // Empty input decodes to nothing. Without this guard the
            // last-block handling below indexed data[end - 4] out of bounds.
            if (length == 0)
            {
                return 0;
            }

            int end = off + length;

            // Trim trailing whitespace.
            while (end > off)
            {
                if (!ignore( (char) data[end - 1]))
                {
                    break;
                }

                end--;
            }

            // Whitespace-only input likewise decodes to nothing.
            if (end == off)
            {
                return 0;
            }

            int i = off;
            int finish = end - 4;

            i = nextI(data, i, finish);

            // Decode all complete 4-character groups except the last, which
            // may carry padding and is handled by decodeLastBlock().
            while (i < finish)
            {
                b1 = decodingTable[data[i++]];

                i = nextI(data, i, finish);

                b2 = decodingTable[data[i++]];

                i = nextI(data, i, finish);

                b3 = decodingTable[data[i++]];

                i = nextI(data, i, finish);

                b4 = decodingTable[data[i++]];

                out.write( (b1 << 2) | (b2 >> 4));
                out.write( (b2 << 4) | (b3 >> 2));
                out.write( (b3 << 6) | b4);

                outLen += 3;

                i = nextI(data, i, finish);
            }

            outLen +=
                decodeLastBlock(out, (char) data[end - 4], (char) data[end - 3],
                                (char) data[end - 2], (char) data[end - 1]);

            return outLen;
        }

        /** Advances i past ignorable whitespace, stopping at finish. */
        private int nextI(byte[] data, int i, int finish)
        {
            while ( (i < finish) && ignore( (char) data[i]))
            {
                i++;
            }

            return i;
        }

        /**
         * decode the base 64 encoded String data writing it to the given output
         * stream, whitespace characters will be ignored.
         *
         * @return the number of bytes produced.
         * @param data the byte array to encode
         * @param out OutputStream
         * @throws IOException if an exception occurs while writing to the
         * stream
         */
        public int decode(
            String          data,
            OutputStream    out) throws IOException
        {
            byte b1, b2, b3, b4;
            int length = 0;

            // Empty input decodes to nothing (see byte[] overload).
            if (data.length() == 0)
            {
                return 0;
            }

            int end = data.length();

            // Trim trailing whitespace.
            while (end > 0)
            {
                if (!ignore(data.charAt(end - 1)))
                {
                    break;
                }

                end--;
            }

            // Whitespace-only input likewise decodes to nothing.
            if (end == 0)
            {
                return 0;
            }

            int i = 0;
            int finish = end - 4;

            i = nextI(data, i, finish);

            while (i < finish)
            {
                b1 = decodingTable[data.charAt(i++)];

                i = nextI(data, i, finish);

                b2 = decodingTable[data.charAt(i++)];

                i = nextI(data, i, finish);

                b3 = decodingTable[data.charAt(i++)];

                i = nextI(data, i, finish);

                b4 = decodingTable[data.charAt(i++)];

                out.write( (b1 << 2) | (b2 >> 4));
                out.write( (b2 << 4) | (b3 >> 2));
                out.write( (b3 << 6) | b4);

                length += 3;

                i = nextI(data, i, finish);
            }

            length +=
                decodeLastBlock(out, data.charAt(end - 4), data.charAt(end - 3),
                                data.charAt(end - 2), data.charAt(end - 1));

            return length;
        }

        /**
         * Decodes the final 4-character group, which may end in one or two
         * '=' padding characters, and returns the number of bytes written.
         */
        private int decodeLastBlock(OutputStream out, char c1, char c2, char c3,
                                    char c4) throws IOException
        {
            byte b1, b2, b3, b4;

            if (c3 == padding)
            {
                b1 = decodingTable[c1];
                b2 = decodingTable[c2];

                out.write( (b1 << 2) | (b2 >> 4));

                return 1;
            }
            else if (c4 == padding)
            {
                b1 = decodingTable[c1];
                b2 = decodingTable[c2];
                b3 = decodingTable[c3];

                out.write( (b1 << 2) | (b2 >> 4));
                out.write( (b2 << 4) | (b3 >> 2));

                return 2;
            }
            else
            {
                b1 = decodingTable[c1];
                b2 = decodingTable[c2];
                b3 = decodingTable[c3];
                b4 = decodingTable[c4];

                out.write( (b1 << 2) | (b2 >> 4));
                out.write( (b2 << 4) | (b3 >> 2));
                out.write( (b3 << 6) | b4);

                return 3;
            }
        }

        /** Advances i past ignorable whitespace, stopping at finish. */
        private int nextI(String data, int i, int finish)
        {
            while ( (i < finish) && ignore(data.charAt(i)))
            {
                i++;
            }

            return i;
        }
    }
}
dgageot/sonarqube | server/sonar-server/src/main/java/org/sonar/server/qualityprofile/ws/SearchAction.java | 7857 | /*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.qualityprofile.ws;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import org.apache.commons.lang.builder.CompareToBuilder;
import org.sonar.api.resources.Languages;
import org.sonar.api.server.ws.Request;
import org.sonar.api.server.ws.Response;
import org.sonar.api.server.ws.WebService;
import org.sonar.api.server.ws.WebService.NewAction;
import org.sonar.api.server.ws.WebService.Param;
import org.sonar.api.utils.text.JsonWriter;
import org.sonar.db.qualityprofile.QualityProfileDao;
import org.sonar.core.util.NonNullInputFunction;
import org.sonar.server.qualityprofile.QProfile;
import org.sonar.server.qualityprofile.QProfileLoader;
import org.sonar.server.qualityprofile.QProfileLookup;
import javax.annotation.CheckForNull;
import javax.annotation.Nullable;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class SearchAction implements QProfileWsAction {
private static final String FIELD_KEY = "key";
private static final String FIELD_NAME = "name";
private static final String FIELD_LANGUAGE = "language";
private static final String FIELD_LANGUAGE_NAME = "languageName";
private static final String FIELD_IS_INHERITED = "isInherited";
private static final String FIELD_IS_DEFAULT = "isDefault";
private static final String FIELD_PARENT_KEY = "parentKey";
private static final String FIELD_PARENT_NAME = "parentName";
private static final String FIELD_ACTIVE_RULE_COUNT = "activeRuleCount";
private static final String FIELD_PROJECT_COUNT = "projectCount";
private static final Set<String> ALL_FIELDS = ImmutableSet.of(
FIELD_KEY, FIELD_NAME, FIELD_LANGUAGE, FIELD_LANGUAGE_NAME, FIELD_IS_INHERITED, FIELD_PARENT_KEY, FIELD_PARENT_NAME, FIELD_IS_DEFAULT, FIELD_ACTIVE_RULE_COUNT,
FIELD_PROJECT_COUNT);
private static final String PARAM_LANGUAGE = FIELD_LANGUAGE;
private final Languages languages;
private final QProfileLookup profileLookup;
private final QProfileLoader profileLoader;
private final QualityProfileDao qualityProfileDao;
public SearchAction(Languages languages, QProfileLookup profileLookup, QProfileLoader profileLoader, QualityProfileDao qualityProfileDao) {
this.languages = languages;
this.profileLookup = profileLookup;
this.profileLoader = profileLoader;
this.qualityProfileDao = qualityProfileDao;
}
@Override
public void define(WebService.NewController controller) {
NewAction search = controller.createAction("search")
.setSince("5.2")
.setDescription("List quality profiles.")
.setHandler(this)
.setResponseExample(getClass().getResource("example-search.json"));
search.createParam(PARAM_LANGUAGE)
.setDescription("The key of a language supported by the platform. If specified, only profiles for the given language are returned.")
.setExampleValue("js")
.setPossibleValues(LanguageParamUtils.getLanguageKeys(languages));
search.createParam(Param.FIELDS)
.setDescription("Use to restrict returned fields.")
.setExampleValue("key,language")
.setPossibleValues(ALL_FIELDS);
}
@Override
public void handle(Request request, Response response) throws Exception {
List<String> fields = request.paramAsStrings(Param.FIELDS);
String language = request.param(PARAM_LANGUAGE);
List<QProfile> profiles = null;
if (language == null) {
profiles = profileLookup.allProfiles();
} else {
profiles = profileLookup.profiles(language);
}
Collections.sort(profiles, new Comparator<QProfile>() {
@Override
public int compare(QProfile o1, QProfile o2) {
return new CompareToBuilder()
.append(o1.language(), o2.language())
.append(o1.name(), o2.name())
.toComparison();
}
});
JsonWriter json = response.newJsonWriter().beginObject();
writeProfiles(json, profiles, fields);
json.endObject().close();
}
/**
 * Writes the "profiles" JSON array for the given profiles.
 * <p>
 * Profiles whose language is not installed on the server are skipped.
 * Each field is emitted only when requested via {@code fields} (a null
 * {@code fields} list means "emit everything" - see {@link #fieldIsNeeded}).
 *
 * @param json     writer positioned inside the response's root object
 * @param profiles profiles to serialize, already sorted by the caller
 * @param fields   requested field names, or null for all fields
 */
private void writeProfiles(JsonWriter json, List<QProfile> profiles, List<String> fields) {
  // Index by profile key so parent profiles can be resolved without extra lookups.
  // NOTE(review): Maps.uniqueIndex throws IllegalArgumentException on duplicate
  // keys - assumes profile keys are unique, which the lookup should guarantee.
  Map<String, QProfile> profilesByKey = Maps.uniqueIndex(profiles, new NonNullInputFunction<QProfile, String>() {
    @Override
    protected String doApply(QProfile input) {
      return input.key();
    }
  });
  // Counters are fetched once for all profiles (one query each), not per profile.
  Map<String, Long> activeRuleCountByKey = profileLoader.countAllActiveRules();
  Map<String, Long> projectCountByKey = qualityProfileDao.countProjectsByProfileKey();
  json.name("profiles")
    .beginArray();
  for (QProfile profile : profiles) {
    if (languages.get(profile.language()) == null) {
      // Hide profiles on an unsupported language
      continue;
    }
    String key = profile.key();
    // Missing counter entries mean "zero", not "unknown".
    Long activeRuleCount = activeRuleCountByKey.containsKey(key) ? activeRuleCountByKey.get(key) : 0L;
    Long projectCount = projectCountByKey.containsKey(key) ? projectCountByKey.get(key) : 0L;
    json.beginObject()
      .prop(FIELD_KEY, nullUnlessNeeded(FIELD_KEY, key, fields))
      .prop(FIELD_NAME, nullUnlessNeeded(FIELD_NAME, profile.name(), fields))
      .prop(FIELD_ACTIVE_RULE_COUNT, nullUnlessNeeded(FIELD_ACTIVE_RULE_COUNT, activeRuleCount, fields));
    // The default profile implicitly applies to all projects, so a project
    // count is only meaningful for non-default profiles.
    if (!profile.isDefault()) {
      json.prop(FIELD_PROJECT_COUNT, nullUnlessNeeded(FIELD_PROJECT_COUNT, projectCount, fields));
    }
    writeLanguageFields(json, profile, fields);
    writeParentFields(json, profile, fields, profilesByKey);
    // Special case for booleans
    if (fieldIsNeeded(FIELD_IS_INHERITED, fields)) {
      json.prop(FIELD_IS_INHERITED, profile.isInherited());
    }
    if (fieldIsNeeded(FIELD_IS_DEFAULT, fields)) {
      json.prop(FIELD_IS_DEFAULT, profile.isDefault());
    }
    json.endObject();
  }
  json.endArray();
}
/**
 * Writes the language key and display name of the profile's language, subject
 * to field selection. Only called for languages known to be installed (callers
 * filter unsupported languages first), so the {@code languages} lookup is
 * expected to succeed.
 */
private void writeLanguageFields(JsonWriter json, QProfile profile, List<String> fields) {
  String languageKey = profile.language();
  json.prop(FIELD_LANGUAGE, nullUnlessNeeded(FIELD_LANGUAGE, languageKey, fields));
  json.prop(FIELD_LANGUAGE_NAME, nullUnlessNeeded(FIELD_LANGUAGE_NAME, languages.get(languageKey).getName(), fields));
}
/**
 * Writes the parent-profile key and name, subject to field selection.
 * When the parent profile is not present in {@code profilesByKey} (e.g. it was
 * filtered out), the parent key is used as a fallback display name.
 */
private void writeParentFields(JsonWriter json, QProfile profile, List<String> fields, Map<String, QProfile> profilesByKey) {
  String parentKey = profile.parent();
  QProfile parent = null;
  if (parentKey != null) {
    parent = profilesByKey.get(parentKey);
  }
  String parentName;
  if (parent == null) {
    parentName = parentKey;
  } else {
    parentName = parent.name();
  }
  json.prop(FIELD_PARENT_KEY, nullUnlessNeeded(FIELD_PARENT_KEY, parentKey, fields));
  json.prop(FIELD_PARENT_NAME, nullUnlessNeeded(FIELD_PARENT_NAME, parentName, fields));
}
/**
 * Returns {@code value} when {@code field} was requested, otherwise null so the
 * JSON writer skips the property entirely.
 */
@CheckForNull
private <T> T nullUnlessNeeded(String field, T value, @Nullable List<String> fields) {
  if (fieldIsNeeded(field, fields)) {
    return value;
  }
  return null;
}
/**
 * A field is needed when no explicit selection was made (null list) or when it
 * is part of the requested field names.
 */
private boolean fieldIsNeeded(String field, @Nullable List<String> fields) {
  if (fields == null) {
    // No "fields" parameter: the client wants everything.
    return true;
  }
  return fields.contains(field);
}
}
| lgpl-3.0 |
zhengbangpeng/product-mdm-zh | modules/apps/jax-rs/mdm-admin/src/main/java/org/wso2/carbon/mdm/util/Constants.java | 1072 | /*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* you may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.mdm.util;
/**
 * Holds the constants used by the MDM-Admin web application.
 * <p>
 * All members are WSO2 claim URIs used to read user-profile attributes from the
 * underlying user store.
 */
public final class Constants {

    /** Claim URI of the user's e-mail address. */
    public static final String USER_CLAIM_EMAIL_ADDRESS = "http://wso2.org/claims/emailaddress";
    /** Claim URI of the user's given (first) name. */
    public static final String USER_CLAIM_FIRST_NAME = "http://wso2.org/claims/givenname";
    /** Claim URI of the user's last name. */
    public static final String USER_CLAIM_LAST_NAME = "http://wso2.org/claims/lastname";

    // Constants holder - not meant to be instantiated.
    private Constants() {
    }
}
| apache-2.0 |
ingokegel/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/advHighlightingSealedTypes/SealedFunctionalInterface.java | 129 | <error descr="Functional interface can't be declared as 'sealed'">@FunctionalInterface</error>
sealed interface I {
void m();
} | apache-2.0 |
davidgin/parquet-mr | parquet-hadoop/src/test/java/parquet/filter2/recordlevel/PhoneBookWriter.java | 7793 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package parquet.filter2.recordlevel;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import parquet.example.data.Group;
import parquet.example.data.simple.SimpleGroup;
import parquet.filter2.compat.FilterCompat.Filter;
import parquet.hadoop.ParquetReader;
import parquet.hadoop.ParquetWriter;
import parquet.hadoop.example.GroupReadSupport;
import parquet.hadoop.example.GroupWriteSupport;
import parquet.schema.MessageType;
import parquet.schema.MessageTypeParser;
/**
 * Test utility for the record-level filter tests: models a small "phone book"
 * of {@link User} records, converts them to Parquet {@code Group}s, writes them
 * to a Parquet file, and reads them back (optionally applying a record-level
 * filter).
 */
public class PhoneBookWriter {
  private static final String schemaString =
      "message user {\n"
      + " required int64 id;\n"
      + " optional binary name (UTF8);\n"
      + " optional group location {\n"
      + " optional double lon;\n"
      + " optional double lat;\n"
      + " }\n"
      + " optional group phoneNumbers {\n"
      + " repeated group phone {\n"
      + " required int64 number;\n"
      + " optional binary kind (UTF8);\n"
      + " }\n"
      + " }\n"
      + "}\n";

  private static final MessageType schema = MessageTypeParser.parseMessageType(schemaString);

  /** Immutable lon/lat pair; either coordinate may be null (optional fields). */
  public static class Location {
    private final Double lon;
    private final Double lat;

    public Location(Double lon, Double lat) {
      this.lon = lon;
      this.lat = lat;
    }

    public Double getLon() {
      return lon;
    }

    public Double getLat() {
      return lat;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      Location location = (Location) o;
      if (lat != null ? !lat.equals(location.lat) : location.lat != null) return false;
      if (lon != null ? !lon.equals(location.lon) : location.lon != null) return false;
      return true;
    }

    @Override
    public int hashCode() {
      int result = lon != null ? lon.hashCode() : 0;
      result = 31 * result + (lat != null ? lat.hashCode() : 0);
      return result;
    }
  }

  /** Immutable phone number; {@code kind} (e.g. "home") may be null (optional field). */
  public static class PhoneNumber {
    private final long number;
    private final String kind;

    public PhoneNumber(long number, String kind) {
      this.number = number;
      this.kind = kind;
    }

    public long getNumber() {
      return number;
    }

    public String getKind() {
      return kind;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      PhoneNumber that = (PhoneNumber) o;
      if (number != that.number) return false;
      if (kind != null ? !kind.equals(that.kind) : that.kind != null) return false;
      return true;
    }

    @Override
    public int hashCode() {
      int result = (int) (number ^ (number >>> 32));
      result = 31 * result + (kind != null ? kind.hashCode() : 0);
      return result;
    }
  }

  /**
   * Immutable user record mirroring the "user" message in the schema; all
   * fields except {@code id} may be null (optional in the schema).
   */
  public static class User {
    private final long id;
    private final String name;
    private final List<PhoneNumber> phoneNumbers;
    private final Location location;

    public User(long id, String name, List<PhoneNumber> phoneNumbers, Location location) {
      this.id = id;
      this.name = name;
      this.phoneNumbers = phoneNumbers;
      this.location = location;
    }

    public long getId() {
      return id;
    }

    public String getName() {
      return name;
    }

    public List<PhoneNumber> getPhoneNumbers() {
      return phoneNumbers;
    }

    public Location getLocation() {
      return location;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      User user = (User) o;
      if (id != user.id) return false;
      if (location != null ? !location.equals(user.location) : user.location != null) return false;
      if (name != null ? !name.equals(user.name) : user.name != null) return false;
      if (phoneNumbers != null ? !phoneNumbers.equals(user.phoneNumbers) : user.phoneNumbers != null) return false;
      return true;
    }

    @Override
    public int hashCode() {
      int result = (int) (id ^ (id >>> 32));
      result = 31 * result + (name != null ? name.hashCode() : 0);
      result = 31 * result + (phoneNumbers != null ? phoneNumbers.hashCode() : 0);
      result = 31 * result + (location != null ? location.hashCode() : 0);
      return result;
    }
  }

  /**
   * Converts a {@link User} to an example-API {@link Group} matching the schema.
   * Null fields are simply omitted (they are optional in the schema).
   */
  public static SimpleGroup groupFromUser(User user) {
    SimpleGroup root = new SimpleGroup(schema);
    root.append("id", user.getId());
    if (user.getName() != null) {
      root.append("name", user.getName());
    }
    if (user.getPhoneNumbers() != null) {
      Group phoneNumbers = root.addGroup("phoneNumbers");
      for (PhoneNumber number : user.getPhoneNumbers()) {
        Group phone = phoneNumbers.addGroup("phone");
        phone.append("number", number.getNumber());
        if (number.getKind() != null) {
          phone.append("kind", number.getKind());
        }
      }
    }
    if (user.getLocation() != null) {
      Group location = root.addGroup("location");
      if (user.getLocation().getLon() != null) {
        location.append("lon", user.getLocation().getLon());
      }
      if (user.getLocation().getLat() != null) {
        location.append("lat", user.getLocation().getLat());
      }
    }
    return root;
  }

  /**
   * Writes the users to a fresh temp file and returns it. The file is deleted
   * first because ParquetWriter refuses to overwrite an existing file, and it
   * is registered for deletion on JVM exit.
   *
   * @throws IOException if the temp file cannot be prepared or written
   */
  public static File writeToFile(List<User> users) throws IOException {
    File f = File.createTempFile("phonebook", ".parquet");
    f.deleteOnExit();
    if (!f.delete()) {
      throw new IOException("couldn't delete tmp file" + f);
    }
    writeToFile(f, users);
    return f;
  }

  /**
   * Writes the users to the given file as Parquet.
   * The writer is always closed, even when a write fails (closing also flushes
   * the Parquet footer).
   *
   * @throws IOException if writing fails
   */
  public static void writeToFile(File f, List<User> users) throws IOException {
    Configuration conf = new Configuration();
    GroupWriteSupport.setSchema(schema, conf);
    ParquetWriter<Group> writer = new ParquetWriter<Group>(new Path(f.getAbsolutePath()), conf, new GroupWriteSupport());
    // try/finally (rather than try-with-resources) keeps this compatible with
    // older language levels while still guaranteeing the writer is closed.
    try {
      for (User u : users) {
        writer.write(groupFromUser(u));
      }
    } finally {
      writer.close();
    }
  }

  /**
   * Reads all records from the file, applying the given record-level filter,
   * and returns them in read order. The reader is always closed.
   *
   * @throws IOException if reading fails
   */
  public static List<Group> readFile(File f, Filter filter) throws IOException {
    Configuration conf = new Configuration();
    GroupWriteSupport.setSchema(schema, conf);
    ParquetReader<Group> reader =
        ParquetReader.builder(new GroupReadSupport(), new Path(f.getAbsolutePath()))
                     .withConf(conf)
                     .withFilter(filter)
                     .build();
    try {
      List<Group> users = new ArrayList<Group>();
      // read() returns null once the (filtered) stream is exhausted.
      Group current = reader.read();
      while (current != null) {
        users.add(current);
        current = reader.read();
      }
      return users;
    } finally {
      reader.close();
    }
  }

  /** Writes the standard test data set to the file given as args[0]. */
  public static void main(String[] args) throws IOException {
    File f = new File(args[0]);
    writeToFile(f, TestRecordLevelFilters.makeUsers());
  }
}
| apache-2.0 |
RyanMagnusson/cassandra | src/java/org/apache/cassandra/cql3/Lists.java | 19382 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3;
import static org.apache.cassandra.cql3.Constants.UNSET_VALUE;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.cql3.functions.Function;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.db.marshal.Int32Type;
import org.apache.cassandra.db.marshal.ListType;
import org.apache.cassandra.exceptions.InvalidRequestException;
import org.apache.cassandra.serializers.CollectionSerializer;
import org.apache.cassandra.serializers.MarshalException;
import org.apache.cassandra.transport.Server;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.UUIDGen;
/**
* Static helper methods and classes for lists.
*/
public abstract class Lists
{
    // Static-only helper holder: never instantiated.
    private Lists() {}

    /** Synthetic column spec used to type-check the integer index of list[idx] operations. */
    public static ColumnSpecification indexSpecOf(ColumnSpecification column)
    {
        return new ColumnSpecification(column.ksName, column.cfName, new ColumnIdentifier("idx(" + column.name + ")", true), Int32Type.instance);
    }

    /** Synthetic column spec used to type-check individual element values of a list column. */
    public static ColumnSpecification valueSpecOf(ColumnSpecification column)
    {
        return new ColumnSpecification(column.ksName, column.cfName, new ColumnIdentifier("value(" + column.name + ")", true), ((ListType)column.type).getElementsType());
    }

    /** A parsed CQL list literal, e.g. {@code [1, 2, 3]}, before preparation. */
    public static class Literal extends Term.Raw
    {
        private final List<Term.Raw> elements;

        public Literal(List<Term.Raw> elements)
        {
            this.elements = elements;
        }

        /**
         * Validates each element against the receiver's element type and prepares it.
         * If every element is already terminal the literal is bound eagerly and a
         * terminal Value is returned; otherwise a DelayedValue is returned for
         * execution-time binding.
         */
        public Term prepare(String keyspace, ColumnSpecification receiver) throws InvalidRequestException
        {
            validateAssignableTo(keyspace, receiver);

            ColumnSpecification valueSpec = Lists.valueSpecOf(receiver);
            List<Term> values = new ArrayList<>(elements.size());
            boolean allTerminal = true;
            for (Term.Raw rt : elements)
            {
                Term t = rt.prepare(keyspace, valueSpec);

                if (t.containsBindMarker())
                    throw new InvalidRequestException(String.format("Invalid list literal for %s: bind variables are not supported inside collection literals", receiver.name));

                if (t instanceof Term.NonTerminal)
                    allTerminal = false;

                values.add(t);
            }
            DelayedValue value = new DelayedValue(values);
            return allTerminal ? value.bind(QueryOptions.DEFAULT) : value;
        }

        // Rejects literals whose receiver is not a list, or whose elements are not
        // assignable to the list's element type.
        private void validateAssignableTo(String keyspace, ColumnSpecification receiver) throws InvalidRequestException
        {
            if (!(receiver.type instanceof ListType))
                throw new InvalidRequestException(String.format("Invalid list literal for %s of type %s", receiver.name, receiver.type.asCQL3Type()));

            ColumnSpecification valueSpec = Lists.valueSpecOf(receiver);
            for (Term.Raw rt : elements)
            {
                if (!rt.testAssignment(keyspace, valueSpec).isAssignable())
                    throw new InvalidRequestException(String.format("Invalid list literal for %s: value %s is not of type %s", receiver.name, rt, valueSpec.type.asCQL3Type()));
            }
        }

        public AssignmentTestable.TestResult testAssignment(String keyspace, ColumnSpecification receiver)
        {
            if (!(receiver.type instanceof ListType))
                return AssignmentTestable.TestResult.NOT_ASSIGNABLE;

            // If there is no elements, we can't say it's an exact match (an empty list if fundamentally polymorphic).
            if (elements.isEmpty())
                return AssignmentTestable.TestResult.WEAKLY_ASSIGNABLE;

            ColumnSpecification valueSpec = Lists.valueSpecOf(receiver);
            return AssignmentTestable.TestResult.testAll(keyspace, valueSpec, elements);
        }

        public String getText()
        {
            return elements.stream().map(Term.Raw::getText).collect(Collectors.joining(", ", "[", "]"));
        }
    }

    /** A fully-bound list value: an ordered list of serialized elements. */
    public static class Value extends Term.MultiItemTerminal
    {
        public final List<ByteBuffer> elements;

        public Value(List<ByteBuffer> elements)
        {
            this.elements = elements;
        }

        /**
         * Deserializes a list received over the native protocol into a Value.
         * Throws InvalidRequestException (not MarshalException) on malformed input
         * so the client gets a proper protocol error.
         */
        public static Value fromSerialized(ByteBuffer value, ListType type, int version) throws InvalidRequestException
        {
            try
            {
                // Collections have this small hack that validate cannot be called on a serialized object,
                // but compose does the validation (so we're fine).
                List<?> l = type.getSerializer().deserializeForNativeProtocol(value, version);
                List<ByteBuffer> elements = new ArrayList<>(l.size());
                for (Object element : l)
                    // elements can be null in lists that represent a set of IN values
                    elements.add(element == null ? null : type.getElementsType().decompose(element));
                return new Value(elements);
            }
            catch (MarshalException e)
            {
                throw new InvalidRequestException(e.getMessage());
            }
        }

        // Re-serializes the elements into the native-protocol wire format.
        public ByteBuffer get(int protocolVersion)
        {
            return CollectionSerializer.pack(elements, elements.size(), protocolVersion);
        }

        /** Element-wise equality using the list's element-type comparator (order matters). */
        public boolean equals(ListType lt, Value v)
        {
            if (elements.size() != v.elements.size())
                return false;

            for (int i = 0; i < elements.size(); i++)
                if (lt.getElementsType().compare(elements.get(i), v.elements.get(i)) != 0)
                    return false;

            return true;
        }

        public List<ByteBuffer> getElements()
        {
            return elements;
        }
    }

    /**
     * Basically similar to a Value, but with some non-pure function (that need
     * to be evaluated at execution time) in it.
     *
     * Note: this would also work for a list with bind markers, but we don't support
     * that because 1) it's not excessively useful and 2) we wouldn't have a good
     * column name to return in the ColumnSpecification for those markers (not a
     * blocker per-se but we don't bother due to 1)).
     */
    public static class DelayedValue extends Term.NonTerminal
    {
        private final List<Term> elements;

        public DelayedValue(List<Term> elements)
        {
            this.elements = elements;
        }

        public boolean containsBindMarker()
        {
            // False since we don't support them in collection
            return false;
        }

        public void collectMarkerSpecification(VariableSpecifications boundNames)
        {
        }

        /**
         * Binds every element at execution time. Rejects null elements, and if any
         * element is UNSET the whole list binds to UNSET_VALUE.
         */
        public Terminal bind(QueryOptions options) throws InvalidRequestException
        {
            List<ByteBuffer> buffers = new ArrayList<ByteBuffer>(elements.size());
            for (Term t : elements)
            {
                ByteBuffer bytes = t.bindAndGet(options);
                if (bytes == null)
                    throw new InvalidRequestException("null is not supported inside collections");
                if (bytes == ByteBufferUtil.UNSET_BYTE_BUFFER)
                    return UNSET_VALUE;
                buffers.add(bytes);
            }
            return new Value(buffers);
        }

        public Iterable<Function> getFunctions()
        {
            return Terms.getFunctions(elements);
        }
    }

    /**
     * A marker for List values and IN relations
     */
    public static class Marker extends AbstractMarker
    {
        protected Marker(int bindIndex, ColumnSpecification receiver)
        {
            super(bindIndex, receiver);
            assert receiver.type instanceof ListType;
        }

        // Binds the whole serialized list supplied for this marker.
        // null stays null (distinct from UNSET, which maps to UNSET_VALUE).
        public Terminal bind(QueryOptions options) throws InvalidRequestException
        {
            ByteBuffer value = options.getValues().get(bindIndex);
            if (value == null)
                return null;
            if (value == ByteBufferUtil.UNSET_BYTE_BUFFER)
                return UNSET_VALUE;
            return Value.fromSerialized(value, (ListType)receiver.type, options.getProtocolVersion());
        }
    }

    /*
     * For prepend, we need to be able to generate unique but decreasing time
     * UUID, which is a bit challenging. To do that, given a time in milliseconds,
     * we adds a number representing the 100-nanoseconds precision and make sure
     * that within the same millisecond, that number is always decreasing. We
     * do rely on the fact that the user will only provide decreasing
     * milliseconds timestamp for that purpose.
     */
    private static class PrecisionTime
    {
        // Our reference time (1 jan 2010, 00:00:00) in milliseconds.
        private static final long REFERENCE_TIME = 1262304000000L;
        // Last value handed out; updated via CAS so concurrent prepends never
        // receive the same (millis, nanos) pair.
        private static final AtomicReference<PrecisionTime> last = new AtomicReference<>(new PrecisionTime(Long.MAX_VALUE, 0));

        public final long millis;
        public final int nanos;

        PrecisionTime(long millis, int nanos)
        {
            this.millis = millis;
            this.nanos = nanos;
        }

        static PrecisionTime getNext(long millis)
        {
            while (true)
            {
                PrecisionTime current = last.get();
                assert millis <= current.millis;
                // A new (smaller) millisecond restarts the sub-millisecond counter at
                // 9999; within the same millisecond the counter decreases by one.
                // NOTE(review): Math.max clamps nanos at 0, so more than 10000 prepends
                // in the same millisecond would stop producing distinct values - TODO confirm.
                PrecisionTime next = millis < current.millis
                    ? new PrecisionTime(millis, 9999)
                    : new PrecisionTime(millis, Math.max(0, current.nanos - 1));
                if (last.compareAndSet(current, next))
                    return next;
            }
        }
    }

    /** Operation for {@code SET l = [...]}: overwrite the whole list. */
    public static class Setter extends Operation
    {
        public Setter(ColumnDefinition column, Term t)
        {
            super(column, t);
        }

        public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException
        {
            Term.Terminal value = t.bind(params.options);
            if (value == UNSET_VALUE)
                return;
            // delete + append
            if (column.type.isMultiCell())
                params.setComplexDeletionTimeForOverwrite(column);
            Appender.doAppend(value, column, params);
        }
    }

    // Number of live cells (elements) the list currently has in the prefetched row,
    // or 0 when the row/column is absent.
    private static int existingSize(Row row, ColumnDefinition column)
    {
        if (row == null)
            return 0;

        ComplexColumnData complexData = row.getComplexColumnData(column);
        return complexData == null ? 0 : complexData.cellsCount();
    }

    /** Operation for {@code SET l[i] = v}: requires a read to resolve index i to a cell path. */
    public static class SetterByIndex extends Operation
    {
        private final Term idx;

        public SetterByIndex(ColumnDefinition column, Term idx, Term t)
        {
            super(column, t);
            this.idx = idx;
        }

        @Override
        public boolean requiresRead()
        {
            return true;
        }

        @Override
        public void collectMarkerSpecification(VariableSpecifications boundNames)
        {
            super.collectMarkerSpecification(boundNames);
            idx.collectMarkerSpecification(boundNames);
        }

        public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException
        {
            // we should not get here for frozen lists
            assert column.type.isMultiCell() : "Attempted to set an individual element on a frozen list";

            ByteBuffer index = idx.bindAndGet(params.options);
            ByteBuffer value = t.bindAndGet(params.options);

            if (index == null)
                throw new InvalidRequestException("Invalid null value for list index");
            if (index == ByteBufferUtil.UNSET_BYTE_BUFFER)
                throw new InvalidRequestException("Invalid unset value for list index");

            Row existingRow = params.getPrefetchedRow(partitionKey, params.currentClustering());
            int existingSize = existingSize(existingRow, column);
            int idx = ByteBufferUtil.toInt(index);
            if (existingSize == 0)
                throw new InvalidRequestException("Attempted to set an element on a list which is null");
            if (idx < 0 || idx >= existingSize)
                throw new InvalidRequestException(String.format("List index %d out of bound, list has size %d", idx, existingSize));

            // The list index is positional, but cells are keyed by time-UUID paths,
            // so we look up the existing cell's path to overwrite (or delete) it.
            CellPath elementPath = existingRow.getComplexColumnData(column).getCellByIndex(idx).path();
            // Setting an element to null deletes it (tombstone); UNSET is a no-op.
            if (value == null)
                params.addTombstone(column, elementPath);
            else if (value != ByteBufferUtil.UNSET_BYTE_BUFFER)
                params.addCell(column, elementPath, value);
        }
    }

    /** Operation for {@code SET l = l + [...]}: append elements at the end. */
    public static class Appender extends Operation
    {
        public Appender(ColumnDefinition column, Term t)
        {
            super(column, t);
        }

        public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException
        {
            assert column.type.isMultiCell() : "Attempted to append to a frozen list";
            Term.Terminal value = t.bind(params.options);
            doAppend(value, column, params);
        }

        static void doAppend(Term.Terminal value, ColumnDefinition column, UpdateParameters params) throws InvalidRequestException
        {
            if (column.type.isMultiCell())
            {
                // If we append null, do nothing. Note that for Setter, we've
                // already removed the previous value so we're good here too
                if (value == null)
                    return;

                // Each element gets a fresh (increasing) time-UUID cell path, which is
                // what keeps list elements ordered by insertion.
                for (ByteBuffer buffer : ((Value) value).elements)
                {
                    ByteBuffer uuid = ByteBuffer.wrap(UUIDGen.getTimeUUIDBytes());
                    params.addCell(column, CellPath.create(uuid), buffer);
                }
            }
            else
            {
                // for frozen lists, we're overwriting the whole cell value
                if (value == null)
                    params.addTombstone(column);
                else
                    params.addCell(column, value.get(Server.CURRENT_VERSION));
            }
        }
    }

    /** Operation for {@code SET l = [...] + l}: prepend elements at the front. */
    public static class Prepender extends Operation
    {
        public Prepender(ColumnDefinition column, Term t)
        {
            super(column, t);
        }

        public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException
        {
            assert column.type.isMultiCell() : "Attempted to prepend to a frozen list";
            Term.Terminal value = t.bind(params.options);
            if (value == null || value == UNSET_VALUE)
                return;

            // Mirror the current time around REFERENCE_TIME so that successive
            // prepends produce strictly *decreasing* timestamps, sorting prepended
            // elements before everything already in the list.
            long time = PrecisionTime.REFERENCE_TIME - (System.currentTimeMillis() - PrecisionTime.REFERENCE_TIME);

            // Iterate in reverse so that within one statement the elements keep
            // their literal order once sorted by the decreasing timestamps.
            List<ByteBuffer> toAdd = ((Value) value).elements;
            for (int i = toAdd.size() - 1; i >= 0; i--)
            {
                PrecisionTime pt = PrecisionTime.getNext(time);
                ByteBuffer uuid = ByteBuffer.wrap(UUIDGen.getTimeUUIDBytes(pt.millis, pt.nanos));
                params.addCell(column, CellPath.create(uuid), toAdd.get(i));
            }
        }
    }

    /** Operation for {@code SET l = l - [...]}: remove all occurrences of the given values. */
    public static class Discarder extends Operation
    {
        public Discarder(ColumnDefinition column, Term t)
        {
            super(column, t);
        }

        @Override
        public boolean requiresRead()
        {
            return true;
        }

        public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException
        {
            assert column.type.isMultiCell() : "Attempted to delete from a frozen list";

            // We want to call bind before possibly returning to reject queries where the value provided is not a list.
            Term.Terminal value = t.bind(params.options);

            Row existingRow = params.getPrefetchedRow(partitionKey, params.currentClustering());
            ComplexColumnData complexData = existingRow == null ? null : existingRow.getComplexColumnData(column);
            if (value == null || value == UNSET_VALUE || complexData == null)
                return;

            // Note: below, we will call 'contains' on this toDiscard list for each element of existingList.
            // Meaning that if toDiscard is big, converting it to a HashSet might be more efficient. However,
            // the read-before-write this operation requires limits its usefulness on big lists, so in practice
            // toDiscard will be small and keeping a list will be more efficient.
            List<ByteBuffer> toDiscard = ((Value)value).elements;
            for (Cell cell : complexData)
            {
                if (toDiscard.contains(cell.value()))
                    params.addTombstone(column, cell.path());
            }
        }
    }

    /** Operation for {@code DELETE l[i]}: remove the element at a positional index. */
    public static class DiscarderByIndex extends Operation
    {
        public DiscarderByIndex(ColumnDefinition column, Term idx)
        {
            super(column, idx);
        }

        @Override
        public boolean requiresRead()
        {
            return true;
        }

        public void execute(DecoratedKey partitionKey, UpdateParameters params) throws InvalidRequestException
        {
            assert column.type.isMultiCell() : "Attempted to delete an item by index from a frozen list";

            Term.Terminal index = t.bind(params.options);
            if (index == null)
                throw new InvalidRequestException("Invalid null value for list index");
            if (index == Constants.UNSET_VALUE)
                return;

            Row existingRow = params.getPrefetchedRow(partitionKey, params.currentClustering());
            int existingSize = existingSize(existingRow, column);
            int idx = ByteBufferUtil.toInt(index.get(params.options.getProtocolVersion()));
            if (existingSize == 0)
                throw new InvalidRequestException("Attempted to delete an element from a list which is null");
            if (idx < 0 || idx >= existingSize)
                throw new InvalidRequestException(String.format("List index %d out of bound, list has size %d", idx, existingSize));

            // Translate the positional index to the cell's time-UUID path and tombstone it.
            params.addTombstone(column, existingRow.getComplexColumnData(column).getCellByIndex(idx).path());
        }
    }
}
| apache-2.0 |
josh-mckenzie/cassandra | src/java/org/apache/cassandra/db/streaming/CassandraOutgoingFile.java | 9483 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.streaming;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.compaction.AbstractCompactionStrategy;
import org.apache.cassandra.db.compaction.LeveledCompactionStrategy;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.io.sstable.Component;
import org.apache.cassandra.io.sstable.KeyIterator;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.util.DataOutputStreamPlus;
import org.apache.cassandra.net.async.ByteBufDataOutputStreamPlus;
import org.apache.cassandra.schema.TableId;
import org.apache.cassandra.streaming.OutgoingStream;
import org.apache.cassandra.streaming.StreamOperation;
import org.apache.cassandra.streaming.StreamSession;
import org.apache.cassandra.utils.concurrent.Ref;
import static org.apache.cassandra.db.compaction.Verifier.RangeOwnHelper;
/**
* used to transfer the part(or whole) of a SSTable data file
*/
public class CassandraOutgoingFile implements OutgoingStream
{
public static final List<Component> STREAM_COMPONENTS = ImmutableList.of(Component.DATA, Component.PRIMARY_INDEX, Component.STATS,
Component.COMPRESSION_INFO, Component.FILTER, Component.SUMMARY,
Component.DIGEST, Component.CRC);
private final Ref<SSTableReader> ref;
private final long estimatedKeys;
private final List<SSTableReader.PartitionPositionBounds> sections;
private final String filename;
private final CassandraStreamHeader header;
private final boolean keepSSTableLevel;
private final ComponentManifest manifest;
private Boolean isFullyContained;
private final List<Range<Token>> normalizedRanges;
public CassandraOutgoingFile(StreamOperation operation, Ref<SSTableReader> ref,
List<SSTableReader.PartitionPositionBounds> sections, List<Range<Token>> normalizedRanges,
long estimatedKeys)
{
Preconditions.checkNotNull(ref.get());
Range.assertNormalized(normalizedRanges);
this.ref = ref;
this.estimatedKeys = estimatedKeys;
this.sections = sections;
this.normalizedRanges = ImmutableList.copyOf(normalizedRanges);
this.filename = ref.get().getFilename();
this.manifest = getComponentManifest(ref.get());
SSTableReader sstable = ref.get();
keepSSTableLevel = operation == StreamOperation.BOOTSTRAP || operation == StreamOperation.REBUILD;
this.header =
CassandraStreamHeader.builder()
.withSSTableFormat(sstable.descriptor.formatType)
.withSSTableVersion(sstable.descriptor.version)
.withSSTableLevel(keepSSTableLevel ? sstable.getSSTableLevel() : 0)
.withEstimatedKeys(estimatedKeys)
.withSections(sections)
.withCompressionMetadata(sstable.compression ? sstable.getCompressionMetadata() : null)
.withSerializationHeader(sstable.header.toComponent())
.isEntireSSTable(shouldStreamEntireSSTable())
.withComponentManifest(manifest)
.withFirstKey(sstable.first)
.withTableId(sstable.metadata().id)
.build();
}
@VisibleForTesting
public static ComponentManifest getComponentManifest(SSTableReader sstable)
{
LinkedHashMap<Component, Long> components = new LinkedHashMap<>(STREAM_COMPONENTS.size());
for (Component component : STREAM_COMPONENTS)
{
File file = new File(sstable.descriptor.filenameFor(component));
if (file.exists())
components.put(component, file.length());
}
return new ComponentManifest(components);
}
public static CassandraOutgoingFile fromStream(OutgoingStream stream)
{
Preconditions.checkArgument(stream instanceof CassandraOutgoingFile);
return (CassandraOutgoingFile) stream;
}
@VisibleForTesting
public Ref<SSTableReader> getRef()
{
return ref;
}
@Override
public String getName()
{
return filename;
}
@Override
public long getSize()
{
return header.size();
}
@Override
public TableId getTableId()
{
return ref.get().metadata().id;
}
@Override
public long getRepairedAt()
{
return ref.get().getRepairedAt();
}
@Override
public UUID getPendingRepair()
{
return ref.get().getPendingRepair();
}
@Override
public void write(StreamSession session, DataOutputStreamPlus out, int version) throws IOException
{
SSTableReader sstable = ref.get();
CassandraStreamHeader.serializer.serialize(header, out, version);
out.flush();
if (shouldStreamEntireSSTable() && out instanceof ByteBufDataOutputStreamPlus)
{
CassandraEntireSSTableStreamWriter writer = new CassandraEntireSSTableStreamWriter(sstable, session, manifest);
writer.write((ByteBufDataOutputStreamPlus) out);
}
else
{
CassandraStreamWriter writer = (header.compressionInfo == null) ?
new CassandraStreamWriter(sstable, header.sections, session) :
new CassandraCompressedStreamWriter(sstable, header.sections,
header.compressionInfo, session);
writer.write(out);
}
}
@VisibleForTesting
public boolean shouldStreamEntireSSTable()
{
// don't stream if full sstable transfers are disabled or legacy counter shards are present
if (!DatabaseDescriptor.streamEntireSSTables() || ref.get().getSSTableMetadata().hasLegacyCounterShards)
return false;
ColumnFamilyStore cfs = ColumnFamilyStore.getIfExists(getTableId());
if (cfs == null)
return false;
AbstractCompactionStrategy compactionStrategy = cfs.getCompactionStrategyManager()
.getCompactionStrategyFor(ref.get());
if (compactionStrategy instanceof LeveledCompactionStrategy)
return contained(normalizedRanges, ref.get());
return false;
}
@VisibleForTesting
public boolean contained(List<Range<Token>> normalizedRanges, SSTableReader sstable)
{
if (isFullyContained != null)
return isFullyContained;
isFullyContained = computeContainment(normalizedRanges, sstable);
return isFullyContained;
}
private boolean computeContainment(List<Range<Token>> normalizedRanges, SSTableReader sstable)
{
if (normalizedRanges == null)
return false;
RangeOwnHelper rangeOwnHelper = new RangeOwnHelper(normalizedRanges);
try (KeyIterator iter = new KeyIterator(sstable.descriptor, sstable.metadata()))
{
while (iter.hasNext())
{
DecoratedKey key = iter.next();
if (!rangeOwnHelper.check(key))
return false;
}
}
return true;
}
    /** Releases our reference to the sstable once streaming of this file is complete. */
    @Override
    public void finish()
    {
        ref.release();
    }
public boolean equals(Object o)
{
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
CassandraOutgoingFile that = (CassandraOutgoingFile) o;
return estimatedKeys == that.estimatedKeys &&
Objects.equals(ref, that.ref) &&
Objects.equals(sections, that.sections);
}
public int hashCode()
{
return Objects.hash(ref, estimatedKeys, sections);
}
    /** Human-readable identifier used in stream logs; shows only the filename. */
    @Override
    public String toString()
    {
        return "CassandraOutgoingFile{" + filename + '}';
    }
}
| apache-2.0 |
azaytsev/ios-driver | server/src/main/java/org/uiautomation/ios/command/web/FindElementsHandler.java | 3748 | /*
* Copyright 2012-2013 eBay Software Foundation and ios-driver committers
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.uiautomation.ios.command.web;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.openqa.selenium.InvalidSelectorException;
import org.openqa.selenium.NoSuchElementException;
import org.openqa.selenium.remote.Response;
import org.uiautomation.ios.communication.WebDriverLikeRequest;
import org.uiautomation.ios.IOSServerManager;
import org.uiautomation.ios.command.BaseWebCommandHandler;
import org.uiautomation.ios.wkrdp.RemoteExceptionException;
import org.uiautomation.ios.wkrdp.model.RemoteWebElement;
import java.util.ArrayList;
import java.util.List;
public class FindElementsHandler extends BaseWebCommandHandler {

  public FindElementsHandler(IOSServerManager driver, WebDriverLikeRequest request) {
    super(driver, request);
  }

  /**
   * Finds all elements matching the request's locator, retrying until the
   * implicit-wait deadline expires. Per the WebDriver protocol, a find-elements
   * call that matches nothing returns an empty list rather than an error.
   */
  @Override
  public Response handle() throws Exception {
    waitForPageToLoad();

    int implicitWait = (Integer) getConf("implicit_wait", 0);
    long deadline = System.currentTimeMillis() + implicitWait;

    List<RemoteWebElement> elements = null;
    do {
      try {
        elements = findElements();
        if (!elements.isEmpty()) {
          break;
        }
      } catch (InvalidSelectorException e) {
        // no recovery here.
        throw e;
      } catch (NoSuchElementException ignored) {
        // ignore: keep retrying until the deadline.
      } catch (RemoteExceptionException ignored) {
        // ignore.
        // if the page is reloading, the previous nodeId won't be there anymore, resulting in a
        // RemoteExceptionException: Could not find node with given id. Keep looking.
      }
    } while (System.currentTimeMillis() < deadline);

    List<JSONObject> list = new ArrayList<>();
    // BUG FIX: if every attempt threw until the deadline, 'elements' was still null
    // and the loop below raised a NullPointerException. Treat that case as "no match".
    if (elements != null) {
      for (RemoteWebElement el : elements) {
        list.add(new JSONObject().put("ELEMENT", "" + el.getReference()));
      }
    }

    Response resp = new Response();
    resp.setSessionId(getSession().getSessionId());
    resp.setStatus(0);
    resp.setValue(list);
    return resp;
  }

  /**
   * Performs a single lookup, scoped to the element referenced in the URL
   * (":reference" variable) or to the document root when none is given.
   */
  private List<RemoteWebElement> findElements() throws Exception {
    JSONObject payload = getRequest().getPayload();
    String type = payload.getString("using");
    String value = payload.getString("value");

    RemoteWebElement element;
    if (getRequest().hasVariable(":reference")) {
      String ref = getRequest().getVariableValue(":reference");
      element = getWebDriver().createElement(ref);
    } else {
      element = getWebDriver().getDocument();
    }

    List<RemoteWebElement> res;
    if ("link text".equals(type)) {
      res = element.findElementsByLinkText(value, false);
    } else if ("partial link text".equals(type)) {
      res = element.findElementsByLinkText(value, true);
    } else if ("xpath".equals(type)) {
      res = element.findElementsByXpath(value);
    } else {
      // All remaining strategies (id, name, class name, css selector...) are
      // converted to an equivalent CSS selector.
      String cssSelector = ToCSSSelectorConverter.convertToCSSSelector(type, value);
      res = element.findElementsByCSSSelector(cssSelector);
    }
    return res;
  }

  @Override
  public JSONObject configurationDescription() throws JSONException {
    return noConfigDefined();
  }
}
| apache-2.0 |
codeaudit/optaplanner | optaplanner-examples/src/test/java/org/optaplanner/examples/investment/persistence/InvestmentImporterTest.java | 1381 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.investment.persistence;
import java.io.File;
import java.util.Collection;
import org.junit.runners.Parameterized;
import org.optaplanner.examples.common.persistence.AbstractSolutionImporter;
import org.optaplanner.examples.common.persistence.SolutionImporterTest;
public class InvestmentImporterTest extends SolutionImporterTest {

    public InvestmentImporterTest(File solutionFile) {
        super(solutionFile);
    }

    /** Supplies every example input file as a separate parameterized test case. */
    @Parameterized.Parameters(name = "{index}: {0}")
    public static Collection<Object[]> getInputFilesAsParameters() {
        return getInputFilesAsParameters(new InvestmentImporter());
    }

    /** The importer under test. */
    @Override
    protected AbstractSolutionImporter createSolutionImporter() {
        return new InvestmentImporter();
    }
}
| apache-2.0 |
ydai1124/gobblin-1 | gobblin-data-management/src/main/java/gobblin/data/management/policy/NewestKSelectionPolicy.java | 5363 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gobblin.data.management.policy;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import lombok.Data;
import lombok.ToString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import gobblin.data.management.version.DatasetVersion;
/**
* Select the newest k versions of the dataset.
*/
@ToString
public class NewestKSelectionPolicy<T extends DatasetVersion> implements VersionSelectionPolicy<T> {

  private static final Logger LOGGER = LoggerFactory.getLogger(NewestKSelectionPolicy.class);

  /**
   * The number of newest versions to select. Only one of
   * {@link #NEWEST_K_VERSIONS_SELECTED_KEY} and {@link #NEWEST_K_VERSIONS_NOTSELECTED_KEY} can
   * be specified. The default is {@link #NEWEST_K_VERSIONS_SELECTED_KEY} with a value of
   * {@link #VERSIONS_SELECTED_DEFAULT}. Valid values are in the range
   * [{@link #MIN_VERSIONS_ALLOWED}, {@link #MAX_VERSIONS_ALLOWED}].
   */
  public static final String NEWEST_K_VERSIONS_SELECTED_KEY = "selection.newestK.versionsSelected";
  /**
   * The number of newest versions to exclude from the result. Only one of
   * {@link #NEWEST_K_VERSIONS_SELECTED_KEY} and {@link #NEWEST_K_VERSIONS_NOTSELECTED_KEY} can
   * be specified. The default is {@link #NEWEST_K_VERSIONS_SELECTED_KEY} with a value of
   * {@link #VERSIONS_SELECTED_DEFAULT}. Valid values are in the range
   * [{@link #MIN_VERSIONS_ALLOWED}, {@link #MAX_VERSIONS_ALLOWED}].
   */
  public static final String NEWEST_K_VERSIONS_NOTSELECTED_KEY = "selection.newestK.versionsNotSelected";

  public static final Integer VERSIONS_SELECTED_DEFAULT = 2;
  public static final Integer MAX_VERSIONS_ALLOWED = 1000000;
  public static final Integer MIN_VERSIONS_ALLOWED = 1;

  /** Parsed configuration: the count K and whether it means "keep K" or "drop K". */
  @Data
  private static class Params {
    private final int versionsSelected;
    private final boolean excludeMode;

    Params(int versionsSelected, boolean excludeMode) {
      Preconditions.checkArgument(versionsSelected >= MIN_VERSIONS_ALLOWED && versionsSelected <= MAX_VERSIONS_ALLOWED);
      this.versionsSelected = versionsSelected;
      this.excludeMode = excludeMode;
    }

    static Params createFromConfig(Config config) {
      if (config.hasPath(NEWEST_K_VERSIONS_SELECTED_KEY)) {
        if (config.hasPath(NEWEST_K_VERSIONS_NOTSELECTED_KEY)) {
          throw new RuntimeException("Only one of " + NEWEST_K_VERSIONS_SELECTED_KEY + " and "
              + NEWEST_K_VERSIONS_NOTSELECTED_KEY + " can be specified.");
        }
        return new Params(config.getInt(NEWEST_K_VERSIONS_SELECTED_KEY), false);
      } else if (config.hasPath(NEWEST_K_VERSIONS_NOTSELECTED_KEY)) {
        return new Params(config.getInt(NEWEST_K_VERSIONS_NOTSELECTED_KEY), true);
      } else {
        return new Params(VERSIONS_SELECTED_DEFAULT, false);
      }
    }

    static Params createFromProps(Properties props) {
      return createFromConfig(ConfigFactory.parseProperties(props));
    }
  }

  private final Params params;

  private NewestKSelectionPolicy(Params params) {
    this.params = params;
    // BUG FIX: the select/exclude branches of this message were swapped, so the log
    // reported the opposite of the configured behavior.
    LOGGER.info(String.format("Will %s %d versions of each dataset.", (this.params.excludeMode ? "exclude" : "select"),
        this.params.versionsSelected));
  }

  public NewestKSelectionPolicy(int versionsRetained, boolean excludeMode) {
    this(new Params(versionsRetained, excludeMode));
  }

  public NewestKSelectionPolicy(Properties props) {
    this(Params.createFromProps(props));
  }

  public NewestKSelectionPolicy(Config config) {
    this(Params.createFromConfig(config));
  }

  @Override
  public Class<? extends DatasetVersion> versionClass() {
    return DatasetVersion.class;
  }

  /**
   * In select mode, returns the newest K versions (head of the list); in exclude
   * mode, returns everything except the newest K. Assumes {@code allVersions} is
  * ordered newest-first — TODO confirm against the caller's contract.
   */
  @Override
  public Collection<T> listSelectedVersions(List<T> allVersions) {
    if (this.isExcludeMode()) {
      return getBoundarySafeSublist(allVersions, this.getVersionsSelected(), allVersions.size());
    }
    return getBoundarySafeSublist(allVersions, 0, this.getVersionsSelected());
  }

  /** {@link List#subList} with both indices clamped to the list size to avoid IndexOutOfBoundsException. */
  private List<T> getBoundarySafeSublist(List<T> l, int fromIndex, int toIndex) {
    fromIndex = Math.min(fromIndex, l.size());
    toIndex = Math.min(toIndex, l.size());
    return l.subList(fromIndex, toIndex);
  }

  @VisibleForTesting
  int getVersionsSelected() {
    return this.params.getVersionsSelected();
  }

  @VisibleForTesting
  boolean isExcludeMode() {
    return this.params.isExcludeMode();
  }
}
| apache-2.0 |
paulpv/BarcodeEye | src/com/github/barcodeeye/scan/result/supplement/URIResultInfoRetriever.java | 1913 | /*
* Copyright (C) 2010 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.barcodeeye.scan.result.supplement;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import android.content.Context;
import android.widget.TextView;
import com.github.barcodeeye.R;
import com.github.barcodeeye.migrated.HttpHelper;
import com.google.zxing.client.result.URIParsedResult;
final class URIResultInfoRetriever extends SupplementalInfoRetriever {

  private static final int MAX_REDIRECTS = 5;

  private final URIParsedResult result;
  private final String redirectString;

  URIResultInfoRetriever(TextView textView, URIParsedResult result, Context context) {
    super(textView);
    redirectString = context.getString(R.string.msg_redirect);
    this.result = result;
  }

  /**
   * Follows HTTP redirects for the scanned URI, appending one "redirect" note
   * per hop, up to {@link #MAX_REDIRECTS} hops.
   */
  @Override
  void retrieveSupplementalInfo() throws IOException {
    URI current;
    try {
      current = new URI(result.getURI());
    } catch (URISyntaxException ignored) {
      // Not a parseable URI; nothing to resolve.
      return;
    }

    URI resolved = HttpHelper.unredirect(current);
    for (int hops = 0; hops < MAX_REDIRECTS && !current.equals(resolved); hops++) {
      append(result.getDisplayResult(),
             null,
             new String[] { redirectString + " : " + resolved },
             resolved.toString());
      current = resolved;
      resolved = HttpHelper.unredirect(resolved);
    }
  }
}
| apache-2.0 |
psoreide/bnd | biz.aQute.bndlib/src/aQute/bnd/classfile/MethodParametersAttribute.java | 2399 | package aQute.bnd.classfile;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
/**
 * The {@code MethodParameters} class-file attribute (JVMS 4.7.24): a u1 count
 * followed by {name_index (u2), access_flags (u2)} pairs, one per parameter.
 */
public class MethodParametersAttribute implements Attribute {
	public static final String NAME = "MethodParameters";
	public final MethodParameter[] parameters;

	public MethodParametersAttribute(MethodParameter[] parameters) {
		this.parameters = parameters;
	}

	@Override
	public String name() {
		return NAME;
	}

	@Override
	public String toString() {
		return NAME + " " + Arrays.toString(parameters);
	}

	/** Deserializes the attribute body from {@code in}, resolving names via the constant pool. */
	public static MethodParametersAttribute read(DataInput in, ConstantPool constant_pool) throws IOException {
		int count = in.readUnsignedByte();
		MethodParameter[] read = new MethodParameter[count];
		for (int i = 0; i < count; i++) {
			read[i] = MethodParameter.read(in, constant_pool);
		}
		return new MethodParametersAttribute(read);
	}

	/** Serializes the attribute header (name index, length) followed by the parameter entries. */
	@Override
	public void write(DataOutput out, ConstantPool constant_pool) throws IOException {
		out.writeShort(constant_pool.utf8Info(name()));
		out.writeInt(attribute_length());
		out.writeByte(parameters.length);
		for (MethodParameter parameter : parameters) {
			parameter.write(out, constant_pool);
		}
	}

	/** Body length in bytes: the u1 count plus each entry's fixed size. */
	@Override
	public int attribute_length() {
		int length = Byte.BYTES;
		for (MethodParameter parameter : parameters) {
			length += parameter.value_length();
		}
		return length;
	}

	/** One method_parameters entry: parameter name plus its access flags. */
	public static class MethodParameter {
		public final String name;
		public final int access_flags;

		public MethodParameter(String name, int access_flags) {
			this.name = name;
			this.access_flags = access_flags;
		}

		@Override
		public String toString() {
			return name + ":" + access_flags;
		}

		static MethodParameter read(DataInput in, ConstantPool constant_pool) throws IOException {
			int name_index = in.readUnsignedShort();
			int flags = in.readUnsignedShort();
			return new MethodParameter(constant_pool.utf8(name_index), flags);
		}

		void write(DataOutput out, ConstantPool constant_pool) throws IOException {
			out.writeShort(constant_pool.utf8Info(name));
			out.writeShort(access_flags);
		}

		/** Fixed entry size: two u2 fields. */
		int value_length() {
			return 2 * Short.BYTES;
		}
	}
}
| apache-2.0 |
facebook/presto | presto-main/src/test/java/com/facebook/presto/execution/buffer/TestBroadcastOutputBuffer.java | 47657 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.execution.buffer;
import com.facebook.presto.common.Page;
import com.facebook.presto.common.type.BigintType;
import com.facebook.presto.execution.StateMachine;
import com.facebook.presto.execution.buffer.OutputBuffers.OutputBufferId;
import com.facebook.presto.memory.context.AggregatedMemoryContext;
import com.facebook.presto.memory.context.MemoryReservationHandler;
import com.facebook.presto.memory.context.SimpleLocalMemoryContext;
import com.facebook.presto.spi.page.PagesSerde;
import com.google.common.collect.ImmutableList;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import io.airlift.units.DataSize;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import static com.facebook.airlift.concurrent.Threads.daemonThreadsNamed;
import static com.facebook.presto.common.type.BigintType.BIGINT;
import static com.facebook.presto.execution.buffer.BufferResult.emptyResults;
import static com.facebook.presto.execution.buffer.BufferState.OPEN;
import static com.facebook.presto.execution.buffer.BufferState.TERMINAL_BUFFER_STATES;
import static com.facebook.presto.execution.buffer.BufferTestUtils.MAX_WAIT;
import static com.facebook.presto.execution.buffer.BufferTestUtils.NO_WAIT;
import static com.facebook.presto.execution.buffer.BufferTestUtils.acknowledgeBufferResult;
import static com.facebook.presto.execution.buffer.BufferTestUtils.addPage;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertBufferResultEquals;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertFinished;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertFutureIsDone;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertQueueClosed;
import static com.facebook.presto.execution.buffer.BufferTestUtils.assertQueueState;
import static com.facebook.presto.execution.buffer.BufferTestUtils.createBufferResult;
import static com.facebook.presto.execution.buffer.BufferTestUtils.createPage;
import static com.facebook.presto.execution.buffer.BufferTestUtils.enqueuePage;
import static com.facebook.presto.execution.buffer.BufferTestUtils.getBufferResult;
import static com.facebook.presto.execution.buffer.BufferTestUtils.getFuture;
import static com.facebook.presto.execution.buffer.BufferTestUtils.sizeOfPages;
import static com.facebook.presto.execution.buffer.OutputBuffers.BROADCAST_PARTITION_ID;
import static com.facebook.presto.execution.buffer.OutputBuffers.BufferType.BROADCAST;
import static com.facebook.presto.execution.buffer.OutputBuffers.createInitialEmptyOutputBuffers;
import static com.facebook.presto.execution.buffer.TestingPagesSerdeFactory.testingPagesSerde;
import static com.facebook.presto.memory.context.AggregatedMemoryContext.newRootAggregatedMemoryContext;
import static com.facebook.presto.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static com.google.common.util.concurrent.MoreExecutors.directExecutor;
import static io.airlift.units.DataSize.Unit.BYTE;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.Executors.newScheduledThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
public class TestBroadcastOutputBuffer
{
private static final PagesSerde PAGES_SERDE = testingPagesSerde();
private static final String TASK_INSTANCE_ID = "task-instance-id";
private static final ImmutableList<BigintType> TYPES = ImmutableList.of(BIGINT);
private static final OutputBufferId FIRST = new OutputBufferId(0);
private static final OutputBufferId SECOND = new OutputBufferId(1);
private static final OutputBufferId THIRD = new OutputBufferId(2);
private ScheduledExecutorService stateNotificationExecutor;
    /** Creates the shared executor used for buffer state-change notifications. */
    @BeforeClass
    public void setUp()
    {
        stateNotificationExecutor = newScheduledThreadPool(5, daemonThreadsNamed("test-%s"));
    }
    /** Shuts down the notification executor; idempotent so repeated teardown is safe. */
    @AfterClass(alwaysRun = true)
    public void tearDown()
    {
        if (stateNotificationExecutor != null) {
            stateNotificationExecutor.shutdownNow();
            stateNotificationExecutor = null;
        }
    }
@Test
public void testInvalidConstructorArg()
{
try {
createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID).withNoMoreBufferIds(), new DataSize(0, BYTE));
fail("Expected IllegalStateException");
}
catch (IllegalArgumentException ignored) {
}
try {
createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), new DataSize(0, BYTE));
fail("Expected IllegalStateException");
}
catch (IllegalArgumentException ignored) {
}
}
    /**
     * End-to-end walk through the broadcast buffer lifecycle: pages added before
     * any client attaches, late-attaching clients seeing the full history,
     * backpressure when the buffer fills, acknowledgement-driven cleanup, and
     * final drain/abort of every client. Assertions are order-dependent state
     * checks, so the sequence below must not be reordered.
     */
    @Test
    public void testSimple()
    {
        OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(BROADCAST);
        BroadcastOutputBuffer buffer = createBroadcastBuffer(outputBuffers, sizeOfPages(10));

        // add three items
        for (int i = 0; i < 3; i++) {
            addPage(buffer, createPage(i));
        }

        outputBuffers = createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID);

        // add a queue
        buffer.setOutputBuffers(outputBuffers);
        assertQueueState(buffer, FIRST, 3, 0);

        // get the three elements
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));

        // pages not acknowledged yet so state is the same
        assertQueueState(buffer, FIRST, 3, 0);

        // acknowledge first three pages
        buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);

        // pages now acknowledged
        assertQueueState(buffer, FIRST, 0, 3);

        // fill the buffer (we already added 3 pages)
        for (int i = 3; i < 10; i++) {
            addPage(buffer, createPage(i));
        }
        assertQueueState(buffer, FIRST, 7, 3);

        // try to add one more page, which should block
        ListenableFuture<?> future = enqueuePage(buffer, createPage(10));
        assertFalse(future.isDone());
        assertQueueState(buffer, FIRST, 8, 3);

        // remove a page
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 3, sizeOfPages(1), NO_WAIT), bufferResult(3, createPage(3)));

        // page not acknowledged yet so sent count is the same
        assertQueueState(buffer, FIRST, 8, 3);

        // we should still be blocked
        assertFalse(future.isDone());

        //
        // add another buffer and verify it sees all pages
        outputBuffers = outputBuffers.withBuffer(SECOND, BROADCAST_PARTITION_ID);
        buffer.setOutputBuffers(outputBuffers);
        assertQueueState(buffer, SECOND, 11, 0);
        assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0),
                createPage(1),
                createPage(2),
                createPage(3),
                createPage(4),
                createPage(5),
                createPage(6),
                createPage(7),
                createPage(8),
                createPage(9)));

        // page not acknowledged yet so sent count is still zero
        assertQueueState(buffer, SECOND, 11, 0);

        // acknowledge the 10 pages
        buffer.get(SECOND, 10, sizeOfPages(10)).cancel(true);
        assertQueueState(buffer, SECOND, 1, 10);

        //
        // tell shared buffer there will be no more queues
        outputBuffers = outputBuffers.withNoMoreBufferIds();
        buffer.setOutputBuffers(outputBuffers);

        // queues consumed the first three pages, so they should be dropped now and the blocked page future from above should be done
        assertQueueState(buffer, FIRST, 8, 3);
        assertQueueState(buffer, SECOND, 1, 10);
        assertFutureIsDone(future);

        // we should be able to add 3 more pages (the third will be queued)
        // although the first queue fetched the 4th page, the page has not been acknowledged yet
        addPage(buffer, createPage(11));
        addPage(buffer, createPage(12));
        future = enqueuePage(buffer, createPage(13));
        assertFalse(future.isDone());
        assertQueueState(buffer, FIRST, 11, 3);
        assertQueueState(buffer, SECOND, 4, 10);

        // acknowledge the receipt of the 3rd page and try to remove the 4th page from the first queue
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 4, sizeOfPages(1), NO_WAIT), bufferResult(4, createPage(4)));

        // the blocked page future above should be done
        assertFutureIsDone(future);
        assertQueueState(buffer, FIRST, 10, 4);
        assertQueueState(buffer, SECOND, 4, 10);

        //
        // finish the buffer
        assertFalse(buffer.isFinished());
        buffer.setNoMorePages();
        assertQueueState(buffer, FIRST, 10, 4);
        assertQueueState(buffer, SECOND, 4, 10);

        // not fully finished until all pages are consumed
        assertFalse(buffer.isFinished());

        // remove a page, not finished
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 5, sizeOfPages(1), NO_WAIT), bufferResult(5, createPage(5)));
        assertQueueState(buffer, FIRST, 9, 5);
        assertQueueState(buffer, SECOND, 4, 10);
        assertFalse(buffer.isFinished());

        // remove all remaining pages from first queue, should not be finished
        BufferResult x = getBufferResult(buffer, FIRST, 6, sizeOfPages(10), NO_WAIT);
        assertBufferResultEquals(TYPES, x, bufferResult(6, createPage(6),
                createPage(7),
                createPage(8),
                createPage(9),
                createPage(10),
                createPage(11),
                createPage(12),
                createPage(13)));
        assertQueueState(buffer, FIRST, 8, 6);
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));

        // finish first queue
        buffer.abort(FIRST);
        assertQueueClosed(buffer, FIRST, 14);
        assertQueueState(buffer, SECOND, 4, 10);
        assertFalse(buffer.isFinished());

        // remove all remaining pages from second queue, should be finished
        assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 10, sizeOfPages(10), NO_WAIT), bufferResult(10, createPage(10),
                createPage(11),
                createPage(12),
                createPage(13)));
        assertQueueState(buffer, SECOND, 4, 10);
        assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
        buffer.abort(SECOND);
        assertQueueClosed(buffer, FIRST, 14);
        assertQueueClosed(buffer, SECOND, 14);
        assertFinished(buffer);

        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
        assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 14, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 14, true));
    }
    // TODO: remove this after PR #7987 is landed
    /**
     * Verifies explicit acknowledgement semantics: acking sequence id N removes
     * pages below N, acking past the end fails with "Invalid sequence id", and
     * re-acknowledging already-acked pages via a get() is tolerated.
     */
    @Test
    public void testAcknowledge()
    {
        OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(BROADCAST);
        BroadcastOutputBuffer buffer = createBroadcastBuffer(outputBuffers, sizeOfPages(10));

        // add three items
        for (int i = 0; i < 3; i++) {
            addPage(buffer, createPage(i));
        }

        outputBuffers = createInitialEmptyOutputBuffers(BROADCAST).withBuffer(FIRST, BROADCAST_PARTITION_ID);

        // add a queue
        buffer.setOutputBuffers(outputBuffers);
        assertQueueState(buffer, FIRST, 3, 0);

        // get the three elements
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
        // acknowledge pages 0 and 1
        acknowledgeBufferResult(buffer, FIRST, 2);
        // only page 2 is not removed
        assertQueueState(buffer, FIRST, 1, 2);
        // acknowledge page 2
        acknowledgeBufferResult(buffer, FIRST, 3);
        // nothing left
        assertQueueState(buffer, FIRST, 0, 3);
        // acknowledge more pages will fail
        try {
            acknowledgeBufferResult(buffer, FIRST, 4);
        }
        catch (IllegalArgumentException e) {
            assertEquals(e.getMessage(), "Invalid sequence id");
        }

        // fill the buffer
        for (int i = 3; i < 6; i++) {
            addPage(buffer, createPage(i));
        }
        assertQueueState(buffer, FIRST, 3, 3);

        // getting new pages will again acknowledge the previously acknowledged pages but this is ok
        buffer.get(FIRST, 3, sizeOfPages(1)).cancel(true);
        assertQueueState(buffer, FIRST, 3, 3);
    }
    /**
     * Filling the buffer to its 2-page capacity makes the next enqueue block
     * (enqueuePage asserts the returned future is not yet done).
     */
    @Test
    public void testSharedBufferFull()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(2));

        // Add two pages, buffer is full
        addPage(buffer, createPage(1));
        addPage(buffer, createPage(2));

        // third page is blocked
        enqueuePage(buffer, createPage(3));
    }
    /**
     * Re-reading the same sequence range must be idempotent: repeated gets return
     * the same pages until they are acknowledged, after which a re-read of the
     * old range yields empty results.
     */
    @Test
    public void testDuplicateRequests()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(10));
        // add three items
        for (int i = 0; i < 3; i++) {
            addPage(buffer, createPage(i));
        }

        // add a queue
        assertQueueState(buffer, FIRST, 3, 0);

        // get the three elements
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
        // pages not acknowledged yet so state is the same
        assertQueueState(buffer, FIRST, 3, 0);

        // get the three elements again
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), bufferResult(0, createPage(0), createPage(1), createPage(2)));
        // pages not acknowledged yet so state is the same
        assertQueueState(buffer, FIRST, 3, 0);

        // acknowledge the pages
        buffer.get(FIRST, 3, sizeOfPages(10)).cancel(true);

        // attempt to get the three elements again
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, false));
        // pages not acknowledged yet so state is the same
        assertQueueState(buffer, FIRST, 0, 3);
    }
@Test
public void testAddQueueAfterCreation()
{
BroadcastOutputBuffer buffer = createBroadcastBuffer(
createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds(),
sizeOfPages(10));
assertFalse(buffer.isFinished());
try {
buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST)
.withBuffer(FIRST, BROADCAST_PARTITION_ID)
.withBuffer(SECOND, BROADCAST_PARTITION_ID)
.withNoMoreBufferIds());
fail("Expected IllegalStateException from addQueue after noMoreQueues has been called");
}
catch (IllegalArgumentException ignored) {
}
}
    /**
     * Pages added after setNoMorePages() are silently dropped: the sent-page
     * counter stays at zero.
     */
    @Test
    public void testAddAfterFinish()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(10));
        buffer.setNoMorePages();
        addPage(buffer, createPage(0));
        addPage(buffer, createPage(0));
        assertEquals(buffer.getInfo().getTotalPagesSent(), 0);
    }
    /**
     * Declaring no-more-buffer-ids on an empty buffer finishes it, and repeating
     * the declaration is a harmless no-op.
     */
    @Test
    public void testAddQueueAfterNoMoreQueues()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(10));
        assertFalse(buffer.isFinished());

        // tell buffer no more queues will be added
        buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
        assertTrue(buffer.isFinished());

        // set no more queues a second time to assure that we don't get an exception or such
        buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
        assertTrue(buffer.isFinished());

        // set no more queues a third time to assure that we don't get an exception or such
        buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
        assertTrue(buffer.isFinished());
    }
    /**
     * Pages added after destroy() are silently dropped: the sent-page counter
     * stays at zero.
     */
    @Test
    public void testAddAfterDestroy()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(
                createInitialEmptyOutputBuffers(BROADCAST)
                        .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                        .withNoMoreBufferIds(),
                sizeOfPages(10));
        buffer.destroy();
        addPage(buffer, createPage(0));
        addPage(buffer, createPage(0));
        assertEquals(buffer.getInfo().getTotalPagesSent(), 0);
    }
    /**
     * A get() on a not-yet-declared buffer id returns a pending future that
     * completes once the first page arrives.
     */
    @Test
    public void testGetBeforeCreate()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(10));
        assertFalse(buffer.isFinished());

        // get a page from a buffer that doesn't exist yet
        ListenableFuture<BufferResult> future = buffer.get(FIRST, 0L, sizeOfPages(1));
        assertFalse(future.isDone());

        // add a page and verify the future is complete
        addPage(buffer, createPage(33));
        assertTrue(future.isDone());
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(33)));
    }
    // NOTE(review): method name has a typo ("Wihtout"); left as-is to keep the public name stable.
    /**
     * Setting the final buffer set while omitting a buffer id that has already
     * been used must fail with IllegalStateException (asserted via the @Test
     * expectedExceptions attribute).
     */
    @Test(expectedExceptions = IllegalStateException.class, expectedExceptionsMessageRegExp = ".*does not contain.*\\[0]")
    public void testSetFinalBuffersWihtoutDeclaringUsedBuffer()
    {
        BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(10));
        assertFalse(buffer.isFinished());

        // get a page from a buffer that doesn't exist yet
        ListenableFuture<BufferResult> future = buffer.get(FIRST, 0L, sizeOfPages(1));
        assertFalse(future.isDone());

        // add a page and set no more pages
        addPage(buffer, createPage(33));
        buffer.setNoMorePages();

        // read the page
        assertTrue(future.isDone());
        assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(33)));

        // acknowledge the page and verify we are finished
        assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, true));
        buffer.abort(FIRST);

        // set final buffers to a set that does not contain the buffer, which will fail
        buffer.setOutputBuffers(createInitialEmptyOutputBuffers(BROADCAST).withNoMoreBufferIds());
    }
@Test(expectedExceptions = IllegalStateException.class, expectedExceptionsMessageRegExp = "No more buffers already set")
public void testUseUndeclaredBufferAfterFinalBuffersSet()
{
    // Once the final buffer set is declared, polling an undeclared id must fail.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    assertFalse(buffer.isFinished());
    // get a page from a buffer that was not declared, which will fail
    buffer.get(SECOND, 0L, sizeOfPages(1));
}
@Test
public void testAbortBeforeCreate()
{
    // Aborting a buffer id before it is declared must complete the pending read
    // and make subsequent reads report completion.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), sizeOfPages(2));
    assertFalse(buffer.isFinished());
    // get a page from a buffer that doesn't exist yet
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(1));
    assertFalse(future.isDone());
    // abort that buffer, and verify the future is complete and buffer is finished
    buffer.abort(FIRST);
    assertTrue(future.isDone());
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(10), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
}
@Test
public void testFullBufferBlocksWriter()
{
    // With capacity for two pages, enqueueing a third page must leave the
    // writer's future pending. The original test enqueued the page but never
    // asserted the blocking it is named for; assert it explicitly.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(2));
    // Add two pages, buffer is full
    addPage(buffer, createPage(1));
    addPage(buffer, createPage(2));
    // third page is blocked: the returned future must not be complete
    ListenableFuture<?> future = enqueuePage(buffer, createPage(3));
    assertFalse(future.isDone());
}
@Test
public void testAcknowledgementFreesWriters()
{
    // A blocked writer is released only when EVERY client queue has acknowledged
    // enough pages to free space in the shared broadcast buffer.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(2));
    // Add two pages, buffer is full
    addPage(buffer, createPage(1));
    addPage(buffer, createPage(2));
    assertQueueState(buffer, FIRST, 2, 0);
    // third page is blocked
    ListenableFuture<?> future = enqueuePage(buffer, createPage(3));
    // we should be blocked
    assertFalse(future.isDone());
    assertQueueState(buffer, FIRST, 3, 0);
    assertQueueState(buffer, SECOND, 3, 0);
    // acknowledge pages for first buffer, no space is freed
    buffer.get(FIRST, 2, sizeOfPages(10)).cancel(true);
    assertFalse(future.isDone());
    // acknowledge pages for second buffer, which makes space in the buffer
    buffer.get(SECOND, 2, sizeOfPages(10)).cancel(true);
    // writer should not be blocked
    assertFutureIsDone(future);
    assertQueueState(buffer, SECOND, 1, 2);
}
@Test
public void testAbort()
{
    // Aborting every declared client queue one by one must close each queue and
    // leave the whole output buffer finished once the last one is gone.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    // fill the buffer with ten pages and declare the end of input
    for (int pageIndex = 0; pageIndex < 10; pageIndex++) {
        addPage(buffer, createPage(pageIndex));
    }
    buffer.setNoMorePages();
    // first client reads one page, then aborts; its queue closes and further
    // reads see an empty, completed result
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), NO_WAIT), bufferResult(0, createPage(0)));
    buffer.abort(FIRST);
    assertQueueClosed(buffer, FIRST, 0);
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
    // second client does the same; once it aborts, the buffer is finished
    assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 0, sizeOfPages(1), NO_WAIT), bufferResult(0, createPage(0)));
    buffer.abort(SECOND);
    assertQueueClosed(buffer, SECOND, 0);
    assertFinished(buffer);
    assertBufferResultEquals(TYPES, getBufferResult(buffer, SECOND, 1, sizeOfPages(1), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 0, true));
}
@Test
public void testFinishClosesEmptyQueues()
{
    // setNoMorePages() on empty queues leaves them empty (0/0); aborting each
    // client then closes its queue.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(10));
    // finish while queues are empty
    buffer.setNoMorePages();
    assertQueueState(buffer, FIRST, 0, 0);
    assertQueueState(buffer, SECOND, 0, 0);
    buffer.abort(FIRST);
    buffer.abort(SECOND);
    assertQueueClosed(buffer, FIRST, 0);
    assertQueueClosed(buffer, SECOND, 0);
}
@Test
public void testAbortFreesReader()
{
    // abort() must complete a reader that is blocked waiting for the next page.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one item
    addPage(buffer, createPage(0));
    assertTrue(future.isDone());
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    // abort the buffer
    buffer.abort(FIRST);
    // verify the future completed
    // broadcast buffer does not return a "complete" result in this case, but it doesn't matter
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, false));
    // further requests will see a completed result
    assertQueueClosed(buffer, FIRST, 1);
}
@Test
public void testFinishFreesReader()
{
    // setNoMorePages() must complete a reader blocked on the next page with an
    // empty, completed result.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one item
    addPage(buffer, createPage(0));
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    // finish the buffer
    buffer.setNoMorePages();
    assertQueueState(buffer, FIRST, 0, 1);
    // verify the future completed
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, true));
}
@Test
public void testFinishFreesWriter()
{
    // setNoMorePages() must release writers blocked on a full buffer; the
    // already-enqueued pages remain readable afterwards.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // fill the buffer
    for (int i = 0; i < 5; i++) {
        addPage(buffer, createPage(i));
    }
    // enqueue two additional pages
    ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
    ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
    // get and acknowledge one page
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
    buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
    // verify we are still blocked because the buffer is full
    assertFalse(firstEnqueuePage.isDone());
    assertFalse(secondEnqueuePage.isDone());
    // finish the query
    buffer.setNoMorePages();
    assertFalse(buffer.isFinished());
    // verify futures are complete
    assertFutureIsDone(firstEnqueuePage);
    assertFutureIsDone(secondEnqueuePage);
    // get and acknowledge the last 6 pages
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 1, sizeOfPages(100), NO_WAIT),
            bufferResult(1, createPage(1), createPage(2), createPage(3), createPage(4), createPage(5), createPage(6)));
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 7, sizeOfPages(100), NO_WAIT), emptyResults(TASK_INSTANCE_ID, 7, true));
    buffer.abort(FIRST);
    // verify finished
    assertFinished(buffer);
}
@Test
public void testDestroyFreesReader()
{
    // destroy() must complete a reader blocked on the next page; the result is
    // empty and marked not-complete (bufferComplete == false).
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one page
    addPage(buffer, createPage(0));
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    // destroy the buffer
    buffer.destroy();
    assertQueueClosed(buffer, FIRST, 1);
    // verify the future completed
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), emptyResults(TASK_INSTANCE_ID, 1, false));
}
@Test
public void testDestroyFreesWriter()
{
    // destroy() must release writers blocked on a full buffer and finish it.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // fill the buffer
    for (int i = 0; i < 5; i++) {
        addPage(buffer, createPage(i));
    }
    // add two pages to the buffer queue
    ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
    ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
    // get and acknowledge one page
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
    buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
    // verify we are still blocked because the buffer is full
    assertFalse(firstEnqueuePage.isDone());
    assertFalse(secondEnqueuePage.isDone());
    // destroy the buffer (i.e., cancel the query)
    buffer.destroy();
    assertFinished(buffer);
    // verify the futures are completed
    assertFutureIsDone(firstEnqueuePage);
    assertFutureIsDone(secondEnqueuePage);
}
@Test
public void testFailDoesNotFreeReader()
{
    // fail() must NOT complete blocked readers: clients stay blocked so they do
    // not observe a partial result set.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one page
    addPage(buffer, createPage(0));
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    // fail the buffer
    buffer.fail();
    // future should have not finished
    assertFalse(future.isDone());
    // attempt to get another page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
}
@Test
public void testFailFreesWriter()
{
    // fail() must release blocked writers (unlike readers, which stay blocked);
    // the buffer does not report finished after fail().
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // fill the buffer
    for (int i = 0; i < 5; i++) {
        addPage(buffer, createPage(i));
    }
    // add two pages to the buffer queue
    ListenableFuture<?> firstEnqueuePage = enqueuePage(buffer, createPage(5));
    ListenableFuture<?> secondEnqueuePage = enqueuePage(buffer, createPage(6));
    // get and acknowledge one page
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(1), MAX_WAIT), bufferResult(0, createPage(0)));
    buffer.get(FIRST, 1, sizeOfPages(1)).cancel(true);
    // verify we are still blocked because the buffer is full
    assertFalse(firstEnqueuePage.isDone());
    assertFalse(secondEnqueuePage.isDone());
    // fail the buffer (i.e., cancel the query)
    buffer.fail();
    assertFalse(buffer.isFinished());
    // verify the futures are completed
    assertFutureIsDone(firstEnqueuePage);
    assertFutureIsDone(secondEnqueuePage);
}
@Test
public void testAddBufferAfterFail()
{
    // After fail(), adding buffers and declaring no-more-buffers must be
    // accepted without error, but readers on any buffer id stay blocked.
    OutputBuffers outputBuffers = createInitialEmptyOutputBuffers(BROADCAST)
            .withBuffer(FIRST, BROADCAST_PARTITION_ID);
    BroadcastOutputBuffer buffer = createBroadcastBuffer(outputBuffers, sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // attempt to get a page
    ListenableFuture<BufferResult> future = buffer.get(FIRST, 0, sizeOfPages(10));
    // verify we are waiting for a page
    assertFalse(future.isDone());
    // add one page
    addPage(buffer, createPage(0));
    // verify we got one page
    assertBufferResultEquals(TYPES, getFuture(future, NO_WAIT), bufferResult(0, createPage(0)));
    // fail the buffer
    buffer.fail();
    // add a buffer
    outputBuffers = outputBuffers.withBuffer(SECOND, BROADCAST_PARTITION_ID);
    buffer.setOutputBuffers(outputBuffers);
    // attempt to get page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    future = buffer.get(SECOND, 0, sizeOfPages(10));
    assertFalse(future.isDone());
    // set no more buffers
    outputBuffers = outputBuffers.withNoMoreBufferIds();
    buffer.setOutputBuffers(outputBuffers);
    // attempt to get page, and verify we are blocked
    future = buffer.get(FIRST, 1, sizeOfPages(10));
    assertFalse(future.isDone());
    future = buffer.get(SECOND, 0, sizeOfPages(10));
    assertFalse(future.isDone());
}
@Test
public void testBufferCompletion()
{
    // The buffer only finishes once the last client acknowledges via abort(),
    // even when all pages were read and no more pages/buffers are expected.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    assertFalse(buffer.isFinished());
    // fill the buffer
    List<Page> pages = new ArrayList<>();
    for (int i = 0; i < 5; i++) {
        Page page = createPage(i);
        addPage(buffer, page);
        pages.add(page);
    }
    buffer.setNoMorePages();
    // get and acknowledge 5 pages
    assertBufferResultEquals(TYPES, getBufferResult(buffer, FIRST, 0, sizeOfPages(5), MAX_WAIT), createBufferResult(TASK_INSTANCE_ID, 0, pages));
    // there are no more pages and no more buffers, but the buffer is not
    // finished because it didn't receive a final acknowledgement yet
    // (the original repeated this identical assertion twice; once suffices)
    assertFalse(buffer.isFinished());
    // ask the buffer to finish
    buffer.abort(FIRST);
    // verify that the buffer is finished
    assertTrue(buffer.isFinished());
}
@Test
public void testSharedBufferBlocking()
{
    // Writers must block when the memory reservation handler is not granting
    // memory, and unblock once memory becomes available.
    SettableFuture<?> blockedFuture = SettableFuture.create();
    MockMemoryReservationHandler reservationHandler = new MockMemoryReservationHandler(blockedFuture);
    AggregatedMemoryContext memoryContext = newRootAggregatedMemoryContext(reservationHandler, 0L);
    Page page = createPage(1);
    long pageSize = PAGES_SERDE.serialize(page).getRetainedSizeInBytes();
    // create a buffer that can only hold two pages
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), new DataSize(pageSize * 2, BYTE), memoryContext, directExecutor());
    OutputBufferMemoryManager memoryManager = buffer.getMemoryManager();
    // adding the first page will block as no memory is available (MockMemoryReservationHandler will return a future that is not done)
    enqueuePage(buffer, page);
    // more memory is available
    blockedFuture.set(null);
    memoryManager.onMemoryAvailable();
    assertTrue(memoryManager.getBufferBlockedFuture().isDone(), "buffer shouldn't be blocked");
    // we should be able to add one more page after more memory is available
    addPage(buffer, page);
    // the buffer is full now
    enqueuePage(buffer, page);
}
@Test
public void testSharedBufferBlocking2()
{
    // Memory becoming available is not enough: the blocked future stays pending
    // while buffer space is exhausted, and only completes once both buffer
    // space and memory are available.
    // start with a complete future
    SettableFuture<?> blockedFuture = SettableFuture.create();
    blockedFuture.set(null);
    MockMemoryReservationHandler reservationHandler = new MockMemoryReservationHandler(blockedFuture);
    AggregatedMemoryContext memoryContext = newRootAggregatedMemoryContext(reservationHandler, 0L);
    Page page = createPage(1);
    long pageSize = PAGES_SERDE.serialize(page).getRetainedSizeInBytes();
    // create a buffer that can only hold two pages
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), new DataSize(pageSize * 2, BYTE), memoryContext, directExecutor());
    OutputBufferMemoryManager memoryManager = buffer.getMemoryManager();
    // add two pages to fill up the buffer (memory is available)
    addPage(buffer, page);
    addPage(buffer, page);
    // fill up the memory pool
    blockedFuture = SettableFuture.create();
    reservationHandler.updateBlockedFuture(blockedFuture);
    // allocate one more byte to make the buffer full
    memoryManager.updateMemoryUsage(1L);
    // more memory is available
    blockedFuture.set(null);
    memoryManager.onMemoryAvailable();
    // memoryManager should still return a blocked future as the buffer is still full
    assertFalse(memoryManager.getBufferBlockedFuture().isDone(), "buffer should be blocked");
    // remove all pages from the memory manager and the 1 byte that we added above
    memoryManager.updateMemoryUsage(-pageSize * 2 - 1);
    // now we have both buffer space and memory available, so memoryManager shouldn't be blocked
    assertTrue(memoryManager.getBufferBlockedFuture().isDone(), "buffer shouldn't be blocked");
    // we should be able to add two pages after more memory is available
    addPage(buffer, page);
    addPage(buffer, page);
    // the buffer is full now
    enqueuePage(buffer, page);
}
@Test
public void testSharedBufferBlockingNoBlockOnFull()
{
    // With setNoBlockOnFull(), a full buffer no longer blocks writers, but
    // memory-reservation blocking still applies.
    SettableFuture<?> blockedFuture = SettableFuture.create();
    MockMemoryReservationHandler reservationHandler = new MockMemoryReservationHandler(blockedFuture);
    AggregatedMemoryContext memoryContext = newRootAggregatedMemoryContext(reservationHandler, 0L);
    Page page = createPage(1);
    long pageSize = PAGES_SERDE.serialize(page).getRetainedSizeInBytes();
    // create a buffer that can only hold two pages
    BroadcastOutputBuffer buffer = createBroadcastBuffer(createInitialEmptyOutputBuffers(BROADCAST), new DataSize(pageSize * 2, BYTE), memoryContext, directExecutor());
    OutputBufferMemoryManager memoryManager = buffer.getMemoryManager();
    memoryManager.setNoBlockOnFull();
    // even if setNoBlockOnFull() is called the buffer should block on memory when we add the first page
    // as no memory is available (MockMemoryReservationHandler will return a future that is not done)
    enqueuePage(buffer, page);
    // more memory is available
    blockedFuture.set(null);
    memoryManager.onMemoryAvailable();
    assertTrue(memoryManager.getBufferBlockedFuture().isDone(), "buffer shouldn't be blocked");
    // we should be able to add one more page after more memory is available
    addPage(buffer, page);
    // the buffer is full now, but setNoBlockOnFull() is called so the buffer shouldn't block
    addPage(buffer, page);
}
// Test double for the memory reservation handler: every reservation returns a
// caller-controlled future, so tests can simulate memory pressure by swapping
// in a pending future and releasing it later.
private static class MockMemoryReservationHandler
        implements MemoryReservationHandler
{
    private ListenableFuture<?> blockedFuture;

    public MockMemoryReservationHandler(ListenableFuture<?> blockedFuture)
    {
        this.blockedFuture = requireNonNull(blockedFuture, "blockedFuture is null");
    }

    @Override
    public ListenableFuture<?> reserveMemory(String allocationTag, long delta, boolean enforceBroadcastMemoryLimit)
    {
        // the reservation "completes" whenever the configured future does
        return blockedFuture;
    }

    @Override
    public boolean tryReserveMemory(String allocationTag, long delta, boolean enforceBroadcastMemoryLimit)
    {
        // opportunistic reservations always succeed in this mock
        return true;
    }

    public void updateBlockedFuture(ListenableFuture<?> blockedFuture)
    {
        // message added for consistency with the constructor's null check
        this.blockedFuture = requireNonNull(blockedFuture, "blockedFuture is null");
    }
}
// Builds a broadcast buffer in the OPEN state wired to the given memory
// context and notification executor; the lifespan completion callback is a
// no-op so tests don't have to care about it.
private BroadcastOutputBuffer createBroadcastBuffer(OutputBuffers outputBuffers, DataSize dataSize, AggregatedMemoryContext memoryContext, Executor notificationExecutor)
{
    BroadcastOutputBuffer buffer = new BroadcastOutputBuffer(
            TASK_INSTANCE_ID,
            new StateMachine<>("bufferState", stateNotificationExecutor, OPEN, TERMINAL_BUFFER_STATES),
            dataSize,
            () -> memoryContext.newLocalMemoryContext("test"),
            notificationExecutor);
    buffer.setOutputBuffers(outputBuffers);
    buffer.registerLifespanCompletionCallback(ignore -> {});
    return buffer;
}
@Test
public void testBufferFinishesWhenClientBuffersDestroyed()
{
    // Once all buffer ids are declared, aborting the last remaining client
    // finishes the whole buffer even with pages still pending.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withBuffer(SECOND, BROADCAST_PARTITION_ID)
                    .withBuffer(THIRD, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    // add pages before closing the buffers to make sure
    // that the buffers close even if there are pending pages
    for (int i = 0; i < 5; i++) {
        addPage(buffer, createPage(i));
    }
    // the buffer is in the NO_MORE_BUFFERS state now
    // and if we abort all the buffers it should destroy itself
    // and move to the FINISHED state
    buffer.abort(FIRST);
    assertFalse(buffer.isFinished());
    buffer.abort(SECOND);
    assertFalse(buffer.isFinished());
    buffer.abort(THIRD);
    assertTrue(buffer.isFinished());
}
@Test
public void testForceFreeMemory()
        throws Throwable
{
    // forceFreeMemory() drops all buffered bytes, and later addPage calls must
    // not re-accumulate any memory.
    BroadcastOutputBuffer buffer = createBroadcastBuffer(
            createInitialEmptyOutputBuffers(BROADCAST)
                    .withBuffer(FIRST, BROADCAST_PARTITION_ID)
                    .withNoMoreBufferIds(),
            sizeOfPages(5));
    for (int i = 0; i < 3; i++) {
        addPage(buffer, createPage(1), 0);
    }
    OutputBufferMemoryManager memoryManager = buffer.getMemoryManager();
    assertTrue(memoryManager.getBufferedBytes() > 0);
    buffer.forceFreeMemory();
    assertEquals(memoryManager.getBufferedBytes(), 0);
    // adding a page after forceFreeMemory() should be NOOP
    addPage(buffer, createPage(1));
    assertEquals(memoryManager.getBufferedBytes(), 0);
}
// Convenience overload: builds a broadcast buffer in the OPEN state backed by
// a simple local memory context, using the state-notification executor for
// notifications as well; the lifespan completion callback is a no-op.
private BroadcastOutputBuffer createBroadcastBuffer(OutputBuffers outputBuffers, DataSize dataSize)
{
    BroadcastOutputBuffer buffer = new BroadcastOutputBuffer(
            TASK_INSTANCE_ID,
            new StateMachine<>("bufferState", stateNotificationExecutor, OPEN, TERMINAL_BUFFER_STATES),
            dataSize,
            () -> new SimpleLocalMemoryContext(newSimpleAggregatedMemoryContext(), "test"),
            stateNotificationExecutor);
    buffer.setOutputBuffers(outputBuffers);
    buffer.registerLifespanCompletionCallback(ignore -> {});
    return buffer;
}
// Builds an expected BufferResult; a separate mandatory first page guarantees
// at least one page per result.
private static BufferResult bufferResult(long token, Page firstPage, Page... otherPages)
{
    ImmutableList.Builder<Page> pages = ImmutableList.builder();
    pages.add(firstPage);
    pages.add(otherPages);
    return createBufferResult(TASK_INSTANCE_ID, token, pages.build());
}
}
| apache-2.0 |
GabrielBrascher/cloudstack | server/src/main/java/com/cloud/api/query/dao/ProjectAccountJoinDao.java | 1278 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.api.query.dao;
import org.apache.cloudstack.api.response.ProjectAccountResponse;
import com.cloud.api.query.vo.ProjectAccountJoinVO;
import com.cloud.projects.ProjectAccount;
import com.cloud.utils.db.GenericDao;
/**
 * DAO for the project-account join view.
 */
public interface ProjectAccountJoinDao extends GenericDao<ProjectAccountJoinVO, Long> {
    /** Builds an API response object from the given project-account join view entry. */
    ProjectAccountResponse newProjectAccountResponse(ProjectAccountJoinVO proj);

    /** Returns the join view entry for the given project-account mapping (presumably loaded from the view — confirm in implementation). */
    ProjectAccountJoinVO newProjectAccountView(ProjectAccount proj);
}
| apache-2.0 |
wjw465150/jodd | jodd-db/src/test/java/jodd/db/oom/DbSqlTemplateTest.java | 12646 | // Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.db.oom;
import jodd.db.oom.sqlgen.DbSqlBuilder;
import jodd.db.oom.sqlgen.ParameterValue;
import jodd.db.oom.tst.BadBoy;
import jodd.db.oom.tst.BadGirl;
import jodd.db.oom.tst.Boy;
import jodd.db.oom.tst.Girl;
import org.junit.Before;
import org.junit.Test;
import java.util.Map;
import static jodd.db.oom.ColumnAliasType.*;
import static jodd.db.oom.sqlgen.DbSqlBuilder.sql;
import static org.junit.Assert.*;
public class DbSqlTemplateTest {
@Before
public void setUp() throws Exception {
    // Start each test from a clean DbOomManager and register, in the same
    // order as before, the entity types the SQL templates below refer to.
    DbOomManager.resetAll();
    DbOomManager manager = DbOomManager.getInstance();
    for (Class<?> entityType : new Class<?>[] {Boy.class, BadBoy.class, BadGirl.class, Girl.class}) {
        manager.registerType(entityType);
    }
}
// Fails the test if any of the expected fragments is missing from the string.
protected void assertContains(String string, String... chunks) {
    for (int i = 0; i < chunks.length; i++) {
        assertTrue(string.contains(chunks[i]));
    }
}
@Test
public void testAliasNoAlias() {
    // Column references ($X.col and $C{X.col}) resolve against the table
    // alias when one is declared in $T{...}, otherwise against the table name.
    DbSqlBuilder st;
    st = sql("$T{Boy} $Boy.id $C{Boy.id}");
    assertEquals("BOY BOY.ID BOY.ID", st.generateQuery());
    st = sql("$T{Boy b} $b.id $C{b.id}");
    assertEquals("BOY b b.ID b.ID", st.generateQuery());
}
@Test
public void testTables() {
    // $T{...} can name a table three ways: (1) directly by entity class name,
    // (2) by a reference bound via use() to an entity instance, and
    // (3) by a reference bound via use() to an entity class.
    DbSqlBuilder st;
    // 1
    st = sql("$T{Boy}");
    assertEquals("BOY", st.generateQuery());
    st = sql("$T{BadBoy}");
    assertEquals("BOY", st.generateQuery());
    st = sql("$T{Boy b}");
    assertEquals("BOY b", st.generateQuery());
    st = sql("$T{BadBoy b}");
    assertEquals("BOY b", st.generateQuery());
    // 2
    st = sql("$T{b}").use("b", new Boy());
    assertEquals("BOY", st.generateQuery());
    st = sql("$T{b b}").use("b", new Boy());
    assertEquals("BOY b", st.generateQuery());
    st = sql("$T{b}").use("b", new BadBoy());
    assertEquals("BOY", st.generateQuery());
    // 3
    st = sql("$T{b}").use("b", Boy.class);
    assertEquals("BOY", st.generateQuery());
    st = sql("$T{b}").use("b", BadBoy.class);
    assertEquals("BOY", st.generateQuery());
}
@Test
public void testManyTables() {
    // A single $T{...} macro may list several tables separated by commas.
    String query = sql("$T{Boy, Girl girl}").generateQuery();
    assertEquals("BOY, GIRL girl", query);
}
@Test
public void testColumns1() {
    // $C{ref.col} renders a single column, $C{ref.*} all columns, and
    // $C{ref.+} only the id column — for tables named directly (1), via
    // use() with an instance (2), and via use() with a class (3).
    DbSqlBuilder st;
    // 1
    st = sql("$T{Boy} | $C{Boy.id} | $C{Boy.*}");
    assertEquals("BOY | BOY.ID | BOY.GIRL_ID, BOY.ID, BOY.NAME", st.generateQuery());
    st = sql("$T{BadBoy} | $C{BadBoy.ajdi} | $C{BadBoy.*} | $C{BadBoy.+}");
    assertEquals("BOY | BOY.ID | BOY.ID, BOY.GIRL_ID, BOY.NAME | BOY.ID", st.generateQuery());
    // 2
    st = sql("$T{b b} | $C{b.id}").use("b", new Boy());
    assertEquals("BOY b | b.ID", st.generateQuery());
    st = sql("$T{b bb} | $C{bb.id} | $C{bb.*}").use("b", new Boy());
    assertEquals("BOY bb | bb.ID | bb.GIRL_ID, bb.ID, bb.NAME", st.generateQuery());
    st = sql("$T{b bb} | $C{bb.ajdi} | $C{bb.*} | $C{bb.+}").use("b", new BadBoy());
    assertEquals("BOY bb | bb.ID | bb.ID, bb.GIRL_ID, bb.NAME | bb.ID", st.generateQuery());
    // 3
    st = sql("$T{b b} | $C{b.id}").use("b", Boy.class);
    assertEquals("BOY b | b.ID", st.generateQuery());
    st = sql("$T{b bb} | $C{bb.id} | $C{bb.*}").use("b", Boy.class);
    assertEquals("BOY bb | bb.ID | bb.GIRL_ID, bb.ID, bb.NAME", st.generateQuery());
    st = sql("$T{b bb} | $C{bb.ajdi} | $C{bb.*}").use("b", BadBoy.class);
    assertEquals("BOY bb | bb.ID | bb.ID, bb.GIRL_ID, bb.NAME", st.generateQuery());
    st = sql("$T{b bb} | $C{bb.ajdi} | $C{bb.*} | $C{bb.+}").use("b", BadBoy.class);
    assertEquals("BOY bb | bb.ID | bb.ID, bb.GIRL_ID, bb.NAME | bb.ID", st.generateQuery());
}
@Test
public void testColumns2() {
    // aliasColumnsAs(...) controls the generated "as" aliases: TABLE_NAME uses
    // the table name, TABLE_REFERENCE the reference used in the template, and
    // COLUMN_CODE sequential col_N_ codes. reset() clears generated state so
    // the same builder can be re-rendered with a different alias type.
    DbSqlBuilder st;
    // 1
    st = sql("$T{Boy} | $C{Boy.id} | $C{Boy.*}");
    assertEquals("BOY | BOY.ID as BOY$ID | BOY.GIRL_ID as BOY$GIRL_ID, BOY.ID as BOY$ID, BOY.NAME as BOY$NAME",
            st.aliasColumnsAs(TABLE_NAME).generateQuery());
    assertEquals("BOY | BOY.ID as Boy$ID | BOY.GIRL_ID as Boy$GIRL_ID, BOY.ID as Boy$ID, BOY.NAME as Boy$NAME",
            st.reset().aliasColumnsAs(TABLE_REFERENCE).generateQuery());
    assertEquals("BOY | BOY.ID as col_0_ | BOY.GIRL_ID as col_1_, BOY.ID as col_2_, BOY.NAME as col_3_",
            st.reset().aliasColumnsAs(COLUMN_CODE).generateQuery());
    st = sql("$T{BadBoy BadBoy} | $C{BadBoy.ajdi} | $C{BadBoy.*}");
    assertEquals("BOY BadBoy | BadBoy.ID as BOY$ID | BadBoy.ID as BOY$ID, BadBoy.GIRL_ID as BOY$GIRL_ID, BadBoy.NAME as BOY$NAME",
            st.aliasColumnsAs(TABLE_NAME).generateQuery());
    assertEquals("BOY BadBoy | BadBoy.ID as BadBoy$ID | BadBoy.ID as BadBoy$ID, BadBoy.GIRL_ID as BadBoy$GIRL_ID, BadBoy.NAME as BadBoy$NAME",
            st.reset().aliasColumnsAs(TABLE_REFERENCE).generateQuery());
    assertEquals("BOY BadBoy | BadBoy.ID as col_0_ | BadBoy.ID as col_1_, BadBoy.GIRL_ID as col_2_, BadBoy.NAME as col_3_",
            st.reset().aliasColumnsAs(COLUMN_CODE).generateQuery());
}
@Test
public void testColumns3() {
    // Mixed column lists: several columns inside one $C{...}, bare $ref.col
    // references alongside $C{...}, and $C{...} with and without column
    // aliasing (only $C{...} columns receive "as" aliases).
    DbSqlBuilder st;
    st = sql("$C{b.id,b.name} from $T{b b}").use("b", Boy.class);
    assertEquals("b.ID, b.NAME from BOY b", st.generateQuery());
    st = sql("$C{ a.ajdi , a.nejm } from $T{b a}").use("b", BadBoy.class);
    assertEquals("a.ID, a.NAME from BOY a", st.generateQuery());
    st = sql("$C{b.id,b.name} from $T{b b}").use("b", Boy.class);
    assertEquals("b.ID as BOY$ID, b.NAME as BOY$NAME from BOY b", st.aliasColumnsAs(TABLE_NAME).generateQuery());
    st = sql("$C{ a.ajdi , a.nejm } from $T{b a}").use("b", BadBoy.class);
    assertEquals("a.ID as BOY$ID, a.NAME as BOY$NAME from BOY a", st.aliasColumnsAs(TABLE_NAME).generateQuery());
    // a bare $b.id reference is NOT aliased even when aliasColumnsAs is set
    st = sql("$b.id as d, $C{b.name} from $T{b b}").use("b", Boy.class).aliasColumnsAs(TABLE_NAME);
    assertEquals("b.ID as d, b.NAME as BOY$NAME from BOY b", st.generateQuery());
    st = sql("$a.ajdi as d, $C{ a.nejm } from $T{b a}").use("b", BadBoy.class).aliasColumnsAs(TABLE_NAME);
    assertEquals("a.ID as d, a.NAME as BOY$NAME from BOY a", st.generateQuery());
    st = sql("$C{b.*} from $T{b b}").use("b", Boy.class).aliasColumnsAs(TABLE_NAME);
    assertEquals("b.GIRL_ID as BOY$GIRL_ID, b.ID as BOY$ID, b.NAME as BOY$NAME from BOY b", st.generateQuery());
    st = sql("$C{a.*} from $T{b a}").use("b", BadBoy.class);
    assertEquals("a.ID, a.GIRL_ID, a.NAME from BOY a", st.generateQuery());
    st = sql("$C{a.*} from $T{BadBoy a}");
    assertEquals("a.ID, a.GIRL_ID, a.NAME from BOY a", st.generateQuery());
    st = sql("$C{BadBoy.ajdi} from $T{BadBoy BadBoy}");
    assertEquals("BadBoy.ID from BOY BadBoy", st.generateQuery());
    st = sql("$C{BadBoy.ajdi} from $T{BadBoy}");
    assertEquals("BOY.ID from BOY", st.generateQuery());
}
@Test
public void testColumns4() {
    // $C{ref.[a,b,...]} selects an explicit subset of columns; whitespace
    // inside the brackets is ignored, and "+" stands for the id column.
    DbSqlBuilder st;
    st = sql("$T{b b} | $C{b.[name]} | $C{b.[id,name]} | $C{b.[id,name,girlId]}").use("b", Boy.class);
    assertEquals("BOY b | b.NAME | b.ID, b.NAME | b.GIRL_ID, b.ID, b.NAME", st.generateQuery());
    st = sql("$T{b b} | $C{b.[ name ]} | $C{b.[ id , name ]}").use("b", Boy.class);
    assertEquals("BOY b | b.NAME | b.ID, b.NAME", st.generateQuery());
    st = sql("$T{b b} | $C{b.[id,name]} | $C{b.[name,id]}").use("b", Boy.class);
    assertEquals("BOY b | b.ID, b.NAME | b.ID, b.NAME", st.generateQuery());
    st = sql("$T{b b} | $C{b.[+,nejm]} | $C{b.[ajdi,nejm]}").use("b", BadBoy.class);
    assertEquals("BOY b | b.ID, b.NAME | b.ID, b.NAME", st.generateQuery());
}
/**
 * Verifies bare $-references ($ref.property) and the backslash escape for a
 * literal '$': in the template, "\$" (written "\\$" in Java source) suppresses
 * expansion, and "\\\$" yields a literal backslash followed by '$'.
 */
@Test
public void testReferencesAndEscapes() {
DbSqlBuilder st;
// escaped '$' survives as a literal, with or without surrounding text
assertEquals("...$foo...", new DbSqlBuilder("...\\$foo...").generateQuery());
assertEquals("$foo", new DbSqlBuilder("\\$foo").generateQuery());
// double-escaped form keeps one backslash before the literal '$'
assertEquals("...\\$foo...", new DbSqlBuilder("...\\\\\\$foo...").generateQuery());
assertEquals("\\$foo", new DbSqlBuilder("\\\\\\$foo").generateQuery());
assertEquals("$f", new DbSqlBuilder("\\$f").generateQuery());
// a $-reference may sit mid-token and directly before operators
st = sql("$T{BadBoy b} x$b.ajdi=2").aliasColumnsAs(TABLE_NAME);
assertEquals("BOY b xb.ID=2", st.generateQuery());
st = sql("$T{BadBoy b} $b.ajdi+2").aliasColumnsAs(TABLE_NAME);
assertEquals("BOY b b.ID+2", st.generateQuery());
// with no alias bound, $Boy.id resolves against the table name BOY
st = sql("$T{Boy, BadBoy b} - \\$$Boy.id $b.ajdi");
assertEquals("BOY, BOY b - $BOY.ID b.ID", st.generateQuery());
st = sql("$T{Boy}, $T{BadBoy b} - \\$$Boy.id $b.ajdi");
assertEquals("BOY, BOY b - $BOY.ID b.ID", st.generateQuery());
// with alias "Boy" declared, $Boy.id keeps the alias as prefix
st = sql("$C{b.ajdi} $T{Boy Boy, BadBoy b} - \\$$Boy.id $b.ajdi").aliasColumnsAs(TABLE_NAME);
assertEquals("b.ID as BOY$ID BOY Boy, BOY b - $Boy.ID b.ID", st.generateQuery());
// escaped "$C{...}" passes through verbatim, other macros still expand
st = sql("\\$C{b.ajdi} $T{Boy, BadBoy b} - \\$$Boy.id $b.ajdi");
assertEquals("$C{b.ajdi} BOY, BOY b - $BOY.ID b.ID", st.generateQuery());
st = sql("\\$C{b.*} $T{Boy, BadBoy b} - $Boy.id");
assertEquals("$C{b.*} BOY, BOY b - BOY.ID", st.generateQuery());
// escape also works inside SQL string literals
st = sql("$C{b.*} $T{Boy Boy, BadBoy b} - $b.ajdi 'foo\\$'").aliasColumnsAs(TABLE_NAME);
assertEquals("b.ID as BOY$ID, b.GIRL_ID as BOY$GIRL_ID, b.NAME as BOY$NAME BOY Boy, BOY b - b.ID 'foo$'", st.generateQuery());
// consecutive references separated by punctuation all expand
st = sql("$T{BadBoy b} - $b.ajdi=2,$b.ajdi<$b.ajdi").aliasColumnsAs(TABLE_NAME);
assertEquals("BOY b - b.ID=2,b.ID<b.ID", st.generateQuery());
}
/**
 * Verifies the $M{ref=bean} match macro: it expands to a parenthesized
 * conjunction of column=:parameter conditions for every non-null populated
 * property of the bound bean and registers the corresponding named query
 * parameters. An entirely empty bean yields the always-true "(1=1)" with no
 * parameters; the ".*" form includes null-valued properties as well.
 */
@Test
public void testMatch() {
DbSqlBuilder st;
Boy boy = new Boy();
boy.id = 1;
boy.girlId = 3;
// both populated properties appear as conditions and as named parameters
st = sql("$T{boy boy} where $M{boy=boy}").use("boy", boy);
assertEquals("BOY boy where (boy.GIRL_ID=:boy.girlId and boy.ID=:boy.id)", st.generateQuery());
Map<String, ParameterValue> map = st.getQueryParameters();
assertEquals(2, map.size());
assertEquals(Integer.valueOf(1), map.get("boy.id").getValue());
assertEquals(Integer.valueOf(3), map.get("boy.girlId").getValue());
boy.id = 4;
boy.girlId = 1;
// the table alias prefixes the conditions; parameter names keep the bean name
st = sql("$T{boy b} where $M{b=boy}").use("boy", boy);
assertEquals("BOY b where (b.GIRL_ID=:boy.girlId and b.ID=:boy.id)", st.generateQuery());
map = st.getQueryParameters();
assertEquals(2, map.size());
assertEquals(Integer.valueOf(4), map.get("boy.id").getValue());
assertEquals(Integer.valueOf(1), map.get("boy.girlId").getValue());
BadBoy badBoy = new BadBoy();
// an all-null bean matches everything: "(1=1)" and null parameter map
st = sql("$T{boy b} where $M{b=boy}").use("boy", badBoy);
assertEquals("BOY b where (1=1)", st.generateQuery());
map = st.getQueryParameters();
assertNull(map);
// repeating the same build is stable
st = sql("$T{boy b} where $M{b=boy}").use("boy", badBoy);
assertEquals("BOY b where (1=1)", st.generateQuery());
map = st.getQueryParameters();
assertNull(map);
badBoy.ajdi = Integer.valueOf(4);
badBoy.girlId = Integer.valueOf(1);
// only the non-null wrapper properties are included in the match
st = sql("$T{boy b} where $M{b=boy}").use("boy", badBoy);
assertEquals("BOY b where (b.ID=:boy.ajdi and b.GIRL_ID=:boy.girlId)", st.generateQuery());
map = st.getQueryParameters();
assertEquals(2, map.size());
assertEquals(Integer.valueOf(4), map.get("boy.ajdi").getValue());
assertEquals(Integer.valueOf(1), map.get("boy.girlId").getValue());
badBoy.ajdi = null;
// the ".*" form matches on all properties, null-valued ones included
st = sql("$T{boy b} where $M{b=boy.*}").use("boy", badBoy);
assertEquals("BOY b where (b.ID=:boy.ajdi and b.GIRL_ID=:boy.girlId and b.NAME=:boy.nejm)", st.generateQuery());
map = st.getQueryParameters();
assertEquals(3, map.size());
assertEquals(Integer.valueOf(1), map.get("boy.girlId").getValue());
}
/**
 * Verifies that column macros combine correctly across two entities in a
 * join: "*" expands all columns of one entity, "+" resolves to the id column
 * of the other, both in the select list and in the join condition.
 */
@Test
public void testJoin() {
DbSqlBuilder st = sql("select $C{bb.*}, $C{bg.+} from $T{BadGirl bg} join $T{Boy bb} on $bg.+=bb.GIRL_ID");
assertEquals("select bb.GIRL_ID, bb.ID, bb.NAME, bg.ID from GIRL bg join BOY bb on bg.ID=bb.GIRL_ID", st.generateQuery());
}
}
| bsd-2-clause |
andrewmkrug/Selenium-Grid-Extras | SeleniumGridExtras/src/main/java/com/groupon/seleniumgridextras/tasks/StopGridExtras.java | 3500 | /**
* Copyright (c) 2013, Groupon, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of GROUPON nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* Created with IntelliJ IDEA.
* User: Dima Kovalenko (@dimacus) && Darko Marinov
* Date: 5/10/13
* Time: 4:06 PM
*/
package com.groupon.seleniumgridextras.tasks;
import com.google.gson.JsonObject;
import com.groupon.seleniumgridextras.tasks.config.TaskDescriptions;
import com.groupon.seleniumgridextras.utilities.json.JsonCodec;
import org.apache.log4j.Logger;
import java.util.Map;
public class StopGridExtras extends ExecuteOSTask {
private static Logger logger = Logger.getLogger(StopGridExtras.class);
public StopGridExtras(){
setEndpoint(TaskDescriptions.Endpoints.STOP_GRID_EXTRAS);
setDescription(TaskDescriptions.Description.STOP_GRID_EXTRAS);
JsonObject params = new JsonObject();
params.addProperty(JsonCodec.CONFIRM, "(Required) Will ignore request unless true is passed here");
setAcceptedParams(params);
setRequestType("GET");
setResponseType("json");
setClassname(this.getClass().getCanonicalName().toString());
setCssClass(TaskDescriptions.UI.BTN_DANGER);
setButtonText(TaskDescriptions.UI.ButtonText.STOP_GRID_EXTRAS);
setEnabledInGui(true);
}
@Override
public JsonObject getAcceptedParams() {
JsonObject params = new JsonObject();
params.addProperty(JsonCodec.CONFIRM, "(Required) Will ignore request unless true is passed here");
return params;
}
@Override
public JsonObject execute() {
return execute("");
}
@Override
public JsonObject execute(String port) {
getJsonResponse().addKeyValues(JsonCodec.ERROR, "Pass in confirm=true");
return getJsonResponse().getJson();
}
@Override
public JsonObject execute(Map<String, String> parameter) {
if (!parameter.isEmpty() && parameter.containsKey(JsonCodec.CONFIRM) && parameter.get(
JsonCodec.CONFIRM).equals(
JsonCodec.TRUE)) {
logger.info("Shutdown command received, shutting down.");
System.exit(0);
}
return execute();
}
}
| bsd-3-clause |
Skywalker-11/spongycastle | pkix/src/main/java/org/spongycastle/operator/MacCalculator.java | 805 | package org.spongycastle.operator;
import java.io.OutputStream;
import org.spongycastle.asn1.x509.AlgorithmIdentifier;
/**
 * Base interface for a MAC (message authentication code) calculator: data is
 * written to the stream returned by {@link #getOutputStream()} and the MAC
 * over that data is then read back via {@link #getMac()}.
 */
public interface MacCalculator
{
/**
 * Return the algorithm identifier describing the MAC algorithm and any
 * parameters this calculator was created with.
 *
 * @return the MAC algorithm details.
 */
AlgorithmIdentifier getAlgorithmIdentifier();
/**
 * Returns a stream that will accept data for the purpose of calculating
 * the MAC for later verification. Use org.spongycastle.util.io.TeeOutputStream if you want to accumulate
 * the data on the fly as well.
 *
 * @return an OutputStream
 */
OutputStream getOutputStream();
/**
 * Return the calculated MAC based on what has been written to the stream.
 *
 * @return calculated MAC.
 */
byte[] getMac();
/**
 * Return the key used for calculating the MAC.
 *
 * @return the MAC key.
 */
GenericKey getKey();
}
PlegmaLabs/openhab | bundles/binding/org.openhab.binding.homematic/src/main/java/org/openhab/binding/homematic/internal/model/HmVariable.java | 994 | /**
* Copyright (c) 2010-2014, openHAB.org and others.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.homematic.internal.model;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
/**
 * Object that represents a Homematic variable. State and value handling are
 * inherited from {@link HmValueItem}; this subclass only adds the JAXB
 * binding to the {@code <variable>} element and a concise toString.
 *
 * @author Gerhard Riegler
 * @since 1.5.0
 */
@XmlRootElement(name = "variable")
@XmlAccessorType(XmlAccessType.FIELD)
public class HmVariable extends HmValueItem {
/**
 * Returns a short representation containing only the variable name.
 */
@Override
public String toString() {
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).append("name", name).toString();
}
}
| epl-1.0 |
md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jdi/EventRequest/hashCode/hashcode001/TestDescription.java | 2512 | /*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
*
* @summary converted from VM Testbase nsk/jdi/EventRequest/hashCode/hashcode001.
* VM Testbase keywords: [quick, jpda, jdi]
* VM Testbase readme:
* DESCRIPTION:
* The test for public hashCode() method of an implementing class of
* com.sun.jdi.request.EventRequest interface.
* The test checks an assertion cited from spec for hashCode() method of
* java.lang.Object class:
* The general contract of hashCode is:
* - Whenever it is invoked on the same object more than once during
* an execution of a Java application, the hashCode method must
* consistently return the same integer, provided no information used
* in equals comparisons on the object is modified.
* ...
* COMMENTS:
* The test is aimed to increase jdi source code coverage and checks
* the code which was not yet covered by previous tests for EventRequest
* interface. The coverage analysis was done for jdk1.4.0-b92 build.
*
* @library /vmTestbase
* /test/lib
* @run driver jdk.test.lib.FileInstaller . .
* @build nsk.jdi.EventRequest.hashCode.hashcode001
* nsk.jdi.EventRequest.hashCode.hashcode001a
* @run main/othervm PropertyResolvingWrapper
* nsk.jdi.EventRequest.hashCode.hashcode001
* -verbose
* -arch=${os.family}-${os.simpleArch}
* -waittime=5
* -debugee.vmkind=java
* -transport.address=dynamic
* "-debugee.vmkeys=${test.vm.opts} ${test.java.opts}"
*/
| gpl-2.0 |
dwango/quercus | src/main/java/com/caucho/log/RotateLog.java | 4668 | /*
* Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
*
* This file is part of Resin(R) Open Source
*
* Each copy or derived work must preserve the copyright notice and this
* notice unmodified.
*
* Resin Open Source is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin Open Source is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
* of NON-INFRINGEMENT. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with Resin Open Source; if not, write to the
*
* Free Software Foundation, Inc.
* 59 Temple Place, Suite 330
* Boston, MA 02111-1307 USA
*
* @author Scott Ferguson
*/
package com.caucho.log;
import com.caucho.config.ConfigException;
import com.caucho.config.types.Bytes;
import com.caucho.config.types.Period;
import com.caucho.util.L10N;
import com.caucho.vfs.Path;
import javax.annotation.PostConstruct;
import java.io.IOException;
/**
* Configuration for a rotating log
*/
public class RotateLog {
private final static L10N L = new L10N(RotateLog.class);
// Fixed output path; mutually exclusive with _pathFormat (see init()).
private Path _path;
// Date-pattern based output path; mutually exclusive with _path.
private String _pathFormat;
// Naming format for archived (rolled-over) files.
private String _archiveFormat;
// How often the log rolls over.
private Period _rolloverPeriod;
// Maximum size before the log rolls over.
private Bytes _rolloverSize;
// Maximum number of rollover files to keep; -1 means "not configured".
private int _rolloverCount = -1;
// Stream built by init() from the configuration above.
private RotateStream _rotateStream;
private String _timestamp;
/**
 * Gets the output path.
 */
public Path getPath()
{
return _path;
}
/**
 * Sets the output path.
 */
public void setPath(Path path)
{
_path = path;
}
/**
 * Gets the output path format (date-pattern based path).
 */
public String getPathFormat()
{
return _pathFormat;
}
/**
 * Sets the output path format (date-pattern based path).
 */
public void setPathFormat(String path)
{
_pathFormat = path;
}
/**
 * Sets the output path (backward compat).
 */
public void setHref(Path path)
{
setPath(path);
}
/**
 * Sets the rollover period.
 */
public void setRolloverPeriod(Period period)
{
_rolloverPeriod = period;
}
/**
 * Sets the rollover size.
 */
public void setRolloverSize(Bytes size)
{
_rolloverSize = size;
}
/**
 * Sets the rollover size in raw bytes.
 */
public void setRolloverSizeBytes(long size)
{
_rolloverSize = new Bytes(size);
}
/**
 * Gets the rollover count.
 */
public int getRolloverCount()
{
return _rolloverCount;
}
/**
 * Sets the rollover count.
 */
public void setRolloverCount(int count)
{
_rolloverCount = count;
}
/**
 * Gets the timestamp.
 */
public String getTimestamp()
{
return _timestamp;
}
/**
 * Sets the timestamp. (The setter is intentionally disabled; the field is
 * read-only from configuration.)
 */
/*
public void setTimestamp(String timestamp)
{
_timestamp = timestamp;
}
*/
/**
 * Gets the archive format
 */
public String getArchiveFormat()
{
return _archiveFormat;
}
/**
 * Sets the archive format.
 */
public void setArchiveFormat(String format)
{
_archiveFormat = format;
}
/**
 * Returns the rotated stream built by init(); null before init() runs.
 */
public RotateStream getRotateStream()
{
return _rotateStream;
}
/**
 * Returns the configuration tag name, used in error messages.
 */
public String getTagName()
{
return "rotate-log";
}
/**
 * Initialize the log: builds the RotateStream from either the fixed path
 * or the path format, then applies the rollover settings to it.
 *
 * @throws ConfigException if neither path nor path-format is configured,
 *         or an existing log file cannot be read for rotation
 * @throws IOException if the underlying stream cannot be initialized
 */
@PostConstruct
public void init()
throws ConfigException, IOException
{
// exactly one of path / path-format must be configured
if (_path != null)
_rotateStream = RotateStream.create(_path);
else if (_pathFormat != null)
_rotateStream = RotateStream.create(_pathFormat);
else
throw new ConfigException(L.l("`path' is a required attribute of <{0}>. Each <{0}> must configure the destination stream.", getTagName()));
// rotation re-reads the current file, so an unreadable existing file
// cannot be rotated; fail fast if rotation was requested anyway
if (_path != null && _path.exists() && ! _path.canRead() &&
(_rolloverPeriod != null ||
_rolloverSize != null ||
_archiveFormat != null)) {
throw new ConfigException(L.l("log path '{0}' is not readable and therefore cannot be rotated.", _path.getURL()));
}
AbstractRolloverLog rolloverLog = _rotateStream.getRolloverLog();
if (_rolloverPeriod != null)
rolloverLog.setRolloverPeriod(_rolloverPeriod);
if (_rolloverSize != null)
rolloverLog.setRolloverSize(_rolloverSize);
_rotateStream.setMaxRolloverCount(_rolloverCount);
if (_archiveFormat != null)
rolloverLog.setArchiveFormat(_archiveFormat);
_rotateStream.init();
}
}
| gpl-2.0 |
sky609/psiphon-tunnel-core | MobileLibrary/Android/SampleApps/TunneledWebView/app/src/androidTest/java/ca/psiphon/tunneledwebview/ApplicationTest.java | 357 | package ca.psiphon.tunneledwebview;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
 * Default Android instrumentation test case shell for the application.
 * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
 */
public class ApplicationTest extends ApplicationTestCase<Application> {
// Binds the test case to the stock Application class.
public ApplicationTest() {
super(Application.class);
}
}
opedroso/czmq | bindings/jni/czmq-jni/src/test/java/org/zeromq/czmq/ZstrTest.java | 545 | /*
################################################################################
# THIS FILE IS 100% GENERATED BY ZPROJECT; DO NOT EDIT EXCEPT EXPERIMENTALLY #
# Read the zproject/README.md for information about making permanent changes. #
################################################################################
*/
package org.zeromq.czmq;
import org.junit.Assert;
import org.junit.Test;
import org.scijava.nativelib.NativeLoader;
/**
 * Generated JNI smoke test: delegates to the native Zstr self-test.
 */
public class ZstrTest {
@Test
public void test () {
// Runs the bound native self-test; the flag presumably toggles verbose
// output — confirm against the generated Zstr binding if it matters.
Zstr.test (false);
}
}
| mpl-2.0 |
tdefilip/opennms | opennms-services/src/main/java/org/opennms/netmgt/syslogd/SyslogParserException.java | 1728 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2010-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.netmgt.syslogd;
/**
 * Checked exception thrown when a syslog message cannot be parsed. Mirrors
 * the four standard {@link Exception} constructors (no-arg, message, cause,
 * and message plus cause).
 */
public class SyslogParserException extends Exception {
    /** Keep the historical serialized form stable across releases. */
    private static final long serialVersionUID = 8185991282482412701L;

    /** Creates a parser exception with no detail message or cause. */
    public SyslogParserException() {
        super();
    }

    /**
     * Creates a parser exception with a detail message.
     *
     * @param message description of the parse failure
     */
    public SyslogParserException(final String message) {
        super(message);
    }

    /**
     * Creates a parser exception wrapping an underlying cause.
     *
     * @param cause the exception that triggered this one
     */
    public SyslogParserException(final Throwable cause) {
        super(cause);
    }

    /**
     * Creates a parser exception with both a detail message and a cause.
     *
     * @param message description of the parse failure
     * @param cause the exception that triggered this one
     */
    public SyslogParserException(final String message, final Throwable cause) {
        super(message, cause);
    }
}
| agpl-3.0 |
bhutchinson/kfs | kfs-ar/src/main/java/org/kuali/kfs/module/ar/document/service/ContractsGrantsLetterOfCreditReviewDocumentService.java | 3061 | /*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.module.ar.document.service;
import java.util.List;
import java.util.Map;
import org.kuali.kfs.integration.cg.ContractsAndGrantsBillingAward;
import org.kuali.kfs.integration.cg.ContractsAndGrantsBillingAwardAccount;
import org.kuali.kfs.module.ar.document.ContractsGrantsLetterOfCreditReviewDocument;
import org.kuali.rice.core.api.util.type.KualiDecimal;
/**
 * Services to support the ContractsGrantsLetterOfCreditReviewDocument.
 */
public interface ContractsGrantsLetterOfCreditReviewDocumentService {
/**
 * Calculates the amount to draw for the award accounts, placing the amounts in a Map keyed by chart of accounts code-account number
 * @param award the award to find amounts to draw for
 * @param awardAccounts the award accounts to find amounts to draw for
 * @return a Map from the key produced by {@link #getAwardAccountKey} to the amount to draw for that account
 */
public Map<String, KualiDecimal> calculateAwardAccountAmountsToDraw(ContractsAndGrantsBillingAward award, List<ContractsAndGrantsBillingAwardAccount> awardAccounts);
/**
 * Generates a key, which will be used in the map returned by calculateAwardAccountAmountsToDraw, for the given award account
 * @param awardAccount an award account to generate a key for
 * @return the key
 */
public String getAwardAccountKey(ContractsAndGrantsBillingAwardAccount awardAccount);
/**
 * This method retrieves the amount available to draw for the award accounts
 *
 * @param awardTotalAmount the total amount of the award
 * @param awardAccounts the award accounts to compute the available amount against
 * @return the amount still available to draw
 */
public KualiDecimal getAmountAvailableToDraw(KualiDecimal awardTotalAmount, List<ContractsAndGrantsBillingAwardAccount> awardAccounts);
/**
 * To create a generic method to retrieve all active awards based on the criteria passed.
 *
 * @param criteria field-name to value lookup criteria restricting the awards returned
 * @return the active awards matching the criteria
 */
public List<ContractsAndGrantsBillingAward> getActiveAwardsByCriteria(Map<String, Object> criteria);
/**
 * Generates contracts grants invoices for the given letter of credit review document
 * @param locReviewDoc the contracts grants letter of credit review document to generate contracts grants invoices for
 */
public void generateContractsGrantsInvoiceDocuments(ContractsGrantsLetterOfCreditReviewDocument locReviewDoc);
}
| agpl-3.0 |
jblievremont/sonarqube | server/sonar-server/src/main/java/org/sonar/server/dashboard/widget/TimelineWidget.java | 1956 | /*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.dashboard.widget;
import org.sonar.api.web.WidgetCategory;
import org.sonar.api.web.WidgetProperties;
import org.sonar.api.web.WidgetProperty;
import org.sonar.api.web.WidgetPropertyType;
/**
 * "Timeline" dashboard widget: charts the history of up to three metrics
 * with an optional event overlay and configurable chart height. Metric
 * pickers filter out the "new_*" metric variants.
 */
@WidgetCategory("History")
@WidgetProperties({
@WidgetProperty(key = "chartTitle", type = WidgetPropertyType.STRING),
@WidgetProperty(key = "metric1", type = WidgetPropertyType.METRIC, defaultValue = "ncloc", options = {WidgetConstants.FILTER_OUT_NEW_METRICS}),
@WidgetProperty(key = "metric2", type = WidgetPropertyType.METRIC, options = {WidgetConstants.FILTER_OUT_NEW_METRICS}),
@WidgetProperty(key = "metric3", type = WidgetPropertyType.METRIC, options = {WidgetConstants.FILTER_OUT_NEW_METRICS}),
@WidgetProperty(key = "hideEvents", type = WidgetPropertyType.BOOLEAN),
@WidgetProperty(key = "chartHeight", type = WidgetPropertyType.INTEGER, defaultValue = "180")
})
public class TimelineWidget extends CoreWidget {
// Registers the widget id, title, and backing erb template.
public TimelineWidget() {
super("timeline", "Timeline", "/org/sonar/server/dashboard/widget/timeline.html.erb");
}
}
| lgpl-3.0 |
krzysztof-magosa/encog-java-core | src/main/java/org/encog/neural/neat/training/opp/NEATCrossover.java | 10255 | /*
* Encog(tm) Core v3.3 - Java Version
* http://www.heatonresearch.com/encog/
* https://github.com/encog/encog-java-core
* Copyright 2008-2014 Heaton Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package org.encog.neural.neat.training.opp;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import org.encog.ml.ea.genome.Genome;
import org.encog.ml.ea.opp.EvolutionaryOperator;
import org.encog.ml.ea.train.EvolutionaryAlgorithm;
import org.encog.neural.neat.NEATGenomeFactory;
import org.encog.neural.neat.training.NEATGenome;
import org.encog.neural.neat.training.NEATLinkGene;
import org.encog.neural.neat.training.NEATNeuronGene;
/**
* Crossover is performed by mixing the link genes between the parents to
* produce an offspring. Only the link genes are considered for crossover. The
* neuron genes are chosen by virtue of which link genes were chosen. If a
* neuron gene is present in both parents, then we choose the neuron gene from
* the more fit of the two parents.
*
* For NEAT, it does not really matter what parent we get the neuron gene from.
* However, because HyperNEAT also encodes a unique activation function into the
* neuron, the selection of a neuron gene between two parents is more important.
*
* The crossover operator defines two broad classes of genes. Matching genes are
* those genes that are present in both parents. Non-matching genes are only
* present in one person. Non-matching genes are further divided into two more
* groups:
*
* disjoint genes: Genes in the middle of a genome that do not match between the
* parents. excess genes: Genes at the edge of a genome that do not match
* between the parents.
*
* Matching genes are inherited randomly, whereas disjoint genes (those that do
* not match in the middle) and excess genes (those that do not match in the
* end) are inherited from the more fit parent. In this case, equal fitnesses
* are assumed, so the disjoint and excess genes are also inherited randomly.
* The disabled genes may become enabled again in future generations: there is a
* preset chance that an inherited gene is disabled if it is disabled in either
* parent.
*
* This is implemented in this class via the following algorithm. First, create
* a counter for each parent. At each step in the loop, perform the following.
*
* If both parents have the same innovation number, then randomly choose which
* parent's gene to use. Increase the parent counter who contributed the gene.
* Else if one parent has a lower innovation number than the other, then include
* the lower innovation gene if its parent is the most fit. Increase the parent
* counter who contributed the gene.
*
* -----------------------------------------------------------------------------
* http://www.cs.ucf.edu/~kstanley/ Encog's NEAT implementation was drawn from
* the following three Journal Articles. For more complete BibTeX sources, see
* NEATNetwork.java.
*
* Evolving Neural Networks Through Augmenting Topologies
*
* Generating Large-Scale Neural Networks Through Discovering Geometric
* Regularities
*
* Automatic feature selection in neuroevolution
*
*/
public class NEATCrossover implements EvolutionaryOperator {
/**
* The owning object.
*/
private EvolutionaryAlgorithm owner;
/**
 * Adds the neuron gene with the given id to {@code vec}, taking the gene
 * from the fitter parent (falling back to the other parent), unless a gene
 * with that id is already present.
 *
 * @param nodeID
 *            the id of the neuron to add
 * @param vec
 *            the neuron genes collected so far
 * @param best
 *            the fitter parent genome
 * @param notBest
 *            the less fit parent genome
 */
public void addNeuronID(final long nodeID, final List<NEATNeuronGene> vec,
        final NEATGenome best, final NEATGenome notBest) {
    // Skip duplicates: the neuron may already have been added for an
    // earlier link gene referencing the same endpoint.
    for (final NEATNeuronGene gene : vec) {
        if (gene.getId() == nodeID) {
            return;
        }
    }
    vec.add(findBestNeuron(nodeID, best, notBest));
}
/**
 * Choose which parent to favor during crossover.
 *
 * <p>If the scores differ, the selection comparator decides the winner (a
 * "better" score may be either the larger or the smaller value). With equal
 * scores, the smaller genome is favored; with equal scores and equal sizes,
 * one parent is chosen at random.
 *
 * @param rnd
 *            a random number generator.
 * @param mom
 *            the mother.
 * @param dad
 *            the father.
 * @return the parent to favor.
 */
private NEATGenome favorParent(final Random rnd, final NEATGenome mom,
        final NEATGenome dad) {
    if (mom.getScore() != dad.getScore()) {
        // Different fitness: let the selection comparator pick the better one.
        return this.owner.getSelectionComparator().compare(mom, dad) < 0
                ? mom : dad;
    }
    if (mom.getNumGenes() == dad.getNumGenes()) {
        // Same fitness, same size: break the tie randomly.
        return rnd.nextDouble() < 0.5 ? mom : dad;
    }
    // Same fitness, different size: favor the parent with fewer genes.
    return mom.getNumGenes() < dad.getNumGenes() ? mom : dad;
}
/**
 * Find the neuron gene with the specified id, preferring the fitter parent.
 *
 * @param nodeID
 *            the neuron id.
 * @param best
 *            the best genome.
 * @param notBest
 *            the non-best (second best) genome. Also the worst, since this
 *            is the 2nd best of 2.
 * @return the neuron gene from {@code best}, or from {@code notBest} if the
 *         fitter parent does not contain it.
 */
private NEATNeuronGene findBestNeuron(final long nodeID,
        final NEATGenome best, final NEATGenome notBest) {
    final NEATNeuronGene fromBest = best.findNeuron(nodeID);
    return fromBest != null ? fromBest : notBest.findNeuron(nodeID);
}
/**
 * Init this operator, capturing the owning evolutionary algorithm so later
 * operations can reach its population and selection comparator.
 *
 * @param theOwner the evolutionary algorithm that owns this operator
 */
@Override
public void init(final EvolutionaryAlgorithm theOwner) {
this.owner = theOwner;
}
/**
 * {@inheritDoc}
 *
 * @return 1; NEAT crossover produces a single child per operation.
 */
@Override
public int offspringProduced() {
return 1;
}
/**
 * {@inheritDoc}
 *
 * @return 2; crossover always combines a mother and a father genome.
 */
@Override
public int parentsNeeded() {
return 2;
}
/**
 * Perform NEAT crossover of the two parents, producing a single child.
 *
 * <p>Link genes with matching innovation ids are taken from either parent
 * with equal probability; disjoint and excess genes are taken only from
 * the fitter ("best") parent. The input, bias and output neurons are
 * always copied into the child, and any neuron referenced by a selected
 * link is added as well.</p>
 *
 * @param rnd The random number source supplied by the EA. All stochastic
 *            decisions use this object so seeded runs are reproducible.
 * @param parents The parent genomes; the two at parentIndex/parentIndex+1
 *            are used.
 * @param parentIndex Index of the first parent inside {@code parents}.
 * @param offspring Output array receiving the child genome.
 * @param offspringIndex Index at which the child is stored.
 */
@Override
public void performOperation(final Random rnd, final Genome[] parents,
		final int parentIndex, final Genome[] offspring,
		final int offspringIndex) {
	final NEATGenome mom = (NEATGenome) parents[parentIndex + 0];
	final NEATGenome dad = (NEATGenome) parents[parentIndex + 1];
	// Decide which parent contributes disjoint/excess genes.
	final NEATGenome best = favorParent(rnd, mom, dad);
	final NEATGenome notBest = (best != mom) ? mom : dad;
	final List<NEATLinkGene> selectedLinks = new ArrayList<NEATLinkGene>();
	final List<NEATNeuronGene> selectedNeurons = new ArrayList<NEATNeuronGene>();
	int curMom = 0; // current gene index from mom
	int curDad = 0; // current gene index from dad
	NEATLinkGene selectedGene = null;
	// add in the input and bias, they should always be here
	final int alwaysCount = ((NEATGenome) parents[0]).getInputCount()
			+ ((NEATGenome) parents[0]).getOutputCount() + 1;
	for (int i = 0; i < alwaysCount; i++) {
		addNeuronID(i, selectedNeurons, best, notBest);
	}
	while ((curMom < mom.getNumGenes()) || (curDad < dad.getNumGenes())) {
		NEATLinkGene momGene = null; // the mom gene object
		NEATLinkGene dadGene = null; // the dad gene object
		long momInnovation = -1;
		long dadInnovation = -1;
		// grab the actual objects from mom and dad for the specified
		// indexes; if there are none, then null
		if (curMom < mom.getNumGenes()) {
			momGene = mom.getLinksChromosome().get(curMom);
			momInnovation = momGene.getInnovationId();
		}
		if (curDad < dad.getNumGenes()) {
			dadGene = dad.getLinksChromosome().get(curDad);
			dadInnovation = dadGene.getInnovationId();
		}
		// now select a gene for mom or dad. This gene is for the baby
		if ((momGene == null) && (dadGene != null)) {
			// mom exhausted: dad's excess genes survive only if dad is best
			if (best == dad) {
				selectedGene = dadGene;
			}
			curDad++;
		} else if ((dadGene == null) && (momGene != null)) {
			// dad exhausted: mom's excess genes survive only if mom is best
			if (best == mom) {
				selectedGene = momGene;
			}
			curMom++;
		} else if (momInnovation < dadInnovation) {
			// disjoint gene from mom
			if (best == mom) {
				selectedGene = momGene;
			}
			curMom++;
		} else if (dadInnovation < momInnovation) {
			// disjoint gene from dad
			if (best == dad) {
				selectedGene = dadGene;
			}
			curDad++;
		} else if (dadInnovation == momInnovation) {
			// Matching gene: pick either parent's copy with 50/50 odds.
			// FIX: use the supplied Random (rnd) instead of Math.random()
			// so that seeded evolutionary runs remain reproducible.
			if (rnd.nextDouble() < 0.5) {
				selectedGene = momGene;
			} else {
				selectedGene = dadGene;
			}
			curMom++;
			curDad++;
		}
		if (selectedGene != null) {
			// Skip a gene whose innovation id matches the last one added;
			// this also filters a stale selectedGene carried over from a
			// previous iteration where nothing new was selected.
			if (selectedLinks.size() == 0) {
				selectedLinks.add(selectedGene);
			} else {
				if (selectedLinks.get(selectedLinks.size() - 1)
						.getInnovationId() != selectedGene
						.getInnovationId()) {
					selectedLinks.add(selectedGene);
				}
			}
			// Check if we already have the nodes referred to in
			// SelectedGene. If not, they need to be added.
			addNeuronID(selectedGene.getFromNeuronID(), selectedNeurons,
					best, notBest);
			addNeuronID(selectedGene.getToNeuronID(), selectedNeurons,
					best, notBest);
		}
	}
	// now create the required nodes. First sort them into order
	Collections.sort(selectedNeurons);
	// finally, create the genome
	final NEATGenomeFactory factory = (NEATGenomeFactory) this.owner
			.getPopulation().getGenomeFactory();
	final NEATGenome babyGenome = factory.factor(selectedNeurons,
			selectedLinks, mom.getInputCount(), mom.getOutputCount());
	babyGenome.setBirthGeneration(this.owner.getIteration());
	babyGenome.setPopulation(this.owner.getPopulation());
	babyGenome.sortGenes();
	offspring[offspringIndex] = babyGenome;
}
}
| apache-2.0 |
facebook/presto | presto-common/src/main/java/com/facebook/presto/common/block/RunLengthBlockEncoding.java | 1638 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.common.block;
import io.airlift.slice.SliceInput;
import io.airlift.slice.SliceOutput;
public class RunLengthBlockEncoding
        implements BlockEncoding
{
    public static final String NAME = "RLE";

    @Override
    public String getName()
    {
        return NAME;
    }

    /**
     * Serializes an RLE block as its run length (int) followed by the single
     * repeated value block.
     */
    @Override
    public void writeBlock(BlockEncodingSerde blockEncodingSerde, SliceOutput sliceOutput, Block block)
    {
        RunLengthEncodedBlock rle = (RunLengthEncodedBlock) block;
        // run length first ...
        sliceOutput.writeInt(rle.getPositionCount());
        // ... then the repeated value
        blockEncodingSerde.writeBlock(sliceOutput, rle.getValue());
    }

    /**
     * Deserializes a block written by {@link #writeBlock}: the run length is
     * read first, then the repeated value block.
     */
    @Override
    public RunLengthEncodedBlock readBlock(BlockEncodingSerde blockEncodingSerde, SliceInput sliceInput)
    {
        int runLength = sliceInput.readInt();
        return new RunLengthEncodedBlock(blockEncodingSerde.readBlock(sliceInput), runLength);
    }
}
| apache-2.0 |
nmcl/scratch | graalvm/transactions/fork/narayana/XTS/localjunit/unit/src/test/java/com/arjuna/wstx/tests/arq/basic/SuspendResumeSingleParticipantTest.java | 1530 | package com.arjuna.wstx.tests.arq.basic;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.arjuna.mw.wst.TxContext;
import com.arjuna.mw.wst11.TransactionManager;
import com.arjuna.mw.wst11.UserTransaction;
import com.arjuna.wstx.tests.arq.WarDeployment;
import com.arjuna.wstx.tests.common.DemoDurableParticipant;
@RunWith(Arquillian.class)
public class SuspendResumeSingleParticipantTest {

    /** Builds the test WAR containing the demo durable participant class. */
    @Deployment
    public static WebArchive createDeployment() {
        return WarDeployment.getDeployment(
                DemoDurableParticipant.class);
    }

    /**
     * Begins a transaction, enlists a single durable two-phase participant,
     * suspends and then resumes the transaction association, and finally
     * commits. Any failure triggers a best-effort rollback before the
     * original exception is rethrown.
     */
    @Test
    public void testSuspendResumeSingleParticipant()
            throws Exception
    {
        UserTransaction ut = UserTransaction.getUserTransaction();
        TransactionManager tm = TransactionManager.getTransactionManager();
        DemoDurableParticipant p = new DemoDurableParticipant();
        try {
            ut.begin();
            // Enlist one durable participant in the running transaction.
            tm.enlistForDurableTwoPhase(p, p.identifier());
            // Detach the transaction from this thread ...
            TxContext ctx = tm.suspend();
            System.out.println("Suspended: "+ctx);
            // ... and re-associate it with this thread.
            tm.resume(ctx);
            System.out.println("\nResumed\n");
        } catch (Exception eouter) {
            try {
                ut.rollback();
            } catch(Exception einner) {
                // best-effort rollback: the original failure is rethrown below
            }
            throw eouter;
        }
        // Commit outside the try so a commit failure is not itself rolled back.
        ut.commit();
    }
}
| apache-2.0 |
dulvac/sling | bundles/extensions/discovery/impl/src/main/java/org/apache/sling/discovery/impl/common/resource/IsolatedInstanceDescription.java | 1571 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.discovery.impl.common.resource;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.discovery.impl.common.DefaultClusterViewImpl;
/**
 * InstanceDescription which is used at bootstrap time when there is no
 * established view yet - hence the instance is considered to be in 'isolated'
 * state.
 */
public class IsolatedInstanceDescription extends EstablishedInstanceDescription {

    /**
     * Creates an isolated instance description and registers it with a fresh,
     * single-member cluster view.
     *
     * @param res the resource backing this instance description
     * @param clusterViewId id of the new cluster view this instance forms
     * @param slingId the sling id of the local instance
     */
    public IsolatedInstanceDescription(final Resource res, final String clusterViewId,
            final String slingId) {
        // NOTE(review): the two trailing boolean flags are defined by
        // EstablishedInstanceDescription (not visible here) — presumably
        // leader/local markers for the one-instance view; confirm there.
        super(null, res, slingId, true, true);
        DefaultClusterViewImpl clusterView = new DefaultClusterViewImpl(
                clusterViewId);
        // Registering 'this' links the view and this instance; note that the
        // partially-constructed 'this' escapes the constructor here.
        clusterView.addInstanceDescription(this);
    }
}
| apache-2.0 |
rowhit/h2o-2 | src/main/java/water/Job.java | 41108 | package water;
import hex.FrameSplitter;
import static water.util.Utils.difference;
import static water.util.Utils.isEmpty;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.HashMap;
import water.H2O.H2OCountedCompleter;
import water.H2O.H2OEmptyCompleter;
import water.api.*;
import water.api.Request.Validator.NOPValidator;
import water.api.RequestServer.API_VERSION;
import water.fvec.Frame;
import water.fvec.Vec;
import water.util.*;
import water.util.Utils.ExpectedExceptionForDebug;
import dontweave.gson.*;
public abstract class Job extends Func {
  static final int API_WEAVER = 1; // This file has auto-gen'd doc & json fields
  static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code.

  /** A system key for global list of Job keys. */
  public static final Key LIST = Key.make(Constants.BUILT_IN_KEY_JOBS, (byte) 0, Key.BUILT_IN_KEY);

  /** Shared empty int array. */
  private static final int[] EMPTY = new int[0];

  @API(help = "Job key")
  public Key job_key;

  @API(help = "Destination key", filter = Default.class, json = true, validator = DestKeyValidator.class)
  public Key destination_key; // Key holding final value after job is removed

  /** Rejects destination keys containing characters not allowed in user keys. */
  static class DestKeyValidator extends NOPValidator<Key> {
    @Override public void validateRaw(String value) {
      if (Utils.contains(value, Key.ILLEGAL_USER_KEY_CHARS))
        throw new IllegalArgumentException("Key '" + value + "' contains illegal character! Please avoid these characters: " + Key.ILLEGAL_USER_KEY_CHARS);
    }
  }

  // Output parameters
  @API(help = "Job description") public String description;
  @API(help = "Job start time") public long start_time;
  @API(help = "Job end time") public long end_time;
  @API(help = "Exception") public String exception;
  @API(help = "Job state") public JobState state;

  transient public H2OCountedCompleter _fjtask; // Top-level task you can block on
  // Presumably set while running cross-validation sub-jobs (init() skips
  // argument re-validation when true) — TODO confirm against callers.
  transient protected boolean _cv;

  /** Possible job states. */
  public static enum JobState {
    CREATED, // Job was created
    RUNNING, // Job is running
    CANCELLED, // Job was cancelled by user
    FAILED, // Job crashed, error message/exception is available
    DONE // Job was successfully finished
  }

  /** Creates a job with explicit job and destination keys, in CREATED state. */
  public Job(Key jobKey, Key dstKey){
    job_key = jobKey;
    destination_key = dstKey;
    state = JobState.CREATED;
  }

  /** Creates a job with a fresh job key; description defaults to the class name. */
  public Job() {
    job_key = defaultJobKey();
    description = getClass().getSimpleName();
    state = JobState.CREATED;
  }

  /** Private copy constructor used by {@link JobHandle}. */
  private Job(final Job prior) {
    this(prior.job_key, prior.destination_key);
    this.description = prior.description;
    this.start_time = prior.start_time;
    this.end_time = prior.end_time;
    this.state = prior.state;
    this.exception = prior.exception;
  }

  /** Key under which this job itself is stored. */
  public Key self() { return job_key; }
  /** Key under which this job's result is stored. */
  public Key dest() { return destination_key; }

  /** Degree of parallelism this job supports; defaults to 1 (sequential). */
  public int gridParallelism() {
    return 1;
  }

  protected Key defaultJobKey() {
    // Pinned to this node (i.e., the node invoked computation), because it should be almost always updated locally
    return Key.make((byte) 0, Key.JOB, H2O.SELF);
  }

  /** Default destination key: class name plus a random suffix. */
  protected Key defaultDestKey() {
    return Key.make(getClass().getSimpleName() + Key.rand());
  }
  /** Start this task based on given top-level fork-join task representing job computation.
   * @param fjtask top-level job computation task.
   * @return this job in {@link JobState#RUNNING} state
   *
   * @see JobState
   * @see H2OCountedCompleter
   */
  public /** FIXME: should be final or at least protected */ Job start(final H2OCountedCompleter fjtask) {
    assert state == JobState.CREATED : "Trying to run job which was already run?";
    assert fjtask != null : "Starting a job with null working task is not permitted! Fix you API";
    _fjtask = fjtask;
    start_time = System.currentTimeMillis();
    state = JobState.RUNNING;
    // Save the full state of the job
    UKV.put(self(), this);
    // Update job list: atomically append this job's key to the global LIST
    // (TAtomic retries on conflict, so concurrent starts are safe).
    new TAtomic<List>() {
      @Override public List atomic(List old) {
        if( old == null ) old = new List();
        Key[] jobs = old._jobs;
        old._jobs = Arrays.copyOf(jobs, jobs.length + 1);
        old._jobs[jobs.length] = job_key;
        return old;
      }
    }.invoke(LIST);
    return this;
  }
/** Return progress of this job.
*
* @return the value in interval <0,1> representing job progress.
*/
public float progress() {
Freezable f = UKV.get(destination_key);
if( f instanceof Progress )
return ((Progress) f).progress();
return 0;
}
  /** Blocks and get result of this job.
   * <p>
   * The call blocks on working task which was passed via {@link #start(H2OCountedCompleter)} method
   * and returns the result which is fetched from UKV based on job destination key.
   * </p>
   * @return result of this job fetched from UKV by destination key.
   * @see #start(H2OCountedCompleter)
   * @see UKV
   */
  public <T> T get() {
    _fjtask.join(); // Block until top-level job is done
    T ans = (T) UKV.get(destination_key);
    remove(); // Remove self-job
    return ans;
  }
  /** Signal cancellation of this job.
   * <p>The job will be switched to state {@link JobState#CANCELLED} which signals that
   * the job was cancelled by a user. */
  public void cancel() {
    cancel((String)null, JobState.CANCELLED);
  }

  /** Signal exceptional cancellation of this job.
   * @param ex exception causing the termination of job.
   */
  public void cancel(Throwable ex){
    // Cancellation caused by cancellation is not an error: ignore it.
    if(ex instanceof JobCancelledException || ex.getMessage() != null && ex.getMessage().contains("job was cancelled"))
      return;
    // Bad user input is reported as a plain message, not a crash.
    if(ex instanceof IllegalArgumentException || ex.getCause() instanceof IllegalArgumentException) {
      cancel("Illegal argument: " + ex.getMessage());
      return;
    }
    // Capture the stack trace into the failure message.
    StringWriter sw = new StringWriter();
    PrintWriter pw = new PrintWriter(sw);
    ex.printStackTrace(pw);
    String stackTrace = sw.toString();
    cancel("Got exception '" + ex.getClass() + "', with msg '" + ex.getMessage() + "'\n" + stackTrace, JobState.FAILED);
    if(_fjtask != null && !_fjtask.isDone()) _fjtask.completeExceptionally(ex);
  }

  /** Signal exceptional cancellation of this job.
   * @param msg cancellation message explaining reason for cancelation
   */
  public void cancel(final String msg) {
    // A null message means a user cancel; a non-null message means a failure.
    JobState js = msg == null ? JobState.CANCELLED : JobState.FAILED;
    cancel(msg, js);
  }

  /** Common cancellation path: record state, publish a JobHandle, then run
   *  the {@link #onCancelled()} hook asynchronously. */
  private void cancel(final String msg, JobState resultingState ) {
    if(resultingState == JobState.CANCELLED) {
      Log.info("Job " + self() + "(" + description  + ") was cancelled.");
    }
    else {
      Log.err("Job " + self() + "(" + description  + ") failed.");
      Log.err(msg);
    }
    exception = msg;
    state = resultingState;
    // replace finished job by a job handle
    replaceByJobHandle();
    DKV.write_barrier();
    final Job job = this;
    H2O.submitTask(new H2OCountedCompleter() {
      @Override public void compute2() {
        job.onCancelled();
      }
    });
  }
  /**
   * Callback which is called after job cancellation (by user, by exception).
   * Default implementation does nothing; subclasses may override to clean up.
   */
  protected void onCancelled() {
  }

  /** Returns true if the job was cancelled by the user or crashed.
   * @return true if the job is in state {@link JobState#CANCELLED} or {@link JobState#FAILED}
   */
  public boolean isCancelledOrCrashed() {
    return state == JobState.CANCELLED || state == JobState.FAILED;
  }

  /** Returns true if the job was terminated by unexpected exception.
   * @return true, if the job was terminated by unexpected exception.
   */
  public boolean isCrashed() { return state == JobState.FAILED; }

  /** Returns true if this job is correctly finished.
   * @return returns true if the job finished and it was not cancelled or crashed by an exception.
   */
  public boolean isDone() { return state == JobState.DONE; }

  /** Returns true if this job is running
   * @return returns true only if this job is in running state.
   */
  public boolean isRunning() { return state == JobState.RUNNING; }

  /** Returns the current job state. */
  public JobState getState() { return state; }
/** Returns a list of all jobs in a system.
* @return list of all jobs including running, done, cancelled, crashed jobs.
*/
public static Job[] all() {
List list = UKV.get(LIST);
Job[] jobs = new Job[list==null?0:list._jobs.length];
int j=0;
for( int i=0; i<jobs.length; i++ ) {
Job job = UKV.get(list._jobs[i]);
if( job != null ) jobs[j++] = job;
}
if( j<jobs.length ) jobs = Arrays.copyOf(jobs,j);
return jobs;
}
  /** Check if given job is running.
   *
   * @param job_key job key
   * @return true if job is still running else returns false.
   */
  public static boolean isRunning(Key job_key) {
    Job j = UKV.get(job_key);
    assert j!=null : "Job should be always in DKV!";
    return j.isRunning();
  }

  /**
   * Returns true if job is not running.
   * The job can be cancelled, crashed, or already done.
   *
   * @param jobkey job identification key
   * @return true if job is done, cancelled, or crashed, else false
   */
  public static boolean isEnded(Key jobkey) { return !isRunning(jobkey); }

  /**
   * Marks job as finished and records job end time.
   */
  public void remove() {
    end_time = System.currentTimeMillis();
    if( state == JobState.RUNNING )
      state = JobState.DONE;
    // Overwrite handle - copy end_time, state, msg
    replaceByJobHandle();
  }
/** Finds a job with given key or returns null.
*
* @param jobkey job key
* @return returns a job with given job key or null if a job is not found.
*/
public static Job findJob(final Key jobkey) { return UKV.get(jobkey); }
/** Finds a job with given dest key or returns null */
public static Job findJobByDest(final Key destKey) {
Job job = null;
for( Job current : Job.all() ) {
if( current.dest().equals(destKey) ) {
job = current;
break;
}
}
return job;
}
/** Returns job execution time in milliseconds.
* If job is not running then returns job execution time. */
public final long runTimeMs() {
long until = end_time != 0 ? end_time : System.currentTimeMillis();
return until - start_time;
}
/** Description of a speed criteria: msecs/frob */
public String speedDescription() { return null; }
/** Value of the described speed criteria: msecs/frob */
public long speedValue() { return 0; }
  /** Web entry point: forks the job computation, then redirects the client
   *  to the progress page. */
  @Override protected Response serve() {
    fork();
    return redirect();
  }

  /** Response redirecting the browser to this job's progress page. */
  protected Response redirect() {
    return Progress2.redirect(this, job_key, destination_key);
  }
  /**
   * Forks computation of this job.
   *
   * <p>The call does not block.</p>
   * @return always returns this job.
   */
  public Job fork() {
    init();
    H2OCountedCompleter task = new H2OCountedCompleter() {
      @Override public void compute2() {
        try {
          try {
            // Exec always waits till the end of computation
            Job.this.exec();
            Job.this.remove();
          } catch (Throwable t) {
            if(!(t instanceof ExpectedExceptionForDebug))
              Log.err(t);
            // Record the failure on the job; cancel(Throwable) also
            // completes _fjtask exceptionally for blocked callers.
            Job.this.cancel(t);
          }
        } finally {
          // Always complete the fork-join task, success or failure.
          tryComplete();
        }
      }
    };
    start(task);
    H2O.submitTask(task);
    return this;
  }

  /** Runs this job synchronously on the calling thread. */
  @Override public void invoke() {
    init();
    start(new H2OEmptyCompleter()); // mark job started
    exec(); // execute the implementation
    remove(); // remove the job
  }

  /**
   * Invoked before job runs. This is the place to checks arguments are valid or throw
   * IllegalArgumentException. It will get invoked both from the Web and Java APIs.
   *
   * @throws IllegalArgumentException throws the exception if initialization fails to ensure
   * correct job runtime environment.
   */
  @Override protected void init() throws IllegalArgumentException {
    if (destination_key == null) destination_key = defaultDestKey();
  }
/**
* Block synchronously waiting for a job to end, success or not.
* @param jobkey Job to wait for.
* @param pollingIntervalMillis Polling interval sleep time.
*/
public static void waitUntilJobEnded(Key jobkey, int pollingIntervalMillis) {
while (true) {
if (Job.isEnded(jobkey)) {
return;
}
try { Thread.sleep (pollingIntervalMillis); } catch (Exception ignore) {}
}
}
/**
* Block synchronously waiting for a job to end, success or not.
* @param jobkey Job to wait for.
*/
public static void waitUntilJobEnded(Key jobkey) {
int THREE_SECONDS_MILLIS = 3 * 1000;
waitUntilJobEnded(jobkey, THREE_SECONDS_MILLIS);
}
  /** Immutable progress counter measured in chunks processed out of a known
   *  total; every state change returns a fresh instance. */
  public static class ChunkProgress extends Iced implements Progress {
    final long _nchunks; // total number of chunks to process
    final long _count; // chunks processed so far
    private final Status _status;
    final String _error; // failure message when _status == Error
    public enum Status { Computing, Done, Cancelled, Error }

    public Status status() { return _status; }
    public boolean isDone() { return _status == Status.Done || _status == Status.Error; }
    public String error() { return _error; }

    /** Starts in Computing state with zero chunks processed. */
    public ChunkProgress(long chunksTotal) {
      _nchunks = chunksTotal;
      _count = 0;
      _status = Status.Computing;
      _error = null;
    }

    private ChunkProgress(long nchunks, long computed, Status s, String err) {
      _nchunks = nchunks;
      _count = computed;
      _status = s;
      _error = err;
    }

    /** Advance by {@code count} chunks; terminal states are sticky. */
    public ChunkProgress update(int count) {
      if( _status == Status.Cancelled || _status == Status.Error )
        return this;
      long c = _count + count;
      return new ChunkProgress(_nchunks, c, Status.Computing, null);
    }
    public ChunkProgress done() {
      return new ChunkProgress(_nchunks, _nchunks, Status.Done, null);
    }
    public ChunkProgress cancel() {
      return new ChunkProgress(0, 0, Status.Cancelled, null);
    }
    public ChunkProgress error(String msg) {
      return new ChunkProgress(0, 0, Status.Error, msg);
    }

    @Override public float progress() {
      if( _status == Status.Done ) return 1.0f;
      // Cap below 1 so only a true Done state reports 100%.
      return Math.min(0.99f, (float) ((double) _count / (double) _nchunks));
    }
  }
  /** A job which tracks its progress via a {@link ChunkProgress} object
   *  stored under a separate, internal progress key. */
  public static class ChunkProgressJob extends Job {
    Key _progress; // key holding the ChunkProgress object

    public ChunkProgressJob(long chunksTotal, Key destinationKey) {
      destination_key = destinationKey;
      // Progress key is homed to the destination key's node.
      _progress = Key.make(Key.make()._kb, (byte) 0, Key.DFJ_INTERNAL_USER, destinationKey.home_node());
      UKV.put(_progress, new ChunkProgress(chunksTotal));
    }

    /** Atomically advance the progress counter; no-op once the job ended. */
    public void updateProgress(final int c) { // c == number of processed chunks
      if( isRunning(self()) ) {
        new TAtomic<ChunkProgress>() {
          @Override public ChunkProgress atomic(ChunkProgress old) {
            if( old == null ) return null;
            return old.update(c);
          }
        }.fork(_progress);
      }
    }

    @Override public void remove() {
      super.remove();
      UKV.remove(_progress); // also drop the progress counter
    }

    public final Key progressKey() { return _progress; }

    /** Record the failure in the progress object, drop any partial result,
     *  and cancel the job with the given exception. */
    public void onException(Throwable ex) {
      UKV.remove(dest());
      Value v = DKV.get(progressKey());
      if( v != null ) {
        ChunkProgress p = v.get();
        p = p.error(ex.getMessage());
        DKV.put(progressKey(), p);
      }
      cancel(ex);
    }
  }
public static boolean checkIdx(Frame source, int[] idx) {
for (int i : idx) if (i<0 || i>source.vecs().length-1) return false;
return true;
}
  /* Update end_time, state, msg, preserve start_time */
  private void replaceByJobHandle() {
    assert state != JobState.RUNNING : "Running job cannot be replaced.";
    final Job self = this;
    new TAtomic<Job>() {
      @Override public Job atomic(Job old) {
        if( old == null ) return null;
        // Copy this job's final state onto a lightweight handle while
        // keeping the originally published start_time.
        JobHandle jh = new JobHandle(self);
        jh.start_time = old.start_time;
        return jh;
      }
    }.fork(job_key);
  }
  /**
   * A job which operates with a frame.
   *
   * INPUT frame
   */
  public static abstract class FrameJob extends Job {
    static final int API_WEAVER = 1;
    static public DocGen.FieldDoc[] DOC_FIELDS;

    @API(help = "Source frame", required = true, filter = Default.class, json = true)
    public Frame source;

    /**
     * Annotate the number of columns and rows of the training data set in the job parameter JSON
     * @return JsonObject annotated with num_cols and num_rows of the training data set
     */
    @Override public JsonObject toJSON() {
      JsonObject jo = super.toJSON();
      if (source != null) {
        jo.getAsJsonObject("source").addProperty("num_cols", source.numCols());
        jo.getAsJsonObject("source").addProperty("num_rows", source.numRows());
      }
      return jo;
    }
  }
  /**
   * A job which has an input represented by a frame and frame column filter.
   * The filter can be specified by ignored columns or by used columns.
   *
   * INPUT list ignored columns by idx XOR list of ignored columns by name XOR list of used columns
   *
   * @see FrameJob
   */
  public static abstract class ColumnsJob extends FrameJob {
    static final int API_WEAVER = 1;
    static public DocGen.FieldDoc[] DOC_FIELDS;

    @API(help = "Input columns (Indexes start at 0)", filter=colsFilter.class, hide=true)
    public int[] cols;
    class colsFilter extends MultiVecSelect { public colsFilter() { super("source"); } }

    @API(help = "Ignored columns by name and zero-based index", filter=colsNamesIdxFilter.class, displayName="Ignored columns")
    public int[] ignored_cols;
    class colsNamesIdxFilter extends MultiVecSelect { public colsNamesIdxFilter() {super("source", MultiVecSelectType.NAMES_THEN_INDEXES); } }

    @API(help = "Ignored columns by name", filter=colsNamesFilter.class, displayName="Ignored columns by name", hide=true)
    public int[] ignored_cols_by_name = EMPTY;
    class colsNamesFilter extends MultiVecSelect { public colsNamesFilter() {super("source", MultiVecSelectType.NAMES_ONLY); } }

    /**
     * Annotate the used and ignored columns in the job parameter JSON
     * For both the used and the ignored columns, the following rules apply:
     * If the number of columns is less or equal than 100, a dense list of used columns is reported.
     * If the number of columns is greater than 100, the number of columns is reported.
     * If the number of columns is 0, a "N/A" is reported.
     * @return JsonObject annotated with used/ignored columns
     */
    @Override public JsonObject toJSON() {
      JsonObject jo = super.toJSON();
      if (!jo.has("source") || source==null) return jo;
      HashMap<String, int[]> map = new HashMap<String, int[]>();
      map.put("used_cols", cols);
      map.put("ignored_cols", ignored_cols);
      for (String key : map.keySet()) {
        int[] val = map.get(key);
        if (val != null) {
          if(val.length>100) jo.getAsJsonObject("source").addProperty("num_" + key, val.length);
          else if(val.length>0) {
            // Render a dense comma-separated list, dropping the last comma.
            StringBuilder sb = new StringBuilder();
            for (int c : val) sb.append(c + ",");
            jo.getAsJsonObject("source").addProperty(key, sb.toString().substring(0, sb.length()-1));
          } else {
            jo.getAsJsonObject("source").add(key, JsonNull.INSTANCE);
          }
        }
      }
      return jo;
    }

    /** Normalizes the three mutually-exclusive column-selection inputs into a
     *  single sorted {@code ignored_cols} plus a final {@code cols} list. */
    @Override protected void init() {
      super.init();
      // _cv set => presumably a cross-validation sub-job whose columns were
      // already validated by the parent job — TODO confirm.
      if (_cv) return;
      // At most one of the following may be specified.
      int specified = 0;
      if (!isEmpty(cols)) { specified++; }
      if (!isEmpty(ignored_cols)) { specified++; }
      if (!isEmpty(ignored_cols_by_name)) { specified++; }
      if (specified > 1) throw new IllegalArgumentException("Arguments 'cols', 'ignored_cols_by_name', and 'ignored_cols' are exclusive");
      // Unify all ignored cols specifiers to ignored_cols.
      {
        if (!isEmpty(ignored_cols_by_name)) {
          assert (isEmpty(ignored_cols));
          ignored_cols = ignored_cols_by_name;
          ignored_cols_by_name = EMPTY;
        }
        if (ignored_cols == null) {
          ignored_cols = new int[0];
        }
      }
      // At this point, ignored_cols_by_name is dead.
      assert (isEmpty(ignored_cols_by_name));
      // Create map of ignored columns for speed.
      HashMap<Integer,Integer> ignoredColsMap = new HashMap<Integer,Integer>();
      for ( int i = 0; i < ignored_cols.length; i++) {
        int value = ignored_cols[i];
        ignoredColsMap.put(new Integer(value), new Integer(1));
      }
      // Add UUID cols to ignoredColsMap. Duplicates get folded into one entry.
      Vec[] vecs = source.vecs();
      for( int i = 0; i < vecs.length; i++ ) {
        if (vecs[i].isUUID()) {
          ignoredColsMap.put(new Integer(i), new Integer(1));
        }
      }
      // Rebuild ignored_cols from the map. Sort it.
      {
        ignored_cols = new int[ignoredColsMap.size()];
        int j = 0;
        for (Integer key : ignoredColsMap.keySet()) {
          ignored_cols[j] = key.intValue();
          j++;
        }
        Arrays.sort(ignored_cols);
      }
      // If the columns are not specified, then select everything.
      if (isEmpty(cols)) {
        cols = new int[source.vecs().length];
        for( int i = 0; i < cols.length; i++ )
          cols[i] = i;
      } else {
        if (!checkIdx(source, cols)) throw new IllegalArgumentException("Argument 'cols' specified invalid column!");
      }
      // Make a set difference between cols and ignored_cols.
      if (!isEmpty(ignored_cols)) {
        int[] icols = ! isEmpty(ignored_cols) ? ignored_cols : ignored_cols_by_name;
        if (!checkIdx(source, icols)) throw new IllegalArgumentException("Argument 'ignored_cols' or 'ignored_cols_by_name' specified invalid column!");
        cols = difference(cols, icols);
        // Setup all variables in consistent way
        ignored_cols = icols;
        ignored_cols_by_name = icols;
      }
      if( cols.length == 0 ) {
        throw new IllegalArgumentException("No column selected");
      }
    }

    /** Vecs of {@code frame} at the selected {@code cols} indexes. */
    protected final Vec[] selectVecs(Frame frame) {
      Vec[] vecs = new Vec[cols.length];
      for( int i = 0; i < cols.length; i++ )
        vecs[i] = frame.vecs()[cols[i]];
      return vecs;
    }

    /** New frame holding only the selected columns (names preserved). */
    protected final Frame selectFrame(Frame frame) {
      Vec[] vecs = new Vec[cols.length];
      String[] names = new String[cols.length];
      for( int i = 0; i < cols.length; i++ ) {
        vecs[i] = frame.vecs()[cols[i]];
        names[i] = frame.names()[cols[i]];
      }
      return new Frame(names, vecs);
    }
  }
  /**
   * A columns job that requires a response.
   *
   * INPUT response column from source
   */
  public static abstract class ColumnsResJob extends ColumnsJob {
    static final int API_WEAVER = 1;
    static public DocGen.FieldDoc[] DOC_FIELDS;

    @API(help="Column to use as class", required=true, filter=responseFilter.class, json = true)
    public Vec response;
    class responseFilter extends VecClassSelect { responseFilter() { super("source"); } }

    /** Swaps the UI positions of the 'ignored_cols' and 'response' arguments
     *  and excludes the response column from the ignorable columns. */
    @Override protected void registered(API_VERSION ver) {
      super.registered(ver);
      Argument c = find("ignored_cols");
      Argument r = find("response");
      int ci = _arguments.indexOf(c);
      int ri = _arguments.indexOf(r);
      _arguments.set(ri, c);
      _arguments.set(ci, r);
      ((FrameKeyMultiVec) c).ignoreVec((FrameKeyVec)r);
    }

    /**
     * Annotate the name of the response column in the job parameter JSON
     * @return JsonObject annotated with the name of the response column
     */
    @Override public JsonObject toJSON() {
      JsonObject jo = super.toJSON();
      if (source!=null) {
        int idx = source.find(response);
        if( idx == -1 ) {
          // Response not found directly; try its master vector instead.
          Vec vm = response.masterVec();
          if( vm != null ) idx = source.find(vm);
        }
        jo.getAsJsonObject("response").add("name", new JsonPrimitive(idx == -1 ? "null" : source._names[idx]));
      }
      return jo;
    }

    @Override protected void init() {
      super.init();
      // Check if it make sense to build a model
      if (source.numRows()==0)
        throw new H2OIllegalArgumentException(find("source"), "Cannot build a model on empty dataset!");
      // Does not alter the Response to an Enum column if Classification is
      // asked for: instead use the classification flag to decide between
      // classification or regression.
      Vec[] vecs = source.vecs();
      // Drop the response column itself from the selected input columns.
      for( int i = cols.length - 1; i >= 0; i-- )
        if( vecs[cols[i]] == response )
          cols = Utils.remove(cols,i);
      final boolean has_constant_response = response.isEnum() ?
              response.domain().length <= 1 : response.min() == response.max();
      if (has_constant_response)
        throw new H2OIllegalArgumentException(find("response"), "Constant response column!");
    }
  }
  /**
   * A job producing a model.
   *
   * INPUT response column from source
   */
  public static abstract class ModelJob extends ModelJobWithoutClassificationField {
    static final int API_WEAVER = 1;
    static public DocGen.FieldDoc[] DOC_FIELDS;

    // We need a 3-state boolean (unspecified/true/false): solved by checking
    // the UI layer to see if the classification parameter was passed.
    @API(help="Do classification or regression", filter=myClassFilter.class, json = true)
    public boolean classification = true;
    class myClassFilter extends DoClassBoolean { myClassFilter() { super("source"); } }

    /** Validates that the requested classification/regression mode matches
     *  the response column's type. */
    @Override protected void init() {
      super.init();
      // Reject request if classification is required and response column is float
      //Argument a4class = find("classification"); // get UI control
      //String p4class = input("classification"); // get value from HTTP requests
      // if there is UI control and classification field was passed
      final boolean classificationFieldSpecified = true; // ROLLBACK: a4class!=null ? p4class!=null : /* we are not in UI so expect that parameter is specified correctly */ true;
      if (!classificationFieldSpecified) { // can happen if a client sends a request which does not specify classification parameter
        classification = response.isEnum();
        Log.warn("Classification field is not specified - deriving according to response! The classification field set to " + classification);
      } else {
        if ( classification && response.isFloat()) throw new H2OIllegalArgumentException(find("classification"), "Requested classification on float column!");
        if (!classification && response.isEnum() ) throw new H2OIllegalArgumentException(find("classification"), "Requested regression on enum column!");
      }
    }
  }
  /**
   * A job producing a model that has no notion of Classification or Regression.
   *
   * INPUT response column from source
   */
  public static abstract class ModelJobWithoutClassificationField extends ColumnsResJob {
    // This exists to support GLM2, which determines classification/regression using the
    // family field, not a second separate field.
  }
  /**
   * Job which produces a model and validates it on a given dataset.
   *
   * INPUT validation frame. Also supports n-fold cross-validation or a holdout
   * fraction as alternatives to an explicit validation frame (mutually exclusive,
   * enforced in {@link #init()}).
   */
  public static abstract class ValidatedJob extends ModelJob {
    static final int API_WEAVER = 1;
    static public DocGen.FieldDoc[] DOC_FIELDS;
    // Vectors selected from the training/validation frames; filled in by init()/subclasses.
    protected transient Vec[] _train, _valid;
    /** Validation vector extracted from validation frame. */
    protected transient Vec _validResponse;
    /** Validation response domain or null if validation is not specified or null if response is float. */
    protected transient String[] _validResponseDomain;
    /** Source response domain or null if response is float. */
    protected transient String[] _sourceResponseDomain;
    /** CM domain derived from {@link #_validResponseDomain} and {@link #_sourceResponseDomain}. */
    protected transient String[] _cmDomain;
    /** Names of columns */
    protected transient String[] _names;
    /** Name of validation response. Should be same as source response. */
    public transient String _responseName;
    /** Adapted validation frame to a computed model. */
    private transient Frame _adaptedValidation;
    private transient Vec _adaptedValidationResponse; // Validation response adapted to computed CM domain
    private transient int[][] _fromModel2CM; // Transformation for model response to common CM domain
    private transient int[][] _fromValid2CM; // Transformation for validation response to common CM domain
    @API(help = "Validation frame", filter = Default.class, mustExist = true, json = true)
    public Frame validation;
    @API(help = "Number of folds for cross-validation (if no validation data is specified)", filter = Default.class, json = true)
    public int n_folds = 0;
    @API(help = "Fraction of training data (from end) to hold out for validation (if no validation data is specified)", filter = Default.class, json = true)
    public float holdout_fraction = 0;
    @API(help = "Keep cross-validation dataset splits", filter = Default.class, json = true)
    public boolean keep_cross_validation_splits = false;
    @API(help = "Cross-validation models", json = true)
    public Key[] xval_models;
    // Number of completed cross-validation folds; used by cv_progress() to scale progress.
    public int _cv_count = 0;
    /**
     * Helper to compute the actual progress if we're doing cross-validation.
     * This method is supposed to be called by the progress() implementation for CV-capable algos.
     * @param p Progress reported by the main job
     * @return actual progress if CV is done, otherwise returns p
     */
    public float cv_progress(float p) {
      if (n_folds >= 2) {
        return (p + _cv_count) / (n_folds + 1); //divide by 1 more to account for final scoring as extra work
      }
      return p;
    }
    /**
     * Helper to specify which arguments trigger a refresh on change.
     * Marks the "validation" argument so the request form re-renders when it changes.
     * @param ver API version being registered
     */
    @Override
    protected void registered(RequestServer.API_VERSION ver) {
      super.registered(ver);
      for (Argument arg : _arguments) {
        if ( arg._name.equals("validation")) {
          arg.setRefreshOnChange();
        }
      }
    }
    /**
     * Helper to handle arguments based on existing input values.
     * Disables (and zeroes) n_folds whenever an explicit validation frame is supplied,
     * since the two are mutually exclusive.
     * @param arg argument being rendered
     * @param inputArgs raw request properties
     */
    @Override protected void queryArgumentValueSet(Argument arg, java.util.Properties inputArgs) {
      super.queryArgumentValueSet(arg, inputArgs);
      if (arg._name.equals("n_folds") && validation != null) {
        arg.disable("Only if no validation dataset is provided.");
        n_folds = 0;
      }
    }
    /**
     * Cross-Validate this Job (to be overridden for each instance, which also calls genericCrossValidation)
     * @param splits Frames containing train/test splits
     * @param cv_preds Store the predictions for each cross-validation run
     * @param offsets Array to store the offsets of starting row indices for each cross-validation run
     * @param i Which fold of cross-validation to perform
     */
    public void crossValidate(Frame[] splits, Frame[] cv_preds, long[] offsets, int i) { throw H2O.unimpl(); }
    /**
     * Helper to perform the generic part of cross validation.
     * Expected to be called from each specific instance's crossValidate method.
     * NOTE: mutates this job in place (job_key, destination_key, source, validation,
     * response, n_folds, state) and then re-invokes it for the fold.
     * @param splits Frames containing train/test splits
     * @param offsets Array to store the offsets of starting row indices for each cross-validation run
     * @param i Which fold of cross-validation to perform
     */
    final protected void genericCrossValidation(Frame[] splits, long[] offsets, int i) {
      int respidx = source.find(_responseName);
      assert(respidx != -1) : "response is not found in source!";
      job_key = Key.make(job_key.toString() + "_xval" + i); //make a new Job for CV
      assert(xval_models != null);
      destination_key = xval_models[i];
      source = splits[0];
      validation = splits[1];
      response = source.vecs()[respidx];
      n_folds = 0;
      state = Job.JobState.CREATED; //Hack to allow this job to run
      DKV.put(self(), this); //Needed to pass the Job.isRunning(cvdl.self()) check in FrameTask
      offsets[i + 1] = offsets[i] + validation.numRows();
      _cv = true; //Hack to allow init() to pass for ColumnsJob (allow cols/ignored_cols to co-exist)
      invoke();
    }
    /**
     * Annotate the number of columns and rows of the validation data set in the job parameter JSON
     * @return JsonObject annotated with num_cols and num_rows of the validation data set
     */
    @Override public JsonObject toJSON() {
      JsonObject jo = super.toJSON();
      if (validation != null) {
        jo.getAsJsonObject("validation").addProperty("num_cols", validation.numCols());
        jo.getAsJsonObject("validation").addProperty("num_rows", validation.numRows());
      }
      return jo;
    }
    // Validates parameter combinations, resolves the response column, optionally carves
    // off a holdout split, and precomputes response domains / CM-domain mappings.
    @Override protected void init() {
      if ( validation != null && n_folds != 0 ) throw new UnsupportedOperationException("Cannot specify a validation dataset and non-zero number of cross-validation folds.");
      if ( n_folds < 0 ) throw new UnsupportedOperationException("The number of cross-validation folds must be >= 0.");
      super.init();
      xval_models = new Key[n_folds];
      for (int i=0; i<xval_models.length; ++i)
        xval_models[i] = Key.make(dest().toString() + "_xval" + i);
      // Locate the response vector's index in the source frame by identity.
      int rIndex = 0;
      for( int i = 0; i < source.vecs().length; i++ )
        if( source.vecs()[i] == response ) {
          rIndex = i;
          break;
        }
      _responseName = source._names != null && rIndex >= 0 ? source._names[rIndex] : "response";
      if (holdout_fraction > 0) {
        // Holdout split is mutually exclusive with both an explicit validation frame and CV.
        if (holdout_fraction >= 1)
          throw new IllegalArgumentException("Holdout fraction must be less than 1.");
        if (validation != null)
          throw new IllegalArgumentException("Cannot specify both a holdout fraction and a validation frame.");
        if (n_folds != 0)
          throw new IllegalArgumentException("Cannot specify both a holdout fraction and a n-fold cross-validation.");
        Log.info("Holding out last " + Utils.formatPct(holdout_fraction) + " of training data.");
        FrameSplitter fs = new FrameSplitter(source, new float[]{1 - holdout_fraction});
        H2O.submitTask(fs).join();
        Frame[] splits = fs.getResult();
        source = splits[0];
        response = source.vecs()[rIndex];
        validation = splits[1];
        Log.warn("Allocating data split frames: " + source._key.toString() + " and " + validation._key.toString());
        Log.warn("Both will be kept after the the model is trained. It's the user's responsibility to manage their lifetime.");
      }
      _train = selectVecs(source);
      _names = new String[cols.length];
      for( int i = 0; i < cols.length; i++ )
        _names[i] = source._names[cols[i]];
      // Compute source response domain
      if (classification) _sourceResponseDomain = getVectorDomain(response);
      // Is validation specified?
      if( validation != null ) {
        // Extract a validation response
        int idx = validation.find(source.names()[rIndex]);
        if( idx == -1 ) throw new IllegalArgumentException("Validation set does not have a response column called "+_responseName);
        _validResponse = validation.vecs()[idx];
        // Compute output confusion matrix domain for classification:
        // - if validation dataset is specified then CM domain is union of train and validation response domains
        //   else it is only domain of response column.
        if (classification) {
          _validResponseDomain = getVectorDomain(_validResponse);
          if (_validResponseDomain!=null) {
            _cmDomain = Utils.domainUnion(_sourceResponseDomain, _validResponseDomain);
            // Only build mapping tables when the two domains actually differ.
            if (!Arrays.deepEquals(_sourceResponseDomain, _validResponseDomain)) {
              _fromModel2CM = Model.getDomainMapping(_cmDomain, _sourceResponseDomain, false); // transformation from model produced response ~> cmDomain
              _fromValid2CM = Model.getDomainMapping(_cmDomain, _validResponseDomain , false); // transformation from validation response domain ~> cmDomain
            }
          } else _cmDomain = _sourceResponseDomain;
        } /* end of if classification */
      } else if (classification) _cmDomain = _sourceResponseDomain;
    }
    /**
     * Returns the categorical domain of the given vector, or null for a null vector.
     * For a non-enum (integer) vector a temporary enum conversion is made and
     * immediately cleaned up.
     */
    protected String[] getVectorDomain(final Vec v) {
      assert v==null || v.isInt() || v.isEnum() : "Cannot get vector domain!";
      if (v==null) return null;
      String[] r;
      if (v.isEnum()) {
        r = v.domain();
      } else {
        Vec tmp = v.toEnum();
        r = tmp.domain();
        UKV.remove(tmp._key);
      }
      return r;
    }
    /** Returns true if the job has specified validation dataset. */
    protected final boolean hasValidation() { return validation!=null; }
    /** Returns a domain for confusion matrix. */
    protected final String[] getCMDomain() { return _cmDomain; }
    /** Return validation dataset which can be adapted to a model if it is necessary. */
    protected final Frame getValidation() { return _adaptedValidation!=null ? _adaptedValidation : validation; };
    /** Returns original validation dataset. */
    protected final Frame getOrigValidation() { return validation; }
    public final Response2CMAdaptor getValidAdaptor() { return new Response2CMAdaptor(); }
    /**
     * Adapts the validation frame (and, for classification, the validation response)
     * to the given model's expected columns and the common CM domain. Temporary
     * vectors/frames are registered via gtrash() for later cleanup.
     */
    protected final void prepareValidationWithModel(final Model model) {
      if (validation == null) return;
      Frame[] av = model.adapt(validation, false);
      _adaptedValidation = av[0];
      gtrash(av[1]); // delete this after computation
      if (_fromValid2CM!=null) {
        assert classification : "Validation response transformation should be declared only for classification!";
        assert _fromModel2CM != null : "Model response transformation should exist if validation response transformation exists!";
        Vec tmp = _validResponse.toEnum();
        _adaptedValidationResponse = tmp.makeTransf(_fromValid2CM, getCMDomain()); // Add an original response adapted to CM domain
        gtrash(_adaptedValidationResponse); // Add the created vector to a clean-up list
        gtrash(tmp);
      }
    }
    /** A micro helper for transforming model/validation responses to confusion matrix domain. */
    public class Response2CMAdaptor {
      /** Adapt given vector produced by a model to confusion matrix domain. Always return a new vector which needs to be deleted. */
      public Vec adaptModelResponse2CM(final Vec v) { return v.makeTransf(_fromModel2CM, getCMDomain()); }
      /** Adapt given validation vector to confusion matrix domain. Always return a new vector which needs to be deleted. */
      public Vec adaptValidResponse2CM(final Vec v) { return v.makeTransf(_fromValid2CM, getCMDomain()); }
      /** Returns validation dataset. */
      public Frame getValidation() { return ValidatedJob.this.getValidation(); }
      /** Return cached validation response already adapted to CM domain. */
      public Vec getAdaptedValidationResponse2CM() { return _adaptedValidationResponse; }
      /** Return cm domain. */
      public String[] getCMDomain() { return ValidatedJob.this.getCMDomain(); }
      /** Returns true if model/validation responses need to be adapted to confusion matrix domain. */
      public boolean needsAdaptation2CM() { return _fromModel2CM != null; }
      /** Return the adapted response name */
      public String adaptedValidationResponse(final String response) { return response + ".adapted"; }
    }
  }
/**
*
*/
public interface Progress {
float progress();
}
public interface ProgressMonitor {
public void update(long n);
}
  /** Serializable failure marker carrying only a human-readable message. */
  public static class Fail extends Iced {
    public final String _message;
    public Fail(String message) { _message = message; }
  }
  // NOTE: this nested class shadows java.util.List inside the enclosing class;
  // any use of the collection interface here must be fully qualified.
  public static final class List extends Iced {
    // Keys of all known jobs; starts empty rather than null.
    Key[] _jobs = new Key[0];
    /** Deep copy: clones the array and each contained Key. */
    @Override
    public List clone(){
      List l = new List();
      l._jobs = _jobs.clone();
      for(int i = 0; i < l._jobs.length; ++i)
        l._jobs[i] = (Key)l._jobs[i].clone();
      return l;
    }
  }
  /** Almost lightweight job handle containing the same content
   * as pure Job class.
   * Used to pass job state around without carrying subclass-specific payload.
   */
  public static class JobHandle extends Job {
    public JobHandle(final Job job) { super(job); }
  }
  /** Unchecked exception thrown when a job is cancelled mid-computation. */
  public static class JobCancelledException extends RuntimeException {
    public JobCancelledException(){super("job was cancelled!");}
    public JobCancelledException(String msg){super("job was cancelled! with msg '" + msg + "'");}
  }
  /** Hygienic method to prevent accidental capture of non desired values.
   * Clears the (potentially large) source frame reference before the job is serialized/stored. */
  public static <T extends FrameJob> T hygiene(T job) {
    job.source = null;
    return job;
  }
  // Overload for ValidatedJob: additionally clears the validation frame reference.
  // NOTE(review): this overloads the FrameJob variant above; for a ValidatedJob
  // argument, overload resolution picks this (more specific) method.
  public static <T extends ValidatedJob> T hygiene(T job) {
    job.source = null;
    job.validation = null;
    return job;
  }
}
| apache-2.0 |
adjohnson916/groovy-core | src/main/groovy/transform/builder/Builder.java | 6317 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.transform.builder;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.annotation.Documented;
import groovy.transform.Undefined;
import org.codehaus.groovy.transform.GroovyASTTransformationClass;
import static org.codehaus.groovy.transform.BuilderASTTransformation.BuilderStrategy;
/**
* The {@code @Builder} AST transformation is used to help write classes that can be created using <em>fluent</em> api calls.<!-- -->
* The transform supports multiple building strategies to cover a range of cases and there are a number
* of configuration options to customize the building process.
*
* In addition, a number of annotation attributes let you customise the building process. Not all annotation attributes
* are supported by all strategies. See the individual strategy documentation for more details.
* If you're an AST hacker, you can also define your own strategy class.
*
* The following strategies are bundled with Groovy:
* <ul>
* <li>{@link SimpleStrategy} for creating chained setters</li>
* <li>{@link ExternalStrategy} where you annotate an explicit builder class while leaving some buildee class being built untouched</li>
* <li>{@link DefaultStrategy} which creates a nested helper class for instance creation</li>
* <li>{@link InitializerStrategy} which creates a nested helper class for instance creation which when used with {@code @CompileStatic} allows type-safe object creation</li>
* </ul>
*
* Note that Groovy provides other built-in mechanisms for easy creation of objects, e.g. the named-args constructor:
* <pre>
* new Person(firstName: "Robert", lastName: "Lewandowski", age: 21)
* </pre>
* or the with statement:
* <pre>
* new Person().with {
* firstName = "Robert"
* lastName = "Lewandowski"
* age = 21
* }
* </pre>
* so you might not find value in using the builder transform at all. But if you need Java integration or in some cases improved type safety, the {@code @Builder} transform might prove very useful.
*
* @author Marcin Grzejszczak
* @author Paul King
* @see groovy.transform.builder.SimpleStrategy
* @see groovy.transform.builder.ExternalStrategy
* @see groovy.transform.builder.DefaultStrategy
* @see groovy.transform.builder.InitializerStrategy
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target({ ElementType.TYPE, ElementType.CONSTRUCTOR, ElementType.METHOD})
@GroovyASTTransformationClass("org.codehaus.groovy.transform.BuilderASTTransformation")
public @interface Builder {
    /**
     * A class for which builder methods should be created. It will be an error to leave
     * this attribute with its default value for some strategies.
     * (The {@code Undefined.CLASS} sentinel means "not supplied".)
     */
    Class forClass() default Undefined.CLASS.class;
    /**
     * A class capturing the builder strategy.
     * Defaults to {@link DefaultStrategy}; see the strategy classes listed on this
     * annotation's class-level documentation for the bundled alternatives.
     */
    Class<? extends BuilderStrategy> builderStrategy() default DefaultStrategy.class;
    /**
     * The prefix to use when creating the setter methods.
     * Default is determined by the strategy which might use "" or "set" but you can choose your own, e.g. "with".
     * If non-empty the first letter of the property will be capitalized before being appended to the prefix.
     */
    String prefix() default Undefined.STRING;
    /**
     * For strategies which create a builder helper class, the class name to use for the helper class.
     * Not used if using {@code forClass} since in such cases the builder class is explicitly supplied.
     * Default is determined by the strategy, e.g. <em>TargetClass</em> + "Builder" or <em>TargetClass</em> + "Initializer".
     */
    String builderClassName() default Undefined.STRING;
    /**
     * For strategies which create a builder helper class that creates the instance, the method name to call to create the instance.
     * Default is determined by the strategy, e.g. <em>build</em> or <em>create</em>.
     */
    String buildMethodName() default Undefined.STRING;
    /**
     * The method name to use for a builder factory method in the source class for easy access of the
     * builder helper class for strategies which create such a helper class.
     * Must not be used if using {@code forClass}.
     * Default is determined by the strategy, e.g. <em>builder</em> or <em>createInitializer</em>.
     */
    String builderMethodName() default Undefined.STRING;
    /**
     * List of field and/or property names to exclude from generated builder methods.
     * Must not be used if 'includes' is used. For convenience, a String with comma separated names
     * can be used in addition to an array (using Groovy's literal list notation) of String values.
     */
    String[] excludes() default {};
    /**
     * List of field and/or property names to include within the generated builder methods.
     * Must not be used if 'excludes' is used. For convenience, a String with comma separated names
     * can be used in addition to an array (using Groovy's literal list notation) of String values.
     */
    String[] includes() default {};
    /**
     * By default, properties are set directly using their respective field.
     * By setting {@code useSetters=true} then a writable property will be set using its setter.
     * If turning on this flag we recommend that setters that might be called are
     * made null-safe wrt the parameter.
     */
    boolean useSetters() default false;
}
| apache-2.0 |
siosio/intellij-community | java/java-impl/src/com/intellij/ide/hierarchy/method/JavaMethodHierarchyProvider.java | 2710 | // Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.hierarchy.method;
import com.intellij.ide.hierarchy.HierarchyBrowser;
import com.intellij.ide.hierarchy.HierarchyProvider;
import com.intellij.ide.hierarchy.MethodHierarchyBrowserBase;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public class JavaMethodHierarchyProvider implements HierarchyProvider {
  /**
   * Returns the method to build a hierarchy for, or {@code null} when the context
   * does not point at a suitable method (must belong to a class and be neither
   * private nor static).
   */
  @Override
  public PsiElement getTarget(@NotNull DataContext dataContext) {
    PsiMethod method = getMethodImpl(dataContext);
    if (method == null) return null;
    boolean suitable = method.getContainingClass() != null
                       && !method.hasModifierProperty(PsiModifier.PRIVATE)
                       && !method.hasModifierProperty(PsiModifier.STATIC);
    return suitable ? method : null;
  }

  /**
   * Resolves a {@link PsiMethod} from the data context: first from the context's
   * PSI element, then from the caret position when it sits in whitespace right
   * after a semicolon.
   */
  @Nullable
  private static PsiMethod getMethodImpl(DataContext dataContext) {
    Project project = CommonDataKeys.PROJECT.getData(dataContext);
    if (project == null) return null;

    PsiElement contextElement = CommonDataKeys.PSI_ELEMENT.getData(dataContext);
    PsiMethod fromContext = PsiTreeUtil.getParentOfType(contextElement, PsiMethod.class, false);
    if (fromContext != null) return fromContext;

    Editor editor = CommonDataKeys.EDITOR.getData(dataContext);
    if (editor == null) return null;
    PsiFile psiFile = PsiDocumentManager.getInstance(project).getPsiFile(editor.getDocument());
    if (psiFile == null) return null;

    int offset = editor.getCaretModel().getOffset();
    if (offset < 1) return null;

    // Accept only "stmt();<caret-in-whitespace>" — whitespace at the caret, semicolon just before it.
    PsiElement atCaret = psiFile.findElementAt(offset);
    if (!(atCaret instanceof PsiWhiteSpace)) return null;
    PsiElement beforeCaret = psiFile.findElementAt(offset - 1);
    if (!(beforeCaret instanceof PsiJavaToken) || ((PsiJavaToken)beforeCaret).getTokenType() != JavaTokenType.SEMICOLON) {
      return null;
    }
    return PsiTreeUtil.getParentOfType(beforeCaret, PsiMethod.class, false);
  }

  /** Creates the method-hierarchy browser for the resolved target method. */
  @Override
  @NotNull
  public HierarchyBrowser createHierarchyBrowser(@NotNull PsiElement target) {
    return new MethodHierarchyBrowser(target.getProject(), (PsiMethod) target);
  }

  /** Switches the freshly activated browser to the method-hierarchy view. */
  @Override
  public void browserActivated(@NotNull HierarchyBrowser hierarchyBrowser) {
    ((MethodHierarchyBrowser) hierarchyBrowser).changeView(MethodHierarchyBrowserBase.getMethodType());
  }
}
| apache-2.0 |
gingerwizard/elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java | 52980 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ssl;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.concurrent.FutureCallback;
import org.apache.http.conn.ssl.DefaultHostnameVerifier;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.nio.client.CloseableHttpAsyncClient;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.CheckedRunnable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.ssl.DiagnosticTrustManager;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.Network;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo;
import org.junit.Before;
import org.mockito.ArgumentCaptor;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLParameters;
import javax.net.ssl.SSLPeerUnverifiedException;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSessionContext;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.X509ExtendedTrustManager;
import javax.security.cert.X509Certificate;
import java.nio.file.Path;
import java.security.AccessController;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.security.cert.Certificate;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.test.TestMatchers.throwableWithMessage;
import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.iterableWithSize;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class SSLServiceTests extends ESTestCase {
    // Key/trust store for the "testnode" identity; format (JKS vs PKCS#12) is randomized in setup().
    private Path testnodeStore;
    // Explicit store type ("jks"/"PKCS12"), or null to exercise the default-type code path.
    private String testnodeStoreType;
    // JKS store holding the "testclient" identity, used as an alternate truststore.
    private Path testclientStore;
    // PEM certificate/key pair for the "testnode" identity.
    private Path testnodeCert;
    private Path testnodeKey;
    // Test environment rooted at a temp dir; built once per test in setup().
    private Environment env;
    @Before
    public void setup() throws Exception {
        // Randomise the keystore type (jks/PKCS#12)
        // NOTE: the order of randomBoolean() calls matters for seed-reproducible test runs.
        if (randomBoolean()) {
            testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks");
            // The default is to use JKS. Randomly test with explicit and with the default value.
            testnodeStoreType = "jks";
        } else {
            testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12");
            testnodeStoreType = randomBoolean() ? "PKCS12" : null;
        }
        logger.info("Using [{}] key/truststore [{}]", testnodeStoreType, testnodeStore);
        testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt");
        testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem");
        testclientStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks");
        // Environment only needs a valid path.home for these tests.
        env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build());
    }
    // A transport profile may declare its own truststore; the engine built from it must be
    // distinct from the default transport engine, and the profile's resolved configuration
    // must point at the custom store.
    public void testThatCustomTruststoreCanBeSpecified() throws Exception {
        assumeFalse("Can't run in a FIPS JVM", inFipsJvm());
        Path testClientStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks");
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("xpack.security.transport.ssl.truststore.secure_password", "testnode");
        secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode");
        secureSettings.setString("transport.profiles.foo.xpack.security.ssl.truststore.secure_password", "testclient");
        Settings settings = Settings.builder()
            .put("xpack.security.transport.ssl.enabled", true)
            .put("xpack.security.transport.ssl.keystore.path", testnodeStore)
            .put("xpack.security.transport.ssl.truststore.path", testnodeStore)
            .put("xpack.security.transport.ssl.truststore.type", testnodeStoreType)
            .setSecureSettings(secureSettings)
            .put("transport.profiles.foo.xpack.security.ssl.truststore.path", testClientStore)
            .build();
        SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        // Build an engine from an ad-hoc configuration using the testclient truststore.
        MockSecureSettings secureCustomSettings = new MockSecureSettings();
        secureCustomSettings.setString("truststore.secure_password", "testclient");
        Settings customTruststoreSettings = Settings.builder()
            .put("truststore.path", testClientStore)
            .setSecureSettings(secureCustomSettings)
            .build();
        SSLConfiguration configuration = new SSLConfiguration(customTruststoreSettings);
        SSLEngine sslEngineWithTruststore = sslService.createSSLEngine(configuration, null, -1);
        assertThat(sslEngineWithTruststore, is(not(nullValue())));
        // The default transport engine must not be the same instance as the custom-truststore one.
        SSLConfiguration defaultConfig = sslService.getSSLConfiguration("xpack.security.transport.ssl");
        SSLEngine sslEngine = sslService.createSSLEngine(defaultConfig, null, -1);
        assertThat(sslEngineWithTruststore, is(not(sameInstance(sslEngine))));
        // The profile configuration should resolve to the custom store path.
        final SSLConfiguration profileConfiguration = sslService.getSSLConfiguration("transport.profiles.foo.xpack.security.ssl");
        assertThat(profileConfiguration, notNullValue());
        assertThat(profileConfiguration.trustConfig(), instanceOf(StoreTrustConfig.class));
        assertThat(((StoreTrustConfig) profileConfiguration.trustConfig()).trustStorePath, equalTo(testClientStore.toString()));
    }
    // Requesting an SSLContext twice for the same configuration must return the same
    // cached instance, whether looked up via raw settings or via a named configuration.
    public void testThatSslContextCachingWorks() throws Exception {
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode");
        Settings settings = Settings.builder()
            .put("xpack.security.transport.ssl.enabled", true)
            .put("xpack.security.transport.ssl.certificate", testnodeCert)
            .put("xpack.security.transport.ssl.key", testnodeKey)
            .setSecureSettings(secureSettings)
            .build();
        SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        final Settings transportSSLSettings = settings.getByPrefix("xpack.security.transport.ssl.");
        SSLContext sslContext = sslService.sslContext(sslService.sslConfiguration(transportSSLSettings));
        SSLContext cachedSslContext = sslService.sslContext(sslService.sslConfiguration(transportSSLSettings));
        assertThat(sslContext, is(sameInstance(cachedSslContext)));
        // Looking the context up via the named configuration must hit the same cache entry.
        final SSLConfiguration configuration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
        final SSLContext configContext = sslService.sslContext(configuration);
        assertThat(configContext, is(sameInstance(sslContext)));
    }
    // A keystore whose key entry uses a different password than the store itself must
    // still work when both passwords are supplied.
    public void testThatKeyStoreAndKeyCanHaveDifferentPasswords() throws Exception {
        assumeFalse("Can't run in a FIPS JVM", inFipsJvm());
        Path differentPasswordsStore =
            getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-different-passwords.jks");
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode");
        secureSettings.setString("xpack.security.transport.ssl.keystore.secure_key_password", "testnode1");
        Settings settings = Settings.builder()
            .put("xpack.security.transport.ssl.enabled", true)
            .put("xpack.security.transport.ssl.keystore.path", differentPasswordsStore)
            .setSecureSettings(secureSettings)
            .build();
        final SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        SSLConfiguration configuration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
        // Success criterion is simply that engine creation does not throw.
        sslService.createSSLEngine(configuration, null, -1);
    }
public void testIncorrectKeyPasswordThrowsException() throws Exception {
assumeFalse("Can't run in a FIPS JVM", inFipsJvm());
Path differentPasswordsStore =
getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-different-passwords.jks");
try {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode");
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.keystore.path", differentPasswordsStore)
.setSecureSettings(secureSettings)
.build();
final SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
SSLConfiguration configuration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
sslService.createSSLEngine(configuration, null, -1);
fail("expected an exception");
} catch (ElasticsearchException e) {
assertThat(e, throwableWithMessage("failed to load SSL configuration [xpack.security.transport.ssl]"));
assertThat(e.getCause(), throwableWithMessage(containsString("failed to initialize SSL KeyManager")));
}
}
public void testThatSSLv3IsNotEnabled() throws Exception {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode");
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.enabled", true)
.put("xpack.security.transport.ssl.certificate", testnodeCert)
.put("xpack.security.transport.ssl.key", testnodeKey)
.setSecureSettings(secureSettings)
.build();
SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
SSLConfiguration configuration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
SSLEngine engine = sslService.createSSLEngine(configuration, null, -1);
assertThat(Arrays.asList(engine.getEnabledProtocols()), not(hasItem("SSLv3")));
}
public void testThatCreateClientSSLEngineWithoutAnySettingsWorks() throws Exception {
SSLService sslService = new SSLService(env);
SSLConfiguration configuration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
SSLEngine sslEngine = sslService.createSSLEngine(configuration, null, -1);
assertThat(sslEngine, notNullValue());
}
    // An engine can be created from a configuration that has only a truststore (no keystore).
    public void testThatCreateSSLEngineWithOnlyTruststoreWorks() throws Exception {
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("xpack.http.ssl.truststore.secure_password", "testclient");
        Settings settings = Settings.builder()
            .put("xpack.http.ssl.enabled", true)
            .put("xpack.http.ssl.truststore.path", testclientStore)
            .setSecureSettings(secureSettings)
            .build();
        SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        // NOTE(review): the settings above configure "xpack.http.ssl" but the lookup below uses
        // "xpack.security.http.ssl" — these are different contexts; verify this is intentional
        // (the assertion may be exercising the default configuration rather than the one built here).
        SSLConfiguration configuration = sslService.getSSLConfiguration("xpack.security.http.ssl");
        SSLEngine sslEngine = sslService.createSSLEngine(configuration, null, -1);
        assertThat(sslEngine, notNullValue());
    }
public void testCreateWithKeystoreIsValidForServer() throws Exception {
assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm());
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode");
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.enabled", true)
.put("xpack.security.transport.ssl.keystore.path", testnodeStore)
.put("xpack.security.transport.ssl.keystore.type", testnodeStoreType)
.setSecureSettings(secureSettings)
.build();
SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
assertTrue(sslService.isConfigurationValidForServerUsage(sslService.getSSLConfiguration("xpack.security.transport.ssl")));
}
    public void testValidForServer() throws Exception {
        assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm());
        // Phase 1: a truststore-only configuration has no key material, so it must NOT be
        // reported as valid for server-side use.
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("xpack.http.ssl.truststore.secure_password", "testnode");
        Settings settings = Settings.builder()
            .put("xpack.http.ssl.truststore.path", testnodeStore)
            .put("xpack.http.ssl.truststore.type", testnodeStoreType)
            .setSecureSettings(secureSettings)
            .build();
        SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        // Technically, we don't care whether xpack.http.ssl is valid for server - it's a client context, but we validate both of the
        // server contexts (http & transport) during construction, so this is the only way to make a non-server-valid context.
        assertFalse(sslService.isConfigurationValidForServerUsage(sslService.getSSLConfiguration("xpack.http.ssl")));
        // Phase 2: adding a keystore (key material) to the same context makes it server-valid.
        secureSettings.setString("xpack.http.ssl.keystore.secure_password", "testnode");
        settings = Settings.builder()
            .put("xpack.http.ssl.truststore.path", testnodeStore)
            .put("xpack.http.ssl.truststore.type", testnodeStoreType)
            .setSecureSettings(secureSettings)
            .put("xpack.http.ssl.keystore.path", testnodeStore)
            .put("xpack.http.ssl.keystore.type", testnodeStoreType)
            .build();
        sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        assertTrue(sslService.isConfigurationValidForServerUsage(sslService.getSSLConfiguration("xpack.http.ssl")));
    }
public void testGetVerificationMode() throws Exception {
assumeFalse("Can't run in a FIPS JVM, TrustAllConfig is not a SunJSSE TrustManagers", inFipsJvm());
SSLService sslService = new SSLService(env);
assertThat(sslService.getSSLConfiguration("xpack.security.transport.ssl").verificationMode(),
is(XPackSettings.VERIFICATION_MODE_DEFAULT));
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.enabled", false)
.put("xpack.security.transport.ssl.verification_mode", "certificate")
.put("transport.profiles.foo.xpack.security.ssl.verification_mode", "full")
.build();
sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
assertThat(sslService.getSSLConfiguration("xpack.security.transport.ssl.").verificationMode(), is(VerificationMode.CERTIFICATE));
assertThat(sslService.getSSLConfiguration("transport.profiles.foo.xpack.security.ssl.").verificationMode(),
is(VerificationMode.FULL));
}
public void testIsSSLClientAuthEnabled() throws Exception {
SSLService sslService = new SSLService(env);
assertTrue(sslService.getSSLConfiguration("xpack.security.transport.ssl").sslClientAuth().enabled());
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.enabled", false)
.put("xpack.security.transport.ssl.client_authentication", "optional")
.put("transport.profiles.foo.port", "9400-9410")
.build();
sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
assertTrue(sslService.isSSLClientAuthEnabled(sslService.getSSLConfiguration("xpack.security.transport.ssl")));
assertTrue(sslService.isSSLClientAuthEnabled(sslService.getSSLConfiguration("transport.profiles.foo.xpack.security.ssl")));
}
public void testThatHttpClientAuthDefaultsToNone() throws Exception {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode");
secureSettings.setString("xpack.security.http.ssl.keystore.secure_password", "testnode");
final Settings globalSettings = Settings.builder()
.put("xpack.security.http.ssl.enabled", true)
.put("xpack.security.http.ssl.keystore.path", testnodeStore)
.put("xpack.security.http.ssl.keystore.type", testnodeStoreType)
.put("xpack.security.transport.ssl.enabled", true)
.put("xpack.security.transport.ssl.client_authentication", SSLClientAuth.OPTIONAL.name())
.put("xpack.security.transport.ssl.keystore.path", testnodeStore)
.put("xpack.security.transport.ssl.keystore.type", testnodeStoreType)
.setSecureSettings(secureSettings)
.build();
final SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(globalSettings)));
final SSLConfiguration globalConfig = sslService.getSSLConfiguration("xpack.security.transport.ssl");
assertThat(globalConfig.sslClientAuth(), is(SSLClientAuth.OPTIONAL));
final SSLConfiguration httpConfig = sslService.getHttpTransportSSLConfiguration();
assertThat(httpConfig.sslClientAuth(), is(SSLClientAuth.NONE));
}
public void testThatTruststorePasswordIsRequired() throws Exception {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode");
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.keystore.path", testnodeStore)
.put("xpack.security.transport.ssl.keystore.type", testnodeStoreType)
.setSecureSettings(secureSettings)
.put("xpack.security.transport.ssl.truststore.path", testnodeStore)
.put("xpack.security.transport.ssl.truststore.type", testnodeStoreType)
.build();
ElasticsearchException e =
expectThrows(ElasticsearchException.class, () -> new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings))));
assertThat(e, throwableWithMessage("failed to load SSL configuration [xpack.security.transport.ssl]"));
assertThat(e.getCause(), throwableWithMessage(containsString("failed to initialize SSL TrustManager")));
}
public void testThatKeystorePasswordIsRequired() throws Exception {
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.keystore.path", testnodeStore)
.put("xpack.security.transport.ssl.keystore.type", testnodeStoreType)
.build();
ElasticsearchException e =
expectThrows(ElasticsearchException.class, () -> new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings))));
assertThat(e, throwableWithMessage("failed to load SSL configuration [xpack.security.transport.ssl]"));
assertThat(e.getCause(), throwableWithMessage("failed to create trust manager"));
}
public void testCiphersAndInvalidCiphersWork() throws Exception {
List<String> ciphers = new ArrayList<>(XPackSettings.DEFAULT_CIPHERS);
ciphers.add("foo");
ciphers.add("bar");
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode");
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.enabled", true)
.put("xpack.security.transport.ssl.certificate", testnodeCert)
.put("xpack.security.transport.ssl.key", testnodeKey)
.setSecureSettings(secureSettings)
.putList("xpack.security.transport.ssl.ciphers", ciphers.toArray(new String[ciphers.size()]))
.build();
SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
SSLConfiguration configuration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
SSLEngine engine = sslService.createSSLEngine(configuration, null, -1);
assertThat(engine, is(notNullValue()));
String[] enabledCiphers = engine.getEnabledCipherSuites();
assertThat(Arrays.asList(enabledCiphers), not(contains("foo", "bar")));
}
public void testInvalidCiphersOnlyThrowsException() throws Exception {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode");
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.certificate", testnodeCert)
.put("xpack.security.transport.ssl.key", testnodeKey)
.setSecureSettings(secureSettings)
.putList("xpack.security.transport.ssl.cipher_suites", new String[] { "foo", "bar" })
.build();
ElasticsearchException e =
expectThrows(ElasticsearchException.class, () -> new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings))));
assertThat(e, throwableWithMessage("failed to load SSL configuration [xpack.security.transport.ssl]"));
assertThat(e.getCause(), throwableWithMessage("none of the ciphers [foo, bar] are supported by this JVM"));
}
public void testThatSSLEngineHasCipherSuitesOrderSet() throws Exception {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode");
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.enabled", true)
.put("xpack.security.transport.ssl.certificate", testnodeCert)
.put("xpack.security.transport.ssl.key", testnodeKey)
.setSecureSettings(secureSettings)
.build();
SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
SSLConfiguration configuration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
SSLEngine engine = sslService.createSSLEngine(configuration, null, -1);
assertThat(engine, is(notNullValue()));
assertTrue(engine.getSSLParameters().getUseCipherSuitesOrder());
}
    public void testThatSSLSocketFactoryHasProperCiphersAndProtocols() throws Exception {
        // Sockets produced by the service's SSLSocketFactory must be pre-configured with
        // exactly the supported subset of the configured ciphers and protocols.
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode");
        Settings settings = Settings.builder()
            .put("xpack.security.transport.ssl.enabled", true)
            .put("xpack.security.transport.ssl.certificate", testnodeCert)
            .put("xpack.security.transport.ssl.key", testnodeKey)
            .setSecureSettings(secureSettings)
            .build();
        SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        SSLConfiguration config = sslService.getSSLConfiguration("xpack.security.transport.ssl");
        final SSLSocketFactory factory = sslService.sslSocketFactory(config);
        // Expected ciphers = configured suites filtered down to what the factory supports.
        final String[] ciphers = sslService.supportedCiphers(factory.getSupportedCipherSuites(), config.cipherSuites(), false);
        assertThat(factory.getDefaultCipherSuites(), is(ciphers));
        final String[] supportedProtocols = config.supportedProtocols().toArray(Strings.EMPTY_ARRAY);
        try (SSLSocket socket = (SSLSocket) factory.createSocket()) {
            // Cipher order is significant; protocol order is not (see comment below).
            assertThat(socket.getEnabledCipherSuites(), is(ciphers));
            // the order we set the protocols in is not going to be what is returned as internally the JDK may sort the versions
            assertThat(socket.getEnabledProtocols(), arrayContainingInAnyOrder(supportedProtocols))
            assertArrayEquals(ciphers, socket.getSSLParameters().getCipherSuites());
            assertThat(socket.getSSLParameters().getProtocols(), arrayContainingInAnyOrder(supportedProtocols));
            assertTrue(socket.getSSLParameters().getUseCipherSuitesOrder());
        }
    }
    public void testThatSSLEngineHasProperCiphersAndProtocols() throws Exception {
        // Mirror of testThatSSLSocketFactoryHasProperCiphersAndProtocols, but for engines
        // created directly via createSSLEngine rather than through a socket factory.
        MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode");
        Settings settings = Settings.builder()
            .put("xpack.security.transport.ssl.enabled", true)
            .put("xpack.security.transport.ssl.certificate", testnodeCert)
            .put("xpack.security.transport.ssl.key", testnodeKey)
            .setSecureSettings(secureSettings)
            .build();
        SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        SSLConfiguration configuration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
        SSLEngine engine = sslService.createSSLEngine(configuration, null, -1);
        // Expected ciphers = configured suites filtered down to what the engine supports.
        final String[] ciphers = sslService.supportedCiphers(engine.getSupportedCipherSuites(), configuration.cipherSuites(), false);
        final String[] supportedProtocols = configuration.supportedProtocols().toArray(Strings.EMPTY_ARRAY);
        assertThat(engine.getEnabledCipherSuites(), is(ciphers));
        assertArrayEquals(ciphers, engine.getSSLParameters().getCipherSuites());
        // the order we set the protocols in is not going to be what is returned as internally the JDK may sort the versions
        assertThat(engine.getEnabledProtocols(), arrayContainingInAnyOrder(supportedProtocols));
        assertThat(engine.getSSLParameters().getProtocols(), arrayContainingInAnyOrder(supportedProtocols));
    }
    public void testSSLStrategy() {
        // this just exhaustively verifies that the right things are called and that it uses the right parameters
        VerificationMode mode = randomFrom(VerificationMode.values());
        Settings settings = Settings.builder()
            .put("supported_protocols", "protocols")
            .put("cipher_suites", "")
            .put("verification_mode", mode.name())
            .build();
        // Every collaborator is mocked; only the two sslIOSessionStrategy overloads under
        // test run their real implementations (via thenCallRealMethod below).
        SSLService sslService = mock(SSLService.class);
        SSLConfiguration sslConfig = new SSLConfiguration(settings);
        SSLParameters sslParameters = mock(SSLParameters.class);
        SSLContext sslContext = mock(SSLContext.class);
        String[] protocols = new String[] { "protocols" };
        String[] ciphers = new String[] { "ciphers!!!" };
        String[] supportedCiphers = new String[] { "supported ciphers" };
        List<String> requestedCiphers = new ArrayList<>(0);
        // Captures the HostnameVerifier the real method passes through, so we can assert
        // its type against the randomly chosen verification mode.
        ArgumentCaptor<HostnameVerifier> verifier = ArgumentCaptor.forClass(HostnameVerifier.class);
        SSLIOSessionStrategy sslStrategy = mock(SSLIOSessionStrategy.class);
        when(sslService.sslConfiguration(settings)).thenReturn(sslConfig);
        when(sslService.sslContext(sslConfig)).thenReturn(sslContext);
        when(sslService.supportedCiphers(supportedCiphers, requestedCiphers, false)).thenReturn(ciphers);
        when(sslService.sslParameters(sslContext)).thenReturn(sslParameters);
        when(sslParameters.getCipherSuites()).thenReturn(supportedCiphers);
        when(sslService.sslIOSessionStrategy(eq(sslContext), eq(protocols), eq(ciphers), verifier.capture())).thenReturn(sslStrategy);
        // ensure it actually goes through and calls the real method
        when(sslService.sslIOSessionStrategy(settings)).thenCallRealMethod();
        when(sslService.sslIOSessionStrategy(sslConfig)).thenCallRealMethod();
        assertThat(sslService.sslIOSessionStrategy(settings), sameInstance(sslStrategy));
        // Hostname verification on -> real verifier; off -> the no-op singleton.
        if (mode.isHostnameVerificationEnabled()) {
            assertThat(verifier.getValue(), instanceOf(DefaultHostnameVerifier.class));
        } else {
            assertThat(verifier.getValue(), sameInstance(NoopHostnameVerifier.INSTANCE));
        }
    }
    public void testGetConfigurationByContextName() throws Exception {
        assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm());
        // Configure many distinct SSL contexts, each with a unique cipher suite, and verify
        // that looking up each context by name returns the configuration that was set for
        // it (and not some other context's, or a default).
        final SSLContext sslContext = SSLContext.getInstance("TLSv1.2");
        sslContext.init(null, null, null);
        final String[] cipherSuites = sslContext.getSupportedSSLParameters().getCipherSuites();
        final String[] contextNames = {
            "xpack.http.ssl",
            "xpack.security.http.ssl",
            "xpack.security.transport.ssl",
            "transport.profiles.prof1.xpack.security.ssl",
            "transport.profiles.prof2.xpack.security.ssl",
            "transport.profiles.prof3.xpack.security.ssl",
            "xpack.security.authc.realms.ldap.realm1.ssl",
            "xpack.security.authc.realms.saml.realm2.ssl",
            "xpack.monitoring.exporters.mon1.ssl",
            "xpack.monitoring.exporters.mon2.ssl"
        };
        assumeTrue("Not enough cipher suites are available to support this test", cipherSuites.length >= contextNames.length);
        // Here we use a different ciphers for each context, so we can check that the returned SSLConfiguration matches the
        // provided settings
        final Iterator<String> cipher = Arrays.asList(cipherSuites).iterator();
        final MockSecureSettings secureSettings = new MockSecureSettings();
        final Settings.Builder builder = Settings.builder();
        for (String prefix : contextNames) {
            // Only the http/transport server contexts take an ".enabled" flag.
            if (prefix.startsWith("xpack.security.transport") || prefix.startsWith("xpack.security.http")) {
                builder.put(prefix + ".enabled", true);
            }
            secureSettings.setString(prefix + ".keystore.secure_password", "testnode");
            builder.put(prefix + ".keystore.path", testnodeStore)
                .putList(prefix + ".cipher_suites", cipher.next());
        }
        final Settings settings = builder
            .setSecureSettings(secureSettings)
            .build();
        SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        for (int i = 0; i < contextNames.length; i++) {
            final String name = contextNames[i];
            final SSLConfiguration configuration = sslService.getSSLConfiguration(name);
            assertThat("Configuration for " + name, configuration, notNullValue());
            assertThat("KeyStore for " + name, configuration.keyConfig(), instanceOf(StoreKeyConfig.class));
            final StoreKeyConfig keyConfig = (StoreKeyConfig) configuration.keyConfig();
            assertThat("KeyStore Path for " + name, keyConfig.keyStorePath, equalTo(testnodeStore.toString()));
            // cipherSuites[i] is unique per context, proving we got *this* context's config.
            assertThat("Cipher for " + name, configuration.cipherSuites(), contains(cipherSuites[i]));
            // A trailing '.' on the context name must resolve to the very same instance.
            assertThat("Configuration for " + name + ".", sslService.getSSLConfiguration(name + "."), sameInstance(configuration));
        }
    }
    public void testReadCertificateInformation() throws Exception {
        assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm());
        // Load certificates from three sources (a JKS keystore used as both keystore and
        // truststore, a PKCS12 keystore, and a PEM CA file) and verify every field of all
        // 13 CertificateInfo entries the service reports.
        final Path jksPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks");
        final Path p12Path = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12");
        final Path pemPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/active-directory-ca.crt");
        final MockSecureSettings secureSettings = new MockSecureSettings();
        secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode");
        secureSettings.setString("xpack.security.transport.ssl.truststore.secure_password", "testnode");
        secureSettings.setString("xpack.http.ssl.keystore.secure_password", "testnode");
        final Settings settings = Settings.builder()
            .put("xpack.security.transport.ssl.enabled", randomBoolean())
            .put("xpack.security.transport.ssl.keystore.path", jksPath)
            .put("xpack.security.transport.ssl.truststore.path", jksPath)
            .put("xpack.http.ssl.keystore.path", p12Path)
            .put("xpack.security.authc.realms.active_directory.ad.ssl.certificate_authorities", pemPath)
            .setSecureSettings(secureSettings)
            .build();
        final SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
        final List<CertificateInfo> certificates = new ArrayList<>(sslService.getLoadedCertificates());
        assertThat(certificates, iterableWithSize(13));
        // Sort by (alias, path) so the assertions below can walk the list in a fixed order;
        // the PEM cert has no alias and therefore sorts first.
        Collections.sort(certificates,
            Comparator.comparing((CertificateInfo c) -> c.alias() == null ? "" : c.alias()).thenComparing(CertificateInfo::path));
        final Iterator<CertificateInfo> iterator = certificates.iterator();
        // 1: PEM CA certificate (no alias, no private key)
        CertificateInfo cert = iterator.next();
        assertThat(cert.alias(), nullValue());
        assertThat(cert.path(), equalTo(pemPath.toString()));
        assertThat(cert.format(), equalTo("PEM"));
        assertThat(cert.serialNumber(), equalTo("580db8ad52bb168a4080e1df122a3f56"));
        assertThat(cert.subjectDn(), equalTo("CN=ad-ELASTICSEARCHAD-CA, DC=ad, DC=test, DC=elasticsearch, DC=com"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2029-08-27T16:32:42Z")));
        assertThat(cert.hasPrivateKey(), equalTo(false));
        // 2: "activedir" (JKS) - same CA as the PEM entry above, loaded from the keystore
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("activedir"));
        assertThat(cert.path(), equalTo(jksPath.toString()));
        assertThat(cert.format(), equalTo("jks"));
        assertThat(cert.serialNumber(), equalTo("580db8ad52bb168a4080e1df122a3f56"));
        assertThat(cert.subjectDn(), equalTo("CN=ad-ELASTICSEARCHAD-CA, DC=ad, DC=test, DC=elasticsearch, DC=com"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2029-08-27T16:32:42Z")));
        assertThat(cert.hasPrivateKey(), equalTo(false));
        // 3: "mykey" (JKS)
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("mykey"));
        assertThat(cert.path(), equalTo(jksPath.toString()));
        assertThat(cert.format(), equalTo("jks"));
        assertThat(cert.serialNumber(), equalTo("3151a81eec8d4e34c56a8466a8510bcfbe63cc31"));
        assertThat(cert.subjectDn(), equalTo("CN=samba4"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2021-02-14T17:49:11.000Z")));
        assertThat(cert.hasPrivateKey(), equalTo(false));
        // 4: "openldap" (JKS)
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("openldap"));
        assertThat(cert.path(), equalTo(jksPath.toString()));
        assertThat(cert.format(), equalTo("jks"));
        assertThat(cert.serialNumber(), equalTo("d3850b2b1995ad5f"));
        assertThat(cert.subjectDn(), equalTo("CN=OpenLDAP, OU=Elasticsearch, O=Elastic, L=Mountain View, ST=CA, C=US"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2027-07-23T16:41:14Z")));
        assertThat(cert.hasPrivateKey(), equalTo(false));
        // 5: "testclient" (JKS)
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("testclient"));
        assertThat(cert.path(), equalTo(jksPath.toString()));
        assertThat(cert.format(), equalTo("jks"));
        assertThat(cert.serialNumber(), equalTo("b9d497f2924bbe29"));
        assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Client, OU=elasticsearch, O=org"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2019-09-22T18:52:55Z")));
        assertThat(cert.hasPrivateKey(), equalTo(false));
        // 6: "testnode-client-profile" (JKS)
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("testnode-client-profile"));
        assertThat(cert.path(), equalTo(jksPath.toString()));
        assertThat(cert.format(), equalTo("jks"));
        assertThat(cert.serialNumber(), equalTo("c0ea4216e8ff0fd8"));
        assertThat(cert.subjectDn(), equalTo("CN=testnode-client-profile"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2019-09-22T18:52:56Z")));
        assertThat(cert.hasPrivateKey(), equalTo(false));
        // 7+8: "testnode_dsa" appears twice - once from the JKS store, once from PKCS12
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("testnode_dsa"));
        assertThat(cert.path(), equalTo(jksPath.toString()));
        assertThat(cert.format(), equalTo("jks"));
        assertThat(cert.serialNumber(), equalTo("223c736a"));
        assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2045-10-02T09:43:18.000Z")));
        assertThat(cert.hasPrivateKey(), equalTo(true));
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("testnode_dsa"));
        assertThat(cert.path(), equalTo(p12Path.toString()));
        assertThat(cert.format(), equalTo("PKCS12"));
        assertThat(cert.serialNumber(), equalTo("223c736a"));
        assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2045-10-02T09:43:18.000Z")));
        assertThat(cert.hasPrivateKey(), equalTo(true));
        // 9+10: "testnode_ec" from JKS and PKCS12
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("testnode_ec"));
        assertThat(cert.path(), equalTo(jksPath.toString()));
        assertThat(cert.format(), equalTo("jks"));
        assertThat(cert.serialNumber(), equalTo("7268203b"));
        assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2045-10-02T09:36:10.000Z")));
        assertThat(cert.hasPrivateKey(), equalTo(true));
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("testnode_ec"));
        assertThat(cert.path(), equalTo(p12Path.toString()));
        assertThat(cert.format(), equalTo("PKCS12"));
        assertThat(cert.serialNumber(), equalTo("7268203b"));
        assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2045-10-02T09:36:10.000Z")));
        assertThat(cert.hasPrivateKey(), equalTo(true));
        // 11+12: "testnode_rsa" from JKS and PKCS12
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("testnode_rsa"));
        assertThat(cert.path(), equalTo(jksPath.toString()));
        assertThat(cert.format(), equalTo("jks"));
        assertThat(cert.serialNumber(), equalTo("b8b96c37e332cccb"));
        assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node, OU=elasticsearch, O=org"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2019-09-22T18:52:57.000Z")));
        assertThat(cert.hasPrivateKey(), equalTo(true));
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("testnode_rsa"));
        assertThat(cert.path(), equalTo(p12Path.toString()));
        assertThat(cert.format(), equalTo("PKCS12"));
        assertThat(cert.serialNumber(), equalTo("b8b96c37e332cccb"));
        assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node, OU=elasticsearch, O=org"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2019-09-22T18:52:57.000Z")));
        assertThat(cert.hasPrivateKey(), equalTo(true));
        // 13: "trusted_testnode_ec" (JKS) - trusted cert entry, so no private key
        cert = iterator.next();
        assertThat(cert.alias(), equalTo("trusted_testnode_ec"));
        assertThat(cert.path(), equalTo(jksPath.toString()));
        assertThat(cert.format(), equalTo("jks"));
        assertThat(cert.serialNumber(), equalTo("7268203b"));
        assertThat(cert.subjectDn(), equalTo("CN=Elasticsearch Test Node"));
        assertThat(cert.expiry(), equalTo(ZonedDateTime.parse("2045-10-02T09:36:10.000Z")));
        assertThat(cert.hasPrivateKey(), equalTo(false));
        assertFalse(iterator.hasNext());
    }
    public void testSSLSessionInvalidationHandlesNullSessions() {
        // A session context can report ids whose sessions have since been evicted
        // (getSession(id) == null); invalidateSessions must skip those without throwing
        // and still invalidate every live session exactly once.
        final int numEntries = randomIntBetween(1, 32);
        final AtomicInteger invalidationCounter = new AtomicInteger();
        int numNull = 0;
        final Map<byte[], SSLSession> sessionMap = new HashMap<>();
        for (int i = 0; i < numEntries; i++) {
            final byte[] id = randomByteArrayOfLength(2);
            final SSLSession sslSession;
            if (rarely()) {
                // Simulate an evicted session: an id with no session behind it.
                sslSession = null;
                numNull++;
            } else {
                sslSession = new MockSSLSession(id, invalidationCounter::incrementAndGet);
            }
            sessionMap.put(id, sslSession);
        }
        // Minimal SSLSessionContext backed by the map above; timeout/cache-size methods
        // are irrelevant to the invalidation path and so are no-ops.
        SSLSessionContext sslSessionContext = new SSLSessionContext() {
            @Override
            public SSLSession getSession(byte[] sessionId) {
                return sessionMap.get(sessionId);
            }
            @Override
            public Enumeration<byte[]> getIds() {
                return Collections.enumeration(sessionMap.keySet());
            }
            @Override
            public void setSessionTimeout(int seconds) throws IllegalArgumentException {
            }
            @Override
            public int getSessionTimeout() {
                return 0;
            }
            @Override
            public void setSessionCacheSize(int size) throws IllegalArgumentException {
            }
            @Override
            public int getSessionCacheSize() {
                return 0;
            }
        };
        SSLService.invalidateSessions(sslSessionContext);
        // Every non-null session was invalidated exactly once; nulls were skipped.
        assertEquals(numEntries - numNull, invalidationCounter.get());
    }
@Network
public void testThatSSLContextWithoutSettingsWorks() throws Exception {
SSLService sslService = new SSLService(env);
SSLContext sslContext = sslService.sslContext(sslService.sslConfiguration(Settings.EMPTY));
try (CloseableHttpClient client = HttpClients.custom().setSSLContext(sslContext).build()) {
// Execute a GET on a site known to have a valid certificate signed by a trusted public CA
// This will result in an SSLHandshakeException if the SSLContext does not trust the CA, but the default
// truststore trusts all common public CAs so the handshake will succeed
privilegedConnect(() -> client.execute(new HttpGet("https://www.elastic.co/")).close());
}
}
@Network
public void testThatSSLContextTrustsJDKTrustedCAs() throws Exception {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testclient");
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.keystore.path", testclientStore)
.setSecureSettings(secureSettings)
.build();
SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
SSLContext sslContext = sslService.sslContext(sslService.sslConfiguration(settings.getByPrefix("xpack.security.transport.ssl.")));
try (CloseableHttpClient client = HttpClients.custom().setSSLContext(sslContext).build()) {
// Execute a GET on a site known to have a valid certificate signed by a trusted public CA which will succeed because the JDK
// certs are trusted by default
privilegedConnect(() -> client.execute(new HttpGet("https://www.elastic.co/")).close());
}
}
@Network
public void testThatSSLIOSessionStrategyWithoutSettingsWorks() throws Exception {
SSLService sslService = new SSLService(env);
SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
logger.info("SSL Configuration: {}", sslConfiguration);
SSLIOSessionStrategy sslStrategy = sslService.sslIOSessionStrategy(sslConfiguration);
try (CloseableHttpAsyncClient client = getAsyncHttpClient(sslStrategy)) {
client.start();
// Execute a GET on a site known to have a valid certificate signed by a trusted public CA
// This will result in an SSLHandshakeException if the SSLContext does not trust the CA, but the default
// truststore trusts all common public CAs so the handshake will succeed
client.execute(new HttpHost("elastic.co", 443, "https"), new HttpGet("/"), new AssertionCallback()).get();
}
}
@Network
public void testThatSSLIOSessionStrategyTrustsJDKTrustedCAs() throws Exception {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testclient");
Settings settings = Settings.builder()
.put("xpack.security.transport.ssl.keystore.path", testclientStore)
.setSecureSettings(secureSettings)
.build();
final SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(settings)));
SSLIOSessionStrategy sslStrategy = sslService.sslIOSessionStrategy(sslService.getSSLConfiguration("xpack.security.transport.ssl"));
try (CloseableHttpAsyncClient client = getAsyncHttpClient(sslStrategy)) {
client.start();
// Execute a GET on a site known to have a valid certificate signed by a trusted public CA which will succeed because the JDK
// certs are trusted by default
client.execute(new HttpHost("elastic.co", 443, "https"), new HttpGet("/"), new AssertionCallback()).get();
}
}
public void testWrapTrustManagerWhenDiagnosticsEnabled() {
final Settings.Builder builder = Settings.builder();
if (randomBoolean()) { // randomly select between default, and explicit enabled
builder.put("xpack.security.ssl.diagnose.trust", true);
}
final SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(builder.build())));
final X509ExtendedTrustManager baseTrustManager = TrustAllConfig.INSTANCE.createTrustManager(env);
final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
final X509ExtendedTrustManager wrappedTrustManager = sslService.wrapWithDiagnostics(baseTrustManager, sslConfiguration);
assertThat(wrappedTrustManager, instanceOf(DiagnosticTrustManager.class));
assertThat(sslService.wrapWithDiagnostics(wrappedTrustManager, sslConfiguration), sameInstance(wrappedTrustManager));
}
public void testDontWrapTrustManagerWhenDiagnosticsDisabled() {
final Settings.Builder builder = Settings.builder();
builder.put("xpack.security.ssl.diagnose.trust", false);
final SSLService sslService = new SSLService(TestEnvironment.newEnvironment(buildEnvSettings(builder.build())));
final X509ExtendedTrustManager baseTrustManager = TrustAllConfig.INSTANCE.createTrustManager(env);
final SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.security.transport.ssl");
assertThat(sslService.wrapWithDiagnostics(baseTrustManager, sslConfiguration), sameInstance(baseTrustManager));
}
    // Async HTTP callback that fails the test unless the request completes with a
    // status code below 300; any exception or cancellation also fails the test.
    class AssertionCallback implements FutureCallback<HttpResponse> {
        @Override
        public void completed(HttpResponse result) {
            assertThat(result.getStatusLine().getStatusCode(), lessThan(300));
        }
        @Override
        public void failed(Exception ex) {
            logger.error(ex);
            fail(ex.toString());
        }
        @Override
        public void cancelled() {
            fail("The request was cancelled for some reason");
        }
    }
private CloseableHttpAsyncClient getAsyncHttpClient(SSLIOSessionStrategy sslStrategy) throws Exception {
try {
return AccessController.doPrivileged((PrivilegedExceptionAction<CloseableHttpAsyncClient>)
() -> HttpAsyncClientBuilder.create().setSSLStrategy(sslStrategy).build());
} catch (PrivilegedActionException e) {
throw (Exception) e.getCause();
}
}
private static void privilegedConnect(CheckedRunnable<Exception> runnable) throws Exception {
try {
AccessController.doPrivileged((PrivilegedExceptionAction<Void>) () -> {
runnable.run();
return null;
});
} catch (PrivilegedActionException e) {
throw (Exception) e.getCause();
}
}
    /**
     * Minimal {@link SSLSession} stub: it reports the supplied id and runs the supplied
     * callback whenever {@link #invalidate()} is called (used by
     * testSSLSessionInvalidationHandlesNullSessions to count invalidations). Every other
     * method returns a fixed default value.
     */
    private static final class MockSSLSession implements SSLSession {
        private final byte[] id;
        // Invoked once per invalidate() call.
        private final Runnable invalidation;
        private MockSSLSession(byte[] id, Runnable invalidation) {
            this.id = id;
            this.invalidation = invalidation;
        }
        @Override
        public byte[] getId() {
            return id;
        }
        @Override
        public SSLSessionContext getSessionContext() {
            return null;
        }
        @Override
        public long getCreationTime() {
            return 0;
        }
        @Override
        public long getLastAccessedTime() {
            return 0;
        }
        @Override
        public void invalidate() {
            // The only behaviour under test: signal that this session was invalidated.
            invalidation.run();
        }
        @Override
        public boolean isValid() {
            return false;
        }
        @Override
        public void putValue(String name, Object value) {
        }
        @Override
        public Object getValue(String name) {
            return null;
        }
        @Override
        public void removeValue(String name) {
        }
        @Override
        public String[] getValueNames() {
            return new String[0];
        }
        @Override
        public Certificate[] getPeerCertificates() throws SSLPeerUnverifiedException {
            return new Certificate[0];
        }
        @Override
        public Certificate[] getLocalCertificates() {
            return new Certificate[0];
        }
        @SuppressForbidden(reason = "need to reference deprecated class to implement JDK interface")
        @Override
        public X509Certificate[] getPeerCertificateChain() throws SSLPeerUnverifiedException {
            return new X509Certificate[0];
        }
        @Override
        public Principal getPeerPrincipal() throws SSLPeerUnverifiedException {
            return null;
        }
        @Override
        public Principal getLocalPrincipal() {
            return null;
        }
        @Override
        public String getCipherSuite() {
            return null;
        }
        @Override
        public String getProtocol() {
            return null;
        }
        @Override
        public String getPeerHost() {
            return null;
        }
        @Override
        public int getPeerPort() {
            return 0;
        }
        @Override
        public int getPacketBufferSize() {
            return 0;
        }
        @Override
        public int getApplicationBufferSize() {
            return 0;
        }
    }
}
| apache-2.0 |
JSDemos/android-sdk-20 | src/com/android/rs/test/UT_static_globals.java | 1475 | /*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.rs.test;
import android.content.Context;
import android.content.res.Resources;
import android.renderscript.*;
/**
 * Unit test that runs the static_globals RenderScript kernel and its invokable
 * test entry point, then waits for the script's result message.
 */
public class UT_static_globals extends UnitTest {
    private Resources mRes;

    protected UT_static_globals(RSTestCore rstc, Resources res, Context ctx) {
        super(rstc, "Static Globals", ctx);
        mRes = res;
    }

    public void run() {
        RenderScript rs = RenderScript.create(mCtx);
        ScriptC_static_globals script = new ScriptC_static_globals(rs);
        // Install the message handler before kicking off the script so no result is missed.
        rs.setMessageHandler(mRsMessage);
        // Single-element int32 allocation used as the kernel's input/output.
        Allocation alloc = Allocation.createTyped(rs,
                new Type.Builder(rs, Element.I32(rs)).setX(1).create());
        script.forEach_root(alloc);
        script.invoke_static_globals_test();
        rs.finish();
        waitForMessage();
        rs.destroy();
    }
}
| apache-2.0 |
debian-pkg-android-tools/android-platform-tools-apksig | src/main/java/com/android/apksig/internal/util/MessageDigestSink.java | 1745 | /*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.apksig.internal.util;
import com.android.apksig.util.DataSink;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
/**
 * Data sink which feeds all received data into the associated {@link MessageDigest} instances. Each
 * {@code MessageDigest} instance receives the same data.
 */
public class MessageDigestSink implements DataSink {

    /** Digests updated with every chunk of consumed data. */
    private final MessageDigest[] mMessageDigests;

    /**
     * Constructs a sink that forwards all consumed data to each of the provided digests.
     *
     * @param digests digests to update; the array is defensively copied so that later
     *        modification of the caller's array cannot change which digests this sink feeds
     */
    public MessageDigestSink(MessageDigest[] digests) {
        // Defensive copy (also fails fast with NPE here if digests is null).
        mMessageDigests = digests.clone();
    }

    @Override
    public void consume(byte[] buf, int offset, int length) {
        for (MessageDigest md : mMessageDigests) {
            md.update(buf, offset, length);
        }
    }

    @Override
    public void consume(ByteBuffer buf) {
        int originalPosition = buf.position();
        for (MessageDigest md : mMessageDigests) {
            // Reset the position back to the original because the previous iteration's
            // MessageDigest.update set the buffer's position to the buffer's limit.
            buf.position(originalPosition);
            md.update(buf);
        }
    }
}
| apache-2.0 |
longtaoge/playn | android/src/playn/android/AndroidPath.java | 1481 | /**
* Copyright 2010 The PlayN Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package playn.android;
import android.graphics.Path;
/**
 * {@link playn.core.Path} implementation backed by an {@link android.graphics.Path}.
 * Every mutator delegates directly to the underlying Android path and returns
 * {@code this} so calls can be chained.
 */
class AndroidPath implements playn.core.Path {

  // Backing Android path. Package-private — presumably accessed by sibling
  // classes in playn.android when rendering; confirm against callers.
  Path path;

  AndroidPath() {
    path = new Path();
  }

  @Override
  public playn.core.Path bezierTo(float c1x, float c1y, float c2x, float c2y, float x, float y) {
    // playn's bezierTo corresponds to Android's cubic Bezier segment.
    path.cubicTo(c1x, c1y, c2x, c2y, x, y);
    return this;
  }

  @Override
  public playn.core.Path close() {
    path.close();
    return this;
  }

  @Override
  public playn.core.Path lineTo(float x, float y) {
    path.lineTo(x, y);
    return this;
  }

  @Override
  public playn.core.Path moveTo(float x, float y) {
    path.moveTo(x, y);
    return this;
  }

  @Override
  public playn.core.Path quadraticCurveTo(float cpx, float cpy, float x, float y) {
    path.quadTo(cpx, cpy, x, y);
    return this;
  }

  @Override
  public playn.core.Path reset() {
    path.reset();
    return this;
  }
}
| apache-2.0 |
howepeng/isis | core/runtime/src/test/java/org/apache/isis/core/runtime/authentication/standard/SimpleSessionEncodabilityTestAbstract.java | 1622 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.runtime.authentication.standard;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import org.apache.isis.core.commons.encoding.EncodabilityContractTest;
/**
 * Base class for encodability tests of {@link SimpleSession}: verifies that a
 * session survives an encode/decode round trip with its user name and roles intact.
 */
public abstract class SimpleSessionEncodabilityTestAbstract extends EncodabilityContractTest {

    @Override
    protected void assertRoundtripped(final Object decodedEncodable, final Object originalEncodable) {
        final SimpleSession roundTripped = (SimpleSession) decodedEncodable;
        final SimpleSession expected = (SimpleSession) originalEncodable;
        // Both identity attributes must be preserved by the round trip.
        assertThat(roundTripped.getUserName(), is(equalTo(expected.getUserName())));
        assertThat(roundTripped.getRoles(), is(equalTo(expected.getRoles())));
    }
}
| apache-2.0 |
michael-simons/spring-boot | spring-boot-project/spring-boot-docs/src/test/java/org/springframework/boot/docs/web/client/SampleWebClientTests.java | 2008 | /*
* Copyright 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.docs.web.client;
import java.time.Duration;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.boot.web.client.RestTemplateBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.http.HttpHeaders;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Example integration test that uses {@link TestRestTemplate}.
*
* @author Stephane Nicoll
*/
// tag::test[]
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
class SampleWebClientTests {

	// TestRestTemplate is auto-configured to target the random local port.
	@Autowired
	private TestRestTemplate template;

	@Test
	void testRequest() {
		// Issue a real HTTP request and inspect the response headers.
		HttpHeaders headers = this.template.getForEntity("/example", String.class).getHeaders();
		// The Location header must point at a host on other.example.com.
		assertThat(headers.getLocation()).hasHost("other.example.com");
	}

	@TestConfiguration(proxyBeanMethods = false)
	static class Config {

		// Short connect/read timeouts keep the test fast if the server hangs.
		@Bean
		RestTemplateBuilder restTemplateBuilder() {
			return new RestTemplateBuilder().setConnectTimeout(Duration.ofSeconds(1))
					.setReadTimeout(Duration.ofSeconds(1));
		}

	}

}
// end::test[]
| apache-2.0 |
jinglining/flink | flink-core/src/main/java/org/apache/flink/core/fs/local/LocalRecoverableWriter.java | 4482 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.core.fs.local;
import org.apache.flink.annotation.Internal;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.core.fs.Path;
import org.apache.flink.core.fs.RecoverableFsDataOutputStream;
import org.apache.flink.core.fs.RecoverableFsDataOutputStream.Committer;
import org.apache.flink.core.fs.RecoverableWriter;
import org.apache.flink.core.io.SimpleVersionedSerializer;
import java.io.File;
import java.io.IOException;
import java.util.UUID;
import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkNotNull;
/**
 * A {@link RecoverableWriter} for the {@link LocalFileSystem}.
 *
 * <p>Data is written to a hidden ".&lt;name&gt;.inprogress.&lt;uuid&gt;" staging file next to
 * the target and only moved into place on commit.
 */
@Internal
public class LocalRecoverableWriter implements RecoverableWriter {

	private final LocalFileSystem fs;

	public LocalRecoverableWriter(LocalFileSystem fs) {
		this.fs = checkNotNull(fs);
	}

	@Override
	public RecoverableFsDataOutputStream open(Path filePath) throws IOException {
		final File targetFile = fs.pathToFile(filePath);
		final File tempFile = generateStagingTempFilePath(targetFile);
		// try to create the parent; mkdirs() may race with a concurrent creator,
		// so a false return is only an error if the directory still does not exist
		final File parent = tempFile.getParentFile();
		if (parent != null && !parent.mkdirs() && !parent.exists()) {
			throw new IOException("Failed to create the parent directory: " + parent);
		}
		return new LocalRecoverableFsDataOutputStream(targetFile, tempFile);
	}

	@Override
	public RecoverableFsDataOutputStream recover(ResumeRecoverable recoverable) throws IOException {
		if (recoverable instanceof LocalRecoverable) {
			return new LocalRecoverableFsDataOutputStream((LocalRecoverable) recoverable);
		}
		else {
			throw new IllegalArgumentException(
					"LocalFileSystem cannot recover recoverable for other file system: " + recoverable);
		}
	}

	@Override
	public boolean requiresCleanupOfRecoverableState() {
		// nothing to clean up: resuming reuses the same staging file
		return false;
	}

	@Override
	public boolean cleanupRecoverableState(ResumeRecoverable resumable) throws IOException {
		return false;
	}

	@Override
	public Committer recoverForCommit(CommitRecoverable recoverable) throws IOException {
		if (recoverable instanceof LocalRecoverable) {
			return new LocalRecoverableFsDataOutputStream.LocalCommitter((LocalRecoverable) recoverable);
		}
		else {
			throw new IllegalArgumentException(
					"LocalFileSystem cannot recover recoverable for other file system: " + recoverable);
		}
	}

	@Override
	public SimpleVersionedSerializer<CommitRecoverable> getCommitRecoverableSerializer() {
		// double cast through the wildcard type is the standard way to narrow the
		// shared LocalRecoverableSerializer instance to the requested interface type
		@SuppressWarnings("unchecked")
		SimpleVersionedSerializer<CommitRecoverable> typedSerializer = (SimpleVersionedSerializer<CommitRecoverable>)
				(SimpleVersionedSerializer<?>) LocalRecoverableSerializer.INSTANCE;
		return typedSerializer;
	}

	@Override
	public SimpleVersionedSerializer<ResumeRecoverable> getResumeRecoverableSerializer() {
		@SuppressWarnings("unchecked")
		SimpleVersionedSerializer<ResumeRecoverable> typedSerializer = (SimpleVersionedSerializer<ResumeRecoverable>)
				(SimpleVersionedSerializer<?>) LocalRecoverableSerializer.INSTANCE;
		return typedSerializer;
	}

	@Override
	public boolean supportsResume() {
		return true;
	}

	/**
	 * Generates a staging file path of the form ".&lt;name&gt;.inprogress.&lt;uuid&gt;" in the
	 * target file's directory, retrying with fresh UUIDs until an unused name is found.
	 *
	 * @param targetFile the final destination file; must not be a directory or the filesystem root
	 */
	@VisibleForTesting
	static File generateStagingTempFilePath(File targetFile) {
		checkArgument(!targetFile.isDirectory(), "targetFile must not be a directory");
		final File parent = targetFile.getParentFile();
		final String name = targetFile.getName();
		checkArgument(parent != null, "targetFile must not be the root directory");
		while (true) {
			// redundant .toString() removed; string concatenation converts the UUID itself
			File candidate = new File(parent, "." + name + ".inprogress." + UUID.randomUUID());
			if (!candidate.exists()) {
				return candidate;
			}
		}
	}
}
| apache-2.0 |
jabbrwcky/selenium | java/server/src/org/openqa/grid/internal/ProxySet.java | 5514 | // Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.grid.internal;
import net.jcip.annotations.ThreadSafe;
import org.openqa.grid.common.exception.CapabilityNotPresentOnTheGridException;
import org.openqa.grid.common.exception.GridException;
import org.openqa.selenium.remote.DesiredCapabilities;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.logging.Logger;
import java.util.stream.Collectors;
/**
 * A set of RemoteProxies.
 *
 * Obeys the iteration guarantees of CopyOnWriteArraySet
 */
@ThreadSafe
public class ProxySet implements Iterable<RemoteProxy> {

  private final Set<RemoteProxy> proxies = new CopyOnWriteArraySet<>();

  private static final Logger log = Logger.getLogger(ProxySet.class.getName());

  private volatile boolean throwOnCapabilityNotPresent = true;

  /**
   * Orders proxies by resource usage ascending (least loaded first), breaking ties
   * by last session start time ascending (least recently used first).
   */
  private final Comparator<RemoteProxy> proxyComparator =
      Comparator.comparingDouble(RemoteProxy::getResourceUsageInPercent)
          .thenComparingLong(RemoteProxy::getLastSessionStart);

  public ProxySet(boolean throwOnCapabilityNotPresent) {
    this.throwOnCapabilityNotPresent = throwOnCapabilityNotPresent;
  }

  /**
   * killing the timeout detection threads.
   */
  public void teardown() {
    proxies.forEach(RemoteProxy::teardown);
  }

  /**
   * @return true if at least one proxy in the set can satisfy the requested capability.
   */
  public boolean hasCapability(Map<String, Object> requestedCapability) {
    return proxies.stream().anyMatch(remoteProxy -> remoteProxy.hasCapability(requestedCapability));
  }

  /**
   * Removes the specified instance from the proxySet
   * @param proxy The proxy to remove, must be present in this set
   * @return The instance that was removed. Not null.
   */
  public RemoteProxy remove(RemoteProxy proxy) {
    // Find the original proxy. While the supplied one is logically equivalent, it may be a fresh object with
    // an empty TestSlot list, which doesn't figure into the proxy equivalence check. Since we want to free up
    // those test sessions, we need to operate on that original object.
    for (RemoteProxy p : proxies) {
      if (p.equals(proxy)) {
        proxies.remove(p);
        return p;
      }
    }
    // Message fix: the original lacked a space between the text and the proxy description.
    throw new IllegalStateException("Did not contain proxy " + proxy);
  }

  public void add(RemoteProxy proxy) {
    proxies.add(proxy);
  }

  public boolean contains(RemoteProxy o) {
    return proxies.contains(o);
  }

  public List<RemoteProxy> getBusyProxies() {
    return proxies.stream().filter(RemoteProxy::isBusy).collect(Collectors.toList());
  }

  /**
   * @return the proxy with the given id, or null if no such proxy is registered.
   */
  public RemoteProxy getProxyById(String id) {
    return proxies.stream().filter((proxy) -> proxy.getId().equals(id)).findFirst().orElse(null);
  }

  public boolean isEmpty() {
    return proxies.isEmpty();
  }

  /**
   * @return a snapshot of the proxies ordered by {@link #proxyComparator}.
   */
  public List<RemoteProxy> getSorted() {
    List<RemoteProxy> sorted = new ArrayList<>(proxies);
    sorted.sort(proxyComparator);
    return sorted;
  }

  public TestSession getNewSession(Map<String, Object> desiredCapabilities) {
    // sort the proxies first, by default by total number of
    // test running, to avoid putting all the load of the first
    // proxies.
    List<RemoteProxy> sorted = getSorted();
    log.fine("Available nodes: " + sorted);

    return sorted.stream()
        .map(proxy -> proxy.getNewSession(desiredCapabilities))
        .filter(Objects::nonNull)
        .findFirst().orElse(null);
  }

  @Override
  public Iterator<RemoteProxy> iterator() {
    return proxies.iterator();
  }

  public int size() {
    return proxies.size();
  }

  /**
   * Checks that the grid can, at least in principle, serve the desired capabilities.
   * Depending on throwOnCapabilityNotPresent this either throws or only logs a warning.
   */
  public void verifyAbilityToHandleDesiredCapabilities(Map<String, Object> desiredCapabilities) {
    if (proxies.isEmpty()) {
      if (throwOnCapabilityNotPresent) {
        throw new GridException("Empty pool of VM for setup "
                                + new DesiredCapabilities(desiredCapabilities));
      }
      log.warning("Empty pool of nodes.");
    }
    if (!hasCapability(desiredCapabilities)) {
      if (throwOnCapabilityNotPresent) {
        throw new CapabilityNotPresentOnTheGridException(desiredCapabilities);
      }
      log.warning("grid doesn't contain " + new DesiredCapabilities(desiredCapabilities) +
                  " at the moment.");
    }
  }

  public void setThrowOnCapabilityNotPresent(boolean throwOnCapabilityNotPresent) {
    this.throwOnCapabilityNotPresent = throwOnCapabilityNotPresent;
  }
}
| apache-2.0 |
medicayun/medicayundicom | dcm4che14/tags/DCM4JBOSS_2_5_3/src/java/org/dcm4che/image/PixelDataWriter.java | 1775 | package org.dcm4che.image;
import java.io.IOException;
import java.nio.ByteBuffer;
import javax.imageio.stream.ImageOutputStream;
/**
 * Writer side of the pixel-data abstraction: serializes the sample data described
 * by a {@link PixelDataDescription} either into a {@code ByteBuffer} or onto an
 * {@code ImageOutputStream}.
 */
public interface PixelDataWriter
{
    /**
     * Retrieves the <code>PixelDataDescription</code> this writer was created with.
     * @return PixelDataDescription
     */
    public PixelDataDescription getPixelDataDescription();
    /**
     * Writes the Pixel Data to a <code>ByteBuffer</code>. Please note that the
     * implementation should not try to evaluate the actual sample values, to
     * see if they are in the proper range. They are assumed to be correct.
     * <p>
     * Whether overlay data embedded in the sample array is written is determined
     * by the state this writer was initialized with; there is no per-call switch.
     * (The previous javadoc documented a <code>writeOverlayData</code> parameter
     * that this method does not declare.)
     * @return A <code>ByteBuffer</code> with the written Pixel Data stream.
     */
    public ByteBuffer writePixelDataToByteBuffer();
    /**
     * Writes the Pixel Data to an <code>ImageOutputStream</code>. Please note
     * that the implementation should not try to evaluate the actual sample
     * values, to see if they are in the proper range. They are assumed to be
     * correct.
     * @throws IOException On I/O error
     */
    public void writePixelData()
        throws IOException;
    /**
     * Writes the Pixel Data to an <code>ImageOutputStream</code>. Please note
     * that the implementation should not try to evaluate the actual sample
     * values, to see if they are in the proper range. They are assumed to be
     * correct.
     * @param out An alternative <code>ImageOutputStream</code> to write to.
     * @throws IOException On I/O error
     */
    public void writePixelData(ImageOutputStream out)
        throws IOException;
}
| apache-2.0 |
charles-cooper/idylfin | src/com/opengamma/analytics/math/surface/ObjectsSurface.java | 3327 | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math.surface;
import java.util.Arrays;
import org.apache.commons.lang.Validate;
/**
 * Parent class for a family of surfaces that have <i>x</i>, <i>y</i> and <i>z</i> values of any type.
 * The input arrays are defensively copied on construction.
 * @param <T> Type of the x-axis values
 * @param <U> Type of the y-axis values
 * @param <V> Type of the z-axis values
 */
public abstract class ObjectsSurface<T, U, V> extends Surface<T, U, V> {

  private final int _n;
  private final T[] _xData;
  private final U[] _yData;
  private final V[] _zData;

  /**
   * @param xData An array of <i>x</i> data, not null, no null elements.
   * @param yData An array of <i>y</i> data, not null, no null elements. Must be the same length as the <i>x</i> data.
   * @param zData An array of <i>z</i> data, not null, no null elements. Must be the same length as the <i>x</i> data.
   */
  public ObjectsSurface(final T[] xData, final U[] yData, final V[] zData) {
    super();
    validateData(xData, yData, zData);
    _n = xData.length;
    _xData = Arrays.copyOf(xData, _n);
    _yData = Arrays.copyOf(yData, _n);
    _zData = Arrays.copyOf(zData, _n);
  }

  /**
   * @param xData An array of <i>x</i> data, not null
   * @param yData An array of <i>y</i> data, not null
   * @param zData An array of <i>z</i> data, not null
   * @param name The surface name
   */
  public ObjectsSurface(final T[] xData, final U[] yData, final V[] zData, final String name) {
    super(name);
    validateData(xData, yData, zData);
    _n = xData.length;
    _xData = Arrays.copyOf(xData, _n);
    _yData = Arrays.copyOf(yData, _n);
    _zData = Arrays.copyOf(zData, _n);
  }

  /**
   * Shared constructor validation: all arrays non-null and of equal length.
   * Extracted so the two constructors cannot drift apart; error messages added
   * to the length checks for better diagnostics.
   */
  private static void validateData(final Object[] xData, final Object[] yData, final Object[] zData) {
    Validate.notNull(xData, "x data");
    Validate.notNull(yData, "y data");
    Validate.notNull(zData, "z data");
    Validate.isTrue(xData.length == yData.length, "x and y data must be the same length");
    Validate.isTrue(xData.length == zData.length, "x and z data must be the same length");
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public T[] getXData() {
    return _xData;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public U[] getYData() {
    return _yData;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public V[] getZData() {
    return _zData;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public int size() {
    return _n;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = super.hashCode();
    result = prime * result + Arrays.hashCode(_xData);
    result = prime * result + Arrays.hashCode(_yData);
    result = prime * result + Arrays.hashCode(_zData);
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (!super.equals(obj)) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final ObjectsSurface<?, ?, ?> other = (ObjectsSurface<?, ?, ?>) obj;
    if (!Arrays.equals(_xData, other._xData)) {
      return false;
    }
    if (!Arrays.equals(_yData, other._yData)) {
      return false;
    }
    if (!Arrays.equals(_zData, other._zData)) {
      return false;
    }
    return true;
  }
}
| apache-2.0 |
AlienQueen/wicket | wicket-spring/src/test/java/org/apache/wicket/spring/FieldBeansCollectorTest.java | 1864 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.spring;
import static org.junit.Assert.assertTrue;
import java.lang.reflect.Field;
import java.util.List;
import org.apache.wicket.spring.FieldBeansCollector.FieldType;
import org.junit.Test;
import org.springframework.core.ResolvableType;
/**
 * Verifies that {@link FieldBeansCollector} classifies list-typed fields as
 * {@link FieldType#LIST}, both for nested-generic and raw list declarations.
 */
public class FieldBeansCollectorTest
{
    // Field with a nested generic type argument.
    public List<List<String>> listOfStringList;

    // Raw (non-parameterized) list field.
    public List genericList;

    @Test
    public void resolveGenericList() throws Exception
    {
        assertCollectedAsList("listOfStringList");
    }

    @Test
    public void resolveRowList() throws Exception
    {
        assertCollectedAsList("genericList");
    }

    /**
     * Asserts that the named public field of this class is collected as a LIST field.
     */
    private void assertCollectedAsList(String fieldName) throws Exception
    {
        Field field = getClass().getField(fieldName);
        ResolvableType resolvableType = ResolvableType.forField(field);
        FieldBeansCollector fieldBeansCollector = new FieldBeansCollector(resolvableType);
        assertTrue(fieldBeansCollector.getFieldType() == FieldType.LIST);
    }
}
| apache-2.0 |
mdecourci/assertj-core | src/test/java/org/assertj/core/api/objectarray/ObjectArrayAssert_haveExactly_Test.java | 1544 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2015 the original author or authors.
*/
package org.assertj.core.api.objectarray;
import static org.mockito.Mockito.verify;
import org.assertj.core.api.Condition;
import org.assertj.core.api.ObjectArrayAssert;
import org.assertj.core.api.ObjectArrayAssertBaseTest;
import org.assertj.core.api.TestCondition;
import org.junit.Before;
/**
 * Tests for <code>{@link ObjectArrayAssert#haveExactly(Condition, int)}</code>:
 * the assertion must delegate to the internal Arrays implementation with the
 * same condition and count.
 *
 * @author Nicolas François
 * @author Mikhail Mazursky
 */
public class ObjectArrayAssert_haveExactly_Test extends ObjectArrayAssertBaseTest {

  private Condition<Object> testCondition;

  @Before
  public void before() {
    testCondition = new TestCondition<>();
  }

  @Override
  protected ObjectArrayAssert<Object> invoke_api_method() {
    // Exercise the API under test: exactly 2 elements must satisfy the condition.
    return assertions.haveExactly(2, testCondition);
  }

  @Override
  protected void verify_internal_effects() {
    // The call must be forwarded verbatim to the internal arrays object.
    verify(arrays).assertHaveExactly(getInfo(assertions), getActual(assertions), 2, testCondition);
  }
}
| apache-2.0 |
llpj/gag | src/com/google/gag/annotation/literary/verse/Haiku.java | 1024 | /**
* Copyright 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.gag.annotation.literary.verse;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
 * Indicates that the annotated code is a haiku. For example:
 *
 * <pre>
 * @Haiku
 * public void checkNodes() {
 * if (tree.hasAnyLeafNodes()) {
 * freshenLeavesOn(tree);
 * }
 * }
 * </pre>
 */
// SOURCE retention: the annotation exists purely for the reader of the source
// and is discarded by the compiler — it never appears in class files.
@Retention(RetentionPolicy.SOURCE)
public @interface Haiku {
}
| apache-2.0 |
jomarko/drools | drools-test-coverage/test-compiler-integration/src/test/java/org/drools/mvel/integrationtests/FireUntilHaltTest.java | 5499 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.mvel.integrationtests;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.drools.mvel.compiler.Cheese;
import org.drools.mvel.compiler.Person;
import org.drools.testcoverage.common.util.KieBaseTestConfiguration;
import org.drools.testcoverage.common.util.KieBaseUtil;
import org.drools.testcoverage.common.util.TestParametersUtil;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.api.KieBase;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.rule.EntryPoint;
import org.kie.api.runtime.rule.FactHandle;
import org.kie.internal.utils.KieHelper;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
// NOTE(review): these tests synchronize via Thread.sleep and therefore assume the
// engine thread makes progress within the sleep windows — potentially flaky under load.
@RunWith(Parameterized.class)
public class FireUntilHaltTest {

    private final KieBaseTestConfiguration kieBaseTestConfiguration;

    public FireUntilHaltTest(final KieBaseTestConfiguration kieBaseTestConfiguration) {
        this.kieBaseTestConfiguration = kieBaseTestConfiguration;
    }

    @Parameterized.Parameters(name = "KieBase type={0}")
    public static Collection<Object[]> getParameters() {
        return TestParametersUtil.getKieBaseCloudConfigurations(true);
    }

    // Verifies that updates applied through ksession.submit() while the engine runs in
    // fireUntilHalt mode only trigger the rule once both conditions (happy, age >= 18) hold.
    @Test
    public void testSubmitOnFireUntilHalt() throws InterruptedException {
        final String drl =
                "import " + Person.class.getCanonicalName() + "\n" +
                "global java.util.List list;" +
                "rule R when\n" +
                " Person( happy, age >= 18 )\n" +
                "then\n" +
                " list.add(\"happy adult\");" +
                "end";

        KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, drl);
        KieSession kSession = kbase.newKieSession();
        final List<String> list = new ArrayList<>();
        kSession.setGlobal("list", list);
        new Thread(kSession::fireUntilHalt).start();

        // 17 and happy: age condition fails, rule must not fire.
        final Person p = new Person("me", 17, true);
        final FactHandle fh = kSession.insert(p);
        Thread.sleep(100L);
        assertEquals(0, list.size());

        // 18 but unhappy: still must not fire.
        kSession.submit(kieSession -> {
            p.setAge(18);
            p.setHappy(false);
            kieSession.update(fh, p);
        });
        Thread.sleep(100L);
        assertEquals(0, list.size());

        // 18 and happy: rule fires exactly once.
        kSession.submit(kieSession -> {
            p.setHappy(true);
            kieSession.update(fh, p);
        });
        Thread.sleep(100L);
        assertEquals(1, list.size());
        kSession.halt();
        kSession.dispose();
    }

    // Verifies that fireAllRules can complete while another thread runs fireUntilHalt,
    // and that halt() lets the fireUntilHalt thread terminate.
    @Test
    public void testFireAllWhenFiringUntilHalt() throws InterruptedException {
        KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration); // empty
        KieSession ksession = kbase.newKieSession();

        final Thread t1 = new Thread(ksession::fireUntilHalt);
        final Thread t2 = new Thread(ksession::fireAllRules);
        t1.start();
        Thread.sleep(500);
        t2.start();
        // give the chance for t2 to finish
        Thread.sleep(1000);
        final boolean aliveT2 = t2.isAlive();
        ksession.halt();
        Thread.sleep(1000);
        final boolean aliveT1 = t1.isAlive();

        // Interrupt leaked threads so a failure does not hang the suite.
        if (t2.isAlive()) {
            t2.interrupt();
        }
        if (t1.isAlive()) {
            t1.interrupt();
        }
        assertFalse("T2 should have finished", aliveT2);
        assertFalse("T1 should have finished", aliveT1);
    }

    // Verifies that a fact inserted into a named entry point while fireUntilHalt is
    // running on another thread still joins with facts from the default entry point.
    @Test
    public void testFireUntilHaltFailingAcrossEntryPoints() throws Exception {
        String rule1 = "package org.drools.mvel.compiler\n";
        rule1 += "global java.util.List list\n";
        rule1 += "rule testFireUntilHalt\n";
        rule1 += "when\n";
        rule1 += " Cheese()\n";
        rule1 += " $p : Person() from entry-point \"testep2\"\n";
        rule1 += "then \n";
        rule1 += " list.add( $p ) ;\n";
        rule1 += "end\n";

        KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("test", kieBaseTestConfiguration, rule1);
        KieSession ksession = kbase.newKieSession();
        final EntryPoint ep = ksession.getEntryPoint("testep2");

        final List list = new ArrayList();
        ksession.setGlobal("list", list);

        ksession.insert(new Cheese("cheddar"));
        ksession.fireAllRules();

        final Thread t1 = new Thread(ksession::fireUntilHalt);
        t1.start();

        Thread.sleep(500);
        ep.insert(new Person("darth"));
        Thread.sleep(500);
        ksession.halt();
        t1.join(5000);
        final boolean alive = t1.isAlive();
        if (alive) {
            t1.interrupt();
        }
        assertFalse("Thread should have died!", alive);
        assertEquals(1, list.size());
    }
}
| apache-2.0 |
bibhrajit/openxc-androidStudio | openxc/tests/com/openxc/UnitsTest.java | 355 | package com.openxc;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import org.junit.Test;
import com.openxc.units.Meter;
/** Verifies that unit wrappers expose their magnitude through {@code Number#doubleValue()}. */
public class UnitsTest {
    @Test
    public void testComparableToDouble() {
        final Meter tenMeters = new Meter(10);
        assertThat(tenMeters.doubleValue(), equalTo(10.0));
    }
}
| bsd-3-clause |
bestwpw/hashtag-view | sample/src/main/java/com/greenfrvr/hashtagview/sample/fragments/BaseFragment.java | 1999 | package com.greenfrvr.hashtagview.sample.fragments;
import android.app.Activity;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.view.View;
import com.greenfrvr.hashtagview.sample.R;
import java.util.Arrays;
import java.util.List;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Created by greenfrvr
 *
 * Base class for the sample pager fragments: supplies the demo data sets, performs
 * ButterKnife view binding/unbinding, and forwards prev/next button clicks to the
 * host activity through the {@link PagerListener} callback.
 */
public abstract class BaseFragment extends Fragment {
// Demo tag collections rendered as hashtags by the concrete fragments.
public static final List<String> DATA = Arrays.asList("android", "library", "collection",
"hashtags", "min14sdk", "UI", "view", "github", "opensource", "project", "widget");
public static final List<String> TAGS = Arrays.asList("cupcake", "donut", "eclair", "froyo",
"gingerbread", "honeycomb", "icecreamsandwich", "jellybean", "kitkat", "lollipop", "marshmallow");
public static final List<String> PEOPLE = Arrays.asList("wolverine", "jubilee", "colossus",
"beast", "rogue", "storm", "cyclops", "iceman", "magma", "emmafrost", "angel");
// Host activity callback; non-null only while the fragment is attached.
private PagerListener listener;
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
// The host activity must implement PagerListener; a ClassCastException here
// signals a wiring error in the sample app.
listener = (PagerListener) activity;
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
// Bind the @OnClick-annotated members of the concrete subclass to the created view.
ButterKnife.bind(this, view);
}
@Override
public void onDestroyView() {
super.onDestroyView();
// Release bound view references to avoid leaking the destroyed view hierarchy.
ButterKnife.unbind(this);
}
@Override
public void onDetach() {
super.onDetach();
listener = null;
}
// Accessor for subclasses; returns null when the fragment is detached.
protected PagerListener getPagerListener() {
return listener;
}
@OnClick(R.id.prev_button)
void previousButtonClicked() {
listener.onPreviousClicked();
}
@OnClick(R.id.next_button)
void nextButtonClicked() {
listener.onNextClicked();
}
/** Implemented by the host activity to react to page navigation clicks. */
public interface PagerListener {
void onPreviousClicked();
void onNextClicked();
}
}
| mit |
mandeepdhami/controller | opendaylight/md-sal/sal-restconf-broker/src/main/java/org/opendaylight/controller/sal/restconf/broker/impl/DataBrokerServiceImpl.java | 6300 | /*
* Copyright (c) 2013 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.sal.restconf.broker.impl;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import org.opendaylight.controller.sal.binding.api.data.DataBrokerService;
import org.opendaylight.controller.sal.binding.api.data.DataChangeListener;
import org.opendaylight.controller.sal.binding.api.data.DataModificationTransaction;
import org.opendaylight.controller.sal.restconf.broker.listeners.RemoteDataChangeNotificationListener;
import org.opendaylight.controller.sal.restconf.broker.tools.RemoteStreamTools;
import org.opendaylight.controller.sal.restconf.broker.transactions.RemoteDataModificationTransaction;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.remote.rev140114.BeginTransactionOutput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.remote.rev140114.CreateDataChangeEventSubscriptionInputBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.remote.rev140114.CreateDataChangeEventSubscriptionOutput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.controller.md.sal.remote.rev140114.SalRemoteService;
import org.opendaylight.yangtools.concepts.AbstractListenerRegistration;
import org.opendaylight.yangtools.concepts.ListenerRegistration;
import org.opendaylight.yangtools.restconf.client.api.RestconfClientContext;
import org.opendaylight.yangtools.restconf.client.api.event.EventStreamInfo;
import org.opendaylight.yangtools.restconf.client.api.event.ListenableEventStreamContext;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
/**
 * {@link DataBrokerService} implementation that delegates reads, transactions and data
 * change subscriptions to a remote controller through the RESTCONF client API.
 */
public class DataBrokerServiceImpl implements DataBrokerService {

    // Fixed: the logger was previously built from class.toString(), yielding the
    // non-standard logger name "class ...DataBrokerServiceImpl".
    private static final Logger logger = LoggerFactory.getLogger(DataBrokerServiceImpl.class);

    private final RestconfClientContext restconfClientContext;
    private final SalRemoteService salRemoteService;

    public DataBrokerServiceImpl(RestconfClientContext restconfClientContext) {
        this.restconfClientContext = restconfClientContext;
        this.salRemoteService = this.restconfClientContext.getRpcServiceContext(SalRemoteService.class).getRpcService();
    }

    @Override
    public DataModificationTransaction beginTransaction() {
        Future<RpcResult<BeginTransactionOutput>> rpcResultFuture = this.salRemoteService.beginTransaction();
        //TODO finish yang model for proper remoteDataModificationTransaction setup
        RemoteDataModificationTransaction remoteDataModificationTransaction = new RemoteDataModificationTransaction();
        return remoteDataModificationTransaction;
    }

    @Override
    public DataObject readConfigurationData(InstanceIdentifier<? extends DataObject> path) {
        return awaitReadResult(this.restconfClientContext.getConfigurationDatastore().readData(path), "configuration");
    }

    @Override
    public DataObject readOperationalData(InstanceIdentifier<? extends DataObject> path) {
        // Fixed: the operational read previously logged "Reading configuration ..."
        // messages copied from readConfigurationData.
        return awaitReadResult(this.restconfClientContext.getOperationalDatastore().readData(path), "operational");
    }

    /**
     * Blocks until the given read future completes and unwraps its optional result.
     *
     * @param readFuture future produced by a datastore read; expected to yield an
     *                   {@code Optional<DataObject>}
     * @param datastoreName datastore label ("configuration" or "operational") used in log output
     * @return the present data object
     * @throws IllegalStateException when no data is available or the read fails
     */
    private DataObject awaitReadResult(Future<?> readFuture, String datastoreName) {
        try {
            @SuppressWarnings("unchecked")
            Optional<DataObject> optDataObject = (Optional<DataObject>) readFuture.get();
            if (optDataObject.isPresent()) {
                return optDataObject.get();
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag so callers can observe the interruption.
            Thread.currentThread().interrupt();
            logger.trace("Reading {} data interrupted {}", datastoreName, e);
        } catch (ExecutionException e) {
            logger.trace("Reading {} data execution exception {}", datastoreName, e);
        }
        throw new IllegalStateException("No data to return.");
    }

    @Override
    public ListenerRegistration<DataChangeListener> registerDataChangeListener(InstanceIdentifier<? extends DataObject> path, DataChangeListener listener) {
        CreateDataChangeEventSubscriptionInputBuilder inputBuilder = new CreateDataChangeEventSubscriptionInputBuilder();
        Future<RpcResult<CreateDataChangeEventSubscriptionOutput>> rpcResultFuture = salRemoteService.createDataChangeEventSubscription(inputBuilder.setPath(path).build());
        String streamName = "";
        try {
            // Await the RPC once instead of calling get() twice as before.
            RpcResult<CreateDataChangeEventSubscriptionOutput> rpcResult = rpcResultFuture.get();
            if (rpcResult.isSuccessful()) {
                streamName = rpcResult.getResult().getStreamName();
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            logger.trace("Interrupted while getting rpc result due to {}", e);
        } catch (ExecutionException e) {
            logger.trace("Execution exception while getting rpc result due to {}", e);
        }
        final Map<String, EventStreamInfo> desiredEventStream = RemoteStreamTools.createEventStream(restconfClientContext, streamName);
        ListenableEventStreamContext restConfListenableEventStreamContext = restconfClientContext.getEventStreamContext(desiredEventStream.get(streamName));
        RemoteDataChangeNotificationListener remoteDataChangeNotificationListener = new RemoteDataChangeNotificationListener(listener);
        final ListenerRegistration<?> reg = restConfListenableEventStreamContext.registerNotificationListener(remoteDataChangeNotificationListener);
        return new AbstractListenerRegistration<DataChangeListener>(listener) {
            @Override
            protected void removeRegistration() {
                reg.close();
            }
        };
    }
}
| epl-1.0 |
Snickermicker/smarthome | bundles/core/org.eclipse.smarthome.core.semantics/src/main/java/org/eclipse/smarthome/core/semantics/model/property/Temperature.java | 879 | /**
* Copyright (c) 2014,2019 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.eclipse.smarthome.core.semantics.model.property;
import org.eclipse.smarthome.core.semantics.model.Property;
import org.eclipse.smarthome.core.semantics.model.TagInfo;
/**
 * Semantic {@link Property} tag representing a Temperature.
 *
 * NOTE(review): this file appears to be generated (see author line); regenerate via
 * generateTagClasses.groovy rather than hand-editing the annotation values.
 *
 * @author Generated from generateTagClasses.groovy - Initial contribution
 *
 */
@TagInfo(id = "Property_Temperature", label = "Temperature", synonyms = "Temperatures", description = "")
public interface Temperature extends Property {
}
| epl-1.0 |
gavinying/kura | kura/org.eclipse.kura.linux.bluetooth/src/main/java/org/eclipse/kura/linux/bluetooth/le/BluetoothGattCharacteristicImpl.java | 2220 | /*******************************************************************************
* Copyright (c) 2011, 2016 Eurotech and/or its affiliates
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eurotech
*******************************************************************************/
package org.eclipse.kura.linux.bluetooth.le;
import java.util.UUID;
import org.eclipse.kura.bluetooth.BluetoothGattCharacteristic;
/**
 * {@link BluetoothGattCharacteristic} backed by the textual attribute values reported
 * for a characteristic declaration: UUID, declaration handle, property bit mask and
 * value handle. Value read/write and permissions are not supported here.
 */
public class BluetoothGattCharacteristicImpl implements BluetoothGattCharacteristic {

    // Hungarian m_ prefixes removed from the private members; the public API is unchanged.
    private final UUID uuid;
    private String handle;
    private int properties;
    private String valueHandle;

    /**
     * @param uuid characteristic UUID in canonical string form
     * @param handle attribute handle of the characteristic declaration
     * @param properties property bit mask as a hexadecimal string prefixed with "0x"
     * @param valueHandle handle of the attribute that holds the characteristic value
     */
    public BluetoothGattCharacteristicImpl(String uuid, String handle, String properties, String valueHandle) {
        this.uuid = UUID.fromString(uuid);
        setHandle(handle);
        // Skip the leading "0x" before parsing the hexadecimal property mask.
        setProperties(Integer.parseInt(properties.substring(2), 16));
        setValueHandle(valueHandle);
    }

    // --------------------------------------------------------------------
    //
    // BluetoothGattCharacteristic API
    //
    // --------------------------------------------------------------------

    @Override
    public UUID getUuid() {
        return this.uuid;
    }

    /** Always returns null: reading the cached value is not supported by this implementation. */
    @Override
    public Object getValue() {
        return null;
    }

    /** No-op: storing a value is not supported by this implementation. */
    @Override
    public void setValue(Object value) {
    }

    /** Always returns 0: permissions are not tracked by this implementation. */
    @Override
    public int getPermissions() {
        return 0;
    }

    public void setHandle(String handle) {
        this.handle = handle;
    }

    public void setProperties(int properties) {
        this.properties = properties;
    }

    public void setValueHandle(String valueHandle) {
        this.valueHandle = valueHandle;
    }

    @Override
    public String getHandle() {
        return this.handle;
    }

    @Override
    public int getProperties() {
        return this.properties;
    }

    @Override
    public String getValueHandle() {
        return this.valueHandle;
    }
}
| epl-1.0 |
greenlion/mysql-server | storage/ndb/clusterj/clusterj-openjpa/src/test/java/com/mysql/clusterj/openjpatest/DateAsUtilDateTest.java | 1322 | /*
Copyright 2010 Sun Microsystems, Inc.
All rights reserved. Use is subject to license terms.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License, version 2.0,
as published by the Free Software Foundation.
This program is also distributed with certain software (including
but not limited to OpenSSL) that is licensed under separate terms,
as designated in a particular file or component or in included license
documentation. The authors of MySQL hereby grant you an additional
permission to link the program and your derivative works with the
separately licensed software that they have included with MySQL.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License, version 2.0, for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
package com.mysql.clusterj.openjpatest;
public class DateAsUtilDateTest extends com.mysql.clusterj.jpatest.DateAsUtilDateTest {
}
| gpl-2.0 |
jrochas/scale-proactive | src/Core/org/objectweb/proactive/core/event/RuntimeRegistrationEventProducer.java | 2041 | /*
* ################################################################
*
* ProActive Parallel Suite(TM): The Java(TM) library for
* Parallel, Distributed, Multi-Core Computing for
* Enterprise Grids & Clouds
*
* Copyright (C) 1997-2012 INRIA/University of
* Nice-Sophia Antipolis/ActiveEon
* Contact: proactive@ow2.org or contact@activeeon.com
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation; version 3 of
* the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
* USA
*
* If needed, contact us to obtain a release under GPL Version 2 or 3
* or a different license than the AGPL.
*
* Initial developer(s): The ProActive Team
* http://proactive.inria.fr/team_members.htm
* Contributor(s):
*
* ################################################################
* $$PROACTIVE_INITIAL_DEV$$
*/
package org.objectweb.proactive.core.event;
/**
 * Implemented by runtimes that emit {@code RuntimeRegistrationEvent}s when remote
 * runtimes register with them.
 */
public interface RuntimeRegistrationEventProducer {
/**
 * Adds a listener of RuntimeRegistrationEvent. The listener will receive an event
 * each time a remote runtime registers on this runtime.
 * @param listener the listener to add
 */
public void addRuntimeRegistrationEventListener(RuntimeRegistrationEventListener listener);
/**
 * Removes the RuntimeRegistrationEventListener; it will no longer be notified of
 * registrations.
 * @param listener the listener to remove
 */
public void removeRuntimeRegistrationEventListener(RuntimeRegistrationEventListener listener);
}
| agpl-3.0 |
imam-san/jPOS-1 | jpos/src/test/java/org/jpos/iso/IF_NOPTest.java | 1363 | /*
* jPOS Project [http://jpos.org]
* Copyright (C) 2000-2015 Alejandro P. Revilla
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.jpos.iso;
import junit.framework.TestCase;
/**
 * Unit tests for {@code IF_NOP}, the packager that intentionally packs and unpacks nothing.
 *
 * @author joconnor
 */
public class IF_NOPTest extends TestCase
{
    /** Packing any field must produce an empty byte array. */
    public void testPack() throws Exception
    {
        ISOField field = new ISOField(12, "ABCD");
        IF_NOP packager = new IF_NOP();
        // assertEquals reports the actual length on failure, unlike the previous bare assertTrue.
        assertEquals(0, packager.pack(field).length);
    }

    /** Unpacking must consume zero bytes and leave the field value unset. */
    public void testUnpack() throws Exception
    {
        byte[] raw = new byte[]{};
        IF_NOP packager = new IF_NOP();
        ISOField field = new ISOField(12);
        assertEquals(0, packager.unpack(field, raw, 0));
        assertNull(field.getValue());
    }
}
| agpl-3.0 |
imam-san/jPOS-1 | jpos/src/main/java/org/jpos/tlv/packager/IFTA_LLLCHAR.java | 3796 | /*
* jPOS Project [http://jpos.org]
* Copyright (C) 2000-2015 Alejandro P. Revilla
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.jpos.tlv.packager;
import org.jpos.iso.IFA_LLLCHAR;
import org.jpos.iso.IF_CHAR;
import org.jpos.iso.ISOComponent;
import org.jpos.iso.ISOException;
import org.jpos.iso.ISOField;
import org.jpos.iso.ISOFieldPackager;
import org.jpos.iso.LeftPadder;
import org.jpos.iso.TaggedFieldPackager;
import org.jpos.tlv.ISOTaggedField;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectOutput;
/**
 * Tagged variant of {@link IFA_LLLCHAR}: packs a fixed-width ASCII tag in front of the
 * LLL-prefixed character field and strips it again on unpack. The tag width is fixed
 * by the length of the configured token.
 *
 * @author Vishnu Pillai
 */
public class IFTA_LLLCHAR extends IFA_LLLCHAR implements TaggedFieldPackager {
// Tag literal configured for this field; must be set before pack/unpack is used.
private String token;
// Packager for the tag portion, left-padded with zeros to the token length.
private IF_CHAR tagPackager;
public IFTA_LLLCHAR() {
super();
}
public IFTA_LLLCHAR(int len, String description) {
super(len, description);
}
/** Wraps the plain field component in an {@link ISOTaggedField} carrying this packager's token. */
@Override
public ISOComponent createComponent(int fieldNumber) {
return new ISOTaggedField(getToken(), super.createComponent(fieldNumber));
}
/** Stores the tag literal and rebuilds the tag sub-packager sized to the token length. */
@Override
public void setToken(String token) {
this.token = token;
tagPackager = new IF_CHAR(token.length(), "Tag");
tagPackager.setPadder(LeftPadder.ZERO_PADDER);
}
@Override
public String getToken() {
return token;
}
protected ISOFieldPackager getTagPackager() {
return tagPackager;
}
// Packs only the tag of the given tagged component into its own byte array.
protected byte[] packTag(ISOComponent c) throws ISOException {
return getTagPackager().pack(new ISOField((Integer) c.getKey(), ((ISOTaggedField) c).getTag()));
}
// Reads the tag at the given offset, stores it on the component, returns bytes consumed.
protected int unpackTag(ISOComponent c, byte[] tagBytes, int offset) throws ISOException {
ISOField tagField = new ISOField((Integer) c.getKey());
int consumed = getTagPackager().unpack(tagField, tagBytes, offset);
((ISOTaggedField) c).setTag(tagField.getValue().toString());
return consumed;
}
// Stream variant of unpackTag: consumes the tag bytes directly from the input stream.
protected void unpackTag(ISOComponent c, InputStream in) throws ISOException, IOException {
ISOField tagField = new ISOField((Integer) c.getKey());
getTagPackager().unpack(tagField, in);
((ISOTaggedField) c).setTag(tagField.getValue().toString());
}
/** Wire layout: tag bytes first, then the LLL-prefixed field produced by the superclass. */
@Override
public byte[] pack(ISOComponent c) throws ISOException {
byte[] tagBytes = packTag(c);
byte[] message = super.pack(c);
byte[] b = new byte[tagBytes.length + message.length];
System.arraycopy(tagBytes, 0, b, 0, tagBytes.length);
System.arraycopy(message, 0, b, tagBytes.length, message.length);
return b;
}
/** Consumes the tag first, then delegates the remaining bytes to the superclass. */
@Override
public int unpack(ISOComponent c, byte[] b, int offset) throws ISOException {
int consumed = unpackTag(c, b, offset);
return consumed + super.unpack(c, b, offset + consumed);
}
@Override
public void unpack(ISOComponent c, InputStream in) throws IOException, ISOException {
unpackTag(c, in);
super.unpack(c, in);
}
@Override
public void pack(ISOComponent c, ObjectOutput out) throws IOException, ISOException {
byte[] tagBytes = packTag(c);
out.write(tagBytes);
super.pack(c, out);
}
}
| agpl-3.0 |
myxyz/zstack | test/src/test/java/org/zstack/test/compute/vm/TestPolicyForVm1.java | 4891 | package org.zstack.test.compute.vm;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.componentloader.ComponentLoader;
import org.zstack.core.db.DatabaseFacade;
import org.zstack.header.configuration.InstanceOfferingInventory;
import org.zstack.header.host.HostInventory;
import org.zstack.header.identity.AccountConstant.StatementEffect;
import org.zstack.header.identity.IdentityErrors;
import org.zstack.header.identity.PolicyInventory.Statement;
import org.zstack.header.identity.SessionInventory;
import org.zstack.header.identity.UserInventory;
import org.zstack.header.image.ImageInventory;
import org.zstack.header.network.l3.L3NetworkInventory;
import org.zstack.header.vm.*;
import org.zstack.test.Api;
import org.zstack.test.ApiSenderException;
import org.zstack.test.DBUtil;
import org.zstack.test.VmCreator;
import org.zstack.test.deployer.Deployer;
import org.zstack.test.identity.IdentityCreator;
/**
 * Identity policy coverage for VM operations:
 * 1. create a user under account "test"
 * 2. attach a policy that only allows APICreateVmInstanceMsg
 *
 * Confirms the user can create a vm but cannot stop/reboot/migrate/destroy it, and
 * that even creation is rejected once the policy is deleted.
 */
public class TestPolicyForVm1 {
    Deployer deployer;
    Api api;
    ComponentLoader loader;
    CloudBus bus;
    DatabaseFacade dbf;

    @Before
    public void setUp() throws Exception {
        DBUtil.reDeployDB();
        deployer = new Deployer("deployerXml/vm/TestPolicyForVm.xml");
        deployer.build();
        api = deployer.getApi();
        loader = deployer.getComponentLoader();
        bus = loader.getComponent(CloudBus.class);
        dbf = loader.getComponent(DatabaseFacade.class);
    }

    @Test
    public void test() throws ApiSenderException, InterruptedException {
        InstanceOfferingInventory instanceOffering = deployer.instanceOfferings.get("TestInstanceOffering");
        ImageInventory image = deployer.images.get("TestImage");
        L3NetworkInventory l3 = deployer.l3Networks.get("TestL3Network1");
        HostInventory host1 = deployer.hosts.get("TestHost1");
        HostInventory host2 = deployer.hosts.get("TestHost2");

        // Create a user whose only permission is APICreateVmInstanceMsg.
        IdentityCreator identity = new IdentityCreator(api);
        identity.useAccount("test");
        UserInventory user = identity.createUser("user", "password");
        Statement stmt = new Statement();
        stmt.setName("allow");
        stmt.setEffect(StatementEffect.Allow);
        stmt.addAction(String.format("%s:%s", VmInstanceConstant.ACTION_CATEGORY, APICreateVmInstanceMsg.class.getSimpleName()));
        identity.createPolicy("allow", stmt);
        identity.attachPolicyToUser("user", "allow");
        SessionInventory session = identity.userLogin(user.getName(), "password");

        // Creation is covered by the policy and must succeed.
        VmCreator creator = new VmCreator(api);
        creator.imageUuid = image.getUuid();
        creator.addL3Network(l3.getUuid());
        creator.instanceOfferingUuid = instanceOffering.getUuid();
        creator.session = session;
        creator.hostUuid = host1.getUuid();
        VmInstanceInventory vm = creator.create();

        // Every other vm operation must be rejected with PERMISSION_DENIED.
        boolean denied = false;
        try {
            api.stopVmInstance(vm.getUuid(), session);
        } catch (ApiSenderException e) {
            denied = IdentityErrors.PERMISSION_DENIED.toString().equals(e.getError().getCode());
        }
        Assert.assertTrue(denied);

        denied = false;
        try {
            api.rebootVmInstance(vm.getUuid(), session);
        } catch (ApiSenderException e) {
            denied = IdentityErrors.PERMISSION_DENIED.toString().equals(e.getError().getCode());
        }
        Assert.assertTrue(denied);

        denied = false;
        try {
            api.migrateVmInstance(vm.getUuid(), host2.getUuid(), session);
        } catch (ApiSenderException e) {
            denied = IdentityErrors.PERMISSION_DENIED.toString().equals(e.getError().getCode());
        }
        Assert.assertTrue(denied);

        denied = false;
        try {
            api.destroyVmInstance(vm.getUuid(), session);
        } catch (ApiSenderException e) {
            denied = IdentityErrors.PERMISSION_DENIED.toString().equals(e.getError().getCode());
        }
        Assert.assertTrue(denied);

        // Once the policy is gone, even vm creation must be denied.
        identity.deletePolicy("allow");
        denied = false;
        try {
            creator.create();
        } catch (ApiSenderException e) {
            denied = IdentityErrors.PERMISSION_DENIED.toString().equals(e.getError().getCode());
        }
        Assert.assertTrue(denied);
    }
}
| apache-2.0 |
pwoodworth/intellij-community | python/educational/course-creator/src/com/jetbrains/edu/coursecreator/actions/oldCourseFormat/OldTask.java | 355 | package com.jetbrains.edu.coursecreator.actions.oldCourseFormat;
import java.util.HashMap;
import java.util.Map;
/**
 * Implementation of task which contains task files, tests, input file for tests
 * (legacy/old course format representation).
 */
public class OldTask {
// Display name of the task.
public String name;
// Task files of this task; presumably keyed by file name/path — confirm against the serializer.
public Map<String, OldTaskFile> taskFiles = new HashMap<String, OldTaskFile>();
// Position of the task within its lesson.
public int myIndex;
}
| apache-2.0 |
ueshin/apache-flink | flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/StandaloneCompletedCheckpointStore.java | 3616 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.checkpoint;
import org.apache.flink.runtime.jobgraph.JobStatus;
import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import static org.apache.flink.util.Preconditions.checkArgument;
/**
 * {@link CompletedCheckpointStore} for JobManagers running in {@link HighAvailabilityMode#NONE}.
 * Keeps completed checkpoints purely in memory, evicting (and discarding) the oldest ones
 * once the configured retention limit is exceeded.
 */
public class StandaloneCompletedCheckpointStore implements CompletedCheckpointStore {

    private static final Logger LOG = LoggerFactory.getLogger(StandaloneCompletedCheckpointStore.class);

    /** Upper bound on the number of retained checkpoints (at least 1). */
    private final int maxNumberOfCheckpointsToRetain;

    /** Retained checkpoints, oldest first. */
    private final ArrayDeque<CompletedCheckpoint> checkpoints;

    /**
     * Creates {@link StandaloneCompletedCheckpointStore}.
     *
     * @param maxNumberOfCheckpointsToRetain The maximum number of checkpoints to retain (at
     *                                       least 1). Adding more checkpoints than this results
     *                                       in older checkpoints being discarded.
     */
    public StandaloneCompletedCheckpointStore(int maxNumberOfCheckpointsToRetain) {
        checkArgument(maxNumberOfCheckpointsToRetain >= 1, "Must retain at least one checkpoint.");
        this.maxNumberOfCheckpointsToRetain = maxNumberOfCheckpointsToRetain;
        this.checkpoints = new ArrayDeque<>(maxNumberOfCheckpointsToRetain + 1);
    }

    @Override
    public void recover() throws Exception {
        // No persistent state to recover from in standalone mode.
    }

    @Override
    public void addCheckpoint(CompletedCheckpoint checkpoint) throws Exception {
        checkpoints.addLast(checkpoint);
        // Evict and discard the oldest entries beyond the retention limit; a failed
        // discard is logged but never fails the add.
        while (checkpoints.size() > maxNumberOfCheckpointsToRetain) {
            CompletedCheckpoint subsumed = checkpoints.removeFirst();
            try {
                subsumed.discardOnSubsume();
            } catch (Exception e) {
                LOG.warn("Fail to subsume the old checkpoint.", e);
            }
        }
    }

    @Override
    public CompletedCheckpoint getLatestCheckpoint() {
        if (checkpoints.isEmpty()) {
            return null;
        }
        return checkpoints.peekLast();
    }

    @Override
    public List<CompletedCheckpoint> getAllCheckpoints() {
        List<CompletedCheckpoint> snapshot = new ArrayList<>(checkpoints.size());
        snapshot.addAll(checkpoints);
        return snapshot;
    }

    @Override
    public int getNumberOfRetainedCheckpoints() {
        return checkpoints.size();
    }

    @Override
    public int getMaxNumberOfRetainedCheckpoints() {
        return maxNumberOfCheckpointsToRetain;
    }

    @Override
    public void shutdown(JobStatus jobStatus) throws Exception {
        try {
            LOG.info("Shutting down");
            for (CompletedCheckpoint retained : checkpoints) {
                retained.discardOnShutdown(jobStatus);
            }
        } finally {
            // Always drop the in-memory references, even when a discard throws.
            checkpoints.clear();
        }
    }

    @Override
    public boolean requiresExternalizedCheckpoints() {
        return false;
    }
}
| apache-2.0 |
myxyz/zstack | header/src/main/java/org/zstack/header/storage/primary/APIUpdatePrimaryStorageEvent.java | 595 | package org.zstack.header.storage.primary;
import org.zstack.header.message.APIEvent;
/**
 * API event carrying the primary storage inventory after an update has been applied.
 *
 * Created by frank on 6/14/2015.
 */
public class APIUpdatePrimaryStorageEvent extends APIEvent {
    private PrimaryStorageInventory inventory;

    public APIUpdatePrimaryStorageEvent() {
    }

    public APIUpdatePrimaryStorageEvent(String apiId) {
        super(apiId);
    }

    public PrimaryStorageInventory getInventory() {
        return inventory;
    }

    public void setInventory(PrimaryStorageInventory inventory) {
        this.inventory = inventory;
    }
}
| apache-2.0 |
nvoron23/presto | presto-main/src/main/java/com/facebook/presto/sql/planner/PlanSanityChecker.java | 19766 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner;
import com.facebook.presto.sql.planner.plan.AggregationNode;
import com.facebook.presto.sql.planner.plan.DistinctLimitNode;
import com.facebook.presto.sql.planner.plan.ExchangeNode;
import com.facebook.presto.sql.planner.plan.FilterNode;
import com.facebook.presto.sql.planner.plan.IndexJoinNode;
import com.facebook.presto.sql.planner.plan.IndexSourceNode;
import com.facebook.presto.sql.planner.plan.JoinNode;
import com.facebook.presto.sql.planner.plan.LimitNode;
import com.facebook.presto.sql.planner.plan.MarkDistinctNode;
import com.facebook.presto.sql.planner.plan.OutputNode;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import com.facebook.presto.sql.planner.plan.PlanVisitor;
import com.facebook.presto.sql.planner.plan.ProjectNode;
import com.facebook.presto.sql.planner.plan.RemoteSourceNode;
import com.facebook.presto.sql.planner.plan.RowNumberNode;
import com.facebook.presto.sql.planner.plan.SampleNode;
import com.facebook.presto.sql.planner.plan.SemiJoinNode;
import com.facebook.presto.sql.planner.plan.SortNode;
import com.facebook.presto.sql.planner.plan.TableCommitNode;
import com.facebook.presto.sql.planner.plan.TableScanNode;
import com.facebook.presto.sql.planner.plan.TableWriterNode;
import com.facebook.presto.sql.planner.plan.TopNNode;
import com.facebook.presto.sql.planner.plan.TopNRowNumberNode;
import com.facebook.presto.sql.planner.plan.UnionNode;
import com.facebook.presto.sql.planner.plan.UnnestNode;
import com.facebook.presto.sql.planner.plan.ValuesNode;
import com.facebook.presto.sql.planner.plan.WindowNode;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.FunctionCall;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import static com.facebook.presto.sql.planner.optimizations.IndexJoinOptimizer.IndexKeyTracer;
import static com.google.common.base.Preconditions.checkArgument;
/**
* Ensures that all dependencies (i.e., symbols in expressions) for a plan node are provided by its source nodes
*/
public final class PlanSanityChecker
{
private PlanSanityChecker() {}
public static void validate(PlanNode plan)
{
plan.accept(new Visitor(), null);
}
private static class Visitor
extends PlanVisitor<Void, Void>
{
private final Map<PlanNodeId, PlanNode> nodesById = new HashMap<>();
@Override
protected Void visitPlan(PlanNode node, Void context)
{
throw new UnsupportedOperationException("not yet implemented: " + node.getClass().getName());
}
@Override
public Void visitAggregation(AggregationNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
Set<Symbol> inputs = ImmutableSet.copyOf(source.getOutputSymbols());
checkDependencies(inputs, node.getGroupBy(), "Invalid node. Group by symbols (%s) not in source plan output (%s)", node.getGroupBy(), node.getSource().getOutputSymbols());
if (node.getSampleWeight().isPresent()) {
checkArgument(inputs.contains(node.getSampleWeight().get()), "Invalid node. Sample weight symbol (%s) is not in source plan output (%s)", node.getSampleWeight().get(), node.getSource().getOutputSymbols());
}
for (FunctionCall call : node.getAggregations().values()) {
Set<Symbol> dependencies = DependencyExtractor.extractUnique(call);
checkDependencies(inputs, dependencies, "Invalid node. Aggregation dependencies (%s) not in source plan output (%s)", dependencies, node.getSource().getOutputSymbols());
}
return null;
}
@Override
public Void visitMarkDistinct(MarkDistinctNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
checkDependencies(source.getOutputSymbols(), node.getDistinctSymbols(), "Invalid node. Mark distinct symbols (%s) not in source plan output (%s)", node.getDistinctSymbols(), source.getOutputSymbols());
return null;
}
@Override
public Void visitWindow(WindowNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
Set<Symbol> inputs = ImmutableSet.copyOf(source.getOutputSymbols());
checkDependencies(inputs, node.getPartitionBy(), "Invalid node. Partition by symbols (%s) not in source plan output (%s)", node.getPartitionBy(), node.getSource().getOutputSymbols());
checkDependencies(inputs, node.getOrderBy(), "Invalid node. Order by symbols (%s) not in source plan output (%s)", node.getOrderBy(), node.getSource().getOutputSymbols());
ImmutableList.Builder<Symbol> bounds = ImmutableList.builder();
if (node.getFrame().getStartValue().isPresent()) {
bounds.add(node.getFrame().getStartValue().get());
}
if (node.getFrame().getEndValue().isPresent()) {
bounds.add(node.getFrame().getEndValue().get());
}
checkDependencies(inputs, bounds.build(), "Invalid node. Frame bounds (%s) not in source plan output (%s)", bounds.build(), node.getSource().getOutputSymbols());
for (FunctionCall call : node.getWindowFunctions().values()) {
Set<Symbol> dependencies = DependencyExtractor.extractUnique(call);
checkDependencies(inputs, dependencies, "Invalid node. Window function dependencies (%s) not in source plan output (%s)", dependencies, node.getSource().getOutputSymbols());
}
return null;
}
@Override
public Void visitTopNRowNumber(TopNRowNumberNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
Set<Symbol> inputs = ImmutableSet.copyOf(source.getOutputSymbols());
checkDependencies(inputs, node.getPartitionBy(), "Invalid node. Partition by symbols (%s) not in source plan output (%s)", node.getPartitionBy(), node.getSource().getOutputSymbols());
checkDependencies(inputs, node.getOrderBy(), "Invalid node. Order by symbols (%s) not in source plan output (%s)", node.getOrderBy(), node.getSource().getOutputSymbols());
return null;
}
@Override
public Void visitRowNumber(RowNumberNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
checkDependencies(source.getOutputSymbols(), node.getPartitionBy(), "Invalid node. Partition by symbols (%s) not in source plan output (%s)", node.getPartitionBy(), node.getSource().getOutputSymbols());
return null;
}
@Override
public Void visitFilter(FilterNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
Set<Symbol> inputs = ImmutableSet.copyOf(source.getOutputSymbols());
checkDependencies(inputs, node.getOutputSymbols(), "Invalid node. Output symbols (%s) not in source plan output (%s)", node.getOutputSymbols(), node.getSource().getOutputSymbols());
Set<Symbol> dependencies = DependencyExtractor.extractUnique(node.getPredicate());
checkDependencies(inputs, dependencies, "Invalid node. Predicate dependencies (%s) not in source plan output (%s)", dependencies, node.getSource().getOutputSymbols());
return null;
}
@Override
public Void visitSample(SampleNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
return null;
}
@Override
public Void visitProject(ProjectNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
Set<Symbol> inputs = ImmutableSet.copyOf(source.getOutputSymbols());
for (Expression expression : node.getExpressions()) {
Set<Symbol> dependencies = DependencyExtractor.extractUnique(expression);
checkDependencies(inputs, dependencies, "Invalid node. Expression dependencies (%s) not in source plan output (%s)", dependencies, inputs);
}
return null;
}
@Override
public Void visitTopN(TopNNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
Set<Symbol> inputs = ImmutableSet.copyOf(source.getOutputSymbols());
checkDependencies(inputs, node.getOutputSymbols(), "Invalid node. Output symbols (%s) not in source plan output (%s)", node.getOutputSymbols(), node.getSource().getOutputSymbols());
checkDependencies(inputs, node.getOrderBy(),
"Invalid node. Order by dependencies (%s) not in source plan output (%s)",
node.getOrderBy(),
node.getSource().getOutputSymbols());
return null;
}
@Override
public Void visitSort(SortNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
Set<Symbol> inputs = ImmutableSet.copyOf(source.getOutputSymbols());
checkDependencies(inputs, node.getOutputSymbols(), "Invalid node. Output symbols (%s) not in source plan output (%s)", node.getOutputSymbols(), node.getSource().getOutputSymbols());
checkDependencies(inputs, node.getOrderBy(), "Invalid node. Order by dependencies (%s) not in source plan output (%s)", node.getOrderBy(), node.getSource().getOutputSymbols());
return null;
}
@Override
public Void visitOutput(OutputNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
checkDependencies(source.getOutputSymbols(), node.getOutputSymbols(), "Invalid node. Output column dependencies (%s) not in source plan output (%s)", node.getOutputSymbols(), source.getOutputSymbols());
return null;
}
@Override
public Void visitLimit(LimitNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
return null;
}
@Override
public Void visitDistinctLimit(DistinctLimitNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
return null;
}
@Override
public Void visitJoin(JoinNode node, Void context)
{
node.getLeft().accept(this, context);
node.getRight().accept(this, context);
verifyUniqueId(node);
Set<Symbol> leftInputs = ImmutableSet.copyOf(node.getLeft().getOutputSymbols());
Set<Symbol> rightInputs = ImmutableSet.copyOf(node.getRight().getOutputSymbols());
for (JoinNode.EquiJoinClause clause : node.getCriteria()) {
checkArgument(leftInputs.contains(clause.getLeft()), "Symbol from join clause (%s) not in left source (%s)", clause.getLeft(), node.getLeft().getOutputSymbols());
checkArgument(rightInputs.contains(clause.getRight()), "Symbol from join clause (%s) not in right source (%s)", clause.getRight(), node.getRight().getOutputSymbols());
}
return null;
}
@Override
public Void visitSemiJoin(SemiJoinNode node, Void context)
{
node.getSource().accept(this, context);
node.getFilteringSource().accept(this, context);
verifyUniqueId(node);
checkArgument(node.getSource().getOutputSymbols().contains(node.getSourceJoinSymbol()), "Symbol from semi join clause (%s) not in source (%s)", node.getSourceJoinSymbol(), node.getSource().getOutputSymbols());
checkArgument(node.getFilteringSource().getOutputSymbols().contains(node.getFilteringSourceJoinSymbol()), "Symbol from semi join clause (%s) not in filtering source (%s)", node.getSourceJoinSymbol(), node.getFilteringSource().getOutputSymbols());
Set<Symbol> outputs = ImmutableSet.copyOf(node.getOutputSymbols());
checkArgument(outputs.containsAll(node.getSource().getOutputSymbols()), "Semi join output symbols (%s) must contain all of the source symbols (%s)", node.getOutputSymbols(), node.getSource().getOutputSymbols());
checkArgument(outputs.contains(node.getSemiJoinOutput()),
"Semi join output symbols (%s) must contain join result (%s)",
node.getOutputSymbols(),
node.getSemiJoinOutput());
return null;
}
@Override
public Void visitIndexJoin(IndexJoinNode node, Void context)
{
node.getProbeSource().accept(this, context);
node.getIndexSource().accept(this, context);
verifyUniqueId(node);
Set<Symbol> probeInputs = ImmutableSet.copyOf(node.getProbeSource().getOutputSymbols());
Set<Symbol> indexSourceInputs = ImmutableSet.copyOf(node.getIndexSource().getOutputSymbols());
for (IndexJoinNode.EquiJoinClause clause : node.getCriteria()) {
checkArgument(probeInputs.contains(clause.getProbe()), "Probe symbol from index join clause (%s) not in probe source (%s)", clause.getProbe(), node.getProbeSource().getOutputSymbols());
checkArgument(indexSourceInputs.contains(clause.getIndex()), "Index symbol from index join clause (%s) not in index source (%s)", clause.getIndex(), node.getIndexSource().getOutputSymbols());
}
Set<Symbol> lookupSymbols = FluentIterable.from(node.getCriteria())
.transform(IndexJoinNode.EquiJoinClause::getIndex)
.toSet();
Map<Symbol, Symbol> trace = IndexKeyTracer.trace(node.getIndexSource(), lookupSymbols);
checkArgument(!trace.isEmpty() && lookupSymbols.containsAll(trace.keySet()),
"Index lookup symbols are not traceable to index source: %s",
lookupSymbols);
return null;
}
@Override
public Void visitIndexSource(IndexSourceNode node, Void context)
{
verifyUniqueId(node);
checkDependencies(node.getOutputSymbols(), node.getLookupSymbols(), "Lookup symbols must be part of output symbols");
checkDependencies(node.getAssignments().keySet(), node.getOutputSymbols(), "Assignments must contain mappings for output symbols");
return null;
}
@Override
public Void visitTableScan(TableScanNode node, Void context)
{
verifyUniqueId(node);
checkArgument(node.getAssignments().keySet().containsAll(node.getOutputSymbols()), "Assignments must contain mappings for output symbols");
return null;
}
@Override
public Void visitValues(ValuesNode node, Void context)
{
verifyUniqueId(node);
return null;
}
@Override
public Void visitUnnest(UnnestNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context);
verifyUniqueId(node);
Set<Symbol> required = ImmutableSet.<Symbol>builder()
.addAll(node.getReplicateSymbols())
.addAll(node.getUnnestSymbols().keySet())
.build();
checkDependencies(source.getOutputSymbols(), required, "Invalid node. Dependencies (%s) not in source plan output (%s)", required, source.getOutputSymbols());
return null;
}
@Override
public Void visitRemoteSource(RemoteSourceNode node, Void context)
{
verifyUniqueId(node);
return null;
}
@Override
public Void visitExchange(ExchangeNode node, Void context)
{
verifyUniqueId(node);
return null;
}
@Override
public Void visitTableWriter(TableWriterNode node, Void context)
{
PlanNode source = node.getSource();
source.accept(this, context); // visit child
verifyUniqueId(node);
if (node.getSampleWeightSymbol().isPresent()) {
checkArgument(source.getOutputSymbols().contains(node.getSampleWeightSymbol().get()), "Invalid node. Sample weight symbol (%s) is not in source plan output (%s)", node.getSampleWeightSymbol().get(), node.getSource().getOutputSymbols());
}
return null;
}
@Override
public Void visitTableCommit(TableCommitNode node, Void context)
{
node.getSource().accept(this, context); // visit child
verifyUniqueId(node);
return null;
}
@Override
public Void visitUnion(UnionNode node, Void context)
{
for (int i = 0; i < node.getSources().size(); i++) {
PlanNode subplan = node.getSources().get(i);
checkDependencies(subplan.getOutputSymbols(), node.sourceOutputLayout(i), "UNION subplan must provide all of the necessary symbols");
subplan.accept(this, context); // visit child
}
verifyUniqueId(node);
return null;
}
private void verifyUniqueId(PlanNode node)
{
PlanNodeId id = node.getId();
checkArgument(!nodesById.containsKey(id), "Duplicate node id found %s between %s and %s", node.getId(), node, nodesById.get(id));
nodesById.put(id, node);
}
}
private static void checkDependencies(Collection<Symbol> inputs, Collection<Symbol> required, String message, Object... parameters)
{
checkArgument(ImmutableSet.copyOf(inputs).containsAll(required), message, parameters);
}
}
| apache-2.0 |
pitatensai/ExRecyclerView | mylibrary/src/main/java/kale/layoutmanager/NestLinearLayoutManager.java | 4073 | package kale.layoutmanager;
import android.content.Context;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.view.ViewGroup;
/**
* 当recyclerView和scrollView嵌套时使用
* Author: ZhuWenWu
* Version V1.0
* Date: 2015/2/26 14:15.
* Description:
* Modification History:
* Date Author Version Description
* -----------------------------------------------------------------------------------
* 2015/2/26 ZhuWenWu 1.0 1.0
* Why & What is modified:
*/
public class NestLinearLayoutManager extends LinearLayoutManager {
//private static final String TAG = FullyLinearLayoutManager.class.getSimpleName();
public NestLinearLayoutManager(Context context) {
super(context);
}
public NestLinearLayoutManager(Context context, int orientation, boolean reverseLayout) {
super(context, orientation, reverseLayout);
}
private int[] mMeasuredDimension = new int[2];
@Override
public void onMeasure(RecyclerView.Recycler recycler, RecyclerView.State state,
int widthSpec, int heightSpec) {
final int widthMode = View.MeasureSpec.getMode(widthSpec);
final int heightMode = View.MeasureSpec.getMode(heightSpec);
final int widthSize = View.MeasureSpec.getSize(widthSpec);
final int heightSize = View.MeasureSpec.getSize(heightSpec);
/*Log.i(TAG, "onMeasure called. \nwidthMode " + widthMode
+ " \nheightMode " + heightSpec
+ " \nwidthSize " + widthSize
+ " \nheightSize " + heightSize
+ " \ngetItemCount() " + getItemCount());*/
int width = 0;
int height = 0;
for (int i = 0; i < getItemCount(); i++) {
measureScrapChild(recycler, i,
View.MeasureSpec.makeMeasureSpec(i, View.MeasureSpec.UNSPECIFIED),
View.MeasureSpec.makeMeasureSpec(i, View.MeasureSpec.UNSPECIFIED),
mMeasuredDimension);
if (getOrientation() == HORIZONTAL) {
width = width + mMeasuredDimension[0];
if (i == 0) {
height = mMeasuredDimension[1];
}
} else {
height = height + mMeasuredDimension[1];
if (i == 0) {
width = mMeasuredDimension[0];
}
}
}
switch (widthMode) {
case View.MeasureSpec.EXACTLY:
width = widthSize;
case View.MeasureSpec.AT_MOST:
case View.MeasureSpec.UNSPECIFIED:
}
switch (heightMode) {
case View.MeasureSpec.EXACTLY:
height = heightSize;
case View.MeasureSpec.AT_MOST:
case View.MeasureSpec.UNSPECIFIED:
}
setMeasuredDimension(width, height);
}
private void measureScrapChild(RecyclerView.Recycler recycler, int position, int widthSpec,
int heightSpec, int[] measuredDimension) {
try {
View view = recycler.getViewForPosition(position);//fix 动态添加时报IndexOutOfBoundsException
if (view != null) {
RecyclerView.LayoutParams p = (RecyclerView.LayoutParams) view.getLayoutParams();
int childWidthSpec = ViewGroup.getChildMeasureSpec(widthSpec, getPaddingLeft() + getPaddingRight(), p.width);
int childHeightSpec = ViewGroup.getChildMeasureSpec(heightSpec, getPaddingTop() + getPaddingBottom(), p.height);
view.measure(childWidthSpec, childHeightSpec);
measuredDimension[0] = view.getMeasuredWidth() + p.leftMargin + p.rightMargin;
measuredDimension[1] = view.getMeasuredHeight() + p.bottomMargin + p.topMargin;
recycler.recycleView(view);
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
| apache-2.0 |
doom369/netty | testsuite-autobahn/src/main/java/io/netty/testsuite/autobahn/AutobahnServer.java | 2271 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.testsuite.autobahn;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.buffer.PooledByteBufAllocator;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
/**
* A Web Socket echo server for running the
* <a href="http://autobahn.ws/testsuite/">autobahn test suite</a>
*/
public class AutobahnServer {
private final int port;
public AutobahnServer(int port) {
this.port = port;
}
public void run() throws Exception {
EventLoopGroup bossGroup = new NioEventLoopGroup(1);
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.childOption(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
.childHandler(new AutobahnServerInitializer());
ChannelFuture f = b.bind(port).sync();
System.out.println("Web Socket Server started at port " + port);
f.channel().closeFuture().sync();
} finally {
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
public static void main(String[] args) throws Exception {
int port;
if (args.length > 0) {
port = Integer.parseInt(args[0]);
} else {
port = 9000;
}
new AutobahnServer(port).run();
}
}
| apache-2.0 |
akoshibe/fl-hhcp | lib/gen-java/net/floodlightcontroller/packetstreamer/thrift/Message.java | 12922 | /**
* Autogenerated by Thrift Compiler (0.7.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
*/
package net.floodlightcontroller.packetstreamer.thrift;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class Message implements org.apache.thrift.TBase<Message, Message._Fields>, java.io.Serializable, Cloneable {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Message");
private static final org.apache.thrift.protocol.TField SESSION_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("sessionIDs", org.apache.thrift.protocol.TType.LIST, (short)1);
private static final org.apache.thrift.protocol.TField PACKET_FIELD_DESC = new org.apache.thrift.protocol.TField("packet", org.apache.thrift.protocol.TType.STRUCT, (short)2);
public List<String> sessionIDs; // required
public Packet packet; // required
/** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
SESSION_IDS((short)1, "sessionIDs"),
PACKET((short)2, "packet");
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
/**
* Find the _Fields constant that matches fieldId, or null if its not found.
*/
public static _Fields findByThriftId(int fieldId) {
switch(fieldId) {
case 1: // SESSION_IDS
return SESSION_IDS;
case 2: // PACKET
return PACKET;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
return fields;
}
/**
* Find the _Fields constant that matches name, or null if its not found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
private final short _thriftId;
private final String _fieldName;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
public String getFieldName() {
return _fieldName;
}
}
// isset id assignments
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
tmpMap.put(_Fields.SESSION_IDS, new org.apache.thrift.meta_data.FieldMetaData("sessionIDs", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
tmpMap.put(_Fields.PACKET, new org.apache.thrift.meta_data.FieldMetaData("packet", org.apache.thrift.TFieldRequirementType.DEFAULT,
new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Packet.class)));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Message.class, metaDataMap);
}
public Message() {
}
public Message(
List<String> sessionIDs,
Packet packet)
{
this();
this.sessionIDs = sessionIDs;
this.packet = packet;
}
/**
* Performs a deep copy on <i>other</i>.
*/
public Message(Message other) {
if (other.isSetSessionIDs()) {
List<String> __this__sessionIDs = new ArrayList<String>();
for (String other_element : other.sessionIDs) {
__this__sessionIDs.add(other_element);
}
this.sessionIDs = __this__sessionIDs;
}
if (other.isSetPacket()) {
this.packet = new Packet(other.packet);
}
}
public Message deepCopy() {
return new Message(this);
}
@Override
public void clear() {
this.sessionIDs = null;
this.packet = null;
}
public int getSessionIDsSize() {
return (this.sessionIDs == null) ? 0 : this.sessionIDs.size();
}
public java.util.Iterator<String> getSessionIDsIterator() {
return (this.sessionIDs == null) ? null : this.sessionIDs.iterator();
}
public void addToSessionIDs(String elem) {
if (this.sessionIDs == null) {
this.sessionIDs = new ArrayList<String>();
}
this.sessionIDs.add(elem);
}
public List<String> getSessionIDs() {
return this.sessionIDs;
}
public Message setSessionIDs(List<String> sessionIDs) {
this.sessionIDs = sessionIDs;
return this;
}
public void unsetSessionIDs() {
this.sessionIDs = null;
}
/** Returns true if field sessionIDs is set (has been assigned a value) and false otherwise */
public boolean isSetSessionIDs() {
return this.sessionIDs != null;
}
public void setSessionIDsIsSet(boolean value) {
if (!value) {
this.sessionIDs = null;
}
}
public Packet getPacket() {
return this.packet;
}
public Message setPacket(Packet packet) {
this.packet = packet;
return this;
}
public void unsetPacket() {
this.packet = null;
}
/** Returns true if field packet is set (has been assigned a value) and false otherwise */
public boolean isSetPacket() {
return this.packet != null;
}
public void setPacketIsSet(boolean value) {
if (!value) {
this.packet = null;
}
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case SESSION_IDS:
if (value == null) {
unsetSessionIDs();
} else {
setSessionIDs((List<String>)value);
}
break;
case PACKET:
if (value == null) {
unsetPacket();
} else {
setPacket((Packet)value);
}
break;
}
}
public Object getFieldValue(_Fields field) {
switch (field) {
case SESSION_IDS:
return getSessionIDs();
case PACKET:
return getPacket();
}
throw new IllegalStateException();
}
/** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
public boolean isSet(_Fields field) {
if (field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case SESSION_IDS:
return isSetSessionIDs();
case PACKET:
return isSetPacket();
}
throw new IllegalStateException();
}
@Override
public boolean equals(Object that) {
if (that == null)
return false;
if (that instanceof Message)
return this.equals((Message)that);
return false;
}
public boolean equals(Message that) {
if (that == null)
return false;
boolean this_present_sessionIDs = true && this.isSetSessionIDs();
boolean that_present_sessionIDs = true && that.isSetSessionIDs();
if (this_present_sessionIDs || that_present_sessionIDs) {
if (!(this_present_sessionIDs && that_present_sessionIDs))
return false;
if (!this.sessionIDs.equals(that.sessionIDs))
return false;
}
boolean this_present_packet = true && this.isSetPacket();
boolean that_present_packet = true && that.isSetPacket();
if (this_present_packet || that_present_packet) {
if (!(this_present_packet && that_present_packet))
return false;
if (!this.packet.equals(that.packet))
return false;
}
return true;
}
@Override
public int hashCode() {
return 0;
}
public int compareTo(Message other) {
if (!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
Message typedOther = (Message)other;
lastComparison = Boolean.valueOf(isSetSessionIDs()).compareTo(typedOther.isSetSessionIDs());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetSessionIDs()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.sessionIDs, typedOther.sessionIDs);
if (lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetPacket()).compareTo(typedOther.isSetPacket());
if (lastComparison != 0) {
return lastComparison;
}
if (isSetPacket()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.packet, typedOther.packet);
if (lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField field;
iprot.readStructBegin();
while (true)
{
field = iprot.readFieldBegin();
if (field.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (field.id) {
case 1: // SESSION_IDS
if (field.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
this.sessionIDs = new ArrayList<String>(_list0.size);
for (int _i1 = 0; _i1 < _list0.size; ++_i1)
{
String _elem2; // required
_elem2 = iprot.readString();
this.sessionIDs.add(_elem2);
}
iprot.readListEnd();
}
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
}
break;
case 2: // PACKET
if (field.type == org.apache.thrift.protocol.TType.STRUCT) {
this.packet = new Packet();
this.packet.read(iprot);
} else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be checked in the validate method
validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
validate();
oprot.writeStructBegin(STRUCT_DESC);
if (this.sessionIDs != null) {
oprot.writeFieldBegin(SESSION_IDS_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, this.sessionIDs.size()));
for (String _iter3 : this.sessionIDs)
{
oprot.writeString(_iter3);
}
oprot.writeListEnd();
}
oprot.writeFieldEnd();
}
if (this.packet != null) {
oprot.writeFieldBegin(PACKET_FIELD_DESC);
this.packet.write(oprot);
oprot.writeFieldEnd();
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("Message(");
boolean first = true;
sb.append("sessionIDs:");
if (this.sessionIDs == null) {
sb.append("null");
} else {
sb.append(this.sessionIDs);
}
first = false;
if (!first) sb.append(", ");
sb.append("packet:");
if (this.packet == null) {
sb.append("null");
} else {
sb.append(this.packet);
}
first = false;
sb.append(")");
return sb.toString();
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
}
private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
} catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
}
| apache-2.0 |
miniway/presto | presto-client/src/test/java/io/prestosql/client/TestQueryResults.java | 2633 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.client;
import io.airlift.json.JsonCodec;
import org.testng.annotations.Test;
import static io.airlift.json.JsonCodec.jsonCodec;
import static org.testng.Assert.assertEquals;
/**
 * Wire-compatibility test for {@link QueryResults}: a JSON response captured
 * from an older server must keep deserializing with the current client codec.
 */
public class TestQueryResults
{
    private static final JsonCodec<QueryResults> CODEC = jsonCodec(QueryResults.class);

    @Test
    public void testCompatibility()
    {
        // Golden value captured from a real server response; do not edit —
        // the point of the test is that this exact historical payload parses.
        String goldenJson = "{\n" +
                "  \"id\" : \"20160128_214710_00012_rk68b\",\n" +
                "  \"infoUri\" : \"http://localhost:54855/query.html?20160128_214710_00012_rk68b\",\n" +
                "  \"columns\" : [ {\n" +
                "    \"name\" : \"_col0\",\n" +
                "    \"type\" : \"bigint\",\n" +
                "    \"typeSignature\" : {\n" +
                "      \"rawType\" : \"bigint\",\n" +
                "      \"typeArguments\" : [ ],\n" +
                "      \"literalArguments\" : [ ],\n" +
                "      \"arguments\" : [ ]\n" +
                "    }\n" +
                "  } ],\n" +
                "  \"data\" : [ [ 123 ] ],\n" +
                "  \"stats\" : {\n" +
                "    \"state\" : \"FINISHED\",\n" +
                "    \"queued\" : false,\n" +
                "    \"scheduled\" : false,\n" +
                "    \"nodes\" : 0,\n" +
                "    \"totalSplits\" : 0,\n" +
                "    \"queuedSplits\" : 0,\n" +
                "    \"runningSplits\" : 0,\n" +
                "    \"completedSplits\" : 0,\n" +
                "    \"cpuTimeMillis\" : 0,\n" +
                "    \"wallTimeMillis\" : 0,\n" +
                "    \"queuedTimeMillis\" : 0,\n" +
                "    \"elapsedTimeMillis\" : 0,\n" +
                "    \"processedRows\" : 0,\n" +
                "    \"processedBytes\" : 0,\n" +
                "    \"peakMemoryBytes\" : 0\n" +
                "  }\n" +
                "}";
        QueryResults results = CODEC.fromJson(goldenJson);
        assertEquals(results.getId(), "20160128_214710_00012_rk68b");
    }
}
| apache-2.0 |
DamianPilot382/Rubiks-Cube-Solver | opencv/sources/modules/features2d/misc/java/test/BRIEFDescriptorExtractorTest.java | 3140 | package org.opencv.test.features2d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.features2d.DescriptorExtractor;
import org.opencv.core.KeyPoint;
import org.opencv.test.OpenCVTestCase;
import org.opencv.test.OpenCVTestRunner;
import org.opencv.imgproc.Imgproc;
/**
 * Tests for the BRIEF descriptor extractor created via the
 * {@link DescriptorExtractor} factory. Several tests compare against
 * hard-coded golden values (descriptor bytes, serialized parameter files),
 * so any change to the extractor's defaults will surface here.
 */
public class BRIEFDescriptorExtractorTest extends OpenCVTestCase {

    // Extractor under test; created fresh for each test in setUp().
    DescriptorExtractor extractor;
    // Side length, in pixels, of the square synthetic test image.
    int matSize;

    // Builds a white matSize x matSize image with a gray cross through the
    // center, giving the descriptor a stable structure to sample.
    private Mat getTestImg() {
        Mat cross = new Mat(matSize, matSize, CvType.CV_8U, new Scalar(255));
        Imgproc.line(cross, new Point(20, matSize / 2), new Point(matSize - 21, matSize / 2), new Scalar(100), 2);
        Imgproc.line(cross, new Point(matSize / 2, 20), new Point(matSize / 2, matSize - 21), new Scalar(100), 2);
        return cross;
    }

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        extractor = DescriptorExtractor.create(DescriptorExtractor.BRIEF);
        matSize = 100;
    }

    // Placeholder for the batch (multi-image) compute overload.
    public void testComputeListOfMatListOfListOfKeyPointListOfMat() {
        fail("Not yet implemented");
    }

    // Computes the descriptor for one fixed keypoint on the synthetic image
    // and compares against golden bytes recorded from a known-good build.
    public void testComputeMatListOfKeyPointMat() {
        KeyPoint point = new KeyPoint(55.775577545166016f, 44.224422454833984f, 16, 9.754629f, 8617.863f, 1, -1);
        MatOfKeyPoint keypoints = new MatOfKeyPoint(point);
        Mat img = getTestImg();
        Mat descriptors = new Mat();
        extractor.compute(img, keypoints, descriptors);
        // Golden descriptor: 1 row x 32 bytes (the default BRIEF size below).
        Mat truth = new Mat(1, 32, CvType.CV_8UC1) {
            {
                put(0, 0, 96, 0, 76, 24, 47, 182, 68, 137,
                        149, 195, 67, 16, 187, 224, 74, 8,
                        82, 169, 87, 70, 44, 4, 192, 56,
                        13, 128, 44, 106, 146, 72, 194, 245);
            }
        };
        assertMatEqual(truth, descriptors);
    }

    public void testCreate() {
        assertNotNull(extractor);
    }

    // Default BRIEF descriptor length is 32 bytes.
    public void testDescriptorSize() {
        assertEquals(32, extractor.descriptorSize());
    }

    public void testDescriptorType() {
        assertEquals(CvType.CV_8U, extractor.descriptorType());
    }

    public void testEmpty() {
        assertFalse(extractor.empty());
    }

    // Reading a parameter file must override the default descriptor size.
    public void testRead() {
        String filename = OpenCVTestRunner.getTempFileName("yml");
        writeFile(filename, "%YAML:1.0\ndescriptorSize: 64\n");
        extractor.read(filename);
        assertEquals(64, extractor.descriptorSize());
    }

    // write() must serialize the current parameters; golden XML form below.
    public void testWrite() {
        String filename = OpenCVTestRunner.getTempFileName("xml");
        extractor.write(filename);
        String truth = "<?xml version=\"1.0\"?>\n<opencv_storage>\n<descriptorSize>32</descriptorSize>\n</opencv_storage>\n";
        assertEquals(truth, readFile(filename));
    }

    // Same as testWrite, but the YAML serialization form.
    public void testWriteYml() {
        String filename = OpenCVTestRunner.getTempFileName("yml");
        extractor.write(filename);
        String truth = "%YAML:1.0\ndescriptorSize: 32\n";
        assertEquals(truth, readFile(filename));
    }
}
| apache-2.0 |
miniway/presto | presto-jdbc/src/main/java/io/prestosql/jdbc/StageStats.java | 4016 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.jdbc;
import com.google.common.collect.ImmutableList;
import java.util.List;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
/**
 * Immutable snapshot of execution statistics for a single query stage,
 * including its sub-stages. Instances can be created directly via the
 * (13-argument) constructor, more readably via {@link #builder()}, or
 * converted from the client representation with {@link #create}.
 */
public final class StageStats
{
    private final String stageId;
    private final String state;
    private final boolean done;
    private final int nodes;
    private final int totalSplits;
    private final int queuedSplits;
    private final int runningSplits;
    private final int completedSplits;
    private final long cpuTimeMillis;
    private final long wallTimeMillis;
    private final long processedRows;
    private final long processedBytes;
    private final List<StageStats> subStages;

    public StageStats(
            String stageId,
            String state,
            boolean done,
            int nodes,
            int totalSplits,
            int queuedSplits,
            int runningSplits,
            int completedSplits,
            long cpuTimeMillis,
            long wallTimeMillis,
            long processedRows,
            long processedBytes,
            List<StageStats> subStages)
    {
        this.stageId = requireNonNull(stageId, "stageId is null");
        this.state = requireNonNull(state, "state is null");
        this.done = done;
        this.nodes = nodes;
        this.totalSplits = totalSplits;
        this.queuedSplits = queuedSplits;
        this.runningSplits = runningSplits;
        this.completedSplits = completedSplits;
        this.cpuTimeMillis = cpuTimeMillis;
        this.wallTimeMillis = wallTimeMillis;
        this.processedRows = processedRows;
        this.processedBytes = processedBytes;
        // Defensive immutable copy so callers cannot mutate our state.
        this.subStages = ImmutableList.copyOf(requireNonNull(subStages, "subStages is null"));
    }

    /**
     * Converts the client-side representation into this JDBC representation,
     * recursively converting all sub-stages.
     */
    static StageStats create(io.prestosql.client.StageStats stats)
    {
        return new StageStats(
                stats.getStageId(),
                stats.getState(),
                stats.isDone(),
                stats.getNodes(),
                stats.getTotalSplits(),
                stats.getQueuedSplits(),
                stats.getRunningSplits(),
                stats.getCompletedSplits(),
                stats.getCpuTimeMillis(),
                stats.getWallTimeMillis(),
                stats.getProcessedRows(),
                stats.getProcessedBytes(),
                stats.getSubStages().stream()
                        .map(StageStats::create)
                        .collect(toList()));
    }

    /**
     * Returns a new builder. Preferable to the positional constructor, whose
     * 13 parameters are easy to misorder.
     */
    public static Builder builder()
    {
        return new Builder();
    }

    // --- simple accessors ---

    public String getStageId()
    {
        return stageId;
    }

    public String getState()
    {
        return state;
    }

    public boolean isDone()
    {
        return done;
    }

    public int getNodes()
    {
        return nodes;
    }

    public int getTotalSplits()
    {
        return totalSplits;
    }

    public int getQueuedSplits()
    {
        return queuedSplits;
    }

    public int getRunningSplits()
    {
        return runningSplits;
    }

    public int getCompletedSplits()
    {
        return completedSplits;
    }

    public long getCpuTimeMillis()
    {
        return cpuTimeMillis;
    }

    public long getWallTimeMillis()
    {
        return wallTimeMillis;
    }

    public long getProcessedRows()
    {
        return processedRows;
    }

    public long getProcessedBytes()
    {
        return processedBytes;
    }

    public List<StageStats> getSubStages()
    {
        return subStages;
    }

    /**
     * Fluent builder for {@link StageStats}. All fields default to their
     * zero values except {@code subStages}, which defaults to an empty list;
     * {@code stageId} and {@code state} must be set before {@link #build()}
     * (the constructor rejects nulls).
     */
    public static final class Builder
    {
        private String stageId;
        private String state;
        private boolean done;
        private int nodes;
        private int totalSplits;
        private int queuedSplits;
        private int runningSplits;
        private int completedSplits;
        private long cpuTimeMillis;
        private long wallTimeMillis;
        private long processedRows;
        private long processedBytes;
        private List<StageStats> subStages = ImmutableList.of();

        private Builder() {}

        public Builder setStageId(String stageId)
        {
            this.stageId = stageId;
            return this;
        }

        public Builder setState(String state)
        {
            this.state = state;
            return this;
        }

        public Builder setDone(boolean done)
        {
            this.done = done;
            return this;
        }

        public Builder setNodes(int nodes)
        {
            this.nodes = nodes;
            return this;
        }

        public Builder setTotalSplits(int totalSplits)
        {
            this.totalSplits = totalSplits;
            return this;
        }

        public Builder setQueuedSplits(int queuedSplits)
        {
            this.queuedSplits = queuedSplits;
            return this;
        }

        public Builder setRunningSplits(int runningSplits)
        {
            this.runningSplits = runningSplits;
            return this;
        }

        public Builder setCompletedSplits(int completedSplits)
        {
            this.completedSplits = completedSplits;
            return this;
        }

        public Builder setCpuTimeMillis(long cpuTimeMillis)
        {
            this.cpuTimeMillis = cpuTimeMillis;
            return this;
        }

        public Builder setWallTimeMillis(long wallTimeMillis)
        {
            this.wallTimeMillis = wallTimeMillis;
            return this;
        }

        public Builder setProcessedRows(long processedRows)
        {
            this.processedRows = processedRows;
            return this;
        }

        public Builder setProcessedBytes(long processedBytes)
        {
            this.processedBytes = processedBytes;
            return this;
        }

        public Builder setSubStages(List<StageStats> subStages)
        {
            this.subStages = ImmutableList.copyOf(requireNonNull(subStages, "subStages is null"));
            return this;
        }

        /** Builds the immutable {@link StageStats}; validates required fields. */
        public StageStats build()
        {
            return new StageStats(
                    stageId,
                    state,
                    done,
                    nodes,
                    totalSplits,
                    queuedSplits,
                    runningSplits,
                    completedSplits,
                    cpuTimeMillis,
                    wallTimeMillis,
                    processedRows,
                    processedBytes,
                    subStages);
        }
    }
}
| apache-2.0 |
alanfgates/hive | ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFOPNegative.java | 9207 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.udf.generic;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredJavaObject;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for {@link GenericUDFOPNegative} (the unary minus UDF): verifies both
 * the inferred output type and the negated value for each supported input
 * category (numeric writables, decimal, and string-like types that coerce to
 * double).
 */
public class TestGenericUDFOPNegative {

  /** Tolerance for floating-point comparisons. */
  private static final double EPSILON = 1E-6;

  /**
   * Runs the UDF once: initializes a fresh {@link GenericUDFOPNegative} with
   * the given input inspector, asserts the inferred output type info, and
   * returns the evaluation result for {@code input}. Extracted because every
   * test repeated this exact init/assert/evaluate sequence.
   */
  private static Object negate(ObjectInspector inputOI, Object expectedOutputTypeInfo, Object input)
      throws HiveException {
    GenericUDFOPNegative udf = new GenericUDFOPNegative();
    PrimitiveObjectInspector oi =
        (PrimitiveObjectInspector) udf.initialize(new ObjectInspector[] { inputOI });
    Assert.assertEquals(expectedOutputTypeInfo, oi.getTypeInfo());
    return udf.evaluate(new DeferredObject[] { new DeferredJavaObject(input) });
  }

  @Test
  public void testByte() throws HiveException {
    ByteWritable res = (ByteWritable) negate(
        PrimitiveObjectInspectorFactory.writableByteObjectInspector,
        TypeInfoFactory.byteTypeInfo,
        new ByteWritable((byte) 4));
    Assert.assertEquals((byte) -4, res.get());
  }

  @Test
  public void testShort() throws HiveException {
    ShortWritable res = (ShortWritable) negate(
        PrimitiveObjectInspectorFactory.writableShortObjectInspector,
        TypeInfoFactory.shortTypeInfo,
        new ShortWritable((short) 74));
    Assert.assertEquals((short) -74, res.get());
  }

  @Test
  public void testInt() throws HiveException {
    IntWritable res = (IntWritable) negate(
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        TypeInfoFactory.intTypeInfo,
        new IntWritable(747));
    Assert.assertEquals(-747, res.get());
  }

  @Test
  public void testLong() throws HiveException {
    LongWritable res = (LongWritable) negate(
        PrimitiveObjectInspectorFactory.writableLongObjectInspector,
        TypeInfoFactory.longTypeInfo,
        new LongWritable(3234747));
    Assert.assertEquals(-3234747L, res.get());
  }

  @Test
  public void testFloat() throws HiveException {
    FloatWritable res = (FloatWritable) negate(
        PrimitiveObjectInspectorFactory.writableFloatObjectInspector,
        TypeInfoFactory.floatTypeInfo,
        new FloatWritable(323.4747f));
    Assert.assertEquals(-323.4747f, res.get(), EPSILON);
  }

  @Test
  public void testDouble() throws HiveException {
    DoubleWritable res = (DoubleWritable) negate(
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
        TypeInfoFactory.doubleTypeInfo,
        new DoubleWritable(32300.004747));
    Assert.assertEquals(-32300.004747, res.get(), EPSILON);
  }

  @Test
  public void testDecimal() throws HiveException {
    // For decimal input, the output type keeps the input precision/scale.
    DecimalTypeInfo inputTypeInfo = TypeInfoFactory.getDecimalTypeInfo(11, 6);
    HiveDecimalWritable res = (HiveDecimalWritable) negate(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
        inputTypeInfo,
        new HiveDecimalWritable(HiveDecimal.create("32300.004747")));
    Assert.assertEquals(HiveDecimal.create("-32300.004747"), res.getHiveDecimal());
  }

  @Test
  public void testString() throws HiveException {
    // String-like inputs are coerced to double before negation.
    DoubleWritable res = (DoubleWritable) negate(
        PrimitiveObjectInspectorFactory.writableStringObjectInspector,
        TypeInfoFactory.doubleTypeInfo,
        new Text("32300.004747"));
    Assert.assertEquals(-32300.004747, res.get(), EPSILON);
  }

  @Test
  public void testVarchar() throws HiveException {
    HiveVarchar vc = new HiveVarchar("32300.004747", 12);
    VarcharTypeInfo inputTypeInfo = TypeInfoFactory.getVarcharTypeInfo(12);
    DoubleWritable res = (DoubleWritable) negate(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
        TypeInfoFactory.doubleTypeInfo,
        new HiveVarcharWritable(vc));
    Assert.assertEquals(-32300.004747, res.get(), EPSILON);
  }

  @Test
  public void testChar() throws HiveException {
    HiveChar vc = new HiveChar("32300.004747", 12);
    CharTypeInfo inputTypeInfo = TypeInfoFactory.getCharTypeInfo(12);
    DoubleWritable res = (DoubleWritable) negate(
        PrimitiveObjectInspectorFactory.getPrimitiveWritableObjectInspector(inputTypeInfo),
        TypeInfoFactory.doubleTypeInfo,
        new HiveCharWritable(vc));
    Assert.assertEquals(-32300.004747, res.get(), EPSILON);
  }
}
| apache-2.0 |
jirmauritz/perun | perun-web-gui/src/main/java/cz/metacentrum/perun/webgui/tabs/securitytabs/AddUserToBlacklistTabItem.java | 9572 | package cz.metacentrum.perun.webgui.tabs.securitytabs;
import com.google.gwt.cell.client.FieldUpdater;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.client.ui.*;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.UiElements;
import cz.metacentrum.perun.webgui.client.localization.ButtonTranslation;
import cz.metacentrum.perun.webgui.client.resources.ButtonType;
import cz.metacentrum.perun.webgui.client.resources.PerunEntity;
import cz.metacentrum.perun.webgui.client.resources.PerunSearchEvent;
import cz.metacentrum.perun.webgui.client.resources.SmallIcons;
import cz.metacentrum.perun.webgui.json.GetEntityById;
import cz.metacentrum.perun.webgui.json.JsonCallbackEvents;
import cz.metacentrum.perun.webgui.json.JsonUtils;
import cz.metacentrum.perun.webgui.json.authzResolver.AddAdmin;
import cz.metacentrum.perun.webgui.json.securityTeamsManager.AddUserToBlacklist;
import cz.metacentrum.perun.webgui.json.usersManager.FindCompleteRichUsers;
import cz.metacentrum.perun.webgui.json.usersManager.FindUsersByIdsNotInRpc;
import cz.metacentrum.perun.webgui.model.SecurityTeam;
import cz.metacentrum.perun.webgui.model.User;
import cz.metacentrum.perun.webgui.tabs.TabItem;
import cz.metacentrum.perun.webgui.tabs.userstabs.UserDetailTabItem;
import cz.metacentrum.perun.webgui.widgets.Confirm;
import cz.metacentrum.perun.webgui.widgets.CustomButton;
import cz.metacentrum.perun.webgui.widgets.ExtendedTextBox;
import cz.metacentrum.perun.webgui.widgets.TabMenu;
import java.util.ArrayList;
/**
* !! USE AS INNER TAB ONLY !!
*
* Provides page with add admin to VO form
*
* @author Pavel Zlamal <256627@mail.muni.cz>
* @author Vaclav Mach <374430@mail.muni.cz>
*/
public class AddUserToBlacklistTabItem implements TabItem {

	/**
	 * Perun web session
	 */
	private PerunWebSession session = PerunWebSession.getInstance();

	/**
	 * Content widget - should be simple panel
	 */
	private SimplePanel contentWidget = new SimplePanel();

	/**
	 * Title widget
	 */
	private Label titleWidget = new Label("Add to blacklist");

	/**
	 * Entity ID to set
	 */
	private int securityTeamId = 0;
	private SecurityTeam securityTeam;

	// Search callback for the user table; kept as a field so the search box
	// and the ID-based lookup can both feed results into the same table.
	private FindCompleteRichUsers users;

	// Users blacklisted during this tab's lifetime, shown in the info strip.
	private ArrayList<User> alreadyAddedList = new ArrayList<User>();
	private SimplePanel alreadyAdded = new SimplePanel();

	// Last search string, restored when the tab is redrawn.
	private String searchString = "";

	/**
	 * Creates a tab instance
	 *
	 * @param securityTeamId ID of SecurityTeam to add admin into
	 */
	public AddUserToBlacklistTabItem(int securityTeamId){
		this.securityTeamId = securityTeamId;
		// Resolve the SecurityTeam asynchronously; isPrepared() reports
		// readiness once the callback fills it in.
		JsonCallbackEvents events = new JsonCallbackEvents(){
			public void onFinished(JavaScriptObject jso) {
				securityTeam = jso.cast();
			}
		};
		new GetEntityById(PerunEntity.SECURITY_TEAM, securityTeamId, events).retrieveData();
	}

	/**
	 * Creates a tab instance
	 *
	 * @param securityTeam SecurityTeam to add admin into
	 */
	public AddUserToBlacklistTabItem(SecurityTeam securityTeam){
		this.securityTeamId = securityTeam.getId();
		this.securityTeam = securityTeam;
	}

	public boolean isPrepared(){
		return securityTeam != null;
	}

	/**
	 * Builds the tab UI: a search box, a user table, and an "Add" button that
	 * asks for a blacklisting reason before submitting each selected user.
	 */
	public Widget draw() {

		titleWidget.setText("Add to blacklist");

		final CustomButton searchButton = new CustomButton("Search", ButtonTranslation.INSTANCE.searchUsers(), SmallIcons.INSTANCE.findIcon());

		this.users = new FindCompleteRichUsers("", null, JsonCallbackEvents.disableButtonEvents(searchButton, new JsonCallbackEvents(){
			@Override
			public void onFinished(JavaScriptObject jso) {
				// if found 1 item, select
				ArrayList<User> list = JsonUtils.jsoAsList(jso);
				if (list != null && list.size() == 1) {
					users.getSelectionModel().setSelected(list.get(0), true);
				}
			}
		}));

		// MAIN TAB PANEL
		VerticalPanel firstTabPanel = new VerticalPanel();
		firstTabPanel.setSize("100%", "100%");

		// HORIZONTAL MENU
		TabMenu tabMenu = new TabMenu();

		// get the table; only Perun admins may click through to user detail
		final CellTable<User> table;
		if (session.isPerunAdmin()) {
			table = users.getTable(new FieldUpdater<User, String>() {
				public void update(int i, User user, String s) {
					session.getTabManager().addTab(new UserDetailTabItem(user));
				}
			});
		} else {
			table = users.getTable();
		}

		rebuildAlreadyAddedWidget();

		final CustomButton addButton = TabMenu.getPredefinedButton(ButtonType.ADD, ButtonTranslation.INSTANCE.addSelectedUsersToBlacklist());

		final TabItem tab = this;

		// search textbox
		final ExtendedTextBox searchBox = tabMenu.addSearchWidget(new PerunSearchEvent() {
			@Override
			public void searchFor(String text) {
				startSearching(text);
				searchString = text;
			}
		}, searchButton);

		tabMenu.addWidget(addButton);

		tabMenu.addWidget(TabMenu.getPredefinedButton(ButtonType.CLOSE, "", new ClickHandler() {
			@Override
			public void onClick(ClickEvent clickEvent) {
				// refresh parent only if something was actually added
				session.getTabManager().closeTab(tab, !alreadyAddedList.isEmpty());
			}
		}));

		addButton.addClickHandler(new ClickHandler(){
			public void onClick(ClickEvent event) {
				final ArrayList<User> list = users.getTableSelectedList();
				if (UiElements.cantSaveEmptyListDialogBox(list)){

					// Confirmation dialog asking for the blacklisting reason.
					FlexTable layout = new FlexTable();
					layout.setStyleName("inputFormFlexTable");

					final TextArea textArea = new TextArea();
					textArea.setSize("250px", "120px");

					layout.getFlexCellFormatter().addStyleName(0, 0, "itemName");
					layout.setHTML(0, 0, "Reason:");
					layout.setWidget(1, 0, textArea);

					layout.setHTML(2, 0, "Please specify why users are blacklisted.");
					layout.getFlexCellFormatter().addStyleName(2, 0, "inputFormInlineComment");

					Confirm c = new Confirm("Add user(s) to blacklist", layout, new ClickHandler() {
						@Override
						public void onClick(ClickEvent event) {
							for (int i=0; i<list.size(); i++) {
								// FIXME - Should have only one callback to core
								final int n = i;
								AddUserToBlacklist request = new AddUserToBlacklist(securityTeamId, JsonCallbackEvents.disableButtonEvents(addButton, new JsonCallbackEvents(){
									@Override
									public void onFinished(JavaScriptObject jso) {
										// put names to already added
										alreadyAddedList.add(list.get(n));
										rebuildAlreadyAddedWidget();
										// unselect added person
										users.getSelectionModel().setSelected(list.get(n), false);
										// clear search
										searchBox.getTextBox().setText("");
									}
								}));
								request.addUserToBlacklist(list.get(i).getId(), textArea.getText().trim());
							}
						}
					}, "Add", true);
					c.show();
				}
			}
		});

		// if some text has been searched before
		if(!searchString.equals("")) {
			searchBox.getTextBox().setText(searchString);
			startSearching(searchString);
		}

		addButton.setEnabled(false);
		JsonUtils.addTableManagedButton(users, table, addButton);

		// add a class to the table and wrap it into scroll panel
		table.addStyleName("perun-table");
		ScrollPanel sp = new ScrollPanel(table);
		sp.addStyleName("perun-tableScrollPanel");

		// add menu and the table to the main panel
		firstTabPanel.add(tabMenu);
		firstTabPanel.setCellHeight(tabMenu, "30px");
		firstTabPanel.add(alreadyAdded);
		firstTabPanel.add(sp);

		session.getUiElements().resizePerunTable(sp, 350, this);

		this.contentWidget.setWidget(firstTabPanel);

		return getWidget();
	}

	/**
	 * Starts the search for users
	 */
	protected void startSearching(String text){

		users.clearTable();

		// IS searched string IDs?
		if (JsonUtils.isStringWithIds(text)) {
			FindUsersByIdsNotInRpc req = new FindUsersByIdsNotInRpc(new JsonCallbackEvents(){
				@Override
				public void onFinished(JavaScriptObject jso){
					ArrayList<User> usersList = JsonUtils.jsoAsList(jso);
					for (User u : usersList) {
						users.addToTable(u);
					}
				}
			}, text);
			req.retrieveData();
			return;
		}

		users.searchFor(text);

	}

	/**
	 * Rebuild already added widget based on already added admins
	 */
	private void rebuildAlreadyAddedWidget() {

		alreadyAdded.setStyleName("alreadyAdded");
		alreadyAdded.setVisible(!alreadyAddedList.isEmpty());
		alreadyAdded.setWidget(new HTML("<strong>Already added: </strong>"));
		for (int i=0; i<alreadyAddedList.size(); i++) {
			alreadyAdded.getWidget().getElement().setInnerHTML(alreadyAdded.getWidget().getElement().getInnerHTML()+ ((i!=0) ? ", " : "") + alreadyAddedList.get(i).getFullName());
		}
	}

	public Widget getWidget() {
		return this.contentWidget;
	}

	public Widget getTitle() {
		return this.titleWidget;
	}

	public ImageResource getIcon() {
		return SmallIcons.INSTANCE.addIcon();
	}

	@Override
	public int hashCode() {
		// NOTE(review): constant per-class hash (does not include
		// securityTeamId, unlike equals). Legal per the hashCode contract,
		// but all instances land in one bucket; kept as-is for compatibility
		// with the project's other tab items.
		final int prime = 1297;
		int result = 1;
		result = prime * result + 6786786;
		return result;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		AddUserToBlacklistTabItem create = (AddUserToBlacklistTabItem) obj;
		// Two tabs are the same when they target the same SecurityTeam.
		return securityTeamId == create.securityTeamId;
	}

	public boolean multipleInstancesEnabled() {
		return false;
	}

	public void open() { }

	public boolean isAuthorized() {
		// Only security admins of this team may blacklist users.
		return session.isSecurityAdmin(securityTeamId);
	}

}
| bsd-2-clause |
scheib/chromium | components/offline_items_collection/core/android/java/src/org/chromium/components/offline_items_collection/LegacyHelpers.java | 3588 | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.components.offline_items_collection;
import android.text.TextUtils;
import androidx.annotation.Nullable;
/**
* Legacy helper information meant to help with the migration process to OfflineItems.
*/
/**
 * Legacy helper information meant to help with the migration process to OfflineItems.
 */
public class LegacyHelpers {
    // These are legacy namespaces for the purpose of ID generation that will only affect the UI.
    public static final String LEGACY_OFFLINE_PAGE_NAMESPACE = "LEGACY_OFFLINE_PAGE";
    public static final String LEGACY_CONTENT_INDEX_NAMESPACE = "content_index";
    public static final String LEGACY_DOWNLOAD_NAMESPACE = "LEGACY_DOWNLOAD";
    public static final String LEGACY_ANDROID_DOWNLOAD_NAMESPACE = "LEGACY_ANDROID_DOWNLOAD";

    // Prefix used for namespace matching in isLegacyDownload(); presumably
    // download namespaces may carry suffixes (e.g. per-profile variants) —
    // confirm against the code that builds those namespaces.
    private static final String LEGACY_DOWNLOAD_NAMESPACE_PREFIX = "LEGACY_DOWNLOAD";

    /**
     * Helper to build a {@link ContentId} based on a single GUID for old offline content sources
     * (downloads and offline pages).
     * TODO(shaktisahu): Make this function aware of incognito downloads.
     * @param isOfflinePage Whether or not {@code guid} is for an offline page or a download.
     * @param guid The {@code guid} of the download.
     * @return A new {@link ContentId} instance.
     */
    public static ContentId buildLegacyContentId(boolean isOfflinePage, String guid) {
        String namespace =
                isOfflinePage ? LEGACY_OFFLINE_PAGE_NAMESPACE : LEGACY_DOWNLOAD_NAMESPACE;
        return new ContentId(namespace, guid);
    }

    /**
     * Helper to determine if a {@link ContentId} was created from
     * {@link #buildLegacyContentId(boolean, String)} for a download ({@code false} for {@code
     * isOfflinePage}).
     * @param id The {@link ContentId} to inspect.
     * @return Whether or not {@code id} was built for a traditional download.
     */
    public static boolean isLegacyDownload(@Nullable ContentId id) {
        // Use the dedicated prefix constant (previously declared but unused;
        // its value is identical to LEGACY_DOWNLOAD_NAMESPACE, so behavior is
        // unchanged). Prefix match rather than equality: any namespace
        // starting with "LEGACY_DOWNLOAD" counts. Note this does NOT match
        // LEGACY_ANDROID_DOWNLOAD_NAMESPACE.
        return id != null && id.namespace != null
                && id.namespace.startsWith(LEGACY_DOWNLOAD_NAMESPACE_PREFIX);
    }

    /**
     * Helper to determine if a {@link ContentId} is for an content indexed item.
     * @param id The {@link ContentId} to inspect.
     * @return Whether or not {@code id} was built for a content indexed item.
     */
    public static boolean isLegacyContentIndexedItem(@Nullable ContentId id) {
        return id != null && TextUtils.equals(LEGACY_CONTENT_INDEX_NAMESPACE, id.namespace);
    }

    /**
     * Helper to determine if a {@link ContentId} was created from
     * {@link #buildLegacyContentId(boolean, String)} for an offline page ({@code true} for {@code
     * isOfflinePage}).
     * @param id The {@link ContentId} to inspect.
     * @return Whether or not {@code id} was built for a traditional offline page.
     */
    public static boolean isLegacyOfflinePage(@Nullable ContentId id) {
        return id != null && TextUtils.equals(LEGACY_OFFLINE_PAGE_NAMESPACE, id.namespace);
    }

    /**
     * Helper to determine if a {@link ContentId} corresponds to a download through android download
     * manager.
     * @param id The {@link ContentId} to inspect.
     * @return Whether or not {@code id} was built for a android DownloadManager download.
     */
    public static boolean isLegacyAndroidDownload(@Nullable ContentId id) {
        return id != null && TextUtils.equals(LEGACY_ANDROID_DOWNLOAD_NAMESPACE, id.namespace);
    }

    // Utility class; not instantiable.
    private LegacyHelpers() {}
}
| bsd-3-clause |
chinmaygarde/flutter_engine | shell/platform/android/io/flutter/plugin/common/BinaryMessenger.java | 4513 | // Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.common;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.UiThread;
import java.nio.ByteBuffer;
/**
* Facility for communicating with Flutter using asynchronous message passing with binary messages.
* The Flutter Dart code should use <a
* href="https://api.flutter.dev/flutter/services/BinaryMessages-class.html">BinaryMessages</a> to
* participate.
*
* <p>{@code BinaryMessenger} is expected to be utilized from a single thread throughout the
* duration of its existence. If created on the main thread, then all invocations should take place
* on the main thread. If created on a background thread, then all invocations should take place on
* that background thread.
*
* @see BasicMessageChannel , which supports message passing with Strings and semi-structured
* messages.
* @see MethodChannel , which supports communication using asynchronous method invocation.
* @see EventChannel , which supports communication using event streams.
*/
public interface BinaryMessenger {
  /**
   * Sends a binary message to the Flutter application.
   *
   * <p>No response is delivered to the caller; to receive one, use {@link #send(String,
   * ByteBuffer, BinaryReply)} with a non-null callback.
   *
   * @param channel the name {@link String} of the logical channel used for the message.
   * @param message the message payload, a direct-allocated {@link ByteBuffer} with the message
   *     bytes between position zero and current position, or null.
   */
  @UiThread
  void send(@NonNull String channel, @Nullable ByteBuffer message);
  /**
   * Sends a binary message to the Flutter application, optionally expecting a reply.
   *
   * <p>Any uncaught exception thrown by the reply callback will be caught and logged.
   *
   * @param channel the name {@link String} of the logical channel used for the message.
   * @param message the message payload, a direct-allocated {@link ByteBuffer} with the message
   *     bytes between position zero and current position, or null.
   * @param callback a {@link BinaryReply} callback invoked when the Flutter application responds to
   *     the message, possibly null.
   */
  @UiThread
  void send(@NonNull String channel, @Nullable ByteBuffer message, @Nullable BinaryReply callback);
  /**
   * Registers a handler to be invoked when the Flutter application sends a message to its host
   * platform.
   *
   * <p>Registration overwrites any previous registration for the same channel name. Use a null
   * handler to deregister.
   *
   * <p>If no handler has been registered for a particular channel, any incoming message on that
   * channel will be handled silently by sending a null reply.
   *
   * @param channel the name {@link String} of the channel.
   * @param handler a {@link BinaryMessageHandler} to be invoked on incoming messages, or null.
   */
  @UiThread
  void setMessageHandler(@NonNull String channel, @Nullable BinaryMessageHandler handler);
  /** Handler for incoming binary messages from Flutter. */
  interface BinaryMessageHandler {
    /**
     * Handles the specified message.
     *
     * <p>Handler implementations must reply to all incoming messages, by submitting a single reply
     * message to the given {@link BinaryReply}. Failure to do so will result in lingering Flutter
     * reply handlers. The reply may be submitted asynchronously.
     *
     * <p>Any uncaught exception thrown by this method will be caught by the messenger
     * implementation and logged, and a null reply message will be sent back to Flutter.
     *
     * @param message the message {@link ByteBuffer} payload, possibly null.
     * @param reply A {@link BinaryReply} used for submitting a reply back to Flutter. Per the
     *     {@code @UiThread} annotation, implementations should submit it on the UI thread.
     */
    @UiThread
    void onMessage(@Nullable ByteBuffer message, @NonNull BinaryReply reply);
  }
  /**
   * Binary message reply callback. Used to submit a reply to an incoming message from Flutter. Also
   * used in the dual capacity to handle a reply received from Flutter after sending a message.
   */
  interface BinaryReply {
    /**
     * Handles the specified reply.
     *
     * @param reply the reply payload, a direct-allocated {@link ByteBuffer} or null. Senders of
     *     outgoing replies must place the reply bytes between position zero and current position.
     *     Reply receivers can read from the buffer directly.
     */
    @UiThread
    void reply(@Nullable ByteBuffer reply);
  }
}
| bsd-3-clause |